From arigo at codespeak.net Fri Aug 1 13:41:14 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Fri, 1 Aug 2008 13:41:14 +0200 (CEST) Subject: [pypy-svn] r56891 - pypy/dist/pypy/interpreter Message-ID: <20080801114114.F1CFB169E8B@codespeak.net> Author: arigo Date: Fri Aug 1 13:41:13 2008 New Revision: 56891 Modified: pypy/dist/pypy/interpreter/function.py Log: Never crash in __repr__. Modified: pypy/dist/pypy/interpreter/function.py ============================================================================== --- pypy/dist/pypy/interpreter/function.py (original) +++ pypy/dist/pypy/interpreter/function.py Fri Aug 1 13:41:13 2008 @@ -30,7 +30,7 @@ def __repr__(self): # return "function %s.%s" % (self.space, self.name) # maybe we want this shorter: - return "<Function %s>" % self.name + return "<Function %s>" % getattr(self, 'name', '?') def call_args(self, args): return self.code.funcrun(self, args) # delegate activation to code From arigo at codespeak.net Fri Aug 1 13:47:06 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Fri, 1 Aug 2008 13:47:06 +0200 (CEST) Subject: [pypy-svn] r56893 - pypy/dist/pypy/interpreter Message-ID: <20080801114706.C4523169E27@codespeak.net> Author: arigo Date: Fri Aug 1 13:47:06 2008 New Revision: 56893 Modified: pypy/dist/pypy/interpreter/pycode.py Log: Fix test_pickle_generator for Python 2.5. That's a hack, hopefully waiting to disappear when we move to full 2.5 support. Modified: pypy/dist/pypy/interpreter/pycode.py ============================================================================== --- pypy/dist/pypy/interpreter/pycode.py (original) +++ pypy/dist/pypy/interpreter/pycode.py Fri Aug 1 13:47:06 2008 @@ -48,7 +48,7 @@ return argnames, varargname, kwargname cpython_magic, = struct.unpack("<i", imp.get_magic()) Author: pedronis Date: Fri Aug 1 19:20:08 2008 New Revision: 56898 Added: pypy/dist/pypy/interpreter/callbench/ pypy/dist/pypy/interpreter/callbench/bltn04.py pypy/dist/pypy/interpreter/callbench/bltna1.py pypy/dist/pypy/interpreter/callbench/bm14.py pypy/dist/pypy/interpreter/callbench/bmabvararg.py pypy/dist/pypy/interpreter/callbench/bmfilter.py pypy/dist/pypy/interpreter/callbench/bmmore.py pypy/dist/pypy/interpreter/callbench/compare.py pypy/dist/pypy/interpreter/callbench/f04.py pypy/dist/pypy/interpreter/callbench/fabvararg.py pypy/dist/pypy/interpreter/callbench/ffilter.py pypy/dist/pypy/interpreter/callbench/ffunccall.py pypy/dist/pypy/interpreter/callbench/fmore.py pypy/dist/pypy/interpreter/callbench/inst.py pypy/dist/pypy/interpreter/callbench/instcall.py pypy/dist/pypy/interpreter/callbench/sup.py Log: a set of benchmarks that exercises the code paths for calls, in particular most of the current shortcuts and interesting cases Added: pypy/dist/pypy/interpreter/callbench/bltn04.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bltn04.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,40 @@ +from sup import run + +def w(N, start): + c = chr + + start() + i = 0 + while i < N: + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + c(65) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/bltna1.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bltna1.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,22 @@ +from sup 
import run + +def w(N, start): + o = object + start() + i = 0 + while i < N: + o() + o() + o() + o() + o() + o() + o() + o() + o() + o() + o() + o() + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/bm14.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bm14.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,51 @@ +from sup import run + +def w(N, start): + class A(object): + def f0(self): + pass + def f1(self, a): + pass + def f2(self, a, b): + pass + def f3(self, a, b, c): + pass + def f4(self, a, b, c, d): + pass + + a = A() + f0 = a.f0 + f1 = a.f1 + f2 = a.f2 + f3 = a.f3 + f4 = a.f4 + + start() + i = 0 + while i < N: + f0() + f0() + f0() + f0() + f1(1) + f1(1) + f1(1) + f1(1) + f2(1, 2) + f2(1, 2) + f2(1, 2) + f3(1, 2, 3) + f3(1, 2, 3) + f4(1, 2, 3, 4) + + f0() + f0() + f0() + f1(1) + f1(1) + f1(1) + f2(1, 2) + + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/bmabvararg.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bmabvararg.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,29 @@ +from sup import run + +def w(N, start): + class A(object): + def f(self, a, b, *args): + pass + + a = A() + f = a.f + z = (3, 4, 5) + + start() + i = 0 + while i < N: + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/bmfilter.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bmfilter.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,20 @@ +from sup import run + +def w(N, start): + x = range(50) + class A(object): + def f1(self, a): + return False + + x = range(50) + a = A() + f1 = a.f1 + flt = filter + + start() + i = 0 + while i < N: + flt(f1, x) + i+=1 + +run(w, 200) Added: pypy/dist/pypy/interpreter/callbench/bmmore.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/bmmore.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,30 @@ +from sup import run + +def w(N, start): + class A(object): + def f4(self, a, b, c, d): + pass + def f5(self, a, b, c, d, e): + pass + a = A() + f4 = a.f4 + f5 = a.f5 + + start() + i = 0 + while i < N: + f4(1, 2, 3, 4) + f4(1, 2, 3, 4) + f4(1, 2, 3, 4) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f4(1, 2, 3, 4) + f4(1, 2, 3, 4) + f4(1, 2, 3, 4) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/compare.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/compare.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,22 @@ +# compare.py + +import sys + +def main(cur, ref): + cur = open(cur, 'rU') + ref = open(ref, 'rU') + try: + while True: + cur_line = cur.next() + ref_line = ref.next() + cur_name, cur_t = cur_line.split() + ref_name, ref_t = ref_line.split() + assert cur_name == ref_name + cur_t = float(cur_t) + ref_t = float(ref_t) + print "%-16s %.06g (x%.02f)" % (cur_name, cur_t, cur_t/ref_t) + except StopIteration: + pass + +if __name__ == '__main__': + main(sys.argv[1], sys.argv[2]) Added: pypy/dist/pypy/interpreter/callbench/f04.py 
============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/f04.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,39 @@ +from sup import run + +def w(N, start): + def f0(): + pass + def f1(a): + pass + def f2(a, b): + pass + def f3(a, b, c): + pass + def f4(a, b, c, d): + pass + def f5(a, b, c, d, e): + pass + + start() + i = 0 + while i < N: + f0() + f0() + f0() + f1(1) + f1(1) + f2(1, 2) + f3(1, 2, 3) + f4(1, 2, 3, 4) + f5(1, 2, 3, 4, 5) + f0() + f0() + f0() + f1(1) + f1(1) + f2(1, 2) + f3(1, 2, 3) + f4(1, 2, 3, 4) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/fabvararg.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/fabvararg.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,26 @@ +from sup import run + +def w(N, start): + def f(a, b, *args): + pass + + z = (3, 4, 5) + start() + + i = 0 + while i < N: + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + f(1, 2, *z) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/ffilter.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/ffilter.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,14 @@ +from sup import run + +def w(N, start): + def f1(a): + return False + x = range(50) + + start() + i = 0 + while i < N: + filter(f1, x) + i+=1 + +run(w, 200) Added: pypy/dist/pypy/interpreter/callbench/ffunccall.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/ffunccall.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,36 @@ +from sup import run + +def w(N, start): + class A(object): + def foo(self, x): + pass + + __add__ = foo + + a = A() + a1 = A() + + start() + i = 0 + while i < N: + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + a + a1 + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/fmore.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/fmore.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,28 @@ +from sup import run + +def w(N, start): + def f5(a, b, c, d, e): + pass + def f6(a, b, c, d, e, f): + pass + + start() + + i = 0 + while i < N: + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + f5(1, 2, 3, 4, 5) + + f6(1, 2, 3, 4, 5, 6) + f6(1, 2, 3, 4, 5, 6) + f6(1, 2, 3, 4, 5, 6) + f6(1, 2, 3, 4, 5, 6) + f6(1, 2, 3, 4, 5, 6) + f6(1, 2, 3, 4, 5, 6) + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/inst.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/inst.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,22 @@ +from sup import run + +def w(N, start): + class A(object): + def __init__(self): + pass + + start() + i = 0 + while i < N: + A() + A() + A() + A() + A() + A() + A() + A() + A() + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/instcall.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/instcall.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,35 @@ +from sup import run + +def 
w(N, start): + class A(object): + def __call__(self): + pass + + a = A() + + start() + i = 0 + while i < N: + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + a() + i+=1 + +run(w, 1000) Added: pypy/dist/pypy/interpreter/callbench/sup.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/callbench/sup.py Fri Aug 1 19:20:08 2008 @@ -0,0 +1,29 @@ +import sys, time + +def ref(N, start): + start() + i = 0 + while i < N: + i+=1 + + +def run(func, n): + n *= int(sys.argv[1]) + st = [None] + t = time.time + + def start(): + st[0] = t() + + ref(n, start) + elapsed_ref = t() - st[0] + + func(n, start) + elapsed = t() - st[0] + + #if elapsed < elapsed_ref*10: + # print "not enough meat", elapsed, elapsed_ref + + print sys.argv[0].replace('.py', ''), elapsed-elapsed_ref + + From pedronis at codespeak.net Fri Aug 1 19:23:30 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Fri, 1 Aug 2008 19:23:30 +0200 (CEST) Subject: [pypy-svn] r56899 - pypy/dist/pypy/interpreter/callbench Message-ID: <20080801172330.E3F07169EA5@codespeak.net> Author: pedronis Date: Fri Aug 1 19:23:30 2008 New Revision: 56899 Modified: pypy/dist/pypy/interpreter/callbench/ (props changed) pypy/dist/pypy/interpreter/callbench/bltn04.py (props changed) pypy/dist/pypy/interpreter/callbench/bltna1.py (props changed) pypy/dist/pypy/interpreter/callbench/bm14.py (props changed) pypy/dist/pypy/interpreter/callbench/bmabvararg.py (props changed) pypy/dist/pypy/interpreter/callbench/bmfilter.py (props changed) pypy/dist/pypy/interpreter/callbench/bmmore.py (props changed) pypy/dist/pypy/interpreter/callbench/compare.py (props changed) pypy/dist/pypy/interpreter/callbench/f04.py (props changed) pypy/dist/pypy/interpreter/callbench/fabvararg.py (props changed) pypy/dist/pypy/interpreter/callbench/ffilter.py (props changed) pypy/dist/pypy/interpreter/callbench/ffunccall.py (props changed) pypy/dist/pypy/interpreter/callbench/fmore.py (props changed) pypy/dist/pypy/interpreter/callbench/inst.py (props changed) pypy/dist/pypy/interpreter/callbench/instcall.py (props changed) pypy/dist/pypy/interpreter/callbench/sup.py (props changed) Log: fixeol From pedronis at codespeak.net Fri Aug 1 19:36:37 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Fri, 1 Aug 2008 19:36:37 +0200 (CEST) Subject: [pypy-svn] r56900 - pypy/branch/garden-call-code Message-ID: <20080801173637.5E6A4169E7D@codespeak.net> Author: pedronis Date: Fri Aug 1 19:36:36 2008 New Revision: 56900 Added: pypy/branch/garden-call-code/ - copied from r56899, pypy/dist/ Log: a new branch to try to simplify our call code logic, maybe even speed things up a bit From pedronis at codespeak.net Fri Aug 1 20:35:58 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Fri, 1 Aug 2008 20:35:58 +0200 (CEST) Subject: [pypy-svn] r56902 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080801183558.AF439169E7C@codespeak.net> Author: pedronis Date: Fri Aug 1 20:35:58 2008 New Revision: 56902 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: white-box tests about fastcall shortcuts Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py ============================================================================== --- 
pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Fri Aug 1 20:35:58 2008 @@ -340,3 +340,72 @@ # --- with an incompatible class w_meth5 = meth3.descr_method_get(space.wrap('hello'), space.w_str) assert space.is_w(w_meth5, w_meth3) + +class TestShortcuts(object): + + def test_fastcall(self): + space = self.space + + def f(a): + return a + code = PyCode._from_code(self.space, f.func_code) + fn = Function(self.space, code, self.space.newdict()) + + assert fn.code.do_fastcall == 1 + + called = [] + fastcall_1 = fn.code.fastcall_1 + def witness_fastcall_1(space, w_func, w_arg): + called.append(w_func) + return fastcall_1(space, w_func, w_arg) + + fn.code.fastcall_1 = witness_fastcall_1 + + w_3 = space.newint(3) + w_res = space.call_function(fn, w_3) + + assert w_res is w_3 + assert called == [fn] + + called = [] + + w_res = space.appexec([fn, w_3], """(f, x): + return f(x) + """) + + assert w_res is w_3 + assert called == [fn] + + def test_fastcall_method(self): + space = self.space + + def f(self, a): + return a + code = PyCode._from_code(self.space, f.func_code) + fn = Function(self.space, code, self.space.newdict()) + + assert fn.code.do_fastcall == 2 + + called = [] + fastcall_2 = fn.code.fastcall_2 + def witness_fastcall_2(space, w_func, w_arg1, w_arg2): + called.append(w_func) + return fastcall_2(space, w_func, w_arg1, w_arg2) + + fn.code.fastcall_2 = witness_fastcall_2 + + w_3 = space.newint(3) + w_res = space.appexec([fn, w_3], """(f, x): + class A(object): + m = f + y = A().m(x) + b = A().m + z = b(x) + return y is x and z is x + """) + + assert space.is_true(w_res) + assert called == [fn, fn] + + + Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Fri Aug 1 20:35:58 2008 @@ -388,3 +388,74 @@ w_app_g_run = space.wrap(app_g_run) w_bound = space.get(w_app_g_run, w("hello"), space.w_str) assert space.eq_w(space.call_function(w_bound), w(42)) + + def test_interp2app_fastcall(self): + space = self.space + w = space.wrap + w_3 = w(3) + + def f(space): + return w_3 + app_f = gateway.interp2app_temp(f, unwrap_spec=[gateway.ObjSpace]) + w_app_f = w(app_f) + + # sanity + assert isinstance(w_app_f.code, gateway.BuiltinCode0) + + called = [] + fastcall_0 = w_app_f.code.fastcall_0 + def witness_fastcall_0(space, w_func): + called.append(w_func) + return fastcall_0(space, w_func) + + w_app_f.code.fastcall_0 = witness_fastcall_0 + + w_3 = space.newint(3) + w_res = space.call_function(w_app_f) + + assert w_res is w_3 + assert called == [w_app_f] + + called = [] + + w_res = space.appexec([w_app_f], """(f): + return f() + """) + + assert w_res is w_3 + assert called == [w_app_f] + + def test_interp2app_fastcall_method(self): + space = self.space + w = space.wrap + w_3 = w(3) + + def f(space, w_self, w_x): + return w_x + app_f = gateway.interp2app_temp(f, unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.W_Root]) + w_app_f = w(app_f) + + # sanity + assert isinstance(w_app_f.code, gateway.BuiltinCode2) + + called = [] + fastcall_2 = w_app_f.code.fastcall_2 + def witness_fastcall_2(space, w_func, w_a, w_b): + called.append(w_func) + return fastcall_2(space, w_func, w_a, w_b) + + w_app_f.code.fastcall_2 = witness_fastcall_2 + + w_res = 
space.appexec([w_app_f, w_3], """(f, x): + class A(object): + m = f # not a builtin function, so works as method + y = A().m(x) + b = A().m + z = b(x) + return y is x and z is x + """) + + assert space.is_true(w_res) + assert called == [w_app_f, w_app_f] From pedronis at codespeak.net Sat Aug 2 00:55:37 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 00:55:37 +0200 (CEST) Subject: [pypy-svn] r56906 - in pypy/branch/garden-call-code/pypy: interpreter interpreter/test module/operator Message-ID: <20080801225537.AF559169E04@codespeak.net> Author: pedronis Date: Sat Aug 2 00:55:35 2008 New Revision: 56906 Modified: pypy/branch/garden-call-code/pypy/interpreter/eval.py pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/interpreter/gateway.py pypy/branch/garden-call-code/pypy/interpreter/pycode.py pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py Log: first cleanup: avoid the strange interface checking whether the result is None for fastcall_# methods. Slightly nicer code, no relevant perf impact for better or worse. Modified: pypy/branch/garden-call-code/pypy/interpreter/eval.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/eval.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/eval.py Sat Aug 2 00:55:35 2008 @@ -11,6 +11,8 @@ Abstract base class.""" hidden_applevel = False + fast_natural_arity = -1 + def __init__(self, co_name): self.co_name = co_name @@ -58,15 +60,15 @@ # a performance hack (see gateway.BuiltinCode1/2/3 and pycode.PyCode) def fastcall_0(self, space, func): - return None + raise NotImplementedError def fastcall_1(self, space, func, w1): - return None + raise NotImplementedError def fastcall_2(self, space, func, w1, w2): - return None + raise NotImplementedError def fastcall_3(self, space, func, w1, w2, w3): - return None + raise NotImplementedError def fastcall_4(self, space, func, w1, w2, w3, w4): - return None + raise NotImplementedError class Frame(Wrappable): """A frame is an environment supporting the execution of a code object. 
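To state the contract introduced by r56906 once, outside the per-file diffs that follow: a Code subclass now opts into the speed hack by setting fast_natural_arity and implementing exactly the matching fastcall_N; callers compare the arity first, so fastcall_N no longer returns None to mean "take the slow path". A minimal sketch of such a subclass (the class is invented for illustration; the real cases are BuiltinCode0-4, PyCode and SimpleClosureCode in the diffs below):

    class EchoCode(eval.Code):
        # toy one-argument code object following the new shortcut protocol
        fast_natural_arity = 1        # advertise the single supported fast arity

        def fastcall_1(self, space, w_func, w_arg):
            return w_arg              # always a real result, never None

        # a real subclass also implements funcrun() for every other call shape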
Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sat Aug 2 00:55:35 2008 @@ -40,56 +40,39 @@ def funccall(self, *args_w): # speed hack code = self.getcode() # hook for the jit - if len(args_w) == 0: - w_res = code.fastcall_0(self.space, self) - if w_res is not None: - return w_res - elif len(args_w) == 1: - w_res = code.fastcall_1(self.space, self, args_w[0]) - if w_res is not None: - return w_res - elif len(args_w) == 2: - w_res = code.fastcall_2(self.space, self, args_w[0], args_w[1]) - if w_res is not None: - return w_res - elif len(args_w) == 3: - w_res = code.fastcall_3(self.space, self, args_w[0], - args_w[1], args_w[2]) - if w_res is not None: - return w_res - elif len(args_w) == 4: - w_res = code.fastcall_4(self.space, self, args_w[0], - args_w[1], args_w[2], args_w[3]) - if w_res is not None: - return w_res + nargs = len(args_w) + if nargs == code.fast_natural_arity: + if nargs == 0: + return code.fastcall_0(self.space, self) + elif nargs == 1: + return code.fastcall_1(self.space, self, args_w[0]) + elif nargs == 2: + return code.fastcall_2(self.space, self, args_w[0], args_w[1]) + elif nargs == 3: + return code.fastcall_3(self.space, self, args_w[0], + args_w[1], args_w[2]) + elif nargs == 4: + return code.fastcall_4(self.space, self, args_w[0], + args_w[1], args_w[2], args_w[3]) return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack code = self.getcode() # hook for the jit - if nargs == 0: - w_res = code.fastcall_0(self.space, self) - if w_res is not None: - return w_res - elif nargs == 1: - w_res = code.fastcall_1(self.space, self, frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 2: - w_res = code.fastcall_2(self.space, self, frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 3: - w_res = code.fastcall_3(self.space, self, frame.peekvalue(2), + if nargs == code.fast_natural_arity: + if nargs == 0: + return code.fastcall_0(self.space, self) + elif nargs == 1: + return code.fastcall_1(self.space, self, frame.peekvalue(0)) + elif nargs == 2: + return code.fastcall_2(self.space, self, frame.peekvalue(1), + frame.peekvalue(0)) + elif nargs == 3: + return code.fastcall_3(self.space, self, frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 4: - w_res = code.fastcall_4(self.space, self, frame.peekvalue(3), - frame.peekvalue(2), frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res + elif nargs == 4: + return code.fastcall_4(self.space, self, frame.peekvalue(3), + frame.peekvalue(2), frame.peekvalue(1), + frame.peekvalue(0)) args = frame.make_arguments(nargs) try: return self.call_args(args) @@ -99,24 +82,21 @@ def funccall_obj_valuestack(self, w_obj, nargs, frame): # speed hack code = self.getcode() # hook for the jit - if nargs == 0: - w_res = code.fastcall_1(self.space, self, w_obj) - if w_res is not None: - return w_res - elif nargs == 1: - w_res = code.fastcall_2(self.space, self, w_obj, frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 2: - w_res = code.fastcall_3(self.space, self, w_obj, frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 3: - w_res = 
code.fastcall_4(self.space, self, w_obj, frame.peekvalue(2), - frame.peekvalue(1), frame.peekvalue(0)) - if w_res is not None: - return w_res + if nargs+1 == code.fast_natural_arity: + if nargs == 0: + return code.fastcall_1(self.space, self, w_obj) + elif nargs == 1: + return code.fastcall_2(self.space, self, w_obj, + frame.peekvalue(0)) + elif nargs == 2: + return code.fastcall_3(self.space, self, w_obj, + frame.peekvalue(1), + frame.peekvalue(0)) + elif nargs == 3: + return code.fastcall_4(self.space, self, w_obj, + frame.peekvalue(2), + frame.peekvalue(1), + frame.peekvalue(0)) stkargs = frame.make_arguments(nargs) args = stkargs.prepend(w_obj) try: Modified: pypy/branch/garden-call-code/pypy/interpreter/gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/gateway.py Sat Aug 2 00:55:35 2008 @@ -559,6 +559,8 @@ return w_result class BuiltinCode0(BuiltinCode): + fast_natural_arity = 0 + def fastcall_0(self, space, w_func): self = hint(self, deepfreeze=True) try: @@ -575,6 +577,8 @@ return w_result class BuiltinCode1(BuiltinCode): + fast_natural_arity = 1 + def fastcall_1(self, space, w_func, w1): self = hint(self, deepfreeze=True) try: @@ -598,6 +602,8 @@ return w_result class BuiltinCode2(BuiltinCode): + fast_natural_arity = 2 + def fastcall_2(self, space, w_func, w1, w2): self = hint(self, deepfreeze=True) try: @@ -621,6 +627,8 @@ return w_result class BuiltinCode3(BuiltinCode): + fast_natural_arity = 3 + def fastcall_3(self, space, func, w1, w2, w3): self = hint(self, deepfreeze=True) try: @@ -644,6 +652,8 @@ return w_result class BuiltinCode4(BuiltinCode): + fast_natural_arity = 4 + def fastcall_4(self, space, func, w1, w2, w3, w4): self = hint(self, deepfreeze=True) try: Modified: pypy/branch/garden-call-code/pypy/interpreter/pycode.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/pycode.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/pycode.py Sat Aug 2 00:55:35 2008 @@ -162,7 +162,7 @@ def _compute_fastcall(self): # Speed hack! 
- self.do_fastcall = -1 + self.fast_natural_arity = -1 if not (0 <= self.co_argcount <= 4): return if self.co_flags & (CO_VARARGS | CO_VARKEYWORDS): @@ -170,52 +170,42 @@ if len(self._args_as_cellvars) > 0: return - self.do_fastcall = self.co_argcount + self.fast_natural_arity = self.co_argcount def fastcall_0(self, space, w_func): - if self.do_fastcall == 0: - frame = space.createframe(self, w_func.w_func_globals, + frame = space.createframe(self, w_func.w_func_globals, w_func.closure) - return frame.run() - return None + return frame.run() def fastcall_1(self, space, w_func, w_arg): - if self.do_fastcall == 1: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg # frame.setfastscope([w_arg]) - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg # frame.setfastscope([w_arg]) + return frame.run() def fastcall_2(self, space, w_func, w_arg1, w_arg2): - if self.do_fastcall == 2: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + return frame.run() def fastcall_3(self, space, w_func, w_arg1, w_arg2, w_arg3): - if self.do_fastcall == 3: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - frame.fastlocals_w[2] = w_arg3 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + frame.fastlocals_w[2] = w_arg3 + return frame.run() def fastcall_4(self, space, w_func, w_arg1, w_arg2, w_arg3, w_arg4): - if self.do_fastcall == 4: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - frame.fastlocals_w[2] = w_arg3 - frame.fastlocals_w[3] = w_arg4 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + frame.fastlocals_w[2] = w_arg3 + frame.fastlocals_w[3] = w_arg4 + return frame.run() def funcrun(self, func, args): frame = self.space.createframe(self, func.w_func_globals, Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Sat Aug 2 00:55:35 2008 @@ -351,7 +351,7 @@ code = PyCode._from_code(self.space, f.func_code) fn = Function(self.space, code, self.space.newdict()) - assert fn.code.do_fastcall == 1 + assert fn.code.fast_natural_arity == 1 called = [] fastcall_1 = fn.code.fastcall_1 @@ -384,7 +384,7 @@ code = PyCode._from_code(self.space, f.func_code) fn = Function(self.space, code, self.space.newdict()) - assert fn.code.do_fastcall == 2 + assert fn.code.fast_natural_arity == 2 called = [] fastcall_2 = fn.code.fastcall_2 Modified: 
pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py (original) +++ pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py Sat Aug 2 00:55:35 2008 @@ -173,6 +173,8 @@ class SimpleClosureCode(eval.Code): + fast_natural_arity = 1 + sig = (['obj'], None, None) def __init__(self, co_name, is_attrgetter): From pedronis at codespeak.net Sat Aug 2 01:40:14 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 01:40:14 +0200 (CEST) Subject: [pypy-svn] r56907 - pypy/branch/garden-call-code/pypy/interpreter/callbench Message-ID: <20080801234014.C65C41684BA@codespeak.net> Author: pedronis Date: Sat Aug 2 01:40:12 2008 New Revision: 56907 Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/inst.py Log: benchmark the case of some actual parameters being passed around Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/inst.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/callbench/inst.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/inst.py Sat Aug 2 01:40:12 2008 @@ -5,6 +5,10 @@ def __init__(self): pass + class B(object): + def __init__(self, x, y): + pass + start() i = 0 while i < N: @@ -13,10 +17,10 @@ A() A() A() - A() - A() - A() - A() + B(1, 2) + B(1, 2) + B(1, 2) + B(1, 2) i+=1 run(w, 1000) From pedronis at codespeak.net Sat Aug 2 13:08:02 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 13:08:02 +0200 (CEST) Subject: [pypy-svn] r56918 - pypy/branch/garden-call-code/pypy/objspace/std Message-ID: <20080802110802.51F2E169E98@codespeak.net> Author: pedronis Date: Sat Aug 2 13:08:00 2008 New Revision: 56918 Modified: pypy/branch/garden-call-code/pypy/objspace/std/proxyobject.py Log: working toward reducing Arguments interface: kill popfirst usage here Modified: pypy/branch/garden-call-code/pypy/objspace/std/proxyobject.py ============================================================================== --- pypy/branch/garden-call-code/pypy/objspace/std/proxyobject.py (original) +++ pypy/branch/garden-call-code/pypy/objspace/std/proxyobject.py Sat Aug 2 13:08:00 2008 @@ -5,7 +5,7 @@ from pypy.objspace.std.objspace import * from pypy.objspace.std.proxy_helpers import register_type from pypy.interpreter.error import OperationError -from pypy.interpreter import baseobjspace +from pypy.interpreter import baseobjspace, argument #class W_Transparent(W_Object): # def __init__(self, w_controller): @@ -22,8 +22,10 @@ self.space = space def descr_call_mismatch(self, space, name, reqcls, args): - _, args = args.popfirst() - args = args.prepend(space.wrap(name)) + args_w, kwds_w = args.unpack() + args_w = args_w[:] + args_w[0] = space.wrap(name) + args = argument.Arguments(space, args_w, kwds_w) return space.call_args(self.w_controller, args) def getclass(self, space): From arigo at codespeak.net Sat Aug 2 14:03:15 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 14:03:15 +0200 (CEST) Subject: [pypy-svn] r56919 - in pypy/branch/opt-option/pypy: config config/test translator translator/goal Message-ID: <20080802120315.4812D169E98@codespeak.net> Author: arigo Date: Sat Aug 2 14:03:13 2008 New Revision: 56919 Modified: pypy/branch/opt-option/pypy/config/config.py 
pypy/branch/opt-option/pypy/config/pypyoption.py pypy/branch/opt-option/pypy/config/test/test_config.py pypy/branch/opt-option/pypy/config/test/test_pypyoption.py pypy/branch/opt-option/pypy/config/translationoption.py pypy/branch/opt-option/pypy/translator/driver.py pypy/branch/opt-option/pypy/translator/goal/translate.py Log: In-progress: implemented the '--opt' option. Modified: pypy/branch/opt-option/pypy/config/config.py ============================================================================== --- pypy/branch/opt-option/pypy/config/config.py (original) +++ pypy/branch/opt-option/pypy/config/config.py Sat Aug 2 14:03:13 2008 @@ -13,6 +13,9 @@ class ConfigError(Exception): pass +class ConflictConfigError(ConfigError): + pass + class Config(object): _cfgimpl_frozen = False @@ -99,11 +102,23 @@ if oldvalue != value and oldowner not in ("default", "suggested"): if who in ("default", "suggested"): return - raise ConfigError('cannot override value to %s for option %s' % - (value, name)) + raise ConflictConfigError('cannot override value to %s for ' + 'option %s' % (value, name)) child.setoption(self, value, who) self._cfgimpl_value_owners[name] = who + def suggest(self, **kwargs): + for name, value in kwargs.items(): + self.suggestoption(name, value) + + def suggestoption(self, name, value): + try: + self.setoption(name, value, "suggested") + except ConflictConfigError: + # setting didn't work, but that is fine, since it is + # suggested only + pass + def set(self, **kwargs): all_paths = [p.split(".") for p in self.getpaths()] for key, value in kwargs.iteritems(): @@ -248,12 +263,7 @@ for path, reqvalue in self._suggests.get(value, []): toplevel = config._cfgimpl_get_toplevel() homeconfig, name = toplevel._cfgimpl_get_home_by_path(path) - try: - homeconfig.setoption(name, reqvalue, "suggested") - except ConfigError: - # setting didn't work, but that is fine, since it is - # suggested only - pass + homeconfig.suggestoption(name, reqvalue) super(ChoiceOption, self).setoption(config, value, who) def validate(self, value): @@ -298,12 +308,7 @@ for path, reqvalue in self._suggests: toplevel = config._cfgimpl_get_toplevel() homeconfig, name = toplevel._cfgimpl_get_home_by_path(path) - try: - homeconfig.setoption(name, reqvalue, "suggested") - except ConfigError: - # setting didn't work, but that is fine, since it is - # suggested - pass + homeconfig.suggestoption(name, reqvalue) super(BoolOption, self).setoption(config, value, who) Modified: pypy/branch/opt-option/pypy/config/pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/pypyoption.py Sat Aug 2 14:03:13 2008 @@ -3,7 +3,7 @@ import sys from pypy.config.config import OptionDescription, BoolOption, IntOption, ArbitraryOption from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config -from pypy.config.config import ConfigError +from pypy.config.config import ConflictConfigError modulepath = py.magic.autopath().dirpath().dirpath().join("module") all_modules = [p.basename for p in modulepath.listdir() @@ -65,7 +65,8 @@ "The module %r is disabled\n" % (modname,) + "because importing %s raised %s\n" % (name, errcls) + str(e)) - raise ConfigError("--withmod-%s: %s" % (modname, errcls)) + raise ConflictConfigError("--withmod-%s: %s" % (modname, + errcls)) return validator else: return None @@ -285,7 +286,7 @@ "special case the 'list[integer]' expressions", default=False), 
BoolOption("builtinshortcut", - "a shortcut for operations between built-in types XXX BROKEN", + "a shortcut for operations between built-in types", default=False), BoolOption("getattributeshortcut", "track types that override __getattribute__", @@ -295,45 +296,7 @@ "a instrumentation option: before exit, print the types seen by " "certain simpler bytecodes", default=False), - - BoolOption("allopts", - "enable all thought-to-be-working optimizations", - default=False, - suggests=[("objspace.opcodes.CALL_LIKELY_BUILTIN", True), - ("objspace.opcodes.CALL_METHOD", True), - ("translation.withsmallfuncsets", 5), - ("translation.profopt", - "-c 'from richards import main;main(); from test import pystone; pystone.main()'"), - ("objspace.std.withmultidict", True), -# ("objspace.std.withstrjoin", True), - ("objspace.std.withshadowtracking", True), -# ("objspace.std.withstrslice", True), -# ("objspace.std.withsmallint", True), - ("objspace.std.withrangelist", True), - ("objspace.std.withmethodcache", True), -# ("objspace.std.withfastslice", True), - ("objspace.std.withprebuiltchar", True), - ("objspace.std.builtinshortcut", True), - ("objspace.std.optimized_list_getitem", True), - ("objspace.std.getattributeshortcut", True), - ("translation.list_comprehension_operations",True), - ("translation.backendopt.remove_asserts",True), - ], - cmdline="--allopts --faassen", negation=False), - -## BoolOption("llvmallopts", -## "enable all optimizations, and use llvm compiled via C", -## default=False, -## requires=[("objspace.std.allopts", True), -## ("translation.llvm_via_c", True), -## ("translation.backend", "llvm")], -## cmdline="--llvm-faassen", negation=False), ]), - #BoolOption("lowmem", "Try to use less memory during translation", - # default=False, cmdline="--lowmem", - # requires=[("objspace.geninterp", False)]), - - ]) def get_pypy_config(overrides=None, translating=False): @@ -342,6 +305,46 @@ pypy_optiondescription, overrides=overrides, translating=translating) +def set_pypy_opt_level(config, level): + """Apply PyPy-specific optimization suggestions on the 'config'. + The optimizations depend on the selected level and possibly on the backend. + """ + # warning: during some tests, the type_system and the backend may be + # unspecified and we get None. It shouldn't occur in translate.py though. 
+ type_system = config.translation.type_system + backend = config.translation.backend + + # all the good optimizations for PyPy should be listed here + if level in ['2', '3']: + config.objspace.opcodes.suggest(CALL_LIKELY_BUILTIN=True) + config.objspace.opcodes.suggest(CALL_METHOD=True) + config.objspace.std.suggest(withmultidict=True) + config.objspace.std.suggest(withshadowtracking=True) + config.objspace.std.suggest(withrangelist=True) + config.objspace.std.suggest(withmethodcache=True) + config.objspace.std.suggest(withprebuiltchar=True) + config.objspace.std.suggest(builtinshortcut=True) + config.objspace.std.suggest(optimized_list_getitem=True) + config.objspace.std.suggest(getattributeshortcut=True) + + # extra costly optimizations only go in level 3 + if level == '3': + config.translation.suggest(profopt= + "-c 'from richards import main;main(); " + "from test import pystone; pystone.main()'") + + # memory-saving optimizations + if level == 'mem': + config.objspace.std.suggest(withsmallint=True) + config.objspace.std.suggest(withrangelist=True) + config.objspace.std.suggest(withprebuiltchar=True) + config.objspace.std.suggest(withsharingdict=True) + + # completely disable geninterp in a level 0 translation + if level == '0': + config.objspace.suggest(geninterp=False) + + if __name__ == '__main__': config = get_pypy_config() print config.getpaths() Modified: pypy/branch/opt-option/pypy/config/test/test_config.py ============================================================================== --- pypy/branch/opt-option/pypy/config/test/test_config.py (original) +++ pypy/branch/opt-option/pypy/config/test/test_config.py Sat Aug 2 14:03:13 2008 @@ -530,6 +530,15 @@ assert not c.toplevel +def test_bogus_suggests(): + descr = OptionDescription("test", '', [ + BoolOption("toplevel", "", suggests=[("opt", "bogusvalue")]), + ChoiceOption("opt", "", ["a", "b", "c"], "a"), + ]) + c = Config(descr) + py.test.raises(ConfigError, "c.toplevel = True") + + def test_delattr(): descr = OptionDescription("opt", "", [ OptionDescription("s1", "", [ @@ -549,7 +558,7 @@ def my_validator_2(config): assert config is c - raise ConfigError + raise ConflictConfigError descr = OptionDescription("opt", "", [ BoolOption('booloption1', 'option test1', default=False, Modified: pypy/branch/opt-option/pypy/config/test/test_pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/test/test_pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/test/test_pypyoption.py Sat Aug 2 14:03:13 2008 @@ -1,6 +1,7 @@ import py -from pypy.config.pypyoption import get_pypy_config +from pypy.config.pypyoption import get_pypy_config, set_pypy_opt_level from pypy.config.config import Config, ConfigError +from pypy.config.translationoption import set_opt_level thisdir = py.magic.autopath().dirpath() @@ -29,10 +30,32 @@ conf.translation.gc = name assert conf.translation.gctransformer == "framework" +def test_set_opt_level(): + conf = get_pypy_config() + set_opt_level(conf, '0') + assert conf.translation.gc == 'boehm' + assert conf.translation.backendopt.none == True + conf = get_pypy_config() + set_opt_level(conf, '2') + assert conf.translation.gc != 'boehm' + assert not conf.translation.backendopt.none + conf = get_pypy_config() + set_opt_level(conf, 'mem') + assert conf.translation.gc == 'marksweep' + assert not conf.translation.backendopt.none + +def test_set_pypy_opt_level(): + conf = get_pypy_config() + set_pypy_opt_level(conf, '2') + assert 
conf.objspace.std.withmultidict + conf = get_pypy_config() + set_pypy_opt_level(conf, '0') + assert not conf.objspace.std.withmultidict + def test_rweakref_required(): conf = get_pypy_config() conf.translation.rweakref = False - conf.objspace.std.allopts = True + set_pypy_opt_level(conf, '3') assert not conf.objspace.std.withtypeversion assert not conf.objspace.std.withmethodcache Modified: pypy/branch/opt-option/pypy/config/translationoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/translationoption.py (original) +++ pypy/branch/opt-option/pypy/config/translationoption.py Sat Aug 2 14:03:13 2008 @@ -2,12 +2,14 @@ import py, os from pypy.config.config import OptionDescription, BoolOption, IntOption, ArbitraryOption, FloatOption from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config +from pypy.config.config import ConfigError DEFL_INLINE_THRESHOLD = 32.4 # just enough to inline add__Int_Int() # and just small enough to prevend inlining of some rlist functions. DEFL_PROF_BASED_INLINE_THRESHOLD = 32.4 DEFL_CLEVER_MALLOC_REMOVAL_INLINE_THRESHOLD = 32.4 +DEFL_LOW_INLINE_THRESHOLD = DEFL_INLINE_THRESHOLD / 2.0 translation_optiondescription = OptionDescription( "translation", "Translation Options", [ @@ -39,6 +41,8 @@ BoolOption("llvm_via_c", "compile llvm via C", default=False, cmdline="--llvm-via-c", requires=[("translation.backend", "llvm")]), + + # gc ChoiceOption("gc", "Garbage Collection Strategy", ["boehm", "ref", "marksweep", "semispace", "statistics", "generation", "hybrid", "none"], @@ -82,18 +86,10 @@ "llvmgc": [("translation.gc", "generation")], "asmgcc": [("translation.gc", "generation")], }), + + # other noticeable options BoolOption("thread", "enable use of threading primitives", default=False, cmdline="--thread"), - BoolOption("verbose", "Print extra information", default=False), - BoolOption("debug", "Record extra annotation information", - cmdline="-d --debug", default=False), - BoolOption("insist", "Try hard to go on RTyping", default=False, - cmdline="--insist"), - IntOption("withsmallfuncsets", - "Represent groups of less funtions than this as indices into an array", - default=0), - BoolOption("countmallocs", "Count mallocs and frees", default=False, - cmdline=None), BoolOption("sandbox", "Produce a fully-sandboxed executable", default=False, cmdline="--sandbox", requires=[("translation.thread", False)]), @@ -101,6 +97,11 @@ default=True), # misc + BoolOption("verbose", "Print extra information", default=False), + BoolOption("debug", "Record extra annotation information", + cmdline="-d --debug", default=True), + BoolOption("insist", "Try hard to go on RTyping", default=False, + cmdline="--insist"), StrOption("cc", "Specify compiler to use for compiling generated C", cmdline="--cc"), StrOption("profopt", "Specify profile based optimization script", cmdline="--profopt"), @@ -108,6 +109,13 @@ default=False, cmdline="--no-profopt", negation=False), BoolOption("instrument", "internal: turn instrumentation on", default=False, cmdline=None), + BoolOption("countmallocs", "Count mallocs and frees", default=False, + cmdline=None), + ChoiceOption("fork_before", + "(UNIX) Create restartable checkpoint before step", + ["annotate", "rtype", "backendopt", "database", "source", + "hintannotate", "timeshift"], + default=None, cmdline="--fork-before"), ArbitraryOption("instrumentctl", "internal", default=None), @@ -140,11 +148,9 @@ "attempt to pre-allocate the list", default=False, 
cmdline='--listcompr'), - ChoiceOption("fork_before", - "(UNIX) Create restartable checkpoint before step", - ["annotate", "rtype", "backendopt", "database", "source", - "hintannotate", "timeshift"], - default=None, cmdline="--fork-before"), + IntOption("withsmallfuncsets", + "Represent groups of less funtions than this as indices into an array", + default=0), # options for ootype OptionDescription("ootype", "Object Oriented Typesystem options", [ @@ -273,3 +279,61 @@ value = getattr(existing_config, child._name) config._cfgimpl_values[child._name] = value return config + +# ____________________________________________________________ + +OPT_LEVELS = ['0', '1', 'size', 'mem', '2', '3'] +DEFAULT_OPT_LEVEL = '2' + +OPT_TABLE_DOC = { + '0': 'No optimization. Uses the Boehm GC.', + '1': 'Enable a default set of optimizations. Uses the Boehm GC.', + 'size': 'Optimize for the size of the executable. Uses the Boehm GC.', + 'mem': 'Optimize for run-time memory usage and use a memory-saving GC.', + '2': 'Enable most optimizations and use a high-performance GC.', + '3': 'Enable all optimizations and use a high-performance GC.', + } + +OPT_TABLE = { + #level: gc backend optimizations... + '0': 'boehm nobackendopt', + '1': 'boehm lowinline', + 'size': 'boehm lowinline remove_asserts', + 'mem': 'marksweep lowinline remove_asserts', + '2': 'hybrid extraopts', + '3': 'hybrid extraopts remove_asserts', + } + +def set_opt_level(config, level): + """Apply optimization suggestions on the 'config'. + The optimizations depend on the selected level and possibly on the backend. + """ + # warning: during some tests, the type_system and the backend may be + # unspecified and we get None. It shouldn't occur in translate.py though. + type_system = config.translation.type_system + backend = config.translation.backend + + try: + opts = OPT_TABLE[level] + except KeyError: + raise ConfigError("no such optimization level: %r" % (level,)) + words = opts.split() + gc = words.pop(0) + + # set the GC (only meaningful with lltype) + config.translation.suggest(gc=gc) + + # set the backendopts + for word in words: + if word == 'nobackendopt': + config.translation.backendopt.suggest(none=True) + elif word == 'lowinline': + config.translation.backendopt.suggest(inline_threshold= + DEFL_LOW_INLINE_THRESHOLD) + elif word == 'remove_asserts': + config.translation.backendopt.suggest(remove_asserts=True) + elif word == 'extraopts': + config.translation.suggest(withsmallfuncsets=5) + config.translation.suggest(list_comprehension_operations=True) + else: + raise ValueError(word) Modified: pypy/branch/opt-option/pypy/translator/driver.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/driver.py (original) +++ pypy/branch/opt-option/pypy/translator/driver.py Sat Aug 2 14:03:13 2008 @@ -15,22 +15,6 @@ log = py.log.Producer("translation") py.log.setconsumer("translation", ansi_log) -DEFAULTS = { - 'translation.gc': 'ref', - 'translation.cc': None, - 'translation.profopt': None, - - 'translation.thread': False, # influences GC policy - - 'translation.stackless': False, - 'translation.debug': True, - 'translation.insist': False, - 'translation.backend': 'c', - 'translation.fork_before': None, - 'translation.backendopt.raisingop2direct_call' : False, - 'translation.backendopt.merge_if_blocks': True, -} - def taskdef(taskfunc, deps, title, new_state=None, expected_states=[], idemp=False, earlycheck=None): @@ -93,7 +77,7 @@ if config is None: from pypy.config.pypyoption 
import get_pypy_config - config = get_pypy_config(DEFAULTS, translating=True) + config = get_pypy_config(translating=True) self.config = config if overrides is not None: self.config.override(overrides) Modified: pypy/branch/opt-option/pypy/translator/goal/translate.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/translate.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/translate.py Sat Aug 2 14:03:13 2008 @@ -12,6 +12,8 @@ ArbitraryOption, StrOption, IntOption, Config, \ ChoiceOption, OptHelpFormatter from pypy.config.translationoption import get_combined_translation_config +from pypy.config.translationoption import set_opt_level +from pypy.config.translationoption import OPT_LEVELS, DEFAULT_OPT_LEVEL GOALS= [ @@ -46,6 +48,9 @@ translate_optiondescr = OptionDescription("translate", "XXX", [ StrOption("targetspec", "XXX", default='targetpypystandalone', cmdline=None), + ChoiceOption("opt", + "optimization level", OPT_LEVELS, default=DEFAULT_OPT_LEVEL, + cmdline="--opt"), BoolOption("profile", "cProfile (to debug the speed of the translation process)", default=False, @@ -72,17 +77,7 @@ OVERRIDES = { 'translation.debug': False, - 'translation.insist': False, - - 'translation.gc': 'boehm', 'translation.backend': 'c', - 'translation.stackless': False, - 'translation.backendopt.raisingop2direct_call' : False, - 'translation.backendopt.merge_if_blocks': True, - - 'translation.cc': None, - 'translation.profopt': None, - 'translation.output': None, } import py @@ -162,6 +157,9 @@ existing_config=config, translating=True) + # apply the optimization level settings + set_opt_level(config, translateconfig.opt) + # let the target modify or prepare itself # based on the config if 'handle_config' in targetspec_dic: From arigo at codespeak.net Sat Aug 2 14:32:38 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 14:32:38 +0200 (CEST) Subject: [pypy-svn] r56920 - in pypy/branch/opt-option/pypy: config translator/goal Message-ID: <20080802123238.DDF18169E3F@codespeak.net> Author: arigo Date: Sat Aug 2 14:32:38 2008 New Revision: 56920 Removed: pypy/branch/opt-option/pypy/translator/goal/targetmultiplespaces.py Modified: pypy/branch/opt-option/pypy/config/pypyoption.py pypy/branch/opt-option/pypy/translator/goal/targetprologstandalone.py pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py pypy/branch/opt-option/pypy/translator/goal/translate.py Log: Make the --opt option to translate.py also affect the PyPy objspace settings. 
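To make the intended use of the two helpers concrete, here is a minimal sketch assembled from the tests added in this commit series (the level '2' is just an example; the actual wiring through translate.py and targetpypystandalone.py is in the diffs below):

    from pypy.config.pypyoption import get_pypy_config, set_pypy_opt_level
    from pypy.config.translationoption import set_opt_level

    config = get_pypy_config(translating=True)
    set_opt_level(config, '2')        # suggests GC, backend options and backendopts
    set_pypy_opt_level(config, '2')   # suggests objspace options for the same level
    assert config.objspace.std.withmultidict   # as checked in test_pypyoption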
Modified: pypy/branch/opt-option/pypy/config/pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/pypyoption.py Sat Aug 2 14:32:38 2008 @@ -296,6 +296,9 @@ "a instrumentation option: before exit, print the types seen by " "certain simpler bytecodes", default=False), + ChoiceOption("multimethods", "the multimethod implementation to use", + ["doubledispatch", "mrd"], + default="mrd"), ]), ]) @@ -344,6 +347,10 @@ if level == '0': config.objspace.suggest(geninterp=False) + # some optimizations have different effects depending on the typesystem + if type_system == 'ootype': + config.objspace.std.suggest(multimethods="doubledispatch") + if __name__ == '__main__': config = get_pypy_config() Modified: pypy/branch/opt-option/pypy/translator/goal/targetprologstandalone.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/targetprologstandalone.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/targetprologstandalone.py Sat Aug 2 14:32:38 2008 @@ -27,9 +27,7 @@ # _____ Define and setup target ___ -def handle_config(config): - return - config.translation.stackless = True +# XXX this should suggest --stackless somehow def target(driver, args): driver.exe_name = 'pyrolog-%(backend)s' Modified: pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py Sat Aug 2 14:32:38 2008 @@ -90,26 +90,29 @@ parserkwargs={'usage': self.usage}) return parser - def handle_config(self, config): + def handle_config(self, config, translateconfig): + self.translateconfig = translateconfig + # set up the objspace optimizations based on the --opt argument + from pypy.config.pypyoption import set_pypy_opt_level + set_pypy_opt_level(config, translateconfig.opt) + # as of revision 27081, multimethod.py uses the InstallerVersion1 by default # because it is much faster both to initialize and run on top of CPython. # The InstallerVersion2 is optimized for making a translator-friendly # structure for low level backends. However, InstallerVersion1 is still # preferable for high level backends, so we patch here. 
+ from pypy.objspace.std import multimethod - if config.translation.type_system == 'lltype': + if config.objspace.std.multimethods == 'mrd': assert multimethod.InstallerVersion1.instance_counter == 0,\ 'The wrong Installer version has already been instatiated' multimethod.Installer = multimethod.InstallerVersion2 - else: + elif config.objspace.std.multimethods == 'doubledispatch': # don't rely on the default, set again here assert multimethod.InstallerVersion2.instance_counter == 0,\ 'The wrong Installer version has already been instatiated' multimethod.Installer = multimethod.InstallerVersion1 - def handle_translate_config(self, translateconfig): - self.translateconfig = translateconfig - def print_help(self, config): self.opt_parser(config).print_help() @@ -184,7 +187,7 @@ def interface(self, ns): for name in ['take_options', 'handle_config', 'print_help', 'target', - 'handle_translate_config', 'portal', + 'portal', 'get_additional_config_options']: ns[name] = getattr(self, name) Modified: pypy/branch/opt-option/pypy/translator/goal/translate.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/translate.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/translate.py Sat Aug 2 14:32:38 2008 @@ -163,10 +163,7 @@ # let the target modify or prepare itself # based on the config if 'handle_config' in targetspec_dic: - targetspec_dic['handle_config'](config) - - if 'handle_translate_config' in targetspec_dic: - targetspec_dic['handle_translate_config'](translateconfig) + targetspec_dic['handle_config'](config, translateconfig) if translateconfig.help: opt_parser.print_help() From arigo at codespeak.net Sat Aug 2 14:38:16 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 14:38:16 +0200 (CEST) Subject: [pypy-svn] r56921 - in pypy/branch/opt-option/pypy: doc/config translator/goal Message-ID: <20080802123816.E2008169E4A@codespeak.net> Author: arigo Date: Sat Aug 2 14:38:16 2008 New Revision: 56921 Added: pypy/branch/opt-option/pypy/doc/config/objspace.std.multimethods.txt (contents, props changed) Modified: pypy/branch/opt-option/pypy/translator/goal/translate.py Log: * Add -O# as synonym for --opt=#. * Document new multimethod option. Added: pypy/branch/opt-option/pypy/doc/config/objspace.std.multimethods.txt ============================================================================== --- (empty file) +++ pypy/branch/opt-option/pypy/doc/config/objspace.std.multimethods.txt Sat Aug 2 14:38:16 2008 @@ -0,0 +1,8 @@ +Choose the multimethod implementation. + +* ``doubledispatch`` turns + a multimethod call into a sequence of normal method calls. + +* ``mrd`` uses a technique known as Multiple Row Displacement + which precomputes a few compact tables of numbers and + function pointers. 
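A side note on why -O/--opt composes with explicit command-line settings: the optimization levels only ever go through the suggest() machinery added to pypy/config/config.py in r56919, and a suggestion never overrides a value that was set explicitly. A small self-contained sketch (the option description here is invented for illustration; the real ones live in translationoption.py and pypyoption.py):

    from pypy.config.config import Config, OptionDescription, ChoiceOption

    descr = OptionDescription("translation", "", [
        ChoiceOption("gc", "which GC to use", ["boehm", "hybrid"], "boehm"),
    ])
    config = Config(descr)
    config.gc = "hybrid"          # explicit (user) setting
    config.suggest(gc="boehm")    # what set_opt_level() effectively does
    assert config.gc == "hybrid"  # the suggestion silently loses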
Modified: pypy/branch/opt-option/pypy/translator/goal/translate.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/translate.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/translate.py Sat Aug 2 14:38:16 2008 @@ -50,7 +50,7 @@ cmdline=None), ChoiceOption("opt", "optimization level", OPT_LEVELS, default=DEFAULT_OPT_LEVEL, - cmdline="--opt"), + cmdline="--opt -O"), BoolOption("profile", "cProfile (to debug the speed of the translation process)", default=False, From arigo at codespeak.net Sat Aug 2 14:41:21 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 14:41:21 +0200 (CEST) Subject: [pypy-svn] r56922 - in pypy/branch/opt-option/pypy: tool/bench/test translator/goal Message-ID: <20080802124121.B8894169E4D@codespeak.net> Author: arigo Date: Sat Aug 2 14:41:20 2008 New Revision: 56922 Modified: pypy/branch/opt-option/pypy/tool/bench/test/test_pypyresult.py pypy/branch/opt-option/pypy/translator/goal/bench-cronjob.py Log: Remove the last references to --faassen. Modified: pypy/branch/opt-option/pypy/tool/bench/test/test_pypyresult.py ============================================================================== --- pypy/branch/opt-option/pypy/tool/bench/test/test_pypyresult.py (original) +++ pypy/branch/opt-option/pypy/tool/bench/test/test_pypyresult.py Sat Aug 2 14:41:20 2008 @@ -12,8 +12,8 @@ return cache[0] pp = tmpdir.join("testpickle") f = pp.open("wb") - pickle.dump({'./pypy-llvm-39474-faassen-c_richards': 5}, f) - pickle.dump({'./pypy-llvm-39474-faassen-c_richards': 42.0}, f) + pickle.dump({'./pypy-llvm-39474-O3-c_richards': 5}, f) + pickle.dump({'./pypy-llvm-39474-O3-c_richards': 42.0}, f) f.close() cache.append(pp) return pp @@ -38,9 +38,9 @@ assert res.besttime == 2.0 def test_BenchResult_pypy(): - res = BenchResult("pypy-llvm-39474-faassen-c_richards", + res = BenchResult("pypy-llvm-39474-O3-c_richards", besttime=2.0, numruns=3) - assert res.executable == "pypy-llvm-39474-faassen-c" + assert res.executable == "pypy-llvm-39474-O3-c" assert res.revision == 39474 assert res.name == "richards" assert res.numruns == 3 Modified: pypy/branch/opt-option/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/bench-cronjob.py Sat Aug 2 14:41:20 2008 @@ -181,15 +181,15 @@ if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ c - c--stackless--_faassen - c--_faassen--_allworkingmodules - c--thread--gc=hybrid--_faassen - c--gc=semispace--_faassen - c--gc=generation--_faassen - c--gc=hybrid--_faassen - cli--_faassen - jvm--_faassen - jvm--inline-threshold=0--_faassen + c--stackless--_O3 + c--_O3--_allworkingmodules + c--thread--gc=hybrid--_O3 + c--gc=semispace--_O3 + c--gc=generation--_O3 + c--gc=hybrid--_O3 + cli--_O3 + jvm--_O3 + jvm--inline-threshold=0--_O3 """.split('\n') if backend.strip() and not backend.strip().startswith('#')] print time.ctime() for backend in backends: From arigo at codespeak.net Sat Aug 2 15:03:58 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:03:58 +0200 (CEST) Subject: [pypy-svn] r56923 - pypy/branch/gc+thread Message-ID: <20080802130358.C536F169E17@codespeak.net> Author: arigo Date: Sat Aug 2 15:03:56 2008 New Revision: 56923 Removed: 
pypy/branch/gc+thread/ Log: This branch was merged and the result seems to work. From arigo at codespeak.net Sat Aug 2 15:10:13 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:10:13 +0200 (CEST) Subject: [pypy-svn] r56924 - pypy/branch/asmgcc-exceptions Message-ID: <20080802131013.B449C169E3F@codespeak.net> Author: arigo Date: Sat Aug 2 15:10:13 2008 New Revision: 56924 Removed: pypy/branch/asmgcc-exceptions/ Log: Too evil hacks are probably better forgotten in the limbo of svn history. From arigo at codespeak.net Sat Aug 2 15:12:12 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:12:12 +0200 (CEST) Subject: [pypy-svn] r56925 - pypy/branch/io-improvements Message-ID: <20080802131212.DC74A169E3F@codespeak.net> Author: arigo Date: Sat Aug 2 15:12:12 2008 New Revision: 56925 Removed: pypy/branch/io-improvements/ Log: This branch was merged. From arigo at codespeak.net Sat Aug 2 15:13:17 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:13:17 +0200 (CEST) Subject: [pypy-svn] r56926 - pypy/branch/hybrid-io Message-ID: <20080802131317.7794B169E3F@codespeak.net> Author: arigo Date: Sat Aug 2 15:13:17 2008 New Revision: 56926 Removed: pypy/branch/hybrid-io/ Log: This was merged too. From arigo at codespeak.net Sat Aug 2 15:14:06 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:14:06 +0200 (CEST) Subject: [pypy-svn] r56927 - pypy/branch/roadshow Message-ID: <20080802131406.DC392169E4A@codespeak.net> Author: arigo Date: Sat Aug 2 15:14:06 2008 New Revision: 56927 Removed: pypy/branch/roadshow/ Log: No point in keep this around any more. From arigo at codespeak.net Sat Aug 2 15:16:07 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:16:07 +0200 (CEST) Subject: [pypy-svn] r56928 - pypy/branch/llvmgcroot Message-ID: <20080802131607.2DBCC168416@codespeak.net> Author: arigo Date: Sat Aug 2 15:16:06 2008 New Revision: 56928 Removed: pypy/branch/llvmgcroot/ Log: This branch became the asmgcroot branch in r50261. From arigo at codespeak.net Sat Aug 2 15:41:22 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:41:22 +0200 (CEST) Subject: [pypy-svn] r56929 - pypy/branch/gc-prefetch Message-ID: <20080802134122.0F66D169E3E@codespeak.net> Author: arigo Date: Sat Aug 2 15:41:21 2008 New Revision: 56929 Removed: pypy/branch/gc-prefetch/ Log: Kill branch for now. I abandonned it because it didn't seem to make any difference. From arigo at codespeak.net Sat Aug 2 15:42:29 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 2 Aug 2008 15:42:29 +0200 (CEST) Subject: [pypy-svn] r56930 - pypy/branch/dist-rctypes2 Message-ID: <20080802134229.531B2169E4A@codespeak.net> Author: arigo Date: Sat Aug 2 15:42:28 2008 New Revision: 56930 Removed: pypy/branch/dist-rctypes2/ Log: Kill kill! 
From pedronis at codespeak.net Sat Aug 2 16:17:13 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 16:17:13 +0200 (CEST) Subject: [pypy-svn] r56931 - in pypy/branch/garden-call-code/pypy: interpreter interpreter/test module/thread objspace objspace/std Message-ID: <20080802141713.59DB8169E4C@codespeak.net> Author: pedronis Date: Sat Aug 2 16:17:10 2008 New Revision: 56931 Modified: pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py pypy/branch/garden-call-code/pypy/interpreter/test/test_objspace.py pypy/branch/garden-call-code/pypy/module/thread/os_local.py pypy/branch/garden-call-code/pypy/objspace/descroperation.py pypy/branch/garden-call-code/pypy/objspace/std/typeobject.py Log: WIP introduce space.call_obj_args instead of various uses of prepend. The final goal is in the common cases not to have to put the obj in an argument at all, so not to need ArgumentPrepended anymore. added tests to cover the area touched and related details Modified: pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py Sat Aug 2 16:17:10 2008 @@ -665,6 +665,9 @@ return True return False + def call_obj_args(self, w_callable, w_obj, args): + return self.call_args(w_callable, args.prepend(w_obj)) + def call(self, w_callable, w_args, w_kwds=None): args = Arguments.frompacked(self, w_args, w_kwds) return self.call_args(w_callable, args) Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sat Aug 2 16:17:10 2008 @@ -100,7 +100,7 @@ stkargs = frame.make_arguments(nargs) args = stkargs.prepend(w_obj) try: - return self.call_args(args) + return self.call_args(args) # xxx Function.call_obj_args finally: if isinstance(stkargs, ArgumentsFromValuestack): stkargs.frame = None @@ -319,29 +319,29 @@ space = self.space if self.w_instance is not None: # bound method - args = args.prepend(self.w_instance) - else: - # unbound method - w_firstarg = args.firstarg() - if w_firstarg is not None and space.is_true( - space.abstract_isinstance(w_firstarg, self.w_class)): - pass # ok + return space.call_obj_args(self.w_function, self.w_instance, args) + + # unbound method + w_firstarg = args.firstarg() + if w_firstarg is not None and space.is_true( + space.abstract_isinstance(w_firstarg, self.w_class)): + pass # ok + else: + myname = self.getname(space,"") + clsdescr = self.w_class.getname(space,"") + if clsdescr: + clsdescr+=" " + if w_firstarg is None: + instdescr = "nothing" else: - myname = self.getname(space,"") - clsdescr = self.w_class.getname(space,"") - if clsdescr: - clsdescr+=" " - if w_firstarg is None: - instdescr = "nothing" - else: - instname = space.abstract_getclass(w_firstarg).getname(space,"") - if instname: - instname += " " - instdescr = "%sinstance" %instname - msg = ("unbound method %s() must be called with %s" - "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) - raise 
OperationError(space.w_TypeError, - space.wrap(msg)) + instname = space.abstract_getclass(w_firstarg).getname(space,"") + if instname: + instname += " " + instdescr = "%sinstance" %instname + msg = ("unbound method %s() must be called with %s" + "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) + raise OperationError(space.w_TypeError, + space.wrap(msg)) return space.call_args(self.w_function, args) def descr_method_get(self, w_obj, w_cls=None): Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Sat Aug 2 16:17:10 2008 @@ -84,6 +84,10 @@ assert res[0] == 23 assert res[1] == (42,) + res = func(23, *(42,)) + assert res[0] == 23 + assert res[1] == (42,) + def test_simple_kwargs(self): def func(arg1, **kwargs): return arg1, kwargs @@ -91,6 +95,10 @@ assert res[0] == 23 assert res[1] == {'value': 42} + res = func(23, **{'value': 42}) + assert res[0] == 23 + assert res[1] == {'value': 42} + def test_kwargs_sets_wrong_positional_raises(self): def func(arg1): pass @@ -146,6 +154,15 @@ return arg1, kw raises(TypeError, func, 42, **{'arg1': 23}) + def test_kwargs_bound_blind(self): + class A(object): + def func(self, **kw): + return self, kw + func = A().func + + func(self=23) # XXX different behavior from CPython + # xxx raises(TypeError, func, self=23) + def test_kwargs_confusing_name(self): def func(self): # 'self' conflicts with the interp-level return self*7 # argument to call_function() @@ -177,6 +194,42 @@ assert type(f.__doc__) is unicode class AppTestMethod: + def test_simple_call(self): + class A(object): + def func(self, arg2): + return self, arg2 + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == 42 + + def test_simple_varargs(self): + class A(object): + def func(self, *args): + return self, args + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == (42,) + + res = a.func(*(42,)) + assert res[0] is a + assert res[1] == (42,) + + def test_simple_kwargs(self): + class A(object): + def func(self, **kwargs): + return self, kwargs + a = A() + + res = a.func(value=42) + assert res[0] is a + assert res[1] == {'value': 42} + + res = a.func(**{'value': 42}) + assert res[0] is a + assert res[1] == {'value': 42} + def test_get(self): def func(self): return self class Object(object): pass Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 2 16:17:10 2008 @@ -459,3 +459,54 @@ assert space.is_true(w_res) assert called == [w_app_f, w_app_f] + + def test_pass_trough_arguments(self): + space = self.space + + called = [] + + def f(space, __args__): + called.append(__args__) + a_w, _ = __args__.unpack() + return space.newtuple([space.wrap('f')]+a_w) + + def g(space, w_self, __args__): + called.append(__args__) + a_w, _ = __args__.unpack() + return space.newtuple([space.wrap('g'), w_self, ]+a_w) + + w_f = space.wrap(gateway.interp2app_temp(f, + unwrap_spec=[gateway.ObjSpace, + gateway.Arguments])) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.Arguments])) + + args = 
argument.Arguments(space, [space.wrap(7)]) + + w_res = space.call_args(w_f, args) + assert space.is_true(space.eq(w_res, space.wrap(('f', 7)))) + + # white-box check for opt + assert called[0] is args + called = [] + + w_self = space.wrap('self') + + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + + # white-box check for opt + assert called[0] is args0 + called = [] + + args3 = argument.Arguments(space, [space.wrap(3)]) + w_res = space.call_obj_args(w_g, w_self, args3) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) + # white-box check for opt + assert called[0] is args3 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_objspace.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_objspace.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_objspace.py Sat Aug 2 16:17:10 2008 @@ -180,6 +180,36 @@ w_obj = space.wrap(-12) space.raises_w(space.w_ValueError, space.r_ulonglong_w, w_obj) + def test_call_obj_args(self): + from pypy.interpreter.argument import Arguments + + space = self.space + + w_f = space.appexec([], """(): + def f(x, y): + return (x, y) + return f +""") + + w_a = space.appexec([], """(): + class A(object): + def __call__(self, x): + return x + return A() +""") + + w_9 = space.wrap(9) + w_1 = space.wrap(1) + + w_res = space.call_obj_args(w_f, w_9, Arguments(space, [w_1])) + + w_x, w_y = space.unpacktuple(w_res, 2) + assert w_x is w_9 + assert w_y is w_1 + + w_res = space.call_obj_args(w_a, w_9, Arguments(space, [])) + assert w_res is w_9 + class TestModuleMinimal: def test_sys_exists(self): Modified: pypy/branch/garden-call-code/pypy/module/thread/os_local.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/thread/os_local.py (original) +++ pypy/branch/garden-call-code/pypy/module/thread/os_local.py Sat Aug 2 16:17:10 2008 @@ -29,7 +29,7 @@ w_self = space.wrap(self) w_type = space.type(w_self) w_init = space.getattr(w_type, space.wrap("__init__")) - space.call_args(w_init, self.initargs.prepend(w_self)) + space.call_obj_args(w_init, w_self, self.initargs) except: # failed, forget w_dict and propagate the exception del self.dicts[ident] Modified: pypy/branch/garden-call-code/pypy/objspace/descroperation.py ============================================================================== --- pypy/branch/garden-call-code/pypy/objspace/descroperation.py (original) +++ pypy/branch/garden-call-code/pypy/objspace/descroperation.py Sat Aug 2 16:17:10 2008 @@ -75,7 +75,7 @@ descr = space.interpclass_w(w_descr) # a special case for performance and to avoid infinite recursion if type(descr) is Function: - return descr.call_args(args.prepend(w_obj)) + return descr.call_args(args.prepend(w_obj)) # xxx Function.call_obj_args else: w_impl = space.get(w_descr, w_obj) return space.call_args(w_impl, args) Modified: pypy/branch/garden-call-code/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/branch/garden-call-code/pypy/objspace/std/typeobject.py (original) +++ pypy/branch/garden-call-code/pypy/objspace/std/typeobject.py Sat Aug 2 16:17:10 2008 @@ -499,7 +499,7 @@ return space.type(w_obj) # invoke the __new__ of the type w_newfunc = space.getattr(w_type, 
space.wrap('__new__')) - w_newobject = space.call_args(w_newfunc, __args__.prepend(w_type)) + w_newobject = space.call_obj_args(w_newfunc, w_type, __args__) # maybe invoke the __init__ of the type if space.is_true(space.isinstance(w_newobject, w_type)): w_descr = space.lookup(w_newobject, '__init__') From bgola at codespeak.net Sat Aug 2 16:55:24 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Sat, 2 Aug 2008 16:55:24 +0200 (CEST) Subject: [pypy-svn] r56932 - pypy/branch/2.5-features/pypy/interpreter/astcompiler Message-ID: <20080802145524.1E545169EEA@codespeak.net> Author: bgola Date: Sat Aug 2 16:55:23 2008 New Revision: 56932 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Log: fixing the computeStackDepth method to work with new (2.5) MAKE_CLOSURE semantics Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Sat Aug 2 16:55:23 2008 @@ -311,26 +311,12 @@ except KeyError: pass else: - if opcode == pythonopcode.opmap['MAKE_CLOSURE']: - # only supports "LOAD_CONST co / MAKE_CLOSURE n" - if just_loaded_const is None: - raise InternalCompilerError("MAKE_CLOSURE not " - "following LOAD_CONST") - codeobj = self.space.interp_w(PyCode, just_loaded_const) - nfreevars = len(codeobj.co_freevars) - effect = - nfreevars - oparg - else: - effect = tracker(oparg) + effect = tracker(oparg) curstackdepth += effect if i in finally_targets: curstackdepth += 2 # see pyopcode.FinallyBlock.cleanup() self._setdepth(i, curstackdepth) - if opcode == pythonopcode.opmap['LOAD_CONST']: - just_loaded_const = consts_w[oparg] - else: - just_loaded_const = None - self.stacksize = largestsize def fixLabelTargets(self): @@ -481,8 +467,7 @@ def depth_MAKE_FUNCTION(argc): return -argc def depth_MAKE_CLOSURE(argc): - raise InternalCompilerError("must special-case this in order to account" - " for the free variables") + return -argc def depth_BUILD_SLICE(argc): if argc == 2: return -1 From pedronis at codespeak.net Sat Aug 2 18:50:23 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 18:50:23 +0200 (CEST) Subject: [pypy-svn] r56935 - pypy/branch/garden-call-code/pypy/interpreter Message-ID: <20080802165023.3C6E7169E00@codespeak.net> Author: pedronis Date: Sat Aug 2 18:50:21 2008 New Revision: 56935 Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py Log: kill extra space Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/argument.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/argument.py Sat Aug 2 18:50:21 2008 @@ -86,7 +86,7 @@ scopelen = len(argnames) has_vararg = varargname is not None has_kwarg = kwargname is not None - if has_vararg: + if has_vararg: scopelen += 1 if has_kwarg: scopelen += 1 From pedronis at codespeak.net Sat Aug 2 18:50:45 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 18:50:45 +0200 (CEST) Subject: [pypy-svn] r56936 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080802165045.A76BE169E0E@codespeak.net> Author: pedronis Date: Sat Aug 2 18:50:44 2008 New Revision: 56936 Modified: 
pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Log: add a test about a corner case of prepended argument logic Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Sat Aug 2 18:50:44 2008 @@ -216,6 +216,19 @@ assert res[0] is a assert res[1] == (42,) + def test_obscure_varargs(self): + class A(object): + def func(*args): + return args + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == 42 + + res = a.func(*(42,)) + assert res[0] is a + assert res[1] == 42 + def test_simple_kwargs(self): class A(object): def func(self, **kwargs): From pedronis at codespeak.net Sat Aug 2 20:05:36 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 2 Aug 2008 20:05:36 +0200 (CEST) Subject: [pypy-svn] r56937 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080802180536.E5B67169E36@codespeak.net> Author: pedronis Date: Sat Aug 2 20:05:34 2008 New Revision: 56937 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: test covering various situations for BuiltinCode.funcrun Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 2 20:05:34 2008 @@ -509,4 +509,32 @@ w_res = space.call_obj_args(w_g, w_self, args3) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) # white-box check for opt - assert called[0] is args3 + assert called[0] is args3 + + def test_plain(self): + space = self.space + + def g(space, w_a, w_x): + return space.newtuple([space.wrap('g'), w_a, w_x]) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.W_Root])) + + args = argument.Arguments(space, [space.wrap(-1), space.wrap(0)]) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', -1, 0)))) + + w_self = space.wrap('self') + + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + + args3 = argument.Arguments(space, [space.wrap(3)]) + w_res = space.call_obj_args(w_g, w_self, args3) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) From pedronis at codespeak.net Sun Aug 3 04:03:50 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 3 Aug 2008 04:03:50 +0200 (CEST) Subject: [pypy-svn] r56939 - in pypy/branch/garden-call-code/pypy: interpreter interpreter/test objspace Message-ID: <20080803020350.86D07169E22@codespeak.net> Author: pedronis Date: Sun Aug 3 04:03:48 2008 New Revision: 56939 Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py pypy/branch/garden-call-code/pypy/interpreter/eval.py pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/interpreter/gateway.py pypy/branch/garden-call-code/pypy/interpreter/pycode.py pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py 
pypy/branch/garden-call-code/pypy/objspace/descroperation.py Log: killed Arguments.popfirst and ArgumentsPrepended introducing call_obj_args and related methods. a bunch of tests about the blindargs need. here this speeded up builtin instantiation (bltna1) by 20%, some 10% for user type instantiation too (inst). some other slow downs/speed ups in the noise (1-5%), one issue is that this overall increased the size of the executable, some of the next possible changes will prune some stuff though Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/argument.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/argument.py Sun Aug 3 04:03:48 2008 @@ -6,32 +6,62 @@ class AbstractArguments: - def parse(self, fnname, signature, defaults_w=[]): + def parse_into_scope(self, w_firstarg, + scope_w, fnname, signature, defaults_w=[]): """Parse args and kwargs to initialize a frame according to the signature of code object. + Store the argumentvalues into scope_w. + scope_w must be big enough for signature. """ + argnames, varargname, kwargname = signature + has_vararg = varargname is not None + has_kwarg = kwargname is not None try: - return self.match_signature(signature, defaults_w) + return self._match_signature(w_firstarg, + scope_w, argnames, has_vararg, + has_kwarg, defaults_w, 0) except ArgErr, e: raise OperationError(self.space.w_TypeError, self.space.wrap(e.getmsg(fnname))) - def parse_into_scope(self, scope_w, fnname, signature, defaults_w=[]): - """Parse args and kwargs to initialize a frame - according to the signature of code object. - Store the argumentvalues into scope_w. - scope_w must be big enough for signature. + def _parse(self, w_firstarg, signature, defaults_w, blindargs=0): + """Parse args and kwargs according to the signature of a code object, + or raise an ArgErr in case of failure. """ argnames, varargname, kwargname = signature + scopelen = len(argnames) has_vararg = varargname is not None has_kwarg = kwargname is not None + if has_vararg: + scopelen += 1 + if has_kwarg: + scopelen += 1 + scope_w = [None] * scopelen + self._match_signature(w_firstarg, scope_w, argnames, has_vararg, has_kwarg, defaults_w, blindargs) + return scope_w + + def parse(self, fnname, signature, defaults_w=[], blindargs=0): + """Parse args and kwargs to initialize a frame + according to the signature of code object. + """ try: - return self._match_signature(scope_w, argnames, has_vararg, - has_kwarg, defaults_w, 0, None) + return self._parse(None, signature, defaults_w, blindargs) except ArgErr, e: raise OperationError(self.space.w_TypeError, self.space.wrap(e.getmsg(fnname))) + # xxx have only this one + def parse_obj(self, w_firstarg, + fnname, signature, defaults_w=[], blindargs=0): + """Parse args and kwargs to initialize a frame + according to the signature of code object. + """ + try: + return self._parse(w_firstarg, signature, defaults_w, blindargs) + except ArgErr, e: + raise OperationError(self.space.w_TypeError, + self.space.wrap(e.getmsg(fnname))) + def frompacked(space, w_args=None, w_kwds=None): """Convenience static method to build an Arguments from a wrapped sequence and a wrapped dictionary.""" @@ -68,31 +98,11 @@ return Arguments(space, args_w, kwds_w, w_star, w_starstar) fromshape = staticmethod(fromshape) - def prepend(self, w_firstarg): - "Return a new Arguments with a new argument inserted first." 
- return ArgumentsPrepended(self, w_firstarg) - - def popfirst(self): - """For optimization only: might return (w_firstarg, args_with_rest), - or might just raise IndexError. - """ - raise IndexError - def match_signature(self, signature, defaults_w): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. """ - argnames, varargname, kwargname = signature - scopelen = len(argnames) - has_vararg = varargname is not None - has_kwarg = kwargname is not None - if has_vararg: - scopelen += 1 - if has_kwarg: - scopelen += 1 - scope_w = [None] * scopelen - self._match_signature(scope_w, argnames, has_vararg, has_kwarg, defaults_w, 0, None) - return scope_w + return self._parse(None, signature, defaults_w) def unmatch_signature(self, signature, data_w): """kind of inverse of match_signature""" @@ -156,7 +166,12 @@ """ raise NotImplementedError() - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): + def prepend(self, w_firstarg): + """ Purely abstract + """ + raise NotImplementedError() + + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0): """ Purely abstract """ raise NotImplementedError() @@ -164,66 +179,9 @@ def fixedunpack(self, argcount): """ Purely abstract """ - raise NotImplementedError() - -class ArgumentsPrepended(AbstractArguments): - def __init__(self, args, w_firstarg): - self.space = args.space - self.args = args - self.w_firstarg = w_firstarg - - def firstarg(self): - "Return the first argument for inspection." - return self.w_firstarg - - def popfirst(self): - return self.w_firstarg, self.args - - def __repr__(self): - return 'ArgumentsPrepended(%r, %r)' % (self.args, self.w_firstarg) - - def has_keywords(self): - return self.args.has_keywords() - - def unpack(self): - arguments_w, kwds_w = self.args.unpack() - return ([self.w_firstarg] + arguments_w), kwds_w - - def fixedunpack(self, argcount): - if argcount <= 0: - raise ValueError, "too many arguments (%d expected)" % argcount # XXX: Incorrect - return [self.w_firstarg] + self.args.fixedunpack(argcount - 1) - - def _rawshape(self, nextra=0): - return self.args._rawshape(nextra + 1) - - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): - """Parse args and kwargs according to the signature of a code object, - or raise an ArgErr in case of failure. - Return the number of arguments filled in. 
- """ - if blindargs < len(argnames): - scope_w[blindargs] = self.w_firstarg - else: - if extravarargs is None: - extravarargs = [ self.w_firstarg ] - else: - extravarargs.append(self.w_firstarg) - return self.args._match_signature(scope_w, argnames, has_vararg, - has_kwarg, defaults_w, - blindargs + 1, extravarargs) - - def flatten(self): - (shape_cnt, shape_keys, shape_star, shape_stst), data_w = self.args.flatten() - data_w.insert(0, self.w_firstarg) - return (shape_cnt + 1, shape_keys, shape_star, shape_stst), data_w + raise NotImplementedError() - def num_args(self): - return self.args.num_args() + 1 - def num_kwds(self): - return self.args.num_kwds() - class ArgumentsFromValuestack(AbstractArguments): """ Collects the arguments of a function call as stored on a PyFrame @@ -242,14 +200,11 @@ return None return self.frame.peekvalue(self.nargs - 1) - def popfirst(self): - if self.nargs <= 0: - raise IndexError - frame = self.frame - newnargs = self.nargs-1 - return (frame.peekvalue(newnargs), - ArgumentsFromValuestack(self.space, frame, newnargs)) - + def prepend(self, w_firstarg): + "Return a new Arguments with a new argument inserted first." + args_w = self.frame.peekvalues(self.nargs) + return Arguments(self.space, [w_firstarg] + args_w) + def __repr__(self): return 'ArgumentsFromValuestack(%r, %r)' % (self.frame, self.nargs) @@ -276,52 +231,66 @@ def _rawshape(self, nextra=0): return nextra + self.nargs, (), False, False - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. Return the number of arguments filled in. 
""" co_argcount = len(argnames) - if blindargs + self.nargs + len(defaults_w) < co_argcount: + extravarargs = None + input_argcount = 0 + + if w_firstarg is not None: + blindargs = blindargs or 1 + upfront = 1 + if co_argcount > 0: + scope_w[0] = w_firstarg + input_argcount = 1 + else: + extravarargs = [ w_firstarg ] + else: + upfront = 0 + + avail = upfront + self.nargs + + if avail + len(defaults_w) < co_argcount: raise ArgErrCount(blindargs + self.nargs , 0, (co_argcount, has_vararg, has_kwarg), - defaults_w, co_argcount - blindargs - - self.nargs - len(defaults_w)) - if blindargs + self.nargs > co_argcount and not has_vararg: + defaults_w, co_argcount - avail - len(defaults_w)) + if avail > co_argcount and not has_vararg: raise ArgErrCount(blindargs + self.nargs, 0, (co_argcount, has_vararg, has_kwarg), defaults_w, 0) - if blindargs + self.nargs >= co_argcount: - for i in range(co_argcount - blindargs): - scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + if avail >= co_argcount: + for i in range(co_argcount - input_argcount): + scope_w[i + input_argcount] = self.frame.peekvalue(self.nargs - 1 - i) if has_vararg: - if blindargs > co_argcount: + if upfront > co_argcount: + assert extravarargs is not None stararg_w = extravarargs for i in range(self.nargs): stararg_w.append(self.frame.peekvalue(self.nargs - 1 - i)) else: - stararg_w = [None] * (self.nargs + blindargs - co_argcount) - for i in range(co_argcount - blindargs, self.nargs): - stararg_w[i - co_argcount + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + args_left = co_argcount - upfront + stararg_w = [None] * (avail - co_argcount) + for i in range(args_left, self.nargs): + stararg_w[i - args_left] = self.frame.peekvalue(self.nargs - 1 - i) scope_w[co_argcount] = self.space.newtuple(stararg_w) - co_argcount += 1 else: for i in range(self.nargs): - scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + scope_w[i + input_argcount] = self.frame.peekvalue(self.nargs - 1 - i) ndefaults = len(defaults_w) - missing = co_argcount - self.nargs - blindargs + missing = co_argcount - avail first_default = ndefaults - missing for i in range(missing): - scope_w[self.nargs + blindargs + i] = defaults_w[first_default + i] + scope_w[avail + i] = defaults_w[first_default + i] if has_vararg: scope_w[co_argcount] = self.space.newtuple([]) - co_argcount += 1 if has_kwarg: - scope_w[co_argcount] = self.space.newdict() - co_argcount += 1 - return co_argcount + scope_w[co_argcount + has_vararg] = self.space.newdict() + return co_argcount + has_vararg + has_kwarg def flatten(self): data_w = [None] * self.nargs @@ -382,12 +351,12 @@ "Return a ([w1,w2...], {'kw':w3...}) pair." self._unpack() return self.arguments_w, self.kwds_w - - def popfirst(self): - self._unpack() - return self.arguments_w[0], Arguments(self.space, self.arguments_w[1:], - kwds_w = self.kwds_w) + def prepend(self, w_firstarg): + "Return a new Arguments with a new argument inserted first." 
+ return Arguments(self.space, [w_firstarg] + self.arguments_w, + self.kwds_w, self.w_stararg, self.w_starstararg) + def _unpack(self): "unpack the *arg and **kwd into w_arguments and kwds_w" # --- unpack the * argument now --- @@ -460,9 +429,8 @@ ### Parsing for function calls ### - def _match_signature(self, scope_w, argnames, has_vararg=False, - has_kwarg=False, defaults_w=[], blindargs=0, - extravarargs=None): + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, + has_kwarg=False, defaults_w=[], blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. Return the number of arguments filled in. @@ -474,10 +442,24 @@ # scope_w = resulting list of wrapped values # co_argcount = len(argnames) # expected formal arguments, without */** + extravarargs = None + input_argcount = 0 + + if w_firstarg is not None: + blindargs = blindargs or 1 + upfront = 1 + if co_argcount > 0: + scope_w[0] = w_firstarg + input_argcount = 1 + else: + extravarargs = [ w_firstarg ] + else: + upfront = 0 + if self.w_stararg is not None: # There is a case where we don't have to unpack() a w_stararg: # if it matches exactly a *arg in the signature. - if (len(self.arguments_w) + blindargs == co_argcount and + if (len(self.arguments_w) + upfront == co_argcount and has_vararg and self.space.is_w(self.space.type(self.w_stararg), self.space.w_tuple)): @@ -489,23 +471,25 @@ self._unpack() args_w = self.arguments_w + num_args = len(args_w) + kwds_w = self.kwds_w num_kwds = 0 if kwds_w is not None: num_kwds = len(kwds_w) - - # put as many positional input arguments into place as available - if blindargs >= co_argcount: - input_argcount = co_argcount - elif len(args_w) + blindargs > co_argcount: - for i in range(co_argcount - blindargs): - scope_w[i + blindargs] = args_w[i] - input_argcount = co_argcount - next_arg = co_argcount - blindargs - else: - for i in range(len(args_w)): - scope_w[i + blindargs] = args_w[i] - input_argcount = len(args_w) + blindargs + + avail = num_args + upfront + + if input_argcount < co_argcount: + # put as many positional input arguments into place as available + if avail > co_argcount: + take = co_argcount - input_argcount + else: + take = num_args + + for i in range(take): + scope_w[i + input_argcount] = args_w[i] + input_argcount += take # check that no keyword argument conflicts with these # note that for this purpose we ignore the first blindargs, @@ -542,21 +526,21 @@ # collect extra positional arguments into the *vararg if has_vararg: if self.w_stararg is None: # common case - args_left = co_argcount - blindargs + args_left = co_argcount - upfront if args_left < 0: # check required by rpython assert extravarargs is not None starargs_w = extravarargs - if len(args_w): + if num_args: starargs_w.extend(args_w) - elif len(args_w) > args_left: + elif num_args > args_left: starargs_w = args_w[args_left:] else: starargs_w = [] scope_w[co_argcount] = self.space.newtuple(starargs_w) else: # shortcut for the non-unpack() case above scope_w[co_argcount] = self.w_stararg - elif len(args_w) + blindargs > co_argcount: - raise ArgErrCount(len(args_w) + blindargs, num_kwds, + elif avail > co_argcount: + raise ArgErrCount(avail, num_kwds, (co_argcount, has_vararg, has_kwarg), defaults_w, 0) @@ -571,7 +555,7 @@ raise ArgErrUnknownKwds(remainingkwds_w) if missing: - raise ArgErrCount(len(args_w) + blindargs, num_kwds, + raise ArgErrCount(avail, num_kwds, (co_argcount, has_vararg, has_kwarg), defaults_w, missing) Modified: 
pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py Sun Aug 3 04:03:48 2008 @@ -666,6 +666,12 @@ return False def call_obj_args(self, w_callable, w_obj, args): + if not self.config.objspace.disable_call_speedhacks: + # XXX start of hack for performance + from pypy.interpreter.function import Function + if isinstance(w_callable, Function): + return w_callable.call_obj_args(w_obj, args) + # XXX end of hack for performance return self.call_args(w_callable, args.prepend(w_obj)) def call(self, w_callable, w_args, w_kwds=None): Modified: pypy/branch/garden-call-code/pypy/interpreter/eval.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/eval.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/eval.py Sun Aug 3 04:03:48 2008 @@ -57,6 +57,8 @@ frame.setfastscope(scope_w) return frame.run() + def funcrun_obj(self, func, w_obj, args): + return self.funcrun(func, args.prepend(w_obj)) # a performance hack (see gateway.BuiltinCode1/2/3 and pycode.PyCode) def fastcall_0(self, space, func): Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sun Aug 3 04:03:48 2008 @@ -33,7 +33,12 @@ return "" % getattr(self, 'name', '?') def call_args(self, args): - return self.code.funcrun(self, args) # delegate activation to code + # delegate activation to code + return self.code.funcrun(self, args) + + def call_obj_args(self, w_obj, args): + # delegate activation to code + return self.code.funcrun_obj(self, w_obj, args) def getcode(self): return self.code @@ -98,9 +103,8 @@ frame.peekvalue(1), frame.peekvalue(0)) stkargs = frame.make_arguments(nargs) - args = stkargs.prepend(w_obj) try: - return self.call_args(args) # xxx Function.call_obj_args + return self.call_obj_args(w_obj, stkargs) finally: if isinstance(stkargs, ArgumentsFromValuestack): stkargs.frame = None Modified: pypy/branch/garden-call-code/pypy/interpreter/gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/gateway.py Sun Aug 3 04:03:48 2008 @@ -479,9 +479,13 @@ return space.wrap(self.docstring) def funcrun(self, func, args): + return BuiltinCode.funcrun_obj(self, func, None, args) + + def funcrun_obj(self, func, w_obj, args): space = func.space activation = self.activation - scope_w = args.parse(func.name, self.sig, func.defs_w) + scope_w = args.parse_obj(w_obj, func.name, self.sig, + func.defs_w, self.minargs) try: w_result = activation._run(space, scope_w) except KeyboardInterrupt: @@ -495,6 +499,8 @@ raise OperationError(space.w_RuntimeError, space.wrap("internal error: " + str(e))) except DescrMismatch, e: + if w_obj is not None: + args = args.prepend(w_obj) return scope_w[0].descr_call_mismatch(space, self.descrmismatch_op, self.descr_reqcls, @@ -531,32 +537,27 @@ class BuiltinCodePassThroughArguments1(BuiltinCode): - def funcrun(self, func, args): + def funcrun_obj(self, func, w_obj, args): space = func.space try: - w_obj, newargs = 
args.popfirst() - except IndexError: - return BuiltinCode.funcrun(self, func, args) - else: - try: - w_result = self.func__args__(space, w_obj, newargs) - except KeyboardInterrupt: - raise OperationError(space.w_KeyboardInterrupt, space.w_None) - except MemoryError: - raise OperationError(space.w_MemoryError, space.w_None) - except NotImplementedError, e: - raise - except RuntimeError, e: - raise OperationError(space.w_RuntimeError, - space.wrap("internal error: " + str(e))) - except DescrMismatch, e: - return args.firstarg().descr_call_mismatch(space, - self.descrmismatch_op, - self.descr_reqcls, - args) - if w_result is None: - w_result = space.w_None - return w_result + w_result = self.func__args__(space, w_obj, args) + except KeyboardInterrupt: + raise OperationError(space.w_KeyboardInterrupt, space.w_None) + except MemoryError: + raise OperationError(space.w_MemoryError, space.w_None) + except NotImplementedError, e: + raise + except RuntimeError, e: + raise OperationError(space.w_RuntimeError, + space.wrap("internal error: " + str(e))) + except DescrMismatch, e: + return args.firstarg().descr_call_mismatch(space, + self.descrmismatch_op, + self.descr_reqcls, + args.prepend(w_obj)) + if w_result is None: + w_result = space.w_None + return w_result class BuiltinCode0(BuiltinCode): fast_natural_arity = 0 Modified: pypy/branch/garden-call-code/pypy/interpreter/pycode.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/pycode.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/pycode.py Sun Aug 3 04:03:48 2008 @@ -212,7 +212,19 @@ func.closure) sig = self._signature # speed hack - args_matched = args.parse_into_scope(frame.fastlocals_w, func.name, + args_matched = args.parse_into_scope(None, frame.fastlocals_w, + func.name, + sig, func.defs_w) + frame.init_cells() + return frame.run() + + def funcrun_obj(self, func, w_obj, args): + frame = self.space.createframe(self, func.w_func_globals, + func.closure) + sig = self._signature + # speed hack + args_matched = args.parse_into_scope(w_obj, frame.fastlocals_w, + func.name, sig, func.defs_w) frame.init_cells() return frame.run() Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sun Aug 3 04:03:48 2008 @@ -495,22 +495,19 @@ w_self = space.wrap('self') - args0 = argument.Arguments(space, [space.wrap(0)]) - args = args0.prepend(w_self) - - w_res = space.call_args(w_g, args) - assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) - - # white-box check for opt - assert called[0] is args0 - called = [] - args3 = argument.Arguments(space, [space.wrap(3)]) w_res = space.call_obj_args(w_g, w_self, args3) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) # white-box check for opt assert called[0] is args3 + # no opt in this case + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + def test_plain(self): space = self.space @@ -538,3 +535,46 @@ args3 = argument.Arguments(space, [space.wrap(3)]) w_res = space.call_obj_args(w_g, w_self, args3) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) + + +class 
AppTestKeywordsToBuiltinSanity(object): + + def test_type(self): + class X(object): + def __init__(self, **kw): + pass + clash = type.__call__.func_code.co_varnames[0] + + X(**{clash: 33}) + type.__call__(X, **{clash: 33}) + + def test_object_new(self): + class X(object): + def __init__(self, **kw): + pass + clash = object.__new__.func_code.co_varnames[0] + + X(**{clash: 33}) + object.__new__(X, **{clash: 33}) + + + def test_dict_new(self): + clash = dict.__new__.func_code.co_varnames[0] + + dict(**{clash: 33}) + dict.__new__(dict, **{clash: 33}) + + def test_dict_init(self): + d = {} + clash = dict.__init__.func_code.co_varnames[0] + + d.__init__(**{clash: 33}) + dict.__init__(d, **{clash: 33}) + + def test_dict_update(self): + d = {} + clash = dict.update.func_code.co_varnames[0] + + d.update(**{clash: 33}) + dict.update(d, **{clash: 33}) + Modified: pypy/branch/garden-call-code/pypy/objspace/descroperation.py ============================================================================== --- pypy/branch/garden-call-code/pypy/objspace/descroperation.py (original) +++ pypy/branch/garden-call-code/pypy/objspace/descroperation.py Sun Aug 3 04:03:48 2008 @@ -75,7 +75,7 @@ descr = space.interpclass_w(w_descr) # a special case for performance and to avoid infinite recursion if type(descr) is Function: - return descr.call_args(args.prepend(w_obj)) # xxx Function.call_obj_args + return descr.call_obj_args(w_obj, args) else: w_impl = space.get(w_descr, w_obj) return space.call_args(w_impl, args) From arigo at codespeak.net Sun Aug 3 15:17:59 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 3 Aug 2008 15:17:59 +0200 (CEST) Subject: [pypy-svn] r56940 - pypy/branch/opt-option/pypy/config Message-ID: <20080803131759.4054D169F38@codespeak.net> Author: arigo Date: Sun Aug 3 15:17:57 2008 New Revision: 56940 Modified: pypy/branch/opt-option/pypy/config/pypyoption.py Log: Ah. Can't use smallints together with a framework GC... Let's just use prebuilt ints, I guess they already help remove a large fraction of W_IntObject instances at run-time. Modified: pypy/branch/opt-option/pypy/config/pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/pypyoption.py Sun Aug 3 15:17:57 2008 @@ -338,10 +338,11 @@ # memory-saving optimizations if level == 'mem': - config.objspace.std.suggest(withsmallint=True) + config.objspace.std.suggest(withprebuiltint=True) config.objspace.std.suggest(withrangelist=True) config.objspace.std.suggest(withprebuiltchar=True) config.objspace.std.suggest(withsharingdict=True) + # xxx other options? ropes maybe? # completely disable geninterp in a level 0 translation if level == '0': From arigo at codespeak.net Sun Aug 3 15:27:40 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 3 Aug 2008 15:27:40 +0200 (CEST) Subject: [pypy-svn] r56941 - pypy/branch/opt-option/pypy/config Message-ID: <20080803132740.D630D169F4E@codespeak.net> Author: arigo Date: Sun Aug 3 15:27:40 2008 New Revision: 56941 Modified: pypy/branch/opt-option/pypy/config/pypyoption.py Log: A bit messy.
If the mutual exclusion between withsmallint and withprebuiltint is expressed in *that* way instead of *this* way, then we can say "translate.py --opt=mem targetpypysandalone --withsmallint". Modified: pypy/branch/opt-option/pypy/config/pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/pypyoption.py Sun Aug 3 15:27:40 2008 @@ -157,11 +157,11 @@ BoolOption("withsmallint", "use tagged integers", default=False, - requires=[("translation.gc", "boehm")]), + requires=[("translation.gc", "boehm"), + ("objspace.std.withprebuiltint", False)]), BoolOption("withprebuiltint", "prebuild commonly used int objects", - default=False, - requires=[("objspace.std.withsmallint", False)]), + default=False), IntOption("prebuiltintfrom", "lowest integer which is prebuilt", default=-5, cmdline="--prebuiltintfrom"), From pedronis at codespeak.net Sun Aug 3 16:31:58 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 3 Aug 2008 16:31:58 +0200 (CEST) Subject: [pypy-svn] r56942 - in pypy/branch/garden-call-code/pypy/interpreter: . test Message-ID: <20080803143158.A575E169F32@codespeak.net> Author: pedronis Date: Sun Aug 3 16:31:57 2008 New Revision: 56942 Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Log: streamline: behave like CPython in this respect Modified: pypy/branch/garden-call-code/pypy/interpreter/argument.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/argument.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/argument.py Sun Aug 3 16:31:57 2008 @@ -241,7 +241,6 @@ input_argcount = 0 if w_firstarg is not None: - blindargs = blindargs or 1 upfront = 1 if co_argcount > 0: scope_w[0] = w_firstarg @@ -446,7 +445,6 @@ input_argcount = 0 if w_firstarg is not None: - blindargs = blindargs or 1 upfront = 1 if co_argcount > 0: scope_w[0] = w_firstarg Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_function.py Sun Aug 3 16:31:57 2008 @@ -160,8 +160,18 @@ return self, kw func = A().func - func(self=23) # XXX different behavior from CPython - # xxx raises(TypeError, func, self=23) + # don't want the extra argument passing of raises + try: + func(self=23) + assert False + except TypeError: + pass + + try: + func(**{'self': 23}) + assert False + except TypeError: + pass def test_kwargs_confusing_name(self): def func(self): # 'self' conflicts with the interp-level From pedronis at codespeak.net Sun Aug 3 22:48:34 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 3 Aug 2008 22:48:34 +0200 (CEST) Subject: [pypy-svn] r56949 - pypy/branch/garden-call-code/pypy/module/_stackless/test Message-ID: <20080803204834.24E20169ED5@codespeak.net> Author: pedronis Date: Sun Aug 3 22:48:32 2008 New Revision: 56949 Added: pypy/branch/garden-call-code/pypy/module/_stackless/test/test_frame_chain_reconstruction.py (contents, props changed) Log: tests about frame chain reconstruction that can run on CPython+greenlet, check the frame chain by faking rstack.resume_state_create skipped tests showing bad inderection of 
CALL_METHOD and coroutine pickling Added: pypy/branch/garden-call-code/pypy/module/_stackless/test/test_frame_chain_reconstruction.py ============================================================================== --- (empty file) +++ pypy/branch/garden-call-code/pypy/module/_stackless/test/test_frame_chain_reconstruction.py Sun Aug 3 22:48:32 2008 @@ -0,0 +1,264 @@ +from pypy.conftest import gettestobjspace +from py.test import skip + +class FrameCheck(object): + + def __init__(self, name): + self.name = name + + def __eq__(self, frame): + return frame.pycode.co_name == self.name + +class BytecodeCheck(object): + + def __init__(self, code, op, arg): + self.code = code + self.op = chr(op)+chr(arg & 0xff) + chr(arg >> 8 & 0xff) + + def __eq__(self, pos): + return self.code[pos-3:pos] == self.op + +class BaseTestReconstructFrameChain(object): + OPTIONS = {} + + def setup_class(cls): + space = gettestobjspace(usemodules=('_stackless',), **cls.OPTIONS) + cls.space = space + + from pypy.rlib import rstack + cls.old_resume_state_create = rstack.resume_state_create + + def tr(prevstate, label, *args): + if prevstate is None: + prevstate = [] + return prevstate+[(label, args)] + rstack.resume_state_create = tr + + w_opmap = space.appexec([], """(): + import opcode + + return opcode.opmap + """) + + opmap = space.unwrap(w_opmap) + cls.CALL_FUNCTION = opmap['CALL_FUNCTION'] + cls.CALL_FUNCTION_VAR = opmap['CALL_FUNCTION_VAR'] + + def teardown_class(cls): + from pypy.rlib import rstack + rstack.resume_state_create = cls.old_resume_state_create + + def start(self, w_coro): + self.i = 0 + self.frame_to_check = w_coro.frame + w_coro.frame = None # avoid exploding in kill > __del__ + + def end(self): + assert self.i == len(self.frame_to_check) + + def check_entry(self, label, *args): + frame = self.frame_to_check + assert frame[self.i] == (label, args) + self.i += 1 + + + def test_two_frames_simple(self): + space = self.space + + w_res = space.appexec([], """(): + import _stackless as stackless + import pickle + + main = stackless.coroutine.getcurrent() + d = {'main': main} + + exec \"\"\" +def f(): + g(1) + +def g(x): + main.switch() +\"\"\" in d + f = d['f'] + g = d['g'] + + co = stackless.coroutine() + co.bind(f) + co.switch() + + s = pickle.dumps(co) + co = pickle.loads(s) + + return co, f, g + """) + + w_co, w_f, w_g = space.unpacktuple(w_res) + + ec = space.getexecutioncontext() + fcode = w_f.code.co_code + gcode = w_g.code.co_code + + self.start(w_co) + e = self.check_entry + e('yield_current_frame_to_caller_1') + e('coroutine__bind', w_co.costate) + e('appthunk', w_co.costate) + # f + e('execute_frame', FrameCheck('f'), ec) + e('dispatch', FrameCheck('f'), fcode, ec) + e('handle_bytecode', FrameCheck('f'), fcode, ec) + e('dispatch_call', FrameCheck('f'), fcode, + BytecodeCheck(fcode, self.CALL_FUNCTION, 1), ec) + e('CALL_FUNCTION', FrameCheck('f'), 1) + # g + e('execute_frame', FrameCheck('g'), ec) + e('dispatch', FrameCheck('g'), gcode, ec) + e('handle_bytecode', FrameCheck('g'), gcode, ec) + e('dispatch_call', FrameCheck('g'), gcode, + BytecodeCheck(gcode, self.CALL_FUNCTION, 0), ec) + e('CALL_FUNCTION', FrameCheck('g'), 0) + e('w_switch', w_co.costate, space) + e('coroutine_switch', w_co.costate) + self.end() + + def test_two_frames_stararg(self): + space = self.space + + w_res = space.appexec([], """(): + import _stackless as stackless + import pickle + + main = stackless.coroutine.getcurrent() + d = {'main': main} + + exec \"\"\" +def f(): + g(4, 3, d=2, *(1,)) + +def g(a, b, c, d): + 
main.switch() +\"\"\" in d + f = d['f'] + g = d['g'] + + co = stackless.coroutine() + co.bind(f) + co.switch() + + s = pickle.dumps(co) + co = pickle.loads(s) + + return co, f, g + """) + + w_co, w_f, w_g = space.unpacktuple(w_res) + + ec = space.getexecutioncontext() + fcode = w_f.code.co_code + gcode = w_g.code.co_code + + self.start(w_co) + e = self.check_entry + e('yield_current_frame_to_caller_1') + e('coroutine__bind', w_co.costate) + e('appthunk', w_co.costate) + # f + e('execute_frame', FrameCheck('f'), ec) + e('dispatch', FrameCheck('f'), fcode, ec) + e('handle_bytecode', FrameCheck('f'), fcode, ec) + e('dispatch_call', FrameCheck('f'), fcode, + BytecodeCheck(fcode, self.CALL_FUNCTION_VAR, 2+(1<<8)), ec) + e('call_function', FrameCheck('f')) + # g + e('execute_frame', FrameCheck('g'), ec) + e('dispatch', FrameCheck('g'), gcode, ec) + e('handle_bytecode', FrameCheck('g'), gcode, ec) + e('dispatch_call', FrameCheck('g'), gcode, + BytecodeCheck(gcode, self.CALL_FUNCTION, 0), ec) + e('CALL_FUNCTION', FrameCheck('g'), 0) + e('w_switch', w_co.costate, space) + e('coroutine_switch', w_co.costate) + self.end() + + def test_two_frames_method(self): + space = self.space + + w_res = space.appexec([], """(): + import _stackless as stackless + import pickle + import new, sys + + mod = new.module('mod') + sys.modules['mod'] = mod + + main = stackless.coroutine.getcurrent() + d = {'main': main} + + exec \"\"\" +def f(): + a = A() + a.m(1) + +def g(_, x): + main.switch() + +class A(object): + m = g +\"\"\" in d + f = d['f'] + g = d['g'] + A = d['A'] + + # to make pickling work + mod.A = A + A.__module__ = 'mod' + + co = stackless.coroutine() + co.bind(f) + co.switch() + + s = pickle.dumps(co) + co = pickle.loads(s) + + return co, f, g + """) + + w_co, w_f, w_g = space.unpacktuple(w_res) + + ec = space.getexecutioncontext() + fcode = w_f.code.co_code + gcode = w_g.code.co_code + + self.start(w_co) + e = self.check_entry + e('yield_current_frame_to_caller_1') + e('coroutine__bind', w_co.costate) + e('appthunk', w_co.costate) + # f + e('execute_frame', FrameCheck('f'), ec) + e('dispatch', FrameCheck('f'), fcode, ec) + e('handle_bytecode', FrameCheck('f'), fcode, ec) + e('dispatch_call', FrameCheck('f'), fcode, + BytecodeCheck(fcode, self.CALL_FUNCTION, 1), ec) + e('CALL_FUNCTION', FrameCheck('f'), 1) + # g + e('execute_frame', FrameCheck('g'), ec) + e('dispatch', FrameCheck('g'), gcode, ec) + e('handle_bytecode', FrameCheck('g'), gcode, ec) + e('dispatch_call', FrameCheck('g'), gcode, + BytecodeCheck(gcode, self.CALL_FUNCTION, 0), ec) + e('CALL_FUNCTION', FrameCheck('g'), 0) + e('w_switch', w_co.costate, space) + e('coroutine_switch', w_co.costate) + self.end() + +class TestReconstructFrameChain(BaseTestReconstructFrameChain): + pass + +class TestReconstructFrameChain_CALL_METHOD(BaseTestReconstructFrameChain): + OPTIONS = {"objspace.opcodes.CALL_METHOD": True} + + def setup_class(cls): + skip("this needs special casing in Function reduce for BuiltinCodes like in Method as first thing") + + From arigo at codespeak.net Mon Aug 4 11:26:12 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 4 Aug 2008 11:26:12 +0200 (CEST) Subject: [pypy-svn] r56955 - pypy/dist/pypy/translator/benchmark Message-ID: <20080804092612.B80BE168405@codespeak.net> Author: arigo Date: Mon Aug 4 11:26:12 2008 New Revision: 56955 Modified: pypy/dist/pypy/translator/benchmark/benchmarks.py Log: Fix for py lib changes. 
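The diff that follows replaces the py-lib-specific helper py.__package__.getpath().dirpath() with plain os.path calls. As a standalone sketch of that idiom, finding the directory that contains an installed package so it can be put on PYTHONPATH, using the stdlib email package purely as a stand-in (any importable package would do; the variable names here are invented):

import os
import email                               # stand-in for the py package

pkg_dir = os.path.dirname(email.__file__)  # .../email, the package directory itself
parent_dir = os.path.dirname(pkg_dir)      # the directory containing the package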
Modified: pypy/dist/pypy/translator/benchmark/benchmarks.py ============================================================================== --- pypy/dist/pypy/translator/benchmark/benchmarks.py (original) +++ pypy/dist/pypy/translator/benchmark/benchmarks.py Mon Aug 4 11:26:12 2008 @@ -123,7 +123,7 @@ 'svn co -r100 http://johnnydebris.net/templess/trunk templess' """ here = py.magic.autopath().dirpath() - pypath = py.__package__.getpath().dirpath() + pypath = os.path.dirname(os.path.dirname(py.__file__)) templessdir = here.join('templess') testscript = templessdir.join('test/oneshot.py') command = 'PYTHONPATH="%s:%s" "%s" "%s" 100' % (here, pypath, From bgola at codespeak.net Mon Aug 4 20:08:36 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 4 Aug 2008 20:08:36 +0200 (CEST) Subject: [pypy-svn] r56980 - in pypy/branch/2.5-features/pypy: interpreter/astcompiler interpreter/pyparser interpreter/pyparser/test module/__builtin__ Message-ID: <20080804180836.1C041169F25@codespeak.net> Author: bgola Date: Mon Aug 4 20:08:35 2008 New Revision: 56980 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.py pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py pypy/branch/2.5-features/pypy/interpreter/astcompiler/future.py pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Log: absolute import support. astbuilder and astcompiler already done, missing the new import behavior (when __future__.asbolute_import) in importing.py Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.py Mon Aug 4 20:08:35 2008 @@ -2477,10 +2477,11 @@ For.typedef.acceptable_as_base_class = False class From(Node): - def __init__(self, modname, names, lineno=-1): + def __init__(self, modname, names, level, lineno=-1): Node.__init__(self, lineno) self.modname = modname self.names = names + self.level = level def getChildren(self): "NOT_RPYTHON" Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py Mon Aug 4 20:08:35 2008 @@ -19,4 +19,5 @@ CO_GENERATOR = 0x0020 CO_GENERATOR_ALLOWED = 0x1000 CO_FUTURE_DIVISION = 0x2000 +CO_FUTURE_ABSIMPORT = 0x4000 CO_FUTURE_WITH_STATEMENT = 0x8000 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/future.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/future.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/future.py Mon Aug 4 20:08:35 2008 @@ -15,7 +15,7 @@ class FutureParser(ast.ASTVisitor): - features = ("nested_scopes", "generators", "division", "with_statement") + features = ("nested_scopes", "generators", "division", "with_statement", "absolute_import") def __init__(self): self.found = {} # set Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py 
============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py Mon Aug 4 20:08:35 2008 @@ -10,7 +10,7 @@ SC_FREE, SC_CELL, SC_DEFAULT, OP_APPLY, OP_ASSIGN, OP_DELETE, OP_NONE from pypy.interpreter.astcompiler.consts import CO_VARARGS, CO_VARKEYWORDS, \ CO_NEWLOCALS, CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, \ - CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT + CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSIMPORT from pypy.interpreter.pyparser.error import SyntaxError from pypy.interpreter.astcompiler.opt import is_constant_false from pypy.interpreter.astcompiler.opt import is_constant_true @@ -150,6 +150,8 @@ self.graph.setFlag(CO_GENERATOR_ALLOWED) elif feature == "with_statement": self.graph.setFlag(CO_FUTURE_WITH_STATEMENT) + elif feature == "absolute_import": + self.graph.setFlag(CO_FUTURE_ABSIMPORT) def emit(self, inst ): return self.graph.emit( inst ) @@ -865,7 +867,7 @@ def visitFrom(self, node): self.set_lineno(node) fromlist = [ self.space.wrap(name) for name,alias in node.names ] - self.emitop_obj('LOAD_CONST', self.space.wrap(-1)) # 2.5 flag + self.emitop_obj('LOAD_CONST', self.space.wrap(node.level)) # 2.5 flag self.emitop_obj('LOAD_CONST', self.space.newtuple(fromlist)) self.emitop('IMPORT_NAME', node.modname) for name, alias in node.names: Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Mon Aug 4 20:08:35 2008 @@ -841,8 +841,11 @@ import_as_name: NAME [NAME NAME] """ atoms = get_atoms(builder, nb) - - index = 1 + index = 1 # skip from + level = 0 + while atoms[index].name == builder.parser.tokens['DOT']: + level += 1 + index += 1 incr, from_name = parse_dotted_names(atoms[index:], builder) index += (incr + 1) # skip 'import' token = atoms[index] @@ -879,7 +882,9 @@ names.append((name, as_name)) if index < l: # case ',' index += 1 - builder.push(ast.From(from_name, names, atoms[0].lineno)) + if level == 0: + level = -1 + builder.push(ast.From(from_name, names, level, atoms[0].lineno)) def build_yield_stmt(builder, nb): Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py Mon Aug 4 20:08:35 2008 @@ -25,7 +25,7 @@ """ from pypy.interpreter.astcompiler.consts import CO_GENERATOR_ALLOWED, \ - CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT + CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSIMPORT def getFutures(futureFlags, source): futures = FutureAutomaton(futureFlags, source) @@ -53,7 +53,7 @@ * other future statements. The features recognized by Python 2.5 are "generators", - "division", "nested_scopes" and "with_statement". + "division", "nested_scopes" and "with_statement", "absolute_import". "generators", "division" and "nested_scopes" are redundant in 2.5 because they are always enabled. 
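
A rough sketch of what the new future flag is meant to change at run time, assuming CPython 2.5's __import__ signature (name, globals, locals, fromlist, level), which the importhook below accepts as well: without the future statement the compiler emits level -1, i.e. the classic behaviour of trying a relative import before falling back to an absolute one; with CO_FUTURE_ABSIMPORT set it emits level 0, forcing an absolute import only. Hypothetical helper for illustration:

    def do_import(name, globals_, absolute_enabled):
        if absolute_enabled:      # module compiled with CO_FUTURE_ABSIMPORT
            level = 0             # absolute import only
        else:
            level = -1            # relative-then-absolute lookup
        return __import__(name, globals_, None, None, level)
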
Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py Mon Aug 4 20:08:35 2008 @@ -141,4 +141,10 @@ assert f.pos == len(s) assert f.flags == 0 - +def test_from_import_abs_import(): + s = 'from __future__ import absolute_import\n' + f = run(s) + assert f.pos == len(s) + assert f.flags == fut.CO_FUTURE_ABSIMPORT + + Modified: pypy/branch/2.5-features/pypy/module/__builtin__/importing.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/importing.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Mon Aug 4 20:08:35 2008 @@ -152,13 +152,13 @@ return None def importhook(space, modulename, w_globals=None, - w_locals=None, w_fromlist=None): + w_locals=None, w_fromlist=None, w_level=-1): if not modulename: raise OperationError( space.w_ValueError, space.wrap("Empty module name")) w = space.wrap - + ctxt_name = None if w_globals is not None and not space.is_w(w_globals, space.w_None): ctxt_w_name = try_getitem(space, w_globals, w('__name__')) @@ -200,7 +200,7 @@ space.setitem(space.sys.get('modules'), w(rel_modulename),space.w_None) return w_mod # -importhook.unwrap_spec = [ObjSpace,str,W_Root,W_Root,W_Root] +importhook.unwrap_spec = [ObjSpace,str,W_Root,W_Root,W_Root,W_Root] def absolute_import(space, modulename, baselevel, w_fromlist, tentative): lock = getimportlock(space) From cami at codespeak.net Mon Aug 4 22:53:15 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Mon, 4 Aug 2008 22:53:15 +0200 (CEST) Subject: [pypy-svn] r56983 - pypy/dist/pypy/lang/gameboy Message-ID: <20080804205315.28547169E8A@codespeak.net> Author: cami Date: Mon Aug 4 22:53:13 2008 New Revision: 56983 Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/interrupt.py Log: fixed typo in interrupt which had no effect on the execution changed shutdown order in gameboy_implementation.py Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Mon Aug 4 22:53:13 2008 @@ -37,9 +37,9 @@ self.emulate(constants.GAMEBOY_CLOCK >> 2) #RSDL.Delay(1) finally: + self.handle_execution_error() lltype.free(self.event, flavor='raw') RSDL.Quit() - self.handle_execution_error() return 0 def handle_execution_error(self): Modified: pypy/dist/pypy/lang/gameboy/interrupt.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/interrupt.py (original) +++ pypy/dist/pypy/lang/gameboy/interrupt.py Mon Aug 4 22:53:13 2008 @@ -66,7 +66,7 @@ if address == constants.IE: self.set_enable_mask(data) elif address == constants.IF: - self.set_fnterrupt_flag(data) + self.set_interrupt_flag(data) def read(self, address): if address == constants.IE: @@ -105,6 +105,6 @@ flag |= interrupt_flag.mask return flag | 0xE0 - def set_fnterrupt_flag(self, data): + def set_interrupt_flag(self, data): for flag in self.interrupt_flags: flag.set_pending((data & flag.mask) != 0) From cami at codespeak.net Tue Aug 5 10:24:41 2008 From: cami at codespeak.net (cami at 
codespeak.net) Date: Tue, 5 Aug 2008 10:24:41 +0200 (CEST) Subject: [pypy-svn] r56991 - pypy/dist/pypy/lang/gameboy Message-ID: <20080805082441.99A88169FA1@codespeak.net> Author: cami Date: Tue Aug 5 10:24:39 2008 New Revision: 56991 Added: pypy/dist/pypy/lang/gameboy/cpu.py Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Log: removed some debug statements from the cpu Added: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/lang/gameboy/cpu.py Tue Aug 5 10:24:39 2008 @@ -0,0 +1,1174 @@ + +from pypy.lang.gameboy import constants +from pypy.lang.gameboy.ram import * +from pypy.lang.gameboy.interrupt import * + +from pypy.rlib.objectmodel import we_are_translated +if not we_are_translated(): + from pypy.lang.gameboy.debug import * + +# --------------------------------------------------------------------------- + +def process_2_complement(value): + # check if the left most bit is set + if (value >> 7) == 1: + return -((~value) & 0xFF) - 1 + else : + return value +# --------------------------------------------------------------------------- + +class AbstractRegister(object): + def get(self, use_cycles=True): + return 0xFF + +class Register(AbstractRegister): + + def __init__(self, cpu, value=0): + assert isinstance(cpu, CPU) + self.reset_value = self.value = value + self.cpu = cpu + if value != 0: + self.set(value) + + def reset(self): + self.value = self.reset_value + + def set(self, value, use_cycles=True): + self.value = value & 0xFF + if use_cycles: + self.cpu.cycles -= 1 + + def get(self, use_cycles=True): + return self.value + + def add(self, value, use_cycles=True): + self.set(self.get(use_cycles)+value, use_cycles) + + def sub(self, value, use_cycles=True): + self.set(self.get(use_cycles)-value, use_cycles) + +#------------------------------------------------------------------------------ + +class DoubleRegister(AbstractRegister): + + def __init__(self, cpu, hi, lo, reset_value=0): + assert isinstance(cpu, CPU) + assert isinstance(lo, Register) + assert isinstance(hi, Register) + self.cpu = cpu + self.hi = hi + self.lo = lo + self.reset_value = reset_value + + def set(self, value, use_cycles=True): + value = value & 0xFFFF + self.set_hi(value >> 8, use_cycles) + self.set_lo(value & 0xFF, use_cycles) + if use_cycles: + self.cpu.cycles += 1 + + def set_hi_lo(self, hi, lo, use_cycles=True): + self.set_hi(hi, use_cycles) + self.set_lo(lo, use_cycles) + + def reset(self): + self.set(self.reset_value, use_cycles=False) + + def set_hi(self, hi=0, use_cycles=True): + self.hi.set(hi, use_cycles) + + def set_lo(self, lo=0, use_cycles=True): + self.lo.set(lo, use_cycles) + + def get(self, use_cycles=True): + return (self.hi.get(use_cycles)<<8) + self.lo.get(use_cycles) + + def get_hi(self, use_cycles=True): + return self.hi.get(use_cycles) + + def get_lo(self, use_cycles=True): + return self.lo.get(use_cycles) + + def inc(self, use_cycles=True): + self.set(self.get(use_cycles) +1, use_cycles=use_cycles) + if use_cycles: + self.cpu.cycles -= 1 + + def dec(self, use_cycles=True): + self.set(self.get(use_cycles) - 1, use_cycles=use_cycles) + if use_cycles: + self.cpu.cycles -= 1 + + def add(self, value, use_cycles=True): + self.set(self.get(use_cycles) + value, use_cycles=use_cycles) + if use_cycles: + self.cpu.cycles -= 2 + + +# ------------------------------------------------------------------------------ + +class ImmediatePseudoRegister(Register): + + def __init__(self, cpu, hl): + 
assert isinstance(cpu, CPU) + self.cpu = cpu + self.hl = hl + + def set(self, value, use_cycles=True): + self.cpu.write(self.hl.get(use_cycles=use_cycles), value) # 2 + 0 + if not use_cycles: + self.cpu.cycles += 2 + + def get(self, use_cycles=True): + if not use_cycles: + self.cpu.cycles += 1 + return self.cpu.read(self.hl.get(use_cycles=use_cycles)) # 1 + +# ------------------------------------------------------------------------------ + +class FlagRegister(Register): + + def __init__(self, cpu, reset_value): + assert isinstance(cpu, CPU) + self.cpu = cpu + self.reset_value = reset_value + self.reset() + + def reset(self): + self.partial_reset() + + def partial_reset(self, keep_z=False, keep_n=False, keep_h=False, keep_c=False,\ + keep_p=False, keep_s=False): + if not keep_z: + self.z_flag = False + if not keep_n: + self.n_flag = False + if not keep_h: + self.h_flag = False + if not keep_c: + self.c_flag = False + if not keep_p: + self.p_flag = False + if not keep_s: + self.s_flag = False + self.lower = 0x00 + + def get(self, use_cycles=True): + value = 0 + value += (int(self.c_flag) << 4) + value += (int(self.h_flag) << 5) + value += (int(self.n_flag) << 6) + value += (int(self.z_flag) << 7) + return value + self.lower + + def set(self, value, use_cycles=True): + self.c_flag = bool(value & (1 << 4)) + self.h_flag = bool(value & (1 << 5)) + self.n_flag = bool(value & (1 << 6)) + self.z_flag = bool(value & (1 << 7)) + self.lower = value & 0x0F + if use_cycles: + self.cpu.cycles -= 1 + + def z_flag_compare(self, a, reset=False): + if reset: + self.reset() + if isinstance(a, (Register)): + a = a.get() + self.z_flag = ((a & 0xFF) == 0) + + def c_flag_compare(self, value, compare_and=0x01, reset=False): + if reset: + self.reset() + #print hex(value), hex(compare_and), (value & compare_and) != 0 + self.c_flag = ((value & compare_and) != 0) + + def h_flag_compare(self, value, a, inverted=False): + if inverted: + self.h_flag = ((value & 0x0F) < (a & 0x0F)) + else: + self.h_flag = ((value & 0x0F) > (a & 0x0F)) + + #def c_flag_compare(self, a, b): + # self.c_flag = (a < b) + +# # ------------------------------------------------------------------------------ + + +DEBUG_INSTRUCTION_COUNTER = 1 + +class CPU(object): + """ + PyGIRL GameBoy (TM) Emulator + + Central Unit Processor_a (Sharp LR35902 CPU) + """ + def __init__(self, interrupt, memory): + assert isinstance(interrupt, Interrupt) + self.interrupt = interrupt + self.memory = memory + self.ime = False + self.halted = False + self.cycles = 0 + self.ini_registers() + self.rom = [0] + self.reset() + + def ini_registers(self): + self.b = Register(self) + self.c = Register(self) + self.bc = DoubleRegister(self, self.b, self.c, constants.RESET_BC) + + self.d = Register(self) + self.e = Register(self) + self.de = DoubleRegister(self, self.d, self.e, constants.RESET_DE) + + self.h = Register(self) + self.l = Register(self) + self.hl = DoubleRegister(self, self.h, self.l, constants.RESET_HL) + + self.hli = ImmediatePseudoRegister(self, self.hl) + self.pc = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_PC) + self.sp = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_SP) + + self.a = Register(self, constants.RESET_A) + self.f = FlagRegister(self, constants.RESET_F) + self.af = DoubleRegister(self, self.a, self.f) + + + def reset(self): + self.reset_registers() + self.f.reset() + self.f.z_flag = True + self.ime = False + self.halted = False + self.cycles = 0 + self.instruction_counter = 0 + 
self.last_op_code = -1 + self.last_fetch_execute_op_code = -1 + + def reset_registers(self): + self.a.reset() + self.f.reset() + self.bc.reset() + self.de.reset() + self.hl.reset() + self.sp.reset() + self.pc.reset() + + # --------------------------------------------------------------- + + def get_af(self): + return self.af + + def get_a(self): + return self.a + + def get_f(self): + return self.f + + def get_bc(self): + return self.bc + + def get_b(self): + return self.b + + def get_c(self): + return self.c + + def get_de(self): + return self.de + + def get_d(self): + return self.d + + def get_e(self): + return self.e + + def get_hl(self): + return self.hl + + def get_hli(self): + return self.hli + + def get_h(self): + return self.h + + def get_l(self): + return self.l + + def get_sp(self): + return self.sp + + def get_if(self): + val = 0x00 + if self.ime: + val = 0x01 + if self.halted: + val += 0x80 + return val + + def is_z(self): + """ zero flag""" + return self.f.z_flag + + def is_c(self): + """ carry flag, true if the result did not fit in the register""" + return self.f.c_flag + + def is_h(self): + """ half carry, carry from bit 3 to 4""" + return self.f.h_flag + + def is_n(self): + """ subtract flag, true if the last operation was a subtraction""" + return self.f.n_flag + + def isS(self): + return self.f.s_flag + + def is_p(self): + return self.f.p_flag + + def is_not_z(self): + return not self.is_z() + + def is_not_c(self): + return not self.is_c() + + def is_not_h(self): + return not self.is_h() + + def is_not_n(self): + return not self.is_n() + + def set_rom(self, banks): + self.rom = banks + + # --------------------------------------------------------------- + + def emulate(self, ticks): + self.cycles += ticks + self.handle_pending_interrupts() + while self.cycles > 0: + self.execute(self.fetch()) + + def emulate_step(self): + self.handle_pending_interrupts() + self.execute(self.fetch()) + + + def handle_pending_interrupts(self): + if self.halted: + self.update_interrupt_cycles() + if self.ime and self.interrupt.is_pending(): + self.lower_pending_interrupt() + + def update_interrupt_cycles(self): + if self.interrupt.is_pending(): + self.halted = False + self.cycles -= 4 + elif self.cycles > 0: + self.cycles = 0 + + def lower_pending_interrupt(self): + for flag in self.interrupt.interrupt_flags: + if flag.is_pending(): + self.ime = False + self.call(flag.call_code, use_cycles=False) + flag.set_pending(False) + return + + def fetch_execute(self): + op_code = self.fetch() + self.last_fetch_execute_op_code = op_code + FETCH_EXECUTE_OP_CODES[op_code](self) + + + def execute(self, op_code): + self.instruction_counter += 1 + self.last_op_code = op_code + OP_CODES[op_code](self) + + + # ------------------------------------------------------------------- + + def debug(self): + print "0xDD called" + + def read(self, hi, lo=None): + # memory Access, 1 cycle + address = hi + if lo is not None: + address = (hi << 8) + lo + self.cycles -= 1 + return self.memory.read(address) + + def write(self, address, data): + # 2 cycles + self.memory.write(address, data) + self.cycles -= 2 + + def fetch(self, use_cycles=True): + # Fetching 1 cycle + self.cycles += 1 + if self.pc.get(use_cycles) <= 0x3FFF: + data = self.rom[self.pc.get(use_cycles)] + else: + data = self.memory.read(self.pc.get(use_cycles)) + self.pc.inc(use_cycles) # 2 cycles + return data + + def fetch_double_address(self): + lo = self.fetch() # 1 cycle + hi = self.fetch() # 1 cycle + return (hi << 8) + lo + + def 
fetch_double_register(self, register): + self.double_register_inverse_call(CPUFetchCaller(self), register) + + def push(self, data, use_cycles=True): + # Stack, 2 cycles + self.sp.dec(use_cycles) # 2 cycles + self.memory.write(self.sp.get(use_cycles), data) + + def push_double_register(self, register, use_cycles=True): + # PUSH rr 4 cycles + self.push(register.get_hi(), use_cycles) # 2 cycles + self.push(register.get_lo(), use_cycles) # 2 cycles + + def pop(self, use_cycles=True): + # 1 cycle + data = self.memory.read(self.sp.get()) + self.sp.inc() # 2 cycles + self.cycles += 1 + return data + + def pop_double_register(self, register): + # 3 cycles + self.double_register_inverse_call(CPUPopCaller(self), register) + + def double_register_inverse_call(self, getCaller, register): + b = getCaller.get() # 1 cycle + a = getCaller.get() # 1 cycle + register.set_hi_lo(a, b) # 2 cycles + self.cycles += 1 + + def call(self, address, use_cycles=True): + # 4 cycles + self.push_double_register(self.pc, use_cycles) + self.pc.set(address, use_cycles=use_cycles) # 1 cycle + if use_cycles: + self.cycles += 1 + + def ld(self, getCaller, setCaller): + # 1 cycle + setCaller.set(getCaller.get()) # 1 cycle + + def load_fetch_register(self, register): + self.ld(CPUFetchCaller(self), RegisterCallWrapper(register)) + + def store_hl_in_pc(self): + # LD PC,HL, 1 cycle + self.ld(DoubleRegisterCallWrapper(self.hl), + DoubleRegisterCallWrapper(self.pc)) + + def fetch_load(self, getCaller, setCaller): + self.ld(CPUFetchCaller(self), setCaller) + + def add_a(self, getCaller, setCaller=None): + data = getCaller.get() + # ALU, 1 cycle + added = (self.a.get() + data) & 0xFF + self.add_sub_flag_finish(added, data) + + def add_hl(self, register): + # 2 cycles + data = register.get() + added = (self.hl.get() + data) # 1 cycle + self.f.partial_reset(keep_z=True) + self.f.h_flag = (((added ^ self.hl.get() ^ data) & 0x1000) != 0) + self.f.c_flag = (added >= 0x10000 or added < 0) + self.hl.set(added & 0xFFFF) + self.cycles -= 1 + + def add_a_with_carry(self, getCaller, setCaller=None): + # 1 cycle + data = getCaller.get() + s = self.a.get() + data + int(self.f.c_flag) + self.add_sub_flag_finish(s,data) + + def subtract_with_carry_a(self, getCaller, setCaller=None): + # 1 cycle + data = getCaller.get() + s = self.a.get() - data - int(self.f.c_flag) + self.add_sub_flag_finish(s, data) + self.f.n_flag = True + + def add_sub_flag_finish(self, s, data): + self.f.reset() + # set the h flag if the 0x10 bit was affected + self.f.h_flag = (((s ^ self.a.get() ^ data) & 0x10) != 0) + self.f.c_flag = (s >= 0x100 or s < 0) + self.f.z_flag_compare(s) + self.a.set(s & 0xFF) # 1 cycle + + def subtract_a(self, getCaller, setCaller=None): + # 1 cycle + data = getCaller.get() + self.compare_a_simple(data) + self.a.sub(data, False) + + def fetch_subtract_a(self): + data = self.fetch() + # 1 cycle + self.compare_a_simple(data) # 1 cycle + self.a.sub(data, False) + + def compare_a(self, getCaller, setCaller=None): + # 1 cycle + self.compare_a_simple(getCaller.get()) + + def compare_a_simple(self, s): + s = (self.a.get() - s) & 0xFF + self.f.reset() + self.f.n_flag = True + self.f.z_flag_compare(s) + self.subtract_hc_flag_finish(s) + self.cycles -= 1 + + def subtract_hc_flag_finish(self, data): + self.f.c_flag = (data > self.a.get()) + self.f.h_flag_compare(data, self.a.get()) + + def and_a(self, getCaller, setCaller=None): + # 1 cycle + self.a.set(self.a.get() & getCaller.get()) # 1 cycle + self.f.reset() + self.f.z_flag_compare(self.a.get()) + 
self.f.h_flag = True + + def xor_a(self, getCaller, setCaller=None): + # 1 cycle + self.a.set( self.a.get() ^ getCaller.get()) # 1 cycle + self.f.z_flag_compare(self.a.get(), reset=True) + + def or_a(self, getCaller, setCaller=None): + # 1 cycle + self.a.set(self.a.get() | getCaller.get()) # 1 cycle + self.f.z_flag_compare(self.a.get(), reset=True) + + def inc_double_register(self, register): + # INC rr + register.inc() + + def dec_double_register(self, register): + # DEC rr + register.dec() + + def inc(self, getCaller, setCaller): + # 1 cycle + data = (getCaller.get() + 1) & 0xFF + self.dec_inc_flag_finish(data, setCaller, 0x00) + + def dec(self, getCaller, setCaller): + # 1 cycle + data = (getCaller.get() - 1) & 0xFF + self.dec_inc_flag_finish(data, setCaller, 0x0F) + self.f.n_flag = True + + def dec_inc_flag_finish(self, data, setCaller, compare): + self.f.partial_reset(keep_c=True) + self.f.z_flag_compare(data) + self.f.h_flag = ((data & 0x0F) == compare) + setCaller.set(data) # 1 cycle + + def rotate_left_circular(self, getCaller, setCaller): + # RLC 1 cycle + data = getCaller.get() + s = ((data << 1) & 0xFF) + ((data & 0x80) >> 7) + self.flags_and_setter_finish(s, data, setCaller, 0x80) + #self.cycles -= 1 + + def rotate_left_circular_a(self): + # RLCA rotate_left_circular_a 1 cycle + self.rotate_left_circular(RegisterCallWrapper(self.a), + RegisterCallWrapper(self.a)) + + def rotate_left(self, getCaller, setCaller): + # 1 cycle + data = getCaller.get() + s = ((data & 0x7F) << 1) + int(self.f.c_flag) + self.flags_and_setter_finish(s, data, setCaller, 0x80) # 1 cycle + + def rotate_left_a(self): + # RLA 1 cycle + self.rotate_left(RegisterCallWrapper(self.a), + RegisterCallWrapper(self.a)) + + def rotate_right_circular(self, getCaller, setCaller): + data = getCaller.get() + # RRC 1 cycle + s = (data >> 1) + ((data & 0x01) << 7) + self.flags_and_setter_finish(s, data, setCaller) # 1 cycle + + def rotate_right_circular_a(self): + # RRCA 1 cycle + self.rotate_right_circular(RegisterCallWrapper(self.a), + RegisterCallWrapper(self.a)) + + def rotate_right(self, getCaller, setCaller): + # 1 cycle + data = getCaller.get() + s = (data >> 1) + if self.f.c_flag: + s += 0x80 + self.flags_and_setter_finish(s, data, setCaller) # 1 cycle + + def rotate_right_a(self): + # RRA 1 cycle + self.rotate_right(RegisterCallWrapper(self.a), + RegisterCallWrapper(self.a)) + + def shift_left_arithmetic(self, getCaller, setCaller): + # 2 cycles + data = getCaller.get() + s = (data << 1) & 0xFF + self.flags_and_setter_finish(s, data, setCaller, 0x80) # 1 cycle + + def shift_right_arithmetic(self, getCaller, setCaller): + data = getCaller.get() + # 1 cycle + s = (data >> 1) + (data & 0x80) + self.flags_and_setter_finish(s, data, setCaller) # 1 cycle + + def shift_word_right_logical(self, getCaller, setCaller): + # 2 cycles + data = getCaller.get() + s = (data >> 1) + self.flags_and_setter_finish(s, data, setCaller) # 2 cycles + + def flags_and_setter_finish(self, s, data, setCaller, compare_and=0x01): + # 2 cycles + s &= 0xFF + self.f.reset() + self.f.z_flag_compare(s) + self.f.c_flag_compare(data, compare_and) + setCaller.set(s) # 1 cycle + + def swap(self, getCaller, setCaller): + data = getCaller.get() + # 1 cycle + s = ((data << 4) + (data >> 4)) & 0xFF + self.f.z_flag_compare(s, reset=True) + setCaller.set(s) + + + def test_bit(self, getCaller, setCaller, n): + # 2 cycles + self.f.partial_reset(keep_c=True) + self.f.h_flag = True + self.f.z_flag = False + self.f.z_flag = ((getCaller.get() & (1 << n)) == 0) + 
self.cycles -= 1 + + def set_bit(self, getCaller, setCaller, n): + # 1 cycle + setCaller.set(getCaller.get() | (1 << n)) # 1 cycle + + def reset_bit(self, getCaller, setCaller, n): + # 1 cycle + setCaller.set(getCaller.get() & (~(1 << n))) # 1 cycle + + def store_fetched_memory_in_a(self): + # LD A,(nnnn), 4 cycles + self.a.set(self.read(self.fetch_double_address())) # 1+1 + 2 cycles + + def write_a_at_bc_address(self): + # 2 cycles + self.write(self.bc.get(), self.a.get()) + + def write_a_at_de_address(self): + self.write(self.de.get(), self.a.get()) + + def store_memory_at_bc_in_a(self): + self.a.set(self.read(self.bc.get())) + + def store_memory_at_de_in_a(self): + self.a.set(self.read(self.de.get())) + + def ld_dbRegisteri_A(self, register): + # LD (rr),A 2 cycles + self.write(register.get(), self.a.get()) # 2 cycles + + def load_mem_sp(self): + # LD (nnnn),SP 5 cycles + address = self.fetch_double_address() # 2 cycles + self.write(address, self.sp.get_lo()) # 2 cycles + self.write((address + 1), self.sp.get_hi()) # 2 cycles + self.cycles += 1 + + def store_a_at_fetched_address(self): + # LD (nnnn),A 4 cycles + self.write(self.fetch_double_address(), self.a.get()) # 2 cycles + + def store_memory_at_axpanded_fetch_address_in_a(self): + # LDH A,(nn) 3 cycles + self.a.set(self.read(0xFF00 + self.fetch())) # 1+1+1 cycles + + def store_expanded_c_in_a(self): + # LDH A,(C) 2 cycles + self.a.set(self.read(0xFF00 + self.bc.get_lo())) # 1+2 cycles + + def load_and_increment_a_hli(self): + # loadAndIncrement A,(HL) 2 cycles + self.a.set(self.read(self.hl.get())) # 2 cycles + self.hl.inc()# 2 cycles + self.cycles += 2 + + def load_and_decrement_a_hli(self): + # loadAndDecrement A,(HL) 2 cycles + self.a.set(self.read(self.hl.get())) # 2 cycles + self.hl.dec() # 2 cycles + self.cycles += 2 + + def write_a_at_expanded_fetch_address(self): + # LDH (nn),A 3 cycles + self.write(0xFF00 + self.fetch(), self.a.get()) # 2 + 1 cycles + + def write_a_at_expaded_c_address(self): + # LDH (C),A 2 cycles + self.write(0xFF00 + self.c.get(), self.a.get()) # 2 cycles + + def load_and_increment_hli_a(self): + # loadAndIncrement (HL),A 2 cycles + self.write(self.hl.get(), self.a.get()) # 2 cycles + self.hl.inc() # 2 cycles + self.cycles += 2 + + def load_and_decrement_hli_a(self): + # loadAndDecrement (HL),A 2 cycles + self.write(self.hl.get(), self.a.get()) # 2 cycles + self.hl.dec() # 2 cycles + self.cycles += 2 + + def store_hl_in_sp(self): + # LD SP,HL 2 cycles + self.sp.set(self.hl.get()) # 1 cycle + self.cycles -= 1 + + def complement_a(self): + # CPA + self.a.set(self.a.get() ^ 0xFF) + self.f.n_flag = True + self.f.h_flag = True + + def decimal_adjust_a(self): + # DAA 1 cycle + delta = 0 + if self.is_h(): + delta |= 0x06 + if self.is_c(): + delta |= 0x60 + if (self.a.get() & 0x0F) > 0x09: + delta |= 0x06 + if (self.a.get() & 0xF0) > 0x80: + delta |= 0x60 + if (self.a.get() & 0xF0) > 0x90: + delta |= 0x60 + if not self.is_n(): + self.a.set((self.a.get() + delta) & 0xFF) # 1 cycle + else: + self.a.set((self.a.get() - delta) & 0xFF) # 1 cycle + self.f.partial_reset(keep_n=True) + if delta >= 0x60: + self.f.c_flag = True + self.f.z_flag_compare(self.a.get()) + + def increment_sp_by_fetch(self): + # ADD SP,nn 4 cycles + self.sp.set(self.get_fetchadded_sp()) # 1+1 cycle + self.cycles -= 2 + + def store_fetch_added_sp_in_hl(self): + # LD HL,SP+nn 3 cycles + self.hl.set(self.get_fetchadded_sp()) # 1+1 cycle + self.cycles -= 1 + + def get_fetchadded_sp(self): + # 1 cycle + offset = process_2_complement(self.fetch()) # 
1 cycle + s = (self.sp.get() + offset) & 0xFFFF + self.f.reset() + if (offset >= 0): + self.f.c_flag = (s < self.sp.get()) + if (s & 0x0F00) < (self.sp.get() & 0x0F00): + self.f.h_flag = True + else: + self.f.c_flag = (s > self.sp.get()) + if (s & 0x0F00) > (self.sp.get() & 0x0F00): + self.f.h_flag = True + return s + + + def complement_carry_flag(self): + # CCF/SCF + self.f.partial_reset(keep_z=True, keep_c=True) + self.f.c_flag = not self.f.c_flag + + def set_carry_flag(self): + self.f.partial_reset(keep_z=True) + self.f.c_flag = True + + def nop(self): + # NOP 1 cycle + self.cycles -= 1 + + def jump(self): + # JP nnnn, 4 cycles + self.pc.set(self.fetch_double_address()) # 1+2 cycles + self.cycles -= 1 + + def conditional_jump(self, cc): + # JP cc,nnnn 3,4 cycles + if cc: + self.jump() # 4 cycles + else: + self.pc.add(2) # 3 cycles + + def relative_jump(self): + # JR +nn, 3 cycles + self.pc.add(process_2_complement(self.fetch())) # 3 + 1 cycles + self.cycles += 1 + + def relative_conditional_jump(self, cc): + # JR cc,+nn, 2,3 cycles + if cc: + self.relative_jump() # 3 cycles + else: + self.pc.inc() # 2 cycles + + def unconditional_call(self): + # CALL nnnn, 6 cycles + self.call(self.fetch_double_address()) # 4+2 cycles + + def conditional_call(self, cc): + # CALL cc,nnnn, 3,6 cycles + if cc: + self.unconditional_call() # 6 cycles + else: + self.pc.add(2) # 3 cycles + + def ret(self): + # RET 4 cycles + lo = self.pop() # 1 cycle + hi = self.pop() # 1 cycle + self.pc.set_hi_lo(hi, lo) # 2 cycles + + def conditional_return(self, cc): + # RET cc 2,5 cycles + if cc: + self.ret() # 4 cycles + # FIXME maybe this should be the same + self.cycles -= 1 + else: + self.cycles -= 2 + + def return_form_interrupt(self): + # RETI 4 cycles + self.ret() # 4 cycles + self.enable_interrupts() # 1 cycle + others + #self.cycles += 1 + + def restart(self, nn): + # RST nn 4 cycles + self.call(nn) # 4 cycles + + def disable_interrups(self): + # DI/EI 1 cycle + self.ime = False + self.cycles -= 1 + + def enable_interrupts(self): + # 1 cycle + self.ime = True + self.execute(self.fetch()) # 1 + self.handle_pending_interrupts() + + def halt(self): + # HALT/STOP + self.halted = True + # emulate bug when interrupts are pending + if not self.ime and self.interrupt.is_pending(): + self.execute(self.memory.read(self.pc.get())) + self.handle_pending_interrupts() + + def stop(self): + # 0 cycles + self.cycles += 1 + self.fetch() + +# ------------------------------------------------------------------------------ + +class CallWrapper(object): + + def get(self, use_cycles=True): + raise Exception("called CallWrapper.get") + return 0 + + def set(self, value, use_cycles=True): + raise Exception("called CallWrapper.set") + pass + +class NumberCallWrapper(CallWrapper): + + def __init__(self, number): + self.number = number + + def get(self, use_cycles=True): + return self.number + + def set(self, value, use_cycles=True): + raise Exception("called CallWrapper.set") + pass + +class RegisterCallWrapper(CallWrapper): + def __init__(self, register): + self.register = register + + def get(self, use_cycles=True): + return self.register.get(use_cycles) + + def set(self, value, use_cycles=True): + return self.register.set(value, use_cycles) + + +class DoubleRegisterCallWrapper(CallWrapper): + def __init__(self, register): + self.register = register + + def get(self, use_cycles=True): + return self.register.get(use_cycles) + + def set(self, value, use_cycles=True): + return self.register.set(value, use_cycles) + + +class 
CPUPopCaller(CallWrapper): + def __init__(self, cpu): + self.cpu = cpu + + def get(self, use_cycles=True): + return self.cpu.pop(use_cycles) + + +class CPUFetchCaller(CallWrapper): + def __init__(self, cpu): + self.cpu = cpu + + def get(self, use_cycles=True): + return self.cpu.fetch(use_cycles) + +# op_code LOOKUP TABLE GENERATION ----------------------------------------------- + +GROUPED_REGISTERS = [CPU.get_b, CPU.get_c, CPU.get_d, CPU.get_e, + CPU.get_h, CPU.get_l, CPU.get_hli, CPU.get_a] + +def create_group_op_codes(table): + op_codes =[] + for entry in table: + op_code = entry[0] + step = entry[1] + function = entry[2] + if len(entry) == 4: + for registerGetter in GROUPED_REGISTERS: + for n in entry[3]: + op_codes.append((op_code, group_lambda(function, registerGetter, n))) + op_code += step + if len(entry) == 5: + entryStep = entry[4] + for registerGetter in GROUPED_REGISTERS: + stepop_code = op_code + for n in entry[3]: + op_codes.append((stepop_code, group_lambda(function, registerGetter, n))) + stepop_code += entryStep + op_code+=step + else: + for registerGetter in GROUPED_REGISTERS: + op_codes.append((op_code,group_lambda(function, registerGetter))) + op_code += step + return op_codes + +def group_lambda(function, register_getter, value=None): + if value is None: + return lambda s: function(s, RegisterCallWrapper(register_getter(s)), + RegisterCallWrapper(register_getter(s))) + else: + return lambda s: function(s, RegisterCallWrapper(register_getter(s)), + RegisterCallWrapper(register_getter(s)), value) + +def create_load_group_op_codes(): + op_codes = [] + op_code = 0x40 + for storeRegister in GROUPED_REGISTERS: + for loadRegister in GROUPED_REGISTERS: + if loadRegister != CPU.get_hli or storeRegister != CPU.get_hli: + op_codes.append((op_code, load_group_lambda(storeRegister, loadRegister))) + op_code += 1 + return op_codes + +def load_group_lambda(store_register, load_register): + return lambda s: CPU.ld(s, RegisterCallWrapper(load_register(s)), + RegisterCallWrapper(store_register(s))) + +def create_register_op_codes(table): + op_codes = [] + for entry in table: + op_code = entry[0] + step = entry[1] + function = entry[2] + for registerOrGetter in entry[3]: + op_codes.append((op_code, register_lambda(function, registerOrGetter))) + op_code += step + return op_codes + +def register_lambda(function, registerOrGetter): + if callable(registerOrGetter): + return lambda s: function(s, registerOrGetter(s)) + else: + return lambda s: function(s, registerOrGetter) + + +def initialize_op_code_table(table): + result = [None] * (0xFF+1) + for entry in table: + if (entry is None) or (len(entry) == 0) or entry[-1] is None: + continue + if len(entry) == 2: + positions = [entry[0]] + else: + positions = range(entry[0], entry[1]+1) + for pos in positions: + result[pos] = entry[-1] + return result + +# op_code TABLES --------------------------------------------------------------- +# Table with one to one mapping of simple OP Codes +FIRST_ORDER_OP_CODES = [ + (0x00, CPU.nop), + (0x08, CPU.load_mem_sp), + (0x10, CPU.stop), + (0x18, CPU.relative_jump), + (0x02, CPU.write_a_at_bc_address), + (0x12, CPU.write_a_at_de_address), + (0x22, CPU.load_and_increment_hli_a), + (0x32, CPU.load_and_decrement_hli_a), + (0x0A, CPU.store_memory_at_bc_in_a), + (0x1A, CPU.store_memory_at_de_in_a), + (0x2A, CPU.load_and_increment_a_hli), + (0x3A, CPU.load_and_decrement_a_hli), + (0x07, CPU.rotate_left_circular_a), + (0x0F, CPU.rotate_right_circular_a), + (0x17, CPU.rotate_left_a), + (0x1F, CPU.rotate_right_a), + 
(0x27, CPU.decimal_adjust_a), + (0x2F, CPU.complement_a), + (0x37, CPU.set_carry_flag), + (0x3F, CPU.complement_carry_flag), + (0x76, CPU.halt), + (0xF3, CPU.disable_interrups), + (0xFB, CPU.enable_interrupts), + (0xE2, CPU.write_a_at_expaded_c_address), + (0xEA, CPU.store_a_at_fetched_address), + (0xF2, CPU.store_expanded_c_in_a), + (0xFA, CPU.store_fetched_memory_in_a), + (0xC3, CPU.jump), + (0xC9, CPU.ret), + (0xD9, CPU.return_form_interrupt), + (0xDD, CPU.debug), + (0xE9, CPU.store_hl_in_pc), + (0xF9, CPU.store_hl_in_sp), + (0xE0, CPU.write_a_at_expanded_fetch_address), + (0xE8, CPU.increment_sp_by_fetch), + (0xF0, CPU.store_memory_at_axpanded_fetch_address_in_a), + (0xF8, CPU.store_fetch_added_sp_in_hl), + (0xCB, CPU.fetch_execute), + (0xCD, CPU.unconditional_call), + (0xC6, lambda s: CPU.add_a(s, CPUFetchCaller(s))), + (0xCE, lambda s: CPU.add_a_with_carry(s, CPUFetchCaller(s))), + (0xD6, CPU.fetch_subtract_a), + (0xDE, lambda s: CPU.subtract_with_carry_a(s, CPUFetchCaller(s))), + (0xE6, lambda s: CPU.and_a(s, CPUFetchCaller(s))), + (0xEE, lambda s: CPU.xor_a(s, CPUFetchCaller(s))), + (0xF6, lambda s: CPU.or_a(s, CPUFetchCaller(s))), + (0xFE, lambda s: CPU.compare_a(s, CPUFetchCaller(s))), + (0xC7, lambda s: CPU.restart(s, 0x00)), + (0xCF, lambda s: CPU.restart(s, 0x08)), + (0xD7, lambda s: CPU.restart(s, 0x10)), + (0xDF, lambda s: CPU.restart(s, 0x18)), + (0xE7, lambda s: CPU.restart(s, 0x20)), + (0xEF, lambda s: CPU.restart(s, 0x28)), + (0xF7, lambda s: CPU.restart(s, 0x30)), + (0xFF, lambda s: CPU.restart(s, 0x38)) +] + +# Table for RegisterGroup OP Codes: (startAddress, delta, method) +REGISTER_GROUP_OP_CODES = [ + (0x04, 0x08, CPU.inc), + (0x05, 0x08, CPU.dec), + (0x06, 0x08, CPU.load_fetch_register), + (0x80, 0x01, CPU.add_a), + (0x88, 0x01, CPU.add_a_with_carry), + (0x90, 0x01, CPU.subtract_a), + (0x98, 0x01, CPU.subtract_with_carry_a), + (0xA0, 0x01, CPU.and_a), + (0xA8, 0x01, CPU.xor_a), + (0xB0, 0x01, CPU.or_a), + (0xB8, 0x01, CPU.compare_a), + (0x06, 0x08, CPU.fetch_load) +] + + +REGISTER_SET_A = [CPU.get_bc, CPU.get_de, CPU.get_hl, CPU.get_sp] +REGISTER_SET_B = [CPU.get_bc, CPU.get_de, CPU.get_hl, CPU.get_af] +FLAG_REGISTER_SET = [CPU.is_not_z, CPU.is_z, CPU.is_not_c, CPU.is_c] + +# Table for Register OP Codes: (startAddress, delta, method, registers) +REGISTER_OP_CODES = [ + (0x01, 0x10, CPU.fetch_double_register, REGISTER_SET_A), + (0x09, 0x10, CPU.add_hl, REGISTER_SET_A), + (0x03, 0x10, CPU.inc_double_register, REGISTER_SET_A), + (0x0B, 0x10, CPU.dec_double_register, REGISTER_SET_A), + (0xC0, 0x08, CPU.conditional_return, FLAG_REGISTER_SET), + (0xC2, 0x08, CPU.conditional_jump, FLAG_REGISTER_SET), + (0xC4, 0x08, CPU.conditional_call, FLAG_REGISTER_SET), + (0x20, 0x08, CPU.relative_conditional_jump, FLAG_REGISTER_SET), + (0xC1, 0x10, CPU.pop_double_register, REGISTER_SET_B), + (0xC5, 0x10, CPU.push_double_register, REGISTER_SET_B) +] +# Table for Second Order op_codes: (startAddress, delta, method, [args]) +SECOND_ORDER_REGISTER_GROUP_OP_CODES = [ + (0x00, 0x01, CPU.rotate_left_circular), + (0x08, 0x01, CPU.rotate_right_circular), + (0x10, 0x01, CPU.rotate_left), + (0x18, 0x01, CPU.rotate_right), + (0x20, 0x01, CPU.shift_left_arithmetic), + (0x28, 0x01, CPU.shift_right_arithmetic), + (0x30, 0x01, CPU.swap), + (0x38, 0x01, CPU.shift_word_right_logical), + (0x40, 0x01, CPU.test_bit, range(0, 8), 0x08), + (0xC0, 0x01, CPU.set_bit, range(0, 8), 0x08), + (0x80, 0x01, CPU.reset_bit, range(0, 8), 0x08) +] + +# RAW op_code TABLE INITIALIZATION 
---------------------------------------------- + +FIRST_ORDER_OP_CODES += create_register_op_codes(REGISTER_OP_CODES) +FIRST_ORDER_OP_CODES += create_group_op_codes(REGISTER_GROUP_OP_CODES) +FIRST_ORDER_OP_CODES += create_load_group_op_codes() +SECOND_ORDER_OP_CODES = create_group_op_codes(SECOND_ORDER_REGISTER_GROUP_OP_CODES) + + +OP_CODES = initialize_op_code_table(FIRST_ORDER_OP_CODES) +FETCH_EXECUTE_OP_CODES = initialize_op_code_table(SECOND_ORDER_OP_CODES) + Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Tue Aug 5 10:24:39 2008 @@ -36,6 +36,8 @@ while isRunning and self.handle_events(): self.emulate(constants.GAMEBOY_CLOCK >> 2) #RSDL.Delay(1) + except Exception: + pass finally: self.handle_execution_error() lltype.free(self.event, flavor='raw') From cami at codespeak.net Tue Aug 5 10:29:20 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 5 Aug 2008 10:29:20 +0200 (CEST) Subject: [pypy-svn] r56992 - pypy/dist/pypy/translator/goal Message-ID: <20080805082920.39B301684C5@codespeak.net> Author: cami Date: Tue Aug 5 10:29:19 2008 New Revision: 56992 Added: pypy/dist/pypy/translator/goal/targetgbprofiling.py Modified: pypy/dist/pypy/translator/goal/targetgbimplementation.py Log: added profiling target wich uses a slightly different cpu (ProfilingCPU) Modified: pypy/dist/pypy/translator/goal/targetgbimplementation.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetgbimplementation.py (original) +++ pypy/dist/pypy/translator/goal/targetgbimplementation.py Tue Aug 5 10:29:19 2008 @@ -1,7 +1,7 @@ import os import py import pdb -from pypy.lang.gameboy.gameboyImplementation import GameBoyImplementation +from pypy.lang.gameboy.gameboy_implementation import GameBoyImplementation ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/lang/gameboy/rom" @@ -20,7 +20,10 @@ except: print "Corrupt Cartridge" gameBoy.load_cartridge_file(str(filename), verify=False) - gameBoy.mainLoop() + try: + gameBoy.mainLoop() + except: + pass #pdb.runcall(gameBoy.mainLoop) return 0 @@ -32,9 +35,3 @@ def test_target(): entry_point(["b", ROM_PATH+"/rom9/rom9.gb"]) - - - -#from AppKit import NSApplication -#NSApplication.sharedApplication() -#entry_point() Added: pypy/dist/pypy/translator/goal/targetgbprofiling.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/translator/goal/targetgbprofiling.py Tue Aug 5 10:29:19 2008 @@ -0,0 +1,37 @@ +import os +import py +import pdb +from pypy.lang.gameboy.profiling.gameboy_profiling_implementation import GameBoyProfilingImplementation + + +ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/lang/gameboy/rom" + + +def entry_point(argv=None): + if argv is not None and len(argv) > 1: + filename = argv[1] + else: + pos = str(9) + filename = ROM_PATH+"/rom"+pos+"/rom"+pos+".gb" + print "loading rom: ", str(filename) + gameBoy = GameBoyProfilingImplementation() + try: + gameBoy.load_cartridge_file(str(filename)) + except: + print "Corrupt Cartridge" + gameBoy.load_cartridge_file(str(filename), verify=False) + try: + gameBoy.mainLoop() + except: + pass + #pdb.runcall(gameBoy.mainLoop) + return 0 + + +# _____ Define and setup target ___ + +def target(*args): + return entry_point, None + 
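# Illustrative sketch only (hypothetical helper, not taken from the patch):
# one way the per-op-code counts collected by ProfilingCPU could be printed
# after a run.  The attribute names op_code_count and opcode_histo are the
# ones used by profiling_cpu.py, added in r56993 below.
def print_op_code_histo(cpu):
    print "executed op codes:", cpu.op_code_count
    for op_code in range(len(cpu.opcode_histo)):
        count = cpu.opcode_histo[op_code]
        if count > 0:
            print "0x%02X: %d" % (op_code, count)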
+def test_target(): + entry_point(["b", ROM_PATH+"/rom9/rom9.gb"]) From cami at codespeak.net Tue Aug 5 10:30:58 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 5 Aug 2008 10:30:58 +0200 (CEST) Subject: [pypy-svn] r56993 - in pypy/dist/pypy/lang/gameboy: . profiling Message-ID: <20080805083058.7820F169F7A@codespeak.net> Author: cami Date: Tue Aug 5 10:30:57 2008 New Revision: 56993 Added: pypy/dist/pypy/lang/gameboy/profiling/ pypy/dist/pypy/lang/gameboy/profiling/__init__.py pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Log: added profiling version, which uses a logging cpu, thus allows to create histograms of the executed OP-codes Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Tue Aug 5 10:30:57 2008 @@ -36,9 +36,7 @@ while isRunning and self.handle_events(): self.emulate(constants.GAMEBOY_CLOCK >> 2) #RSDL.Delay(1) - except Exception: - pass - finally: + except : self.handle_execution_error() lltype.free(self.event, flavor='raw') RSDL.Quit() Added: pypy/dist/pypy/lang/gameboy/profiling/__init__.py ============================================================================== Added: pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py Tue Aug 5 10:30:57 2008 @@ -0,0 +1,56 @@ +#!/usr/bin/env python +from __future__ import generators + +from pypy.lang.gameboy.gameboy_implementation import * +from pypy.lang.gameboy.profiling.profiling_cpu import ProfilingCPU +from pypy.lang.gameboy.debug import debug +from pypy.lang.gameboy.debug.debug_socket_memory import * + +# GAMEBOY ---------------------------------------------------------------------- + +class GameBoyProfilingImplementation(GameBoyImplementation): + + def __init__(self, cycleLimit=0): + GameBoy.__init__(self) + self.cycleLimit = cycleLimit + self.cpu = ProfilingCPU(self.interrupt, self, self.cycleLimit) + + def handle_executed_op_code(self, is_fetch_execute=True): + self.process_cpu_profiling_data() + + def process_cpu_profiling_data(self): + self.print_time_used() + self.print_opcode_histo() + self.print_fetch_exec_histo() + + def print_time_used(self): + pass + + def print_opcode_histo(self): + pass + + def print_fetch_exec_histo(self): + pass + + +# CUSTOM DRIVER IMPLEMENTATIONS currently not used ============================= + +# VIDEO DRIVER ----------------------------------------------------------------- + +class VideoDriverDebugImplementation(VideoDriverImplementation): + pass + + +# JOYPAD DRIVER ---------------------------------------------------------------- + +class JoypadDriverDebugImplementation(JoypadDriverImplementation): + pass + + +# SOUND DRIVER ----------------------------------------------------------------- + +class SoundDriverDebugImplementation(SoundDriverImplementation): + pass + + +# ============================================================================== Added: pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py ============================================================================== --- (empty file) +++ 
pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py Tue Aug 5 10:30:57 2008 @@ -0,0 +1,28 @@ + +from __future__ import generators +from pypy.lang.gameboy.cpu import CPU +from pypy.lang.gameboy.debug import debug + + +class ProfilingCPU(CPU): + + + def __init__(self, interrupt, memory, cycle_limit): + CPU.__init__(interrupt, memory) + self.cycle_limit = 0 + self.op_code_count = 0 + self.fetch_exec_opcode_histo = [0]*(0xFF+1) + self.opcode_histo = [0]*(0xFF+1) + + def fetch_execute(self): + CPU.fetch_execute(self) + self.count += 1 + self.fetch_exec_opcode_histo[self.last_fetch_execute_op_code] += 1 + + + def execute(self, opCode): + CPU.execute(self, opCode) + self.count += 1 + self.opcode_histo[self.last_op_code] += 1 + if self.op_code_count >= self.cycle_limit: + raise Exception("Maximal Cyclecount reached") \ No newline at end of file From cami at codespeak.net Tue Aug 5 10:40:34 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 5 Aug 2008 10:40:34 +0200 (CEST) Subject: [pypy-svn] r56994 - in pypy/dist/pypy/lang/gameboy: . profiling Message-ID: <20080805084034.CF6B4169FB5@codespeak.net> Author: cami Date: Tue Aug 5 10:40:34 2008 New Revision: 56994 Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py Log: fixed some bugs for the profiling version discovered during translation Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Tue Aug 5 10:40:34 2008 @@ -37,9 +37,9 @@ self.emulate(constants.GAMEBOY_CLOCK >> 2) #RSDL.Delay(1) except : - self.handle_execution_error() lltype.free(self.event, flavor='raw') RSDL.Quit() + self.handle_execution_error() return 0 def handle_execution_error(self): Modified: pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/profiling/gameboy_profiling_implementation.py Tue Aug 5 10:40:34 2008 @@ -11,9 +11,10 @@ class GameBoyProfilingImplementation(GameBoyImplementation): def __init__(self, cycleLimit=0): - GameBoy.__init__(self) + GameBoyImplementation.__init__(self) self.cycleLimit = cycleLimit - self.cpu = ProfilingCPU(self.interrupt, self, self.cycleLimit) + self.cpu = ProfilingCPU(self.interrupt, self) + self.cpu.cycle_limit = cycleLimit def handle_executed_op_code(self, is_fetch_execute=True): self.process_cpu_profiling_data() Modified: pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/profiling/profiling_cpu.py Tue Aug 5 10:40:34 2008 @@ -7,8 +7,8 @@ class ProfilingCPU(CPU): - def __init__(self, interrupt, memory, cycle_limit): - CPU.__init__(interrupt, memory) + def __init__(self, interrupt, memory): + CPU.__init__(self, interrupt, memory) self.cycle_limit = 0 self.op_code_count = 0 self.fetch_exec_opcode_histo = [0]*(0xFF+1) @@ -16,13 +16,13 @@ def fetch_execute(self): CPU.fetch_execute(self) - self.count += 1 + self.op_code_count += 1 
self.fetch_exec_opcode_histo[self.last_fetch_execute_op_code] += 1 def execute(self, opCode): CPU.execute(self, opCode) - self.count += 1 + self.op_code_count += 1 self.opcode_histo[self.last_op_code] += 1 if self.op_code_count >= self.cycle_limit: raise Exception("Maximal Cyclecount reached") \ No newline at end of file From cami at codespeak.net Tue Aug 5 10:43:00 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 5 Aug 2008 10:43:00 +0200 (CEST) Subject: [pypy-svn] r56995 - pypy/dist/pypy/lang/gameboy Message-ID: <20080805084300.3D77A169FB4@codespeak.net> Author: cami Date: Tue Aug 5 10:42:59 2008 New Revision: 56995 Modified: pypy/dist/pypy/lang/gameboy/cpu.py Log: small code formatting Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Tue Aug 5 10:42:59 2008 @@ -180,7 +180,6 @@ def c_flag_compare(self, value, compare_and=0x01, reset=False): if reset: self.reset() - #print hex(value), hex(compare_and), (value & compare_and) != 0 self.c_flag = ((value & compare_and) != 0) def h_flag_compare(self, value, a, inverted=False): @@ -809,7 +808,6 @@ if (s & 0x0F00) > (self.sp.get() & 0x0F00): self.f.h_flag = True return s - def complement_carry_flag(self): # CCF/SCF @@ -1002,10 +1000,10 @@ def group_lambda(function, register_getter, value=None): if value is None: return lambda s: function(s, RegisterCallWrapper(register_getter(s)), - RegisterCallWrapper(register_getter(s))) + RegisterCallWrapper(register_getter(s))) else: return lambda s: function(s, RegisterCallWrapper(register_getter(s)), - RegisterCallWrapper(register_getter(s)), value) + RegisterCallWrapper(register_getter(s)), value) def create_load_group_op_codes(): op_codes = [] From pedronis at codespeak.net Tue Aug 5 23:55:39 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Tue, 5 Aug 2008 23:55:39 +0200 (CEST) Subject: [pypy-svn] r57017 - pypy/branch/garden-call-code/pypy/interpreter/callbench Message-ID: <20080805215539.E2F29168562@codespeak.net> Author: pedronis Date: Tue Aug 5 23:55:37 2008 New Revision: 57017 Added: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltn_instantiate.py - copied unchanged from r57016, pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py (contents, props changed) pypy/branch/garden-call-code/pypy/interpreter/callbench/inst_no_init.py (contents, props changed) Removed: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py Log: tweak benchmarks to cover some more cases Added: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py ============================================================================== --- (empty file) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py Tue Aug 5 23:55:37 2008 @@ -0,0 +1,15 @@ +from sup import run + +def w(N, start): + d = {} + d1 = {} + start() + i = 0 + u = dict.update + while i < N: + u(d, d1) + u(d, d1) + u(d, d1) + i+=1 + +run(w, 1000) Added: pypy/branch/garden-call-code/pypy/interpreter/callbench/inst_no_init.py ============================================================================== --- (empty file) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/inst_no_init.py Tue Aug 5 23:55:37 2008 @@ -0,0 +1,22 @@ +from sup import run + +def w(N, start): + class A(object): + pass + + start() + i = 0 + while i < N: + A() + A() + 
A() + A() + A() + A() + A() + A() + A() + A() + i+=1 + +run(w, 1000) From pedronis at codespeak.net Wed Aug 6 00:03:15 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Wed, 6 Aug 2008 00:03:15 +0200 (CEST) Subject: [pypy-svn] r57019 - pypy/branch/garden-call-code/pypy/interpreter/callbench Message-ID: <20080805220315.0A1AE169DB5@codespeak.net> Author: pedronis Date: Wed Aug 6 00:03:15 2008 New Revision: 57019 Added: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py (contents, props changed) Log: and this one too Added: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py ============================================================================== --- (empty file) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py Wed Aug 6 00:03:15 2008 @@ -0,0 +1,14 @@ +from sup import run + +def w(N, start): + d = {} + d1 = {} + start() + i = 0 + while i < N: + d.update(d1) + d.update(d1) + d.update(d1) + i+=1 + +run(w, 1000) From pedronis at codespeak.net Wed Aug 6 01:27:32 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Wed, 6 Aug 2008 01:27:32 +0200 (CEST) Subject: [pypy-svn] r57021 - pypy/branch/garden-call-code/pypy/interpreter/callbench Message-ID: <20080805232732.DD4D3169F83@codespeak.net> Author: pedronis Date: Wed Aug 6 01:27:31 2008 New Revision: 57021 Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py Log: let's ignore the fun (appcaller anyone) involved in d.update for now, use something more obscure but straighforward Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py Wed Aug 6 01:27:31 2008 @@ -1,14 +1,28 @@ from sup import run def w(N, start): - d = {} - d1 = {} + l = [] start() i = 0 while i < N: - d.update(d1) - d.update(d1) - d.update(d1) + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() + l.__init__() i+=1 run(w, 1000) Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py Wed Aug 6 01:27:31 2008 @@ -1,15 +1,29 @@ from sup import run def w(N, start): - d = {} - d1 = {} + l = [] start() i = 0 - u = dict.update + z = l.__init__ while i < N: - u(d, d1) - u(d, d1) - u(d, d1) + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() + z() i+=1 run(w, 1000) From bgola at codespeak.net Wed Aug 6 16:53:53 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Wed, 6 Aug 2008 16:53:53 +0200 (CEST) Subject: [pypy-svn] r57029 - in pypy/branch/2.5-features/pypy: interpreter interpreter/astcompiler interpreter/pyparser module/__builtin__ module/__builtin__/test Message-ID: <20080806145353.CAE1E1684EE@codespeak.net> Author: bgola Date: Wed Aug 6 16:53:52 2008 New Revision: 57029 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.txt 
pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py pypy/branch/2.5-features/pypy/interpreter/pyopcode.py pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py pypy/branch/2.5-features/pypy/module/__builtin__/importing.py pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py Log: absolute import working (__future__.absolute_import) Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.txt ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.txt (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/ast.txt Wed Aug 6 16:53:52 2008 @@ -31,7 +31,7 @@ While: test, body, else_& If: tests!, else_& Exec: expr, locals&, globals& -From: modname*str, names* +From: modname*str, names*, level*int Import: names* Raise: expr1&, expr2&, expr3& TryFinally: body, final Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py Wed Aug 6 16:53:52 2008 @@ -670,7 +670,7 @@ stack = [] i = 0 for for_ in node.quals: - assert isinstance(for_, ast.GenExprFor) + assert isinstance(for_, ast.GenExprFor) start, anchor = self._visitGenExprFor(for_) self.genexpr_cont_stack.append( None ) for if_ in for_.ifs: @@ -853,8 +853,12 @@ def visitImport(self, node): self.set_lineno(node) + if self.graph.checkFlag(CO_FUTURE_ABSIMPORT): + level = 0 + else: + level = -1 for name, alias in node.names: - self.emitop_obj('LOAD_CONST', self.space.wrap(-1)) # 2.5 flag + self.emitop_obj('LOAD_CONST', self.space.wrap(level)) # 2.5 flag self.emitop_obj('LOAD_CONST', self.space.w_None) self.emitop('IMPORT_NAME', name) mod = name.split(".")[0] @@ -866,8 +870,11 @@ def visitFrom(self, node): self.set_lineno(node) + level = node.level + if level == 0 and not self.graph.checkFlag(CO_FUTURE_ABSIMPORT): + level = -1 fromlist = [ self.space.wrap(name) for name,alias in node.names ] - self.emitop_obj('LOAD_CONST', self.space.wrap(node.level)) # 2.5 flag + self.emitop_obj('LOAD_CONST', self.space.wrap(level)) # 2.5 flag self.emitop_obj('LOAD_CONST', self.space.newtuple(fromlist)) self.emitop('IMPORT_NAME', node.modname) for name, alias in node.names: Modified: pypy/branch/2.5-features/pypy/interpreter/pyopcode.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyopcode.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyopcode.py Wed Aug 6 16:53:52 2008 @@ -727,17 +727,9 @@ modulename = f.space.str_w(w_modulename) w_fromlist = f.popvalue() - # CPython 2.5 adds an obscure extra flag consumed by this opcode + # CPython 2.5 adds an extra argument consumed by this opcode if f.pycode.magic >= 0xa0df294: w_flag = f.popvalue() - try: - if space.int_w(w_flag) == -1: - w_flag = None # don't provide the extra flag if == -1 - except OperationError, e: - # let SystemExit and KeyboardInterrupt go through - if e.async(space): - raise - # ignore other exceptions else: w_flag = None @@ -750,12 +742,8 @@ w_locals = space.w_None w_modulename = space.wrap(modulename) w_globals = f.w_globals - if w_flag is None: - w_obj = space.call_function(w_import, w_modulename, w_globals, - w_locals, w_fromlist) - else: - w_obj = space.call_function(w_import, w_modulename, w_globals, - w_locals, 
w_fromlist, w_flag) + w_obj = space.call_function(w_import, w_modulename, w_globals, + w_locals, w_fromlist, w_flag) f.pushvalue(w_obj) def IMPORT_STAR(f, *ignored): Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Wed Aug 6 16:53:52 2008 @@ -842,10 +842,9 @@ """ atoms = get_atoms(builder, nb) index = 1 # skip from - level = 0 while atoms[index].name == builder.parser.tokens['DOT']: - level += 1 index += 1 + level = index - 1 incr, from_name = parse_dotted_names(atoms[index:], builder) index += (incr + 1) # skip 'import' token = atoms[index] @@ -882,8 +881,6 @@ names.append((name, as_name)) if index < l: # case ',' index += 1 - if level == 0: - level = -1 builder.push(ast.From(from_name, names, level, atoms[0].lineno)) Modified: pypy/branch/2.5-features/pypy/module/__builtin__/importing.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/importing.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Wed Aug 6 16:53:52 2008 @@ -158,7 +158,9 @@ space.w_ValueError, space.wrap("Empty module name")) w = space.wrap - + + level = space.int_w(w_level) + ctxt_name = None if w_globals is not None and not space.is_w(w_globals, space.w_None): ctxt_w_name = try_getitem(space, w_globals, w('__name__')) @@ -174,21 +176,25 @@ rel_modulename = None if ctxt_name is not None: - - ctxt_name_prefix_parts = ctxt_name.split('.') - if ctxt_w_path is None: # context is a plain module - ctxt_name_prefix_parts = ctxt_name_prefix_parts[:-1] - if ctxt_name_prefix_parts: - rel_modulename = '.'.join(ctxt_name_prefix_parts+[modulename]) - else: # context is a package module - rel_modulename = ctxt_name+'.'+modulename + if level == 0: + baselevel = 0 + rel_modulename = modulename + else: + ctxt_name_prefix_parts = ctxt_name.split('.') + if ctxt_w_path is None: # context is a plain module + ctxt_name_prefix_parts = ctxt_name_prefix_parts[:-1] + if ctxt_name_prefix_parts: + rel_modulename = '.'.join(ctxt_name_prefix_parts+[modulename]) + else: # context is a package module + rel_modulename = ctxt_name+'.'+modulename + baselevel = len(ctxt_name_prefix_parts) if rel_modulename is not None: w_mod = check_sys_modules(space, w(rel_modulename)) if (w_mod is None or not space.is_w(w_mod, space.w_None)): w_mod = absolute_import(space, rel_modulename, - len(ctxt_name_prefix_parts), + baselevel, w_fromlist, tentative=1) if w_mod is not None: return w_mod Modified: pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py Wed Aug 6 16:53:52 2008 @@ -40,6 +40,8 @@ relative_a = "import a", abs_b = "import b", abs_x_y = "import x.y", + string = "inpackage = 1", + absolute = "from __future__ import absolute_import\nimport string", ) setuppkg("pkg.pkg1", a='') setuppkg("pkg.pkg2", a='', b='') @@ -269,6 +271,13 @@ import sys assert glob['sys'] is sys + def test_future_absolute_import(self): + def imp(): + from pkg import absolute + absolute.string.inpackage + raises(AttributeError, imp) + + def _getlong(data): x = marshal.dumps(data) return 
x[-4:] From bgola at codespeak.net Wed Aug 6 18:38:40 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Wed, 6 Aug 2008 18:38:40 +0200 (CEST) Subject: [pypy-svn] r57035 - in pypy/branch/2.5-features/pypy: interpreter/pyparser module/__builtin__ module/__builtin__/test Message-ID: <20080806163840.9D3E316850B@codespeak.net> Author: bgola Date: Wed Aug 6 18:38:39 2008 New Revision: 57035 Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py pypy/branch/2.5-features/pypy/module/__builtin__/importing.py pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py Log: PEP 328 (absolute/relative imports) Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Wed Aug 6 18:38:39 2008 @@ -845,7 +845,12 @@ while atoms[index].name == builder.parser.tokens['DOT']: index += 1 level = index - 1 - incr, from_name = parse_dotted_names(atoms[index:], builder) + if atoms[index].value == 'import': + # from . import x + from_name = "" + incr = 0 + else: + incr, from_name = parse_dotted_names(atoms[index:], builder) index += (incr + 1) # skip 'import' token = atoms[index] assert isinstance(token, TokenObject) # XXX Modified: pypy/branch/2.5-features/pypy/module/__builtin__/importing.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/importing.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Wed Aug 6 18:38:39 2008 @@ -153,13 +153,13 @@ def importhook(space, modulename, w_globals=None, w_locals=None, w_fromlist=None, w_level=-1): - if not modulename: + level = space.int_w(w_level) + if not modulename and level < 0: raise OperationError( space.w_ValueError, space.wrap("Empty module name")) w = space.wrap - level = space.int_w(w_level) ctxt_name = None if w_globals is not None and not space.is_w(w_globals, space.w_None): @@ -182,7 +182,12 @@ else: ctxt_name_prefix_parts = ctxt_name.split('.') if ctxt_w_path is None: # context is a plain module - ctxt_name_prefix_parts = ctxt_name_prefix_parts[:-1] + if level < 0: + ctxt_name_prefix_parts = ctxt_name_prefix_parts[:-1] + else: + cnpp = ctxt_name_prefix_parts + ctxt_name_prefix_parts = [ ctxt_name_prefix_parts[i] + for i in range(len(cnpp)-level) ] if ctxt_name_prefix_parts: rel_modulename = '.'.join(ctxt_name_prefix_parts+[modulename]) else: # context is a package module @@ -200,7 +205,9 @@ return w_mod else: rel_modulename = None - + if level > 0: + msg = "Attempted relative import in non-package" + raise OperationError(space.w_ValueError, w(msg)) w_mod = absolute_import(space, modulename, 0, w_fromlist, tentative=0) if rel_modulename is not None: space.setitem(space.sys.get('modules'), w(rel_modulename),space.w_None) Modified: pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/test/test_import.py Wed Aug 6 18:38:39 2008 @@ -42,8 +42,14 @@ abs_x_y = "import x.y", string = "inpackage = 1", absolute = "from __future__ import absolute_import\nimport string", + relative_b = "from __future__ import absolute_import\nfrom . 
import string", + relative_c = "from __future__ import absolute_import\nfrom .string import inpackage", + ) + setuppkg("pkg.pkg1", + a = '', + relative_d = "from __future__ import absolute_import\nfrom ..string import inpackage", + relative_e = "from __future__ import absolute_import\nfrom .. import string", ) - setuppkg("pkg.pkg1", a='') setuppkg("pkg.pkg2", a='', b='') setuppkg("pkg_r", inpkg = "import x.y") setuppkg("pkg_r.x") @@ -277,6 +283,27 @@ absolute.string.inpackage raises(AttributeError, imp) + def test_future_relative_import_without_from_name(self): + from pkg import relative_b + assert relative_b.string.inpackage == 1 + + def test_future_relative_import_level_1(self): + from pkg import relative_c + assert relative_c.inpackage == 1 + + def test_future_relative_import_level_2(self): + from pkg.pkg1 import relative_d + assert relative_d.inpackage == 1 + + def test_future_relative_import_level_2_without_from_name(self): + from pkg.pkg1 import relative_e + assert relative_e.string.inpackage == 1 + + def test_future_relative_import_error_when_in_non_package(self): + def imp(): + from .string import inpackage + raises(ValueError, imp) + def _getlong(data): x = marshal.dumps(data) From arigo at codespeak.net Thu Aug 7 18:59:20 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 18:59:20 +0200 (CEST) Subject: [pypy-svn] r57070 - pypy/branch/isinstance-refactor Message-ID: <20080807165920.1A6A2169EC4@codespeak.net> Author: arigo Date: Thu Aug 7 18:59:17 2008 New Revision: 57070 Added: pypy/branch/isinstance-refactor/ - copied from r57069, pypy/dist/ Log: Sanitizing the isinstance() and issubclass() situation. From arigo at codespeak.net Thu Aug 7 19:01:06 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:01:06 +0200 (CEST) Subject: [pypy-svn] r57071 - in pypy/branch/isinstance-refactor/pypy: interpreter interpreter/test objspace/flow Message-ID: <20080807170106.96106169EC4@codespeak.net> Author: arigo Date: Thu Aug 7 19:01:01 2008 New Revision: 57071 Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py pypy/branch/isinstance-refactor/pypy/interpreter/error.py pypy/branch/isinstance-refactor/pypy/interpreter/function.py pypy/branch/isinstance-refactor/pypy/interpreter/test/test_function.py pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py Log: Tests and refactoring. Some tests fail on the trunk. 
Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py Thu Aug 7 19:01:01 2008 @@ -655,15 +655,7 @@ """Checks if the given exception type matches 'w_check_class'.""" if self.is_w(w_exc_type, w_check_class): return True - if self.is_true(self.abstract_issubclass(w_exc_type, w_check_class)): - return True - - if self.is_true(self.isinstance(w_check_class, self.w_tuple)): - exclst_w = self.unpacktuple(w_check_class) - for w_e in exclst_w: - if self.exception_match(w_exc_type, w_e): - return True - return False + return self.abstract_issubclass_w(w_exc_type, w_check_class) def call(self, w_callable, w_args, w_kwds=None): args = Arguments.frompacked(self, w_args, w_kwds) @@ -679,8 +671,8 @@ func = w_func.w_function if isinstance(func, Function): return func.funccall(w_inst, *args_w) - elif args_w and self.is_true( - self.abstract_isinstance(args_w[0], w_func.w_class)): + elif args_w and ( + self.abstract_isinstance_w(args_w[0], w_func.w_class)): w_func = w_func.w_function if isinstance(w_func, Function): @@ -701,9 +693,9 @@ func = w_func.w_function if isinstance(func, Function): return func.funccall_obj_valuestack(w_inst, nargs, frame) - elif nargs > 0 and self.is_true( - self.abstract_isinstance(frame.peekvalue(nargs-1), # :-( - w_func.w_class)): + elif nargs > 0 and ( + self.abstract_isinstance_w(frame.peekvalue(nargs-1), # :-( + w_func.w_class)): w_func = w_func.w_function if isinstance(w_func, Function): @@ -755,61 +747,34 @@ w_objtype = self.type(w_obj) return self.issubtype(w_objtype, w_type) - def abstract_issubclass(self, w_obj, w_cls, failhard=False): - try: - return self.issubtype(w_obj, w_cls) - except OperationError, e: - if not e.match(self, self.w_TypeError): - raise - try: - self.getattr(w_cls, self.wrap('__bases__')) # type sanity check - return self.recursive_issubclass(w_obj, w_cls) - except OperationError, e: - if failhard or not (e.match(self, self.w_TypeError) or - e.match(self, self.w_AttributeError)): - raise - else: - return self.w_False - - def recursive_issubclass(self, w_obj, w_cls): - if self.is_w(w_obj, w_cls): - return self.w_True - for w_base in self.unpackiterable(self.getattr(w_obj, - self.wrap('__bases__'))): - if self.is_true(self.recursive_issubclass(w_base, w_cls)): - return self.w_True - return self.w_False - - def abstract_isinstance(self, w_obj, w_cls): - try: - return self.isinstance(w_obj, w_cls) - except OperationError, e: - if not e.match(self, self.w_TypeError): - raise - try: - w_objcls = self.getattr(w_obj, self.wrap('__class__')) - return self.abstract_issubclass(w_objcls, w_cls) - except OperationError, e: - if not (e.match(self, self.w_TypeError) or - e.match(self, self.w_AttributeError)): - raise - return self.w_False - - def abstract_isclass(self, w_obj): - if self.is_true(self.isinstance(w_obj, self.w_type)): - return self.w_True - if self.findattr(w_obj, self.wrap('__bases__')) is not None: - return self.w_True - else: - return self.w_False + def abstract_issubclass_w(self, w_cls1, w_cls2): + # Equivalent to 'issubclass(cls1, cls2)'. The code below only works + # for the simple case (new-style class, new-style class). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. 
+ return self.unwrap(self.issubtype(w_cls1, w_cls2)) + + def abstract_isinstance_w(self, w_obj, w_cls): + # Equivalent to 'isinstance(obj, cls)'. The code below only works + # for the simple case (new-style instance, new-style class). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.unwrap(self.isinstance(w_obj, w_cls)) + + def abstract_isclass_w(self, w_obj): + # Equivalent to 'isinstance(obj, type)'. The code below only works + # for the simple case (new-style instance without special stuff). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.unwrap(self.isinstance(w_obj, self.w_type)) def abstract_getclass(self, w_obj): - try: - return self.getattr(w_obj, self.wrap('__class__')) - except OperationError, e: - if e.match(self, self.w_TypeError) or e.match(self, self.w_AttributeError): - return self.type(w_obj) - raise + # Equivalent to 'obj.__class__'. The code below only works + # for the simple case (new-style instance without special stuff). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.type(w_obj) + def eval(self, expression, w_globals, w_locals): "NOT_RPYTHON: For internal debugging." Modified: pypy/branch/isinstance-refactor/pypy/interpreter/error.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/error.py (original) +++ pypy/branch/isinstance-refactor/pypy/interpreter/error.py Thu Aug 7 19:01:01 2008 @@ -151,15 +151,14 @@ while space.is_true(space.isinstance(w_type, space.w_tuple)): w_type = space.getitem(w_type, space.wrap(0)) - if space.is_true(space.abstract_isclass(w_type)): + if space.abstract_isclass_w(w_type): if space.is_w(w_value, space.w_None): # raise Type: we assume we have to instantiate Type w_value = space.call_function(w_type) w_type = space.abstract_getclass(w_value) else: w_valuetype = space.abstract_getclass(w_value) - if space.is_true(space.abstract_issubclass(w_valuetype, - w_type)): + if space.abstract_issubclass_w(w_valuetype, w_type): # raise Type, Instance: let etype be the exact type of value w_type = w_valuetype else: Modified: pypy/branch/isinstance-refactor/pypy/interpreter/function.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/function.py (original) +++ pypy/branch/isinstance-refactor/pypy/interpreter/function.py Thu Aug 7 19:01:01 2008 @@ -343,8 +343,8 @@ else: # unbound method w_firstarg = args.firstarg() - if w_firstarg is not None and space.is_true( - space.abstract_isinstance(w_firstarg, self.w_class)): + if w_firstarg is not None and ( + space.abstract_isinstance_w(w_firstarg, self.w_class)): pass # ok else: myname = self.getname(space,"") @@ -372,7 +372,7 @@ # only allow binding to a more specific class than before if (w_cls is not None and not space.is_w(w_cls, space.w_None) and - not space.is_true(space.abstract_issubclass(w_cls, self.w_class))): + not space.abstract_issubclass_w(w_cls, self.w_class)): return space.wrap(self) # subclass test failed else: return descr_function_get(space, self.w_function, w_obj, w_cls) Modified: pypy/branch/isinstance-refactor/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/test/test_function.py (original) +++ 
pypy/branch/isinstance-refactor/pypy/interpreter/test/test_function.py Thu Aug 7 19:01:01 2008 @@ -280,6 +280,46 @@ __metaclass__ = A().foo assert Fun[:2] == ('Fun', ()) + def test_unbound_abstract_typecheck(self): + import new + def f(*args): + return args + m = new.instancemethod(f, None, "foobar") + raises(TypeError, m) + raises(TypeError, m, None) + raises(TypeError, m, "egg") + + m = new.instancemethod(f, None, (str, int)) # really obscure... + assert m(4) == (4,) + assert m("uh") == ("uh",) + raises(TypeError, m, []) + + class MyBaseInst(object): + pass + class MyInst(MyBaseInst): + def __init__(self, myclass): + self.myclass = myclass + def __class__(self): + if self.myclass is None: + raise AttributeError + return self.myclass + __class__ = property(__class__) + class MyClass(object): + pass + BBase = MyClass() + BSub1 = MyClass() + BSub2 = MyClass() + BBase.__bases__ = () + BSub1.__bases__ = (BBase,) + BSub2.__bases__ = (BBase,) + x = MyInst(BSub1) + m = new.instancemethod(f, None, BSub1) + assert m(x) == (x,) + raises(TypeError, m, MyInst(BBase)) + raises(TypeError, m, MyInst(BSub2)) + raises(TypeError, m, MyInst(None)) + raises(TypeError, m, MyInst(42)) + class TestMethod: def setup_method(self, method): Modified: pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py Thu Aug 7 19:01:01 2008 @@ -218,18 +218,6 @@ return ecls return None - def abstract_issubclass(self, w_obj, w_cls, failhard=False): - return self.issubtype(w_obj, w_cls) - - def abstract_isinstance(self, w_obj, w_cls): - return self.isinstance(w_obj, w_cls) - - def abstract_isclass(self, w_obj): - return self.isinstance(w_obj, self.w_type) - - def abstract_getclass(self, w_obj): - return self.type(w_obj) - def build_flow(self, func, constargs={}): """ From arigo at codespeak.net Thu Aug 7 19:02:01 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:02:01 +0200 (CEST) Subject: [pypy-svn] r57072 - in pypy/branch/isinstance-refactor/pypy/module/__builtin__: . test Message-ID: <20080807170201.3059E169EC4@codespeak.net> Author: arigo Date: Thu Aug 7 19:02:00 2008 New Revision: 57072 Added: pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py (contents, props changed) pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py (contents, props changed) Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/__init__.py pypy/branch/isinstance-refactor/pypy/module/__builtin__/operation.py Log: The logic, more directly copied from CPython. 
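
[Illustration, not part of the committed diff: the protocol implemented below lets isinstance()/issubclass() trust a duck-typed '__class__'/'__bases__' instead of requiring real classes. A stripped-down sketch of the recursive '__bases__' walk, with invented names and none of the tuple or error handling of the real code:]

def abstract_issubclass(derived, top):
    # Walk the duck-typed '__bases__' chain; 'derived' and 'top' need
    # not be actual classes, only objects exposing a '__bases__' tuple.
    if derived is top:
        return True
    bases = getattr(derived, '__bases__', None)
    if bases is None:
        return False
    for base in bases:
        if abstract_issubclass(base, top):
            return True
    return False

class Pseudo(object):
    def __init__(self, bases):
        self.__bases__ = bases

Base = Pseudo(())
Sub = Pseudo((Base,))
assert abstract_issubclass(Sub, Base)
assert not abstract_issubclass(Base, Sub)
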
Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/__init__.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/__init__.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/__init__.py Thu Aug 7 19:02:00 2008 @@ -87,8 +87,8 @@ 'coerce' : 'operation.coerce', 'divmod' : 'operation.divmod', '_issubtype' : 'operation._issubtype', - 'issubclass' : 'operation.issubclass', - 'isinstance' : 'operation.isinstance', + 'issubclass' : 'abstractinst.app_issubclass', + 'isinstance' : 'abstractinst.app_isinstance', 'getattr' : 'operation.getattr', 'setattr' : 'operation.setattr', 'delattr' : 'operation.delattr', @@ -151,6 +151,12 @@ # xxx hide the installer space.delitem(self.w_dict, space.wrap(name)) del self.loaders[name] + # install the more general version of isinstance() & co. in the space + from pypy.module.__builtin__ import abstractinst as ab + space.abstract_isinstance_w = ab.abstract_isinstance_w.__get__(space) + space.abstract_issubclass_w = ab.abstract_issubclass_w.__get__(space) + space.abstract_isclass_w = ab.abstract_isclass_w.__get__(space) + space.abstract_getclass = ab.abstract_getclass.__get__(space) def startup(self, space): # install zipimport hook if --withmod-zipimport is used Added: pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py ============================================================================== --- (empty file) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py Thu Aug 7 19:02:00 2008 @@ -0,0 +1,166 @@ +""" +Implementation of the 'abstract instance and subclasses' protocol: +objects can return pseudo-classes as their '__class__' attribute, and +pseudo-classes can have a '__bases__' attribute with a tuple of other +pseudo-classes. The standard built-in functions isinstance() and +issubclass() follow and trust these attributes is they are present, in +addition to checking for instances and subtypes in the normal way. +""" + +from pypy.interpreter.error import OperationError +from pypy.module.__builtin__.interp_classobj import W_ClassObject +from pypy.module.__builtin__.interp_classobj import W_InstanceObject + +def _get_bases(space, w_cls): + """Returns 'cls.__bases__'. Returns None if there is + no __bases__ or if cls.__bases__ is not a tuple. 
+ """ + try: + w_bases = space.getattr(w_cls, space.wrap('__bases__')) + except OperationError, e: + if not e.match(space, space.w_AttributeError): + raise # propagate other errors + return None + if space.is_true(space.isinstance(w_bases, space.w_tuple)): + return w_bases + else: + return None + +def abstract_isclass_w(space, w_obj): + return _get_bases(space, w_obj) is not None + +def check_class(space, w_obj, msg): + if not abstract_isclass_w(space, w_obj): + raise OperationError(space.w_TypeError, space.wrap(msg)) + + +def abstract_getclass(space, w_obj): + try: + return space.getattr(w_obj, space.wrap('__class__')) + except OperationError, e: + if not e.match(space, space.w_AttributeError): + raise # propagate other errors + return space.type(w_obj) + + +def abstract_isinstance_w(space, w_obj, w_klass_or_tuple): + """Implementation for the full 'isinstance(obj, klass_or_tuple)'.""" + + # -- case (anything, type) + try: + w_result = space.isinstance(w_obj, w_klass_or_tuple) + except OperationError, e: # if w_klass_or_tuple was not a type, ignore it + if not e.match(space, space.w_TypeError): + raise # propagate other errors + else: + if space.is_true(w_result): + return True + # From now on we know that w_klass_or_tuple is indeed a type. + # Try also to compare it with obj.__class__, if this is not + # the same as type(obj). + try: + w_pretendtype = space.getattr(w_obj, space.wrap('__class__')) + if space.is_w(w_pretendtype, space.type(w_obj)): + return False # common case: obj.__class__ is type(obj) + w_result = space.issubtype(w_pretendtype, w_klass_or_tuple) + except OperationError, e: + if e.async(space): + raise + return False # ignore most exceptions + else: + return space.is_true(w_result) + + # -- case (old-style instance, old-style class) + oldstyleclass = space.interpclass_w(w_klass_or_tuple) + if isinstance(oldstyleclass, W_ClassObject): + oldstyleinst = space.interpclass_w(w_obj) + if isinstance(oldstyleinst, W_InstanceObject): + return oldstyleinst.w_class.is_subclass_of(oldstyleclass) + + # -- case (anything, tuple) + if space.is_true(space.isinstance(w_klass_or_tuple, space.w_tuple)): + for w_klass in space.unpacktuple(w_klass_or_tuple): + if abstract_isinstance_w(space, w_obj, w_klass): + return True + return False + + # -- case (anything, abstract-class) + check_class(space, w_klass_or_tuple, + "isinstance() arg 2 must be a class, type," + " or tuple of classes and types") + try: + w_abstractclass = space.getattr(w_obj, space.wrap('__class__')) + except OperationError, e: + if e.async(space): # ignore most exceptions + raise + return False + else: + return _issubclass_recurse(space, w_abstractclass, w_klass_or_tuple) + + +def _issubclass_recurse(space, w_derived, w_top): + """Internal helper for abstract cases. 
Here, w_top cannot be a tuple.""" + if space.is_w(w_derived, w_top): + return True + w_bases = _get_bases(space, w_derived) + if w_bases is not None: + for w_base in space.unpacktuple(w_bases): + if _issubclass_recurse(space, w_base, w_top): + return True + return False + + +def abstract_issubclass_w(space, w_derived, w_klass_or_tuple): + """Implementation for the full 'issubclass(derived, klass_or_tuple)'.""" + + # -- case (type, type) + try: + w_result = space.issubtype(w_derived, w_klass_or_tuple) + except OperationError, e: # if one of the args was not a type, ignore it + if not e.match(space, space.w_TypeError): + raise # propagate other errors + else: + return space.is_true(w_result) + + # -- case (old-style class, old-style class) + oldstylederived = space.interpclass_w(w_derived) + if isinstance(oldstylederived, W_ClassObject): + oldstyleklass = space.interpclass_w(w_klass_or_tuple) + if isinstance(oldstyleklass, W_ClassObject): + return oldstylederived.is_subclass_of(oldstyleklass) + else: + check_class(space, w_derived, "issubclass() arg 1 must be a class") + # from here on, we are sure that w_derived is a class-like object + + # -- case (class-like-object, tuple-of-classes) + if space.is_true(space.isinstance(w_klass_or_tuple, space.w_tuple)): + for w_klass in space.unpacktuple(w_klass_or_tuple): + if abstract_issubclass_w(w_derived, w_klass): + return True + return False + + # -- case (class-like-object, abstract-class) + check_class(space, w_klass_or_tuple, + "issubclass() arg 2 must be a class, type," + " or tuple of classes and types") + return _issubclass_recurse(space, w_derived, w_klass_or_tuple) + + +# ____________________________________________________________ +# App-level interface + +def issubclass(space, w_cls, w_klass_or_tuple): + """Check whether a class 'cls' is a subclass (i.e., a derived class) of +another class. When using a tuple as the second argument, check whether +'cls' is a subclass of any of the classes listed in the tuple.""" + return space.wrap(abstract_issubclass_w(space, w_cls, w_klass_or_tuple)) + +def isinstance(space, w_obj, w_klass_or_tuple): + """Check whether an object is an instance of a class (or of a subclass +thereof). When using a tuple as the second argument, check whether 'obj' +is an instance of any of the classes listed in the tuple.""" + return space.wrap(abstract_isinstance_w(space, w_obj, w_klass_or_tuple)) + +# avoid namespace pollution +app_issubclass = issubclass; del issubclass +app_isinstance = isinstance; del isinstance Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/operation.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/operation.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/operation.py Thu Aug 7 19:02:00 2008 @@ -220,95 +220,3 @@ function). 
Note that classes are callable.""" return space.callable(w_object) - - -def _recursive_issubclass(space, w_cls, w_klass_or_tuple): # returns interp-level bool - if space.is_w(w_cls, w_klass_or_tuple): - return True - try: - w_bases = space.getattr(w_cls, space.wrap("__bases__")) - except OperationError, e: - if e.match(space, space.w_AttributeError): - return False - else: - raise - w_iterator = space.iter(w_bases) - while True: - try: - w_base = space.next(w_iterator) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise - break - if _recursive_issubclass(space, w_base, w_klass_or_tuple): - return True - return False - -def _issubclass(space, w_cls, w_klass_or_tuple, check_cls, depth): # returns interp-level bool - if depth == 0: - # XXX overzealous test compliance hack - raise OperationError(space.w_RuntimeError, space.wrap("maximum recursion depth exceeded")) - if space.is_true(space.issubtype(space.type(w_klass_or_tuple), space.w_tuple)): - w_iter = space.iter(w_klass_or_tuple) - while True: - try: - w_klass = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise - break - if _issubclass(space, w_cls, w_klass, True, depth - 1): - return True - return False - - try: - return space.is_true(space.issubtype(w_cls, w_klass_or_tuple)) - except OperationError, e: - if e.match(space, space.w_TypeError): - w_bases = space.wrap('__bases__') - if check_cls: - try: - space.getattr(w_cls, w_bases) - except OperationError, e: - if not e.match(space, space.w_AttributeError): - raise - raise OperationError(space.w_TypeError, space.wrap('arg 1 must be a class or type')) - try: - space.getattr(w_klass_or_tuple, w_bases) - except OperationError, e: - if not e.match(space, space.w_AttributeError): - raise - raise OperationError(space.w_TypeError, space.wrap('arg 2 must be a class or type or a tuple thereof')) - return _recursive_issubclass(space, w_cls, w_klass_or_tuple) - else: - raise - - -def issubclass(space, w_cls, w_klass_or_tuple): - """Check whether a class 'cls' is a subclass (i.e., a derived class) of -another class. When using a tuple as the second argument, check whether -'cls' is a subclass of any of the classes listed in the tuple.""" - return space.wrap(issubclass_w(space, w_cls, w_klass_or_tuple)) - -def issubclass_w(space, w_cls, w_klass_or_tuple): - return _issubclass(space, w_cls, w_klass_or_tuple, True, space.sys.recursionlimit) - - -def isinstance(space, w_obj, w_klass_or_tuple): - """Check whether an object is an instance of a class (or of a subclass -thereof). 
When using a tuple as the second argument, check whether 'obj' -is an instance of any of the classes listed in the tuple.""" - w_objtype = space.type(w_obj) - if issubclass_w(space, w_objtype, w_klass_or_tuple): - return space.w_True - try: - w_objcls = space.getattr(w_obj, space.wrap("__class__")) - except OperationError, e: - if e.match(space, space.w_AttributeError): - return space.w_False - else: - raise - if space.is_w(w_objcls, w_objtype): - return space.w_False - else: - return space.wrap(_issubclass(space, w_objcls, w_klass_or_tuple, False, space.sys.recursionlimit)) Added: pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py ============================================================================== --- (empty file) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py Thu Aug 7 19:02:00 2008 @@ -0,0 +1,182 @@ +from pypy.module.__builtin__.abstractinst import * + + +class TestAbstractInst: + + def test_abstract_isclass(self): + space = self.space + w_B1, w_B2, w_B3, w_X, w_Y = space.unpacktuple(space.appexec([], """(): + class X(object): pass + class Y: pass + B1, B2, B3 = X(), X(), X() + B2.__bases__ = (42,) + B3.__bases__ = 'spam' + return B1, B2, B3, X, Y + """)) + assert abstract_isclass_w(space, space.w_int) is True + assert abstract_isclass_w(space, w_B1) is False + assert abstract_isclass_w(space, w_B2) is True + assert abstract_isclass_w(space, w_B3) is False + assert abstract_isclass_w(space, w_X) is True + assert abstract_isclass_w(space, w_Y) is True + + def test_abstract_getclass(self): + space = self.space + w_x, w_y, w_A, w_MyInst = space.unpacktuple(space.appexec([], """(): + class MyInst(object): + def __init__(self, myclass): + self.myclass = myclass + def __class__(self): + if self.myclass is None: + raise AttributeError + return self.myclass + __class__ = property(__class__) + A = object() + x = MyInst(A) + y = MyInst(None) + return x, y, A, MyInst + """)) + w_42 = space.wrap(42) + assert space.is_w(abstract_getclass(space, w_42), space.w_int) + assert space.is_w(abstract_getclass(space, w_x), w_A) + assert space.is_w(abstract_getclass(space, w_y), w_MyInst) + assert space.is_w(abstract_getclass(space, w_MyInst), space.w_type) + + +class AppTestAbstractInst: + + def test_abstract_isinstance(self): + class MyBaseInst(object): + pass + class MyInst(MyBaseInst): + def __init__(self, myclass): + self.myclass = myclass + def __class__(self): + if self.myclass is None: + raise AttributeError + return self.myclass + __class__ = property(__class__) + class MyInst2(MyBaseInst): + pass + class MyClass(object): + pass + + A = MyClass() + x = MyInst(A) + assert x.__class__ is A + assert isinstance(x, MyInst) + assert isinstance(x, MyBaseInst) + assert not isinstance(x, MyInst2) + raises(TypeError, isinstance, x, A) # A has no __bases__ + A.__bases__ = "hello world" + raises(TypeError, isinstance, x, A) # A.__bases__ is not tuple + + class Foo(object): + pass + class SubFoo1(Foo): + pass + class SubFoo2(Foo): + pass + y = MyInst(SubFoo1) + assert isinstance(y, MyInst) + assert isinstance(y, MyBaseInst) + assert not isinstance(y, MyInst2) + assert isinstance(y, SubFoo1) + assert isinstance(y, Foo) + assert not isinstance(y, SubFoo2) + + z = MyInst(None) + assert isinstance(z, MyInst) + assert isinstance(z, MyBaseInst) + assert not isinstance(z, MyInst2) + assert not isinstance(z, SubFoo1) + + assert isinstance(y, ((), MyInst2, SubFoo1)) + assert isinstance(y, (MyBaseInst, (SubFoo2,))) + assert not isinstance(y, 
(MyInst2, SubFoo2)) + assert not isinstance(z, ()) + + class Foo(object): + pass + class Bar: + pass + u = MyInst(Foo) + assert isinstance(u, MyInst) + assert isinstance(u, MyBaseInst) + assert not isinstance(u, MyInst2) + assert isinstance(u, Foo) + assert not isinstance(u, Bar) + v = MyInst(Bar) + assert isinstance(v, MyInst) + assert isinstance(v, MyBaseInst) + assert not isinstance(v, MyInst2) + assert not isinstance(v, Foo) + assert isinstance(v, Bar) + + BBase = MyClass() + BSub1 = MyClass() + BSub2 = MyClass() + BBase.__bases__ = () + BSub1.__bases__ = (BBase,) + BSub2.__bases__ = (BBase,) + x = MyInst(BSub1) + assert isinstance(x, BSub1) + assert isinstance(x, BBase) + assert not isinstance(x, BSub2) + assert isinstance(x, (BSub2, (), (BSub1,))) + + del BBase.__bases__ + assert isinstance(x, BSub1) + raises(TypeError, isinstance, x, BBase) + assert not isinstance(x, BSub2) + + BBase.__bases__ = "foobar" + assert isinstance(x, BSub1) + raises(TypeError, isinstance, x, BBase) + assert not isinstance(x, BSub2) + + def test_abstract_issubclass(self): + class MyBaseInst(object): + pass + class MyInst(MyBaseInst): + pass + class MyInst2(MyBaseInst): + pass + class MyClass(object): + pass + + assert issubclass(MyInst, MyBaseInst) + assert issubclass(MyInst2, MyBaseInst) + assert issubclass(MyBaseInst, MyBaseInst) + assert not issubclass(MyBaseInst, MyInst) + assert not issubclass(MyInst, MyInst2) + + BBase = MyClass() + BSub1 = MyClass() + BSub2 = MyClass() + BBase.__bases__ = () + BSub1.__bases__ = (BBase,) + BSub2.__bases__ = (BBase,) + assert issubclass(BSub1, BBase) + assert issubclass(BBase, BBase) + assert not issubclass(BBase, BSub1) + assert not issubclass(BSub1, BSub2) + assert not issubclass(MyInst, BSub1) + assert not issubclass(BSub1, MyInst) + + del BBase.__bases__ + raises(TypeError, issubclass, BSub1, BBase) + raises(TypeError, issubclass, BBase, BBase) + raises(TypeError, issubclass, BBase, BSub1) + assert not issubclass(BSub1, BSub2) + assert not issubclass(MyInst, BSub1) + assert not issubclass(BSub1, MyInst) + + BBase.__bases__ = 42 + raises(TypeError, issubclass, BSub1, BBase) + raises(TypeError, issubclass, BBase, BBase) + raises(TypeError, issubclass, BBase, BSub1) + assert not issubclass(BSub1, BSub2) + assert not issubclass(MyInst, BSub1) + assert not issubclass(BSub1, MyInst) + From arigo at codespeak.net Thu Aug 7 19:06:41 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:06:41 +0200 (CEST) Subject: [pypy-svn] r57077 - pypy/branch/isinstance-refactor/pypy/interpreter Message-ID: <20080807170641.9108F169EEE@codespeak.net> Author: arigo Date: Thu Aug 7 19:06:33 2008 New Revision: 57077 Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py Log: Uh? Nonsense. Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py Thu Aug 7 19:06:33 2008 @@ -752,21 +752,21 @@ # for the simple case (new-style class, new-style class). # This method is patched with the full logic by the __builtin__ # module when it is loaded. - return self.unwrap(self.issubtype(w_cls1, w_cls2)) + return self.is_true(self.issubtype(w_cls1, w_cls2)) def abstract_isinstance_w(self, w_obj, w_cls): # Equivalent to 'isinstance(obj, cls)'. 
The code below only works # for the simple case (new-style instance, new-style class). # This method is patched with the full logic by the __builtin__ # module when it is loaded. - return self.unwrap(self.isinstance(w_obj, w_cls)) + return self.is_true(self.isinstance(w_obj, w_cls)) def abstract_isclass_w(self, w_obj): # Equivalent to 'isinstance(obj, type)'. The code below only works # for the simple case (new-style instance without special stuff). # This method is patched with the full logic by the __builtin__ # module when it is loaded. - return self.unwrap(self.isinstance(w_obj, self.w_type)) + return self.is_true(self.isinstance(w_obj, self.w_type)) def abstract_getclass(self, w_obj): # Equivalent to 'obj.__class__'. The code below only works From arigo at codespeak.net Thu Aug 7 19:13:49 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:13:49 +0200 (CEST) Subject: [pypy-svn] r57078 - pypy/branch/isinstance-refactor/pypy/objspace/flow Message-ID: <20080807171349.1D5CE169E7D@codespeak.net> Author: arigo Date: Thu Aug 7 19:13:41 2008 New Revision: 57078 Modified: pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py Log: Fix the flow object space (by killing code, yay) Modified: pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py Thu Aug 7 19:13:41 2008 @@ -194,20 +194,6 @@ from pypy.objspace.flow import specialcase specialcase.setup(self) - def exception_match(self, w_exc_type, w_check_class): - try: - check_class = self.unwrap(w_check_class) - except UnwrapException: - raise Exception, "non-constant except guard" - if not isinstance(check_class, tuple): - # the simple case - return ObjSpace.exception_match(self, w_exc_type, w_check_class) - # checking a tuple of classes - for w_klass in self.unpacktuple(w_check_class): - if ObjSpace.exception_match(self, w_exc_type, w_klass): - return True - return False - def getconstclass(space, w_cls): try: ecls = space.unwrap(w_cls) From arigo at codespeak.net Thu Aug 7 19:14:04 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:14:04 +0200 (CEST) Subject: [pypy-svn] r57079 - in pypy/branch/isinstance-refactor/pypy/module/__builtin__: . test Message-ID: <20080807171404.E91AC169EC5@codespeak.net> Author: arigo Date: Thu Aug 7 19:13:58 2008 New Revision: 57079 Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py Log: Logic for subclass detection in old-style classes. 
Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py Thu Aug 7 19:13:58 2008 @@ -85,6 +85,16 @@ space.wrap("__bases__ items must be classes")) self.bases_w = bases_w + def is_subclass_of(self, other): + assert isinstance(other, W_ClassObject) + if self is other: + return True + for base in self.bases_w: + assert isinstance(base, W_ClassObject) + if base.is_subclass_of(other): + return True + return False + def lookup(self, space, w_attr): # returns w_value or interplevel None w_result = space.finditem(self.w_dict, w_attr) Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py Thu Aug 7 19:13:58 2008 @@ -15,6 +15,35 @@ assert a.__class__ is A assert a.__dict__ == {'b': 2} + def test_isinstance(self): + class A: + pass + class B(A): + pass + class C(A): + pass + assert isinstance(B(), A) + assert isinstance(B(), B) + assert not isinstance(B(), C) + assert not isinstance(A(), B) + assert isinstance(B(), (A, C)) + assert isinstance(B(), (C, (), (C, B))) + assert not isinstance(B(), ()) + + def test_issubclass(self): + class A: + pass + class B(A): + pass + class C(A): + pass + assert issubclass(A, A) + assert not issubclass(A, B) + assert not issubclass(A, C) + assert issubclass(B, A) + assert issubclass(B, B) + assert not issubclass(B, C) + def test_mutate_class_special(self): class A: a = 1 From arigo at codespeak.net Thu Aug 7 19:47:42 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 19:47:42 +0200 (CEST) Subject: [pypy-svn] r57082 - in pypy/branch/isinstance-refactor/pypy: interpreter objspace/flow objspace/flow/test Message-ID: <20080807174742.B2B94169E7D@codespeak.net> Author: arigo Date: Thu Aug 7 19:47:41 2008 New Revision: 57082 Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py pypy/branch/isinstance-refactor/pypy/objspace/flow/test/test_objspace.py Log: Delicate fixes. 
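
[Illustration, not part of the committed diff: the 'delicate' part is that the check target of an 'except' clause is not always a class. Old-style string exceptions, for instance, only match by identity, and asking issubclass() about them raises TypeError, which has to be treated as 'no match' rather than propagated. A small sketch of that defensive rule, with invented names:]

def safe_exception_match(exc_cls, check):
    if exc_cls is check:
        return True        # identity match, also covers string exceptions
    try:
        return issubclass(exc_cls, check)
    except TypeError:
        return False       # 'check' is not usable as a class: no match

assert safe_exception_match(KeyError, Exception)
assert safe_exception_match("spam", "spam")
assert not safe_exception_match(KeyError, "spam")
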
Modified: pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/interpreter/baseobjspace.py Thu Aug 7 19:47:41 2008 @@ -654,8 +654,13 @@ def exception_match(self, w_exc_type, w_check_class): """Checks if the given exception type matches 'w_check_class'.""" if self.is_w(w_exc_type, w_check_class): - return True - return self.abstract_issubclass_w(w_exc_type, w_check_class) + return True # fast path (also here to handle string exceptions) + try: + return self.abstract_issubclass_w(w_exc_type, w_check_class) + except OperationError, e: + if e.match(self, self.w_TypeError): # string exceptions maybe + return False + raise def call(self, w_callable, w_args, w_kwds=None): args = Arguments.frompacked(self, w_args, w_kwds) Modified: pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/objspace/flow/objspace.py Thu Aug 7 19:47:41 2008 @@ -194,6 +194,20 @@ from pypy.objspace.flow import specialcase specialcase.setup(self) + def exception_match(self, w_exc_type, w_check_class): + try: + check_class = self.unwrap(w_check_class) + except UnwrapException: + raise Exception, "non-constant except guard" + if not isinstance(check_class, tuple): + # the simple case + return ObjSpace.exception_match(self, w_exc_type, w_check_class) + # checking a tuple of classes + for w_klass in self.unpacktuple(w_check_class): + if ObjSpace.exception_match(self, w_exc_type, w_klass): + return True + return False + def getconstclass(space, w_cls): try: ecls = space.unwrap(w_cls) Modified: pypy/branch/isinstance-refactor/pypy/objspace/flow/test/test_objspace.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/objspace/flow/test/test_objspace.py (original) +++ pypy/branch/isinstance-refactor/pypy/objspace/flow/test/test_objspace.py Thu Aug 7 19:47:41 2008 @@ -1,7 +1,7 @@ import new import py from pypy.objspace.flow.model import Constant, Block, Link, Variable, traverse -from pypy.objspace.flow.model import flatten +from pypy.objspace.flow.model import flatten, mkentrymap from pypy.interpreter.argument import Arguments from pypy.translator.simplify import simplify_graph from pypy.objspace.flow.objspace import FlowObjSpace @@ -416,6 +416,27 @@ x = self.codetest(self.catch_simple_call) #__________________________________________________________ + def multiple_catch_simple_call(): + try: + user_defined_function() + except (IndexError, OSError): + return -1 + return 0 + + def test_multiple_catch_simple_call(self): + graph = self.codetest(self.multiple_catch_simple_call) + simplify_graph(graph) + assert self.all_operations(graph) == {'simple_call': 1} + entrymap = mkentrymap(graph) + links = entrymap[graph.returnblock] + assert len(links) == 3 + assert (dict.fromkeys([link.exitcase for link in links]) == + dict.fromkeys([None, IndexError, OSError])) + links = entrymap[graph.exceptblock] + assert len(links) == 1 + assert links[0].exitcase is Exception + + #__________________________________________________________ def dellocal(): x = 1 del x From arigo at codespeak.net Thu Aug 7 19:50:58 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: 
Thu, 7 Aug 2008 19:50:58 +0200 (CEST) Subject: [pypy-svn] r57083 - in pypy/branch/isinstance-refactor/pypy/module/__builtin__: . test Message-ID: <20080807175058.7F224169ECC@codespeak.net> Author: arigo Date: Thu Aug 7 19:50:51 2008 New Revision: 57083 Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py Log: Test and fix. Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py Thu Aug 7 19:50:51 2008 @@ -135,7 +135,7 @@ # -- case (class-like-object, tuple-of-classes) if space.is_true(space.isinstance(w_klass_or_tuple, space.w_tuple)): for w_klass in space.unpacktuple(w_klass_or_tuple): - if abstract_issubclass_w(w_derived, w_klass): + if abstract_issubclass_w(space, w_derived, w_klass): return True return False Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py Thu Aug 7 19:50:51 2008 @@ -150,6 +150,9 @@ assert issubclass(MyBaseInst, MyBaseInst) assert not issubclass(MyBaseInst, MyInst) assert not issubclass(MyInst, MyInst2) + assert issubclass(MyInst, (MyBaseInst, MyClass)) + assert issubclass(MyInst, (MyClass, (), (MyBaseInst,))) + assert not issubclass(MyInst, (MyClass, (), (MyInst2,))) BBase = MyClass() BSub1 = MyClass() @@ -179,4 +182,3 @@ assert not issubclass(BSub1, BSub2) assert not issubclass(MyInst, BSub1) assert not issubclass(BSub1, MyInst) - From arigo at codespeak.net Thu Aug 7 21:07:45 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 7 Aug 2008 21:07:45 +0200 (CEST) Subject: [pypy-svn] r57084 - in pypy/branch/isinstance-refactor/pypy/module/__builtin__: . 
test Message-ID: <20080807190745.3EDC12A00DC@codespeak.net> Author: arigo Date: Thu Aug 7 21:07:42 2008 New Revision: 57084 Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py Log: - a passing test - a failing test - skip a test when running on top of CPython - fix the failing test Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py Thu Aug 7 21:07:42 2008 @@ -235,6 +235,7 @@ except OperationError, e: if e.match(space, space.w_AttributeError): return space.w_NotImplemented + raise else: if w_meth is None: return space.w_NotImplemented Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_classobj.py Thu Aug 7 21:07:42 2008 @@ -444,6 +444,21 @@ raises(TypeError, "a + 1.1") assert l == [1, 1.1] + def test_binaryop_raises(self): + class A: + def __add__(self, other): + raise this_exception + def __iadd__(self, other): + raise this_exception + + a = A() + this_exception = ValueError + raises(ValueError, "a + 1") + raises(ValueError, "a += 1") + this_exception = AttributeError + raises(AttributeError, "a + 1") + raises(AttributeError, "a += 1") + def test_iadd(self): class A: def __init__(self): @@ -647,13 +662,14 @@ def test_catch_attributeerror_of_descriptor(self): def booh(self): - raise AttributeError, "booh" + raise this_exception, "booh" class E: __eq__ = property(booh) __iadd__ = property(booh) e = E() + this_exception = AttributeError raises(TypeError, "e += 1") # does not crash E() == E() @@ -661,6 +677,9 @@ __init__ = property(booh) raises(AttributeError, I) + this_exception = ValueError + raises(ValueError, "e += 1") + def test_multiple_inheritance_more(self): l = [] class A: # classic class @@ -715,6 +734,10 @@ assert Y() != X() def test_assignment_to_del(self): + import sys + if not hasattr(sys, 'pypy_objspaceclass'): + skip("assignment to __del__ doesn't give a warning in CPython") + import warnings warnings.simplefilter('error', RuntimeWarning) From arigo at codespeak.net Fri Aug 8 16:39:42 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Fri, 8 Aug 2008 16:39:42 +0200 (CEST) Subject: [pypy-svn] r57103 - pypy/branch/isinstance-refactor/pypy/module/__builtin__ Message-ID: <20080808143942.0354D169FCC@codespeak.net> Author: arigo Date: Fri Aug 8 16:39:42 2008 New Revision: 57103 Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py Log: Minor performance improvement, possibly. 
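
[Illustration, not part of the committed diff: the change below moves the special-casing of '__dict__' and '__class__' into the getattribute path and guards it with a cheap length/underscore test, so ordinary attribute names never pay for the string comparisons. A toy version with made-up names:]

def special_lookup(inst_dict, inst_class, name):
    # "__dict__" and "__class__" are both at least eight characters and
    # start with '_', so short names like "x" or "foo" bail out early.
    if len(name) >= 8 and name[0] == '_':
        if name == "__dict__":
            return inst_dict
        if name == "__class__":
            return inst_class
    return None

assert special_lookup({'a': 1}, int, "__dict__") == {'a': 1}
assert special_lookup({'a': 1}, int, "__class__") is int
assert special_lookup({'a': 1}, int, "a") is None
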
Modified: pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py (original) +++ pypy/branch/isinstance-refactor/pypy/module/__builtin__/interp_classobj.py Fri Aug 8 16:39:42 2008 @@ -325,11 +325,6 @@ def getattr(self, space, w_name, exc=True): - name = space.str_w(w_name) - if name == "__dict__": - return self.w_dict - elif name == "__class__": - return self.w_class w_result = space.finditem(self.w_dict, w_name) if w_result is not None: return w_result @@ -339,7 +334,7 @@ raise OperationError( space.w_AttributeError, space.wrap("%s instance has no attribute %s" % ( - self.w_class.name, name))) + self.w_class.name, space.str_w(w_name)))) else: return None w_descr_get = space.lookup(w_value, '__get__') @@ -348,7 +343,12 @@ return space.call_function(w_descr_get, w_value, self, self.w_class) def descr_getattribute(self, space, w_attr): - #import pdb; pdb.set_trace() + name = space.str_w(w_attr) + if len(name) >= 8 and name[0] == '_': + if name == "__dict__": + return self.w_dict + elif name == "__class__": + return self.w_class try: return self.getattr(space, w_attr) except OperationError, e: From hpk at codespeak.net Fri Aug 8 18:41:50 2008 From: hpk at codespeak.net (hpk at codespeak.net) Date: Fri, 8 Aug 2008 18:41:50 +0200 (CEST) Subject: [pypy-svn] r57107 - pypy/dist/pypy/doc Message-ID: <20080808164150.DB0CB169FDC@codespeak.net> Author: hpk Date: Fri Aug 8 18:41:49 2008 New Revision: 57107 Modified: pypy/dist/pypy/doc/index.txt Log: remove and mark some old "status" entries Modified: pypy/dist/pypy/doc/index.txt ============================================================================== --- pypy/dist/pypy/doc/index.txt (original) +++ pypy/dist/pypy/doc/index.txt Fri Aug 8 18:41:49 2008 @@ -93,8 +93,7 @@ `Nightly compliance test runs for compiled pypy-c`_. -`compliance test status`_ shows outcomes of compliance test runs -against PyPy on top of CPython. +information dating from early 2007: `PyPy LOC statistics`_ shows LOC statistics about PyPy. From arigo at codespeak.net Sat Aug 9 16:07:48 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 9 Aug 2008 16:07:48 +0200 (CEST) Subject: [pypy-svn] r57132 - pypy/branch/isinstance-refactor/pypy/rlib/test Message-ID: <20080809140748.26E8A16A034@codespeak.net> Author: arigo Date: Sat Aug 9 16:07:47 2008 New Revision: 57132 Modified: pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py Log: A failing test derived from a failure of test_stress in the nightly run. Modified: pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py (original) +++ pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py Sat Aug 9 16:07:47 2008 @@ -95,6 +95,16 @@ assert f.getvalue() == '\x00' * 3 assert f.tell() == 3 +def test_bug(): + import py; py.test.skip("in-progress") + f = RStringIO() + f.write('0') + f.write('1') + f.write('2') + assert f.getvalue() == '012' + f.write('3') + assert f.getvalue() == '0123' + def test_stress(): import cStringIO, random f = RStringIO() From arigo at codespeak.net Sat Aug 9 16:10:34 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 9 Aug 2008 16:10:34 +0200 (CEST) Subject: [pypy-svn] r57133 - in pypy/branch/isinstance-refactor/pypy/rlib: . 
test Message-ID: <20080809141034.D0CE4168575@codespeak.net> Author: arigo Date: Sat Aug 9 16:10:34 2008 New Revision: 57133 Modified: pypy/branch/isinstance-refactor/pypy/rlib/rStringIO.py pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py Log: Fix the test by restoring a broken invariant. Modified: pypy/branch/isinstance-refactor/pypy/rlib/rStringIO.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/rlib/rStringIO.py (original) +++ pypy/branch/isinstance-refactor/pypy/rlib/rStringIO.py Sat Aug 9 16:10:34 2008 @@ -45,6 +45,8 @@ return ''.join(self.bigbuffer) if self.numstrings > 1: result = self.strings[0] = ''.join(self.strings) + for i in range(1, self.numstrings): + self.strings[i] = '' self.numstrings = 1 self.numbigstrings = 1 else: Modified: pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py ============================================================================== --- pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py (original) +++ pypy/branch/isinstance-refactor/pypy/rlib/test/test_rStringIO.py Sat Aug 9 16:10:34 2008 @@ -96,7 +96,6 @@ assert f.tell() == 3 def test_bug(): - import py; py.test.skip("in-progress") f = RStringIO() f.write('0') f.write('1') From arigo at codespeak.net Sat Aug 9 16:11:38 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 9 Aug 2008 16:11:38 +0200 (CEST) Subject: [pypy-svn] r57135 - in pypy/dist/pypy/rlib: . test Message-ID: <20080809141138.0EADD169E3A@codespeak.net> Author: arigo Date: Sat Aug 9 16:11:38 2008 New Revision: 57135 Modified: pypy/dist/pypy/rlib/rStringIO.py pypy/dist/pypy/rlib/test/test_rStringIO.py Log: Test and fix (accidentally checked in a branch, merging now). 
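The fix checked in here restores an invariant of RStringIO's chunk buffer: getvalue() joins the accumulated chunks into slot 0 and resets the chunk count, but the stale entries were left behind in the list and got picked up again by the next join. A minimal illustration of the invariant (ChunkBuffer is an invented, simplified stand-in; the real rlib class also keeps a numstrings counter and a separate bigbuffer):

    class ChunkBuffer(object):
        def __init__(self):
            self.strings = ['']

        def write(self, s):
            self.strings.append(s)

        def getvalue(self):
            if len(self.strings) > 1:
                # Collapse every chunk into slot 0 and blank the rest;
                # without the blanking, the next getvalue() would join the
                # old chunks again and return '012123' instead of '0123'.
                self.strings[0] = ''.join(self.strings)
                for i in range(1, len(self.strings)):
                    self.strings[i] = ''
            return self.strings[0]

    buf = ChunkBuffer()
    buf.write('0'); buf.write('1'); buf.write('2')
    assert buf.getvalue() == '012'
    buf.write('3')
    assert buf.getvalue() == '0123'   # the scenario test_bug checks
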
Modified: pypy/dist/pypy/rlib/rStringIO.py ============================================================================== --- pypy/dist/pypy/rlib/rStringIO.py (original) +++ pypy/dist/pypy/rlib/rStringIO.py Sat Aug 9 16:11:38 2008 @@ -45,6 +45,8 @@ return ''.join(self.bigbuffer) if self.numstrings > 1: result = self.strings[0] = ''.join(self.strings) + for i in range(1, self.numstrings): + self.strings[i] = '' self.numstrings = 1 self.numbigstrings = 1 else: Modified: pypy/dist/pypy/rlib/test/test_rStringIO.py ============================================================================== --- pypy/dist/pypy/rlib/test/test_rStringIO.py (original) +++ pypy/dist/pypy/rlib/test/test_rStringIO.py Sat Aug 9 16:11:38 2008 @@ -95,6 +95,15 @@ assert f.getvalue() == '\x00' * 3 assert f.tell() == 3 +def test_bug(): + f = RStringIO() + f.write('0') + f.write('1') + f.write('2') + assert f.getvalue() == '012' + f.write('3') + assert f.getvalue() == '0123' + def test_stress(): import cStringIO, random f = RStringIO() From pedronis at codespeak.net Sat Aug 9 16:49:38 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 16:49:38 +0200 (CEST) Subject: [pypy-svn] r57136 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080809144938.8303916A034@codespeak.net> Author: pedronis Date: Sat Aug 9 16:49:36 2008 New Revision: 57136 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: reorganize tests Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 16:49:36 2008 @@ -459,7 +459,38 @@ assert space.is_true(w_res) assert called == [w_app_f, w_app_f] + + def test_plain(self): + space = self.space + + def g(space, w_a, w_x): + return space.newtuple([space.wrap('g'), w_a, w_x]) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.W_Root])) + + args = argument.Arguments(space, [space.wrap(-1), space.wrap(0)]) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', -1, 0)))) + + w_self = space.wrap('self') + + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + + args3 = argument.Arguments(space, [space.wrap(3)]) + w_res = space.call_obj_args(w_g, w_self, args3) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) + +class TestPassThroughArguments: + def test_pass_trough_arguments(self): space = self.space @@ -507,34 +538,6 @@ w_res = space.call_args(w_g, args) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) - - def test_plain(self): - space = self.space - - def g(space, w_a, w_x): - return space.newtuple([space.wrap('g'), w_a, w_x]) - - w_g = space.wrap(gateway.interp2app_temp(g, - unwrap_spec=[gateway.ObjSpace, - gateway.W_Root, - gateway.W_Root])) - - args = argument.Arguments(space, [space.wrap(-1), space.wrap(0)]) - - w_res = space.call_args(w_g, args) - assert space.is_true(space.eq(w_res, space.wrap(('g', -1, 0)))) - - w_self = space.wrap('self') - - args0 = argument.Arguments(space, [space.wrap(0)]) - args = args0.prepend(w_self) - - w_res = space.call_args(w_g, args) - assert 
space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) - - args3 = argument.Arguments(space, [space.wrap(3)]) - w_res = space.call_obj_args(w_g, w_self, args3) - assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) class AppTestKeywordsToBuiltinSanity(object): From pedronis at codespeak.net Sat Aug 9 16:51:28 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 16:51:28 +0200 (CEST) Subject: [pypy-svn] r57137 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080809145128.DA7CA16A038@codespeak.net> Author: pedronis Date: Sat Aug 9 16:51:28 2008 New Revision: 57137 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: another bit of reorg Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 16:51:28 2008 @@ -491,7 +491,7 @@ class TestPassThroughArguments: - def test_pass_trough_arguments(self): + def test_pass_trough_arguments0(self): space = self.space called = [] @@ -501,20 +501,10 @@ a_w, _ = __args__.unpack() return space.newtuple([space.wrap('f')]+a_w) - def g(space, w_self, __args__): - called.append(__args__) - a_w, _ = __args__.unpack() - return space.newtuple([space.wrap('g'), w_self, ]+a_w) - w_f = space.wrap(gateway.interp2app_temp(f, unwrap_spec=[gateway.ObjSpace, gateway.Arguments])) - w_g = space.wrap(gateway.interp2app_temp(g, - unwrap_spec=[gateway.ObjSpace, - gateway.W_Root, - gateway.Arguments])) - args = argument.Arguments(space, [space.wrap(7)]) w_res = space.call_args(w_f, args) @@ -522,7 +512,21 @@ # white-box check for opt assert called[0] is args + + def test_pass_trough_arguments1(self): + space = self.space + called = [] + + def g(space, w_self, __args__): + called.append(__args__) + a_w, _ = __args__.unpack() + return space.newtuple([space.wrap('g'), w_self, ]+a_w) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.Arguments])) w_self = space.wrap('self') From pedronis at codespeak.net Sat Aug 9 16:56:31 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 16:56:31 +0200 (CEST) Subject: [pypy-svn] r57138 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080809145631.086DD16A036@codespeak.net> Author: pedronis Date: Sat Aug 9 16:56:30 2008 New Revision: 57138 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: more checks Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 16:56:30 2008 @@ -528,20 +528,31 @@ gateway.W_Root, gateway.Arguments])) + old_funcrun = w_g.code.funcrun + def funcrun_witness(func, args): + called.append('funcrun') + return old_funcrun(func, args) + + w_g.code.funcrun = funcrun_witness + w_self = space.wrap('self') args3 = argument.Arguments(space, [space.wrap(3)]) w_res = space.call_obj_args(w_g, w_self, args3) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) # white-box check for opt + assert len(called) == 1 assert called[0] is args3 - # no opt in this case + 
called = [] args0 = argument.Arguments(space, [space.wrap(0)]) args = args0.prepend(w_self) w_res = space.call_args(w_g, args) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + # no opt in this case + assert len(called) == 2 + assert called[0] == 'funcrun' class AppTestKeywordsToBuiltinSanity(object): From pedronis at codespeak.net Sat Aug 9 17:06:22 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 17:06:22 +0200 (CEST) Subject: [pypy-svn] r57139 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080809150622.2F66B16A02B@codespeak.net> Author: pedronis Date: Sat Aug 9 17:06:21 2008 New Revision: 57139 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: tests showing current situation for BuiltinCodePassThroughArguments1 optimization Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 17:06:21 2008 @@ -553,7 +553,35 @@ # no opt in this case assert len(called) == 2 assert called[0] == 'funcrun' + called = [] + # higher level interfaces + + w_res = space.call_function(w_g, w_self) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self')))) + assert len(called) == 2 + assert called[0] == 'funcrun' # bad + called = [] + + w_res = space.appexec([w_g], """(g): + return g('self', 11) + """) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 11)))) + assert len(called) == 2 + assert called[0] == 'funcrun' # bad + called = [] + + w_res = space.appexec([w_g], """(g): + class A(object): + m = g # not a builtin function, so works as method + a = A() + y = a.m(33) + return y == ('g', a, 33) + """) + assert space.is_true(w_res) + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) + class AppTestKeywordsToBuiltinSanity(object): From pedronis at codespeak.net Sat Aug 9 17:13:31 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 17:13:31 +0200 (CEST) Subject: [pypy-svn] r57141 - pypy/branch/garden-call-code/pypy/interpreter/test Message-ID: <20080809151331.CB21716A02B@codespeak.net> Author: pedronis Date: Sat Aug 9 17:13:31 2008 New Revision: 57141 Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: show pessimisation with CALL_METHOD Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 17:13:31 2008 @@ -1,4 +1,4 @@ - +from pypy.conftest import gettestobjspace from pypy.interpreter import gateway from pypy.interpreter import argument import py @@ -570,18 +570,30 @@ assert len(called) == 2 assert called[0] == 'funcrun' # bad called = [] - + w_res = space.appexec([w_g], """(g): class A(object): m = g # not a builtin function, so works as method - a = A() - y = a.m(33) - return y == ('g', a, 33) + d = {'A': A} + exec \"\"\" +# own compiler +a = A() +y = a.m(33) +\"\"\" in d + return d['y'] == ('g', d['a'], 33) """) assert space.is_true(w_res) assert len(called) == 1 assert isinstance(called[0], argument.AbstractArguments) - + +class 
TestPassThroughArguments_CALL_METHOD(TestPassThroughArguments): + + def setup_class(cls): + space = gettestobjspace(usemodules=('_stackless',), **{ + "objspace.opcodes.CALL_METHOD": True + }) + cls.space = space + py.test.skip("shows pessimization with CALL_METHOD") class AppTestKeywordsToBuiltinSanity(object): From pedronis at codespeak.net Sat Aug 9 22:56:48 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 9 Aug 2008 22:56:48 +0200 (CEST) Subject: [pypy-svn] r57153 - in pypy/branch/garden-call-code/pypy/interpreter: . test Message-ID: <20080809205648.F15432A00E3@codespeak.net> Author: pedronis Date: Sat Aug 9 22:56:46 2008 New Revision: 57153 Modified: pypy/branch/garden-call-code/pypy/interpreter/eval.py pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/interpreter/gateway.py pypy/branch/garden-call-code/pypy/interpreter/pycode.py pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Log: trying to fix the CALL_METHOD speed regression, trying to benchmark somewhere else Modified: pypy/branch/garden-call-code/pypy/interpreter/eval.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/eval.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/eval.py Sat Aug 9 22:56:46 2008 @@ -11,7 +11,10 @@ Abstract base class.""" hidden_applevel = False - fast_natural_arity = -1 + # n >= 0 : arity + # -n: special cases + # -99: hopeless + fast_natural_arity = -99 def __init__(self, co_name): self.co_name = co_name Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sat Aug 9 22:56:46 2008 @@ -46,7 +46,8 @@ def funccall(self, *args_w): # speed hack code = self.getcode() # hook for the jit nargs = len(args_w) - if nargs == code.fast_natural_arity: + fast_natural_arity = code.fast_natural_arity + if nargs == fast_natural_arity: if nargs == 0: return code.fastcall_0(self.space, self) elif nargs == 1: @@ -59,10 +60,17 @@ elif nargs == 4: return code.fastcall_4(self.space, self, args_w[0], args_w[1], args_w[2], args_w[3]) + elif nargs >= 1 and fast_natural_arity == -1: + from pypy.interpreter import gateway + assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) + return code.funcrun_obj(self, args_w[0], + Arguments(self.space, + list(args_w[1:]))) return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack code = self.getcode() # hook for the jit + fast_natural_arity = code.fast_natural_arity if nargs == code.fast_natural_arity: if nargs == 0: return code.fastcall_0(self.space, self) @@ -73,11 +81,22 @@ frame.peekvalue(0)) elif nargs == 3: return code.fastcall_3(self.space, self, frame.peekvalue(2), - frame.peekvalue(1), frame.peekvalue(0)) + frame.peekvalue(1), frame.peekvalue(0)) elif nargs == 4: return code.fastcall_4(self.space, self, frame.peekvalue(3), frame.peekvalue(2), frame.peekvalue(1), - frame.peekvalue(0)) + frame.peekvalue(0)) + elif fast_natural_arity == -1 and nargs > 1: + from pypy.interpreter import gateway + assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) + w_obj = frame.peekvalue(nargs-1) + args = frame.make_arguments(nargs-1) + try: + return code.funcrun_obj(self, w_obj, args) + finally: + if isinstance(args, 
ArgumentsFromValuestack): + args.frame = None + args = frame.make_arguments(nargs) try: return self.call_args(args) Modified: pypy/branch/garden-call-code/pypy/interpreter/gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/gateway.py Sat Aug 9 22:56:46 2008 @@ -536,6 +536,7 @@ return w_result class BuiltinCodePassThroughArguments1(BuiltinCode): + fast_natural_arity = -1 def funcrun_obj(self, func, w_obj, args): space = func.space Modified: pypy/branch/garden-call-code/pypy/interpreter/pycode.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/pycode.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/pycode.py Sat Aug 9 22:56:46 2008 @@ -162,7 +162,7 @@ def _compute_fastcall(self): # Speed hack! - self.fast_natural_arity = -1 + self.fast_natural_arity = -99 if not (0 <= self.co_argcount <= 4): return if self.co_flags & (CO_VARARGS | CO_VARKEYWORDS): Modified: pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/test/test_gateway.py Sat Aug 9 22:56:46 2008 @@ -559,16 +559,16 @@ w_res = space.call_function(w_g, w_self) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self')))) - assert len(called) == 2 - assert called[0] == 'funcrun' # bad + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) called = [] w_res = space.appexec([w_g], """(g): return g('self', 11) """) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 11)))) - assert len(called) == 2 - assert called[0] == 'funcrun' # bad + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) called = [] w_res = space.appexec([w_g], """(g): @@ -593,7 +593,6 @@ "objspace.opcodes.CALL_METHOD": True }) cls.space = space - py.test.skip("shows pessimization with CALL_METHOD") class AppTestKeywordsToBuiltinSanity(object): From pedronis at codespeak.net Sun Aug 10 01:18:28 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 01:18:28 +0200 (CEST) Subject: [pypy-svn] r57155 - pypy/branch/garden-call-code/pypy/interpreter Message-ID: <20080809231828.E6A3416A0C5@codespeak.net> Author: pedronis Date: Sun Aug 10 01:18:27 2008 New Revision: 57155 Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py Log: oops Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sun Aug 10 01:18:27 2008 @@ -86,7 +86,7 @@ return code.fastcall_4(self.space, self, frame.peekvalue(3), frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) - elif fast_natural_arity == -1 and nargs > 1: + elif fast_natural_arity == -1 and nargs >= 1: from pypy.interpreter import gateway assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) w_obj = frame.peekvalue(nargs-1) From pedronis at codespeak.net Sun Aug 10 11:40:22 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 11:40:22 +0200 (CEST) Subject: [pypy-svn] r57157 - 
pypy/branch/garden-call-code/pypy/interpreter/callbench Message-ID: <20080810094022.4656E16A227@codespeak.net> Author: pedronis Date: Sun Aug 10 11:40:20 2008 New Revision: 57157 Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/sup.py Log: trying to make benchmarks more stable Modified: pypy/branch/garden-call-code/pypy/interpreter/callbench/sup.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/callbench/sup.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/callbench/sup.py Sun Aug 10 11:40:20 2008 @@ -16,10 +16,20 @@ st[0] = t() ref(n, start) - elapsed_ref = t() - st[0] + elapsed_ref1 = t() - st[0] + ref(n, start) + elapsed_ref2 = t() - st[0] + ref(n, start) + elapsed_ref3 = t() - st[0] + elapsed_ref = min(elapsed_ref1, elapsed_ref2, elapsed_ref3) func(n, start) - elapsed = t() - st[0] + elapsed1 = t() - st[0] + func(n, start) + elapsed2 = t() - st[0] + func(n, start) + elapsed3 = t() - st[0] + elapsed = min(elapsed1, elapsed2, elapsed3) #if elapsed < elapsed_ref*10: # print "not enough meat", elapsed, elapsed_ref From pedronis at codespeak.net Sun Aug 10 13:17:04 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 13:17:04 +0200 (CEST) Subject: [pypy-svn] r57158 - in pypy/branch/garden-call-code/pypy: interpreter module/operator Message-ID: <20080810111704.AAA8E498089@codespeak.net> Author: pedronis Date: Sun Aug 10 13:17:03 2008 New Revision: 57158 Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/module/operator/__init__.py pypy/branch/garden-call-code/pypy/module/operator/app_operator.py pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py Log: avoid repetion Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sun Aug 10 13:17:03 2008 @@ -71,7 +71,7 @@ def funccall_valuestack(self, nargs, frame): # speed hack code = self.getcode() # hook for the jit fast_natural_arity = code.fast_natural_arity - if nargs == code.fast_natural_arity: + if nargs == fast_natural_arity: if nargs == 0: return code.fastcall_0(self.space, self) elif nargs == 1: Modified: pypy/branch/garden-call-code/pypy/module/operator/__init__.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/operator/__init__.py (original) +++ pypy/branch/garden-call-code/pypy/module/operator/__init__.py Sun Aug 10 13:17:03 2008 @@ -21,15 +21,16 @@ 'countOf', 'delslice', 'getslice', 'indexOf', 'isMappingType', 'isNumberType', 'isSequenceType', 'repeat', 'setslice', + 'attrgetter', 'itemgetter' ] for name in app_names: appleveldefs[name] = 'app_operator.%s' % name - interp_names = ['index', 'abs', 'add', 'and_', 'attrgetter', + interp_names = ['index', 'abs', 'add', 'and_', 'concat', 'contains', 'delitem', 'div', 'eq', 'floordiv', 'ge', 'getitem', 'gt', 'inv', - 'invert', 'is_', 'is_not', 'isCallable', 'itemgetter', + 'invert', 'is_', 'is_not', 'isCallable', 'le', 'lshift', 'lt', 'mod', 'mul', 'ne', 'neg', 'not_', 'or_', 'pos', 'pow', 'rshift', 'setitem', 'sequenceIncludes', Modified: pypy/branch/garden-call-code/pypy/module/operator/app_operator.py ============================================================================== 
--- pypy/branch/garden-call-code/pypy/module/operator/app_operator.py (original) +++ pypy/branch/garden-call-code/pypy/module/operator/app_operator.py Sun Aug 10 13:17:03 2008 @@ -63,3 +63,20 @@ a[b:c] = d __setslice__ = setslice +class attrgetter(object): + + def __init__(self, name): + self.name = name + + def __call__(self, obj): + return getattr(obj, self.name) + +class itemgetter(object): + + def __init__(self, index): + self.index = index + + def __call__(self, obj): + return obj[self.index] + + Modified: pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py (original) +++ pypy/branch/garden-call-code/pypy/module/operator/interp_operator.py Sun Aug 10 13:17:03 2008 @@ -157,54 +157,3 @@ def xor(space, w_a, w_b): 'xor(a, b) -- Same as a ^ b.' return space.xor(w_a, w_b) - -# ____________________________________________________________ -# attrgetter and itergetter - -from pypy.interpreter import eval, function -from pypy.interpreter.error import OperationError - -class SimpleClosureBuiltinFunction(function.BuiltinFunction): - - def __init__(self, space, code, w_index): - assert isinstance(code, SimpleClosureCode) - function.Function.__init__(self, space, code) - self.w_index = w_index - - -class SimpleClosureCode(eval.Code): - fast_natural_arity = 1 - - sig = (['obj'], None, None) - - def __init__(self, co_name, is_attrgetter): - eval.Code.__init__(self, co_name) - self.is_attrgetter = is_attrgetter - - def signature(self): - return self.sig - - def funcrun(self, func, args): - space = func.space - [w_obj] = args.parse(func.name, self.sig) - return self.fastcall_1(space, func, w_obj) - - def fastcall_1(self, space, func, w_obj): - if not isinstance(func, SimpleClosureBuiltinFunction): - raise OperationError(space.w_TypeError, space.wrap("bad call")) - w_index = func.w_index - if self.is_attrgetter: - return space.getattr(w_obj, w_index) - else: - return space.getitem(w_obj, w_index) - -attrgetter_code = SimpleClosureCode("attrgetter", is_attrgetter=True) -itemgetter_code = SimpleClosureCode("itemgetter", is_attrgetter=False) - -def attrgetter(space, w_attr): - func = SimpleClosureBuiltinFunction(space, attrgetter_code, w_attr) - return space.wrap(func) - -def itemgetter(space, w_idx): - func = SimpleClosureBuiltinFunction(space, itemgetter_code, w_idx) - return space.wrap(func) From pedronis at codespeak.net Sun Aug 10 15:13:25 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 15:13:25 +0200 (CEST) Subject: [pypy-svn] r57161 - pypy/dist/pypy/translator/test Message-ID: <20080810131325.4FE9949801A@codespeak.net> Author: pedronis Date: Sun Aug 10 15:13:22 2008 New Revision: 57161 Added: pypy/dist/pypy/translator/test/test_stackcheck.py (contents, props changed) Log: add a stackcheck insertion test independent from stackless Added: pypy/dist/pypy/translator/test/test_stackcheck.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/translator/test/test_stackcheck.py Sun Aug 10 15:13:22 2008 @@ -0,0 +1,35 @@ +from pypy import conftest +from pypy.translator.translator import TranslationContext +from pypy.translator.backendopt.all import backend_optimizations +from pypy.translator.transform import insert_ll_stackcheck + +def test_simple(): + class A(object): + def __init__(self, n): + self.n = n + + def f(a): + x = A(a.n+1) + if 
x.n == 10: + return + f(x) + + def g(n): + f(A(n)) + + t = TranslationContext() + a = t.buildannotator() + a.build_types(g, [int]) + a.simplify() + t.buildrtyper().specialize() + backend_optimizations(t) + t.checkgraphs() + n = insert_ll_stackcheck(t) + t.checkgraphs() + assert n == 1 + if conftest.option.view: + t.view() + + + + From pedronis at codespeak.net Sun Aug 10 21:34:04 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 21:34:04 +0200 (CEST) Subject: [pypy-svn] r57182 - in pypy/branch/garden-call-code/pypy: interpreter module/__builtin__/test module/pypyjit objspace/std Message-ID: <20080810193404.9F49916A165@codespeak.net> Author: pedronis Date: Sun Aug 10 21:34:04 2008 New Revision: 57182 Modified: pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py pypy/branch/garden-call-code/pypy/interpreter/function.py pypy/branch/garden-call-code/pypy/module/__builtin__/test/test_builtin.py pypy/branch/garden-call-code/pypy/module/pypyjit/portal.py pypy/branch/garden-call-code/pypy/objspace/std/objspace.py Log: - kill Function.funccall_obj_valuestack - prune funccall_starX versions Modified: pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/baseobjspace.py Sun Aug 10 21:34:04 2008 @@ -679,15 +679,17 @@ return self.call_args(w_callable, args) def call_function(self, w_func, *args_w): - if not self.config.objspace.disable_call_speedhacks: + nargs = len(args_w) # used for pruning funccall versions + if not self.config.objspace.disable_call_speedhacks and nargs < 5: # XXX start of hack for performance from pypy.interpreter.function import Function, Method if isinstance(w_func, Method): w_inst = w_func.w_instance if w_inst is not None: - func = w_func.w_function - if isinstance(func, Function): - return func.funccall(w_inst, *args_w) + if nargs < 4: + func = w_func.w_function + if isinstance(func, Function): + return func.funccall(w_inst, *args_w) elif args_w and self.is_true( self.abstract_isinstance(args_w[0], w_func.w_class)): w_func = w_func.w_function @@ -707,9 +709,10 @@ if isinstance(w_func, Method): w_inst = w_func.w_instance if w_inst is not None: - func = w_func.w_function - if isinstance(func, Function): - return func.funccall_obj_valuestack(w_inst, nargs, frame) + w_func = w_func.w_function + # reuse callable stack place for w_inst + frame.settopvalue(w_inst, nargs) + nargs += 1 elif nargs > 0 and self.is_true( self.abstract_isinstance(frame.peekvalue(nargs-1), # :-( w_func.w_class)): Modified: pypy/branch/garden-call-code/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code/pypy/interpreter/function.py Sun Aug 10 21:34:04 2008 @@ -104,30 +104,6 @@ if isinstance(args, ArgumentsFromValuestack): args.frame = None - def funccall_obj_valuestack(self, w_obj, nargs, frame): # speed hack - code = self.getcode() # hook for the jit - if nargs+1 == code.fast_natural_arity: - if nargs == 0: - return code.fastcall_1(self.space, self, w_obj) - elif nargs == 1: - return code.fastcall_2(self.space, self, w_obj, - frame.peekvalue(0)) - elif nargs == 2: - return code.fastcall_3(self.space, self, w_obj, - frame.peekvalue(1), - frame.peekvalue(0)) - elif nargs == 3: - return 
code.fastcall_4(self.space, self, w_obj, - frame.peekvalue(2), - frame.peekvalue(1), - frame.peekvalue(0)) - stkargs = frame.make_arguments(nargs) - try: - return self.call_obj_args(w_obj, stkargs) - finally: - if isinstance(stkargs, ArgumentsFromValuestack): - stkargs.frame = None - def getdict(self): if self.w_func_dict is None: self.w_func_dict = self.space.newdict() Modified: pypy/branch/garden-call-code/pypy/module/__builtin__/test/test_builtin.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/__builtin__/test/test_builtin.py (original) +++ pypy/branch/garden-call-code/pypy/module/__builtin__/test/test_builtin.py Sun Aug 10 21:34:04 2008 @@ -503,6 +503,28 @@ s = """ """ # XXX write this test! + def test_shadow_case_bound_method(self): + s = """def test(l): + n = len(l) + old_len = len + class A(object): + x = 5 + def length(self, o): + return self.x*old_len(o) + import __builtin__ + __builtin__.len = A().length + try: + m = len(l) + finally: + __builtin__.len = old_len + return n+m + """ + ns = {} + exec s in ns + res = ns["test"]([2,3,4]) + assert res == 18 + + class TestInternal: def setup_method(self,method): Modified: pypy/branch/garden-call-code/pypy/module/pypyjit/portal.py ============================================================================== --- pypy/branch/garden-call-code/pypy/module/pypyjit/portal.py (original) +++ pypy/branch/garden-call-code/pypy/module/pypyjit/portal.py Sun Aug 10 21:34:04 2008 @@ -111,8 +111,6 @@ # self.seepath(pypy.interpreter.pyframe.PyFrame.CALL_FUNCTION, pypy.interpreter.function.Function.funccall_valuestack) - self.seepath(pypy.interpreter.pyframe.PyFrame.CALL_FUNCTION, - pypy.interpreter.function.Function.funccall_obj_valuestack) Modified: pypy/branch/garden-call-code/pypy/objspace/std/objspace.py ============================================================================== --- pypy/branch/garden-call-code/pypy/objspace/std/objspace.py (original) +++ pypy/branch/garden-call-code/pypy/objspace/std/objspace.py Sun Aug 10 21:34:04 2008 @@ -2,8 +2,9 @@ from pypy.interpreter.baseobjspace import ObjSpace, Wrappable from pypy.interpreter.error import OperationError, debug_print from pypy.interpreter.typedef import get_unique_interplevel_subclass -from pypy.interpreter.argument import Arguments +from pypy.interpreter import argument from pypy.interpreter import pyframe +from pypy.interpreter import function from pypy.interpreter.pyopcode import unrolling_compare_dispatch_table, \ BytecodeCorruption from pypy.rlib.objectmodel import instantiate @@ -147,13 +148,23 @@ nargs = oparg & 0xff w_function = w_value try: - w_result = f.space.call_valuestack(w_function, nargs, f) + w_result = f.call_likely_builtin(w_function, nargs) # XXX XXX fix the problem of resume points! 
#rstack.resume_point("CALL_FUNCTION", f, nargs, returns=w_result) finally: f.dropvalues(nargs) f.pushvalue(w_result) + def call_likely_builtin(f, w_function, nargs): + if isinstance(w_function, function.Function): + return w_function.funccall_valuestack(nargs, f) + args = f.make_arguments(nargs) + try: + return f.space.call_args(w_function, args) + finally: + if isinstance(args, argument.ArgumentsFromValuestack): + args.frame = None + if self.config.objspace.opcodes.CALL_METHOD: # def LOOKUP_METHOD(...): from pypy.objspace.std.callmethod import LOOKUP_METHOD From pedronis at codespeak.net Sun Aug 10 23:55:52 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 23:55:52 +0200 (CEST) Subject: [pypy-svn] r57183 - in pypy/dist/pypy: interpreter interpreter/callbench interpreter/test module/__builtin__/test module/_stackless/test module/operator module/pypyjit module/thread objspace objspace/std Message-ID: <20080810215552.6333D16A2F3@codespeak.net> Author: pedronis Date: Sun Aug 10 23:55:49 2008 New Revision: 57183 Added: pypy/dist/pypy/interpreter/callbench/bltn_instantiate.py - copied unchanged from r57182, pypy/branch/garden-call-code/pypy/interpreter/callbench/bltn_instantiate.py pypy/dist/pypy/interpreter/callbench/bltna1.py - copied unchanged from r57182, pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna1.py pypy/dist/pypy/interpreter/callbench/bltna2.py - copied unchanged from r57182, pypy/branch/garden-call-code/pypy/interpreter/callbench/bltna2.py pypy/dist/pypy/interpreter/callbench/inst_no_init.py - copied unchanged from r57182, pypy/branch/garden-call-code/pypy/interpreter/callbench/inst_no_init.py pypy/dist/pypy/module/_stackless/test/test_frame_chain_reconstruction.py - copied unchanged from r57182, pypy/branch/garden-call-code/pypy/module/_stackless/test/test_frame_chain_reconstruction.py Modified: pypy/dist/pypy/interpreter/argument.py pypy/dist/pypy/interpreter/baseobjspace.py pypy/dist/pypy/interpreter/callbench/inst.py pypy/dist/pypy/interpreter/callbench/sup.py pypy/dist/pypy/interpreter/eval.py pypy/dist/pypy/interpreter/function.py pypy/dist/pypy/interpreter/gateway.py pypy/dist/pypy/interpreter/pycode.py pypy/dist/pypy/interpreter/test/test_function.py pypy/dist/pypy/interpreter/test/test_gateway.py pypy/dist/pypy/interpreter/test/test_objspace.py pypy/dist/pypy/module/__builtin__/test/test_builtin.py pypy/dist/pypy/module/operator/__init__.py pypy/dist/pypy/module/operator/app_operator.py pypy/dist/pypy/module/operator/interp_operator.py pypy/dist/pypy/module/pypyjit/portal.py pypy/dist/pypy/module/thread/os_local.py pypy/dist/pypy/objspace/descroperation.py pypy/dist/pypy/objspace/std/objspace.py pypy/dist/pypy/objspace/std/proxyobject.py pypy/dist/pypy/objspace/std/typeobject.py Log: merging first garden-call-code branch - avoid the strange interface checking whether the result is None for fastcall_# methods - killed Arguments.popfirst and ArgumentsPrepended introducing call_obj_args and related methods instead - kill Function.funccall_obj_valuestack - prune funccall_star# versions - move itemgetter and attrgetter back to app-level for simplicity for now - various new tests Modified: pypy/dist/pypy/interpreter/argument.py ============================================================================== --- pypy/dist/pypy/interpreter/argument.py (original) +++ pypy/dist/pypy/interpreter/argument.py Sun Aug 10 23:55:49 2008 @@ -6,32 +6,62 @@ class AbstractArguments: - def parse(self, fnname, signature, defaults_w=[]): + def 
parse_into_scope(self, w_firstarg, + scope_w, fnname, signature, defaults_w=[]): """Parse args and kwargs to initialize a frame according to the signature of code object. + Store the argumentvalues into scope_w. + scope_w must be big enough for signature. """ + argnames, varargname, kwargname = signature + has_vararg = varargname is not None + has_kwarg = kwargname is not None try: - return self.match_signature(signature, defaults_w) + return self._match_signature(w_firstarg, + scope_w, argnames, has_vararg, + has_kwarg, defaults_w, 0) except ArgErr, e: raise OperationError(self.space.w_TypeError, self.space.wrap(e.getmsg(fnname))) - def parse_into_scope(self, scope_w, fnname, signature, defaults_w=[]): - """Parse args and kwargs to initialize a frame - according to the signature of code object. - Store the argumentvalues into scope_w. - scope_w must be big enough for signature. + def _parse(self, w_firstarg, signature, defaults_w, blindargs=0): + """Parse args and kwargs according to the signature of a code object, + or raise an ArgErr in case of failure. """ argnames, varargname, kwargname = signature + scopelen = len(argnames) has_vararg = varargname is not None has_kwarg = kwargname is not None + if has_vararg: + scopelen += 1 + if has_kwarg: + scopelen += 1 + scope_w = [None] * scopelen + self._match_signature(w_firstarg, scope_w, argnames, has_vararg, has_kwarg, defaults_w, blindargs) + return scope_w + + def parse(self, fnname, signature, defaults_w=[], blindargs=0): + """Parse args and kwargs to initialize a frame + according to the signature of code object. + """ try: - return self._match_signature(scope_w, argnames, has_vararg, - has_kwarg, defaults_w, 0, None) + return self._parse(None, signature, defaults_w, blindargs) except ArgErr, e: raise OperationError(self.space.w_TypeError, self.space.wrap(e.getmsg(fnname))) + # xxx have only this one + def parse_obj(self, w_firstarg, + fnname, signature, defaults_w=[], blindargs=0): + """Parse args and kwargs to initialize a frame + according to the signature of code object. + """ + try: + return self._parse(w_firstarg, signature, defaults_w, blindargs) + except ArgErr, e: + raise OperationError(self.space.w_TypeError, + self.space.wrap(e.getmsg(fnname))) + def frompacked(space, w_args=None, w_kwds=None): """Convenience static method to build an Arguments from a wrapped sequence and a wrapped dictionary.""" @@ -68,31 +98,11 @@ return Arguments(space, args_w, kwds_w, w_star, w_starstar) fromshape = staticmethod(fromshape) - def prepend(self, w_firstarg): - "Return a new Arguments with a new argument inserted first." - return ArgumentsPrepended(self, w_firstarg) - - def popfirst(self): - """For optimization only: might return (w_firstarg, args_with_rest), - or might just raise IndexError. - """ - raise IndexError - def match_signature(self, signature, defaults_w): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. 
""" - argnames, varargname, kwargname = signature - scopelen = len(argnames) - has_vararg = varargname is not None - has_kwarg = kwargname is not None - if has_vararg: - scopelen += 1 - if has_kwarg: - scopelen += 1 - scope_w = [None] * scopelen - self._match_signature(scope_w, argnames, has_vararg, has_kwarg, defaults_w, 0, None) - return scope_w + return self._parse(None, signature, defaults_w) def unmatch_signature(self, signature, data_w): """kind of inverse of match_signature""" @@ -156,7 +166,12 @@ """ raise NotImplementedError() - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): + def prepend(self, w_firstarg): + """ Purely abstract + """ + raise NotImplementedError() + + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0): """ Purely abstract """ raise NotImplementedError() @@ -164,66 +179,9 @@ def fixedunpack(self, argcount): """ Purely abstract """ - raise NotImplementedError() - -class ArgumentsPrepended(AbstractArguments): - def __init__(self, args, w_firstarg): - self.space = args.space - self.args = args - self.w_firstarg = w_firstarg - - def firstarg(self): - "Return the first argument for inspection." - return self.w_firstarg - - def popfirst(self): - return self.w_firstarg, self.args - - def __repr__(self): - return 'ArgumentsPrepended(%r, %r)' % (self.args, self.w_firstarg) - - def has_keywords(self): - return self.args.has_keywords() - - def unpack(self): - arguments_w, kwds_w = self.args.unpack() - return ([self.w_firstarg] + arguments_w), kwds_w - - def fixedunpack(self, argcount): - if argcount <= 0: - raise ValueError, "too many arguments (%d expected)" % argcount # XXX: Incorrect - return [self.w_firstarg] + self.args.fixedunpack(argcount - 1) - - def _rawshape(self, nextra=0): - return self.args._rawshape(nextra + 1) - - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): - """Parse args and kwargs according to the signature of a code object, - or raise an ArgErr in case of failure. - Return the number of arguments filled in. - """ - if blindargs < len(argnames): - scope_w[blindargs] = self.w_firstarg - else: - if extravarargs is None: - extravarargs = [ self.w_firstarg ] - else: - extravarargs.append(self.w_firstarg) - return self.args._match_signature(scope_w, argnames, has_vararg, - has_kwarg, defaults_w, - blindargs + 1, extravarargs) - - def flatten(self): - (shape_cnt, shape_keys, shape_star, shape_stst), data_w = self.args.flatten() - data_w.insert(0, self.w_firstarg) - return (shape_cnt + 1, shape_keys, shape_star, shape_stst), data_w + raise NotImplementedError() - def num_args(self): - return self.args.num_args() + 1 - def num_kwds(self): - return self.args.num_kwds() - class ArgumentsFromValuestack(AbstractArguments): """ Collects the arguments of a function call as stored on a PyFrame @@ -242,14 +200,11 @@ return None return self.frame.peekvalue(self.nargs - 1) - def popfirst(self): - if self.nargs <= 0: - raise IndexError - frame = self.frame - newnargs = self.nargs-1 - return (frame.peekvalue(newnargs), - ArgumentsFromValuestack(self.space, frame, newnargs)) - + def prepend(self, w_firstarg): + "Return a new Arguments with a new argument inserted first." 
+ args_w = self.frame.peekvalues(self.nargs) + return Arguments(self.space, [w_firstarg] + args_w) + def __repr__(self): return 'ArgumentsFromValuestack(%r, %r)' % (self.frame, self.nargs) @@ -276,52 +231,65 @@ def _rawshape(self, nextra=0): return nextra + self.nargs, (), False, False - def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None): + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. Return the number of arguments filled in. """ co_argcount = len(argnames) - if blindargs + self.nargs + len(defaults_w) < co_argcount: + extravarargs = None + input_argcount = 0 + + if w_firstarg is not None: + upfront = 1 + if co_argcount > 0: + scope_w[0] = w_firstarg + input_argcount = 1 + else: + extravarargs = [ w_firstarg ] + else: + upfront = 0 + + avail = upfront + self.nargs + + if avail + len(defaults_w) < co_argcount: raise ArgErrCount(blindargs + self.nargs , 0, (co_argcount, has_vararg, has_kwarg), - defaults_w, co_argcount - blindargs - - self.nargs - len(defaults_w)) - if blindargs + self.nargs > co_argcount and not has_vararg: + defaults_w, co_argcount - avail - len(defaults_w)) + if avail > co_argcount and not has_vararg: raise ArgErrCount(blindargs + self.nargs, 0, (co_argcount, has_vararg, has_kwarg), defaults_w, 0) - if blindargs + self.nargs >= co_argcount: - for i in range(co_argcount - blindargs): - scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + if avail >= co_argcount: + for i in range(co_argcount - input_argcount): + scope_w[i + input_argcount] = self.frame.peekvalue(self.nargs - 1 - i) if has_vararg: - if blindargs > co_argcount: + if upfront > co_argcount: + assert extravarargs is not None stararg_w = extravarargs for i in range(self.nargs): stararg_w.append(self.frame.peekvalue(self.nargs - 1 - i)) else: - stararg_w = [None] * (self.nargs + blindargs - co_argcount) - for i in range(co_argcount - blindargs, self.nargs): - stararg_w[i - co_argcount + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + args_left = co_argcount - upfront + stararg_w = [None] * (avail - co_argcount) + for i in range(args_left, self.nargs): + stararg_w[i - args_left] = self.frame.peekvalue(self.nargs - 1 - i) scope_w[co_argcount] = self.space.newtuple(stararg_w) - co_argcount += 1 else: for i in range(self.nargs): - scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i) + scope_w[i + input_argcount] = self.frame.peekvalue(self.nargs - 1 - i) ndefaults = len(defaults_w) - missing = co_argcount - self.nargs - blindargs + missing = co_argcount - avail first_default = ndefaults - missing for i in range(missing): - scope_w[self.nargs + blindargs + i] = defaults_w[first_default + i] + scope_w[avail + i] = defaults_w[first_default + i] if has_vararg: scope_w[co_argcount] = self.space.newtuple([]) - co_argcount += 1 if has_kwarg: - scope_w[co_argcount] = self.space.newdict() - co_argcount += 1 - return co_argcount + scope_w[co_argcount + has_vararg] = self.space.newdict() + return co_argcount + has_vararg + has_kwarg def flatten(self): data_w = [None] * self.nargs @@ -382,12 +350,12 @@ "Return a ([w1,w2...], {'kw':w3...}) pair." 
self._unpack() return self.arguments_w, self.kwds_w - - def popfirst(self): - self._unpack() - return self.arguments_w[0], Arguments(self.space, self.arguments_w[1:], - kwds_w = self.kwds_w) + def prepend(self, w_firstarg): + "Return a new Arguments with a new argument inserted first." + return Arguments(self.space, [w_firstarg] + self.arguments_w, + self.kwds_w, self.w_stararg, self.w_starstararg) + def _unpack(self): "unpack the *arg and **kwd into w_arguments and kwds_w" # --- unpack the * argument now --- @@ -460,9 +428,8 @@ ### Parsing for function calls ### - def _match_signature(self, scope_w, argnames, has_vararg=False, - has_kwarg=False, defaults_w=[], blindargs=0, - extravarargs=None): + def _match_signature(self, w_firstarg, scope_w, argnames, has_vararg=False, + has_kwarg=False, defaults_w=[], blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. Return the number of arguments filled in. @@ -474,10 +441,23 @@ # scope_w = resulting list of wrapped values # co_argcount = len(argnames) # expected formal arguments, without */** + extravarargs = None + input_argcount = 0 + + if w_firstarg is not None: + upfront = 1 + if co_argcount > 0: + scope_w[0] = w_firstarg + input_argcount = 1 + else: + extravarargs = [ w_firstarg ] + else: + upfront = 0 + if self.w_stararg is not None: # There is a case where we don't have to unpack() a w_stararg: # if it matches exactly a *arg in the signature. - if (len(self.arguments_w) + blindargs == co_argcount and + if (len(self.arguments_w) + upfront == co_argcount and has_vararg and self.space.is_w(self.space.type(self.w_stararg), self.space.w_tuple)): @@ -489,23 +469,25 @@ self._unpack() args_w = self.arguments_w + num_args = len(args_w) + kwds_w = self.kwds_w num_kwds = 0 if kwds_w is not None: num_kwds = len(kwds_w) - - # put as many positional input arguments into place as available - if blindargs >= co_argcount: - input_argcount = co_argcount - elif len(args_w) + blindargs > co_argcount: - for i in range(co_argcount - blindargs): - scope_w[i + blindargs] = args_w[i] - input_argcount = co_argcount - next_arg = co_argcount - blindargs - else: - for i in range(len(args_w)): - scope_w[i + blindargs] = args_w[i] - input_argcount = len(args_w) + blindargs + + avail = num_args + upfront + + if input_argcount < co_argcount: + # put as many positional input arguments into place as available + if avail > co_argcount: + take = co_argcount - input_argcount + else: + take = num_args + + for i in range(take): + scope_w[i + input_argcount] = args_w[i] + input_argcount += take # check that no keyword argument conflicts with these # note that for this purpose we ignore the first blindargs, @@ -542,21 +524,21 @@ # collect extra positional arguments into the *vararg if has_vararg: if self.w_stararg is None: # common case - args_left = co_argcount - blindargs + args_left = co_argcount - upfront if args_left < 0: # check required by rpython assert extravarargs is not None starargs_w = extravarargs - if len(args_w): + if num_args: starargs_w.extend(args_w) - elif len(args_w) > args_left: + elif num_args > args_left: starargs_w = args_w[args_left:] else: starargs_w = [] scope_w[co_argcount] = self.space.newtuple(starargs_w) else: # shortcut for the non-unpack() case above scope_w[co_argcount] = self.w_stararg - elif len(args_w) + blindargs > co_argcount: - raise ArgErrCount(len(args_w) + blindargs, num_kwds, + elif avail > co_argcount: + raise ArgErrCount(avail, num_kwds, (co_argcount, has_vararg, 
has_kwarg), defaults_w, 0) @@ -571,7 +553,7 @@ raise ArgErrUnknownKwds(remainingkwds_w) if missing: - raise ArgErrCount(len(args_w) + blindargs, num_kwds, + raise ArgErrCount(avail, num_kwds, (co_argcount, has_vararg, has_kwarg), defaults_w, missing) Modified: pypy/dist/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/dist/pypy/interpreter/baseobjspace.py (original) +++ pypy/dist/pypy/interpreter/baseobjspace.py Sun Aug 10 23:55:49 2008 @@ -665,20 +665,31 @@ return True return False + def call_obj_args(self, w_callable, w_obj, args): + if not self.config.objspace.disable_call_speedhacks: + # XXX start of hack for performance + from pypy.interpreter.function import Function + if isinstance(w_callable, Function): + return w_callable.call_obj_args(w_obj, args) + # XXX end of hack for performance + return self.call_args(w_callable, args.prepend(w_obj)) + def call(self, w_callable, w_args, w_kwds=None): args = Arguments.frompacked(self, w_args, w_kwds) return self.call_args(w_callable, args) def call_function(self, w_func, *args_w): - if not self.config.objspace.disable_call_speedhacks: + nargs = len(args_w) # used for pruning funccall versions + if not self.config.objspace.disable_call_speedhacks and nargs < 5: # XXX start of hack for performance from pypy.interpreter.function import Function, Method if isinstance(w_func, Method): w_inst = w_func.w_instance if w_inst is not None: - func = w_func.w_function - if isinstance(func, Function): - return func.funccall(w_inst, *args_w) + if nargs < 4: + func = w_func.w_function + if isinstance(func, Function): + return func.funccall(w_inst, *args_w) elif args_w and self.is_true( self.abstract_isinstance(args_w[0], w_func.w_class)): w_func = w_func.w_function @@ -698,9 +709,10 @@ if isinstance(w_func, Method): w_inst = w_func.w_instance if w_inst is not None: - func = w_func.w_function - if isinstance(func, Function): - return func.funccall_obj_valuestack(w_inst, nargs, frame) + w_func = w_func.w_function + # reuse callable stack place for w_inst + frame.settopvalue(w_inst, nargs) + nargs += 1 elif nargs > 0 and self.is_true( self.abstract_isinstance(frame.peekvalue(nargs-1), # :-( w_func.w_class)): Modified: pypy/dist/pypy/interpreter/callbench/inst.py ============================================================================== --- pypy/dist/pypy/interpreter/callbench/inst.py (original) +++ pypy/dist/pypy/interpreter/callbench/inst.py Sun Aug 10 23:55:49 2008 @@ -5,6 +5,10 @@ def __init__(self): pass + class B(object): + def __init__(self, x, y): + pass + start() i = 0 while i < N: @@ -13,10 +17,10 @@ A() A() A() - A() - A() - A() - A() + B(1, 2) + B(1, 2) + B(1, 2) + B(1, 2) i+=1 run(w, 1000) Modified: pypy/dist/pypy/interpreter/callbench/sup.py ============================================================================== --- pypy/dist/pypy/interpreter/callbench/sup.py (original) +++ pypy/dist/pypy/interpreter/callbench/sup.py Sun Aug 10 23:55:49 2008 @@ -16,10 +16,20 @@ st[0] = t() ref(n, start) - elapsed_ref = t() - st[0] + elapsed_ref1 = t() - st[0] + ref(n, start) + elapsed_ref2 = t() - st[0] + ref(n, start) + elapsed_ref3 = t() - st[0] + elapsed_ref = min(elapsed_ref1, elapsed_ref2, elapsed_ref3) func(n, start) - elapsed = t() - st[0] + elapsed1 = t() - st[0] + func(n, start) + elapsed2 = t() - st[0] + func(n, start) + elapsed3 = t() - st[0] + elapsed = min(elapsed1, elapsed2, elapsed3) #if elapsed < elapsed_ref*10: # print "not enough meat", elapsed, elapsed_ref 
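The sup.py change above makes the harness time both the benchmark and its empty-loop reference three times and keep the minimum, on the theory that scheduling noise can only add time. The same idea as a small self-contained sketch (best_of is an invented helper, not part of the callbench suite):

    import time

    def best_of(runs, func, *args):
        # Repeat the measurement and keep the fastest run: the minimum is
        # the least noisy estimate of the real cost, assuming interference
        # can only make a run slower, never faster.
        best = None
        for _ in range(runs):
            t0 = time.time()
            func(*args)
            elapsed = time.time() - t0
            if best is None or elapsed < best:
                best = elapsed
        return best
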
Modified: pypy/dist/pypy/interpreter/eval.py ============================================================================== --- pypy/dist/pypy/interpreter/eval.py (original) +++ pypy/dist/pypy/interpreter/eval.py Sun Aug 10 23:55:49 2008 @@ -11,6 +11,11 @@ Abstract base class.""" hidden_applevel = False + # n >= 0 : arity + # -n: special cases + # -99: hopeless + fast_natural_arity = -99 + def __init__(self, co_name): self.co_name = co_name @@ -55,18 +60,20 @@ frame.setfastscope(scope_w) return frame.run() + def funcrun_obj(self, func, w_obj, args): + return self.funcrun(func, args.prepend(w_obj)) # a performance hack (see gateway.BuiltinCode1/2/3 and pycode.PyCode) def fastcall_0(self, space, func): - return None + raise NotImplementedError def fastcall_1(self, space, func, w1): - return None + raise NotImplementedError def fastcall_2(self, space, func, w1, w2): - return None + raise NotImplementedError def fastcall_3(self, space, func, w1, w2, w3): - return None + raise NotImplementedError def fastcall_4(self, space, func, w1, w2, w3, w4): - return None + raise NotImplementedError class Frame(Wrappable): """A frame is an environment supporting the execution of a code object. Modified: pypy/dist/pypy/interpreter/function.py ============================================================================== --- pypy/dist/pypy/interpreter/function.py (original) +++ pypy/dist/pypy/interpreter/function.py Sun Aug 10 23:55:49 2008 @@ -33,63 +33,70 @@ return "" % getattr(self, 'name', '?') def call_args(self, args): - return self.code.funcrun(self, args) # delegate activation to code + # delegate activation to code + return self.code.funcrun(self, args) + + def call_obj_args(self, w_obj, args): + # delegate activation to code + return self.code.funcrun_obj(self, w_obj, args) def getcode(self): return self.code def funccall(self, *args_w): # speed hack code = self.getcode() # hook for the jit - if len(args_w) == 0: - w_res = code.fastcall_0(self.space, self) - if w_res is not None: - return w_res - elif len(args_w) == 1: - w_res = code.fastcall_1(self.space, self, args_w[0]) - if w_res is not None: - return w_res - elif len(args_w) == 2: - w_res = code.fastcall_2(self.space, self, args_w[0], args_w[1]) - if w_res is not None: - return w_res - elif len(args_w) == 3: - w_res = code.fastcall_3(self.space, self, args_w[0], - args_w[1], args_w[2]) - if w_res is not None: - return w_res - elif len(args_w) == 4: - w_res = code.fastcall_4(self.space, self, args_w[0], - args_w[1], args_w[2], args_w[3]) - if w_res is not None: - return w_res + nargs = len(args_w) + fast_natural_arity = code.fast_natural_arity + if nargs == fast_natural_arity: + if nargs == 0: + return code.fastcall_0(self.space, self) + elif nargs == 1: + return code.fastcall_1(self.space, self, args_w[0]) + elif nargs == 2: + return code.fastcall_2(self.space, self, args_w[0], args_w[1]) + elif nargs == 3: + return code.fastcall_3(self.space, self, args_w[0], + args_w[1], args_w[2]) + elif nargs == 4: + return code.fastcall_4(self.space, self, args_w[0], + args_w[1], args_w[2], args_w[3]) + elif nargs >= 1 and fast_natural_arity == -1: + from pypy.interpreter import gateway + assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) + return code.funcrun_obj(self, args_w[0], + Arguments(self.space, + list(args_w[1:]))) return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack code = self.getcode() # hook for the jit - if nargs == 0: - w_res = code.fastcall_0(self.space, self) 
- if w_res is not None: - return w_res - elif nargs == 1: - w_res = code.fastcall_1(self.space, self, frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 2: - w_res = code.fastcall_2(self.space, self, frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 3: - w_res = code.fastcall_3(self.space, self, frame.peekvalue(2), - frame.peekvalue(1), frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 4: - w_res = code.fastcall_4(self.space, self, frame.peekvalue(3), - frame.peekvalue(2), frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res + fast_natural_arity = code.fast_natural_arity + if nargs == fast_natural_arity: + if nargs == 0: + return code.fastcall_0(self.space, self) + elif nargs == 1: + return code.fastcall_1(self.space, self, frame.peekvalue(0)) + elif nargs == 2: + return code.fastcall_2(self.space, self, frame.peekvalue(1), + frame.peekvalue(0)) + elif nargs == 3: + return code.fastcall_3(self.space, self, frame.peekvalue(2), + frame.peekvalue(1), frame.peekvalue(0)) + elif nargs == 4: + return code.fastcall_4(self.space, self, frame.peekvalue(3), + frame.peekvalue(2), frame.peekvalue(1), + frame.peekvalue(0)) + elif fast_natural_arity == -1 and nargs >= 1: + from pypy.interpreter import gateway + assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) + w_obj = frame.peekvalue(nargs-1) + args = frame.make_arguments(nargs-1) + try: + return code.funcrun_obj(self, w_obj, args) + finally: + if isinstance(args, ArgumentsFromValuestack): + args.frame = None + args = frame.make_arguments(nargs) try: return self.call_args(args) @@ -97,34 +104,6 @@ if isinstance(args, ArgumentsFromValuestack): args.frame = None - def funccall_obj_valuestack(self, w_obj, nargs, frame): # speed hack - code = self.getcode() # hook for the jit - if nargs == 0: - w_res = code.fastcall_1(self.space, self, w_obj) - if w_res is not None: - return w_res - elif nargs == 1: - w_res = code.fastcall_2(self.space, self, w_obj, frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 2: - w_res = code.fastcall_3(self.space, self, w_obj, frame.peekvalue(1), - frame.peekvalue(0)) - if w_res is not None: - return w_res - elif nargs == 3: - w_res = code.fastcall_4(self.space, self, w_obj, frame.peekvalue(2), - frame.peekvalue(1), frame.peekvalue(0)) - if w_res is not None: - return w_res - stkargs = frame.make_arguments(nargs) - args = stkargs.prepend(w_obj) - try: - return self.call_args(args) - finally: - if isinstance(stkargs, ArgumentsFromValuestack): - stkargs.frame = None - def getdict(self): if self.w_func_dict is None: self.w_func_dict = self.space.newdict() @@ -339,29 +318,29 @@ space = self.space if self.w_instance is not None: # bound method - args = args.prepend(self.w_instance) - else: - # unbound method - w_firstarg = args.firstarg() - if w_firstarg is not None and space.is_true( - space.abstract_isinstance(w_firstarg, self.w_class)): - pass # ok + return space.call_obj_args(self.w_function, self.w_instance, args) + + # unbound method + w_firstarg = args.firstarg() + if w_firstarg is not None and space.is_true( + space.abstract_isinstance(w_firstarg, self.w_class)): + pass # ok + else: + myname = self.getname(space,"") + clsdescr = self.w_class.getname(space,"") + if clsdescr: + clsdescr+=" " + if w_firstarg is None: + instdescr = "nothing" else: - myname = self.getname(space,"") - clsdescr = self.w_class.getname(space,"") - if clsdescr: - clsdescr+=" " - if w_firstarg 
is None: - instdescr = "nothing" - else: - instname = space.abstract_getclass(w_firstarg).getname(space,"") - if instname: - instname += " " - instdescr = "%sinstance" %instname - msg = ("unbound method %s() must be called with %s" - "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) - raise OperationError(space.w_TypeError, - space.wrap(msg)) + instname = space.abstract_getclass(w_firstarg).getname(space,"") + if instname: + instname += " " + instdescr = "%sinstance" %instname + msg = ("unbound method %s() must be called with %s" + "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) + raise OperationError(space.w_TypeError, + space.wrap(msg)) return space.call_args(self.w_function, args) def descr_method_get(self, w_obj, w_cls=None): Modified: pypy/dist/pypy/interpreter/gateway.py ============================================================================== --- pypy/dist/pypy/interpreter/gateway.py (original) +++ pypy/dist/pypy/interpreter/gateway.py Sun Aug 10 23:55:49 2008 @@ -479,9 +479,13 @@ return space.wrap(self.docstring) def funcrun(self, func, args): + return BuiltinCode.funcrun_obj(self, func, None, args) + + def funcrun_obj(self, func, w_obj, args): space = func.space activation = self.activation - scope_w = args.parse(func.name, self.sig, func.defs_w) + scope_w = args.parse_obj(w_obj, func.name, self.sig, + func.defs_w, self.minargs) try: w_result = activation._run(space, scope_w) except KeyboardInterrupt: @@ -495,6 +499,8 @@ raise OperationError(space.w_RuntimeError, space.wrap("internal error: " + str(e))) except DescrMismatch, e: + if w_obj is not None: + args = args.prepend(w_obj) return scope_w[0].descr_call_mismatch(space, self.descrmismatch_op, self.descr_reqcls, @@ -530,35 +536,33 @@ return w_result class BuiltinCodePassThroughArguments1(BuiltinCode): + fast_natural_arity = -1 - def funcrun(self, func, args): + def funcrun_obj(self, func, w_obj, args): space = func.space try: - w_obj, newargs = args.popfirst() - except IndexError: - return BuiltinCode.funcrun(self, func, args) - else: - try: - w_result = self.func__args__(space, w_obj, newargs) - except KeyboardInterrupt: - raise OperationError(space.w_KeyboardInterrupt, space.w_None) - except MemoryError: - raise OperationError(space.w_MemoryError, space.w_None) - except NotImplementedError, e: - raise - except RuntimeError, e: - raise OperationError(space.w_RuntimeError, - space.wrap("internal error: " + str(e))) - except DescrMismatch, e: - return args.firstarg().descr_call_mismatch(space, - self.descrmismatch_op, - self.descr_reqcls, - args) - if w_result is None: - w_result = space.w_None - return w_result + w_result = self.func__args__(space, w_obj, args) + except KeyboardInterrupt: + raise OperationError(space.w_KeyboardInterrupt, space.w_None) + except MemoryError: + raise OperationError(space.w_MemoryError, space.w_None) + except NotImplementedError, e: + raise + except RuntimeError, e: + raise OperationError(space.w_RuntimeError, + space.wrap("internal error: " + str(e))) + except DescrMismatch, e: + return args.firstarg().descr_call_mismatch(space, + self.descrmismatch_op, + self.descr_reqcls, + args.prepend(w_obj)) + if w_result is None: + w_result = space.w_None + return w_result class BuiltinCode0(BuiltinCode): + fast_natural_arity = 0 + def fastcall_0(self, space, w_func): self = hint(self, deepfreeze=True) try: @@ -575,6 +579,8 @@ return w_result class BuiltinCode1(BuiltinCode): + fast_natural_arity = 1 + def fastcall_1(self, space, w_func, w1): 
self = hint(self, deepfreeze=True) try: @@ -598,6 +604,8 @@ return w_result class BuiltinCode2(BuiltinCode): + fast_natural_arity = 2 + def fastcall_2(self, space, w_func, w1, w2): self = hint(self, deepfreeze=True) try: @@ -621,6 +629,8 @@ return w_result class BuiltinCode3(BuiltinCode): + fast_natural_arity = 3 + def fastcall_3(self, space, func, w1, w2, w3): self = hint(self, deepfreeze=True) try: @@ -644,6 +654,8 @@ return w_result class BuiltinCode4(BuiltinCode): + fast_natural_arity = 4 + def fastcall_4(self, space, func, w1, w2, w3, w4): self = hint(self, deepfreeze=True) try: Modified: pypy/dist/pypy/interpreter/pycode.py ============================================================================== --- pypy/dist/pypy/interpreter/pycode.py (original) +++ pypy/dist/pypy/interpreter/pycode.py Sun Aug 10 23:55:49 2008 @@ -162,7 +162,7 @@ def _compute_fastcall(self): # Speed hack! - self.do_fastcall = -1 + self.fast_natural_arity = -99 if not (0 <= self.co_argcount <= 4): return if self.co_flags & (CO_VARARGS | CO_VARKEYWORDS): @@ -170,59 +170,61 @@ if len(self._args_as_cellvars) > 0: return - self.do_fastcall = self.co_argcount + self.fast_natural_arity = self.co_argcount def fastcall_0(self, space, w_func): - if self.do_fastcall == 0: - frame = space.createframe(self, w_func.w_func_globals, + frame = space.createframe(self, w_func.w_func_globals, w_func.closure) - return frame.run() - return None + return frame.run() def fastcall_1(self, space, w_func, w_arg): - if self.do_fastcall == 1: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg # frame.setfastscope([w_arg]) - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg # frame.setfastscope([w_arg]) + return frame.run() def fastcall_2(self, space, w_func, w_arg1, w_arg2): - if self.do_fastcall == 2: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + return frame.run() def fastcall_3(self, space, w_func, w_arg1, w_arg2, w_arg3): - if self.do_fastcall == 3: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - frame.fastlocals_w[2] = w_arg3 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + frame.fastlocals_w[2] = w_arg3 + return frame.run() def fastcall_4(self, space, w_func, w_arg1, w_arg2, w_arg3, w_arg4): - if self.do_fastcall == 4: - frame = space.createframe(self, w_func.w_func_globals, - w_func.closure) - frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) - frame.fastlocals_w[1] = w_arg2 - frame.fastlocals_w[2] = w_arg3 - frame.fastlocals_w[3] = w_arg4 - return frame.run() - return None + frame = space.createframe(self, w_func.w_func_globals, + w_func.closure) + frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg]) + frame.fastlocals_w[1] = w_arg2 + frame.fastlocals_w[2] = w_arg3 + frame.fastlocals_w[3] = w_arg4 + return frame.run() def funcrun(self, func, args): frame = 
self.space.createframe(self, func.w_func_globals, func.closure) sig = self._signature # speed hack - args_matched = args.parse_into_scope(frame.fastlocals_w, func.name, + args_matched = args.parse_into_scope(None, frame.fastlocals_w, + func.name, + sig, func.defs_w) + frame.init_cells() + return frame.run() + + def funcrun_obj(self, func, w_obj, args): + frame = self.space.createframe(self, func.w_func_globals, + func.closure) + sig = self._signature + # speed hack + args_matched = args.parse_into_scope(w_obj, frame.fastlocals_w, + func.name, sig, func.defs_w) frame.init_cells() return frame.run() Modified: pypy/dist/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/dist/pypy/interpreter/test/test_function.py (original) +++ pypy/dist/pypy/interpreter/test/test_function.py Sun Aug 10 23:55:49 2008 @@ -84,6 +84,10 @@ assert res[0] == 23 assert res[1] == (42,) + res = func(23, *(42,)) + assert res[0] == 23 + assert res[1] == (42,) + def test_simple_kwargs(self): def func(arg1, **kwargs): return arg1, kwargs @@ -91,6 +95,10 @@ assert res[0] == 23 assert res[1] == {'value': 42} + res = func(23, **{'value': 42}) + assert res[0] == 23 + assert res[1] == {'value': 42} + def test_kwargs_sets_wrong_positional_raises(self): def func(arg1): pass @@ -146,6 +154,25 @@ return arg1, kw raises(TypeError, func, 42, **{'arg1': 23}) + def test_kwargs_bound_blind(self): + class A(object): + def func(self, **kw): + return self, kw + func = A().func + + # don't want the extra argument passing of raises + try: + func(self=23) + assert False + except TypeError: + pass + + try: + func(**{'self': 23}) + assert False + except TypeError: + pass + def test_kwargs_confusing_name(self): def func(self): # 'self' conflicts with the interp-level return self*7 # argument to call_function() @@ -177,6 +204,55 @@ assert type(f.__doc__) is unicode class AppTestMethod: + def test_simple_call(self): + class A(object): + def func(self, arg2): + return self, arg2 + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == 42 + + def test_simple_varargs(self): + class A(object): + def func(self, *args): + return self, args + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == (42,) + + res = a.func(*(42,)) + assert res[0] is a + assert res[1] == (42,) + + def test_obscure_varargs(self): + class A(object): + def func(*args): + return args + a = A() + res = a.func(42) + assert res[0] is a + assert res[1] == 42 + + res = a.func(*(42,)) + assert res[0] is a + assert res[1] == 42 + + def test_simple_kwargs(self): + class A(object): + def func(self, **kwargs): + return self, kwargs + a = A() + + res = a.func(value=42) + assert res[0] is a + assert res[1] == {'value': 42} + + res = a.func(**{'value': 42}) + assert res[0] is a + assert res[1] == {'value': 42} + def test_get(self): def func(self): return self class Object(object): pass @@ -340,3 +416,72 @@ # --- with an incompatible class w_meth5 = meth3.descr_method_get(space.wrap('hello'), space.w_str) assert space.is_w(w_meth5, w_meth3) + +class TestShortcuts(object): + + def test_fastcall(self): + space = self.space + + def f(a): + return a + code = PyCode._from_code(self.space, f.func_code) + fn = Function(self.space, code, self.space.newdict()) + + assert fn.code.fast_natural_arity == 1 + + called = [] + fastcall_1 = fn.code.fastcall_1 + def witness_fastcall_1(space, w_func, w_arg): + called.append(w_func) + return fastcall_1(space, w_func, w_arg) + + fn.code.fastcall_1 = 
witness_fastcall_1 + + w_3 = space.newint(3) + w_res = space.call_function(fn, w_3) + + assert w_res is w_3 + assert called == [fn] + + called = [] + + w_res = space.appexec([fn, w_3], """(f, x): + return f(x) + """) + + assert w_res is w_3 + assert called == [fn] + + def test_fastcall_method(self): + space = self.space + + def f(self, a): + return a + code = PyCode._from_code(self.space, f.func_code) + fn = Function(self.space, code, self.space.newdict()) + + assert fn.code.fast_natural_arity == 2 + + called = [] + fastcall_2 = fn.code.fastcall_2 + def witness_fastcall_2(space, w_func, w_arg1, w_arg2): + called.append(w_func) + return fastcall_2(space, w_func, w_arg1, w_arg2) + + fn.code.fastcall_2 = witness_fastcall_2 + + w_3 = space.newint(3) + w_res = space.appexec([fn, w_3], """(f, x): + class A(object): + m = f + y = A().m(x) + b = A().m + z = b(x) + return y is x and z is x + """) + + assert space.is_true(w_res) + assert called == [fn, fn] + + + Modified: pypy/dist/pypy/interpreter/test/test_gateway.py ============================================================================== --- pypy/dist/pypy/interpreter/test/test_gateway.py (original) +++ pypy/dist/pypy/interpreter/test/test_gateway.py Sun Aug 10 23:55:49 2008 @@ -1,4 +1,4 @@ - +from pypy.conftest import gettestobjspace from pypy.interpreter import gateway from pypy.interpreter import argument import py @@ -388,3 +388,250 @@ w_app_g_run = space.wrap(app_g_run) w_bound = space.get(w_app_g_run, w("hello"), space.w_str) assert space.eq_w(space.call_function(w_bound), w(42)) + + def test_interp2app_fastcall(self): + space = self.space + w = space.wrap + w_3 = w(3) + + def f(space): + return w_3 + app_f = gateway.interp2app_temp(f, unwrap_spec=[gateway.ObjSpace]) + w_app_f = w(app_f) + + # sanity + assert isinstance(w_app_f.code, gateway.BuiltinCode0) + + called = [] + fastcall_0 = w_app_f.code.fastcall_0 + def witness_fastcall_0(space, w_func): + called.append(w_func) + return fastcall_0(space, w_func) + + w_app_f.code.fastcall_0 = witness_fastcall_0 + + w_3 = space.newint(3) + w_res = space.call_function(w_app_f) + + assert w_res is w_3 + assert called == [w_app_f] + + called = [] + + w_res = space.appexec([w_app_f], """(f): + return f() + """) + + assert w_res is w_3 + assert called == [w_app_f] + + def test_interp2app_fastcall_method(self): + space = self.space + w = space.wrap + w_3 = w(3) + + def f(space, w_self, w_x): + return w_x + app_f = gateway.interp2app_temp(f, unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.W_Root]) + w_app_f = w(app_f) + + # sanity + assert isinstance(w_app_f.code, gateway.BuiltinCode2) + + called = [] + fastcall_2 = w_app_f.code.fastcall_2 + def witness_fastcall_2(space, w_func, w_a, w_b): + called.append(w_func) + return fastcall_2(space, w_func, w_a, w_b) + + w_app_f.code.fastcall_2 = witness_fastcall_2 + + w_res = space.appexec([w_app_f, w_3], """(f, x): + class A(object): + m = f # not a builtin function, so works as method + y = A().m(x) + b = A().m + z = b(x) + return y is x and z is x + """) + + assert space.is_true(w_res) + assert called == [w_app_f, w_app_f] + + def test_plain(self): + space = self.space + + def g(space, w_a, w_x): + return space.newtuple([space.wrap('g'), w_a, w_x]) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.W_Root])) + + args = argument.Arguments(space, [space.wrap(-1), space.wrap(0)]) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', -1, 0)))) + + 
w_self = space.wrap('self') + + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + + args3 = argument.Arguments(space, [space.wrap(3)]) + w_res = space.call_obj_args(w_g, w_self, args3) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) + + +class TestPassThroughArguments: + + def test_pass_trough_arguments0(self): + space = self.space + + called = [] + + def f(space, __args__): + called.append(__args__) + a_w, _ = __args__.unpack() + return space.newtuple([space.wrap('f')]+a_w) + + w_f = space.wrap(gateway.interp2app_temp(f, + unwrap_spec=[gateway.ObjSpace, + gateway.Arguments])) + + args = argument.Arguments(space, [space.wrap(7)]) + + w_res = space.call_args(w_f, args) + assert space.is_true(space.eq(w_res, space.wrap(('f', 7)))) + + # white-box check for opt + assert called[0] is args + + def test_pass_trough_arguments1(self): + space = self.space + + called = [] + + def g(space, w_self, __args__): + called.append(__args__) + a_w, _ = __args__.unpack() + return space.newtuple([space.wrap('g'), w_self, ]+a_w) + + w_g = space.wrap(gateway.interp2app_temp(g, + unwrap_spec=[gateway.ObjSpace, + gateway.W_Root, + gateway.Arguments])) + + old_funcrun = w_g.code.funcrun + def funcrun_witness(func, args): + called.append('funcrun') + return old_funcrun(func, args) + + w_g.code.funcrun = funcrun_witness + + w_self = space.wrap('self') + + args3 = argument.Arguments(space, [space.wrap(3)]) + w_res = space.call_obj_args(w_g, w_self, args3) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 3)))) + # white-box check for opt + assert len(called) == 1 + assert called[0] is args3 + + called = [] + args0 = argument.Arguments(space, [space.wrap(0)]) + args = args0.prepend(w_self) + + w_res = space.call_args(w_g, args) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) + # no opt in this case + assert len(called) == 2 + assert called[0] == 'funcrun' + called = [] + + # higher level interfaces + + w_res = space.call_function(w_g, w_self) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self')))) + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) + called = [] + + w_res = space.appexec([w_g], """(g): + return g('self', 11) + """) + assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 11)))) + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) + called = [] + + w_res = space.appexec([w_g], """(g): + class A(object): + m = g # not a builtin function, so works as method + d = {'A': A} + exec \"\"\" +# own compiler +a = A() +y = a.m(33) +\"\"\" in d + return d['y'] == ('g', d['a'], 33) + """) + assert space.is_true(w_res) + assert len(called) == 1 + assert isinstance(called[0], argument.AbstractArguments) + +class TestPassThroughArguments_CALL_METHOD(TestPassThroughArguments): + + def setup_class(cls): + space = gettestobjspace(usemodules=('_stackless',), **{ + "objspace.opcodes.CALL_METHOD": True + }) + cls.space = space + +class AppTestKeywordsToBuiltinSanity(object): + + def test_type(self): + class X(object): + def __init__(self, **kw): + pass + clash = type.__call__.func_code.co_varnames[0] + + X(**{clash: 33}) + type.__call__(X, **{clash: 33}) + + def test_object_new(self): + class X(object): + def __init__(self, **kw): + pass + clash = object.__new__.func_code.co_varnames[0] + + X(**{clash: 33}) + object.__new__(X, **{clash: 33}) + 
+ + def test_dict_new(self): + clash = dict.__new__.func_code.co_varnames[0] + + dict(**{clash: 33}) + dict.__new__(dict, **{clash: 33}) + + def test_dict_init(self): + d = {} + clash = dict.__init__.func_code.co_varnames[0] + + d.__init__(**{clash: 33}) + dict.__init__(d, **{clash: 33}) + + def test_dict_update(self): + d = {} + clash = dict.update.func_code.co_varnames[0] + + d.update(**{clash: 33}) + dict.update(d, **{clash: 33}) + Modified: pypy/dist/pypy/interpreter/test/test_objspace.py ============================================================================== --- pypy/dist/pypy/interpreter/test/test_objspace.py (original) +++ pypy/dist/pypy/interpreter/test/test_objspace.py Sun Aug 10 23:55:49 2008 @@ -180,6 +180,36 @@ w_obj = space.wrap(-12) space.raises_w(space.w_ValueError, space.r_ulonglong_w, w_obj) + def test_call_obj_args(self): + from pypy.interpreter.argument import Arguments + + space = self.space + + w_f = space.appexec([], """(): + def f(x, y): + return (x, y) + return f +""") + + w_a = space.appexec([], """(): + class A(object): + def __call__(self, x): + return x + return A() +""") + + w_9 = space.wrap(9) + w_1 = space.wrap(1) + + w_res = space.call_obj_args(w_f, w_9, Arguments(space, [w_1])) + + w_x, w_y = space.unpacktuple(w_res, 2) + assert w_x is w_9 + assert w_y is w_1 + + w_res = space.call_obj_args(w_a, w_9, Arguments(space, [])) + assert w_res is w_9 + class TestModuleMinimal: def test_sys_exists(self): Modified: pypy/dist/pypy/module/__builtin__/test/test_builtin.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/test/test_builtin.py (original) +++ pypy/dist/pypy/module/__builtin__/test/test_builtin.py Sun Aug 10 23:55:49 2008 @@ -503,6 +503,28 @@ s = """ """ # XXX write this test! 
+ def test_shadow_case_bound_method(self): + s = """def test(l): + n = len(l) + old_len = len + class A(object): + x = 5 + def length(self, o): + return self.x*old_len(o) + import __builtin__ + __builtin__.len = A().length + try: + m = len(l) + finally: + __builtin__.len = old_len + return n+m + """ + ns = {} + exec s in ns + res = ns["test"]([2,3,4]) + assert res == 18 + + class TestInternal: def setup_method(self,method): Modified: pypy/dist/pypy/module/operator/__init__.py ============================================================================== --- pypy/dist/pypy/module/operator/__init__.py (original) +++ pypy/dist/pypy/module/operator/__init__.py Sun Aug 10 23:55:49 2008 @@ -21,15 +21,16 @@ 'countOf', 'delslice', 'getslice', 'indexOf', 'isMappingType', 'isNumberType', 'isSequenceType', 'repeat', 'setslice', + 'attrgetter', 'itemgetter' ] for name in app_names: appleveldefs[name] = 'app_operator.%s' % name - interp_names = ['index', 'abs', 'add', 'and_', 'attrgetter', + interp_names = ['index', 'abs', 'add', 'and_', 'concat', 'contains', 'delitem', 'div', 'eq', 'floordiv', 'ge', 'getitem', 'gt', 'inv', - 'invert', 'is_', 'is_not', 'isCallable', 'itemgetter', + 'invert', 'is_', 'is_not', 'isCallable', 'le', 'lshift', 'lt', 'mod', 'mul', 'ne', 'neg', 'not_', 'or_', 'pos', 'pow', 'rshift', 'setitem', 'sequenceIncludes', Modified: pypy/dist/pypy/module/operator/app_operator.py ============================================================================== --- pypy/dist/pypy/module/operator/app_operator.py (original) +++ pypy/dist/pypy/module/operator/app_operator.py Sun Aug 10 23:55:49 2008 @@ -63,3 +63,20 @@ a[b:c] = d __setslice__ = setslice +class attrgetter(object): + + def __init__(self, name): + self.name = name + + def __call__(self, obj): + return getattr(obj, self.name) + +class itemgetter(object): + + def __init__(self, index): + self.index = index + + def __call__(self, obj): + return obj[self.index] + + Modified: pypy/dist/pypy/module/operator/interp_operator.py ============================================================================== --- pypy/dist/pypy/module/operator/interp_operator.py (original) +++ pypy/dist/pypy/module/operator/interp_operator.py Sun Aug 10 23:55:49 2008 @@ -157,52 +157,3 @@ def xor(space, w_a, w_b): 'xor(a, b) -- Same as a ^ b.' 
return space.xor(w_a, w_b) - -# ____________________________________________________________ -# attrgetter and itergetter - -from pypy.interpreter import eval, function -from pypy.interpreter.error import OperationError - -class SimpleClosureBuiltinFunction(function.BuiltinFunction): - - def __init__(self, space, code, w_index): - assert isinstance(code, SimpleClosureCode) - function.Function.__init__(self, space, code) - self.w_index = w_index - - -class SimpleClosureCode(eval.Code): - sig = (['obj'], None, None) - - def __init__(self, co_name, is_attrgetter): - eval.Code.__init__(self, co_name) - self.is_attrgetter = is_attrgetter - - def signature(self): - return self.sig - - def funcrun(self, func, args): - space = func.space - [w_obj] = args.parse(func.name, self.sig) - return self.fastcall_1(space, func, w_obj) - - def fastcall_1(self, space, func, w_obj): - if not isinstance(func, SimpleClosureBuiltinFunction): - raise OperationError(space.w_TypeError, space.wrap("bad call")) - w_index = func.w_index - if self.is_attrgetter: - return space.getattr(w_obj, w_index) - else: - return space.getitem(w_obj, w_index) - -attrgetter_code = SimpleClosureCode("attrgetter", is_attrgetter=True) -itemgetter_code = SimpleClosureCode("itemgetter", is_attrgetter=False) - -def attrgetter(space, w_attr): - func = SimpleClosureBuiltinFunction(space, attrgetter_code, w_attr) - return space.wrap(func) - -def itemgetter(space, w_idx): - func = SimpleClosureBuiltinFunction(space, itemgetter_code, w_idx) - return space.wrap(func) Modified: pypy/dist/pypy/module/pypyjit/portal.py ============================================================================== --- pypy/dist/pypy/module/pypyjit/portal.py (original) +++ pypy/dist/pypy/module/pypyjit/portal.py Sun Aug 10 23:55:49 2008 @@ -111,8 +111,6 @@ # self.seepath(pypy.interpreter.pyframe.PyFrame.CALL_FUNCTION, pypy.interpreter.function.Function.funccall_valuestack) - self.seepath(pypy.interpreter.pyframe.PyFrame.CALL_FUNCTION, - pypy.interpreter.function.Function.funccall_obj_valuestack) Modified: pypy/dist/pypy/module/thread/os_local.py ============================================================================== --- pypy/dist/pypy/module/thread/os_local.py (original) +++ pypy/dist/pypy/module/thread/os_local.py Sun Aug 10 23:55:49 2008 @@ -29,7 +29,7 @@ w_self = space.wrap(self) w_type = space.type(w_self) w_init = space.getattr(w_type, space.wrap("__init__")) - space.call_args(w_init, self.initargs.prepend(w_self)) + space.call_obj_args(w_init, w_self, self.initargs) except: # failed, forget w_dict and propagate the exception del self.dicts[ident] Modified: pypy/dist/pypy/objspace/descroperation.py ============================================================================== --- pypy/dist/pypy/objspace/descroperation.py (original) +++ pypy/dist/pypy/objspace/descroperation.py Sun Aug 10 23:55:49 2008 @@ -75,7 +75,7 @@ descr = space.interpclass_w(w_descr) # a special case for performance and to avoid infinite recursion if type(descr) is Function: - return descr.call_args(args.prepend(w_obj)) + return descr.call_obj_args(w_obj, args) else: w_impl = space.get(w_descr, w_obj) return space.call_args(w_impl, args) Modified: pypy/dist/pypy/objspace/std/objspace.py ============================================================================== --- pypy/dist/pypy/objspace/std/objspace.py (original) +++ pypy/dist/pypy/objspace/std/objspace.py Sun Aug 10 23:55:49 2008 @@ -2,8 +2,9 @@ from pypy.interpreter.baseobjspace import ObjSpace, Wrappable from 
pypy.interpreter.error import OperationError, debug_print from pypy.interpreter.typedef import get_unique_interplevel_subclass -from pypy.interpreter.argument import Arguments +from pypy.interpreter import argument from pypy.interpreter import pyframe +from pypy.interpreter import function from pypy.interpreter.pyopcode import unrolling_compare_dispatch_table, \ BytecodeCorruption from pypy.rlib.objectmodel import instantiate @@ -147,13 +148,23 @@ nargs = oparg & 0xff w_function = w_value try: - w_result = f.space.call_valuestack(w_function, nargs, f) + w_result = f.call_likely_builtin(w_function, nargs) # XXX XXX fix the problem of resume points! #rstack.resume_point("CALL_FUNCTION", f, nargs, returns=w_result) finally: f.dropvalues(nargs) f.pushvalue(w_result) + def call_likely_builtin(f, w_function, nargs): + if isinstance(w_function, function.Function): + return w_function.funccall_valuestack(nargs, f) + args = f.make_arguments(nargs) + try: + return f.space.call_args(w_function, args) + finally: + if isinstance(args, argument.ArgumentsFromValuestack): + args.frame = None + if self.config.objspace.opcodes.CALL_METHOD: # def LOOKUP_METHOD(...): from pypy.objspace.std.callmethod import LOOKUP_METHOD Modified: pypy/dist/pypy/objspace/std/proxyobject.py ============================================================================== --- pypy/dist/pypy/objspace/std/proxyobject.py (original) +++ pypy/dist/pypy/objspace/std/proxyobject.py Sun Aug 10 23:55:49 2008 @@ -5,7 +5,7 @@ from pypy.objspace.std.objspace import * from pypy.objspace.std.proxy_helpers import register_type from pypy.interpreter.error import OperationError -from pypy.interpreter import baseobjspace +from pypy.interpreter import baseobjspace, argument #class W_Transparent(W_Object): # def __init__(self, w_controller): @@ -22,8 +22,10 @@ self.space = space def descr_call_mismatch(self, space, name, reqcls, args): - _, args = args.popfirst() - args = args.prepend(space.wrap(name)) + args_w, kwds_w = args.unpack() + args_w = args_w[:] + args_w[0] = space.wrap(name) + args = argument.Arguments(space, args_w, kwds_w) return space.call_args(self.w_controller, args) def getclass(self, space): Modified: pypy/dist/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/dist/pypy/objspace/std/typeobject.py (original) +++ pypy/dist/pypy/objspace/std/typeobject.py Sun Aug 10 23:55:49 2008 @@ -499,7 +499,7 @@ return space.type(w_obj) # invoke the __new__ of the type w_newfunc = space.getattr(w_type, space.wrap('__new__')) - w_newobject = space.call_args(w_newfunc, __args__.prepend(w_type)) + w_newobject = space.call_obj_args(w_newfunc, w_type, __args__) # maybe invoke the __init__ of the type if space.is_true(space.isinstance(w_newobject, w_type)): w_descr = space.lookup(w_newobject, '__init__') From pedronis at codespeak.net Sun Aug 10 23:58:14 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 10 Aug 2008 23:58:14 +0200 (CEST) Subject: [pypy-svn] r57184 - pypy/branch/garden-call-code Message-ID: <20080810215814.0CAE916A2DE@codespeak.net> Author: pedronis Date: Sun Aug 10 23:58:13 2008 New Revision: 57184 Removed: pypy/branch/garden-call-code/ Log: remove merged branch From cami at codespeak.net Mon Aug 11 11:42:09 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Mon, 11 Aug 2008 11:42:09 +0200 (CEST) Subject: [pypy-svn] r57190 - pypy/dist/pypy/lang/gameboy/test Message-ID: <20080811094209.9C09116A290@codespeak.net> Author: cami Date: 
Mon Aug 11 11:42:07 2008 New Revision: 57190 Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py Log: adapted video_test to the type in Interrupt added missing imports to vide Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_video.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_video.py Mon Aug 11 11:42:07 2008 @@ -1,6 +1,8 @@ -from pypy.lang.gameboy.video import * + +from pypy.lang.gameboy import constants from pypy.lang.gameboy.interrupt import Interrupt -import pypy.lang.gameboy.constants +from pypy.lang.gameboy.video import Video +from pypy.lang.gameboy.video import VideoDriver import py class Memory(object): @@ -306,7 +308,7 @@ def test_emulate_v_vblank_1(): video = get_video() - video.interrupt.set_fnterrupt_flag(0) + video.interrupt.set_interrupt_flag(0) video.stat = 0xFE video.vblank = True video.cycles = 0 @@ -317,7 +319,7 @@ assert video.interrupt.vblank.is_pending() assert video.interrupt.lcd.is_pending() - video.interrupt.set_fnterrupt_flag(0) + video.interrupt.set_interrupt_flag(0) video.stat = 0x00 video.vblank = True assert not video.interrupt.vblank.is_pending() @@ -331,7 +333,7 @@ def test_emulate_v_vblank_2(): video = get_video() - video.interrupt.set_fnterrupt_flag(0) + video.interrupt.set_interrupt_flag(0) video.stat = 0x2D video.vblank = False video.cycles = 0 @@ -343,7 +345,7 @@ assert not video.interrupt.vblank.is_pending() assert video.interrupt.lcd.is_pending() - video.interrupt.set_fnterrupt_flag(0) + video.interrupt.set_interrupt_flag(0) video.cycles = 0 video.stat = 0xFD video.emulate_vblank() From cami at codespeak.net Mon Aug 11 11:46:38 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Mon, 11 Aug 2008 11:46:38 +0200 (CEST) Subject: [pypy-svn] r57191 - pypy/dist/pypy/lang/gameboy Message-ID: <20080811094638.C6080168522@codespeak.net> Author: cami Date: Mon Aug 11 11:46:35 2008 New Revision: 57191 Modified: pypy/dist/pypy/lang/gameboy/cpu.py Log: remove the useless debug module import in cpu Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Mon Aug 11 11:46:35 2008 @@ -3,10 +3,6 @@ from pypy.lang.gameboy.ram import * from pypy.lang.gameboy.interrupt import * -from pypy.rlib.objectmodel import we_are_translated -if not we_are_translated(): - from pypy.lang.gameboy.debug import * - # --------------------------------------------------------------------------- def process_2_complement(value): From cami at codespeak.net Mon Aug 11 11:55:18 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Mon, 11 Aug 2008 11:55:18 +0200 (CEST) Subject: [pypy-svn] r57192 - pypy/dist/pypy/lang/gameboy/test Message-ID: <20080811095518.1F11816A288@codespeak.net> Author: cami Date: Mon Aug 11 11:55:17 2008 New Revision: 57192 Modified: pypy/dist/pypy/lang/gameboy/test/test_gameboy_implementaton.py Log: added import faile check for lib sdl for the gameboy_implementation test Modified: pypy/dist/pypy/lang/gameboy/test/test_gameboy_implementaton.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_gameboy_implementaton.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_gameboy_implementaton.py Mon Aug 11 11:55:17 2008 @@ -1,10 +1,14 @@ -import py -from 
pypy.lang.gameboy.gameboy_implementation import * -from pypy.lang.gameboy import constants - import py, sys from pypy import conftest +from pypy.lang.gameboy import constants + +# check if the lib-sdl import fails here +try: + from pypy.lang.gameboy.gameboy_implementation import * +except ImportError: + py.test.skip("lib sdl is not installed") + # # This test file is skipped unless run with "py.test --view". # If it is run as "py.test --view -s", then it interactively asks @@ -15,6 +19,7 @@ from AppKit import NSApplication NSApplication.sharedApplication() + class TestGameBoyImplementation(object): def setup_method(self, meth): From arigo at codespeak.net Mon Aug 11 12:47:00 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 11 Aug 2008 12:47:00 +0200 (CEST) Subject: [pypy-svn] r57193 - in pypy/dist/pypy: interpreter interpreter/test module/__builtin__ module/__builtin__/test objspace/flow objspace/flow/test Message-ID: <20080811104700.861A016A24C@codespeak.net> Author: arigo Date: Mon Aug 11 12:46:58 2008 New Revision: 57193 Added: pypy/dist/pypy/module/__builtin__/abstractinst.py - copied unchanged from r57133, pypy/branch/isinstance-refactor/pypy/module/__builtin__/abstractinst.py pypy/dist/pypy/module/__builtin__/test/test_abstractinst.py - copied unchanged from r57133, pypy/branch/isinstance-refactor/pypy/module/__builtin__/test/test_abstractinst.py Modified: pypy/dist/pypy/interpreter/baseobjspace.py pypy/dist/pypy/interpreter/error.py pypy/dist/pypy/interpreter/function.py pypy/dist/pypy/interpreter/test/test_function.py pypy/dist/pypy/module/__builtin__/__init__.py pypy/dist/pypy/module/__builtin__/interp_classobj.py pypy/dist/pypy/module/__builtin__/operation.py pypy/dist/pypy/module/__builtin__/test/test_classobj.py pypy/dist/pypy/objspace/flow/objspace.py pypy/dist/pypy/objspace/flow/test/test_objspace.py Log: Merge the isinstance-refactor branch: * merge the two variants of "abstract isinstance" checking. * add tests for, hopefully, all the various obscure cases of isinstance() and issubclass() * implement them in a new module, abstractinst.py, following the CPython logic closely * the BaseObjSpace class only provides minimal implementations by default now as space.abstract_isxxx_w(), but the __buitin__ module patches the space to install the full version. A bit hackish but it looks less messy overall - no flow space hacks needed, and no need for strange imports from BaseObjSpace to pypy.module.__builtin__.interp_classobj. * the overzealous test compliance hack is gone, so I guess we'll need to do a small edit in one of CPython's tests. * minor speed-up in old-style instances' __getattribute__(). * one more test for the flow space. 
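A rough standalone sketch of the kind of fallback the log message above means by "following the CPython logic closely": try the plain type-based check first, then walk __class__ and __bases__ for objects that merely look like classes. The helper names here are invented for illustration and this is not the code that was checked in; the real interp-level implementation lives in the new abstractinst.py (copied unchanged from the branch, so its body does not appear in this diff) and handles tuples of classes, error messages and more corner cases.

def _abstract_issubclass(cls1, cls2):
    # Plain new-style check first; fall back to walking __bases__
    # recursively for class-like objects (old-style classes, proxies).
    try:
        return issubclass(cls1, cls2)
    except TypeError:
        pass
    if cls1 is cls2:
        return True
    for base in getattr(cls1, '__bases__', ()):
        if _abstract_issubclass(base, cls2):
            return True
    return False

def _abstract_isinstance(obj, cls):
    # isinstance() first; then fall back to the __class__ attribute,
    # which covers instances whose __class__ differs from their type.
    try:
        if isinstance(obj, cls):
            return True
    except TypeError:
        pass
    objcls = getattr(obj, '__class__', None)
    return objcls is not None and _abstract_issubclass(objcls, cls)
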
Modified: pypy/dist/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/dist/pypy/interpreter/baseobjspace.py (original) +++ pypy/dist/pypy/interpreter/baseobjspace.py Mon Aug 11 12:46:58 2008 @@ -654,16 +654,13 @@ def exception_match(self, w_exc_type, w_check_class): """Checks if the given exception type matches 'w_check_class'.""" if self.is_w(w_exc_type, w_check_class): - return True - if self.is_true(self.abstract_issubclass(w_exc_type, w_check_class)): - return True - - if self.is_true(self.isinstance(w_check_class, self.w_tuple)): - exclst_w = self.unpacktuple(w_check_class) - for w_e in exclst_w: - if self.exception_match(w_exc_type, w_e): - return True - return False + return True # fast path (also here to handle string exceptions) + try: + return self.abstract_issubclass_w(w_exc_type, w_check_class) + except OperationError, e: + if e.match(self, self.w_TypeError): # string exceptions maybe + return False + raise def call_obj_args(self, w_callable, w_obj, args): if not self.config.objspace.disable_call_speedhacks: @@ -690,8 +687,8 @@ func = w_func.w_function if isinstance(func, Function): return func.funccall(w_inst, *args_w) - elif args_w and self.is_true( - self.abstract_isinstance(args_w[0], w_func.w_class)): + elif args_w and ( + self.abstract_isinstance_w(args_w[0], w_func.w_class)): w_func = w_func.w_function if isinstance(w_func, Function): @@ -713,9 +710,9 @@ # reuse callable stack place for w_inst frame.settopvalue(w_inst, nargs) nargs += 1 - elif nargs > 0 and self.is_true( - self.abstract_isinstance(frame.peekvalue(nargs-1), # :-( - w_func.w_class)): + elif nargs > 0 and ( + self.abstract_isinstance_w(frame.peekvalue(nargs-1), # :-( + w_func.w_class)): w_func = w_func.w_function if isinstance(w_func, Function): @@ -767,61 +764,34 @@ w_objtype = self.type(w_obj) return self.issubtype(w_objtype, w_type) - def abstract_issubclass(self, w_obj, w_cls, failhard=False): - try: - return self.issubtype(w_obj, w_cls) - except OperationError, e: - if not e.match(self, self.w_TypeError): - raise - try: - self.getattr(w_cls, self.wrap('__bases__')) # type sanity check - return self.recursive_issubclass(w_obj, w_cls) - except OperationError, e: - if failhard or not (e.match(self, self.w_TypeError) or - e.match(self, self.w_AttributeError)): - raise - else: - return self.w_False - - def recursive_issubclass(self, w_obj, w_cls): - if self.is_w(w_obj, w_cls): - return self.w_True - for w_base in self.unpackiterable(self.getattr(w_obj, - self.wrap('__bases__'))): - if self.is_true(self.recursive_issubclass(w_base, w_cls)): - return self.w_True - return self.w_False - - def abstract_isinstance(self, w_obj, w_cls): - try: - return self.isinstance(w_obj, w_cls) - except OperationError, e: - if not e.match(self, self.w_TypeError): - raise - try: - w_objcls = self.getattr(w_obj, self.wrap('__class__')) - return self.abstract_issubclass(w_objcls, w_cls) - except OperationError, e: - if not (e.match(self, self.w_TypeError) or - e.match(self, self.w_AttributeError)): - raise - return self.w_False - - def abstract_isclass(self, w_obj): - if self.is_true(self.isinstance(w_obj, self.w_type)): - return self.w_True - if self.findattr(w_obj, self.wrap('__bases__')) is not None: - return self.w_True - else: - return self.w_False + def abstract_issubclass_w(self, w_cls1, w_cls2): + # Equivalent to 'issubclass(cls1, cls2)'. The code below only works + # for the simple case (new-style class, new-style class). 
+ # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.is_true(self.issubtype(w_cls1, w_cls2)) + + def abstract_isinstance_w(self, w_obj, w_cls): + # Equivalent to 'isinstance(obj, cls)'. The code below only works + # for the simple case (new-style instance, new-style class). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.is_true(self.isinstance(w_obj, w_cls)) + + def abstract_isclass_w(self, w_obj): + # Equivalent to 'isinstance(obj, type)'. The code below only works + # for the simple case (new-style instance without special stuff). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.is_true(self.isinstance(w_obj, self.w_type)) def abstract_getclass(self, w_obj): - try: - return self.getattr(w_obj, self.wrap('__class__')) - except OperationError, e: - if e.match(self, self.w_TypeError) or e.match(self, self.w_AttributeError): - return self.type(w_obj) - raise + # Equivalent to 'obj.__class__'. The code below only works + # for the simple case (new-style instance without special stuff). + # This method is patched with the full logic by the __builtin__ + # module when it is loaded. + return self.type(w_obj) + def eval(self, expression, w_globals, w_locals): "NOT_RPYTHON: For internal debugging." Modified: pypy/dist/pypy/interpreter/error.py ============================================================================== --- pypy/dist/pypy/interpreter/error.py (original) +++ pypy/dist/pypy/interpreter/error.py Mon Aug 11 12:46:58 2008 @@ -151,15 +151,14 @@ while space.is_true(space.isinstance(w_type, space.w_tuple)): w_type = space.getitem(w_type, space.wrap(0)) - if space.is_true(space.abstract_isclass(w_type)): + if space.abstract_isclass_w(w_type): if space.is_w(w_value, space.w_None): # raise Type: we assume we have to instantiate Type w_value = space.call_function(w_type) w_type = space.abstract_getclass(w_value) else: w_valuetype = space.abstract_getclass(w_value) - if space.is_true(space.abstract_issubclass(w_valuetype, - w_type)): + if space.abstract_issubclass_w(w_valuetype, w_type): # raise Type, Instance: let etype be the exact type of value w_type = w_valuetype else: Modified: pypy/dist/pypy/interpreter/function.py ============================================================================== --- pypy/dist/pypy/interpreter/function.py (original) +++ pypy/dist/pypy/interpreter/function.py Mon Aug 11 12:46:58 2008 @@ -322,8 +322,8 @@ # unbound method w_firstarg = args.firstarg() - if w_firstarg is not None and space.is_true( - space.abstract_isinstance(w_firstarg, self.w_class)): + if w_firstarg is not None and ( + space.abstract_isinstance_w(w_firstarg, self.w_class)): pass # ok else: myname = self.getname(space,"") @@ -351,7 +351,7 @@ # only allow binding to a more specific class than before if (w_cls is not None and not space.is_w(w_cls, space.w_None) and - not space.is_true(space.abstract_issubclass(w_cls, self.w_class))): + not space.abstract_issubclass_w(w_cls, self.w_class)): return space.wrap(self) # subclass test failed else: return descr_function_get(space, self.w_function, w_obj, w_cls) Modified: pypy/dist/pypy/interpreter/test/test_function.py ============================================================================== --- pypy/dist/pypy/interpreter/test/test_function.py (original) +++ pypy/dist/pypy/interpreter/test/test_function.py Mon Aug 11 12:46:58 2008 @@ -356,6 +356,46 @@ __metaclass__ = 
A().foo assert Fun[:2] == ('Fun', ()) + def test_unbound_abstract_typecheck(self): + import new + def f(*args): + return args + m = new.instancemethod(f, None, "foobar") + raises(TypeError, m) + raises(TypeError, m, None) + raises(TypeError, m, "egg") + + m = new.instancemethod(f, None, (str, int)) # really obscure... + assert m(4) == (4,) + assert m("uh") == ("uh",) + raises(TypeError, m, []) + + class MyBaseInst(object): + pass + class MyInst(MyBaseInst): + def __init__(self, myclass): + self.myclass = myclass + def __class__(self): + if self.myclass is None: + raise AttributeError + return self.myclass + __class__ = property(__class__) + class MyClass(object): + pass + BBase = MyClass() + BSub1 = MyClass() + BSub2 = MyClass() + BBase.__bases__ = () + BSub1.__bases__ = (BBase,) + BSub2.__bases__ = (BBase,) + x = MyInst(BSub1) + m = new.instancemethod(f, None, BSub1) + assert m(x) == (x,) + raises(TypeError, m, MyInst(BBase)) + raises(TypeError, m, MyInst(BSub2)) + raises(TypeError, m, MyInst(None)) + raises(TypeError, m, MyInst(42)) + class TestMethod: def setup_method(self, method): Modified: pypy/dist/pypy/module/__builtin__/__init__.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/__init__.py (original) +++ pypy/dist/pypy/module/__builtin__/__init__.py Mon Aug 11 12:46:58 2008 @@ -87,8 +87,8 @@ 'coerce' : 'operation.coerce', 'divmod' : 'operation.divmod', '_issubtype' : 'operation._issubtype', - 'issubclass' : 'operation.issubclass', - 'isinstance' : 'operation.isinstance', + 'issubclass' : 'abstractinst.app_issubclass', + 'isinstance' : 'abstractinst.app_isinstance', 'getattr' : 'operation.getattr', 'setattr' : 'operation.setattr', 'delattr' : 'operation.delattr', @@ -151,6 +151,12 @@ # xxx hide the installer space.delitem(self.w_dict, space.wrap(name)) del self.loaders[name] + # install the more general version of isinstance() & co. 
in the space + from pypy.module.__builtin__ import abstractinst as ab + space.abstract_isinstance_w = ab.abstract_isinstance_w.__get__(space) + space.abstract_issubclass_w = ab.abstract_issubclass_w.__get__(space) + space.abstract_isclass_w = ab.abstract_isclass_w.__get__(space) + space.abstract_getclass = ab.abstract_getclass.__get__(space) def startup(self, space): # install zipimport hook if --withmod-zipimport is used Modified: pypy/dist/pypy/module/__builtin__/interp_classobj.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/interp_classobj.py (original) +++ pypy/dist/pypy/module/__builtin__/interp_classobj.py Mon Aug 11 12:46:58 2008 @@ -85,6 +85,16 @@ space.wrap("__bases__ items must be classes")) self.bases_w = bases_w + def is_subclass_of(self, other): + assert isinstance(other, W_ClassObject) + if self is other: + return True + for base in self.bases_w: + assert isinstance(base, W_ClassObject) + if base.is_subclass_of(other): + return True + return False + def lookup(self, space, w_attr): # returns w_value or interplevel None w_result = space.finditem(self.w_dict, w_attr) @@ -225,6 +235,7 @@ except OperationError, e: if e.match(space, space.w_AttributeError): return space.w_NotImplemented + raise else: if w_meth is None: return space.w_NotImplemented @@ -314,11 +325,6 @@ def getattr(self, space, w_name, exc=True): - name = space.str_w(w_name) - if name == "__dict__": - return self.w_dict - elif name == "__class__": - return self.w_class w_result = space.finditem(self.w_dict, w_name) if w_result is not None: return w_result @@ -328,7 +334,7 @@ raise OperationError( space.w_AttributeError, space.wrap("%s instance has no attribute %s" % ( - self.w_class.name, name))) + self.w_class.name, space.str_w(w_name)))) else: return None w_descr_get = space.lookup(w_value, '__get__') @@ -337,7 +343,12 @@ return space.call_function(w_descr_get, w_value, self, self.w_class) def descr_getattribute(self, space, w_attr): - #import pdb; pdb.set_trace() + name = space.str_w(w_attr) + if len(name) >= 8 and name[0] == '_': + if name == "__dict__": + return self.w_dict + elif name == "__class__": + return self.w_class try: return self.getattr(space, w_attr) except OperationError, e: Modified: pypy/dist/pypy/module/__builtin__/operation.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/operation.py (original) +++ pypy/dist/pypy/module/__builtin__/operation.py Mon Aug 11 12:46:58 2008 @@ -220,95 +220,3 @@ function). 
Note that classes are callable.""" return space.callable(w_object) - - -def _recursive_issubclass(space, w_cls, w_klass_or_tuple): # returns interp-level bool - if space.is_w(w_cls, w_klass_or_tuple): - return True - try: - w_bases = space.getattr(w_cls, space.wrap("__bases__")) - except OperationError, e: - if e.match(space, space.w_AttributeError): - return False - else: - raise - w_iterator = space.iter(w_bases) - while True: - try: - w_base = space.next(w_iterator) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise - break - if _recursive_issubclass(space, w_base, w_klass_or_tuple): - return True - return False - -def _issubclass(space, w_cls, w_klass_or_tuple, check_cls, depth): # returns interp-level bool - if depth == 0: - # XXX overzealous test compliance hack - raise OperationError(space.w_RuntimeError, space.wrap("maximum recursion depth exceeded")) - if space.is_true(space.issubtype(space.type(w_klass_or_tuple), space.w_tuple)): - w_iter = space.iter(w_klass_or_tuple) - while True: - try: - w_klass = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise - break - if _issubclass(space, w_cls, w_klass, True, depth - 1): - return True - return False - - try: - return space.is_true(space.issubtype(w_cls, w_klass_or_tuple)) - except OperationError, e: - if e.match(space, space.w_TypeError): - w_bases = space.wrap('__bases__') - if check_cls: - try: - space.getattr(w_cls, w_bases) - except OperationError, e: - if not e.match(space, space.w_AttributeError): - raise - raise OperationError(space.w_TypeError, space.wrap('arg 1 must be a class or type')) - try: - space.getattr(w_klass_or_tuple, w_bases) - except OperationError, e: - if not e.match(space, space.w_AttributeError): - raise - raise OperationError(space.w_TypeError, space.wrap('arg 2 must be a class or type or a tuple thereof')) - return _recursive_issubclass(space, w_cls, w_klass_or_tuple) - else: - raise - - -def issubclass(space, w_cls, w_klass_or_tuple): - """Check whether a class 'cls' is a subclass (i.e., a derived class) of -another class. When using a tuple as the second argument, check whether -'cls' is a subclass of any of the classes listed in the tuple.""" - return space.wrap(issubclass_w(space, w_cls, w_klass_or_tuple)) - -def issubclass_w(space, w_cls, w_klass_or_tuple): - return _issubclass(space, w_cls, w_klass_or_tuple, True, space.sys.recursionlimit) - - -def isinstance(space, w_obj, w_klass_or_tuple): - """Check whether an object is an instance of a class (or of a subclass -thereof). 
When using a tuple as the second argument, check whether 'obj' -is an instance of any of the classes listed in the tuple.""" - w_objtype = space.type(w_obj) - if issubclass_w(space, w_objtype, w_klass_or_tuple): - return space.w_True - try: - w_objcls = space.getattr(w_obj, space.wrap("__class__")) - except OperationError, e: - if e.match(space, space.w_AttributeError): - return space.w_False - else: - raise - if space.is_w(w_objcls, w_objtype): - return space.w_False - else: - return space.wrap(_issubclass(space, w_objcls, w_klass_or_tuple, False, space.sys.recursionlimit)) Modified: pypy/dist/pypy/module/__builtin__/test/test_classobj.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/test/test_classobj.py (original) +++ pypy/dist/pypy/module/__builtin__/test/test_classobj.py Mon Aug 11 12:46:58 2008 @@ -15,6 +15,35 @@ assert a.__class__ is A assert a.__dict__ == {'b': 2} + def test_isinstance(self): + class A: + pass + class B(A): + pass + class C(A): + pass + assert isinstance(B(), A) + assert isinstance(B(), B) + assert not isinstance(B(), C) + assert not isinstance(A(), B) + assert isinstance(B(), (A, C)) + assert isinstance(B(), (C, (), (C, B))) + assert not isinstance(B(), ()) + + def test_issubclass(self): + class A: + pass + class B(A): + pass + class C(A): + pass + assert issubclass(A, A) + assert not issubclass(A, B) + assert not issubclass(A, C) + assert issubclass(B, A) + assert issubclass(B, B) + assert not issubclass(B, C) + def test_mutate_class_special(self): class A: a = 1 @@ -415,6 +444,21 @@ raises(TypeError, "a + 1.1") assert l == [1, 1.1] + def test_binaryop_raises(self): + class A: + def __add__(self, other): + raise this_exception + def __iadd__(self, other): + raise this_exception + + a = A() + this_exception = ValueError + raises(ValueError, "a + 1") + raises(ValueError, "a += 1") + this_exception = AttributeError + raises(AttributeError, "a + 1") + raises(AttributeError, "a += 1") + def test_iadd(self): class A: def __init__(self): @@ -618,13 +662,14 @@ def test_catch_attributeerror_of_descriptor(self): def booh(self): - raise AttributeError, "booh" + raise this_exception, "booh" class E: __eq__ = property(booh) __iadd__ = property(booh) e = E() + this_exception = AttributeError raises(TypeError, "e += 1") # does not crash E() == E() @@ -632,6 +677,9 @@ __init__ = property(booh) raises(AttributeError, I) + this_exception = ValueError + raises(ValueError, "e += 1") + def test_multiple_inheritance_more(self): l = [] class A: # classic class @@ -686,6 +734,10 @@ assert Y() != X() def test_assignment_to_del(self): + import sys + if not hasattr(sys, 'pypy_objspaceclass'): + skip("assignment to __del__ doesn't give a warning in CPython") + import warnings warnings.simplefilter('error', RuntimeWarning) Modified: pypy/dist/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/dist/pypy/objspace/flow/objspace.py (original) +++ pypy/dist/pypy/objspace/flow/objspace.py Mon Aug 11 12:46:58 2008 @@ -218,18 +218,6 @@ return ecls return None - def abstract_issubclass(self, w_obj, w_cls, failhard=False): - return self.issubtype(w_obj, w_cls) - - def abstract_isinstance(self, w_obj, w_cls): - return self.isinstance(w_obj, w_cls) - - def abstract_isclass(self, w_obj): - return self.isinstance(w_obj, self.w_type) - - def abstract_getclass(self, w_obj): - return self.type(w_obj) - def build_flow(self, func, constargs={}): """ Modified: 
pypy/dist/pypy/objspace/flow/test/test_objspace.py ============================================================================== --- pypy/dist/pypy/objspace/flow/test/test_objspace.py (original) +++ pypy/dist/pypy/objspace/flow/test/test_objspace.py Mon Aug 11 12:46:58 2008 @@ -1,7 +1,7 @@ import new import py from pypy.objspace.flow.model import Constant, Block, Link, Variable, traverse -from pypy.objspace.flow.model import flatten +from pypy.objspace.flow.model import flatten, mkentrymap from pypy.interpreter.argument import Arguments from pypy.translator.simplify import simplify_graph from pypy.objspace.flow.objspace import FlowObjSpace @@ -416,6 +416,27 @@ x = self.codetest(self.catch_simple_call) #__________________________________________________________ + def multiple_catch_simple_call(): + try: + user_defined_function() + except (IndexError, OSError): + return -1 + return 0 + + def test_multiple_catch_simple_call(self): + graph = self.codetest(self.multiple_catch_simple_call) + simplify_graph(graph) + assert self.all_operations(graph) == {'simple_call': 1} + entrymap = mkentrymap(graph) + links = entrymap[graph.returnblock] + assert len(links) == 3 + assert (dict.fromkeys([link.exitcase for link in links]) == + dict.fromkeys([None, IndexError, OSError])) + links = entrymap[graph.exceptblock] + assert len(links) == 1 + assert links[0].exitcase is Exception + + #__________________________________________________________ def dellocal(): x = 1 del x From arigo at codespeak.net Mon Aug 11 12:47:05 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 11 Aug 2008 12:47:05 +0200 (CEST) Subject: [pypy-svn] r57194 - pypy/branch/isinstance-refactor Message-ID: <20080811104705.176A816A24E@codespeak.net> Author: arigo Date: Mon Aug 11 12:47:05 2008 New Revision: 57194 Removed: pypy/branch/isinstance-refactor/ Log: Remove merged branch. 
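The "patches the space" hack mentioned in the r57193 log above comes down to binding a plain function to an existing object through the descriptor protocol, as in space.abstract_isinstance_w = ab.abstract_isinstance_w.__get__(space). A minimal sketch of that monkey-patching pattern, with invented names and nothing beyond standard Python assumed:

class MiniSpace(object):
    def abstract_isinstance_w(self, w_obj, w_cls):
        # minimal default, as on BaseObjSpace
        return isinstance(w_obj, w_cls)

def full_abstract_isinstance_w(space, w_obj, w_cls):
    # stand-in for the full logic installed by the __builtin__ module
    return isinstance(w_obj, w_cls)

space = MiniSpace()
# A function is a descriptor: func.__get__(obj) returns a method bound
# to obj, and storing it as an instance attribute shadows the default
# version defined on the class.
space.abstract_isinstance_w = full_abstract_isinstance_w.__get__(space)
assert space.abstract_isinstance_w(3, int)
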
From bgola at codespeak.net Mon Aug 11 22:10:32 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 11 Aug 2008 22:10:32 +0200 (CEST) Subject: [pypy-svn] r57200 - pypy/branch/2.5-features/lib-python/modified-2.5.1 Message-ID: <20080811201032.387B316A220@codespeak.net> Author: bgola Date: Mon Aug 11 22:10:30 2008 New Revision: 57200 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/ pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/binhex.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/copy.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/locale.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/tarfile.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/traceback.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py (contents, props changed) Log: applying the changes from modified-2.4.1 to modified-2.5.1. Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,116 @@ +"""Record of phased-in incompatible language changes. + +Each line is of the form: + + FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease "," + CompilerFlag ")" + +where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples +of the same form as sys.version_info: + + (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int + PY_MINOR_VERSION, # the 1; an int + PY_MICRO_VERSION, # the 0; an int + PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string + PY_RELEASE_SERIAL # the 3; an int + ) + +OptionalRelease records the first release in which + + from __future__ import FeatureName + +was accepted. 
+ +In the case of MandatoryReleases that have not yet occurred, +MandatoryRelease predicts the release in which the feature will become part +of the language. + +Else MandatoryRelease records when the feature became part of the language; +in releases at or after that, modules no longer need + + from __future__ import FeatureName + +to use the feature in question, but may continue to use such imports. + +MandatoryRelease may also be None, meaning that a planned feature got +dropped. + +Instances of class _Feature have two corresponding methods, +.getOptionalRelease() and .getMandatoryRelease(). + +CompilerFlag is the (bitfield) flag that should be passed in the fourth +argument to the builtin function compile() to enable the feature in +dynamically compiled code. This flag is stored in the .compiler_flag +attribute on _Future instances. These values must match the appropriate +#defines of CO_xxx flags in Include/compile.h. + +No feature line is ever to be deleted from this file. +""" + +all_feature_names = [ + "nested_scopes", + "generators", + "division", + "absolute_import", + "with_statement", +] + +__all__ = ["all_feature_names"] + all_feature_names + +# The CO_xxx symbols are defined here under the same names used by +# compile.h, so that an editor search will find them here. However, +# they're not exported in __all__, because they don't really belong to +# this module. +CO_NESTED = 0x0010 # nested_scopes +CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000) +CO_FUTURE_DIVISION = 0x2000 # division +CO_FUTURE_ABSIMPORT = 0x4000 # absolute_import +CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement + +class _Feature: + def __init__(self, optionalRelease, mandatoryRelease, compiler_flag): + self.optional = optionalRelease + self.mandatory = mandatoryRelease + self.compiler_flag = compiler_flag + + def getOptionalRelease(self): + """Return first release in which this feature was recognized. + + This is a 5-tuple, of the same form as sys.version_info. + """ + + return self.optional + + def getMandatoryRelease(self): + """Return release in which this feature will become mandatory. + + This is a 5-tuple, of the same form as sys.version_info, or, if + the feature was dropped, is None. + """ + + return self.mandatory + + def __repr__(self): + return "_Feature" + repr((self.optional, + self.mandatory, + self.compiler_flag)) + +nested_scopes = _Feature((2, 1, 0, "beta", 1), + (2, 2, 0, "alpha", 0), + CO_NESTED) + +generators = _Feature((2, 2, 0, "alpha", 1), + (2, 3, 0, "final", 0), + CO_GENERATOR_ALLOWED) + +division = _Feature((2, 2, 0, "alpha", 2), + (3, 0, 0, "alpha", 0), + CO_FUTURE_DIVISION) + +absolute_import = _Feature((2, 5, 0, "alpha", 1), + (2, 7, 0, "alpha", 0), + CO_FUTURE_ABSIMPORT) + +with_statement = _Feature((2, 5, 0, "alpha", 1), + (2, 6, 0, "alpha", 0), + CO_FUTURE_WITH_STATEMENT) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/binhex.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/binhex.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,527 @@ +"""Macintosh binhex compression/decompression. + +easy interface: +binhex(inputfilename, outputfilename) +hexbin(inputfilename, outputfilename) +""" + +# +# Jack Jansen, CWI, August 1995. +# +# The module is supposed to be as compatible as possible. Especially the +# easy interface should work "as expected" on any platform. +# XXXX Note: currently, textfiles appear in mac-form on all platforms. 
+# We seem to lack a simple character-translate in python. +# (we should probably use ISO-Latin-1 on all but the mac platform). +# XXXX The simple routines are too simple: they expect to hold the complete +# files in-core. Should be fixed. +# XXXX It would be nice to handle AppleDouble format on unix +# (for servers serving macs). +# XXXX I don't understand what happens when you get 0x90 times the same byte on +# input. The resulting code (xx 90 90) would appear to be interpreted as an +# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety... +# +import sys +import os +import struct +import binascii + +__all__ = ["binhex","hexbin","Error"] + +class Error(Exception): + pass + +# States (what have we written) +[_DID_HEADER, _DID_DATA, _DID_RSRC] = range(3) + +# Various constants +REASONABLY_LARGE=32768 # Minimal amount we pass the rle-coder +LINELEN=64 +RUNCHAR=chr(0x90) # run-length introducer + +# +# This code is no longer byte-order dependent + +# +# Workarounds for non-mac machines. +try: + from Carbon.File import FSSpec, FInfo + from MacOS import openrf + + def getfileinfo(name): + finfo = FSSpec(name).FSpGetFInfo() + dir, file = os.path.split(name) + # XXX Get resource/data sizes + fp = open(name, 'rb') + fp.seek(0, 2) + dlen = fp.tell() + fp = openrf(name, '*rb') + fp.seek(0, 2) + rlen = fp.tell() + return file, finfo, dlen, rlen + + def openrsrc(name, *mode): + if not mode: + mode = '*rb' + else: + mode = '*' + mode[0] + return openrf(name, mode) + +except ImportError: + # + # Glue code for non-macintosh usage + # + + class FInfo: + def __init__(self): + self.Type = '????' + self.Creator = '????' + self.Flags = 0 + + def getfileinfo(name): + finfo = FInfo() + # Quick check for textfile + fp = open(name) + data = open(name).read(256) + for c in data: + if not c.isspace() and (c<' ' or ord(c) > 0x7f): + break + else: + finfo.Type = 'TEXT' + fp.seek(0, 2) + dsize = fp.tell() + fp.close() + dir, file = os.path.split(name) + file = file.replace(':', '-', 1) + return file, finfo, dsize, 0 + + class openrsrc: + def __init__(self, *args): + pass + + def read(self, *args): + return '' + + def write(self, *args): + pass + + def close(self): + pass + +class _Hqxcoderengine: + """Write data to the coder in 3-byte chunks""" + + def __init__(self, ofp): + self.ofp = ofp + self.data = '' + self.hqxdata = '' + self.linelen = LINELEN-1 + + def write(self, data): + self.data = self.data + data + datalen = len(self.data) + todo = (datalen//3)*3 + data = self.data[:todo] + self.data = self.data[todo:] + if not data: + return + self.hqxdata = self.hqxdata + binascii.b2a_hqx(data) + self._flush(0) + + def _flush(self, force): + first = 0 + while first <= len(self.hqxdata)-self.linelen: + last = first + self.linelen + self.ofp.write(self.hqxdata[first:last]+'\n') + self.linelen = LINELEN + first = last + self.hqxdata = self.hqxdata[first:] + if force: + self.ofp.write(self.hqxdata + ':\n') + + def close(self): + if self.data: + self.hqxdata = \ + self.hqxdata + binascii.b2a_hqx(self.data) + self._flush(1) + self.ofp.close() + del self.ofp + +class _Rlecoderengine: + """Write data to the RLE-coder in suitably large chunks""" + + def __init__(self, ofp): + self.ofp = ofp + self.data = '' + + def write(self, data): + self.data = self.data + data + if len(self.data) < REASONABLY_LARGE: + return + rledata = binascii.rlecode_hqx(self.data) + self.ofp.write(rledata) + self.data = '' + + def close(self): + if self.data: + rledata = binascii.rlecode_hqx(self.data) + self.ofp.write(rledata) + 
self.ofp.close() + del self.ofp + +class BinHex: + def __init__(self, (name, finfo, dlen, rlen), ofp): + if type(ofp) == type(''): + ofname = ofp + ofp = open(ofname, 'w') + if os.name == 'mac': + fss = FSSpec(ofname) + fss.SetCreatorType('BnHq', 'TEXT') + ofp.write('(This file must be converted with BinHex 4.0)\n\n:') + hqxer = _Hqxcoderengine(ofp) + self.ofp = _Rlecoderengine(hqxer) + self.crc = 0 + if finfo is None: + finfo = FInfo() + self.dlen = dlen + self.rlen = rlen + self._writeinfo(name, finfo) + self.state = _DID_HEADER + + def _writeinfo(self, name, finfo): + nl = len(name) + if nl > 63: + raise Error, 'Filename too long' + d = chr(nl) + name + '\0' + d2 = finfo.Type + finfo.Creator + + # Force all structs to be packed with big-endian + d3 = struct.pack('>h', finfo.Flags) + d4 = struct.pack('>ii', self.dlen, self.rlen) + info = d + d2 + d3 + d4 + self._write(info) + self._writecrc() + + def _write(self, data): + self.crc = binascii.crc_hqx(data, self.crc) + self.ofp.write(data) + + def _writecrc(self): + # XXXX Should this be here?? + # self.crc = binascii.crc_hqx('\0\0', self.crc) + if self.crc < 0: + fmt = '>h' + else: + fmt = '>H' + self.ofp.write(struct.pack(fmt, self.crc)) + self.crc = 0 + + def write(self, data): + if self.state != _DID_HEADER: + raise Error, 'Writing data at the wrong time' + self.dlen = self.dlen - len(data) + self._write(data) + + def close_data(self): + if self.dlen != 0: + raise Error, 'Incorrect data size, diff=%r' % (self.rlen,) + self._writecrc() + self.state = _DID_DATA + + def write_rsrc(self, data): + if self.state < _DID_DATA: + self.close_data() + if self.state != _DID_DATA: + raise Error, 'Writing resource data at the wrong time' + self.rlen = self.rlen - len(data) + self._write(data) + + def close(self): + if self.state < _DID_DATA: + self.close_data() + if self.state != _DID_DATA: + raise Error, 'Close at the wrong time' + if self.rlen != 0: + raise Error, \ + "Incorrect resource-datasize, diff=%r" % (self.rlen,) + self._writecrc() + self.ofp.close() + self.state = None + del self.ofp + +def binhex(inp, out): + """(infilename, outfilename) - Create binhex-encoded copy of a file""" + finfo = getfileinfo(inp) + ofp = BinHex(finfo, out) + + ifp = open(inp, 'rb') + # XXXX Do textfile translation on non-mac systems + while 1: + d = ifp.read(128000) + if not d: break + ofp.write(d) + ofp.close_data() + ifp.close() + + ifp = openrsrc(inp, 'rb') + while 1: + d = ifp.read(128000) + if not d: break + ofp.write_rsrc(d) + ofp.close() + ifp.close() + +class _Hqxdecoderengine: + """Read data via the decoder in 4-byte chunks""" + + def __init__(self, ifp): + self.ifp = ifp + self.eof = 0 + + def read(self, totalwtd): + """Read at least wtd bytes (or until EOF)""" + decdata = '' + wtd = totalwtd + # + # The loop here is convoluted, since we don't really now how + # much to decode: there may be newlines in the incoming data. + while wtd > 0: + if self.eof: return decdata + wtd = ((wtd+2)//3)*4 + data = self.ifp.read(wtd) + # + # Next problem: there may not be a complete number of + # bytes in what we pass to a2b. Solve by yet another + # loop. 
+ # + while 1: + try: + decdatacur, self.eof = \ + binascii.a2b_hqx(data) + break + except binascii.Incomplete: + pass + newdata = self.ifp.read(1) + if not newdata: + raise Error, \ + 'Premature EOF on binhex file' + data = data + newdata + decdata = decdata + decdatacur + wtd = totalwtd - len(decdata) + if not decdata and not self.eof: + raise Error, 'Premature EOF on binhex file' + return decdata + + def close(self): + self.ifp.close() + +class _Rledecoderengine: + """Read data via the RLE-coder""" + + def __init__(self, ifp): + self.ifp = ifp + self.pre_buffer = '' + self.post_buffer = '' + self.eof = 0 + + def read(self, wtd): + if wtd > len(self.post_buffer): + self._fill(wtd-len(self.post_buffer)) + rv = self.post_buffer[:wtd] + self.post_buffer = self.post_buffer[wtd:] + return rv + + def _fill(self, wtd): + self.pre_buffer = self.pre_buffer + self.ifp.read(wtd+4) + if self.ifp.eof: + self.post_buffer = self.post_buffer + \ + binascii.rledecode_hqx(self.pre_buffer) + self.pre_buffer = '' + return + + # + # Obfuscated code ahead. We have to take care that we don't + # end up with an orphaned RUNCHAR later on. So, we keep a couple + # of bytes in the buffer, depending on what the end of + # the buffer looks like: + # '\220\0\220' - Keep 3 bytes: repeated \220 (escaped as \220\0) + # '?\220' - Keep 2 bytes: repeated something-else + # '\220\0' - Escaped \220: Keep 2 bytes. + # '?\220?' - Complete repeat sequence: decode all + # otherwise: keep 1 byte. + # + mark = len(self.pre_buffer) + if self.pre_buffer[-3:] == RUNCHAR + '\0' + RUNCHAR: + mark = mark - 3 + elif self.pre_buffer[-1] == RUNCHAR: + mark = mark - 2 + elif self.pre_buffer[-2:] == RUNCHAR + '\0': + mark = mark - 2 + elif self.pre_buffer[-2] == RUNCHAR: + pass # Decode all + else: + mark = mark - 1 + + self.post_buffer = self.post_buffer + \ + binascii.rledecode_hqx(self.pre_buffer[:mark]) + self.pre_buffer = self.pre_buffer[mark:] + + def close(self): + self.ifp.close() + +class HexBin: + def __init__(self, ifp): + if type(ifp) == type(''): + ifp = open(ifp) + # + # Find initial colon. + # + while 1: + ch = ifp.read(1) + if not ch: + raise Error, "No binhex data found" + # Cater for \r\n terminated lines (which show up as \n\r, hence + # all lines start with \r) + if ch == '\r': + continue + if ch == ':': + break + if ch != '\n': + dummy = ifp.readline() + + hqxifp = _Hqxdecoderengine(ifp) + self.ifp = _Rledecoderengine(hqxifp) + self.crc = 0 + self._readheader() + + def _read(self, len): + data = self.ifp.read(len) + self.crc = binascii.crc_hqx(data, self.crc) + return data + + def _checkcrc(self): + filecrc = struct.unpack('>h', self.ifp.read(2))[0] & 0xffff + #self.crc = binascii.crc_hqx('\0\0', self.crc) + # XXXX Is this needed?? 
+ self.crc = self.crc & 0xffff + if filecrc != self.crc: + raise Error, 'CRC error, computed %x, read %x' \ + %(self.crc, filecrc) + self.crc = 0 + + def _readheader(self): + len = self._read(1) + fname = self._read(ord(len)) + rest = self._read(1+4+4+2+4+4) + self._checkcrc() + + type = rest[1:5] + creator = rest[5:9] + flags = struct.unpack('>h', rest[9:11])[0] + self.dlen = struct.unpack('>l', rest[11:15])[0] + self.rlen = struct.unpack('>l', rest[15:19])[0] + + self.FName = fname + self.FInfo = FInfo() + self.FInfo.Creator = creator + self.FInfo.Type = type + self.FInfo.Flags = flags + + self.state = _DID_HEADER + + def read(self, *n): + if self.state != _DID_HEADER: + raise Error, 'Read data at wrong time' + if n: + n = n[0] + n = min(n, self.dlen) + else: + n = self.dlen + rv = '' + while len(rv) < n: + rv = rv + self._read(n-len(rv)) + self.dlen = self.dlen - n + return rv + + def close_data(self): + if self.state != _DID_HEADER: + raise Error, 'close_data at wrong time' + if self.dlen: + dummy = self._read(self.dlen) + self._checkcrc() + self.state = _DID_DATA + + def read_rsrc(self, *n): + if self.state == _DID_HEADER: + self.close_data() + if self.state != _DID_DATA: + raise Error, 'Read resource data at wrong time' + if n: + n = n[0] + n = min(n, self.rlen) + else: + n = self.rlen + self.rlen = self.rlen - n + return self._read(n) + + def close(self): + if self.rlen: + dummy = self.read_rsrc(self.rlen) + self._checkcrc() + self.state = _DID_RSRC + self.ifp.close() + +def hexbin(inp, out): + """(infilename, outfilename) - Decode binhexed file""" + ifp = HexBin(inp) + finfo = ifp.FInfo + if not out: + out = ifp.FName + if os.name == 'mac': + ofss = FSSpec(out) + out = ofss.as_pathname() + + ofp = open(out, 'wb') + # XXXX Do translation on non-mac systems + while 1: + d = ifp.read(128000) + if not d: break + ofp.write(d) + ofp.close() + ifp.close_data() + + d = ifp.read_rsrc(128000) + if d: + ofp = openrsrc(out, 'wb') + ofp.write(d) + while 1: + d = ifp.read_rsrc(128000) + if not d: break + ofp.write(d) + ofp.close() + + if os.name == 'mac': + nfinfo = ofss.GetFInfo() + nfinfo.Creator = finfo.Creator + nfinfo.Type = finfo.Type + nfinfo.Flags = finfo.Flags + ofss.SetFInfo(nfinfo) + + ifp.close() + +def _test(): + if os.name == 'mac': + import macfs + fss, ok = macfs.PromptGetFile('File to convert:') + if not ok: + sys.exit(0) + fname = fss.as_pathname() + else: + fname = sys.argv[1] + binhex(fname, fname+'.hqx') + hexbin(fname+'.hqx', fname+'.viahqx') + #hexbin(fname, fname+'.unpacked') + sys.exit(1) + +if __name__ == '__main__': + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,405 @@ +"""A generic class to build line-oriented command interpreters. + +Interpreters constructed with this class obey the following conventions: + +1. End of file on input is processed as the command 'EOF'. +2. A command is parsed out of each line by collecting the prefix composed + of characters in the identchars member. +3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method + is passed a single argument consisting of the remainder of the line. +4. Typing an empty line repeats the last command. (Actually, it calls the + method `emptyline', which may be overridden in a subclass.) +5. There is a predefined `help' method. 
Given an argument `topic', it + calls the command `help_topic'. With no arguments, it lists all topics + with defined help_ functions, broken into up to three topics; documented + commands, miscellaneous help topics, and undocumented commands. +6. The command '?' is a synonym for `help'. The command '!' is a synonym + for `shell', if a do_shell method exists. +7. If completion is enabled, completing commands will be done automatically, + and completing of commands args is done by calling complete_foo() with + arguments text, line, begidx, endidx. text is string we are matching + against, all returned matches must begin with it. line is the current + input line (lstripped), begidx and endidx are the beginning and end + indexes of the text being matched, which could be used to provide + different completion depending upon which position the argument is in. + +The `default' method may be overridden to intercept commands for which there +is no do_ method. + +The `completedefault' method may be overridden to intercept completions for +commands that have no complete_ method. + +The data member `self.ruler' sets the character used to draw separator lines +in the help messages. If empty, no ruler line is drawn. It defaults to "=". + +If the value of `self.intro' is nonempty when the cmdloop method is called, +it is printed out on interpreter startup. This value may be overridden +via an optional argument to the cmdloop() method. + +The data members `self.doc_header', `self.misc_header', and +`self.undoc_header' set the headers used for the help function's +listings of documented functions, miscellaneous topics, and undocumented +functions respectively. + +These interpreters use raw_input; thus, if the readline module is loaded, +they automatically support Emacs-like command history and editing features. +""" + +import string + +__all__ = ["Cmd"] + +PROMPT = '(Cmd) ' +IDENTCHARS = string.ascii_letters + string.digits + '_' + +class Cmd: + """A simple framework for writing line-oriented command interpreters. + + These are often useful for test harnesses, administrative tools, and + prototypes that will later be wrapped in a more sophisticated interface. + + A Cmd instance or subclass instance is a line-oriented interpreter + framework. There is no good reason to instantiate Cmd itself; rather, + it's useful as a superclass of an interpreter class you define yourself + in order to inherit Cmd's methods and encapsulate action methods. + + """ + prompt = PROMPT + identchars = IDENTCHARS + ruler = '=' + lastcmd = '' + intro = None + doc_leader = "" + doc_header = "Documented commands (type help ):" + misc_header = "Miscellaneous help topics:" + undoc_header = "Undocumented commands:" + nohelp = "*** No help on %s" + use_rawinput = 1 + + def __init__(self, completekey='tab', stdin=None, stdout=None): + """Instantiate a line-oriented interpreter framework. + + The optional argument 'completekey' is the readline name of a + completion key; it defaults to the Tab key. If completekey is + not None and the readline module is available, command completion + is done automatically. The optional arguments stdin and stdout + specify alternate input and output file objects; if not specified, + sys.stdin and sys.stdout are used. 
+ + """ + import sys + if stdin is not None: + self.stdin = stdin + else: + self.stdin = sys.stdin + if stdout is not None: + self.stdout = stdout + else: + self.stdout = sys.stdout + self.cmdqueue = [] + self.completekey = completekey + + def cmdloop(self, intro=None): + """Repeatedly issue a prompt, accept input, parse an initial prefix + off the received input, and dispatch to action methods, passing them + the remainder of the line as argument. + + """ + + self.preloop() + if self.use_rawinput and self.completekey: + try: + import readline + self.old_completer = readline.get_completer() + readline.set_completer(self.complete) + readline.parse_and_bind(self.completekey+": complete") + except (ImportError, AttributeError): + pass + try: + if intro is not None: + self.intro = intro + if self.intro: + self.stdout.write(str(self.intro)+"\n") + stop = None + while not stop: + if self.cmdqueue: + line = self.cmdqueue.pop(0) + else: + if self.use_rawinput: + try: + line = raw_input(self.prompt) + except EOFError: + line = 'EOF' + else: + self.stdout.write(self.prompt) + self.stdout.flush() + line = self.stdin.readline() + if not len(line): + line = 'EOF' + else: + line = line[:-1] # chop \n + line = self.precmd(line) + stop = self.onecmd(line) + stop = self.postcmd(stop, line) + self.postloop() + finally: + if self.use_rawinput and self.completekey: + try: + import readline + readline.set_completer(self.old_completer) + except (ImportError, AttributeError): + pass + + + def precmd(self, line): + """Hook method executed just before the command line is + interpreted, but after the input prompt is generated and issued. + + """ + return line + + def postcmd(self, stop, line): + """Hook method executed just after a command dispatch is finished.""" + return stop + + def preloop(self): + """Hook method executed once when the cmdloop() method is called.""" + pass + + def postloop(self): + """Hook method executed once when the cmdloop() method is about to + return. + + """ + pass + + def parseline(self, line): + """Parse the line into a command name and a string containing + the arguments. Returns a tuple containing (command, args, line). + 'command' and 'args' may be None if the line couldn't be parsed. + """ + line = line.strip() + if not line: + return None, None, line + elif line[0] == '?': + line = 'help ' + line[1:] + elif line[0] == '!': + if hasattr(self, 'do_shell'): + line = 'shell ' + line[1:] + else: + return None, None, line + i, n = 0, len(line) + while i < n and line[i] in self.identchars: i = i+1 + cmd, arg = line[:i], line[i:].strip() + return cmd, arg, line + + def onecmd(self, line): + """Interpret the argument as though it had been typed in response + to the prompt. + + This may be overridden, but should not normally need to be; + see the precmd() and postcmd() methods for useful execution hooks. + The return value is a flag indicating whether interpretation of + commands by the interpreter should stop. + + """ + cmd, arg, line = self.parseline(line) + if not line: + return self.emptyline() + if cmd is None: + return self.default(line) + self.lastcmd = line + if cmd == '': + return self.default(line) + else: + try: + func = getattr(self, 'do_' + cmd) + except AttributeError: + return self.default(line) + return func(arg) + + def emptyline(self): + """Called when an empty line is entered in response to the prompt. + + If this method is not overridden, it repeats the last nonempty + command entered. 
+ + """ + if self.lastcmd: + return self.onecmd(self.lastcmd) + + def default(self, line): + """Called on an input line when the command prefix is not recognized. + + If this method is not overridden, it prints an error message and + returns. + + """ + self.stdout.write('*** Unknown syntax: %s\n'%line) + + def completedefault(self, *ignored): + """Method called to complete an input line when no command-specific + complete_*() method is available. + + By default, it returns an empty list. + + """ + return [] + + def completenames(self, text, *ignored): + dotext = 'do_'+text + return [a[3:] for a in self.get_names() if a.startswith(dotext)] + + def complete(self, text, state): + """Return the next possible completion for 'text'. + + If a command has not been entered, then complete against command list. + Otherwise try to call complete_ to get list of completions. + """ + if state == 0: + import readline + origline = readline.get_line_buffer() + line = origline.lstrip() + stripped = len(origline) - len(line) + begidx = readline.get_begidx() - stripped + endidx = readline.get_endidx() - stripped + if begidx>0: + cmd, args, foo = self.parseline(line) + if cmd == '': + compfunc = self.completedefault + else: + try: + compfunc = getattr(self, 'complete_' + cmd) + except AttributeError: + compfunc = self.completedefault + else: + compfunc = self.completenames + self.completion_matches = compfunc(text, line, begidx, endidx) + try: + return self.completion_matches[state] + except IndexError: + return None + + def get_names(self): + # Inheritance says we have to look in class and + # base classes; order is not important. + names = [] + classes = [self.__class__] + while classes: + aclass = classes.pop(0) + if aclass.__bases__: + classes = classes + list(aclass.__bases__) + names = names + dir(aclass) + return names + + def complete_help(self, *args): + return self.completenames(*args) + + def do_help(self, arg): + if arg: + # XXX check arg syntax + try: + func = getattr(self, 'help_' + arg) + except AttributeError: + try: + doc=getattr(self, 'do_' + arg).__doc__ + if doc: + self.stdout.write("%s\n"%str(doc)) + return + except AttributeError: + pass + self.stdout.write("%s\n"%str(self.nohelp % (arg,))) + return + func() + else: + names = self.get_names() + cmds_doc = [] + cmds_undoc = [] + help = {} + for name in names: + if name[:5] == 'help_': + help[name[5:]]=1 + names.sort() + # There can be duplicates if routines overridden + prevname = '' + for name in names: + if name[:3] == 'do_': + if name == prevname: + continue + prevname = name + cmd=name[3:] + if cmd in help: + cmds_doc.append(cmd) + del help[cmd] + elif getattr(self, name).__doc__: + cmds_doc.append(cmd) + else: + cmds_undoc.append(cmd) + self.stdout.write("%s\n"%str(self.doc_leader)) + self.print_topics(self.doc_header, cmds_doc, 15,80) + self.print_topics(self.misc_header, help.keys(),15,80) + self.print_topics(self.undoc_header, cmds_undoc, 15,80) + + def print_topics(self, header, cmds, cmdlen, maxcol): + if cmds: + self.stdout.write("%s\n"%str(header)) + if self.ruler: + self.stdout.write("%s\n"%str(self.ruler * len(header))) + self.columnize(cmds, maxcol-1) + self.stdout.write("\n") + + def columnize(self, list, displaywidth=80): + """Display a list of strings as a compact set of columns. + + Each column is only as wide as necessary. + Columns are separated by two spaces (one was not legible enough). 
+ """ + if not list: + self.stdout.write("\n") + return + nonstrings = [i for i in range(len(list)) + if not isinstance(list[i], str)] + if nonstrings: + raise TypeError, ("list[i] not a string for i in %s" % + ", ".join(map(str, nonstrings))) + size = len(list) + if size == 1: + self.stdout.write('%s\n'%str(list[0])) + return + # Try every row count from 1 upwards + for nrows in range(1, len(list)): + ncols = (size+nrows-1) // nrows + colwidths = [] + totwidth = -2 + for col in range(ncols): + colwidth = 0 + for row in range(nrows): + i = row + nrows*col + if i >= size: + break + x = list[i] + colwidth = max(colwidth, len(x)) + colwidths.append(colwidth) + totwidth += colwidth + 2 + if totwidth > displaywidth: + break + if totwidth <= displaywidth: + break + else: + nrows = len(list) + ncols = 1 + colwidths = [0] + for row in range(nrows): + texts = [] + for col in range(ncols): + i = row + nrows*col + if i >= size: + x = "" + else: + x = list[i] + texts.append(x) + while texts and not texts[-1]: + del texts[-1] + for col in range(len(texts)): + texts[col] = texts[col].ljust(colwidths[col]) + self.stdout.write("%s\n"%str(" ".join(texts))) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/copy.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/copy.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,414 @@ +"""Generic (shallow and deep) copying operations. + +Interface summary: + + import copy + + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + +For module specific errors, copy.Error is raised. + +The difference between shallow and deep copying is only relevant for +compound objects (objects that contain other objects, like lists or +class instances). + +- A shallow copy constructs a new compound object and then (to the + extent possible) inserts *the same objects* into it that the + original contains. + +- A deep copy constructs a new compound object and then, recursively, + inserts *copies* into it of the objects found in the original. + +Two problems often exist with deep copy operations that don't exist +with shallow copy operations: + + a) recursive objects (compound objects that, directly or indirectly, + contain a reference to themselves) may cause a recursive loop + + b) because deep copy copies *everything* it may copy too much, e.g. + administrative data structures that should be shared even between + copies + +Python's deep copy operation avoids these problems by: + + a) keeping a table of objects already copied during the current + copying pass + + b) letting user-defined classes override the copying operation or the + set of components copied + +This version does not copy types like module, class, function, method, +nor stack trace, stack frame, nor file, socket, window, nor array, nor +any similar types. + +Classes can use the same interfaces to control copying that they use +to control pickling: they can define methods called __getinitargs__(), +__getstate__() and __setstate__(). See the documentation for module +"pickle" for information on these methods. +""" + +import types +from copy_reg import dispatch_table + +class Error(Exception): + pass +error = Error # backward compatibility + +try: + from org.python.core import PyStringMap +except ImportError: + PyStringMap = None + +__all__ = ["Error", "copy", "deepcopy"] + +def copy(x): + """Shallow copy operation on arbitrary Python objects. 
+ + See the module's __doc__ string for more info. + """ + + cls = type(x) + + copier = _copy_dispatch.get(cls) + if copier: + return copier(x) + + copier = getattr(cls, "__copy__", None) + if copier: + return copier(x) + + reductor = dispatch_table.get(cls) + if reductor: + rv = reductor(x) + else: + reductor = getattr(x, "__reduce_ex__", None) + if reductor: + rv = reductor(2) + else: + reductor = getattr(x, "__reduce__", None) + if reductor: + rv = reductor() + else: + raise Error("un(shallow)copyable object of type %s" % cls) + + return _reconstruct(x, rv, 0) + + +_copy_dispatch = d = {} + +def _copy_immutable(x): + return x +for t in (type(None), int, long, float, bool, str, tuple, + frozenset, type, xrange, types.ClassType, + types.BuiltinFunctionType, + types.FunctionType): + d[t] = _copy_immutable +for name in ("ComplexType", "UnicodeType", "CodeType"): + t = getattr(types, name, None) + if t is not None: + d[t] = _copy_immutable + +def _copy_with_constructor(x): + return type(x)(x) +for t in (list, dict, set): + d[t] = _copy_with_constructor + +def _copy_with_copy_method(x): + return x.copy() +if PyStringMap is not None: + d[PyStringMap] = _copy_with_copy_method + +def _copy_inst(x): + if hasattr(x, '__copy__'): + return x.__copy__() + if hasattr(x, '__getinitargs__'): + args = x.__getinitargs__() + y = x.__class__(*args) + else: + y = _EmptyClass() + y.__class__ = x.__class__ + if hasattr(x, '__getstate__'): + state = x.__getstate__() + else: + state = x.__dict__ + if hasattr(y, '__setstate__'): + y.__setstate__(state) + else: + y.__dict__.update(state) + return y +d[types.InstanceType] = _copy_inst + +del d + +def deepcopy(x, memo=None, _nil=[]): + """Deep copy operation on arbitrary Python objects. + + See the module's __doc__ string for more info. 
+ """ + + if memo is None: + memo = {} + + d = id(x) + y = memo.get(d, _nil) + if y is not _nil: + return y + + cls = type(x) + + copier = _deepcopy_dispatch.get(cls) + if copier: + y = copier(x, memo) + else: + try: + issc = issubclass(cls, type) + except TypeError: # cls is not a class (old Boost; see SF #502085) + issc = 0 + if issc: + y = _deepcopy_atomic(x, memo) + else: + copier = getattr(x, "__deepcopy__", None) + if copier: + y = copier(memo) + else: + reductor = dispatch_table.get(cls) + if reductor: + rv = reductor(x) + else: + reductor = getattr(x, "__reduce_ex__", None) + if reductor: + rv = reductor(2) + else: + reductor = getattr(x, "__reduce__", None) + if reductor: + rv = reductor() + else: + raise Error( + "un(deep)copyable object of type %s" % cls) + y = _reconstruct(x, rv, 1, memo) + + memo[d] = y + _keep_alive(x, memo) # Make sure x lives at least as long as d + return y + +_deepcopy_dispatch = d = {} + +def _deepcopy_atomic(x, memo): + return x +d[type(None)] = _deepcopy_atomic +d[int] = _deepcopy_atomic +d[long] = _deepcopy_atomic +d[float] = _deepcopy_atomic +d[bool] = _deepcopy_atomic +try: + d[complex] = _deepcopy_atomic +except NameError: + pass +d[str] = _deepcopy_atomic +try: + d[unicode] = _deepcopy_atomic +except NameError: + pass +try: + d[types.CodeType] = _deepcopy_atomic +except AttributeError: + pass +d[type] = _deepcopy_atomic +d[xrange] = _deepcopy_atomic +d[types.ClassType] = _deepcopy_atomic +d[types.BuiltinFunctionType] = _deepcopy_atomic +d[types.FunctionType] = _deepcopy_atomic + +def _deepcopy_list(x, memo): + y = [] + memo[id(x)] = y + for a in x: + y.append(deepcopy(a, memo)) + return y +d[list] = _deepcopy_list + +def _deepcopy_tuple(x, memo): + y = [] + for a in x: + y.append(deepcopy(a, memo)) + d = id(x) + try: + return memo[d] + except KeyError: + pass + for i in range(len(x)): + if x[i] is not y[i]: + y = tuple(y) + break + else: + y = x + memo[d] = y + return y +d[tuple] = _deepcopy_tuple + +def _deepcopy_dict(x, memo): + y = {} + memo[id(x)] = y + for key, value in x.iteritems(): + y[deepcopy(key, memo)] = deepcopy(value, memo) + return y +d[dict] = _deepcopy_dict +if PyStringMap is not None: + d[PyStringMap] = _deepcopy_dict + +def _keep_alive(x, memo): + """Keeps a reference to the object x in the memo. + + Because we remember objects by their id, we have + to assure that possibly temporary objects are kept + alive by referencing them. + We store a reference at the id of the memo, which should + normally not be used unless someone tries to deepcopy + the memo itself... 
+ """ + try: + memo[id(memo)].append(x) + except KeyError: + # aha, this is the first one :-) + memo[id(memo)]=[x] + +def _deepcopy_inst(x, memo): + if hasattr(x, '__deepcopy__'): + return x.__deepcopy__(memo) + if hasattr(x, '__getinitargs__'): + args = x.__getinitargs__() + args = deepcopy(args, memo) + y = x.__class__(*args) + else: + y = _EmptyClass() + y.__class__ = x.__class__ + memo[id(x)] = y + if hasattr(x, '__getstate__'): + state = x.__getstate__() + else: + state = x.__dict__ + state = deepcopy(state, memo) + if hasattr(y, '__setstate__'): + y.__setstate__(state) + else: + y.__dict__.update(state) + return y +d[types.InstanceType] = _deepcopy_inst + +def _reconstruct(x, info, deep, memo=None): + if isinstance(info, str): + return x + assert isinstance(info, tuple) + if memo is None: + memo = {} + n = len(info) + assert n in (2, 3, 4, 5) + callable, args = info[:2] + if n > 2: + state = info[2] + else: + state = {} + if n > 3: + listiter = info[3] + else: + listiter = None + if n > 4: + dictiter = info[4] + else: + dictiter = None + if deep: + args = deepcopy(args, memo) + y = callable(*args) + memo[id(x)] = y + if listiter is not None: + for item in listiter: + if deep: + item = deepcopy(item, memo) + y.append(item) + if dictiter is not None: + for key, value in dictiter: + if deep: + key = deepcopy(key, memo) + value = deepcopy(value, memo) + y[key] = value + if state: + if deep: + state = deepcopy(state, memo) + if hasattr(y, '__setstate__'): + y.__setstate__(state) + else: + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + else: + slotstate = None + if state is not None: + y.__dict__.update(state) + if slotstate is not None: + for key, value in slotstate.iteritems(): + setattr(y, key, value) + return y + +del d + +del types + +# Helper for instance creation without calling __init__ +class _EmptyClass: + pass + +def _test(): + l = [None, 1, 2L, 3.14, 'xyzzy', (1, 2L), [3.14, 'abc'], + {'abc': 'ABC'}, (), [], {}] + l1 = copy(l) + print l1==l + l1 = map(copy, l) + print l1==l + l1 = deepcopy(l) + print l1==l + class C: + def __init__(self, arg=None): + self.a = 1 + self.arg = arg + if __name__ == '__main__': + import sys + file = sys.argv[0] + else: + file = __file__ + self.fp = open(file) + self.fp.close() + def __getstate__(self): + return {'a': self.a, 'arg': self.arg} + def __setstate__(self, state): + for key, value in state.iteritems(): + setattr(self, key, value) + def __deepcopy__(self, memo=None): + new = self.__class__(deepcopy(self.arg, memo)) + new.a = self.a + return new + c = C('argument sketch') + l.append(c) + l2 = copy(l) + print l == l2 + print l + print l2 + l2 = deepcopy(l) + print l == l2 + print l + print l2 + l.append({l[1]: l, 'xyz': l[2]}) + l3 = copy(l) + import repr + print map(repr.repr, l) + print map(repr.repr, l1) + print map(repr.repr, l2) + print map(repr.repr, l3) + l3 = deepcopy(l) + import repr + print map(repr.repr, l) + print map(repr.repr, l1) + print map(repr.repr, l2) + print map(repr.repr, l3) + +if __name__ == '__main__': + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,3138 @@ +# Copyright (c) 2004 Python Software Foundation. +# All rights reserved. 
+ +# Written by Eric Price +# and Facundo Batista +# and Raymond Hettinger +# and Aahz +# and Tim Peters + +# This module is currently Py2.3 compatible and should be kept that way +# unless a major compelling advantage arises. IOW, 2.3 compatibility is +# strongly preferred, but not guaranteed. + +# Also, this module should be kept in sync with the latest updates of +# the IBM specification as it evolves. Those updates will be treated +# as bug fixes (deviation from the spec is a compatibility, usability +# bug) and will be backported. At this point the spec is stabilizing +# and the updates are becoming fewer, smaller, and less significant. + +""" +This is a Py2.3 implementation of decimal floating point arithmetic based on +the General Decimal Arithmetic Specification: + + www2.hursley.ibm.com/decimal/decarith.html + +and IEEE standard 854-1987: + + www.cs.berkeley.edu/~ejr/projects/754/private/drafts/854-1987/dir.html + +Decimal floating point has finite precision with arbitrarily large bounds. + +The purpose of the module is to support arithmetic using familiar +"schoolhouse" rules and to avoid the some of tricky representation +issues associated with binary floating point. The package is especially +useful for financial applications or for contexts where users have +expectations that are at odds with binary floating point (for instance, +in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead +of the expected Decimal("0.00") returned by decimal floating point). + +Here are some examples of using the decimal module: + +>>> from decimal import * +>>> setcontext(ExtendedContext) +>>> Decimal(0) +Decimal("0") +>>> Decimal("1") +Decimal("1") +>>> Decimal("-.0123") +Decimal("-0.0123") +>>> Decimal(123456) +Decimal("123456") +>>> Decimal("123.45e12345678901234567890") +Decimal("1.2345E+12345678901234567892") +>>> Decimal("1.33") + Decimal("1.27") +Decimal("2.60") +>>> Decimal("12.34") + Decimal("3.87") - Decimal("18.41") +Decimal("-2.20") +>>> dig = Decimal(1) +>>> print dig / Decimal(3) +0.333333333 +>>> getcontext().prec = 18 +>>> print dig / Decimal(3) +0.333333333333333333 +>>> print dig.sqrt() +1 +>>> print Decimal(3).sqrt() +1.73205080756887729 +>>> print Decimal(3) ** 123 +4.85192780976896427E+58 +>>> inf = Decimal(1) / Decimal(0) +>>> print inf +Infinity +>>> neginf = Decimal(-1) / Decimal(0) +>>> print neginf +-Infinity +>>> print neginf + inf +NaN +>>> print neginf * inf +-Infinity +>>> print dig / 0 +Infinity +>>> getcontext().traps[DivisionByZero] = 1 +>>> print dig / 0 +Traceback (most recent call last): + ... + ... + ... +DivisionByZero: x / 0 +>>> c = Context() +>>> c.traps[InvalidOperation] = 0 +>>> print c.flags[InvalidOperation] +0 +>>> c.divide(Decimal(0), Decimal(0)) +Decimal("NaN") +>>> c.traps[InvalidOperation] = 1 +>>> print c.flags[InvalidOperation] +1 +>>> c.flags[InvalidOperation] = 0 +>>> print c.flags[InvalidOperation] +0 +>>> print c.divide(Decimal(0), Decimal(0)) +Traceback (most recent call last): + ... + ... + ... 
+InvalidOperation: 0 / 0 +>>> print c.flags[InvalidOperation] +1 +>>> c.flags[InvalidOperation] = 0 +>>> c.traps[InvalidOperation] = 0 +>>> print c.divide(Decimal(0), Decimal(0)) +NaN +>>> print c.flags[InvalidOperation] +1 +>>> +""" + +__all__ = [ + # Two major classes + 'Decimal', 'Context', + + # Contexts + 'DefaultContext', 'BasicContext', 'ExtendedContext', + + # Exceptions + 'DecimalException', 'Clamped', 'InvalidOperation', 'DivisionByZero', + 'Inexact', 'Rounded', 'Subnormal', 'Overflow', 'Underflow', + + # Constants for use in setting up contexts + 'ROUND_DOWN', 'ROUND_HALF_UP', 'ROUND_HALF_EVEN', 'ROUND_CEILING', + 'ROUND_FLOOR', 'ROUND_UP', 'ROUND_HALF_DOWN', + + # Functions for manipulating contexts + 'setcontext', 'getcontext', 'localcontext' +] + +import copy as _copy + +#Rounding +ROUND_DOWN = 'ROUND_DOWN' +ROUND_HALF_UP = 'ROUND_HALF_UP' +ROUND_HALF_EVEN = 'ROUND_HALF_EVEN' +ROUND_CEILING = 'ROUND_CEILING' +ROUND_FLOOR = 'ROUND_FLOOR' +ROUND_UP = 'ROUND_UP' +ROUND_HALF_DOWN = 'ROUND_HALF_DOWN' + +#Rounding decision (not part of the public API) +NEVER_ROUND = 'NEVER_ROUND' # Round in division (non-divmod), sqrt ONLY +ALWAYS_ROUND = 'ALWAYS_ROUND' # Every operation rounds at end. + +#Errors + +class DecimalException(ArithmeticError): + """Base exception class. + + Used exceptions derive from this. + If an exception derives from another exception besides this (such as + Underflow (Inexact, Rounded, Subnormal) that indicates that it is only + called if the others are present. This isn't actually used for + anything, though. + + handle -- Called when context._raise_error is called and the + trap_enabler is set. First argument is self, second is the + context. More arguments can be given, those being after + the explanation in _raise_error (For example, + context._raise_error(NewError, '(-x)!', self._sign) would + call NewError().handle(context, self._sign).) + + To define a new exception, it should be sufficient to have it derive + from DecimalException. + """ + def handle(self, context, *args): + pass + + +class Clamped(DecimalException): + """Exponent of a 0 changed to fit bounds. + + This occurs and signals clamped if the exponent of a result has been + altered in order to fit the constraints of a specific concrete + representation. This may occur when the exponent of a zero result would + be outside the bounds of a representation, or when a large normal + number would have an encoded exponent that cannot be represented. In + this latter case, the exponent is reduced to fit and the corresponding + number of zero digits are appended to the coefficient ("fold-down"). + """ + + +class InvalidOperation(DecimalException): + """An invalid operation was performed. + + Various bad things cause this: + + Something creates a signaling NaN + -INF + INF + 0 * (+-)INF + (+-)INF / (+-)INF + x % 0 + (+-)INF % x + x._rescale( non-integer ) + sqrt(-x) , x > 0 + 0 ** 0 + x ** (non-integer) + x ** (+-)INF + An operand is invalid + """ + def handle(self, context, *args): + if args: + if args[0] == 1: #sNaN, must drop 's' but keep diagnostics + return Decimal( (args[1]._sign, args[1]._int, 'n') ) + return NaN + +class ConversionSyntax(InvalidOperation): + """Trying to convert badly formed string. + + This occurs and signals invalid-operation if an string is being + converted to a number and it does not conform to the numeric string + syntax. The result is [0,qNaN]. + """ + + def handle(self, context, *args): + return (0, (0,), 'n') #Passed to something which uses a tuple. 
+ +class DivisionByZero(DecimalException, ZeroDivisionError): + """Division by 0. + + This occurs and signals division-by-zero if division of a finite number + by zero was attempted (during a divide-integer or divide operation, or a + power operation with negative right-hand operand), and the dividend was + not zero. + + The result of the operation is [sign,inf], where sign is the exclusive + or of the signs of the operands for divide, or is 1 for an odd power of + -0, for power. + """ + + def handle(self, context, sign, double = None, *args): + if double is not None: + return (Infsign[sign],)*2 + return Infsign[sign] + +class DivisionImpossible(InvalidOperation): + """Cannot perform the division adequately. + + This occurs and signals invalid-operation if the integer result of a + divide-integer or remainder operation had too many digits (would be + longer than precision). The result is [0,qNaN]. + """ + + def handle(self, context, *args): + return (NaN, NaN) + +class DivisionUndefined(InvalidOperation, ZeroDivisionError): + """Undefined result of division. + + This occurs and signals invalid-operation if division by zero was + attempted (during a divide-integer, divide, or remainder operation), and + the dividend is also zero. The result is [0,qNaN]. + """ + + def handle(self, context, tup=None, *args): + if tup is not None: + return (NaN, NaN) #for 0 %0, 0 // 0 + return NaN + +class Inexact(DecimalException): + """Had to round, losing information. + + This occurs and signals inexact whenever the result of an operation is + not exact (that is, it needed to be rounded and any discarded digits + were non-zero), or if an overflow or underflow condition occurs. The + result in all cases is unchanged. + + The inexact signal may be tested (or trapped) to determine if a given + operation (or sequence of operations) was inexact. + """ + pass + +class InvalidContext(InvalidOperation): + """Invalid context. Unknown rounding, for example. + + This occurs and signals invalid-operation if an invalid context was + detected during an operation. This can occur if contexts are not checked + on creation and either the precision exceeds the capability of the + underlying concrete representation or an unknown or unsupported rounding + was specified. These aspects of the context need only be checked when + the values are required to be used. The result is [0,qNaN]. + """ + + def handle(self, context, *args): + return NaN + +class Rounded(DecimalException): + """Number got rounded (not necessarily changed during rounding). + + This occurs and signals rounded whenever the result of an operation is + rounded (that is, some zero or non-zero digits were discarded from the + coefficient), or if an overflow or underflow condition occurs. The + result in all cases is unchanged. + + The rounded signal may be tested (or trapped) to determine if a given + operation (or sequence of operations) caused a loss of precision. + """ + pass + +class Subnormal(DecimalException): + """Exponent < Emin before rounding. + + This occurs and signals subnormal whenever the result of a conversion or + operation is subnormal (that is, its adjusted exponent is less than + Emin, before any rounding). The result in all cases is unchanged. + + The subnormal signal may be tested (or trapped) to determine if a given + or operation (or sequence of operations) yielded a subnormal result. + """ + pass + +class Overflow(Inexact, Rounded): + """Numerical overflow. 
+ + This occurs and signals overflow if the adjusted exponent of a result + (from a conversion or from an operation that is not an attempt to divide + by zero), after rounding, would be greater than the largest value that + can be handled by the implementation (the value Emax). + + The result depends on the rounding mode: + + For round-half-up and round-half-even (and for round-half-down and + round-up, if implemented), the result of the operation is [sign,inf], + where sign is the sign of the intermediate result. For round-down, the + result is the largest finite number that can be represented in the + current precision, with the sign of the intermediate result. For + round-ceiling, the result is the same as for round-down if the sign of + the intermediate result is 1, or is [0,inf] otherwise. For round-floor, + the result is the same as for round-down if the sign of the intermediate + result is 0, or is [1,inf] otherwise. In all cases, Inexact and Rounded + will also be raised. + """ + + def handle(self, context, sign, *args): + if context.rounding in (ROUND_HALF_UP, ROUND_HALF_EVEN, + ROUND_HALF_DOWN, ROUND_UP): + return Infsign[sign] + if sign == 0: + if context.rounding == ROUND_CEILING: + return Infsign[sign] + return Decimal((sign, (9,)*context.prec, + context.Emax-context.prec+1)) + if sign == 1: + if context.rounding == ROUND_FLOOR: + return Infsign[sign] + return Decimal( (sign, (9,)*context.prec, + context.Emax-context.prec+1)) + + +class Underflow(Inexact, Rounded, Subnormal): + """Numerical underflow with result rounded to 0. + + This occurs and signals underflow if a result is inexact and the + adjusted exponent of the result would be smaller (more negative) than + the smallest value that can be handled by the implementation (the value + Emin). That is, the result is both inexact and subnormal. + + The result after an underflow will be a subnormal number rounded, if + necessary, so that its exponent is not less than Etiny. This may result + in 0 with the sign of the intermediate result and an exponent of Etiny. + + In all cases, Inexact, Rounded, and Subnormal will also be raised. + """ + +# List of public traps and flags +_signals = [Clamped, DivisionByZero, Inexact, Overflow, Rounded, + Underflow, InvalidOperation, Subnormal] + +# Map conditions (per the spec) to signals +_condition_map = {ConversionSyntax:InvalidOperation, + DivisionImpossible:InvalidOperation, + DivisionUndefined:InvalidOperation, + InvalidContext:InvalidOperation} + +##### Context Functions ####################################### + +# The getcontext() and setcontext() function manage access to a thread-local +# current context. Py2.4 offers direct support for thread locals. If that +# is not available, use threading.currentThread() which is slower but will +# work for older Pythons. If threads are not part of the build, create a +# mock threading object with threading.local() returning the module namespace. + +try: + import threading +except ImportError: + # Python was compiled without threads; create a mock object instead + import sys + class MockThreading: + def local(self, sys=sys): + return sys.modules[__name__] + threading = MockThreading() + del sys, MockThreading + +try: + threading.local + +except AttributeError: + + #To fix reloading, force it to create a new context + #Old contexts have different exceptions in their dicts, making problems. 
+ if hasattr(threading.currentThread(), '__decimal_context__'): + del threading.currentThread().__decimal_context__ + + def setcontext(context): + """Set this thread's context to context.""" + if context in (DefaultContext, BasicContext, ExtendedContext): + context = context.copy() + context.clear_flags() + threading.currentThread().__decimal_context__ = context + + def getcontext(): + """Returns this thread's context. + + If this thread does not yet have a context, returns + a new context and sets this thread's context. + New contexts are copies of DefaultContext. + """ + try: + return threading.currentThread().__decimal_context__ + except AttributeError: + context = Context() + threading.currentThread().__decimal_context__ = context + return context + +else: + + local = threading.local() + if hasattr(local, '__decimal_context__'): + del local.__decimal_context__ + + def getcontext(_local=local): + """Returns this thread's context. + + If this thread does not yet have a context, returns + a new context and sets this thread's context. + New contexts are copies of DefaultContext. + """ + try: + return _local.__decimal_context__ + except AttributeError: + context = Context() + _local.__decimal_context__ = context + return context + + def setcontext(context, _local=local): + """Set this thread's context to context.""" + if context in (DefaultContext, BasicContext, ExtendedContext): + context = context.copy() + context.clear_flags() + _local.__decimal_context__ = context + + del threading, local # Don't contaminate the namespace + +def localcontext(ctx=None): + """Return a context manager for a copy of the supplied context + + Uses a copy of the current context if no context is specified + The returned context manager creates a local decimal context + in a with statement: + def sin(x): + with localcontext() as ctx: + ctx.prec += 2 + # Rest of sin calculation algorithm + # uses a precision 2 greater than normal + return +s # Convert result to normal precision + + def sin(x): + with localcontext(ExtendedContext): + # Rest of sin calculation algorithm + # uses the Extended Context from the + # General Decimal Arithmetic Specification + return +s # Convert result to normal context + + """ + # The string below can't be included in the docstring until Python 2.6 + # as the doctest module doesn't understand __future__ statements + """ + >>> from __future__ import with_statement + >>> print getcontext().prec + 28 + >>> with localcontext(): + ... ctx = getcontext() + ... ctx.prec += 2 + ... print ctx.prec + ... + 30 + >>> with localcontext(ExtendedContext): + ... print getcontext().prec + ... + 9 + >>> print getcontext().prec + 28 + """ + if ctx is None: ctx = getcontext() + return _ContextManager(ctx) + + +##### Decimal class ########################################### + +class Decimal(object): + """Floating point class for decimal arithmetic.""" + + __slots__ = ('_exp','_int','_sign', '_is_special') + # Generally, the value of the Decimal instance is given by + # (-1)**_sign * _int * 10**_exp + # Special values are signified by _is_special == True + + # We're immutable, so use __new__ not __init__ + def __new__(cls, value="0", context=None): + """Create a decimal point instance. 
+ + >>> Decimal('3.14') # string input + Decimal("3.14") + >>> Decimal((0, (3, 1, 4), -2)) # tuple input (sign, digit_tuple, exponent) + Decimal("3.14") + >>> Decimal(314) # int or long + Decimal("314") + >>> Decimal(Decimal(314)) # another decimal instance + Decimal("314") + """ + + self = object.__new__(cls) + self._is_special = False + + # From an internal working value + if isinstance(value, _WorkRep): + self._sign = value.sign + self._int = tuple(map(int, str(value.int))) + self._exp = int(value.exp) + return self + + # From another decimal + if isinstance(value, Decimal): + self._exp = value._exp + self._sign = value._sign + self._int = value._int + self._is_special = value._is_special + return self + + # From an integer + if isinstance(value, (int,long)): + if value >= 0: + self._sign = 0 + else: + self._sign = 1 + self._exp = 0 + self._int = tuple(map(int, str(abs(value)))) + return self + + # tuple/list conversion (possibly from as_tuple()) + if isinstance(value, (list,tuple)): + if len(value) != 3: + raise ValueError, 'Invalid arguments' + if value[0] not in (0,1): + raise ValueError, 'Invalid sign' + for digit in value[1]: + if not isinstance(digit, (int,long)) or digit < 0: + raise ValueError, "The second value in the tuple must be composed of non negative integer elements." + + self._sign = value[0] + self._int = tuple(value[1]) + if value[2] in ('F','n','N'): + self._exp = value[2] + self._is_special = True + else: + self._exp = int(value[2]) + return self + + if isinstance(value, float): + raise TypeError("Cannot convert float to Decimal. " + + "First convert the float to a string") + + # Other argument types may require the context during interpretation + if context is None: + context = getcontext() + + # From a string + # REs insist on real strings, so we can too. + if isinstance(value, basestring): + if _isinfinity(value): + self._exp = 'F' + self._int = (0,) + self._is_special = True + if _isinfinity(value) == 1: + self._sign = 0 + else: + self._sign = 1 + return self + if _isnan(value): + sig, sign, diag = _isnan(value) + self._is_special = True + if len(diag) > context.prec: #Diagnostic info too long + self._sign, self._int, self._exp = \ + context._raise_error(ConversionSyntax) + return self + if sig == 1: + self._exp = 'n' #qNaN + else: #sig == 2 + self._exp = 'N' #sNaN + self._sign = sign + self._int = tuple(map(int, diag)) #Diagnostic info + return self + try: + self._sign, self._int, self._exp = _string2exact(value) + except ValueError: + self._is_special = True + self._sign, self._int, self._exp = context._raise_error(ConversionSyntax) + return self + + raise TypeError("Cannot convert %r to Decimal" % value) + + def _isnan(self): + """Returns whether the number is not actually one. + + 0 if a number + 1 if NaN + 2 if sNaN + """ + if self._is_special: + exp = self._exp + if exp == 'n': + return 1 + elif exp == 'N': + return 2 + return 0 + + def _isinfinity(self): + """Returns whether the number is infinite + + 0 if finite or not a number + 1 if +INF + -1 if -INF + """ + if self._exp == 'F': + if self._sign: + return -1 + return 1 + return 0 + + def _check_nans(self, other = None, context=None): + """Returns whether the number is not actually one. + + if self, other are sNaN, signal + if self, other are NaN return nan + return 0 + + Done before operations. 
+ """ + + self_is_nan = self._isnan() + if other is None: + other_is_nan = False + else: + other_is_nan = other._isnan() + + if self_is_nan or other_is_nan: + if context is None: + context = getcontext() + + if self_is_nan == 2: + return context._raise_error(InvalidOperation, 'sNaN', + 1, self) + if other_is_nan == 2: + return context._raise_error(InvalidOperation, 'sNaN', + 1, other) + if self_is_nan: + return self + + return other + return 0 + + def __nonzero__(self): + """Is the number non-zero? + + 0 if self == 0 + 1 if self != 0 + """ + if self._is_special: + return 1 + return sum(self._int) != 0 + + def __cmp__(self, other, context=None): + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + return 1 # Comparison involving NaN's always reports self > other + + # INF = INF + return cmp(self._isinfinity(), other._isinfinity()) + + if not self and not other: + return 0 #If both 0, sign comparison isn't certain. + + #If different signs, neg one is less + if other._sign < self._sign: + return -1 + if self._sign < other._sign: + return 1 + + self_adjusted = self.adjusted() + other_adjusted = other.adjusted() + if self_adjusted == other_adjusted and \ + self._int + (0,)*(self._exp - other._exp) == \ + other._int + (0,)*(other._exp - self._exp): + return 0 #equal, except in precision. ([0]*(-x) = []) + elif self_adjusted > other_adjusted and self._int[0] != 0: + return (-1)**self._sign + elif self_adjusted < other_adjusted and other._int[0] != 0: + return -((-1)**self._sign) + + # Need to round, so make sure we have a valid context + if context is None: + context = getcontext() + + context = context._shallow_copy() + rounding = context._set_rounding(ROUND_UP) #round away from 0 + + flags = context._ignore_all_flags() + res = self.__sub__(other, context=context) + + context._regard_flags(*flags) + + context.rounding = rounding + + if not res: + return 0 + elif res._sign: + return -1 + return 1 + + def __eq__(self, other): + if not isinstance(other, (Decimal, int, long)): + return NotImplemented + return self.__cmp__(other) == 0 + + def __ne__(self, other): + if not isinstance(other, (Decimal, int, long)): + return NotImplemented + return self.__cmp__(other) != 0 + + def compare(self, other, context=None): + """Compares one to another. + + -1 => a < b + 0 => a = b + 1 => a > b + NaN => one is NaN + Like __cmp__, but returns Decimal instances. + """ + other = _convert_other(other) + if other is NotImplemented: + return other + + #compare(NaN, NaN) = NaN + if (self._is_special or other and other._is_special): + ans = self._check_nans(other, context) + if ans: + return ans + + return Decimal(self.__cmp__(other, context)) + + def __hash__(self): + """x.__hash__() <==> hash(x)""" + # Decimal integers must hash the same as the ints + # Non-integer decimals are normalized and hashed as strings + # Normalization assures that hash(100E-1) == hash(10) + if self._is_special: + if self._isnan(): + raise TypeError('Cannot hash a NaN value.') + return hash(str(self)) + i = int(self) + if self == Decimal(i): + return hash(i) + assert self.__nonzero__() # '-0' handled by integer case + return hash(str(self.normalize())) + + def as_tuple(self): + """Represents the number as a triple tuple. + + To show the internals exactly as they are. 
+ """ + return (self._sign, self._int, self._exp) + + def __repr__(self): + """Represents the number as an instance of Decimal.""" + # Invariant: eval(repr(d)) == d + return 'Decimal("%s")' % str(self) + + def __str__(self, eng = 0, context=None): + """Return string representation of the number in scientific notation. + + Captures all of the information in the underlying representation. + """ + + if self._is_special: + if self._isnan(): + minus = '-'*self._sign + if self._int == (0,): + info = '' + else: + info = ''.join(map(str, self._int)) + if self._isnan() == 2: + return minus + 'sNaN' + info + return minus + 'NaN' + info + if self._isinfinity(): + minus = '-'*self._sign + return minus + 'Infinity' + + if context is None: + context = getcontext() + + tmp = map(str, self._int) + numdigits = len(self._int) + leftdigits = self._exp + numdigits + if eng and not self: #self = 0eX wants 0[.0[0]]eY, not [[0]0]0eY + if self._exp < 0 and self._exp >= -6: #short, no need for e/E + s = '-'*self._sign + '0.' + '0'*(abs(self._exp)) + return s + #exp is closest mult. of 3 >= self._exp + exp = ((self._exp - 1)// 3 + 1) * 3 + if exp != self._exp: + s = '0.'+'0'*(exp - self._exp) + else: + s = '0' + if exp != 0: + if context.capitals: + s += 'E' + else: + s += 'e' + if exp > 0: + s += '+' #0.0e+3, not 0.0e3 + s += str(exp) + s = '-'*self._sign + s + return s + if eng: + dotplace = (leftdigits-1)%3+1 + adjexp = leftdigits -1 - (leftdigits-1)%3 + else: + adjexp = leftdigits-1 + dotplace = 1 + if self._exp == 0: + pass + elif self._exp < 0 and adjexp >= 0: + tmp.insert(leftdigits, '.') + elif self._exp < 0 and adjexp >= -6: + tmp[0:0] = ['0'] * int(-leftdigits) + tmp.insert(0, '0.') + else: + if numdigits > dotplace: + tmp.insert(dotplace, '.') + elif numdigits < dotplace: + tmp.extend(['0']*(dotplace-numdigits)) + if adjexp: + if not context.capitals: + tmp.append('e') + else: + tmp.append('E') + if adjexp > 0: + tmp.append('+') + tmp.append(str(adjexp)) + if eng: + while tmp[0:1] == ['0']: + tmp[0:1] = [] + if len(tmp) == 0 or tmp[0] == '.' or tmp[0].lower() == 'e': + tmp[0:0] = ['0'] + if self._sign: + tmp.insert(0, '-') + + return ''.join(tmp) + + def to_eng_string(self, context=None): + """Convert to engineering-type string. + + Engineering notation has an exponent which is a multiple of 3, so there + are up to 3 digits left of the decimal place. + + Same rules for when in exponential and when as a value as in __str__. + """ + return self.__str__(eng=1, context=context) + + def __neg__(self, context=None): + """Returns a copy with the sign switched. + + Rounds, if it has reason. + """ + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + if not self: + # -Decimal('0') is Decimal('0'), not Decimal('-0') + sign = 0 + elif self._sign: + sign = 0 + else: + sign = 1 + + if context is None: + context = getcontext() + if context._rounding_decision == ALWAYS_ROUND: + return Decimal((sign, self._int, self._exp))._fix(context) + return Decimal( (sign, self._int, self._exp)) + + def __pos__(self, context=None): + """Returns a copy, unless it is a sNaN. 
+ + Rounds the number (if more then precision digits) + """ + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + sign = self._sign + if not self: + # + (-0) = 0 + sign = 0 + + if context is None: + context = getcontext() + + if context._rounding_decision == ALWAYS_ROUND: + ans = self._fix(context) + else: + ans = Decimal(self) + ans._sign = sign + return ans + + def __abs__(self, round=1, context=None): + """Returns the absolute value of self. + + If the second argument is 0, do not round. + """ + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + if not round: + if context is None: + context = getcontext() + context = context._shallow_copy() + context._set_rounding_decision(NEVER_ROUND) + + if self._sign: + ans = self.__neg__(context=context) + else: + ans = self.__pos__(context=context) + + return ans + + def __add__(self, other, context=None): + """Returns self + other. + + -INF + INF (or the reverse) cause InvalidOperation errors. + """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if context is None: + context = getcontext() + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + return ans + + if self._isinfinity(): + #If both INF, same sign => same as both, opposite => error. + if self._sign != other._sign and other._isinfinity(): + return context._raise_error(InvalidOperation, '-INF + INF') + return Decimal(self) + if other._isinfinity(): + return Decimal(other) #Can't both be infinity here + + shouldround = context._rounding_decision == ALWAYS_ROUND + + exp = min(self._exp, other._exp) + negativezero = 0 + if context.rounding == ROUND_FLOOR and self._sign != other._sign: + #If the answer is 0, the sign should be negative, in this case. + negativezero = 1 + + if not self and not other: + sign = min(self._sign, other._sign) + if negativezero: + sign = 1 + return Decimal( (sign, (0,), exp)) + if not self: + exp = max(exp, other._exp - context.prec-1) + ans = other._rescale(exp, watchexp=0, context=context) + if shouldround: + ans = ans._fix(context) + return ans + if not other: + exp = max(exp, self._exp - context.prec-1) + ans = self._rescale(exp, watchexp=0, context=context) + if shouldround: + ans = ans._fix(context) + return ans + + op1 = _WorkRep(self) + op2 = _WorkRep(other) + op1, op2 = _normalize(op1, op2, shouldround, context.prec) + + result = _WorkRep() + if op1.sign != op2.sign: + # Equal and opposite + if op1.int == op2.int: + if exp < context.Etiny(): + exp = context.Etiny() + context._raise_error(Clamped) + return Decimal((negativezero, (0,), exp)) + if op1.int < op2.int: + op1, op2 = op2, op1 + #OK, now abs(op1) > abs(op2) + if op1.sign == 1: + result.sign = 1 + op1.sign, op2.sign = op2.sign, op1.sign + else: + result.sign = 0 + #So we know the sign, and op1 > 0. 
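+        # The branch above only fixed the result sign for operands of opposite
+        # sign (the smaller magnitude is subtracted further down); the two
+        # branches below cover operands with the same sign, whose coefficients
+        # are simply added.  For instance (-1.1) + (-2.2) gives sign 1 and
+        # coefficient 33 at exponent -1, i.e. -3.3.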
+ elif op1.sign == 1: + result.sign = 1 + op1.sign, op2.sign = (0, 0) + else: + result.sign = 0 + #Now, op1 > abs(op2) > 0 + + if op2.sign == 0: + result.int = op1.int + op2.int + else: + result.int = op1.int - op2.int + + result.exp = op1.exp + ans = Decimal(result) + if shouldround: + ans = ans._fix(context) + return ans + + __radd__ = __add__ + + def __sub__(self, other, context=None): + """Return self + (-other)""" + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + ans = self._check_nans(other, context=context) + if ans: + return ans + + # -Decimal(0) = Decimal(0), which we don't want since + # (-0 - 0 = -0 + (-0) = -0, but -0 + 0 = 0.) + # so we change the sign directly to a copy + tmp = Decimal(other) + tmp._sign = 1-tmp._sign + + return self.__add__(tmp, context=context) + + def __rsub__(self, other, context=None): + """Return other + (-self)""" + other = _convert_other(other) + if other is NotImplemented: + return other + + tmp = Decimal(self) + tmp._sign = 1 - tmp._sign + return other.__add__(tmp, context=context) + + def _increment(self, round=1, context=None): + """Special case of add, adding 1eExponent + + Since it is common, (rounding, for example) this adds + (sign)*one E self._exp to the number more efficiently than add. + + For example: + Decimal('5.624e10')._increment() == Decimal('5.625e10') + """ + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + return Decimal(self) # Must be infinite, and incrementing makes no difference + + L = list(self._int) + L[-1] += 1 + spot = len(L)-1 + while L[spot] == 10: + L[spot] = 0 + if spot == 0: + L[0:0] = [1] + break + L[spot-1] += 1 + spot -= 1 + ans = Decimal((self._sign, L, self._exp)) + + if context is None: + context = getcontext() + if round and context._rounding_decision == ALWAYS_ROUND: + ans = ans._fix(context) + return ans + + def __mul__(self, other, context=None): + """Return self * other. + + (+-) INF * 0 (or its reverse) raise InvalidOperation. 
+ """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if context is None: + context = getcontext() + + resultsign = self._sign ^ other._sign + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + return ans + + if self._isinfinity(): + if not other: + return context._raise_error(InvalidOperation, '(+-)INF * 0') + return Infsign[resultsign] + + if other._isinfinity(): + if not self: + return context._raise_error(InvalidOperation, '0 * (+-)INF') + return Infsign[resultsign] + + resultexp = self._exp + other._exp + shouldround = context._rounding_decision == ALWAYS_ROUND + + # Special case for multiplying by zero + if not self or not other: + ans = Decimal((resultsign, (0,), resultexp)) + if shouldround: + #Fixing in case the exponent is out of bounds + ans = ans._fix(context) + return ans + + # Special case for multiplying by power of 10 + if self._int == (1,): + ans = Decimal((resultsign, other._int, resultexp)) + if shouldround: + ans = ans._fix(context) + return ans + if other._int == (1,): + ans = Decimal((resultsign, self._int, resultexp)) + if shouldround: + ans = ans._fix(context) + return ans + + op1 = _WorkRep(self) + op2 = _WorkRep(other) + + ans = Decimal( (resultsign, map(int, str(op1.int * op2.int)), resultexp)) + if shouldround: + ans = ans._fix(context) + + return ans + __rmul__ = __mul__ + + def __div__(self, other, context=None): + """Return self / other.""" + return self._divide(other, context=context) + __truediv__ = __div__ + + def _divide(self, other, divmod = 0, context=None): + """Return a / b, to context.prec precision. + + divmod: + 0 => true division + 1 => (a //b, a%b) + 2 => a //b + 3 => a%b + + Actually, if divmod is 2 or 3 a tuple is returned, but errors for + computing the other value are not raised. 
+ """ + other = _convert_other(other) + if other is NotImplemented: + if divmod in (0, 1): + return NotImplemented + return (NotImplemented, NotImplemented) + + if context is None: + context = getcontext() + + sign = self._sign ^ other._sign + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + if divmod: + return (ans, ans) + return ans + + if self._isinfinity() and other._isinfinity(): + if divmod: + return (context._raise_error(InvalidOperation, + '(+-)INF // (+-)INF'), + context._raise_error(InvalidOperation, + '(+-)INF % (+-)INF')) + return context._raise_error(InvalidOperation, '(+-)INF/(+-)INF') + + if self._isinfinity(): + if divmod == 1: + return (Infsign[sign], + context._raise_error(InvalidOperation, 'INF % x')) + elif divmod == 2: + return (Infsign[sign], NaN) + elif divmod == 3: + return (Infsign[sign], + context._raise_error(InvalidOperation, 'INF % x')) + return Infsign[sign] + + if other._isinfinity(): + if divmod: + return (Decimal((sign, (0,), 0)), Decimal(self)) + context._raise_error(Clamped, 'Division by infinity') + return Decimal((sign, (0,), context.Etiny())) + + # Special cases for zeroes + if not self and not other: + if divmod: + return context._raise_error(DivisionUndefined, '0 / 0', 1) + return context._raise_error(DivisionUndefined, '0 / 0') + + if not self: + if divmod: + otherside = Decimal(self) + otherside._exp = min(self._exp, other._exp) + return (Decimal((sign, (0,), 0)), otherside) + exp = self._exp - other._exp + if exp < context.Etiny(): + exp = context.Etiny() + context._raise_error(Clamped, '0e-x / y') + if exp > context.Emax: + exp = context.Emax + context._raise_error(Clamped, '0e+x / y') + return Decimal( (sign, (0,), exp) ) + + if not other: + if divmod: + return context._raise_error(DivisionByZero, 'divmod(x,0)', + sign, 1) + return context._raise_error(DivisionByZero, 'x / 0', sign) + + #OK, so neither = 0, INF or NaN + + shouldround = context._rounding_decision == ALWAYS_ROUND + + #If we're dividing into ints, and self < other, stop. + #self.__abs__(0) does not round. + if divmod and (self.__abs__(0, context) < other.__abs__(0, context)): + + if divmod == 1 or divmod == 3: + exp = min(self._exp, other._exp) + ans2 = self._rescale(exp, context=context, watchexp=0) + if shouldround: + ans2 = ans2._fix(context) + return (Decimal( (sign, (0,), 0) ), + ans2) + + elif divmod == 2: + #Don't round the mod part, if we don't need it. 
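+                # abs(self) < abs(other): the integer part of the quotient is
+                # zero (with the combined sign) and self is passed through
+                # unrounded in the remainder slot, which __floordiv__ simply
+                # discards; e.g. Decimal(2) // Decimal(7) gives Decimal("0").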
+ return (Decimal( (sign, (0,), 0) ), Decimal(self)) + + op1 = _WorkRep(self) + op2 = _WorkRep(other) + op1, op2, adjust = _adjust_coefficients(op1, op2) + res = _WorkRep( (sign, 0, (op1.exp - op2.exp)) ) + if divmod and res.exp > context.prec + 1: + return context._raise_error(DivisionImpossible) + + prec_limit = 10 ** context.prec + while 1: + while op2.int <= op1.int: + res.int += 1 + op1.int -= op2.int + if res.exp == 0 and divmod: + if res.int >= prec_limit and shouldround: + return context._raise_error(DivisionImpossible) + otherside = Decimal(op1) + frozen = context._ignore_all_flags() + + exp = min(self._exp, other._exp) + otherside = otherside._rescale(exp, context=context, watchexp=0) + context._regard_flags(*frozen) + if shouldround: + otherside = otherside._fix(context) + return (Decimal(res), otherside) + + if op1.int == 0 and adjust >= 0 and not divmod: + break + if res.int >= prec_limit and shouldround: + if divmod: + return context._raise_error(DivisionImpossible) + shouldround=1 + # Really, the answer is a bit higher, so adding a one to + # the end will make sure the rounding is right. + if op1.int != 0: + res.int *= 10 + res.int += 1 + res.exp -= 1 + + break + res.int *= 10 + res.exp -= 1 + adjust += 1 + op1.int *= 10 + op1.exp -= 1 + + if res.exp == 0 and divmod and op2.int > op1.int: + #Solves an error in precision. Same as a previous block. + + if res.int >= prec_limit and shouldround: + return context._raise_error(DivisionImpossible) + otherside = Decimal(op1) + frozen = context._ignore_all_flags() + + exp = min(self._exp, other._exp) + otherside = otherside._rescale(exp, context=context) + + context._regard_flags(*frozen) + + return (Decimal(res), otherside) + + ans = Decimal(res) + if shouldround: + ans = ans._fix(context) + return ans + + def __rdiv__(self, other, context=None): + """Swaps self/other and returns __div__.""" + other = _convert_other(other) + if other is NotImplemented: + return other + return other.__div__(self, context=context) + __rtruediv__ = __rdiv__ + + def __divmod__(self, other, context=None): + """ + (self // other, self % other) + """ + return self._divide(other, 1, context) + + def __rdivmod__(self, other, context=None): + """Swaps self/other and returns __divmod__.""" + other = _convert_other(other) + if other is NotImplemented: + return other + return other.__divmod__(self, context=context) + + def __mod__(self, other, context=None): + """ + self % other + """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + return ans + + if self and not other: + return context._raise_error(InvalidOperation, 'x % 0') + + return self._divide(other, 3, context)[1] + + def __rmod__(self, other, context=None): + """Swaps self/other and returns __mod__.""" + other = _convert_other(other) + if other is NotImplemented: + return other + return other.__mod__(self, context=context) + + def remainder_near(self, other, context=None): + """ + Remainder nearest to 0- abs(remainder-near) <= other/2 + """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + ans = self._check_nans(other, context) + if ans: + return ans + if self and not other: + return context._raise_error(InvalidOperation, 'x % 0') + + if context is None: + context = getcontext() + # If DivisionImpossible causes an error, do not leave Rounded/Inexact + # ignored in the calling function. 
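+        # Sketch of the steps below: take divmod(self, other), compare the
+        # remainder against other/2, and if it is more than half (or exactly
+        # half while the quotient is odd) shift it by one multiple of other,
+        # so the returned remainder is the one nearest to zero; e.g.
+        # remainder_near(10, 6) is -2 rather than 4.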
+ context = context._shallow_copy() + flags = context._ignore_flags(Rounded, Inexact) + #keep DivisionImpossible flags + (side, r) = self.__divmod__(other, context=context) + + if r._isnan(): + context._regard_flags(*flags) + return r + + context = context._shallow_copy() + rounding = context._set_rounding_decision(NEVER_ROUND) + + if other._sign: + comparison = other.__div__(Decimal(-2), context=context) + else: + comparison = other.__div__(Decimal(2), context=context) + + context._set_rounding_decision(rounding) + context._regard_flags(*flags) + + s1, s2 = r._sign, comparison._sign + r._sign, comparison._sign = 0, 0 + + if r < comparison: + r._sign, comparison._sign = s1, s2 + #Get flags now + self.__divmod__(other, context=context) + return r._fix(context) + r._sign, comparison._sign = s1, s2 + + rounding = context._set_rounding_decision(NEVER_ROUND) + + (side, r) = self.__divmod__(other, context=context) + context._set_rounding_decision(rounding) + if r._isnan(): + return r + + decrease = not side._iseven() + rounding = context._set_rounding_decision(NEVER_ROUND) + side = side.__abs__(context=context) + context._set_rounding_decision(rounding) + + s1, s2 = r._sign, comparison._sign + r._sign, comparison._sign = 0, 0 + if r > comparison or decrease and r == comparison: + r._sign, comparison._sign = s1, s2 + context.prec += 1 + if len(side.__add__(Decimal(1), context=context)._int) >= context.prec: + context.prec -= 1 + return context._raise_error(DivisionImpossible)[1] + context.prec -= 1 + if self._sign == other._sign: + r = r.__sub__(other, context=context) + else: + r = r.__add__(other, context=context) + else: + r._sign, comparison._sign = s1, s2 + + return r._fix(context) + + def __floordiv__(self, other, context=None): + """self // other""" + return self._divide(other, 2, context)[0] + + def __rfloordiv__(self, other, context=None): + """Swaps self/other and returns __floordiv__.""" + other = _convert_other(other) + if other is NotImplemented: + return other + return other.__floordiv__(self, context=context) + + def __float__(self): + """Float representation.""" + return float(str(self)) + + def __int__(self): + """Converts self to an int, truncating if necessary.""" + if self._is_special: + if self._isnan(): + context = getcontext() + return context._raise_error(InvalidContext) + elif self._isinfinity(): + raise OverflowError, "Cannot convert infinity to long" + if self._exp >= 0: + s = ''.join(map(str, self._int)) + '0'*self._exp + else: + s = ''.join(map(str, self._int))[:self._exp] + if s == '': + s = '0' + sign = '-'*self._sign + return int(sign + s) + + def __long__(self): + """Converts to a long. + + Equivalent to long(int(self)) + """ + return long(self.__int__()) + + def _fix(self, context): + """Round if it is necessary to keep self within prec precision. + + Rounds and fixes the exponent. Does not raise on a sNaN. + + Arguments: + self - Decimal instance + context - context used. + """ + if self._is_special: + return self + if context is None: + context = getcontext() + prec = context.prec + ans = self._fixexponents(context) + if len(ans._int) > prec: + ans = ans._round(prec, context=context) + ans = ans._fixexponents(context) + return ans + + def _fixexponents(self, context): + """Fix the exponents and return a copy with the exponent in bounds. + Only call if known to not be a special value. 
+ """ + folddown = context._clamp + Emin = context.Emin + ans = self + ans_adjusted = ans.adjusted() + if ans_adjusted < Emin: + Etiny = context.Etiny() + if ans._exp < Etiny: + if not ans: + ans = Decimal(self) + ans._exp = Etiny + context._raise_error(Clamped) + return ans + ans = ans._rescale(Etiny, context=context) + #It isn't zero, and exp < Emin => subnormal + context._raise_error(Subnormal) + if context.flags[Inexact]: + context._raise_error(Underflow) + else: + if ans: + #Only raise subnormal if non-zero. + context._raise_error(Subnormal) + else: + Etop = context.Etop() + if folddown and ans._exp > Etop: + context._raise_error(Clamped) + ans = ans._rescale(Etop, context=context) + else: + Emax = context.Emax + if ans_adjusted > Emax: + if not ans: + ans = Decimal(self) + ans._exp = Emax + context._raise_error(Clamped) + return ans + context._raise_error(Inexact) + context._raise_error(Rounded) + return context._raise_error(Overflow, 'above Emax', ans._sign) + return ans + + def _round(self, prec=None, rounding=None, context=None): + """Returns a rounded version of self. + + You can specify the precision or rounding method. Otherwise, the + context determines it. + """ + + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + if self._isinfinity(): + return Decimal(self) + + if context is None: + context = getcontext() + + if rounding is None: + rounding = context.rounding + if prec is None: + prec = context.prec + + if not self: + if prec <= 0: + dig = (0,) + exp = len(self._int) - prec + self._exp + else: + dig = (0,) * prec + exp = len(self._int) + self._exp - prec + ans = Decimal((self._sign, dig, exp)) + context._raise_error(Rounded) + return ans + + if prec == 0: + temp = Decimal(self) + temp._int = (0,)+temp._int + prec = 1 + elif prec < 0: + exp = self._exp + len(self._int) - prec - 1 + temp = Decimal( (self._sign, (0, 1), exp)) + prec = 1 + else: + temp = Decimal(self) + + numdigits = len(temp._int) + if prec == numdigits: + return temp + + # See if we need to extend precision + expdiff = prec - numdigits + if expdiff > 0: + tmp = list(temp._int) + tmp.extend([0] * expdiff) + ans = Decimal( (temp._sign, tmp, temp._exp - expdiff)) + return ans + + #OK, but maybe all the lost digits are 0. 
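+        # If every digit that falls off is a zero the value is unchanged, so
+        # only Rounded is signalled (e.g. rounding 1.200 to two digits);
+        # otherwise the tail is handed to the rounding helper selected from
+        # _pick_rounding_function below and Inexact is signalled as well.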
+ lostdigits = self._int[expdiff:] + if lostdigits == (0,) * len(lostdigits): + ans = Decimal( (temp._sign, temp._int[:prec], temp._exp - expdiff)) + #Rounded, but not Inexact + context._raise_error(Rounded) + return ans + + # Okay, let's round and lose data + + this_function = getattr(temp, self._pick_rounding_function[rounding]) + #Now we've got the rounding function + + if prec != context.prec: + context = context._shallow_copy() + context.prec = prec + ans = this_function(prec, expdiff, context) + context._raise_error(Rounded) + context._raise_error(Inexact, 'Changed in rounding') + + return ans + + _pick_rounding_function = {} + + def _round_down(self, prec, expdiff, context): + """Also known as round-towards-0, truncate.""" + return Decimal( (self._sign, self._int[:prec], self._exp - expdiff) ) + + def _round_half_up(self, prec, expdiff, context, tmp = None): + """Rounds 5 up (away from 0)""" + + if tmp is None: + tmp = Decimal( (self._sign,self._int[:prec], self._exp - expdiff)) + if self._int[prec] >= 5: + tmp = tmp._increment(round=0, context=context) + if len(tmp._int) > prec: + return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1)) + return tmp + + def _round_half_even(self, prec, expdiff, context): + """Round 5 to even, rest to nearest.""" + + tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff)) + half = (self._int[prec] == 5) + if half: + for digit in self._int[prec+1:]: + if digit != 0: + half = 0 + break + if half: + if self._int[prec-1] & 1 == 0: + return tmp + return self._round_half_up(prec, expdiff, context, tmp) + + def _round_half_down(self, prec, expdiff, context): + """Round 5 down""" + + tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff)) + half = (self._int[prec] == 5) + if half: + for digit in self._int[prec+1:]: + if digit != 0: + half = 0 + break + if half: + return tmp + return self._round_half_up(prec, expdiff, context, tmp) + + def _round_up(self, prec, expdiff, context): + """Rounds away from 0.""" + tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff) ) + for digit in self._int[prec:]: + if digit != 0: + tmp = tmp._increment(round=1, context=context) + if len(tmp._int) > prec: + return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1)) + else: + return tmp + return tmp + + def _round_ceiling(self, prec, expdiff, context): + """Rounds up (not away from 0 if negative.)""" + if self._sign: + return self._round_down(prec, expdiff, context) + else: + return self._round_up(prec, expdiff, context) + + def _round_floor(self, prec, expdiff, context): + """Rounds down (not towards 0 if negative)""" + if not self._sign: + return self._round_down(prec, expdiff, context) + else: + return self._round_up(prec, expdiff, context) + + def __pow__(self, n, modulo = None, context=None): + """Return self ** n (mod modulo) + + If modulo is None (default), don't take it mod modulo. 
+ """ + n = _convert_other(n) + if n is NotImplemented: + return n + + if context is None: + context = getcontext() + + if self._is_special or n._is_special or n.adjusted() > 8: + #Because the spot << doesn't work with really big exponents + if n._isinfinity() or n.adjusted() > 8: + return context._raise_error(InvalidOperation, 'x ** INF') + + ans = self._check_nans(n, context) + if ans: + return ans + + if not n._isinteger(): + return context._raise_error(InvalidOperation, 'x ** (non-integer)') + + if not self and not n: + return context._raise_error(InvalidOperation, '0 ** 0') + + if not n: + return Decimal(1) + + if self == Decimal(1): + return Decimal(1) + + sign = self._sign and not n._iseven() + n = int(n) + + if self._isinfinity(): + if modulo: + return context._raise_error(InvalidOperation, 'INF % x') + if n > 0: + return Infsign[sign] + return Decimal( (sign, (0,), 0) ) + + #with ludicrously large exponent, just raise an overflow and return inf. + if not modulo and n > 0 and (self._exp + len(self._int) - 1) * n > context.Emax \ + and self: + + tmp = Decimal('inf') + tmp._sign = sign + context._raise_error(Rounded) + context._raise_error(Inexact) + context._raise_error(Overflow, 'Big power', sign) + return tmp + + elength = len(str(abs(n))) + firstprec = context.prec + + if not modulo and firstprec + elength + 1 > DefaultContext.Emax: + return context._raise_error(Overflow, 'Too much precision.', sign) + + mul = Decimal(self) + val = Decimal(1) + context = context._shallow_copy() + context.prec = firstprec + elength + 1 + if n < 0: + #n is a long now, not Decimal instance + n = -n + mul = Decimal(1).__div__(mul, context=context) + + spot = 1 + while spot <= n: + spot <<= 1 + + spot >>= 1 + #Spot is the highest power of 2 less than n + while spot: + val = val.__mul__(val, context=context) + if val._isinfinity(): + val = Infsign[sign] + break + if spot & n: + val = val.__mul__(mul, context=context) + if modulo is not None: + val = val.__mod__(modulo, context=context) + spot >>= 1 + context.prec = firstprec + + if context._rounding_decision == ALWAYS_ROUND: + return val._fix(context) + return val + + def __rpow__(self, other, context=None): + """Swaps self/other and returns __pow__.""" + other = _convert_other(other) + if other is NotImplemented: + return other + return other.__pow__(self, context=context) + + def normalize(self, context=None): + """Normalize- strip trailing 0s, change anything equal to 0 to 0e0""" + + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + dup = self._fix(context) + if dup._isinfinity(): + return dup + + if not dup: + return Decimal( (dup._sign, (0,), 0) ) + end = len(dup._int) + exp = dup._exp + while dup._int[end-1] == 0: + exp += 1 + end -= 1 + return Decimal( (dup._sign, dup._int[:end], exp) ) + + + def quantize(self, exp, rounding=None, context=None, watchexp=1): + """Quantize self so its exponent is the same as that of exp. + + Similar to self._rescale(exp._exp) but with error checking. + """ + if self._is_special or exp._is_special: + ans = self._check_nans(exp, context) + if ans: + return ans + + if exp._isinfinity() or self._isinfinity(): + if exp._isinfinity() and self._isinfinity(): + return self #if both are inf, it is OK + if context is None: + context = getcontext() + return context._raise_error(InvalidOperation, + 'quantize with one INF') + return self._rescale(exp._exp, rounding, context, watchexp) + + def same_quantum(self, other): + """Test whether self and other have the same exponent. 
+ + same as self._exp == other._exp, except NaN == sNaN + """ + if self._is_special or other._is_special: + if self._isnan() or other._isnan(): + return self._isnan() and other._isnan() and True + if self._isinfinity() or other._isinfinity(): + return self._isinfinity() and other._isinfinity() and True + return self._exp == other._exp + + def _rescale(self, exp, rounding=None, context=None, watchexp=1): + """Rescales so that the exponent is exp. + + exp = exp to scale to (an integer) + rounding = rounding version + watchexp: if set (default) an error is returned if exp is greater + than Emax or less than Etiny. + """ + if context is None: + context = getcontext() + + if self._is_special: + if self._isinfinity(): + return context._raise_error(InvalidOperation, 'rescale with an INF') + + ans = self._check_nans(context=context) + if ans: + return ans + + if watchexp and (context.Emax < exp or context.Etiny() > exp): + return context._raise_error(InvalidOperation, 'rescale(a, INF)') + + if not self: + ans = Decimal(self) + ans._int = (0,) + ans._exp = exp + return ans + + diff = self._exp - exp + digits = len(self._int) + diff + + if watchexp and digits > context.prec: + return context._raise_error(InvalidOperation, 'Rescale > prec') + + tmp = Decimal(self) + tmp._int = (0,) + tmp._int + digits += 1 + + if digits < 0: + tmp._exp = -digits + tmp._exp + tmp._int = (0,1) + digits = 1 + tmp = tmp._round(digits, rounding, context=context) + + if tmp._int[0] == 0 and len(tmp._int) > 1: + tmp._int = tmp._int[1:] + tmp._exp = exp + + tmp_adjusted = tmp.adjusted() + if tmp and tmp_adjusted < context.Emin: + context._raise_error(Subnormal) + elif tmp and tmp_adjusted > context.Emax: + return context._raise_error(InvalidOperation, 'rescale(a, INF)') + return tmp + + def to_integral(self, rounding=None, context=None): + """Rounds to the nearest integer, without raising inexact, rounded.""" + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + return self + if self._exp >= 0: + return self + if context is None: + context = getcontext() + flags = context._ignore_flags(Rounded, Inexact) + ans = self._rescale(0, rounding, context=context) + context._regard_flags(flags) + return ans + + def sqrt(self, context=None): + """Return the square root of self. + + Uses a converging algorithm (Xn+1 = 0.5*(Xn + self / Xn)) + Should quadratically approach the right answer. + """ + if self._is_special: + ans = self._check_nans(context=context) + if ans: + return ans + + if self._isinfinity() and self._sign == 0: + return Decimal(self) + + if not self: + #exponent = self._exp / 2, using round_down. 
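+            # (i.e. floor division of the exponent by two: a zero keeps its
+            # sign, so sqrt(Decimal("0E-6")) has exponent -3, printing as
+            # "0.000", and the square root of -0 stays -0)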
+ #if self._exp < 0: + # exp = (self._exp+1) // 2 + #else: + exp = (self._exp) // 2 + if self._sign == 1: + #sqrt(-0) = -0 + return Decimal( (1, (0,), exp)) + else: + return Decimal( (0, (0,), exp)) + + if context is None: + context = getcontext() + + if self._sign == 1: + return context._raise_error(InvalidOperation, 'sqrt(-x), x > 0') + + tmp = Decimal(self) + + expadd = tmp._exp // 2 + if tmp._exp & 1: + tmp._int += (0,) + tmp._exp = 0 + else: + tmp._exp = 0 + + context = context._shallow_copy() + flags = context._ignore_all_flags() + firstprec = context.prec + context.prec = 3 + if tmp.adjusted() & 1 == 0: + ans = Decimal( (0, (8,1,9), tmp.adjusted() - 2) ) + ans = ans.__add__(tmp.__mul__(Decimal((0, (2,5,9), -2)), + context=context), context=context) + ans._exp -= 1 + tmp.adjusted() // 2 + else: + ans = Decimal( (0, (2,5,9), tmp._exp + len(tmp._int)- 3) ) + ans = ans.__add__(tmp.__mul__(Decimal((0, (8,1,9), -3)), + context=context), context=context) + ans._exp -= 1 + tmp.adjusted() // 2 + + #ans is now a linear approximation. + + Emax, Emin = context.Emax, context.Emin + context.Emax, context.Emin = DefaultContext.Emax, DefaultContext.Emin + + half = Decimal('0.5') + + maxp = firstprec + 2 + rounding = context._set_rounding(ROUND_HALF_EVEN) + while 1: + context.prec = min(2*context.prec - 2, maxp) + ans = half.__mul__(ans.__add__(tmp.__div__(ans, context=context), + context=context), context=context) + if context.prec == maxp: + break + + #round to the answer's precision-- the only error can be 1 ulp. + context.prec = firstprec + prevexp = ans.adjusted() + ans = ans._round(context=context) + + #Now, check if the other last digits are better. + context.prec = firstprec + 1 + # In case we rounded up another digit and we should actually go lower. + if prevexp != ans.adjusted(): + ans._int += (0,) + ans._exp -= 1 + + + lower = ans.__sub__(Decimal((0, (5,), ans._exp-1)), context=context) + context._set_rounding(ROUND_UP) + if lower.__mul__(lower, context=context) > (tmp): + ans = ans.__sub__(Decimal((0, (1,), ans._exp)), context=context) + + else: + upper = ans.__add__(Decimal((0, (5,), ans._exp-1)),context=context) + context._set_rounding(ROUND_DOWN) + if upper.__mul__(upper, context=context) < tmp: + ans = ans.__add__(Decimal((0, (1,), ans._exp)),context=context) + + ans._exp += expadd + + context.prec = firstprec + context.rounding = rounding + ans = ans._fix(context) + + rounding = context._set_rounding_decision(NEVER_ROUND) + if not ans.__mul__(ans, context=context) == self: + # Only rounded/inexact if here. + context._regard_flags(flags) + context._raise_error(Rounded) + context._raise_error(Inexact) + else: + #Exact answer, so let's set the exponent right. + #if self._exp < 0: + # exp = (self._exp +1)// 2 + #else: + exp = self._exp // 2 + context.prec += ans._exp - exp + ans = ans._rescale(exp, context=context) + context.prec = firstprec + context._regard_flags(flags) + context.Emax, context.Emin = Emax, Emin + + return ans._fix(context) + + def max(self, other, context=None): + """Returns the larger value. + + like max(self, other) except if one is not a number, returns + NaN (and signals if one is sNaN). Also rounds. 
+ """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + # if one operand is a quiet NaN and the other is number, then the + # number is always returned + sn = self._isnan() + on = other._isnan() + if sn or on: + if on == 1 and sn != 2: + return self + if sn == 1 and on != 2: + return other + return self._check_nans(other, context) + + ans = self + c = self.__cmp__(other) + if c == 0: + # if both operands are finite and equal in numerical value + # then an ordering is applied: + # + # if the signs differ then max returns the operand with the + # positive sign and min returns the operand with the negative sign + # + # if the signs are the same then the exponent is used to select + # the result. + if self._sign != other._sign: + if self._sign: + ans = other + elif self._exp < other._exp and not self._sign: + ans = other + elif self._exp > other._exp and self._sign: + ans = other + elif c == -1: + ans = other + + if context is None: + context = getcontext() + if context._rounding_decision == ALWAYS_ROUND: + return ans._fix(context) + return ans + + def min(self, other, context=None): + """Returns the smaller value. + + like min(self, other) except if one is not a number, returns + NaN (and signals if one is sNaN). Also rounds. + """ + other = _convert_other(other) + if other is NotImplemented: + return other + + if self._is_special or other._is_special: + # if one operand is a quiet NaN and the other is number, then the + # number is always returned + sn = self._isnan() + on = other._isnan() + if sn or on: + if on == 1 and sn != 2: + return self + if sn == 1 and on != 2: + return other + return self._check_nans(other, context) + + ans = self + c = self.__cmp__(other) + if c == 0: + # if both operands are finite and equal in numerical value + # then an ordering is applied: + # + # if the signs differ then max returns the operand with the + # positive sign and min returns the operand with the negative sign + # + # if the signs are the same then the exponent is used to select + # the result. + if self._sign != other._sign: + if other._sign: + ans = other + elif self._exp > other._exp and not self._sign: + ans = other + elif self._exp < other._exp and self._sign: + ans = other + elif c == 1: + ans = other + + if context is None: + context = getcontext() + if context._rounding_decision == ALWAYS_ROUND: + return ans._fix(context) + return ans + + def _isinteger(self): + """Returns whether self is an integer""" + if self._exp >= 0: + return True + rest = self._int[self._exp:] + return rest == (0,)*len(rest) + + def _iseven(self): + """Returns 1 if self is even. 
Assumes self is an integer.""" + if self._exp > 0: + return 1 + return self._int[-1+self._exp] & 1 == 0 + + def adjusted(self): + """Return the adjusted exponent of self""" + try: + return self._exp + len(self._int) - 1 + #If NaN or Infinity, self._exp is string + except TypeError: + return 0 + + # support for pickling, copy, and deepcopy + def __reduce__(self): + return (self.__class__, (str(self),)) + + def __copy__(self): + if type(self) == Decimal: + return self # I'm immutable; therefore I am my own clone + return self.__class__(str(self)) + + def __deepcopy__(self, memo): + if type(self) == Decimal: + return self # My components are also immutable + return self.__class__(str(self)) + +##### Context class ########################################### + + +# get rounding method function: +rounding_functions = [name for name in Decimal.__dict__.keys() if name.startswith('_round_')] +for name in rounding_functions: + #name is like _round_half_even, goes to the global ROUND_HALF_EVEN value. + globalname = name[1:].upper() + val = globals()[globalname] + Decimal._pick_rounding_function[val] = name + +del name, val, globalname, rounding_functions + +class _ContextManager(object): + """Context manager class to support localcontext(). + + Sets a copy of the supplied context in __enter__() and restores + the previous decimal context in __exit__() + """ + def __init__(self, new_context): + self.new_context = new_context.copy() + def __enter__(self): + self.saved_context = getcontext() + setcontext(self.new_context) + return self.new_context + def __exit__(self, t, v, tb): + setcontext(self.saved_context) + +class Context(object): + """Contains the context for a Decimal instance. + + Contains: + prec - precision (for use in rounding, division, square roots..) + rounding - rounding type. (how you round) + _rounding_decision - ALWAYS_ROUND, NEVER_ROUND -- do you round? + traps - If traps[exception] = 1, then the exception is + raised when it is caused. Otherwise, a value is + substituted in. + flags - When an exception is caused, flags[exception] is incremented. + (Whether or not the trap_enabler is set) + Should be reset by user of Decimal instance. + Emin - Minimum exponent + Emax - Maximum exponent + capitals - If 1, 1*10^1 is printed as 1E+1. 
+ If 0, printed as 1e1 + _clamp - If 1, change exponents if too high (Default 0) + """ + + def __init__(self, prec=None, rounding=None, + traps=None, flags=None, + _rounding_decision=None, + Emin=None, Emax=None, + capitals=None, _clamp=0, + _ignored_flags=None): + if flags is None: + flags = [] + if _ignored_flags is None: + _ignored_flags = [] + if not isinstance(flags, dict): + flags = dict([(s,s in flags) for s in _signals]) + del s + if traps is not None and not isinstance(traps, dict): + traps = dict([(s,s in traps) for s in _signals]) + del s + for name, val in locals().items(): + if val is None: + setattr(self, name, _copy.copy(getattr(DefaultContext, name))) + else: + setattr(self, name, val) + del self.self + + def __repr__(self): + """Show the current context.""" + s = [] + s.append('Context(prec=%(prec)d, rounding=%(rounding)s, Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d' % vars(self)) + s.append('flags=[' + ', '.join([f.__name__ for f, v in self.flags.items() if v]) + ']') + s.append('traps=[' + ', '.join([t.__name__ for t, v in self.traps.items() if v]) + ']') + return ', '.join(s) + ')' + + def clear_flags(self): + """Reset all flags to zero""" + for flag in self.flags: + self.flags[flag] = 0 + + def _shallow_copy(self): + """Returns a shallow copy from self.""" + nc = Context(self.prec, self.rounding, self.traps, self.flags, + self._rounding_decision, self.Emin, self.Emax, + self.capitals, self._clamp, self._ignored_flags) + return nc + + def copy(self): + """Returns a deep copy from self.""" + nc = Context(self.prec, self.rounding, self.traps.copy(), self.flags.copy(), + self._rounding_decision, self.Emin, self.Emax, + self.capitals, self._clamp, self._ignored_flags) + return nc + __copy__ = copy + + def _raise_error(self, condition, explanation = None, *args): + """Handles an error + + If the flag is in _ignored_flags, returns the default response. + Otherwise, it increments the flag, then, if the corresponding + trap_enabler is set, it reaises the exception. Otherwise, it returns + the default value after incrementing the flag. + """ + error = _condition_map.get(condition, condition) + if error in self._ignored_flags: + #Don't touch the flag + return error().handle(self, *args) + + self.flags[error] += 1 + if not self.traps[error]: + #The errors define how to handle themselves. + return condition().handle(self, *args) + + # Errors should only be risked on copies of the context + #self._ignored_flags = [] + raise error, explanation + + def _ignore_all_flags(self): + """Ignore all flags, if they are raised""" + return self._ignore_flags(*_signals) + + def _ignore_flags(self, *flags): + """Ignore the flags, if they are raised""" + # Do not mutate-- This way, copies of a context leave the original + # alone. + self._ignored_flags = (self._ignored_flags + list(flags)) + return list(flags) + + def _regard_flags(self, *flags): + """Stop ignoring the flags, if they are raised""" + if flags and isinstance(flags[0], (tuple,list)): + flags = flags[0] + for flag in flags: + self._ignored_flags.remove(flag) + + def __hash__(self): + """A Context cannot be hashed.""" + # We inherit object.__hash__, so we must deny this explicitly + raise TypeError, "Cannot hash a Context." + + def Etiny(self): + """Returns Etiny (= Emin - prec + 1)""" + return int(self.Emin - self.prec + 1) + + def Etop(self): + """Returns maximum exponent (= Emax - prec + 1)""" + return int(self.Emax - self.prec + 1) + + def _set_rounding_decision(self, type): + """Sets the rounding decision. 
+ + Sets the rounding decision, and returns the current (previous) + rounding decision. Often used like: + + context = context._shallow_copy() + # That so you don't change the calling context + # if an error occurs in the middle (say DivisionImpossible is raised). + + rounding = context._set_rounding_decision(NEVER_ROUND) + instance = instance / Decimal(2) + context._set_rounding_decision(rounding) + + This will make it not round for that operation. + """ + + rounding = self._rounding_decision + self._rounding_decision = type + return rounding + + def _set_rounding(self, type): + """Sets the rounding type. + + Sets the rounding type, and returns the current (previous) + rounding type. Often used like: + + context = context.copy() + # so you don't change the calling context + # if an error occurs in the middle. + rounding = context._set_rounding(ROUND_UP) + val = self.__sub__(other, context=context) + context._set_rounding(rounding) + + This will make it round up for that operation. + """ + rounding = self.rounding + self.rounding= type + return rounding + + def create_decimal(self, num='0'): + """Creates a new Decimal instance but using self as context.""" + d = Decimal(num, context=self) + return d._fix(self) + + #Methods + def abs(self, a): + """Returns the absolute value of the operand. + + If the operand is negative, the result is the same as using the minus + operation on the operand. Otherwise, the result is the same as using + the plus operation on the operand. + + >>> ExtendedContext.abs(Decimal('2.1')) + Decimal("2.1") + >>> ExtendedContext.abs(Decimal('-100')) + Decimal("100") + >>> ExtendedContext.abs(Decimal('101.5')) + Decimal("101.5") + >>> ExtendedContext.abs(Decimal('-101.5')) + Decimal("101.5") + """ + return a.__abs__(context=self) + + def add(self, a, b): + """Return the sum of the two operands. + + >>> ExtendedContext.add(Decimal('12'), Decimal('7.00')) + Decimal("19.00") + >>> ExtendedContext.add(Decimal('1E+2'), Decimal('1.01E+4')) + Decimal("1.02E+4") + """ + return a.__add__(b, context=self) + + def _apply(self, a): + return str(a._fix(self)) + + def compare(self, a, b): + """Compares values numerically. + + If the signs of the operands differ, a value representing each operand + ('-1' if the operand is less than zero, '0' if the operand is zero or + negative zero, or '1' if the operand is greater than zero) is used in + place of that operand for the comparison instead of the actual + operand. + + The comparison is then effected by subtracting the second operand from + the first and then returning a value according to the result of the + subtraction: '-1' if the result is less than zero, '0' if the result is + zero or negative zero, or '1' if the result is greater than zero. + + >>> ExtendedContext.compare(Decimal('2.1'), Decimal('3')) + Decimal("-1") + >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.1')) + Decimal("0") + >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.10')) + Decimal("0") + >>> ExtendedContext.compare(Decimal('3'), Decimal('2.1')) + Decimal("1") + >>> ExtendedContext.compare(Decimal('2.1'), Decimal('-3')) + Decimal("1") + >>> ExtendedContext.compare(Decimal('-3'), Decimal('2.1')) + Decimal("-1") + """ + return a.compare(b, context=self) + + def divide(self, a, b): + """Decimal division in a specified context. 
+ + >>> ExtendedContext.divide(Decimal('1'), Decimal('3')) + Decimal("0.333333333") + >>> ExtendedContext.divide(Decimal('2'), Decimal('3')) + Decimal("0.666666667") + >>> ExtendedContext.divide(Decimal('5'), Decimal('2')) + Decimal("2.5") + >>> ExtendedContext.divide(Decimal('1'), Decimal('10')) + Decimal("0.1") + >>> ExtendedContext.divide(Decimal('12'), Decimal('12')) + Decimal("1") + >>> ExtendedContext.divide(Decimal('8.00'), Decimal('2')) + Decimal("4.00") + >>> ExtendedContext.divide(Decimal('2.400'), Decimal('2.0')) + Decimal("1.20") + >>> ExtendedContext.divide(Decimal('1000'), Decimal('100')) + Decimal("10") + >>> ExtendedContext.divide(Decimal('1000'), Decimal('1')) + Decimal("1000") + >>> ExtendedContext.divide(Decimal('2.40E+6'), Decimal('2')) + Decimal("1.20E+6") + """ + return a.__div__(b, context=self) + + def divide_int(self, a, b): + """Divides two numbers and returns the integer part of the result. + + >>> ExtendedContext.divide_int(Decimal('2'), Decimal('3')) + Decimal("0") + >>> ExtendedContext.divide_int(Decimal('10'), Decimal('3')) + Decimal("3") + >>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3')) + Decimal("3") + """ + return a.__floordiv__(b, context=self) + + def divmod(self, a, b): + return a.__divmod__(b, context=self) + + def max(self, a,b): + """max compares two values numerically and returns the maximum. + + If either operand is a NaN then the general rules apply. + Otherwise, the operands are compared as as though by the compare + operation. If they are numerically equal then the left-hand operand + is chosen as the result. Otherwise the maximum (closer to positive + infinity) of the two operands is chosen as the result. + + >>> ExtendedContext.max(Decimal('3'), Decimal('2')) + Decimal("3") + >>> ExtendedContext.max(Decimal('-10'), Decimal('3')) + Decimal("3") + >>> ExtendedContext.max(Decimal('1.0'), Decimal('1')) + Decimal("1") + >>> ExtendedContext.max(Decimal('7'), Decimal('NaN')) + Decimal("7") + """ + return a.max(b, context=self) + + def min(self, a,b): + """min compares two values numerically and returns the minimum. + + If either operand is a NaN then the general rules apply. + Otherwise, the operands are compared as as though by the compare + operation. If they are numerically equal then the left-hand operand + is chosen as the result. Otherwise the minimum (closer to negative + infinity) of the two operands is chosen as the result. + + >>> ExtendedContext.min(Decimal('3'), Decimal('2')) + Decimal("2") + >>> ExtendedContext.min(Decimal('-10'), Decimal('3')) + Decimal("-10") + >>> ExtendedContext.min(Decimal('1.0'), Decimal('1')) + Decimal("1.0") + >>> ExtendedContext.min(Decimal('7'), Decimal('NaN')) + Decimal("7") + """ + return a.min(b, context=self) + + def minus(self, a): + """Minus corresponds to unary prefix minus in Python. + + The operation is evaluated using the same rules as subtract; the + operation minus(a) is calculated as subtract('0', a) where the '0' + has the same exponent as the operand. + + >>> ExtendedContext.minus(Decimal('1.3')) + Decimal("-1.3") + >>> ExtendedContext.minus(Decimal('-1.3')) + Decimal("1.3") + """ + return a.__neg__(context=self) + + def multiply(self, a, b): + """multiply multiplies two operands. + + If either operand is a special value then the general rules apply. + Otherwise, the operands are multiplied together ('long multiplication'), + resulting in a number which may be as long as the sum of the lengths + of the two operands. 
+ + >>> ExtendedContext.multiply(Decimal('1.20'), Decimal('3')) + Decimal("3.60") + >>> ExtendedContext.multiply(Decimal('7'), Decimal('3')) + Decimal("21") + >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('0.8')) + Decimal("0.72") + >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('-0')) + Decimal("-0.0") + >>> ExtendedContext.multiply(Decimal('654321'), Decimal('654321')) + Decimal("4.28135971E+11") + """ + return a.__mul__(b, context=self) + + def normalize(self, a): + """normalize reduces an operand to its simplest form. + + Essentially a plus operation with all trailing zeros removed from the + result. + + >>> ExtendedContext.normalize(Decimal('2.1')) + Decimal("2.1") + >>> ExtendedContext.normalize(Decimal('-2.0')) + Decimal("-2") + >>> ExtendedContext.normalize(Decimal('1.200')) + Decimal("1.2") + >>> ExtendedContext.normalize(Decimal('-120')) + Decimal("-1.2E+2") + >>> ExtendedContext.normalize(Decimal('120.00')) + Decimal("1.2E+2") + >>> ExtendedContext.normalize(Decimal('0.00')) + Decimal("0") + """ + return a.normalize(context=self) + + def plus(self, a): + """Plus corresponds to unary prefix plus in Python. + + The operation is evaluated using the same rules as add; the + operation plus(a) is calculated as add('0', a) where the '0' + has the same exponent as the operand. + + >>> ExtendedContext.plus(Decimal('1.3')) + Decimal("1.3") + >>> ExtendedContext.plus(Decimal('-1.3')) + Decimal("-1.3") + """ + return a.__pos__(context=self) + + def power(self, a, b, modulo=None): + """Raises a to the power of b, to modulo if given. + + The right-hand operand must be a whole number whose integer part (after + any exponent has been applied) has no more than 9 digits and whose + fractional part (if any) is all zeros before any rounding. The operand + may be positive, negative, or zero; if negative, the absolute value of + the power is used, and the left-hand operand is inverted (divided into + 1) before use. + + If the increased precision needed for the intermediate calculations + exceeds the capabilities of the implementation then an Invalid operation + condition is raised. + + If, when raising to a negative power, an underflow occurs during the + division into 1, the operation is not halted at that point but + continues. + + >>> ExtendedContext.power(Decimal('2'), Decimal('3')) + Decimal("8") + >>> ExtendedContext.power(Decimal('2'), Decimal('-3')) + Decimal("0.125") + >>> ExtendedContext.power(Decimal('1.7'), Decimal('8')) + Decimal("69.7575744") + >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-2')) + Decimal("0") + >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-1')) + Decimal("0") + >>> ExtendedContext.power(Decimal('Infinity'), Decimal('0')) + Decimal("1") + >>> ExtendedContext.power(Decimal('Infinity'), Decimal('1')) + Decimal("Infinity") + >>> ExtendedContext.power(Decimal('Infinity'), Decimal('2')) + Decimal("Infinity") + >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-2')) + Decimal("0") + >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-1')) + Decimal("-0") + >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('0')) + Decimal("1") + >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('1')) + Decimal("-Infinity") + >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('2')) + Decimal("Infinity") + >>> ExtendedContext.power(Decimal('0'), Decimal('0')) + Decimal("NaN") + """ + return a.__pow__(b, modulo, context=self) + + def quantize(self, a, b): + """Returns a value equal to 'a' (rounded) and having the exponent of 'b'. 
+ + The coefficient of the result is derived from that of the left-hand + operand. It may be rounded using the current rounding setting (if the + exponent is being increased), multiplied by a positive power of ten (if + the exponent is being decreased), or is unchanged (if the exponent is + already equal to that of the right-hand operand). + + Unlike other operations, if the length of the coefficient after the + quantize operation would be greater than precision then an Invalid + operation condition is raised. This guarantees that, unless there is an + error condition, the exponent of the result of a quantize is always + equal to that of the right-hand operand. + + Also unlike other operations, quantize will never raise Underflow, even + if the result is subnormal and inexact. + + >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.001')) + Decimal("2.170") + >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.01')) + Decimal("2.17") + >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.1')) + Decimal("2.2") + >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+0')) + Decimal("2") + >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+1')) + Decimal("0E+1") + >>> ExtendedContext.quantize(Decimal('-Inf'), Decimal('Infinity')) + Decimal("-Infinity") + >>> ExtendedContext.quantize(Decimal('2'), Decimal('Infinity')) + Decimal("NaN") + >>> ExtendedContext.quantize(Decimal('-0.1'), Decimal('1')) + Decimal("-0") + >>> ExtendedContext.quantize(Decimal('-0'), Decimal('1e+5')) + Decimal("-0E+5") + >>> ExtendedContext.quantize(Decimal('+35236450.6'), Decimal('1e-2')) + Decimal("NaN") + >>> ExtendedContext.quantize(Decimal('-35236450.6'), Decimal('1e-2')) + Decimal("NaN") + >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-1')) + Decimal("217.0") + >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-0')) + Decimal("217") + >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+1')) + Decimal("2.2E+2") + >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+2')) + Decimal("2E+2") + """ + return a.quantize(b, context=self) + + def remainder(self, a, b): + """Returns the remainder from integer division. + + The result is the residue of the dividend after the operation of + calculating integer division as described for divide-integer, rounded to + precision digits if necessary. The sign of the result, if non-zero, is + the same as that of the original dividend. + + This operation will fail under the same conditions as integer division + (that is, if integer division on the same two operands would fail, the + remainder cannot be calculated). + + >>> ExtendedContext.remainder(Decimal('2.1'), Decimal('3')) + Decimal("2.1") + >>> ExtendedContext.remainder(Decimal('10'), Decimal('3')) + Decimal("1") + >>> ExtendedContext.remainder(Decimal('-10'), Decimal('3')) + Decimal("-1") + >>> ExtendedContext.remainder(Decimal('10.2'), Decimal('1')) + Decimal("0.2") + >>> ExtendedContext.remainder(Decimal('10'), Decimal('0.3')) + Decimal("0.1") + >>> ExtendedContext.remainder(Decimal('3.6'), Decimal('1.3')) + Decimal("1.0") + """ + return a.__mod__(b, context=self) + + def remainder_near(self, a, b): + """Returns to be "a - b * n", where n is the integer nearest the exact + value of "x / b" (if two integers are equally near then the even one + is chosen). If the result is equal to 0 then its sign will be the + sign of a. 
+ + This operation will fail under the same conditions as integer division + (that is, if integer division on the same two operands would fail, the + remainder cannot be calculated). + + >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3')) + Decimal("-0.9") + >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6')) + Decimal("-2") + >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('3')) + Decimal("1") + >>> ExtendedContext.remainder_near(Decimal('-10'), Decimal('3')) + Decimal("-1") + >>> ExtendedContext.remainder_near(Decimal('10.2'), Decimal('1')) + Decimal("0.2") + >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('0.3')) + Decimal("0.1") + >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3')) + Decimal("-0.3") + """ + return a.remainder_near(b, context=self) + + def same_quantum(self, a, b): + """Returns True if the two operands have the same exponent. + + The result is never affected by either the sign or the coefficient of + either operand. + + >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.001')) + False + >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.01')) + True + >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('1')) + False + >>> ExtendedContext.same_quantum(Decimal('Inf'), Decimal('-Inf')) + True + """ + return a.same_quantum(b) + + def sqrt(self, a): + """Returns the square root of a non-negative number to context precision. + + If the result must be inexact, it is rounded using the round-half-even + algorithm. + + >>> ExtendedContext.sqrt(Decimal('0')) + Decimal("0") + >>> ExtendedContext.sqrt(Decimal('-0')) + Decimal("-0") + >>> ExtendedContext.sqrt(Decimal('0.39')) + Decimal("0.624499800") + >>> ExtendedContext.sqrt(Decimal('100')) + Decimal("10") + >>> ExtendedContext.sqrt(Decimal('1')) + Decimal("1") + >>> ExtendedContext.sqrt(Decimal('1.0')) + Decimal("1.0") + >>> ExtendedContext.sqrt(Decimal('1.00')) + Decimal("1.0") + >>> ExtendedContext.sqrt(Decimal('7')) + Decimal("2.64575131") + >>> ExtendedContext.sqrt(Decimal('10')) + Decimal("3.16227766") + >>> ExtendedContext.prec + 9 + """ + return a.sqrt(context=self) + + def subtract(self, a, b): + """Return the difference between the two operands. + + >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.07')) + Decimal("0.23") + >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.30')) + Decimal("0.00") + >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('2.07')) + Decimal("-0.77") + """ + return a.__sub__(b, context=self) + + def to_eng_string(self, a): + """Converts a number to a string, using scientific notation. + + The operation is not affected by the context. + """ + return a.to_eng_string(context=self) + + def to_sci_string(self, a): + """Converts a number to a string, using scientific notation. + + The operation is not affected by the context. + """ + return a.__str__(context=self) + + def to_integral(self, a): + """Rounds to an integer. + + When the operand has a negative exponent, the result is the same + as using the quantize() operation using the given operand as the + left-hand-operand, 1E+0 as the right-hand-operand, and the precision + of the operand as the precision setting, except that no flags will + be set. The rounding mode is taken from the context. 
+ + >>> ExtendedContext.to_integral(Decimal('2.1')) + Decimal("2") + >>> ExtendedContext.to_integral(Decimal('100')) + Decimal("100") + >>> ExtendedContext.to_integral(Decimal('100.0')) + Decimal("100") + >>> ExtendedContext.to_integral(Decimal('101.5')) + Decimal("102") + >>> ExtendedContext.to_integral(Decimal('-101.5')) + Decimal("-102") + >>> ExtendedContext.to_integral(Decimal('10E+5')) + Decimal("1.0E+6") + >>> ExtendedContext.to_integral(Decimal('7.89E+77')) + Decimal("7.89E+77") + >>> ExtendedContext.to_integral(Decimal('-Inf')) + Decimal("-Infinity") + """ + return a.to_integral(context=self) + +class _WorkRep(object): + __slots__ = ('sign','int','exp') + # sign: 0 or 1 + # int: int or long + # exp: None, int, or string + + def __init__(self, value=None): + if value is None: + self.sign = None + self.int = 0 + self.exp = None + elif isinstance(value, Decimal): + self.sign = value._sign + cum = 0 + for digit in value._int: + cum = cum * 10 + digit + self.int = cum + self.exp = value._exp + else: + # assert isinstance(value, tuple) + self.sign = value[0] + self.int = value[1] + self.exp = value[2] + + def __repr__(self): + return "(%r, %r, %r)" % (self.sign, self.int, self.exp) + + __str__ = __repr__ + + + +def _normalize(op1, op2, shouldround = 0, prec = 0): + """Normalizes op1, op2 to have the same exp and length of coefficient. + + Done during addition. + """ + # Yes, the exponent is a long, but the difference between exponents + # must be an int-- otherwise you'd get a big memory problem. + numdigits = int(op1.exp - op2.exp) + if numdigits < 0: + numdigits = -numdigits + tmp = op2 + other = op1 + else: + tmp = op1 + other = op2 + + + if shouldround and numdigits > prec + 1: + # Big difference in exponents - check the adjusted exponents + tmp_len = len(str(tmp.int)) + other_len = len(str(other.int)) + if numdigits > (other_len + prec + 1 - tmp_len): + # If the difference in adjusted exps is > prec+1, we know + # other is insignificant, so might as well put a 1 after the precision. + # (since this is only for addition.) Also stops use of massive longs. + + extend = prec + 2 - tmp_len + if extend <= 0: + extend = 1 + tmp.int *= 10 ** extend + tmp.exp -= extend + other.int = 1 + other.exp = tmp.exp + return op1, op2 + + tmp.int *= 10 ** numdigits + tmp.exp -= numdigits + return op1, op2 + +def _adjust_coefficients(op1, op2): + """Adjust op1, op2 so that op2.int * 10 > op1.int >= op2.int. + + Returns the adjusted op1, op2 as well as the change in op1.exp-op2.exp. + + Used on _WorkRep instances during division. + """ + adjust = 0 + #If op1 is smaller, make it larger + while op2.int > op1.int: + op1.int *= 10 + op1.exp -= 1 + adjust += 1 + + #If op2 is too small, make it larger + while op1.int >= (10 * op2.int): + op2.int *= 10 + op2.exp -= 1 + adjust -= 1 + + return op1, op2, adjust + +##### Helper Functions ######################################## + +def _convert_other(other): + """Convert other to Decimal. + + Verifies that it's ok to use in an implicit construction. + """ + if isinstance(other, Decimal): + return other + if isinstance(other, (int, long)): + return Decimal(other) + return NotImplemented + +_infinity_map = { + 'inf' : 1, + 'infinity' : 1, + '+inf' : 1, + '+infinity' : 1, + '-inf' : -1, + '-infinity' : -1 +} + +def _isinfinity(num): + """Determines whether a string or float is infinity. 
+ + -1 for negative infinity; 0 for finite ; +1 for positive infinity + """ + num = str(num).lower() + return _infinity_map.get(num, 0) + +def _isnan(num): + """Determines whether a string or float is NaN + + (1, sign, diagnostic info as string) => NaN + (2, sign, diagnostic info as string) => sNaN + 0 => not a NaN + """ + num = str(num).lower() + if not num: + return 0 + + #get the sign, get rid of trailing [+-] + sign = 0 + if num[0] == '+': + num = num[1:] + elif num[0] == '-': #elif avoids '+-nan' + num = num[1:] + sign = 1 + + if num.startswith('nan'): + if len(num) > 3 and not num[3:].isdigit(): #diagnostic info + return 0 + return (1, sign, num[3:].lstrip('0')) + if num.startswith('snan'): + if len(num) > 4 and not num[4:].isdigit(): + return 0 + return (2, sign, num[4:].lstrip('0')) + return 0 + + +##### Setup Specific Contexts ################################ + +# The default context prototype used by Context() +# Is mutable, so that new contexts can have different default values + +DefaultContext = Context( + prec=28, rounding=ROUND_HALF_EVEN, + traps=[DivisionByZero, Overflow, InvalidOperation], + flags=[], + _rounding_decision=ALWAYS_ROUND, + Emax=999999999, + Emin=-999999999, + capitals=1 +) + +# Pre-made alternate contexts offered by the specification +# Don't change these; the user should be able to select these +# contexts and be able to reproduce results from other implementations +# of the spec. + +BasicContext = Context( + prec=9, rounding=ROUND_HALF_UP, + traps=[DivisionByZero, Overflow, InvalidOperation, Clamped, Underflow], + flags=[], +) + +ExtendedContext = Context( + prec=9, rounding=ROUND_HALF_EVEN, + traps=[], + flags=[], +) + + +##### Useful Constants (internal use only) #################### + +#Reusable defaults +Inf = Decimal('Inf') +negInf = Decimal('-Inf') + +#Infsign[sign] is infinity w/ that sign +Infsign = (Inf, negInf) + +NaN = Decimal('NaN') + + +##### crud for parsing strings ################################# +import re + +# There's an optional sign at the start, and an optional exponent +# at the end. The exponent has an optional sign and at least one +# digit. In between, must have either at least one digit followed +# by an optional fraction, or a decimal point followed by at least +# one digit. Yuck. + +_parser = re.compile(r""" +# \s* + (?P<sign>[-+])? + ( + (?P<int>\d+) (\. (?P<frac>\d*))? + | + \. (?P<onlyfrac>\d+) + ) + ([eE](?P<exp>[-+]? \d+))? +# \s* + $ +""", re.VERBOSE).match #Uncomment the \s* to allow leading or trailing spaces. + +del re + +# return sign, n, p s.t.
float string value == -1**sign * n * 10**p exactly + +def _string2exact(s): + m = _parser(s) + if m is None: + raise ValueError("invalid literal for Decimal: %r" % s) + + if m.group('sign') == "-": + sign = 1 + else: + sign = 0 + + exp = m.group('exp') + if exp is None: + exp = 0 + else: + exp = int(exp) + + intpart = m.group('int') + if intpart is None: + intpart = "" + fracpart = m.group('onlyfrac') + else: + fracpart = m.group('frac') + if fracpart is None: + fracpart = "" + + exp -= len(fracpart) + + mantissa = intpart + fracpart + tmp = map(int, mantissa) + backup = tmp + while tmp and tmp[0] == 0: + del tmp[0] + + # It's a zero + if not tmp: + if backup: + return (sign, tuple(backup), exp) + return (sign, (0,), exp) + mantissa = tuple(tmp) + + return (sign, mantissa, exp) + + +if __name__ == '__main__': + import doctest, sys + doctest.testmod(sys.modules[__name__]) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,2637 @@ +# Module doctest. +# Released to the public domain 16-Jan-2001, by Tim Peters (tim at python.org). +# Major enhancements and refactoring by: +# Jim Fulton +# Edward Loper + +# Provided as-is; use at your own risk; no warranty; no promises; enjoy! + +r"""Module doctest -- a framework for running examples in docstrings. + +In simplest use, end each module M to be tested with: + +def _test(): + import doctest + doctest.testmod() + +if __name__ == "__main__": + _test() + +Then running the module as a script will cause the examples in the +docstrings to get executed and verified: + +python M.py + +This won't display anything unless an example fails, in which case the +failing example(s) and the cause(s) of the failure(s) are printed to stdout +(why not stderr? because stderr is a lame hack <0.2 wink>), and the final +line of output is "Test failed.". + +Run it with the -v switch instead: + +python M.py -v + +and a detailed report of all examples tried is printed to stdout, along +with assorted summaries at the end. + +You can force verbose mode by passing "verbose=True" to testmod, or prohibit +it by passing "verbose=False". In either of those cases, sys.argv is not +examined by testmod. + +There are a variety of other ways to run doctests, including integration +with the unittest framework, and support for running non-Python text +files containing doctests. There are also many ways to override parts +of doctest's default behaviors. See the Library Reference Manual for +details. +""" + +__docformat__ = 'reStructuredText en' + +__all__ = [ + # 0, Option Flags + 'register_optionflag', + 'DONT_ACCEPT_TRUE_FOR_1', + 'DONT_ACCEPT_BLANKLINE', + 'NORMALIZE_WHITESPACE', + 'ELLIPSIS', + 'SKIP', + 'IGNORE_EXCEPTION_DETAIL', + 'COMPARISON_FLAGS', + 'REPORT_UDIFF', + 'REPORT_CDIFF', + 'REPORT_NDIFF', + 'REPORT_ONLY_FIRST_FAILURE', + 'REPORTING_FLAGS', + # 1. Utility Functions + # 2. Example & DocTest + 'Example', + 'DocTest', + # 3. Doctest Parser + 'DocTestParser', + # 4. Doctest Finder + 'DocTestFinder', + # 5. Doctest Runner + 'DocTestRunner', + 'OutputChecker', + 'DocTestFailure', + 'UnexpectedException', + 'DebugRunner', + # 6. Test Functions + 'testmod', + 'testfile', + 'run_docstring_examples', + # 7. Tester + 'Tester', + # 8. Unittest Support + 'DocTestSuite', + 'DocFileSuite', + 'set_unittest_reportflags', + # 9. 
Debugging Support + 'script_from_examples', + 'testsource', + 'debug_src', + 'debug', +] + +import __future__ + +import sys, traceback, inspect, linecache, os, re +import unittest, difflib, pdb, tempfile +import warnings +from StringIO import StringIO + +# There are 4 basic classes: +# - Example: a <source, want> pair, plus an intra-docstring line number. +# - DocTest: a collection of examples, parsed from a docstring, plus +# info about where the docstring came from (name, filename, lineno). +# - DocTestFinder: extracts DocTests from a given object's docstring and +# its contained objects' docstrings. +# - DocTestRunner: runs DocTest cases, and accumulates statistics. +# +# So the basic picture is: +# +# list of: +# +------+ +---------+ +-------+ +# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results| +# +------+ +---------+ +-------+ +# | Example | +# | ... | +# | Example | +# +---------+ + +# Option constants. + +OPTIONFLAGS_BY_NAME = {} +def register_optionflag(name): + # Create a new flag unless `name` is already known. + return OPTIONFLAGS_BY_NAME.setdefault(name, 1 << len(OPTIONFLAGS_BY_NAME)) + +DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1') +DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE') +NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE') +ELLIPSIS = register_optionflag('ELLIPSIS') +SKIP = register_optionflag('SKIP') +IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL') + +COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 | + DONT_ACCEPT_BLANKLINE | + NORMALIZE_WHITESPACE | + ELLIPSIS | + SKIP | + IGNORE_EXCEPTION_DETAIL) + +REPORT_UDIFF = register_optionflag('REPORT_UDIFF') +REPORT_CDIFF = register_optionflag('REPORT_CDIFF') +REPORT_NDIFF = register_optionflag('REPORT_NDIFF') +REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE') + +REPORTING_FLAGS = (REPORT_UDIFF | + REPORT_CDIFF | + REPORT_NDIFF | + REPORT_ONLY_FIRST_FAILURE) + +# Special string markers for use in `want` strings: +BLANKLINE_MARKER = '<BLANKLINE>' +ELLIPSIS_MARKER = '...' + +###################################################################### +## Table of Contents +###################################################################### +# 1. Utility Functions +# 2. Example & DocTest -- store test cases +# 3. DocTest Parser -- extracts examples from strings +# 4. DocTest Finder -- extracts test cases from objects +# 5. DocTest Runner -- runs test cases +# 6. Test Functions -- convenient wrappers for testing +# 7. Tester Class -- for backwards compatibility +# 8. Unittest Support +# 9. Debugging Support +# 10. Example Usage + +###################################################################### +## 1. Utility Functions +###################################################################### + +def _extract_future_flags(globs): + """ + Return the compiler-flags associated with the future features that + have been imported into the given namespace (globs). + """ + flags = 0 + for fname in __future__.all_feature_names: + feature = globs.get(fname, None) + if feature is getattr(__future__, fname): + flags |= feature.compiler_flag + return flags + +def _normalize_module(module, depth=2): + """ + Return the module specified by `module`. In particular: + - If `module` is a module, then return module. + - If `module` is a string, then import and return the + module with that name. + - If `module` is None, then return the calling module.
+ The calling module is assumed to be the module of + the stack frame at the given depth in the call stack. + """ + if inspect.ismodule(module): + return module + elif isinstance(module, (str, unicode)): + return __import__(module, globals(), locals(), ["*"]) + elif module is None: + return sys.modules[sys._getframe(depth).f_globals['__name__']] + else: + raise TypeError("Expected a module, string, or None") + +def _load_testfile(filename, package, module_relative): + if module_relative: + package = _normalize_module(package, 3) + filename = _module_relative_path(package, filename) + if hasattr(package, '__loader__'): + if hasattr(package.__loader__, 'get_data'): + return package.__loader__.get_data(filename), filename + return open(filename).read(), filename + +def _indent(s, indent=4): + """ + Add the given number of space characters to the beginning every + non-blank line in `s`, and return the result. + """ + # This regexp matches the start of non-blank lines: + return re.sub('(?m)^(?!$)', indent*' ', s) + +def _exception_traceback(exc_info): + """ + Return a string containing a traceback message for the given + exc_info tuple (as returned by sys.exc_info()). + """ + # Get a traceback message. + excout = StringIO() + exc_type, exc_val, exc_tb = exc_info + traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) + return excout.getvalue() + +# Override some StringIO methods. +class _SpoofOut(StringIO): + def getvalue(self): + result = StringIO.getvalue(self) + # If anything at all was written, make sure there's a trailing + # newline. There's no way for the expected output to indicate + # that a trailing newline is missing. + if result and not result.endswith("\n"): + result += "\n" + # Prevent softspace from screwing up the next test case, in + # case they used print with a trailing comma in an example. + if hasattr(self, "softspace"): + del self.softspace + return result + + def truncate(self, size=None): + StringIO.truncate(self, size) + if hasattr(self, "softspace"): + del self.softspace + +# Worst-case linear-time ellipsis matching. +def _ellipsis_match(want, got): + """ + Essentially the only subtle case: + >>> _ellipsis_match('aa...aa', 'aaa') + False + """ + if ELLIPSIS_MARKER not in want: + return want == got + + # Find "the real" strings. + ws = want.split(ELLIPSIS_MARKER) + assert len(ws) >= 2 + + # Deal with exact matches possibly needed at one or both ends. + startpos, endpos = 0, len(got) + w = ws[0] + if w: # starts with exact match + if got.startswith(w): + startpos = len(w) + del ws[0] + else: + return False + w = ws[-1] + if w: # ends with exact match + if got.endswith(w): + endpos -= len(w) + del ws[-1] + else: + return False + + if startpos > endpos: + # Exact end matches required more characters than we have, as in + # _ellipsis_match('aa...aa', 'aaa') + return False + + # For the rest, we only need to find the leftmost non-overlapping + # match for each piece. If there's no overall match that way alone, + # there's no overall match period. + for w in ws: + # w may be '' at times, if there are consecutive ellipses, or + # due to an ellipsis at the start or end of `want`. That's OK. + # Search for an empty string succeeds, and doesn't change startpos. 
+ startpos = got.find(w, startpos, endpos) + if startpos < 0: + return False + startpos += len(w) + + return True + +def _comment_line(line): + "Return a commented form of the given line" + line = line.rstrip() + if line: + return '# '+line + else: + return '#' + +class _OutputRedirectingPdb(pdb.Pdb): + """ + A specialized version of the python debugger that redirects stdout + to a given stream when interacting with the user. Stdout is *not* + redirected when traced code is executed. + """ + def __init__(self, out): + self.__out = out + pdb.Pdb.__init__(self, stdout=out) + + def trace_dispatch(self, *args): + # Redirect stdout to the given stream. + save_stdout = sys.stdout + sys.stdout = self.__out + # Call Pdb's trace dispatch method. + try: + return pdb.Pdb.trace_dispatch(self, *args) + finally: + sys.stdout = save_stdout + +# [XX] Normalize with respect to os.path.pardir? +def _module_relative_path(module, path): + if not inspect.ismodule(module): + raise TypeError, 'Expected a module: %r' % module + if path.startswith('/'): + raise ValueError, 'Module-relative files may not have absolute paths' + + # Find the base directory for the path. + if hasattr(module, '__file__'): + # A normal module/package + basedir = os.path.split(module.__file__)[0] + elif module.__name__ == '__main__': + # An interactive session. + if len(sys.argv)>0 and sys.argv[0] != '': + basedir = os.path.split(sys.argv[0])[0] + else: + basedir = os.curdir + else: + # A module w/o __file__ (this includes builtins) + raise ValueError("Can't resolve paths relative to the module " + + module + " (it has no __file__)") + + # Combine the base directory and the path. + return os.path.join(basedir, *(path.split('/'))) + +###################################################################### +## 2. Example & DocTest +###################################################################### +## - An "example" is a pair, where "source" is a +## fragment of source code, and "want" is the expected output for +## "source." The Example class also includes information about +## where the example was extracted from. +## +## - A "doctest" is a collection of examples, typically extracted from +## a string (such as an object's docstring). The DocTest class also +## includes information about where the string was extracted from. + +class Example: + """ + A single doctest example, consisting of source code and expected + output. `Example` defines the following attributes: + + - source: A single Python statement, always ending with a newline. + The constructor adds a newline if needed. + + - want: The expected output from running the source code (either + from stdout, or a traceback in case of exception). `want` ends + with a newline unless it's empty, in which case it's an empty + string. The constructor adds a newline if needed. + + - exc_msg: The exception message generated by the example, if + the example is expected to generate an exception; or `None` if + it is not expected to generate an exception. This exception + message is compared against the return value of + `traceback.format_exception_only()`. `exc_msg` ends with a + newline unless it's `None`. The constructor adds a newline + if needed. + + - lineno: The line number within the DocTest string containing + this Example where the Example begins. This line number is + zero-based, with respect to the beginning of the DocTest. + + - indent: The example's indentation in the DocTest string. + I.e., the number of space characters that preceed the + example's first prompt. 
+ + - options: A dictionary mapping from option flags to True or + False, which is used to override default options for this + example. Any option flags not contained in this dictionary + are left at their default value (as specified by the + DocTestRunner's optionflags). By default, no options are set. + """ + def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, + options=None): + # Normalize inputs. + if not source.endswith('\n'): + source += '\n' + if want and not want.endswith('\n'): + want += '\n' + if exc_msg is not None and not exc_msg.endswith('\n'): + exc_msg += '\n' + # Store properties. + self.source = source + self.want = want + self.lineno = lineno + self.indent = indent + if options is None: options = {} + self.options = options + self.exc_msg = exc_msg + +class DocTest: + """ + A collection of doctest examples that should be run in a single + namespace. Each `DocTest` defines the following attributes: + + - examples: the list of examples. + + - globs: The namespace (aka globals) that the examples should + be run in. + + - name: A name identifying the DocTest (typically, the name of + the object whose docstring this DocTest was extracted from). + + - filename: The name of the file that this DocTest was extracted + from, or `None` if the filename is unknown. + + - lineno: The line number within filename where this DocTest + begins, or `None` if the line number is unavailable. This + line number is zero-based, with respect to the beginning of + the file. + + - docstring: The string that the examples were extracted from, + or `None` if the string is unavailable. + """ + def __init__(self, examples, globs, name, filename, lineno, docstring): + """ + Create a new DocTest containing the given examples. The + DocTest's globals are initialized with a copy of `globs`. + """ + assert not isinstance(examples, basestring), \ + "DocTest no longer accepts str; use DocTestParser instead" + self.examples = examples + self.docstring = docstring + self.globs = globs.copy() + self.name = name + self.filename = filename + self.lineno = lineno + + def __repr__(self): + if len(self.examples) == 0: + examples = 'no examples' + elif len(self.examples) == 1: + examples = '1 example' + else: + examples = '%d examples' % len(self.examples) + return ('<DocTest %s from %s:%s (%s)>' % + (self.name, self.filename, self.lineno, examples)) + + + # This lets us sort tests by name: + def __cmp__(self, other): + if not isinstance(other, DocTest): + return -1 + return cmp((self.name, self.filename, self.lineno, id(self)), + (other.name, other.filename, other.lineno, id(other))) + +###################################################################### +## 3. DocTestParser +###################################################################### + +class DocTestParser: + """ + A class used to parse strings containing doctest examples. + """ + # This regular expression is used to find doctest examples in a + # string. It defines three groups: `source` is the source code + # (including leading indentation and prompts); `indent` is the + # indentation of the first (PS1) line of the source code; and + # `want` is the expected output (including leading indentation). + _EXAMPLE_RE = re.compile(r''' + # Source consists of a PS1 line followed by zero or more PS2 lines. + (?P<source> + (?:^(?P<indent> [ ]*) >>> .*) # PS1 line + (?:\n [ ]* \.\.\. .*)*) # PS2 lines + \n? + # Want consists of any non-blank lines that do not start with PS1. + (?P<want> (?:(?![ ]*$) # Not a blank line + (?![ ]*>>>) # Not a line starting with PS1 + .*$\n?
# But any other line + )*) + ''', re.MULTILINE | re.VERBOSE) + + # A regular expression for handling `want` strings that contain + # expected exceptions. It divides `want` into three pieces: + # - the traceback header line (`hdr`) + # - the traceback stack (`stack`) + # - the exception message (`msg`), as generated by + # traceback.format_exception_only() + # `msg` may have multiple lines. We assume/require that the + # exception message is the first non-indented line starting with a word + # character following the traceback header line. + _EXCEPTION_RE = re.compile(r""" + # Grab the traceback header. Different versions of Python have + # said different things on the first traceback line. + ^(?P<hdr> Traceback\ \( + (?: most\ recent\ call\ last + | innermost\ last + ) \) : + ) + \s* $ # toss trailing whitespace on the header. + (?P<stack> .*?) # don't blink: absorb stuff until... + ^ (?P<msg> \w+ .*) # a line *starts* with alphanum. + """, re.VERBOSE | re.MULTILINE | re.DOTALL) + + # A callable returning a true value iff its argument is a blank line + # or contains a single comment. + _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match + + def parse(self, string, name='<string>'): + """ + Divide the given string into examples and intervening text, + and return them as a list of alternating Examples and strings. + Line numbers for the Examples are 0-based. The optional + argument `name` is a name identifying this string, and is only + used for error messages. + """ + string = string.expandtabs() + # If all lines begin with the same indentation, then strip it. + min_indent = self._min_indent(string) + if min_indent > 0: + string = '\n'.join([l[min_indent:] for l in string.split('\n')]) + + output = [] + charno, lineno = 0, 0 + # Find all doctest examples in the string: + for m in self._EXAMPLE_RE.finditer(string): + # Add the pre-example text to `output`. + output.append(string[charno:m.start()]) + # Update lineno (lines before this example) + lineno += string.count('\n', charno, m.start()) + # Extract info from the regexp match. + (source, options, want, exc_msg) = \ + self._parse_example(m, name, lineno) + # Create an Example, and add it to the list. + if not self._IS_BLANK_OR_COMMENT(source): + output.append( Example(source, want, exc_msg, + lineno=lineno, + indent=min_indent+len(m.group('indent')), + options=options) ) + # Update lineno (lines inside this example) + lineno += string.count('\n', m.start(), m.end()) + # Update charno. + charno = m.end() + # Add any remaining post-example text to `output`. + output.append(string[charno:]) + return output + + def get_doctest(self, string, globs, name, filename, lineno): + """ + Extract all doctest examples from the given string, and + collect them into a `DocTest` object. + + `globs`, `name`, `filename`, and `lineno` are attributes for + the new `DocTest` object. See the documentation for `DocTest` + for more information. + """ + return DocTest(self.get_examples(string, name), globs, + name, filename, lineno, string) + + def get_examples(self, string, name='<string>'): + """ + Extract all doctest examples from the given string, and return + them as a list of `Example` objects. Line numbers are + 0-based, because it's most common in doctests that nothing + interesting appears on the same line as opening triple-quote, + and so the first interesting line is called \"line 1\" then. + + The optional argument `name` is a name identifying this + string, and is only used for error messages.
+ """ + return [x for x in self.parse(string, name) + if isinstance(x, Example)] + + def _parse_example(self, m, name, lineno): + """ + Given a regular expression match from `_EXAMPLE_RE` (`m`), + return a pair `(source, want)`, where `source` is the matched + example's source code (with prompts and indentation stripped); + and `want` is the example's expected output (with indentation + stripped). + + `name` is the string's name, and `lineno` is the line number + where the example starts; both are used for error messages. + """ + # Get the example's indentation level. + indent = len(m.group('indent')) + + # Divide source into lines; check that they're properly + # indented; and then strip their indentation & prompts. + source_lines = m.group('source').split('\n') + self._check_prompt_blank(source_lines, indent, name, lineno) + self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno) + source = '\n'.join([sl[indent+4:] for sl in source_lines]) + + # Divide want into lines; check that it's properly indented; and + # then strip the indentation. Spaces before the last newline should + # be preserved, so plain rstrip() isn't good enough. + want = m.group('want') + want_lines = want.split('\n') + if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): + del want_lines[-1] # forget final newline & spaces after it + self._check_prefix(want_lines, ' '*indent, name, + lineno + len(source_lines)) + want = '\n'.join([wl[indent:] for wl in want_lines]) + + # If `want` contains a traceback message, then extract it. + m = self._EXCEPTION_RE.match(want) + if m: + exc_msg = m.group('msg') + else: + exc_msg = None + + # Extract options from the source. + options = self._find_options(source, name, lineno) + + return source, options, want, exc_msg + + # This regular expression looks for option directives in the + # source code of an example. Option directives are comments + # starting with "doctest:". Warning: this may give false + # positives for string-literals that contain the string + # "#doctest:". Eliminating these false positives would require + # actually parsing the string; but we limit them by ignoring any + # line containing "#doctest:" that is *followed* by a quote mark. + _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$', + re.MULTILINE) + + def _find_options(self, source, name, lineno): + """ + Return a dictionary containing option overrides extracted from + option directives in the given source string. + + `name` is the string's name, and `lineno` is the line number + where the example starts; both are used for error messages. + """ + options = {} + # (note: with the current regexp, this will match at most once:) + for m in self._OPTION_DIRECTIVE_RE.finditer(source): + option_strings = m.group(1).replace(',', ' ').split() + for option in option_strings: + if (option[0] not in '+-' or + option[1:] not in OPTIONFLAGS_BY_NAME): + raise ValueError('line %r of the doctest for %s ' + 'has an invalid option: %r' % + (lineno+1, name, option)) + flag = OPTIONFLAGS_BY_NAME[option[1:]] + options[flag] = (option[0] == '+') + if options and self._IS_BLANK_OR_COMMENT(source): + raise ValueError('line %r of the doctest for %s has an option ' + 'directive on a line with no example: %r' % + (lineno, name, source)) + return options + + # This regular expression finds the indentation of every non-blank + # line in a string. 
+ _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE) + + def _min_indent(self, s): + "Return the minimum indentation of any non-blank line in `s`" + indents = [len(indent) for indent in self._INDENT_RE.findall(s)] + if len(indents) > 0: + return min(indents) + else: + return 0 + + def _check_prompt_blank(self, lines, indent, name, lineno): + """ + Given the lines of a source string (including prompts and + leading indentation), check to make sure that every prompt is + followed by a space character. If any line is not followed by + a space character, then raise ValueError. + """ + for i, line in enumerate(lines): + if len(line) >= indent+4 and line[indent+3] != ' ': + raise ValueError('line %r of the docstring for %s ' + 'lacks blank after %s: %r' % + (lineno+i+1, name, + line[indent:indent+3], line)) + + def _check_prefix(self, lines, prefix, name, lineno): + """ + Check that every line in the given list starts with the given + prefix; if any line does not, then raise a ValueError. + """ + for i, line in enumerate(lines): + if line and not line.startswith(prefix): + raise ValueError('line %r of the docstring for %s has ' + 'inconsistent leading whitespace: %r' % + (lineno+i+1, name, line)) + + +###################################################################### +## 4. DocTest Finder +###################################################################### + +class DocTestFinder: + """ + A class used to extract the DocTests that are relevant to a given + object, from its docstring and the docstrings of its contained + objects. Doctests can currently be extracted from the following + object types: modules, functions, classes, methods, staticmethods, + classmethods, and properties. + """ + + def __init__(self, verbose=False, parser=DocTestParser(), + recurse=True, exclude_empty=True): + """ + Create a new doctest finder. + + The optional argument `parser` specifies a class or + function that should be used to create new DocTest objects (or + objects that implement the same interface as DocTest). The + signature for this factory function should match the signature + of the DocTest constructor. + + If the optional argument `recurse` is false, then `find` will + only examine the given object, and not any contained objects. + + If the optional argument `exclude_empty` is false, then `find` + will include tests for objects with empty docstrings. + """ + self._parser = parser + self._verbose = verbose + self._recurse = recurse + self._exclude_empty = exclude_empty + + def find(self, obj, name=None, module=None, globs=None, extraglobs=None): + """ + Return a list of the DocTests that are defined by the given + object's docstring, or by any of its contained objects' + docstrings. + + The optional parameter `module` is the module that contains + the given object. If the module is not specified or is None, then + the test finder will attempt to automatically determine the + correct module. The object's module is used: + + - As a default namespace, if `globs` is not specified. + - To prevent the DocTestFinder from extracting DocTests + from objects that are imported from other modules. + - To find the name of the file containing the object. + - To help find the line number of the object within its + file. + + Contained objects whose module does not match `module` are ignored. + + If `module` is False, no attempt to find the module will be made. 
+ This is obscure, of use mostly in tests: if `module` is False, or + is None but cannot be found automatically, then all objects are + considered to belong to the (non-existent) module, so all contained + objects will (recursively) be searched for doctests. + + The globals for each DocTest is formed by combining `globs` + and `extraglobs` (bindings in `extraglobs` override bindings + in `globs`). A new copy of the globals dictionary is created + for each DocTest. If `globs` is not specified, then it + defaults to the module's `__dict__`, if specified, or {} + otherwise. If `extraglobs` is not specified, then it defaults + to {}. + + """ + # If name was not specified, then extract it from the object. + if name is None: + name = getattr(obj, '__name__', None) + if name is None: + raise ValueError("DocTestFinder.find: name must be given " + "when obj.__name__ doesn't exist: %r" % + (type(obj),)) + + # Find the module that contains the given object (if obj is + # a module, then module=obj.). Note: this may fail, in which + # case module will be None. + if module is False: + module = None + elif module is None: + module = inspect.getmodule(obj) + + # Read the module's source code. This is used by + # DocTestFinder._find_lineno to find the line number for a + # given object's docstring. + try: + file = inspect.getsourcefile(obj) or inspect.getfile(obj) + source_lines = linecache.getlines(file) + if not source_lines: + source_lines = None + except TypeError: + source_lines = None + + # Initialize globals, and merge in extraglobs. + if globs is None: + if module is None: + globs = {} + else: + globs = module.__dict__.copy() + else: + globs = globs.copy() + if extraglobs is not None: + globs.update(extraglobs) + + # Recursively expore `obj`, extracting DocTests. + tests = [] + self._find(tests, obj, name, module, source_lines, globs, {}) + # Sort the tests by alpha order of names, for consistency in + # verbose-mode output. This was a feature of doctest in Pythons + # <= 2.3 that got lost by accident in 2.4. It was repaired in + # 2.4.4 and 2.5. + tests.sort() + return tests + + def _from_module(self, module, object): + """ + Return true if the given object is defined in the given + module. + """ + if module is None: + return True + elif inspect.isfunction(object): + return module.__dict__ is object.func_globals + elif inspect.isclass(object): + return module.__name__ == object.__module__ + elif isinstance(object, property): + return True # [XX] no way not be sure. + elif inspect.getmodule(object) is not None: + return module is inspect.getmodule(object) + elif hasattr(object, '__module__'): + return module.__name__ == object.__module__ + else: + raise ValueError("object must be a class or function") + + def _find(self, tests, obj, name, module, source_lines, globs, seen): + """ + Find tests for the given object and any contained objects, and + add them to `tests`. + """ + if self._verbose: + print 'Finding tests in %s' % name + + # If we've already processed this object, then ignore it. + if id(obj) in seen: + return + seen[id(obj)] = 1 + + # Find a test for this object, and add it to the list of tests. + test = self._get_test(obj, name, module, globs, source_lines) + if test is not None: + tests.append(test) + + # Look for tests in a module's contained objects. + if inspect.ismodule(obj) and self._recurse: + for valname, val in obj.__dict__.items(): + valname = '%s.%s' % (name, valname) + # Recurse to functions & classes. 
+ if ((inspect.isfunction(val) or inspect.isclass(val)) and + self._from_module(module, val)): + self._find(tests, val, valname, module, source_lines, + globs, seen) + + # Look for tests in a module's __test__ dictionary. + if inspect.ismodule(obj) and self._recurse: + for valname, val in getattr(obj, '__test__', {}).items(): + if not isinstance(valname, basestring): + raise ValueError("DocTestFinder.find: __test__ keys " + "must be strings: %r" % + (type(valname),)) + if not (inspect.isfunction(val) or inspect.isclass(val) or + inspect.ismethod(val) or inspect.ismodule(val) or + isinstance(val, basestring)): + raise ValueError("DocTestFinder.find: __test__ values " + "must be strings, functions, methods, " + "classes, or modules: %r" % + (type(val),)) + valname = '%s.__test__.%s' % (name, valname) + self._find(tests, val, valname, module, source_lines, + globs, seen) + + # Look for tests in a class's contained objects. + if inspect.isclass(obj) and self._recurse: + for valname, val in obj.__dict__.items(): + # Special handling for staticmethod/classmethod. + if isinstance(val, staticmethod): + val = getattr(obj, valname) + if isinstance(val, classmethod): + val = getattr(obj, valname).im_func + + # Recurse to methods, properties, and nested classes. + if ((inspect.isfunction(val) or inspect.isclass(val) or + isinstance(val, property)) and + self._from_module(module, val)): + valname = '%s.%s' % (name, valname) + self._find(tests, val, valname, module, source_lines, + globs, seen) + + def _get_test(self, obj, name, module, globs, source_lines): + """ + Return a DocTest for the given object, if it defines a docstring; + otherwise, return None. + """ + # Extract the object's docstring. If it doesn't have one, + # then return None (no test for this object). + if isinstance(obj, basestring): + docstring = obj + else: + try: + if obj.__doc__ is None: + docstring = '' + else: + docstring = obj.__doc__ + if not isinstance(docstring, basestring): + docstring = str(docstring) + except (TypeError, AttributeError): + docstring = '' + + # Find the docstring's location in the file. + lineno = self._find_lineno(obj, source_lines) + + # Don't bother if the docstring is empty. + if self._exclude_empty and not docstring: + return None + + # Return a DocTest for this object. + if module is None: + filename = None + else: + filename = getattr(module, '__file__', module.__name__) + if filename[-4:] in (".pyc", ".pyo"): + filename = filename[:-1] + return self._parser.get_doctest(docstring, globs, name, + filename, lineno) + + def _find_lineno(self, obj, source_lines): + """ + Return a line number of the given object's docstring. Note: + this method assumes that the object has a docstring. + """ + lineno = None + + # Find the line number for modules. + if inspect.ismodule(obj): + lineno = 0 + + # Find the line number for classes. + # Note: this could be fooled if a class is defined multiple + # times in a single file. + if inspect.isclass(obj): + if source_lines is None: + return None + pat = re.compile(r'^\s*class\s*%s\b' % + getattr(obj, '__name__', '-')) + for i, line in enumerate(source_lines): + if pat.match(line): + lineno = i + break + + # Find the line number for functions & methods. + if inspect.ismethod(obj): obj = obj.im_func + if inspect.isfunction(obj): obj = obj.func_code + if inspect.istraceback(obj): obj = obj.tb_frame + if inspect.isframe(obj): obj = obj.f_code + if inspect.iscode(obj): + lineno = getattr(obj, 'co_firstlineno', None)-1 + + # Find the line number where the docstring starts. 
Assume + # that it's the first line that begins with a quote mark. + # Note: this could be fooled by a multiline function + # signature, where a continuation line begins with a quote + # mark. + if lineno is not None: + if source_lines is None: + return lineno+1 + pat = re.compile('(^|.*:)\s*\w*("|\')') + for lineno in range(lineno, len(source_lines)): + if pat.match(source_lines[lineno]): + return lineno + + # We couldn't find the line number. + return None + +###################################################################### +## 5. DocTest Runner +###################################################################### + +class DocTestRunner: + """ + A class used to run DocTest test cases, and accumulate statistics. + The `run` method is used to process a single DocTest case. It + returns a tuple `(f, t)`, where `t` is the number of test cases + tried, and `f` is the number of test cases that failed. + + >>> tests = DocTestFinder().find(_TestClass) + >>> runner = DocTestRunner(verbose=False) + >>> tests.sort(key = lambda test: test.name) + >>> for test in tests: + ... print test.name, '->', runner.run(test) + _TestClass -> (0, 2) + _TestClass.__init__ -> (0, 2) + _TestClass.get -> (0, 2) + _TestClass.square -> (0, 1) + + The `summarize` method prints a summary of all the test cases that + have been run by the runner, and returns an aggregated `(f, t)` + tuple: + + >>> runner.summarize(verbose=1) + 4 items passed all tests: + 2 tests in _TestClass + 2 tests in _TestClass.__init__ + 2 tests in _TestClass.get + 1 tests in _TestClass.square + 7 tests in 4 items. + 7 passed and 0 failed. + Test passed. + (0, 7) + + The aggregated number of tried examples and failed examples is + also available via the `tries` and `failures` attributes: + + >>> runner.tries + 7 + >>> runner.failures + 0 + + The comparison between expected outputs and actual outputs is done + by an `OutputChecker`. This comparison may be customized with a + number of option flags; see the documentation for `testmod` for + more information. If the option flags are insufficient, then the + comparison may also be customized by passing a subclass of + `OutputChecker` to the constructor. + + The test runner's display output can be controlled in two ways. + First, an output function (`out) can be passed to + `TestRunner.run`; this function will be called with strings that + should be displayed. It defaults to `sys.stdout.write`. If + capturing the output is not sufficient, then the display output + can be also customized by subclassing DocTestRunner, and + overriding the methods `report_start`, `report_success`, + `report_unexpected_exception`, and `report_failure`. + """ + # This divider string is used to separate failure messages, and to + # separate sections of the summary. + DIVIDER = "*" * 70 + + def __init__(self, checker=None, verbose=None, optionflags=0): + """ + Create a new test runner. + + Optional keyword arg `checker` is the `OutputChecker` that + should be used to compare the expected outputs and actual + outputs of doctest examples. + + Optional keyword arg 'verbose' prints lots of stuff if true, + only failures if false; by default, it's true iff '-v' is in + sys.argv. + + Optional argument `optionflags` can be used to control how the + test runner compares expected output to actual output, and how + it displays failures. See the documentation for `testmod` for + more information. 
+ """ + self._checker = checker or OutputChecker() + if verbose is None: + verbose = '-v' in sys.argv + self._verbose = verbose + self.optionflags = optionflags + self.original_optionflags = optionflags + + # Keep track of the examples we've run. + self.tries = 0 + self.failures = 0 + self._name2ft = {} + + # Create a fake output target for capturing doctest output. + self._fakeout = _SpoofOut() + + #///////////////////////////////////////////////////////////////// + # Reporting methods + #///////////////////////////////////////////////////////////////// + + def report_start(self, out, test, example): + """ + Report that the test runner is about to process the given + example. (Only displays a message if verbose=True) + """ + if self._verbose: + if example.want: + out('Trying:\n' + _indent(example.source) + + 'Expecting:\n' + _indent(example.want)) + else: + out('Trying:\n' + _indent(example.source) + + 'Expecting nothing\n') + + def report_success(self, out, test, example, got): + """ + Report that the given example ran successfully. (Only + displays a message if verbose=True) + """ + if self._verbose: + out("ok\n") + + def report_failure(self, out, test, example, got): + """ + Report that the given example failed. + """ + out(self._failure_header(test, example) + + self._checker.output_difference(example, got, self.optionflags)) + + def report_unexpected_exception(self, out, test, example, exc_info): + """ + Report that the given example raised an unexpected exception. + """ + out(self._failure_header(test, example) + + 'Exception raised:\n' + _indent(_exception_traceback(exc_info))) + + def _failure_header(self, test, example): + out = [self.DIVIDER] + if test.filename: + if test.lineno is not None and example.lineno is not None: + lineno = test.lineno + example.lineno + 1 + else: + lineno = '?' + out.append('File "%s", line %s, in %s' % + (test.filename, lineno, test.name)) + else: + out.append('Line %s, in %s' % (example.lineno+1, test.name)) + out.append('Failed example:') + source = example.source + out.append(_indent(source)) + return '\n'.join(out) + + #///////////////////////////////////////////////////////////////// + # DocTest Running + #///////////////////////////////////////////////////////////////// + + def __run(self, test, compileflags, out): + """ + Run the examples in `test`. Write the outcome of each example + with one of the `DocTestRunner.report_*` methods, using the + writer function `out`. `compileflags` is the set of compiler + flags that should be used to execute examples. Return a tuple + `(f, t)`, where `t` is the number of examples tried, and `f` + is the number of examples that failed. The examples are run + in the namespace `test.globs`. + """ + # Keep track of the number of failures and tries. + failures = tries = 0 + + # Save the option flags (since option directives can be used + # to modify them). + original_optionflags = self.optionflags + + SUCCESS, FAILURE, BOOM = range(3) # `outcome` state + + check = self._checker.check_output + + # Process each example. + for examplenum, example in enumerate(test.examples): + + # If REPORT_ONLY_FIRST_FAILURE is set, then supress + # reporting after the first failure. + quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and + failures > 0) + + # Merge in the example's options. 
+ self.optionflags = original_optionflags + if example.options: + for (optionflag, val) in example.options.items(): + if val: + self.optionflags |= optionflag + else: + self.optionflags &= ~optionflag + + # If 'SKIP' is set, then skip this example. + if self.optionflags & SKIP: + continue + + # Record that we started this example. + tries += 1 + if not quiet: + self.report_start(out, test, example) + + # Use a special filename for compile(), so we can retrieve + # the source code during interactive debugging (see + # __patched_linecache_getlines). + filename = '<doctest %s[%d]>' % (test.name, examplenum) + + # Run the example in the given context (globs), and record + # any exception that gets raised. (But don't intercept + # keyboard interrupts.) + try: + # Don't blink! This is where the user's code gets run. + exec compile(example.source, filename, "single", + compileflags, 1) in test.globs + self.debugger.set_continue() # ==== Example Finished ==== + exception = None + except KeyboardInterrupt: + raise + except: + exception = sys.exc_info() + self.debugger.set_continue() # ==== Example Finished ==== + + got = self._fakeout.getvalue() # the actual output + self._fakeout.truncate(0) + outcome = FAILURE # guilty until proved innocent or insane + + # If the example executed without raising any exceptions, + # verify its output. + if exception is None: + if check(example.want, got, self.optionflags): + outcome = SUCCESS + + # The example raised an exception: check if it was expected. + else: + exc_info = sys.exc_info() + exc_msg = traceback.format_exception_only(*exc_info[:2])[-1] + if not quiet: + got += _exception_traceback(exc_info) + + # If `example.exc_msg` is None, then we weren't expecting + # an exception. + if example.exc_msg is None: + outcome = BOOM + + # We expected an exception: see whether it matches. + elif check(example.exc_msg, exc_msg, self.optionflags): + outcome = SUCCESS + + # Another chance if they didn't care about the detail. + elif self.optionflags & IGNORE_EXCEPTION_DETAIL: + m1 = re.match(r'[^:]*:', example.exc_msg) + m2 = re.match(r'[^:]*:', exc_msg) + if m1 and m2 and check(m1.group(0), m2.group(0), + self.optionflags): + outcome = SUCCESS + + # Report the outcome. + if outcome is SUCCESS: + if not quiet: + self.report_success(out, test, example, got) + elif outcome is FAILURE: + if not quiet: + self.report_failure(out, test, example, got) + failures += 1 + elif outcome is BOOM: + if not quiet: + self.report_unexpected_exception(out, test, example, + exc_info) + failures += 1 + else: + assert False, ("unknown outcome", outcome) + + # Restore the option flags (in case they were modified) + self.optionflags = original_optionflags + + # Record and return the number of failures and tries. + self.__record_outcome(test, failures, tries) + return failures, tries + + def __record_outcome(self, test, f, t): + """ + Record the fact that the given DocTest (`test`) generated `f` + failures out of `t` tried examples.
+ """ + f2, t2 = self._name2ft.get(test.name, (0,0)) + self._name2ft[test.name] = (f+f2, t+t2) + self.failures += f + self.tries += t + + __LINECACHE_FILENAME_RE = re.compile(r'[\w\.]+)' + r'\[(?P\d+)\]>$') + def __patched_linecache_getlines(self, filename, module_globals=None): + m = self.__LINECACHE_FILENAME_RE.match(filename) + if m and m.group('name') == self.test.name: + example = self.test.examples[int(m.group('examplenum'))] + return example.source.splitlines(True) + else: + return self.save_linecache_getlines(filename, module_globals) + + def run(self, test, compileflags=None, out=None, clear_globs=True): + """ + Run the examples in `test`, and display the results using the + writer function `out`. + + The examples are run in the namespace `test.globs`. If + `clear_globs` is true (the default), then this namespace will + be cleared after the test runs, to help with garbage + collection. If you would like to examine the namespace after + the test completes, then use `clear_globs=False`. + + `compileflags` gives the set of flags that should be used by + the Python compiler when running the examples. If not + specified, then it will default to the set of future-import + flags that apply to `globs`. + + The output of each example is checked using + `DocTestRunner.check_output`, and the results are formatted by + the `DocTestRunner.report_*` methods. + """ + self.test = test + + if compileflags is None: + compileflags = _extract_future_flags(test.globs) + + save_stdout = sys.stdout + if out is None: + out = save_stdout.write + sys.stdout = self._fakeout + + # Patch pdb.set_trace to restore sys.stdout during interactive + # debugging (so it's not still redirected to self._fakeout). + # Note that the interactive output will go to *our* + # save_stdout, even if that's not the real sys.stdout; this + # allows us to write test cases for the set_trace behavior. + save_set_trace = pdb.set_trace + self.debugger = _OutputRedirectingPdb(save_stdout) + self.debugger.reset() + pdb.set_trace = self.debugger.set_trace + + # Patch linecache.getlines, so we can see the example's source + # when we're inside the debugger. + self.save_linecache_getlines = linecache.getlines + linecache.getlines = self.__patched_linecache_getlines + + try: + return self.__run(test, compileflags, out) + finally: + sys.stdout = save_stdout + pdb.set_trace = save_set_trace + linecache.getlines = self.save_linecache_getlines + if clear_globs: + test.globs.clear() + + #///////////////////////////////////////////////////////////////// + # Summarization + #///////////////////////////////////////////////////////////////// + def summarize(self, verbose=None): + """ + Print a summary of all the test cases that have been run by + this DocTestRunner, and return a tuple `(f, t)`, where `f` is + the total number of failed examples, and `t` is the total + number of tried examples. + + The optional `verbose` argument controls how detailed the + summary is. If the verbosity is not specified, then the + DocTestRunner's verbosity is used. 
+ """ + if verbose is None: + verbose = self._verbose + notests = [] + passed = [] + failed = [] + totalt = totalf = 0 + for x in self._name2ft.items(): + name, (f, t) = x + assert f <= t + totalt += t + totalf += f + if t == 0: + notests.append(name) + elif f == 0: + passed.append( (name, t) ) + else: + failed.append(x) + if verbose: + if notests: + print len(notests), "items had no tests:" + notests.sort() + for thing in notests: + print " ", thing + if passed: + print len(passed), "items passed all tests:" + passed.sort() + for thing, count in passed: + print " %3d tests in %s" % (count, thing) + if failed: + print self.DIVIDER + print len(failed), "items had failures:" + failed.sort() + for thing, (f, t) in failed: + print " %3d of %3d in %s" % (f, t, thing) + if verbose: + print totalt, "tests in", len(self._name2ft), "items." + print totalt - totalf, "passed and", totalf, "failed." + if totalf: + print "***Test Failed***", totalf, "failures." + elif verbose: + print "Test passed." + return totalf, totalt + + #///////////////////////////////////////////////////////////////// + # Backward compatibility cruft to maintain doctest.master. + #///////////////////////////////////////////////////////////////// + def merge(self, other): + d = self._name2ft + for name, (f, t) in other._name2ft.items(): + if name in d: + print "*** DocTestRunner.merge: '" + name + "' in both" \ + " testers; summing outcomes." + f2, t2 = d[name] + f = f + f2 + t = t + t2 + d[name] = f, t + +class OutputChecker: + """ + A class used to check the whether the actual output from a doctest + example matches the expected output. `OutputChecker` defines two + methods: `check_output`, which compares a given pair of outputs, + and returns true if they match; and `output_difference`, which + returns a string describing the differences between two outputs. + """ + def check_output(self, want, got, optionflags): + """ + Return True iff the actual output from an example (`got`) + matches the expected output (`want`). These strings are + always considered to match if they are identical; but + depending on what option flags the test runner is using, + several non-exact match types are also possible. See the + documentation for `TestRunner` for more information about + option flags. + """ + # Handle the common case first, for efficiency: + # if they're string-identical, always return true. + if got == want: + return True + + # The values True and False replaced 1 and 0 as the return + # value for boolean comparisons in Python 2.3. + if not (optionflags & DONT_ACCEPT_TRUE_FOR_1): + if (got,want) == ("True\n", "1\n"): + return True + if (got,want) == ("False\n", "0\n"): + return True + + # can be used as a special sequence to signify a + # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. + if not (optionflags & DONT_ACCEPT_BLANKLINE): + # Replace in want with a blank line. + want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER), + '', want) + # If a line in got contains only spaces, then remove the + # spaces. + got = re.sub('(?m)^\s*?$', '', got) + if got == want: + return True + + # This flag causes doctest to ignore any differences in the + # contents of whitespace strings. Note that this can be used + # in conjunction with the ELLIPSIS flag. + if optionflags & NORMALIZE_WHITESPACE: + got = ' '.join(got.split()) + want = ' '.join(want.split()) + if got == want: + return True + + # The ELLIPSIS flag says to let the sequence "..." in `want` + # match any substring in `got`. 
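
The option-flag handling shown in check_output() can be exercised directly on an OutputChecker instance. A minimal sketch, assuming the stdlib doctest module is importable; the literal strings are made up for illustration:

    import doctest
    checker = doctest.OutputChecker()
    # Extra whitespace fails with no flags set ...
    print checker.check_output("1 2 3\n", "1   2 3\n", 0)
    # ... but passes once NORMALIZE_WHITESPACE is given.
    print checker.check_output("1 2 3\n", "1   2 3\n",
                               doctest.NORMALIZE_WHITESPACE)
    # ELLIPSIS lets "..." in the expected output stand for any substring.
    print checker.check_output("[0, 1, ..., 9]\n",
                               "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n",
                               doctest.ELLIPSIS)
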
+ if optionflags & ELLIPSIS: + if _ellipsis_match(want, got): + return True + + # We didn't find any match; return false. + return False + + # Should we do a fancy diff? + def _do_a_fancy_diff(self, want, got, optionflags): + # Not unless they asked for a fancy diff. + if not optionflags & (REPORT_UDIFF | + REPORT_CDIFF | + REPORT_NDIFF): + return False + + # If expected output uses ellipsis, a meaningful fancy diff is + # too hard ... or maybe not. In two real-life failures Tim saw, + # a diff was a major help anyway, so this is commented out. + # [todo] _ellipsis_match() knows which pieces do and don't match, + # and could be the basis for a kick-ass diff in this case. + ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want: + ## return False + + # ndiff does intraline difference marking, so can be useful even + # for 1-line differences. + if optionflags & REPORT_NDIFF: + return True + + # The other diff types need at least a few lines to be helpful. + return want.count('\n') > 2 and got.count('\n') > 2 + + def output_difference(self, example, got, optionflags): + """ + Return a string describing the differences between the + expected output for a given example (`example`) and the actual + output (`got`). `optionflags` is the set of option flags used + to compare `want` and `got`. + """ + want = example.want + # If s are being used, then replace blank lines + # with in the actual output string. + if not (optionflags & DONT_ACCEPT_BLANKLINE): + got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got) + + # Check if we should use diff. + if self._do_a_fancy_diff(want, got, optionflags): + # Split want & got into lines. + want_lines = want.splitlines(True) # True == keep line ends + got_lines = got.splitlines(True) + # Use difflib to find their differences. + if optionflags & REPORT_UDIFF: + diff = difflib.unified_diff(want_lines, got_lines, n=2) + diff = list(diff)[2:] # strip the diff header + kind = 'unified diff with -expected +actual' + elif optionflags & REPORT_CDIFF: + diff = difflib.context_diff(want_lines, got_lines, n=2) + diff = list(diff)[2:] # strip the diff header + kind = 'context diff with expected followed by actual' + elif optionflags & REPORT_NDIFF: + engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK) + diff = list(engine.compare(want_lines, got_lines)) + kind = 'ndiff with -expected +actual' + else: + assert 0, 'Bad diff option' + # Remove trailing whitespace on diff output. + diff = [line.rstrip() + '\n' for line in diff] + return 'Differences (%s):\n' % kind + _indent(''.join(diff)) + + # If we're not using diff, then simply list the expected + # output followed by the actual output. + if want and got: + return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got)) + elif want: + return 'Expected:\n%sGot nothing\n' % _indent(want) + elif got: + return 'Expected nothing\nGot:\n%s' % _indent(got) + else: + return 'Expected nothing\nGot nothing\n' + +class DocTestFailure(Exception): + """A DocTest example has failed in debugging mode. 
+ + The exception instance has variables: + + - test: the DocTest object being run + + - example: the Example object that failed + + - got: the actual output + """ + def __init__(self, test, example, got): + self.test = test + self.example = example + self.got = got + + def __str__(self): + return str(self.test) + +class UnexpectedException(Exception): + """A DocTest example has encountered an unexpected exception + + The exception instance has variables: + + - test: the DocTest object being run + + - example: the Example object that failed + + - exc_info: the exception info + """ + def __init__(self, test, example, exc_info): + self.test = test + self.example = example + self.exc_info = exc_info + + def __str__(self): + return str(self.test) + +class DebugRunner(DocTestRunner): + r"""Run doc tests but raise an exception as soon as there is a failure. + + If an unexpected exception occurs, an UnexpectedException is raised. + It contains the test, the example, and the original exception: + + >>> runner = DebugRunner(verbose=False) + >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', + ... {}, 'foo', 'foo.py', 0) + >>> try: + ... runner.run(test) + ... except UnexpectedException, failure: + ... pass + + >>> failure.test is test + True + + >>> failure.example.want + '42\n' + + >>> exc_info = failure.exc_info + >>> raise exc_info[0], exc_info[1], exc_info[2] + Traceback (most recent call last): + ... + KeyError + + We wrap the original exception to give the calling application + access to the test and example information. + + If the output doesn't match, then a DocTestFailure is raised: + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 1 + ... >>> x + ... 2 + ... ''', {}, 'foo', 'foo.py', 0) + + >>> try: + ... runner.run(test) + ... except DocTestFailure, failure: + ... pass + + DocTestFailure objects provide access to the test: + + >>> failure.test is test + True + + As well as to the example: + + >>> failure.example.want + '2\n' + + and the actual output: + + >>> failure.got + '1\n' + + If a failure or error occurs, the globals are left intact: + + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 1} + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 2 + ... >>> raise KeyError + ... ''', {}, 'foo', 'foo.py', 0) + + >>> runner.run(test) + Traceback (most recent call last): + ... + UnexpectedException: + + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 2} + + But the globals are cleared if there is no error: + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 2 + ... ''', {}, 'foo', 'foo.py', 0) + + >>> runner.run(test) + (0, 1) + + >>> test.globs + {} + + """ + + def run(self, test, compileflags=None, out=None, clear_globs=True): + r = DocTestRunner.run(self, test, compileflags, out, False) + if clear_globs: + test.globs.clear() + return r + + def report_unexpected_exception(self, out, test, example, exc_info): + raise UnexpectedException(test, example, exc_info) + + def report_failure(self, out, test, example, got): + raise DocTestFailure(test, example, got) + +###################################################################### +## 6. Test Functions +###################################################################### +# These should be backwards compatible. + +# For backward compatibility, a global instance of a DocTestRunner +# class, updated by testmod. 
+master = None + +def testmod(m=None, name=None, globs=None, verbose=None, + report=True, optionflags=0, extraglobs=None, + raise_on_error=False, exclude_empty=False): + """m=None, name=None, globs=None, verbose=None, report=True, + optionflags=0, extraglobs=None, raise_on_error=False, + exclude_empty=False + + Test examples in docstrings in functions and classes reachable + from module m (or the current module if m is not supplied), starting + with m.__doc__. + + Also test examples reachable from dict m.__test__ if it exists and is + not None. m.__test__ maps names to functions, classes and strings; + function and class docstrings are tested even if the name is private; + strings are tested directly, as if they were docstrings. + + Return (#failures, #tests). + + See doctest.__doc__ for an overview. + + Optional keyword arg "name" gives the name of the module; by default + use m.__name__. + + Optional keyword arg "globs" gives a dict to be used as the globals + when executing examples; by default, use m.__dict__. A copy of this + dict is actually used for each docstring, so that each docstring's + examples start with a clean slate. + + Optional keyword arg "extraglobs" gives a dictionary that should be + merged into the globals that are used to execute examples. By + default, no extra globals are used. This is new in 2.4. + + Optional keyword arg "verbose" prints lots of stuff if true, prints + only failures if false; by default, it's true iff "-v" is in sys.argv. + + Optional keyword arg "report" prints a summary at the end when true, + else prints nothing at the end. In verbose mode, the summary is + detailed, else very brief (in fact, empty if all tests passed). + + Optional keyword arg "optionflags" or's together module constants, + and defaults to 0. This is new in 2.3. Possible values (see the + docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + + Optional keyword arg "raise_on_error" raises an exception on the + first unexpected exception or failure. This allows failures to be + post-mortem debugged. + + Advanced tomfoolery: testmod runs methods of a local instance of + class doctest.Tester, then merges the results into (or creates) + global Tester instance doctest.master. Methods of doctest.master + can be called directly too, if you want to do something unusual. + Passing report=0 to testmod is especially useful then, to delay + displaying a summary. Invoke doctest.master.summarize(verbose) + when you're done fiddling. + """ + global master + + # If no module was given, then use __main__. + if m is None: + # DWA - m will still be None if this wasn't invoked from the command + # line, in which case the following TypeError is about as good an error + # as we should expect + m = sys.modules.get('__main__') + + # Check that we were actually given a module. + if not inspect.ismodule(m): + raise TypeError("testmod: module required; %r" % (m,)) + + # If no name was given, then use the module's name. + if name is None: + name = m.__name__ + + # Find, parse, and run all tests in the given module. 
+ finder = DocTestFinder(exclude_empty=exclude_empty) + + if raise_on_error: + runner = DebugRunner(verbose=verbose, optionflags=optionflags) + else: + runner = DocTestRunner(verbose=verbose, optionflags=optionflags) + + for test in finder.find(m, name, globs=globs, extraglobs=extraglobs): + runner.run(test) + + if report: + runner.summarize() + + if master is None: + master = runner + else: + master.merge(runner) + + return runner.failures, runner.tries + +def testfile(filename, module_relative=True, name=None, package=None, + globs=None, verbose=None, report=True, optionflags=0, + extraglobs=None, raise_on_error=False, parser=DocTestParser(), + encoding=None): + """ + Test examples in the given file. Return (#failures, #tests). + + Optional keyword arg "module_relative" specifies how filenames + should be interpreted: + + - If "module_relative" is True (the default), then "filename" + specifies a module-relative path. By default, this path is + relative to the calling module's directory; but if the + "package" argument is specified, then it is relative to that + package. To ensure os-independence, "filename" should use + "/" characters to separate path segments, and should not + be an absolute path (i.e., it may not begin with "/"). + + - If "module_relative" is False, then "filename" specifies an + os-specific path. The path may be absolute or relative (to + the current working directory). + + Optional keyword arg "name" gives the name of the test; by default + use the file's basename. + + Optional keyword argument "package" is a Python package or the + name of a Python package whose directory should be used as the + base directory for a module relative filename. If no package is + specified, then the calling module's directory is used as the base + directory for module relative filenames. It is an error to + specify "package" if "module_relative" is False. + + Optional keyword arg "globs" gives a dict to be used as the globals + when executing examples; by default, use {}. A copy of this dict + is actually used for each docstring, so that each docstring's + examples start with a clean slate. + + Optional keyword arg "extraglobs" gives a dictionary that should be + merged into the globals that are used to execute examples. By + default, no extra globals are used. + + Optional keyword arg "verbose" prints lots of stuff if true, prints + only failures if false; by default, it's true iff "-v" is in sys.argv. + + Optional keyword arg "report" prints a summary at the end when true, + else prints nothing at the end. In verbose mode, the summary is + detailed, else very brief (in fact, empty if all tests passed). + + Optional keyword arg "optionflags" or's together module constants, + and defaults to 0. Possible values (see the docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + + Optional keyword arg "raise_on_error" raises an exception on the + first unexpected exception or failure. This allows failures to be + post-mortem debugged. + + Optional keyword arg "parser" specifies a DocTestParser (or + subclass) that should be used to extract tests from the files. + + Optional keyword arg "encoding" specifies an encoding that should + be used to convert the file to unicode. 
+ + Advanced tomfoolery: testmod runs methods of a local instance of + class doctest.Tester, then merges the results into (or creates) + global Tester instance doctest.master. Methods of doctest.master + can be called directly too, if you want to do something unusual. + Passing report=0 to testmod is especially useful then, to delay + displaying a summary. Invoke doctest.master.summarize(verbose) + when you're done fiddling. + """ + global master + + if package and not module_relative: + raise ValueError("Package may only be specified for module-" + "relative paths.") + + # Relativize the path + text, filename = _load_testfile(filename, package, module_relative) + + # If no name was given, then use the file's name. + if name is None: + name = os.path.basename(filename) + + # Assemble the globals. + if globs is None: + globs = {} + else: + globs = globs.copy() + if extraglobs is not None: + globs.update(extraglobs) + + if raise_on_error: + runner = DebugRunner(verbose=verbose, optionflags=optionflags) + else: + runner = DocTestRunner(verbose=verbose, optionflags=optionflags) + + if encoding is not None: + text = text.decode(encoding) + + # Read the file, convert it to a test, and run it. + test = parser.get_doctest(text, globs, name, filename, 0) + runner.run(test) + + if report: + runner.summarize() + + if master is None: + master = runner + else: + master.merge(runner) + + return runner.failures, runner.tries + +def run_docstring_examples(f, globs, verbose=False, name="NoName", + compileflags=None, optionflags=0): + """ + Test examples in the given object's docstring (`f`), using `globs` + as globals. Optional argument `name` is used in failure messages. + If the optional argument `verbose` is true, then generate output + even if there are no failures. + + `compileflags` gives the set of flags that should be used by the + Python compiler when running the examples. If not specified, then + it will default to the set of future-import flags that apply to + `globs`. + + Optional keyword arg `optionflags` specifies options for the + testing and output. See the documentation for `testmod` for more + information. + """ + # Find, parse, and run all tests in the given module. + finder = DocTestFinder(verbose=verbose, recurse=False) + runner = DocTestRunner(verbose=verbose, optionflags=optionflags) + for test in finder.find(f, name, globs=globs): + runner.run(test, compileflags=compileflags) + +###################################################################### +## 7. Tester +###################################################################### +# This is provided only for backwards compatibility. It's not +# actually used in any way. 
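
run_docstring_examples(), defined above, is the lightweight entry point when only a single object's docstring needs checking. A small sketch with an invented function, assuming the stdlib doctest module is importable:

    import doctest

    def triple(x):
        """
        >>> triple(3)
        9
        """
        return 3 * x

    # Check just this docstring; globs must make `triple` reachable
    # from inside the examples.
    doctest.run_docstring_examples(triple, {"triple": triple},
                                   verbose=True, name="triple")
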
+ +class Tester: + def __init__(self, mod=None, globs=None, verbose=None, optionflags=0): + + warnings.warn("class Tester is deprecated; " + "use class doctest.DocTestRunner instead", + DeprecationWarning, stacklevel=2) + if mod is None and globs is None: + raise TypeError("Tester.__init__: must specify mod or globs") + if mod is not None and not inspect.ismodule(mod): + raise TypeError("Tester.__init__: mod must be a module; %r" % + (mod,)) + if globs is None: + globs = mod.__dict__ + self.globs = globs + + self.verbose = verbose + self.optionflags = optionflags + self.testfinder = DocTestFinder() + self.testrunner = DocTestRunner(verbose=verbose, + optionflags=optionflags) + + def runstring(self, s, name): + test = DocTestParser().get_doctest(s, self.globs, name, None, None) + if self.verbose: + print "Running string", name + (f,t) = self.testrunner.run(test) + if self.verbose: + print f, "of", t, "examples failed in string", name + return (f,t) + + def rundoc(self, object, name=None, module=None): + f = t = 0 + tests = self.testfinder.find(object, name, module=module, + globs=self.globs) + for test in tests: + (f2, t2) = self.testrunner.run(test) + (f,t) = (f+f2, t+t2) + return (f,t) + + def rundict(self, d, name, module=None): + import new + m = new.module(name) + m.__dict__.update(d) + if module is None: + module = False + return self.rundoc(m, name, module) + + def run__test__(self, d, name): + import new + m = new.module(name) + m.__test__ = d + return self.rundoc(m, name) + + def summarize(self, verbose=None): + return self.testrunner.summarize(verbose) + + def merge(self, other): + self.testrunner.merge(other.testrunner) + +###################################################################### +## 8. Unittest Support +###################################################################### + +_unittest_reportflags = 0 + +def set_unittest_reportflags(flags): + """Sets the unittest option flags. + + The old flag is returned so that a runner could restore the old + value if it wished to: + + >>> import doctest + >>> old = doctest._unittest_reportflags + >>> doctest.set_unittest_reportflags(REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) == old + True + + >>> doctest._unittest_reportflags == (REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) + True + + Only reporting flags can be set: + + >>> doctest.set_unittest_reportflags(ELLIPSIS) + Traceback (most recent call last): + ... + ValueError: ('Only reporting flags allowed', 8) + + >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF | + ... 
REPORT_ONLY_FIRST_FAILURE) + True + """ + global _unittest_reportflags + + if (flags & REPORTING_FLAGS) != flags: + raise ValueError("Only reporting flags allowed", flags) + old = _unittest_reportflags + _unittest_reportflags = flags + return old + + +class DocTestCase(unittest.TestCase): + + def __init__(self, test, optionflags=0, setUp=None, tearDown=None, + checker=None): + + unittest.TestCase.__init__(self) + self._dt_optionflags = optionflags + self._dt_checker = checker + self._dt_test = test + self._dt_setUp = setUp + self._dt_tearDown = tearDown + + def setUp(self): + test = self._dt_test + + if self._dt_setUp is not None: + self._dt_setUp(test) + + def tearDown(self): + test = self._dt_test + + if self._dt_tearDown is not None: + self._dt_tearDown(test) + + test.globs.clear() + + def runTest(self): + test = self._dt_test + old = sys.stdout + new = StringIO() + optionflags = self._dt_optionflags + + if not (optionflags & REPORTING_FLAGS): + # The option flags don't include any reporting flags, + # so add the default reporting flags + optionflags |= _unittest_reportflags + + runner = DocTestRunner(optionflags=optionflags, + checker=self._dt_checker, verbose=False) + + try: + runner.DIVIDER = "-"*70 + failures, tries = runner.run( + test, out=new.write, clear_globs=False) + finally: + sys.stdout = old + + if failures: + raise self.failureException(self.format_failure(new.getvalue())) + + def format_failure(self, err): + test = self._dt_test + if test.lineno is None: + lineno = 'unknown line number' + else: + lineno = '%s' % test.lineno + lname = '.'.join(test.name.split('.')[-1:]) + return ('Failed doctest test for %s\n' + ' File "%s", line %s, in %s\n\n%s' + % (test.name, test.filename, lineno, lname, err) + ) + + def debug(self): + r"""Run the test case without results and without catching exceptions + + The unit test framework includes a debug method on test cases + and test suites to support post-mortem debugging. The test code + is run in such a way that errors are not caught. This way a + caller can catch the errors and initiate post-mortem debugging. + + The DocTestCase provides a debug method that raises + UnexpectedException errors if there is an unexepcted + exception: + + >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', + ... {}, 'foo', 'foo.py', 0) + >>> case = DocTestCase(test) + >>> try: + ... case.debug() + ... except UnexpectedException, failure: + ... pass + + The UnexpectedException contains the test, the example, and + the original exception: + + >>> failure.test is test + True + + >>> failure.example.want + '42\n' + + >>> exc_info = failure.exc_info + >>> raise exc_info[0], exc_info[1], exc_info[2] + Traceback (most recent call last): + ... + KeyError + + If the output doesn't match, then a DocTestFailure is raised: + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 1 + ... >>> x + ... 2 + ... ''', {}, 'foo', 'foo.py', 0) + >>> case = DocTestCase(test) + + >>> try: + ... case.debug() + ... except DocTestFailure, failure: + ... 
pass + + DocTestFailure objects provide access to the test: + + >>> failure.test is test + True + + As well as to the example: + + >>> failure.example.want + '2\n' + + and the actual output: + + >>> failure.got + '1\n' + + """ + + self.setUp() + runner = DebugRunner(optionflags=self._dt_optionflags, + checker=self._dt_checker, verbose=False) + runner.run(self._dt_test) + self.tearDown() + + def id(self): + return self._dt_test.name + + def __repr__(self): + name = self._dt_test.name.split('.') + return "%s (%s)" % (name[-1], '.'.join(name[:-1])) + + __str__ = __repr__ + + def shortDescription(self): + return "Doctest: " + self._dt_test.name + +def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, + **options): + """ + Convert doctest tests for a module to a unittest test suite. + + This converts each documentation string in a module that + contains doctest tests to a unittest test case. If any of the + tests in a doc string fail, then the test case fails. An exception + is raised showing the name of the file containing the test and a + (sometimes approximate) line number. + + The `module` argument provides the module to be tested. The argument + can be either a module or a module name. + + If no argument is given, the calling module is used. + + A number of options may be provided as keyword arguments: + + setUp + A set-up function. This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. + + tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. + + globs + A dictionary containing initial global variables for the tests. + + optionflags + A set of doctest option flags expressed as an integer. + """ + + if test_finder is None: + test_finder = DocTestFinder() + + module = _normalize_module(module) + tests = test_finder.find(module, globs=globs, extraglobs=extraglobs) + if globs is None: + globs = module.__dict__ + if not tests: + # Why do we want to do this? Because it reveals a bug that might + # otherwise be hidden. + raise ValueError(module, "has no tests") + + tests.sort() + suite = unittest.TestSuite() + for test in tests: + if len(test.examples) == 0: + continue + if not test.filename: + filename = module.__file__ + if filename[-4:] in (".pyc", ".pyo"): + filename = filename[:-1] + test.filename = filename + suite.addTest(DocTestCase(test, **options)) + + return suite + +class DocFileCase(DocTestCase): + + def id(self): + return '_'.join(self._dt_test.name.split('.')) + + def __repr__(self): + return self._dt_test.filename + __str__ = __repr__ + + def format_failure(self, err): + return ('Failed doctest test for %s\n File "%s", line 0\n\n%s' + % (self._dt_test.name, self._dt_test.filename, err) + ) + +def DocFileTest(path, module_relative=True, package=None, + globs=None, parser=DocTestParser(), + encoding=None, **options): + if globs is None: + globs = {} + else: + globs = globs.copy() + + if package and not module_relative: + raise ValueError("Package may only be specified for module-" + "relative paths.") + + # Relativize the path. + doc, path = _load_testfile(path, package, module_relative) + + if "__file__" not in globs: + globs["__file__"] = path + + # Find the file and read it. 
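
DocTestSuite() above is typically combined with the standard unittest machinery. A brief sketch; the tested module name is hypothetical and is assumed to contain docstring examples:

    import unittest
    import doctest
    import example_module      # hypothetical module with docstring tests

    suite = unittest.TestSuite()
    suite.addTest(doctest.DocTestSuite(example_module,
                                       optionflags=doctest.ELLIPSIS))
    unittest.TextTestRunner(verbosity=2).run(suite)
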
+ name = os.path.basename(path) + + # If an encoding is specified, use it to convert the file to unicode + if encoding is not None: + doc = doc.decode(encoding) + + # Convert it to a test, and wrap it in a DocFileCase. + test = parser.get_doctest(doc, globs, name, path, 0) + return DocFileCase(test, **options) + +def DocFileSuite(*paths, **kw): + """A unittest suite for one or more doctest files. + + The path to each doctest file is given as a string; the + interpretation of that string depends on the keyword argument + "module_relative". + + A number of options may be provided as keyword arguments: + + module_relative + If "module_relative" is True, then the given file paths are + interpreted as os-independent module-relative paths. By + default, these paths are relative to the calling module's + directory; but if the "package" argument is specified, then + they are relative to that package. To ensure os-independence, + "filename" should use "/" characters to separate path + segments, and may not be an absolute path (i.e., it may not + begin with "/"). + + If "module_relative" is False, then the given file paths are + interpreted as os-specific paths. These paths may be absolute + or relative (to the current working directory). + + package + A Python package or the name of a Python package whose directory + should be used as the base directory for module relative paths. + If "package" is not specified, then the calling module's + directory is used as the base directory for module relative + filenames. It is an error to specify "package" if + "module_relative" is False. + + setUp + A set-up function. This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. + + tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. + + globs + A dictionary containing initial global variables for the tests. + + optionflags + A set of doctest option flags expressed as an integer. + + parser + A DocTestParser (or subclass) that should be used to extract + tests from the files. + + encoding + An encoding that will be used to convert the files to unicode. + """ + suite = unittest.TestSuite() + + # We do this here so that _normalize_module is called at the right + # level. If it were called in DocFileTest, then this function + # would be the caller and we might guess the package incorrectly. + if kw.get('module_relative', True): + kw['package'] = _normalize_module(kw.get('package')) + + for path in paths: + suite.addTest(DocFileTest(path, **kw)) + + return suite + +###################################################################### +## 9. Debugging Support +###################################################################### + +def script_from_examples(s): + r"""Extract script from text with examples. + + Converts text with examples to a Python script. Example input is + converted to regular code. Example output and all other words + are converted to comments: + + >>> text = ''' + ... Here are examples of simple math. + ... + ... Python has super accurate integer addition + ... + ... >>> 2 + 2 + ... 5 + ... + ... And very friendly error messages: + ... + ... >>> 1/0 + ... To Infinity + ... And + ... Beyond + ... + ... You can use logic if you want: + ... + ... >>> if 0: + ... ... 
blah + ... ... blah + ... ... + ... + ... Ho hum + ... ''' + + >>> print script_from_examples(text) + # Here are examples of simple math. + # + # Python has super accurate integer addition + # + 2 + 2 + # Expected: + ## 5 + # + # And very friendly error messages: + # + 1/0 + # Expected: + ## To Infinity + ## And + ## Beyond + # + # You can use logic if you want: + # + if 0: + blah + blah + # + # Ho hum + + """ + output = [] + for piece in DocTestParser().parse(s): + if isinstance(piece, Example): + # Add the example's source code (strip trailing NL) + output.append(piece.source[:-1]) + # Add the expected output: + want = piece.want + if want: + output.append('# Expected:') + output += ['## '+l for l in want.split('\n')[:-1]] + else: + # Add non-example text. + output += [_comment_line(l) + for l in piece.split('\n')[:-1]] + + # Trim junk on both ends. + while output and output[-1] == '#': + output.pop() + while output and output[0] == '#': + output.pop(0) + # Combine the output, and return it. + # Add a courtesy newline to prevent exec from choking (see bug #1172785) + return '\n'.join(output) + '\n' + +def testsource(module, name): + """Extract the test sources from a doctest docstring as a script. + + Provide the module (or dotted name of the module) containing the + test to be debugged and the name (within the module) of the object + with the doc string with tests to be debugged. + """ + module = _normalize_module(module) + tests = DocTestFinder().find(module) + test = [t for t in tests if t.name == name] + if not test: + raise ValueError(name, "not found in tests") + test = test[0] + testsrc = script_from_examples(test.docstring) + return testsrc + +def debug_src(src, pm=False, globs=None): + """Debug a single doctest docstring, in argument `src`'""" + testsrc = script_from_examples(src) + debug_script(testsrc, pm, globs) + +def debug_script(src, pm=False, globs=None): + "Debug a test script. `src` is the script, as a string." + import pdb + + # Note that tempfile.NameTemporaryFile() cannot be used. As the + # docs say, a file so created cannot be opened by name a second time + # on modern Windows boxes, and execfile() needs to open it. + srcfilename = tempfile.mktemp(".py", "doctestdebug") + f = open(srcfilename, 'w') + f.write(src) + f.close() + + try: + if globs: + globs = globs.copy() + else: + globs = {} + + if pm: + try: + execfile(srcfilename, globs, globs) + except: + print sys.exc_info()[1] + pdb.post_mortem(sys.exc_info()[2]) + else: + # Note that %r is vital here. '%s' instead can, e.g., cause + # backslashes to get treated as metacharacters on Windows. + pdb.run("execfile(%r)" % srcfilename, globs, globs) + + finally: + os.remove(srcfilename) + +def debug(module, name, pm=False): + """Debug a single doctest docstring. + + Provide the module (or dotted name of the module) containing the + test to be debugged and the name (within the module) of the object + with the docstring with tests to be debugged. + """ + module = _normalize_module(module) + testsrc = testsource(module, name) + debug_script(testsrc, pm, module.__dict__) + +###################################################################### +## 10. Example Usage +###################################################################### +class _TestClass: + """ + A pointless class, for sanity-checking of docstring testing. 
+ + Methods: + square() + get() + + >>> _TestClass(13).get() + _TestClass(-12).get() + 1 + >>> hex(_TestClass(13).square().get()) + '0xa9' + """ + + def __init__(self, val): + """val -> _TestClass object with associated value val. + + >>> t = _TestClass(123) + >>> print t.get() + 123 + """ + + self.val = val + + def square(self): + """square() -> square TestClass's associated value + + >>> _TestClass(13).square().get() + 169 + """ + + self.val = self.val ** 2 + return self + + def get(self): + """get() -> return TestClass's associated value. + + >>> x = _TestClass(-42) + >>> print x.get() + -42 + """ + + return self.val + +__test__ = {"_TestClass": _TestClass, + "string": r""" + Example of a string object, searched as-is. + >>> x = 1; y = 2 + >>> x + y, x * y + (3, 2) + """, + + "bool-int equivalence": r""" + In 2.2, boolean expressions displayed + 0 or 1. By default, we still accept + them. This can be disabled by passing + DONT_ACCEPT_TRUE_FOR_1 to the new + optionflags argument. + >>> 4 == 4 + 1 + >>> 4 == 4 + True + >>> 4 > 4 + 0 + >>> 4 > 4 + False + """, + + "blank lines": r""" + Blank lines can be marked with : + >>> print 'foo\n\nbar\n' + foo + + bar + + """, + + "ellipsis": r""" + If the ellipsis flag is used, then '...' can be used to + elide substrings in the desired output: + >>> print range(1000) #doctest: +ELLIPSIS + [0, 1, 2, ..., 999] + """, + + "whitespace normalization": r""" + If the whitespace normalization flag is used, then + differences in whitespace are ignored. + >>> print range(30) #doctest: +NORMALIZE_WHITESPACE + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, + 27, 28, 29] + """, + } + +def _test(): + r = unittest.TextTestRunner() + r.run(DocTestSuite()) + +if __name__ == "__main__": + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,897 @@ +# -*- coding: iso-8859-1 -*- +"""Get useful information from live Python objects. + +This module encapsulates the interface provided by the internal special +attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion. +It also provides some help for examining source code and class layout. + +Here are some of the useful functions provided by this module: + + ismodule(), isclass(), ismethod(), isfunction(), istraceback(), + isframe(), iscode(), isbuiltin(), isroutine() - check object types + getmembers() - get members of an object that satisfy a given condition + + getfile(), getsourcefile(), getsource() - find an object's source code + getdoc(), getcomments() - get documentation on an object + getmodule() - determine the module that an object came from + getclasstree() - arrange classes so as to represent their hierarchy + + getargspec(), getargvalues() - get info about function arguments + formatargspec(), formatargvalues() - format an argument spec + getouterframes(), getinnerframes() - get info about frames + currentframe() - get the current stack frame + stack(), trace() - get info about frames on the stack or in a traceback +""" + +# This module is in the public domain. No warranties. 
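
As a quick orientation, the helpers listed in the module docstring can be tried out on an ordinary function. A minimal sketch with an invented function, run as a script so the source lookups can find the file:

    # sketch.py (hypothetical) -- poking at a function with the helpers above
    import inspect

    def greet(name, punctuation="!"):
        return "Hello, " + name + punctuation

    print inspect.isfunction(greet)       # True
    print inspect.getargspec(greet)       # (['name', 'punctuation'], None, None, ('!',))
    print inspect.getsourcefile(greet)    # the path of this script
    print inspect.getsource(greet).splitlines()[0]   # the "def greet(...)" line
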
+ +__author__ = 'Ka-Ping Yee ' +__date__ = '1 Jan 2001' + +import sys, os, types, string, re, dis, imp, tokenize, linecache +from operator import attrgetter + +# ----------------------------------------------------------- type-checking +def ismodule(object): + """Return true if the object is a module. + + Module objects provide these attributes: + __doc__ documentation string + __file__ filename (missing for built-in modules)""" + return isinstance(object, types.ModuleType) + +def isclass(object): + """Return true if the object is a class. + + Class objects provide these attributes: + __doc__ documentation string + __module__ name of module in which this class was defined""" + return isinstance(object, types.ClassType) or hasattr(object, '__bases__') + +def ismethod(object): + """Return true if the object is an instance method. + + Instance method objects provide these attributes: + __doc__ documentation string + __name__ name with which this method was defined + im_class class object in which this method belongs + im_func function object containing implementation of method + im_self instance to which this method is bound, or None""" + return isinstance(object, types.MethodType) + +def ismethoddescriptor(object): + """Return true if the object is a method descriptor. + + But not if ismethod() or isclass() or isfunction() are true. + + This is new in Python 2.2, and, for example, is true of int.__add__. + An object passing this test has a __get__ attribute but not a __set__ + attribute, but beyond that the set of attributes varies. __name__ is + usually sensible, and __doc__ often is. + + Methods implemented via descriptors that also pass one of the other + tests return false from the ismethoddescriptor() test, simply because + the other tests promise more -- you can, e.g., count on having the + im_func attribute (etc) when an object passes ismethod().""" + return (hasattr(object, "__get__") + and not hasattr(object, "__set__") # else it's a data descriptor + and not ismethod(object) # mutual exclusion + and not isfunction(object) + and not isclass(object)) + +def isdatadescriptor(object): + """Return true if the object is a data descriptor. + + Data descriptors have both a __get__ and a __set__ attribute. Examples are + properties (defined in Python) and getsets and members (defined in C). + Typically, data descriptors will also have __name__ and __doc__ attributes + (properties, getsets, and members have both of these attributes), but this + is not guaranteed.""" + return (hasattr(object, "__set__") and hasattr(object, "__get__")) + +if hasattr(types, 'MemberDescriptorType'): + # CPython and equivalent + def ismemberdescriptor(object): + """Return true if the object is a member descriptor. + + Member descriptors are specialized descriptors defined in extension + modules.""" + return isinstance(object, types.MemberDescriptorType) +else: + # Other implementations + def ismemberdescriptor(object): + """Return true if the object is a member descriptor. + + Member descriptors are specialized descriptors defined in extension + modules.""" + return False + +if hasattr(types, 'GetSetDescriptorType'): + # CPython and equivalent + def isgetsetdescriptor(object): + """Return true if the object is a getset descriptor. + + getset descriptors are specialized descriptors defined in extension + modules.""" + return isinstance(object, types.GetSetDescriptorType) +else: + # Other implementations + def isgetsetdescriptor(object): + """Return true if the object is a getset descriptor. 
+ + getset descriptors are specialized descriptors defined in extension + modules.""" + return False + +def isfunction(object): + """Return true if the object is a user-defined function. + + Function objects provide these attributes: + __doc__ documentation string + __name__ name with which this function was defined + func_code code object containing compiled function bytecode + func_defaults tuple of any default values for arguments + func_doc (same as __doc__) + func_globals global namespace in which this function was defined + func_name (same as __name__)""" + return isinstance(object, types.FunctionType) + +def istraceback(object): + """Return true if the object is a traceback. + + Traceback objects provide these attributes: + tb_frame frame object at this level + tb_lasti index of last attempted instruction in bytecode + tb_lineno current line number in Python source code + tb_next next inner traceback object (called by this level)""" + return isinstance(object, types.TracebackType) + +def isframe(object): + """Return true if the object is a frame object. + + Frame objects provide these attributes: + f_back next outer frame object (this frame's caller) + f_builtins built-in namespace seen by this frame + f_code code object being executed in this frame + f_exc_traceback traceback if raised in this frame, or None + f_exc_type exception type if raised in this frame, or None + f_exc_value exception value if raised in this frame, or None + f_globals global namespace seen by this frame + f_lasti index of last attempted instruction in bytecode + f_lineno current line number in Python source code + f_locals local namespace seen by this frame + f_restricted 0 or 1 if frame is in restricted execution mode + f_trace tracing function for this frame, or None""" + return isinstance(object, types.FrameType) + +def iscode(object): + """Return true if the object is a code object. + + Code objects provide these attributes: + co_argcount number of arguments (not including * or ** args) + co_code string of raw compiled bytecode + co_consts tuple of constants used in the bytecode + co_filename name of file in which this code object was created + co_firstlineno number of first line in Python source code + co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg + co_lnotab encoded mapping of line numbers to bytecode indices + co_name name with which this code object was defined + co_names tuple of names of local variables + co_nlocals number of local variables + co_stacksize virtual machine stack space required + co_varnames tuple of names of arguments and local variables""" + return isinstance(object, types.CodeType) + +def isbuiltin(object): + """Return true if the object is a built-in function or method. + + Built-in functions and methods provide these attributes: + __doc__ documentation string + __name__ original name of this function or method + __self__ instance to which a method is bound, or None""" + return isinstance(object, types.BuiltinFunctionType) + +def isroutine(object): + """Return true if the object is any kind of function or method.""" + return (isbuiltin(object) + or isfunction(object) + or ismethod(object) + or ismethoddescriptor(object)) + +def getmembers(object, predicate=None): + """Return all members of an object as (name, value) pairs sorted by name. 
+ Optionally, only return members that satisfy a given predicate.""" + results = [] + for key in dir(object): + value = getattr(object, key) + if not predicate or predicate(value): + results.append((key, value)) + results.sort() + return results + +def classify_class_attrs(cls): + """Return list of attribute-descriptor tuples. + + For each name in dir(cls), the return list contains a 4-tuple + with these elements: + + 0. The name (a string). + + 1. The kind of attribute this is, one of these strings: + 'class method' created via classmethod() + 'static method' created via staticmethod() + 'property' created via property() + 'method' any other flavor of method + 'data' not a method + + 2. The class which defined this attribute (a class). + + 3. The object as obtained directly from the defining class's + __dict__, not via getattr. This is especially important for + data attributes: C.data is just a data object, but + C.__dict__['data'] may be a data descriptor with additional + info, like a __doc__ string. + """ + + mro = getmro(cls) + names = dir(cls) + result = [] + for name in names: + # Get the object associated with the name. + # Getting an obj from the __dict__ sometimes reveals more than + # using getattr. Static and class methods are dramatic examples. + if name in cls.__dict__: + obj = cls.__dict__[name] + else: + obj = getattr(cls, name) + + # Figure out where it was defined. + homecls = getattr(obj, "__objclass__", None) + if homecls is None: + # search the dicts. + for base in mro: + if name in base.__dict__: + homecls = base + break + + # Get the object again, in order to get it from the defining + # __dict__ instead of via getattr (if possible). + if homecls is not None and name in homecls.__dict__: + obj = homecls.__dict__[name] + + # Also get the object via getattr. + obj_via_getattr = getattr(cls, name) + + # Classify the object. + if isinstance(obj, staticmethod): + kind = "static method" + elif isinstance(obj, classmethod): + kind = "class method" + elif isinstance(obj, property): + kind = "property" + elif (ismethod(obj_via_getattr) or + ismethoddescriptor(obj_via_getattr)): + kind = "method" + else: + kind = "data" + + result.append((name, kind, homecls, obj)) + + return result + +# ----------------------------------------------------------- class helpers +def _searchbases(cls, accum): + # Simulate the "classic class" search order. + if cls in accum: + return + accum.append(cls) + for base in cls.__bases__: + _searchbases(base, accum) + +def getmro(cls): + "Return tuple of base classes (including cls) in method resolution order." + if hasattr(cls, "__mro__"): + return cls.__mro__ + else: + result = [] + _searchbases(cls, result) + return tuple(result) + +# -------------------------------------------------- source code extraction +def indentsize(line): + """Return the indent size, in spaces, at the start of a line of text.""" + expline = string.expandtabs(line) + return len(expline) - len(string.lstrip(expline)) + +def getdoc(object): + """Get the documentation string for an object. + + All tabs are expanded to spaces. 
To clean up docstrings that are + indented to line up with blocks of code, any whitespace than can be + uniformly removed from the second line onwards is removed.""" + try: + doc = object.__doc__ + except AttributeError: + return None + if not isinstance(doc, types.StringTypes): + return None + try: + lines = string.split(string.expandtabs(doc), '\n') + except UnicodeError: + return None + else: + # Find minimum indentation of any non-blank lines after first line. + margin = sys.maxint + for line in lines[1:]: + content = len(string.lstrip(line)) + if content: + indent = len(line) - content + margin = min(margin, indent) + # Remove indentation. + if lines: + lines[0] = lines[0].lstrip() + if margin < sys.maxint: + for i in range(1, len(lines)): lines[i] = lines[i][margin:] + # Remove any trailing or leading blank lines. + while lines and not lines[-1]: + lines.pop() + while lines and not lines[0]: + lines.pop(0) + return string.join(lines, '\n') + +def getfile(object): + """Work out which source or compiled file an object was defined in.""" + if ismodule(object): + if hasattr(object, '__file__'): + return object.__file__ + raise TypeError('arg is a built-in module') + if isclass(object): + object = sys.modules.get(object.__module__) + if hasattr(object, '__file__'): + return object.__file__ + raise TypeError('arg is a built-in class') + if ismethod(object): + object = object.im_func + if isfunction(object): + object = object.func_code + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + return object.co_filename + raise TypeError('arg is not a module, class, method, ' + 'function, traceback, frame, or code object') + +def getmoduleinfo(path): + """Get the module name, suffix, mode, and module type for a given file.""" + filename = os.path.basename(path) + suffixes = map(lambda (suffix, mode, mtype): + (-len(suffix), suffix, mode, mtype), imp.get_suffixes()) + suffixes.sort() # try longest suffixes first, in case they overlap + for neglen, suffix, mode, mtype in suffixes: + if filename[neglen:] == suffix: + return filename[:neglen], suffix, mode, mtype + +def getmodulename(path): + """Return the module name for a given file, or None.""" + info = getmoduleinfo(path) + if info: return info[0] + +def getsourcefile(object): + """Return the Python source file an object was defined in, if it exists.""" + filename = getfile(object) + if string.lower(filename[-4:]) in ('.pyc', '.pyo'): + filename = filename[:-4] + '.py' + for suffix, mode, kind in imp.get_suffixes(): + if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix: + # Looks like a binary file. We want to only return a text file. + return None + if os.path.exists(filename): + return filename + # only return a non-existent filename if the module has a PEP 302 loader + if hasattr(getmodule(object, filename), '__loader__'): + return filename + +def getabsfile(object, _filename=None): + """Return an absolute path to the source or compiled file for an object. 
+ + The idea is for each object to have a unique origin, so this routine + normalizes the result as much as possible.""" + if _filename is None: + _filename = getsourcefile(object) or getfile(object) + return os.path.normcase(os.path.abspath(_filename)) + +modulesbyfile = {} +_filesbymodname = {} + +def getmodule(object, _filename=None): + """Return the module an object was defined in, or None if not found.""" + if ismodule(object): + return object + if hasattr(object, '__module__'): + return sys.modules.get(object.__module__) + # Try the filename to modulename cache + if _filename is not None and _filename in modulesbyfile: + return sys.modules.get(modulesbyfile[_filename]) + # Try the cache again with the absolute file name + try: + file = getabsfile(object, _filename) + except TypeError: + return None + if file in modulesbyfile: + return sys.modules.get(modulesbyfile[file]) + # Update the filename to module name cache and check yet again + # Copy sys.modules in order to cope with changes while iterating + for modname, module in sys.modules.items(): + if ismodule(module) and hasattr(module, '__file__'): + f = module.__file__ + if f == _filesbymodname.get(modname, None): + # Have already mapped this module, so skip it + continue + _filesbymodname[modname] = f + f = getabsfile(module) + # Always map to the name the module knows itself by + modulesbyfile[f] = modulesbyfile[ + os.path.realpath(f)] = module.__name__ + if file in modulesbyfile: + return sys.modules.get(modulesbyfile[file]) + # Check the main module + main = sys.modules['__main__'] + if not hasattr(object, '__name__'): + return None + if hasattr(main, object.__name__): + mainobject = getattr(main, object.__name__) + if mainobject is object: + return main + # Check builtins + builtin = sys.modules['__builtin__'] + if hasattr(builtin, object.__name__): + builtinobject = getattr(builtin, object.__name__) + if builtinobject is object: + return builtin + +def findsource(object): + """Return the entire source file and starting line number for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of all the lines + in the file and the line number indexes a line in that list. An IOError + is raised if the source code cannot be retrieved.""" + file = getsourcefile(object) or getfile(object) + module = getmodule(object, file) + if module: + lines = linecache.getlines(file, module.__dict__) + else: + lines = linecache.getlines(file) + if not lines: + raise IOError('could not get source code') + + if ismodule(object): + return lines, 0 + + if isclass(object): + name = object.__name__ + pat = re.compile(r'^(\s*)class\s*' + name + r'\b') + # make some effort to find the best matching class definition: + # use the one with the least indentation, which is the one + # that's most probably not inside a function definition. 
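
getmodule() and findsource() above can be exercised on any importable class. A small sketch using a stdlib class purely as an example, assuming its .py source file is present on disk:

    import inspect
    import textwrap

    # Which module does the class belong to, and where does its source start?
    print inspect.getmodule(textwrap.TextWrapper).__name__     # 'textwrap'
    lines, lnum = inspect.findsource(textwrap.TextWrapper)
    print lines[lnum].rstrip()      # the "class TextWrapper" line
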
+ candidates = [] + for i in range(len(lines)): + match = pat.match(lines[i]) + if match: + # if it's at toplevel, it's already the best one + if lines[i][0] == 'c': + return lines, i + # else add whitespace to candidate list + candidates.append((match.group(1), i)) + if candidates: + # this will sort by whitespace, and by line number, + # less whitespace first + candidates.sort() + return lines, candidates[0][1] + else: + raise IOError('could not find class definition') + + if ismethod(object): + object = object.im_func + if isfunction(object): + object = object.func_code + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + if not hasattr(object, 'co_firstlineno'): + raise IOError('could not find function definition') + lnum = object.co_firstlineno - 1 + pat = re.compile(r'^(\s*def\s)|(.*(? 0: + if pat.match(lines[lnum]): break + lnum = lnum - 1 + return lines, lnum + raise IOError('could not find code object') + +def getcomments(object): + """Get lines of comments immediately preceding an object's source code. + + Returns None when source can't be found. + """ + try: + lines, lnum = findsource(object) + except (IOError, TypeError): + return None + + if ismodule(object): + # Look for a comment block at the top of the file. + start = 0 + if lines and lines[0][:2] == '#!': start = 1 + while start < len(lines) and string.strip(lines[start]) in ('', '#'): + start = start + 1 + if start < len(lines) and lines[start][:1] == '#': + comments = [] + end = start + while end < len(lines) and lines[end][:1] == '#': + comments.append(string.expandtabs(lines[end])) + end = end + 1 + return string.join(comments, '') + + # Look for a preceding block of comments at the same indentation. + elif lnum > 0: + indent = indentsize(lines[lnum]) + end = lnum - 1 + if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \ + indentsize(lines[end]) == indent: + comments = [string.lstrip(string.expandtabs(lines[end]))] + if end > 0: + end = end - 1 + comment = string.lstrip(string.expandtabs(lines[end])) + while comment[:1] == '#' and indentsize(lines[end]) == indent: + comments[:0] = [comment] + end = end - 1 + if end < 0: break + comment = string.lstrip(string.expandtabs(lines[end])) + while comments and string.strip(comments[0]) == '#': + comments[:1] = [] + while comments and string.strip(comments[-1]) == '#': + comments[-1:] = [] + return string.join(comments, '') + +class EndOfBlock(Exception): pass + +class BlockFinder: + """Provide a tokeneater() method to detect the end of a code block.""" + def __init__(self): + self.indent = 0 + self.islambda = False + self.started = False + self.passline = False + self.last = 1 + + def tokeneater(self, type, token, (srow, scol), (erow, ecol), line): + if not self.started: + # look for the first "def", "class" or "lambda" + if token in ("def", "class", "lambda"): + if token == "lambda": + self.islambda = True + self.started = True + self.passline = True # skip to the end of the line + elif type == tokenize.NEWLINE: + self.passline = False # stop skipping when a NEWLINE is seen + self.last = srow + if self.islambda: # lambdas always end at the first NEWLINE + raise EndOfBlock + elif self.passline: + pass + elif type == tokenize.INDENT: + self.indent = self.indent + 1 + self.passline = True + elif type == tokenize.DEDENT: + self.indent = self.indent - 1 + # the end of matching indent/dedent pairs end a block + # (note that this only works for "def"/"class" blocks, + # not e.g. 
for "if: else:" or "try: finally:" blocks) + if self.indent <= 0: + raise EndOfBlock + elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL): + # any other token on the same indentation level end the previous + # block as well, except the pseudo-tokens COMMENT and NL. + raise EndOfBlock + +def getblock(lines): + """Extract the block of code at the top of the given list of lines.""" + blockfinder = BlockFinder() + try: + tokenize.tokenize(iter(lines).next, blockfinder.tokeneater) + except (EndOfBlock, IndentationError): + pass + return lines[:blockfinder.last] + +def getsourcelines(object): + """Return a list of source lines and starting line number for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of the lines + corresponding to the object and the line number indicates where in the + original source file the first line of code was found. An IOError is + raised if the source code cannot be retrieved.""" + lines, lnum = findsource(object) + + if ismodule(object): return lines, 0 + else: return getblock(lines[lnum:]), lnum + 1 + +def getsource(object): + """Return the text of the source code for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a single string. An + IOError is raised if the source code cannot be retrieved.""" + lines, lnum = getsourcelines(object) + return string.join(lines, '') + +# --------------------------------------------------- class tree extraction +def walktree(classes, children, parent): + """Recursive helper function for getclasstree().""" + results = [] + classes.sort(key=attrgetter('__module__', '__name__')) + for c in classes: + results.append((c, c.__bases__)) + if c in children: + results.append(walktree(children[c], children, c)) + return results + +def getclasstree(classes, unique=0): + """Arrange the given list of classes into a hierarchy of nested lists. + + Where a nested list appears, it contains classes derived from the class + whose entry immediately precedes the list. Each entry is a 2-tuple + containing a class and a tuple of its base classes. If the 'unique' + argument is true, exactly one entry appears in the returned structure + for each class in the given list. Otherwise, classes using multiple + inheritance and their descendants will appear multiple times.""" + children = {} + roots = [] + for c in classes: + if c.__bases__: + for parent in c.__bases__: + if not parent in children: + children[parent] = [] + children[parent].append(c) + if unique and parent in classes: break + elif c not in roots: + roots.append(c) + for parent in children: + if parent not in classes: + roots.append(parent) + return walktree(roots, children, None) + +# ------------------------------------------------ argument list extraction +# These constants are from Python's compile.h. +CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8 + +def getargs(co): + """Get information about the arguments accepted by a code object. + + Three things are returned: (args, varargs, varkw), where 'args' is + a list of argument names (possibly containing nested lists), and + 'varargs' and 'varkw' are the names of the * and ** arguments or None.""" + + if not iscode(co): + if hasattr(len, 'func_code') and type(co) is type(len.func_code): + # PyPy extension: built-in function objects have a func_code too. 
+ # There is no co_code on it, but co_argcount and co_varnames and + # co_flags are present. + pass + else: + raise TypeError('arg is not a code object') + + code = getattr(co, 'co_code', '') + nargs = co.co_argcount + names = co.co_varnames + args = list(names[:nargs]) + step = 0 + + # The following acrobatics are for anonymous (tuple) arguments. + for i in range(nargs): + if args[i][:1] in ('', '.'): + stack, remain, count = [], [], [] + while step < len(code): + op = ord(code[step]) + step = step + 1 + if op >= dis.HAVE_ARGUMENT: + opname = dis.opname[op] + value = ord(code[step]) + ord(code[step+1])*256 + step = step + 2 + if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'): + remain.append(value) + count.append(value) + elif opname == 'STORE_FAST': + stack.append(names[value]) + + # Special case for sublists of length 1: def foo((bar)) + # doesn't generate the UNPACK_TUPLE bytecode, so if + # `remain` is empty here, we have such a sublist. + if not remain: + stack[0] = [stack[0]] + break + else: + remain[-1] = remain[-1] - 1 + while remain[-1] == 0: + remain.pop() + size = count.pop() + stack[-size:] = [stack[-size:]] + if not remain: break + remain[-1] = remain[-1] - 1 + if not remain: break + args[i] = stack[0] + + varargs = None + if co.co_flags & CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + return args, varargs, varkw + +def getargspec(func): + """Get the names and default values of a function's arguments. + + A tuple of four things is returned: (args, varargs, varkw, defaults). + 'args' is a list of the argument names (it may contain nested lists). + 'varargs' and 'varkw' are the names of the * and ** arguments or None. + 'defaults' is an n-tuple of the default values of the last n arguments. + """ + + if ismethod(func): + func = func.im_func + if not (isfunction(func) or + isbuiltin(func) and hasattr(func, 'func_code')): + # PyPy extension: this works for built-in functions too + raise TypeError('arg is not a Python function') + args, varargs, varkw = getargs(func.func_code) + return args, varargs, varkw, func.func_defaults + +def getargvalues(frame): + """Get information about arguments passed into a particular frame. + + A tuple of four things is returned: (args, varargs, varkw, locals). + 'args' is a list of the argument names (it may contain nested lists). + 'varargs' and 'varkw' are the names of the * and ** arguments or None. + 'locals' is the locals dictionary of the given frame.""" + args, varargs, varkw = getargs(frame.f_code) + return args, varargs, varkw, frame.f_locals + +def joinseq(seq): + if len(seq) == 1: + return '(' + seq[0] + ',)' + else: + return '(' + string.join(seq, ', ') + ')' + +def strseq(object, convert, join=joinseq): + """Recursively walk a sequence, stringifying each element.""" + if type(object) in (list, tuple): + return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object)) + else: + return convert(object) + +def formatargspec(args, varargs=None, varkw=None, defaults=None, + formatarg=str, + formatvarargs=lambda name: '*' + name, + formatvarkw=lambda name: '**' + name, + formatvalue=lambda value: '=' + repr(value), + join=joinseq): + """Format an argument spec from the 4 values returned by getargspec. + + The first four arguments are (args, varargs, varkw, defaults). The + other four arguments are the corresponding optional formatting functions + that are called to turn names and values into strings. 
The ninth + argument is an optional function to format the sequence of arguments.""" + specs = [] + if defaults: + firstdefault = len(args) - len(defaults) + for i in range(len(args)): + spec = strseq(args[i], formatarg, join) + if defaults and i >= firstdefault: + spec = spec + formatvalue(defaults[i - firstdefault]) + specs.append(spec) + if varargs is not None: + specs.append(formatvarargs(varargs)) + if varkw is not None: + specs.append(formatvarkw(varkw)) + return '(' + string.join(specs, ', ') + ')' + +def formatargvalues(args, varargs, varkw, locals, + formatarg=str, + formatvarargs=lambda name: '*' + name, + formatvarkw=lambda name: '**' + name, + formatvalue=lambda value: '=' + repr(value), + join=joinseq): + """Format an argument spec from the 4 values returned by getargvalues. + + The first four arguments are (args, varargs, varkw, locals). The + next four arguments are the corresponding optional formatting functions + that are called to turn names and values into strings. The ninth + argument is an optional function to format the sequence of arguments.""" + def convert(name, locals=locals, + formatarg=formatarg, formatvalue=formatvalue): + return formatarg(name) + formatvalue(locals[name]) + specs = [] + for i in range(len(args)): + specs.append(strseq(args[i], convert, join)) + if varargs: + specs.append(formatvarargs(varargs) + formatvalue(locals[varargs])) + if varkw: + specs.append(formatvarkw(varkw) + formatvalue(locals[varkw])) + return '(' + string.join(specs, ', ') + ')' + +# -------------------------------------------------- stack frame extraction +def getframeinfo(frame, context=1): + """Get information about a frame or traceback object. + + A tuple of five things is returned: the filename, the line number of + the current line, the function name, a list of lines of context from + the source code, and the index of the current line within that list. + The optional second argument specifies the number of lines of context + to return, which are centered around the current line.""" + if istraceback(frame): + lineno = frame.tb_lineno + frame = frame.tb_frame + else: + lineno = frame.f_lineno + if not isframe(frame): + raise TypeError('arg is not a frame or traceback object') + + filename = getsourcefile(frame) or getfile(frame) + if context > 0: + start = lineno - 1 - context//2 + try: + lines, lnum = findsource(frame) + except IOError: + lines = index = None + else: + start = max(start, 1) + start = max(0, min(start, len(lines) - context)) + lines = lines[start:start+context] + index = lineno - 1 - start + else: + lines = index = None + + return (filename, lineno, frame.f_code.co_name, lines, index) + +def getlineno(frame): + """Get the line number from a frame object, allowing for optimization.""" + # FrameType.f_lineno is now a descriptor that grovels co_lnotab + return frame.f_lineno + +def getouterframes(frame, context=1): + """Get a list of records for a frame and all higher (calling) frames. + + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context.""" + framelist = [] + while frame: + framelist.append((frame,) + getframeinfo(frame, context)) + frame = frame.f_back + return framelist + +def getinnerframes(tb, context=1): + """Get a list of records for a traceback's frame and all lower frames. 
+ + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context.""" + framelist = [] + while tb: + framelist.append((tb.tb_frame,) + getframeinfo(tb, context)) + tb = tb.tb_next + return framelist + +currentframe = sys._getframe + +def stack(context=1): + """Return a list of records for the stack above the caller's frame.""" + return getouterframes(sys._getframe(1), context) + +def trace(context=1): + """Return a list of records for the stack below the current exception.""" + return getinnerframes(sys.exc_info()[2], context) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/locale.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/locale.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,1562 @@ +""" Locale support. + + The module provides low-level access to the C lib's locale APIs + and adds high level number formatting APIs as well as a locale + aliasing engine to complement these. + + The aliasing engine includes support for many commonly used locale + names and maps them to values suitable for passing to the C lib's + setlocale() function. It also includes default encodings for all + supported locale names. + +""" + +import sys, encodings, encodings.aliases + +# Try importing the _locale module. +# +# If this fails, fall back on a basic 'C' locale emulation. + +# Yuck: LC_MESSAGES is non-standard: can't tell whether it exists before +# trying the import. So __all__ is also fiddled at the end of the file. +__all__ = ["setlocale","Error","localeconv","strcoll","strxfrm", + "format","str","atof","atoi","LC_CTYPE","LC_COLLATE", + "LC_TIME","LC_MONETARY","LC_NUMERIC", "LC_ALL","CHAR_MAX"] + +try: + + from _locale import * + +except ImportError: + + # Locale emulation + + CHAR_MAX = 127 + LC_ALL = 6 + LC_COLLATE = 3 + LC_CTYPE = 0 + LC_MESSAGES = 5 + LC_MONETARY = 4 + LC_NUMERIC = 1 + LC_TIME = 2 + Error = ValueError + + def localeconv(): + """ localeconv() -> dict. + Returns numeric and monetary locale-specific parameters. + """ + # 'C' locale default values + return {'grouping': [127], + 'currency_symbol': '', + 'n_sign_posn': 127, + 'p_cs_precedes': 127, + 'n_cs_precedes': 127, + 'mon_grouping': [], + 'n_sep_by_space': 127, + 'decimal_point': '.', + 'negative_sign': '', + 'positive_sign': '', + 'p_sep_by_space': 127, + 'int_curr_symbol': '', + 'p_sign_posn': 127, + 'thousands_sep': '', + 'mon_thousands_sep': '', + 'frac_digits': 127, + 'mon_decimal_point': '', + 'int_frac_digits': 127} + + def setlocale(category, value=None): + """ setlocale(integer,string=None) -> string. + Activates/queries locale processing. + """ + if value not in (None, '', 'C'): + raise Error, '_locale emulation only supports "C" locale' + return 'C' + + def strcoll(a,b): + """ strcoll(string,string) -> int. + Compares two strings according to the locale. + """ + return cmp(a,b) + + def strxfrm(s): + """ strxfrm(string) -> string. + Returns a string that behaves for cmp locale-aware. 
+        """
+        return s
+
+### Number formatting APIs
+
+# Author: Martin von Loewis
+# improved by Georg Brandl
+
+#perform the grouping from right to left
+def _group(s, monetary=False):
+    conv = localeconv()
+    thousands_sep = conv[monetary and 'mon_thousands_sep' or 'thousands_sep']
+    grouping = conv[monetary and 'mon_grouping' or 'grouping']
+    if not grouping:
+        return (s, 0)
+    result = ""
+    seps = 0
+    spaces = ""
+    if s[-1] == ' ':
+        sp = s.find(' ')
+        spaces = s[sp:]
+        s = s[:sp]
+    while s and grouping:
+        # if grouping is -1, we are done
+        if grouping[0] == CHAR_MAX:
+            break
+        # 0: re-use last group ad infinitum
+        elif grouping[0] != 0:
+            #process last group
+            group = grouping[0]
+            grouping = grouping[1:]
+        if result:
+            result = s[-group:] + thousands_sep + result
+            seps += 1
+        else:
+            result = s[-group:]
+        s = s[:-group]
+    if s and s[-1] not in "0123456789":
+        # the leading string is only spaces and signs
+        return s + result + spaces, seps
+    if not result:
+        return s + spaces, seps
+    if s:
+        result = s + thousands_sep + result
+        seps += 1
+    return result + spaces, seps
+
+def format(percent, value, grouping=False, monetary=False, *additional):
+    """Returns the locale-aware substitution of a %? specifier
+    (percent).
+
+    additional is for format strings which contain one or more
+    '*' modifiers."""
+    # this is only for one-percent-specifier strings and this should be checked
+    if percent[0] != '%':
+        raise ValueError("format() must be given exactly one %char "
+                         "format specifier")
+    if additional:
+        formatted = percent % ((value,) + additional)
+    else:
+        formatted = percent % value
+    # floats and decimal ints need special action!
+    if percent[-1] in 'eEfFgG':
+        seps = 0
+        parts = formatted.split('.')
+        if grouping:
+            parts[0], seps = _group(parts[0], monetary=monetary)
+        decimal_point = localeconv()[monetary and 'mon_decimal_point'
+                                              or 'decimal_point']
+        formatted = decimal_point.join(parts)
+        while seps:
+            sp = formatted.find(' ')
+            if sp == -1: break
+            formatted = formatted[:sp] + formatted[sp+1:]
+            seps -= 1
+    elif percent[-1] in 'diu':
+        if grouping:
+            formatted = _group(formatted, monetary=monetary)[0]
+    return formatted
+
+import re, operator
+_percent_re = re.compile(r'%(?:\((?P<key>.*?)\))?'
+                         r'(?P<modifiers>[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')
+
+def format_string(f, val, grouping=False):
+    """Formats a string in the same way that the % formatting would use,
+    but takes the current locale into account.
+ Grouping is applied if the third parameter is true.""" + percents = list(_percent_re.finditer(f)) + new_f = _percent_re.sub('%s', f) + + if isinstance(val, tuple): + new_val = list(val) + i = 0 + for perc in percents: + starcount = perc.group('modifiers').count('*') + new_val[i] = format(perc.group(), new_val[i], grouping, False, *new_val[i+1:i+1+starcount]) + del new_val[i+1:i+1+starcount] + i += (1 + starcount) + val = tuple(new_val) + elif operator.isMappingType(val): + for perc in percents: + key = perc.group("key") + val[key] = format(perc.group(), val[key], grouping) + else: + # val is a single value + val = format(percents[0].group(), val, grouping) + + return new_f % val + +def currency(val, symbol=True, grouping=False, international=False): + """Formats val according to the currency settings + in the current locale.""" + conv = localeconv() + + # check for illegal values + digits = conv[international and 'int_frac_digits' or 'frac_digits'] + if digits == 127: + raise ValueError("Currency formatting is not possible using " + "the 'C' locale.") + + s = format('%%.%if' % digits, abs(val), grouping, monetary=True) + # '<' and '>' are markers if the sign must be inserted between symbol and value + s = '<' + s + '>' + + if symbol: + smb = conv[international and 'int_curr_symbol' or 'currency_symbol'] + precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes'] + separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space'] + + if precedes: + s = smb + (separated and ' ' or '') + s + else: + s = s + (separated and ' ' or '') + smb + + sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn'] + sign = conv[val<0 and 'negative_sign' or 'positive_sign'] + + if sign_pos == 0: + s = '(' + s + ')' + elif sign_pos == 1: + s = sign + s + elif sign_pos == 2: + s = s + sign + elif sign_pos == 3: + s = s.replace('<', sign) + elif sign_pos == 4: + s = s.replace('>', sign) + else: + # the default if nothing specified; + # this should be the most fitting sign position + s = sign + s + + return s.replace('<', '').replace('>', '') + +def str(val): + """Convert float to integer, taking the locale into account.""" + return format("%.12g", val) + +def atof(string, func=float): + "Parses a string as a float according to the locale settings." + #First, get rid of the grouping + ts = localeconv()['thousands_sep'] + if ts: + string = string.replace(ts, '') + #next, replace the decimal point with a dot + dd = localeconv()['decimal_point'] + if dd: + string = string.replace(dd, '.') + #finally, parse the string + return func(string) + +def atoi(str): + "Converts a string to an integer according to the locale settings." + return atof(str, int) + +def _test(): + setlocale(LC_ALL, "") + #do grouping + s1 = format("%d", 123456789,1) + print s1, "is", atoi(s1) + #standard formatting + s1 = str(3.14) + print s1, "is", atof(s1) + +### Locale name aliasing engine + +# Author: Marc-Andre Lemburg, mal at lemburg.com +# Various tweaks by Fredrik Lundh + +# store away the low-level version of setlocale (it's +# overridden below) +_setlocale = setlocale + +def normalize(localename): + + """ Returns a normalized locale code for the given locale + name. + + The returned locale code is formatted for use with + setlocale(). + + If normalization fails, the original name is returned + unchanged. + + If the given encoding is not known, the function defaults to + the default encoding for the locale code just like setlocale() + does. 
+ + """ + # Normalize the locale name and extract the encoding + fullname = localename.lower() + if ':' in fullname: + # ':' is sometimes used as encoding delimiter. + fullname = fullname.replace(':', '.') + if '.' in fullname: + langname, encoding = fullname.split('.')[:2] + fullname = langname + '.' + encoding + else: + langname = fullname + encoding = '' + + # First lookup: fullname (possibly with encoding) + norm_encoding = encoding.replace('-', '') + norm_encoding = norm_encoding.replace('_', '') + lookup_name = langname + '.' + encoding + code = locale_alias.get(lookup_name, None) + if code is not None: + return code + #print 'first lookup failed' + + # Second try: langname (without encoding) + code = locale_alias.get(langname, None) + if code is not None: + #print 'langname lookup succeeded' + if '.' in code: + langname, defenc = code.split('.') + else: + langname = code + defenc = '' + if encoding: + # Convert the encoding to a C lib compatible encoding string + norm_encoding = encodings.normalize_encoding(encoding) + #print 'norm encoding: %r' % norm_encoding + norm_encoding = encodings.aliases.aliases.get(norm_encoding, + norm_encoding) + #print 'aliased encoding: %r' % norm_encoding + encoding = locale_encoding_alias.get(norm_encoding, + norm_encoding) + else: + encoding = defenc + #print 'found encoding %r' % encoding + if encoding: + return langname + '.' + encoding + else: + return langname + + else: + return localename + +def _parse_localename(localename): + + """ Parses the locale code for localename and returns the + result as tuple (language code, encoding). + + The localename is normalized and passed through the locale + alias engine. A ValueError is raised in case the locale name + cannot be parsed. + + The language code corresponds to RFC 1766. code and encoding + can be None in case the values cannot be determined or are + unknown to this implementation. + + """ + code = normalize(localename) + if '@' in code: + # Deal with locale modifiers + code, modifier = code.split('@') + if modifier == 'euro' and '.' not in code: + # Assume Latin-9 for @euro locales. This is bogus, + # since some systems may use other encodings for these + # locales. Also, we ignore other modifiers. + return code, 'iso-8859-15' + + if '.' in code: + return tuple(code.split('.')[:2]) + elif code == 'C': + return None, None + raise ValueError, 'unknown locale: %s' % localename + +def _build_localename(localetuple): + + """ Builds a locale code from the given tuple (language code, + encoding). + + No aliasing or normalizing takes place. + + """ + language, encoding = localetuple + if language is None: + language = 'C' + if encoding is None: + return language + else: + return language + '.' + encoding + +def getdefaultlocale(envvars=('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE')): + + """ Tries to determine the default locale settings and returns + them as tuple (language code, encoding). + + According to POSIX, a program which has not called + setlocale(LC_ALL, "") runs using the portable 'C' locale. + Calling setlocale(LC_ALL, "") lets it use the default locale as + defined by the LANG variable. Since we don't want to interfere + with the current locale setting we thus emulate the behavior + in the way described above. + + To maintain compatibility with other platforms, not only the + LANG variable is tested, but a list of variables given as + envvars parameter. The first found to be defined will be + used. 
envvars defaults to the search path used in GNU gettext; + it must always contain the variable name 'LANG'. + + Except for the code 'C', the language code corresponds to RFC + 1766. code and encoding can be None in case the values cannot + be determined. + + """ + + try: + # check if it's supported by the _locale module + import _locale + code, encoding = _locale._getdefaultlocale() + except (ImportError, AttributeError): + pass + else: + # make sure the code/encoding values are valid + if sys.platform == "win32" and code and code[:2] == "0x": + # map windows language identifier to language name + code = windows_locale.get(int(code, 0)) + # ...add other platform-specific processing here, if + # necessary... + return code, encoding + + # fall back on POSIX behaviour + import os + lookup = os.environ.get + for variable in envvars: + localename = lookup(variable,None) + if localename: + if variable == 'LANGUAGE': + localename = localename.split(':')[0] + break + else: + localename = 'C' + return _parse_localename(localename) + + +def getlocale(category=LC_CTYPE): + + """ Returns the current setting for the given locale category as + tuple (language code, encoding). + + category may be one of the LC_* value except LC_ALL. It + defaults to LC_CTYPE. + + Except for the code 'C', the language code corresponds to RFC + 1766. code and encoding can be None in case the values cannot + be determined. + + """ + localename = _setlocale(category) + if category == LC_ALL and ';' in localename: + raise TypeError, 'category LC_ALL is not supported' + return _parse_localename(localename) + +def setlocale(category, locale=None): + + """ Set the locale for the given category. The locale can be + a string, a locale tuple (language code, encoding), or None. + + Locale tuples are converted to strings the locale aliasing + engine. Locale strings are passed directly to the C lib. + + category may be given as one of the LC_* values. + + """ + if locale and type(locale) is not type(""): + # convert to string + locale = normalize(_build_localename(locale)) + return _setlocale(category, locale) + +def resetlocale(category=LC_ALL): + + """ Sets the locale for category to the default setting. + + The default setting is determined by calling + getdefaultlocale(). category defaults to LC_ALL. + + """ + _setlocale(category, _build_localename(getdefaultlocale())) + +if sys.platform in ('win32', 'darwin', 'mac'): + # On Win32, this will return the ANSI code page + # On the Mac, it should return the system encoding; + # it might return "ascii" instead + def getpreferredencoding(do_setlocale = True): + """Return the charset that the user is likely using.""" + import _locale + return _locale._getdefaultlocale()[1] +else: + # On Unix, if CODESET is available, use that. 
+ try: + CODESET + except NameError: + # Fall back to parsing environment variables :-( + def getpreferredencoding(do_setlocale = True): + """Return the charset that the user is likely using, + by looking at environment variables.""" + return getdefaultlocale()[1] + else: + def getpreferredencoding(do_setlocale = True): + """Return the charset that the user is likely using, + according to the system configuration.""" + if do_setlocale: + oldloc = setlocale(LC_CTYPE) + setlocale(LC_CTYPE, "") + result = nl_langinfo(CODESET) + setlocale(LC_CTYPE, oldloc) + return result + else: + return nl_langinfo(CODESET) + + +### Database +# +# The following data was extracted from the locale.alias file which +# comes with X11 and then hand edited removing the explicit encoding +# definitions and adding some more aliases. The file is usually +# available as /usr/lib/X11/locale/locale.alias. +# + +# +# The local_encoding_alias table maps lowercase encoding alias names +# to C locale encoding names (case-sensitive). Note that normalize() +# first looks up the encoding in the encodings.aliases dictionary and +# then applies this mapping to find the correct C lib name for the +# encoding. +# +locale_encoding_alias = { + + # Mappings for non-standard encoding names used in locale names + '437': 'C', + 'c': 'C', + 'en': 'ISO8859-1', + 'jis': 'JIS7', + 'jis7': 'JIS7', + 'ajec': 'eucJP', + + # Mappings from Python codec names to C lib encoding names + 'ascii': 'ISO8859-1', + 'latin_1': 'ISO8859-1', + 'iso8859_1': 'ISO8859-1', + 'iso8859_10': 'ISO8859-10', + 'iso8859_11': 'ISO8859-11', + 'iso8859_13': 'ISO8859-13', + 'iso8859_14': 'ISO8859-14', + 'iso8859_15': 'ISO8859-15', + 'iso8859_2': 'ISO8859-2', + 'iso8859_3': 'ISO8859-3', + 'iso8859_4': 'ISO8859-4', + 'iso8859_5': 'ISO8859-5', + 'iso8859_6': 'ISO8859-6', + 'iso8859_7': 'ISO8859-7', + 'iso8859_8': 'ISO8859-8', + 'iso8859_9': 'ISO8859-9', + 'iso2022_jp': 'JIS7', + 'shift_jis': 'SJIS', + 'tactis': 'TACTIS', + 'euc_jp': 'eucJP', + 'euc_kr': 'eucKR', + 'utf_8': 'UTF8', + 'koi8_r': 'KOI8-R', + 'koi8_u': 'KOI8-U', + # XXX This list is still incomplete. If you know more + # mappings, please file a bug report. Thanks. +} + +# +# The locale_alias table maps lowercase alias names to C locale names +# (case-sensitive). Encodings are always separated from the locale +# name using a dot ('.'); they should only be given in case the +# language name is needed to interpret the given encoding alias +# correctly (CJK codes often have this need). +# +# Note that the normalize() function which uses this tables +# removes '_' and '-' characters from the encoding part of the +# locale name before doing the lookup. This saves a lot of +# space in the table. +# +# MAL 2004-12-10: +# Updated alias mapping to most recent locale.alias file +# from X.org distribution using makelocalealias.py. 
+# +# These are the differences compared to the old mapping (Python 2.4 +# and older): +# +# updated 'bg' -> 'bg_BG.ISO8859-5' to 'bg_BG.CP1251' +# updated 'bg_bg' -> 'bg_BG.ISO8859-5' to 'bg_BG.CP1251' +# updated 'bulgarian' -> 'bg_BG.ISO8859-5' to 'bg_BG.CP1251' +# updated 'cz' -> 'cz_CZ.ISO8859-2' to 'cs_CZ.ISO8859-2' +# updated 'cz_cz' -> 'cz_CZ.ISO8859-2' to 'cs_CZ.ISO8859-2' +# updated 'czech' -> 'cs_CS.ISO8859-2' to 'cs_CZ.ISO8859-2' +# updated 'dutch' -> 'nl_BE.ISO8859-1' to 'nl_NL.ISO8859-1' +# updated 'et' -> 'et_EE.ISO8859-4' to 'et_EE.ISO8859-15' +# updated 'et_ee' -> 'et_EE.ISO8859-4' to 'et_EE.ISO8859-15' +# updated 'fi' -> 'fi_FI.ISO8859-1' to 'fi_FI.ISO8859-15' +# updated 'fi_fi' -> 'fi_FI.ISO8859-1' to 'fi_FI.ISO8859-15' +# updated 'iw' -> 'iw_IL.ISO8859-8' to 'he_IL.ISO8859-8' +# updated 'iw_il' -> 'iw_IL.ISO8859-8' to 'he_IL.ISO8859-8' +# updated 'japanese' -> 'ja_JP.SJIS' to 'ja_JP.eucJP' +# updated 'lt' -> 'lt_LT.ISO8859-4' to 'lt_LT.ISO8859-13' +# updated 'lv' -> 'lv_LV.ISO8859-4' to 'lv_LV.ISO8859-13' +# updated 'sl' -> 'sl_CS.ISO8859-2' to 'sl_SI.ISO8859-2' +# updated 'slovene' -> 'sl_CS.ISO8859-2' to 'sl_SI.ISO8859-2' +# updated 'th_th' -> 'th_TH.TACTIS' to 'th_TH.ISO8859-11' +# updated 'zh_cn' -> 'zh_CN.eucCN' to 'zh_CN.gb2312' +# updated 'zh_cn.big5' -> 'zh_TW.eucTW' to 'zh_TW.big5' +# updated 'zh_tw' -> 'zh_TW.eucTW' to 'zh_TW.big5' +# +locale_alias = { + 'a3': 'a3_AZ.KOI8-C', + 'a3_az': 'a3_AZ.KOI8-C', + 'a3_az.koi8c': 'a3_AZ.KOI8-C', + 'af': 'af_ZA.ISO8859-1', + 'af_za': 'af_ZA.ISO8859-1', + 'af_za.iso88591': 'af_ZA.ISO8859-1', + 'am': 'am_ET.UTF-8', + 'american': 'en_US.ISO8859-1', + 'american.iso88591': 'en_US.ISO8859-1', + 'ar': 'ar_AA.ISO8859-6', + 'ar_aa': 'ar_AA.ISO8859-6', + 'ar_aa.iso88596': 'ar_AA.ISO8859-6', + 'ar_ae': 'ar_AE.ISO8859-6', + 'ar_bh': 'ar_BH.ISO8859-6', + 'ar_dz': 'ar_DZ.ISO8859-6', + 'ar_eg': 'ar_EG.ISO8859-6', + 'ar_eg.iso88596': 'ar_EG.ISO8859-6', + 'ar_iq': 'ar_IQ.ISO8859-6', + 'ar_jo': 'ar_JO.ISO8859-6', + 'ar_kw': 'ar_KW.ISO8859-6', + 'ar_lb': 'ar_LB.ISO8859-6', + 'ar_ly': 'ar_LY.ISO8859-6', + 'ar_ma': 'ar_MA.ISO8859-6', + 'ar_om': 'ar_OM.ISO8859-6', + 'ar_qa': 'ar_QA.ISO8859-6', + 'ar_sa': 'ar_SA.ISO8859-6', + 'ar_sa.iso88596': 'ar_SA.ISO8859-6', + 'ar_sd': 'ar_SD.ISO8859-6', + 'ar_sy': 'ar_SY.ISO8859-6', + 'ar_tn': 'ar_TN.ISO8859-6', + 'ar_ye': 'ar_YE.ISO8859-6', + 'arabic': 'ar_AA.ISO8859-6', + 'arabic.iso88596': 'ar_AA.ISO8859-6', + 'az': 'az_AZ.ISO8859-9E', + 'az_az': 'az_AZ.ISO8859-9E', + 'az_az.iso88599e': 'az_AZ.ISO8859-9E', + 'be': 'be_BY.CP1251', + 'be_by': 'be_BY.CP1251', + 'be_by.cp1251': 'be_BY.CP1251', + 'be_by.microsoftcp1251': 'be_BY.CP1251', + 'bg': 'bg_BG.CP1251', + 'bg_bg': 'bg_BG.CP1251', + 'bg_bg.cp1251': 'bg_BG.CP1251', + 'bg_bg.iso88595': 'bg_BG.ISO8859-5', + 'bg_bg.koi8r': 'bg_BG.KOI8-R', + 'bg_bg.microsoftcp1251': 'bg_BG.CP1251', + 'bokmal': 'nb_NO.ISO8859-1', + 'bokm\xe5l': 'nb_NO.ISO8859-1', + 'br': 'br_FR.ISO8859-1', + 'br_fr': 'br_FR.ISO8859-1', + 'br_fr.iso88591': 'br_FR.ISO8859-1', + 'br_fr.iso885914': 'br_FR.ISO8859-14', + 'br_fr.iso885915': 'br_FR.ISO8859-15', + 'br_fr at euro': 'br_FR.ISO8859-15', + 'bulgarian': 'bg_BG.CP1251', + 'c': 'C', + 'c-french': 'fr_CA.ISO8859-1', + 'c-french.iso88591': 'fr_CA.ISO8859-1', + 'c.en': 'C', + 'c.iso88591': 'en_US.ISO8859-1', + 'c_c': 'C', + 'c_c.c': 'C', + 'ca': 'ca_ES.ISO8859-1', + 'ca_es': 'ca_ES.ISO8859-1', + 'ca_es.iso88591': 'ca_ES.ISO8859-1', + 'ca_es.iso885915': 'ca_ES.ISO8859-15', + 'ca_es at euro': 'ca_ES.ISO8859-15', + 'catalan': 'ca_ES.ISO8859-1', + 
'cextend': 'en_US.ISO8859-1', + 'cextend.en': 'en_US.ISO8859-1', + 'chinese-s': 'zh_CN.eucCN', + 'chinese-t': 'zh_TW.eucTW', + 'croatian': 'hr_HR.ISO8859-2', + 'cs': 'cs_CZ.ISO8859-2', + 'cs_cs': 'cs_CZ.ISO8859-2', + 'cs_cs.iso88592': 'cs_CZ.ISO8859-2', + 'cs_cz': 'cs_CZ.ISO8859-2', + 'cs_cz.iso88592': 'cs_CZ.ISO8859-2', + 'cy': 'cy_GB.ISO8859-1', + 'cy_gb': 'cy_GB.ISO8859-1', + 'cy_gb.iso88591': 'cy_GB.ISO8859-1', + 'cy_gb.iso885914': 'cy_GB.ISO8859-14', + 'cy_gb.iso885915': 'cy_GB.ISO8859-15', + 'cy_gb at euro': 'cy_GB.ISO8859-15', + 'cz': 'cs_CZ.ISO8859-2', + 'cz_cz': 'cs_CZ.ISO8859-2', + 'czech': 'cs_CZ.ISO8859-2', + 'da': 'da_DK.ISO8859-1', + 'da_dk': 'da_DK.ISO8859-1', + 'da_dk.88591': 'da_DK.ISO8859-1', + 'da_dk.885915': 'da_DK.ISO8859-15', + 'da_dk.iso88591': 'da_DK.ISO8859-1', + 'da_dk.iso885915': 'da_DK.ISO8859-15', + 'da_dk at euro': 'da_DK.ISO8859-15', + 'danish': 'da_DK.ISO8859-1', + 'danish.iso88591': 'da_DK.ISO8859-1', + 'dansk': 'da_DK.ISO8859-1', + 'de': 'de_DE.ISO8859-1', + 'de_at': 'de_AT.ISO8859-1', + 'de_at.iso88591': 'de_AT.ISO8859-1', + 'de_at.iso885915': 'de_AT.ISO8859-15', + 'de_at at euro': 'de_AT.ISO8859-15', + 'de_be': 'de_BE.ISO8859-1', + 'de_be.iso88591': 'de_BE.ISO8859-1', + 'de_be.iso885915': 'de_BE.ISO8859-15', + 'de_be at euro': 'de_BE.ISO8859-15', + 'de_ch': 'de_CH.ISO8859-1', + 'de_ch.iso88591': 'de_CH.ISO8859-1', + 'de_ch.iso885915': 'de_CH.ISO8859-15', + 'de_ch at euro': 'de_CH.ISO8859-15', + 'de_de': 'de_DE.ISO8859-1', + 'de_de.88591': 'de_DE.ISO8859-1', + 'de_de.885915': 'de_DE.ISO8859-15', + 'de_de.885915 at euro': 'de_DE.ISO8859-15', + 'de_de.iso88591': 'de_DE.ISO8859-1', + 'de_de.iso885915': 'de_DE.ISO8859-15', + 'de_de at euro': 'de_DE.ISO8859-15', + 'de_lu': 'de_LU.ISO8859-1', + 'de_lu.iso88591': 'de_LU.ISO8859-1', + 'de_lu.iso885915': 'de_LU.ISO8859-15', + 'de_lu at euro': 'de_LU.ISO8859-15', + 'deutsch': 'de_DE.ISO8859-1', + 'dutch': 'nl_NL.ISO8859-1', + 'dutch.iso88591': 'nl_BE.ISO8859-1', + 'ee': 'ee_EE.ISO8859-4', + 'ee_ee': 'ee_EE.ISO8859-4', + 'ee_ee.iso88594': 'ee_EE.ISO8859-4', + 'eesti': 'et_EE.ISO8859-1', + 'el': 'el_GR.ISO8859-7', + 'el_gr': 'el_GR.ISO8859-7', + 'el_gr.iso88597': 'el_GR.ISO8859-7', + 'el_gr at euro': 'el_GR.ISO8859-15', + 'en': 'en_US.ISO8859-1', + 'en.iso88591': 'en_US.ISO8859-1', + 'en_au': 'en_AU.ISO8859-1', + 'en_au.iso88591': 'en_AU.ISO8859-1', + 'en_be': 'en_BE.ISO8859-1', + 'en_be at euro': 'en_BE.ISO8859-15', + 'en_bw': 'en_BW.ISO8859-1', + 'en_ca': 'en_CA.ISO8859-1', + 'en_ca.iso88591': 'en_CA.ISO8859-1', + 'en_gb': 'en_GB.ISO8859-1', + 'en_gb.88591': 'en_GB.ISO8859-1', + 'en_gb.iso88591': 'en_GB.ISO8859-1', + 'en_gb.iso885915': 'en_GB.ISO8859-15', + 'en_gb at euro': 'en_GB.ISO8859-15', + 'en_hk': 'en_HK.ISO8859-1', + 'en_ie': 'en_IE.ISO8859-1', + 'en_ie.iso88591': 'en_IE.ISO8859-1', + 'en_ie.iso885915': 'en_IE.ISO8859-15', + 'en_ie at euro': 'en_IE.ISO8859-15', + 'en_in': 'en_IN.ISO8859-1', + 'en_nz': 'en_NZ.ISO8859-1', + 'en_nz.iso88591': 'en_NZ.ISO8859-1', + 'en_ph': 'en_PH.ISO8859-1', + 'en_sg': 'en_SG.ISO8859-1', + 'en_uk': 'en_GB.ISO8859-1', + 'en_us': 'en_US.ISO8859-1', + 'en_us.88591': 'en_US.ISO8859-1', + 'en_us.885915': 'en_US.ISO8859-15', + 'en_us.iso88591': 'en_US.ISO8859-1', + 'en_us.iso885915': 'en_US.ISO8859-15', + 'en_us.iso885915 at euro': 'en_US.ISO8859-15', + 'en_us at euro': 'en_US.ISO8859-15', + 'en_us at euro@euro': 'en_US.ISO8859-15', + 'en_za': 'en_ZA.ISO8859-1', + 'en_za.88591': 'en_ZA.ISO8859-1', + 'en_za.iso88591': 'en_ZA.ISO8859-1', + 'en_za.iso885915': 'en_ZA.ISO8859-15', + 
'en_za at euro': 'en_ZA.ISO8859-15', + 'en_zw': 'en_ZW.ISO8859-1', + 'eng_gb': 'en_GB.ISO8859-1', + 'eng_gb.8859': 'en_GB.ISO8859-1', + 'english': 'en_EN.ISO8859-1', + 'english.iso88591': 'en_EN.ISO8859-1', + 'english_uk': 'en_GB.ISO8859-1', + 'english_uk.8859': 'en_GB.ISO8859-1', + 'english_united-states': 'en_US.ISO8859-1', + 'english_united-states.437': 'C', + 'english_us': 'en_US.ISO8859-1', + 'english_us.8859': 'en_US.ISO8859-1', + 'english_us.ascii': 'en_US.ISO8859-1', + 'eo': 'eo_XX.ISO8859-3', + 'eo_eo': 'eo_EO.ISO8859-3', + 'eo_eo.iso88593': 'eo_EO.ISO8859-3', + 'eo_xx': 'eo_XX.ISO8859-3', + 'eo_xx.iso88593': 'eo_XX.ISO8859-3', + 'es': 'es_ES.ISO8859-1', + 'es_ar': 'es_AR.ISO8859-1', + 'es_ar.iso88591': 'es_AR.ISO8859-1', + 'es_bo': 'es_BO.ISO8859-1', + 'es_bo.iso88591': 'es_BO.ISO8859-1', + 'es_cl': 'es_CL.ISO8859-1', + 'es_cl.iso88591': 'es_CL.ISO8859-1', + 'es_co': 'es_CO.ISO8859-1', + 'es_co.iso88591': 'es_CO.ISO8859-1', + 'es_cr': 'es_CR.ISO8859-1', + 'es_cr.iso88591': 'es_CR.ISO8859-1', + 'es_do': 'es_DO.ISO8859-1', + 'es_do.iso88591': 'es_DO.ISO8859-1', + 'es_ec': 'es_EC.ISO8859-1', + 'es_ec.iso88591': 'es_EC.ISO8859-1', + 'es_es': 'es_ES.ISO8859-1', + 'es_es.88591': 'es_ES.ISO8859-1', + 'es_es.iso88591': 'es_ES.ISO8859-1', + 'es_es.iso885915': 'es_ES.ISO8859-15', + 'es_es at euro': 'es_ES.ISO8859-15', + 'es_gt': 'es_GT.ISO8859-1', + 'es_gt.iso88591': 'es_GT.ISO8859-1', + 'es_hn': 'es_HN.ISO8859-1', + 'es_hn.iso88591': 'es_HN.ISO8859-1', + 'es_mx': 'es_MX.ISO8859-1', + 'es_mx.iso88591': 'es_MX.ISO8859-1', + 'es_ni': 'es_NI.ISO8859-1', + 'es_ni.iso88591': 'es_NI.ISO8859-1', + 'es_pa': 'es_PA.ISO8859-1', + 'es_pa.iso88591': 'es_PA.ISO8859-1', + 'es_pa.iso885915': 'es_PA.ISO8859-15', + 'es_pa at euro': 'es_PA.ISO8859-15', + 'es_pe': 'es_PE.ISO8859-1', + 'es_pe.iso88591': 'es_PE.ISO8859-1', + 'es_pe.iso885915': 'es_PE.ISO8859-15', + 'es_pe at euro': 'es_PE.ISO8859-15', + 'es_pr': 'es_PR.ISO8859-1', + 'es_pr.iso88591': 'es_PR.ISO8859-1', + 'es_py': 'es_PY.ISO8859-1', + 'es_py.iso88591': 'es_PY.ISO8859-1', + 'es_py.iso885915': 'es_PY.ISO8859-15', + 'es_py at euro': 'es_PY.ISO8859-15', + 'es_sv': 'es_SV.ISO8859-1', + 'es_sv.iso88591': 'es_SV.ISO8859-1', + 'es_sv.iso885915': 'es_SV.ISO8859-15', + 'es_sv at euro': 'es_SV.ISO8859-15', + 'es_us': 'es_US.ISO8859-1', + 'es_uy': 'es_UY.ISO8859-1', + 'es_uy.iso88591': 'es_UY.ISO8859-1', + 'es_uy.iso885915': 'es_UY.ISO8859-15', + 'es_uy at euro': 'es_UY.ISO8859-15', + 'es_ve': 'es_VE.ISO8859-1', + 'es_ve.iso88591': 'es_VE.ISO8859-1', + 'es_ve.iso885915': 'es_VE.ISO8859-15', + 'es_ve at euro': 'es_VE.ISO8859-15', + 'estonian': 'et_EE.ISO8859-1', + 'et': 'et_EE.ISO8859-15', + 'et_ee': 'et_EE.ISO8859-15', + 'et_ee.iso88591': 'et_EE.ISO8859-1', + 'et_ee.iso885913': 'et_EE.ISO8859-13', + 'et_ee.iso885915': 'et_EE.ISO8859-15', + 'et_ee.iso88594': 'et_EE.ISO8859-4', + 'et_ee at euro': 'et_EE.ISO8859-15', + 'eu': 'eu_ES.ISO8859-1', + 'eu_es': 'eu_ES.ISO8859-1', + 'eu_es.iso88591': 'eu_ES.ISO8859-1', + 'eu_es.iso885915': 'eu_ES.ISO8859-15', + 'eu_es at euro': 'eu_ES.ISO8859-15', + 'fa': 'fa_IR.UTF-8', + 'fa_ir': 'fa_IR.UTF-8', + 'fa_ir.isiri3342': 'fa_IR.ISIRI-3342', + 'fi': 'fi_FI.ISO8859-15', + 'fi_fi': 'fi_FI.ISO8859-15', + 'fi_fi.88591': 'fi_FI.ISO8859-1', + 'fi_fi.iso88591': 'fi_FI.ISO8859-1', + 'fi_fi.iso885915': 'fi_FI.ISO8859-15', + 'fi_fi.utf8 at euro': 'fi_FI.UTF-8', + 'fi_fi at euro': 'fi_FI.ISO8859-15', + 'finnish': 'fi_FI.ISO8859-1', + 'finnish.iso88591': 'fi_FI.ISO8859-1', + 'fo': 'fo_FO.ISO8859-1', + 'fo_fo': 'fo_FO.ISO8859-1', + 
'fo_fo.iso88591': 'fo_FO.ISO8859-1', + 'fo_fo.iso885915': 'fo_FO.ISO8859-15', + 'fo_fo at euro': 'fo_FO.ISO8859-15', + 'fr': 'fr_FR.ISO8859-1', + 'fr_be': 'fr_BE.ISO8859-1', + 'fr_be.88591': 'fr_BE.ISO8859-1', + 'fr_be.iso88591': 'fr_BE.ISO8859-1', + 'fr_be.iso885915': 'fr_BE.ISO8859-15', + 'fr_be at euro': 'fr_BE.ISO8859-15', + 'fr_ca': 'fr_CA.ISO8859-1', + 'fr_ca.88591': 'fr_CA.ISO8859-1', + 'fr_ca.iso88591': 'fr_CA.ISO8859-1', + 'fr_ca.iso885915': 'fr_CA.ISO8859-15', + 'fr_ca at euro': 'fr_CA.ISO8859-15', + 'fr_ch': 'fr_CH.ISO8859-1', + 'fr_ch.88591': 'fr_CH.ISO8859-1', + 'fr_ch.iso88591': 'fr_CH.ISO8859-1', + 'fr_ch.iso885915': 'fr_CH.ISO8859-15', + 'fr_ch at euro': 'fr_CH.ISO8859-15', + 'fr_fr': 'fr_FR.ISO8859-1', + 'fr_fr.88591': 'fr_FR.ISO8859-1', + 'fr_fr.iso88591': 'fr_FR.ISO8859-1', + 'fr_fr.iso885915': 'fr_FR.ISO8859-15', + 'fr_fr at euro': 'fr_FR.ISO8859-15', + 'fr_lu': 'fr_LU.ISO8859-1', + 'fr_lu.88591': 'fr_LU.ISO8859-1', + 'fr_lu.iso88591': 'fr_LU.ISO8859-1', + 'fr_lu.iso885915': 'fr_LU.ISO8859-15', + 'fr_lu at euro': 'fr_LU.ISO8859-15', + 'fran\xe7ais': 'fr_FR.ISO8859-1', + 'fre_fr': 'fr_FR.ISO8859-1', + 'fre_fr.8859': 'fr_FR.ISO8859-1', + 'french': 'fr_FR.ISO8859-1', + 'french.iso88591': 'fr_CH.ISO8859-1', + 'french_france': 'fr_FR.ISO8859-1', + 'french_france.8859': 'fr_FR.ISO8859-1', + 'ga': 'ga_IE.ISO8859-1', + 'ga_ie': 'ga_IE.ISO8859-1', + 'ga_ie.iso88591': 'ga_IE.ISO8859-1', + 'ga_ie.iso885914': 'ga_IE.ISO8859-14', + 'ga_ie.iso885915': 'ga_IE.ISO8859-15', + 'ga_ie at euro': 'ga_IE.ISO8859-15', + 'galego': 'gl_ES.ISO8859-1', + 'galician': 'gl_ES.ISO8859-1', + 'gd': 'gd_GB.ISO8859-1', + 'gd_gb': 'gd_GB.ISO8859-1', + 'gd_gb.iso88591': 'gd_GB.ISO8859-1', + 'gd_gb.iso885914': 'gd_GB.ISO8859-14', + 'gd_gb.iso885915': 'gd_GB.ISO8859-15', + 'gd_gb at euro': 'gd_GB.ISO8859-15', + 'ger_de': 'de_DE.ISO8859-1', + 'ger_de.8859': 'de_DE.ISO8859-1', + 'german': 'de_DE.ISO8859-1', + 'german.iso88591': 'de_CH.ISO8859-1', + 'german_germany': 'de_DE.ISO8859-1', + 'german_germany.8859': 'de_DE.ISO8859-1', + 'gl': 'gl_ES.ISO8859-1', + 'gl_es': 'gl_ES.ISO8859-1', + 'gl_es.iso88591': 'gl_ES.ISO8859-1', + 'gl_es.iso885915': 'gl_ES.ISO8859-15', + 'gl_es at euro': 'gl_ES.ISO8859-15', + 'greek': 'el_GR.ISO8859-7', + 'greek.iso88597': 'el_GR.ISO8859-7', + 'gv': 'gv_GB.ISO8859-1', + 'gv_gb': 'gv_GB.ISO8859-1', + 'gv_gb.iso88591': 'gv_GB.ISO8859-1', + 'gv_gb.iso885914': 'gv_GB.ISO8859-14', + 'gv_gb.iso885915': 'gv_GB.ISO8859-15', + 'gv_gb at euro': 'gv_GB.ISO8859-15', + 'he': 'he_IL.ISO8859-8', + 'he_il': 'he_IL.ISO8859-8', + 'he_il.cp1255': 'he_IL.CP1255', + 'he_il.iso88598': 'he_IL.ISO8859-8', + 'he_il.microsoftcp1255': 'he_IL.CP1255', + 'hebrew': 'iw_IL.ISO8859-8', + 'hebrew.iso88598': 'iw_IL.ISO8859-8', + 'hi': 'hi_IN.ISCII-DEV', + 'hi_in': 'hi_IN.ISCII-DEV', + 'hi_in.isciidev': 'hi_IN.ISCII-DEV', + 'hr': 'hr_HR.ISO8859-2', + 'hr_hr': 'hr_HR.ISO8859-2', + 'hr_hr.iso88592': 'hr_HR.ISO8859-2', + 'hrvatski': 'hr_HR.ISO8859-2', + 'hu': 'hu_HU.ISO8859-2', + 'hu_hu': 'hu_HU.ISO8859-2', + 'hu_hu.iso88592': 'hu_HU.ISO8859-2', + 'hungarian': 'hu_HU.ISO8859-2', + 'icelandic': 'is_IS.ISO8859-1', + 'icelandic.iso88591': 'is_IS.ISO8859-1', + 'id': 'id_ID.ISO8859-1', + 'id_id': 'id_ID.ISO8859-1', + 'in': 'id_ID.ISO8859-1', + 'in_id': 'id_ID.ISO8859-1', + 'is': 'is_IS.ISO8859-1', + 'is_is': 'is_IS.ISO8859-1', + 'is_is.iso88591': 'is_IS.ISO8859-1', + 'is_is.iso885915': 'is_IS.ISO8859-15', + 'is_is at euro': 'is_IS.ISO8859-15', + 'iso-8859-1': 'en_US.ISO8859-1', + 'iso-8859-15': 'en_US.ISO8859-15', + 
'iso8859-1': 'en_US.ISO8859-1', + 'iso8859-15': 'en_US.ISO8859-15', + 'iso_8859_1': 'en_US.ISO8859-1', + 'iso_8859_15': 'en_US.ISO8859-15', + 'it': 'it_IT.ISO8859-1', + 'it_ch': 'it_CH.ISO8859-1', + 'it_ch.iso88591': 'it_CH.ISO8859-1', + 'it_ch.iso885915': 'it_CH.ISO8859-15', + 'it_ch at euro': 'it_CH.ISO8859-15', + 'it_it': 'it_IT.ISO8859-1', + 'it_it.88591': 'it_IT.ISO8859-1', + 'it_it.iso88591': 'it_IT.ISO8859-1', + 'it_it.iso885915': 'it_IT.ISO8859-15', + 'it_it at euro': 'it_IT.ISO8859-15', + 'italian': 'it_IT.ISO8859-1', + 'italian.iso88591': 'it_IT.ISO8859-1', + 'iu': 'iu_CA.NUNACOM-8', + 'iu_ca': 'iu_CA.NUNACOM-8', + 'iu_ca.nunacom8': 'iu_CA.NUNACOM-8', + 'iw': 'he_IL.ISO8859-8', + 'iw_il': 'he_IL.ISO8859-8', + 'iw_il.iso88598': 'he_IL.ISO8859-8', + 'ja': 'ja_JP.eucJP', + 'ja.jis': 'ja_JP.JIS7', + 'ja.sjis': 'ja_JP.SJIS', + 'ja_jp': 'ja_JP.eucJP', + 'ja_jp.ajec': 'ja_JP.eucJP', + 'ja_jp.euc': 'ja_JP.eucJP', + 'ja_jp.eucjp': 'ja_JP.eucJP', + 'ja_jp.iso-2022-jp': 'ja_JP.JIS7', + 'ja_jp.iso2022jp': 'ja_JP.JIS7', + 'ja_jp.jis': 'ja_JP.JIS7', + 'ja_jp.jis7': 'ja_JP.JIS7', + 'ja_jp.mscode': 'ja_JP.SJIS', + 'ja_jp.sjis': 'ja_JP.SJIS', + 'ja_jp.ujis': 'ja_JP.eucJP', + 'japan': 'ja_JP.eucJP', + 'japanese': 'ja_JP.eucJP', + 'japanese-euc': 'ja_JP.eucJP', + 'japanese.euc': 'ja_JP.eucJP', + 'japanese.sjis': 'ja_JP.SJIS', + 'jp_jp': 'ja_JP.eucJP', + 'ka': 'ka_GE.GEORGIAN-ACADEMY', + 'ka_ge': 'ka_GE.GEORGIAN-ACADEMY', + 'ka_ge.georgianacademy': 'ka_GE.GEORGIAN-ACADEMY', + 'ka_ge.georgianps': 'ka_GE.GEORGIAN-PS', + 'ka_ge.georgianrs': 'ka_GE.GEORGIAN-ACADEMY', + 'kl': 'kl_GL.ISO8859-1', + 'kl_gl': 'kl_GL.ISO8859-1', + 'kl_gl.iso88591': 'kl_GL.ISO8859-1', + 'kl_gl.iso885915': 'kl_GL.ISO8859-15', + 'kl_gl at euro': 'kl_GL.ISO8859-15', + 'ko': 'ko_KR.eucKR', + 'ko_kr': 'ko_KR.eucKR', + 'ko_kr.euc': 'ko_KR.eucKR', + 'ko_kr.euckr': 'ko_KR.eucKR', + 'korean': 'ko_KR.eucKR', + 'korean.euc': 'ko_KR.eucKR', + 'kw': 'kw_GB.ISO8859-1', + 'kw_gb': 'kw_GB.ISO8859-1', + 'kw_gb.iso88591': 'kw_GB.ISO8859-1', + 'kw_gb.iso885914': 'kw_GB.ISO8859-14', + 'kw_gb.iso885915': 'kw_GB.ISO8859-15', + 'kw_gb at euro': 'kw_GB.ISO8859-15', + 'lithuanian': 'lt_LT.ISO8859-13', + 'lo': 'lo_LA.MULELAO-1', + 'lo_la': 'lo_LA.MULELAO-1', + 'lo_la.cp1133': 'lo_LA.IBM-CP1133', + 'lo_la.ibmcp1133': 'lo_LA.IBM-CP1133', + 'lo_la.mulelao1': 'lo_LA.MULELAO-1', + 'lt': 'lt_LT.ISO8859-13', + 'lt_lt': 'lt_LT.ISO8859-13', + 'lt_lt.iso885913': 'lt_LT.ISO8859-13', + 'lt_lt.iso88594': 'lt_LT.ISO8859-4', + 'lv': 'lv_LV.ISO8859-13', + 'lv_lv': 'lv_LV.ISO8859-13', + 'lv_lv.iso885913': 'lv_LV.ISO8859-13', + 'lv_lv.iso88594': 'lv_LV.ISO8859-4', + 'mi': 'mi_NZ.ISO8859-1', + 'mi_nz': 'mi_NZ.ISO8859-1', + 'mi_nz.iso88591': 'mi_NZ.ISO8859-1', + 'mk': 'mk_MK.ISO8859-5', + 'mk_mk': 'mk_MK.ISO8859-5', + 'mk_mk.cp1251': 'mk_MK.CP1251', + 'mk_mk.iso88595': 'mk_MK.ISO8859-5', + 'mk_mk.microsoftcp1251': 'mk_MK.CP1251', + 'ms': 'ms_MY.ISO8859-1', + 'ms_my': 'ms_MY.ISO8859-1', + 'ms_my.iso88591': 'ms_MY.ISO8859-1', + 'mt': 'mt_MT.ISO8859-3', + 'mt_mt': 'mt_MT.ISO8859-3', + 'mt_mt.iso88593': 'mt_MT.ISO8859-3', + 'nb': 'nb_NO.ISO8859-1', + 'nb_no': 'nb_NO.ISO8859-1', + 'nb_no.88591': 'nb_NO.ISO8859-1', + 'nb_no.iso88591': 'nb_NO.ISO8859-1', + 'nb_no.iso885915': 'nb_NO.ISO8859-15', + 'nb_no at euro': 'nb_NO.ISO8859-15', + 'nl': 'nl_NL.ISO8859-1', + 'nl_be': 'nl_BE.ISO8859-1', + 'nl_be.88591': 'nl_BE.ISO8859-1', + 'nl_be.iso88591': 'nl_BE.ISO8859-1', + 'nl_be.iso885915': 'nl_BE.ISO8859-15', + 'nl_be at euro': 'nl_BE.ISO8859-15', + 'nl_nl': 'nl_NL.ISO8859-1', + 
'nl_nl.88591': 'nl_NL.ISO8859-1', + 'nl_nl.iso88591': 'nl_NL.ISO8859-1', + 'nl_nl.iso885915': 'nl_NL.ISO8859-15', + 'nl_nl at euro': 'nl_NL.ISO8859-15', + 'nn': 'nn_NO.ISO8859-1', + 'nn_no': 'nn_NO.ISO8859-1', + 'nn_no.88591': 'nn_NO.ISO8859-1', + 'nn_no.iso88591': 'nn_NO.ISO8859-1', + 'nn_no.iso885915': 'nn_NO.ISO8859-15', + 'nn_no at euro': 'nn_NO.ISO8859-15', + 'no': 'no_NO.ISO8859-1', + 'no at nynorsk': 'ny_NO.ISO8859-1', + 'no_no': 'no_NO.ISO8859-1', + 'no_no.88591': 'no_NO.ISO8859-1', + 'no_no.iso88591': 'no_NO.ISO8859-1', + 'no_no.iso885915': 'no_NO.ISO8859-15', + 'no_no at euro': 'no_NO.ISO8859-15', + 'norwegian': 'no_NO.ISO8859-1', + 'norwegian.iso88591': 'no_NO.ISO8859-1', + 'ny': 'ny_NO.ISO8859-1', + 'ny_no': 'ny_NO.ISO8859-1', + 'ny_no.88591': 'ny_NO.ISO8859-1', + 'ny_no.iso88591': 'ny_NO.ISO8859-1', + 'ny_no.iso885915': 'ny_NO.ISO8859-15', + 'ny_no at euro': 'ny_NO.ISO8859-15', + 'nynorsk': 'nn_NO.ISO8859-1', + 'oc': 'oc_FR.ISO8859-1', + 'oc_fr': 'oc_FR.ISO8859-1', + 'oc_fr.iso88591': 'oc_FR.ISO8859-1', + 'oc_fr.iso885915': 'oc_FR.ISO8859-15', + 'oc_fr at euro': 'oc_FR.ISO8859-15', + 'pd': 'pd_US.ISO8859-1', + 'pd_de': 'pd_DE.ISO8859-1', + 'pd_de.iso88591': 'pd_DE.ISO8859-1', + 'pd_de.iso885915': 'pd_DE.ISO8859-15', + 'pd_de at euro': 'pd_DE.ISO8859-15', + 'pd_us': 'pd_US.ISO8859-1', + 'pd_us.iso88591': 'pd_US.ISO8859-1', + 'pd_us.iso885915': 'pd_US.ISO8859-15', + 'pd_us at euro': 'pd_US.ISO8859-15', + 'ph': 'ph_PH.ISO8859-1', + 'ph_ph': 'ph_PH.ISO8859-1', + 'ph_ph.iso88591': 'ph_PH.ISO8859-1', + 'pl': 'pl_PL.ISO8859-2', + 'pl_pl': 'pl_PL.ISO8859-2', + 'pl_pl.iso88592': 'pl_PL.ISO8859-2', + 'polish': 'pl_PL.ISO8859-2', + 'portuguese': 'pt_PT.ISO8859-1', + 'portuguese.iso88591': 'pt_PT.ISO8859-1', + 'portuguese_brazil': 'pt_BR.ISO8859-1', + 'portuguese_brazil.8859': 'pt_BR.ISO8859-1', + 'posix': 'C', + 'posix-utf2': 'C', + 'pp': 'pp_AN.ISO8859-1', + 'pp_an': 'pp_AN.ISO8859-1', + 'pp_an.iso88591': 'pp_AN.ISO8859-1', + 'pt': 'pt_PT.ISO8859-1', + 'pt_br': 'pt_BR.ISO8859-1', + 'pt_br.88591': 'pt_BR.ISO8859-1', + 'pt_br.iso88591': 'pt_BR.ISO8859-1', + 'pt_br.iso885915': 'pt_BR.ISO8859-15', + 'pt_br at euro': 'pt_BR.ISO8859-15', + 'pt_pt': 'pt_PT.ISO8859-1', + 'pt_pt.88591': 'pt_PT.ISO8859-1', + 'pt_pt.iso88591': 'pt_PT.ISO8859-1', + 'pt_pt.iso885915': 'pt_PT.ISO8859-15', + 'pt_pt.utf8 at euro': 'pt_PT.UTF-8', + 'pt_pt at euro': 'pt_PT.ISO8859-15', + 'ro': 'ro_RO.ISO8859-2', + 'ro_ro': 'ro_RO.ISO8859-2', + 'ro_ro.iso88592': 'ro_RO.ISO8859-2', + 'romanian': 'ro_RO.ISO8859-2', + 'ru': 'ru_RU.ISO8859-5', + 'ru_ru': 'ru_RU.ISO8859-5', + 'ru_ru.cp1251': 'ru_RU.CP1251', + 'ru_ru.iso88595': 'ru_RU.ISO8859-5', + 'ru_ru.koi8r': 'ru_RU.KOI8-R', + 'ru_ru.microsoftcp1251': 'ru_RU.CP1251', + 'ru_ua': 'ru_UA.KOI8-U', + 'ru_ua.cp1251': 'ru_UA.CP1251', + 'ru_ua.koi8u': 'ru_UA.KOI8-U', + 'ru_ua.microsoftcp1251': 'ru_UA.CP1251', + 'rumanian': 'ro_RO.ISO8859-2', + 'russian': 'ru_RU.ISO8859-5', + 'se_no': 'se_NO.UTF-8', + 'serbocroatian': 'sh_YU.ISO8859-2', + 'sh': 'sh_YU.ISO8859-2', + 'sh_hr': 'sh_HR.ISO8859-2', + 'sh_hr.iso88592': 'sh_HR.ISO8859-2', + 'sh_sp': 'sh_YU.ISO8859-2', + 'sh_yu': 'sh_YU.ISO8859-2', + 'sk': 'sk_SK.ISO8859-2', + 'sk_sk': 'sk_SK.ISO8859-2', + 'sk_sk.iso88592': 'sk_SK.ISO8859-2', + 'sl': 'sl_SI.ISO8859-2', + 'sl_cs': 'sl_CS.ISO8859-2', + 'sl_si': 'sl_SI.ISO8859-2', + 'sl_si.iso88592': 'sl_SI.ISO8859-2', + 'slovak': 'sk_SK.ISO8859-2', + 'slovene': 'sl_SI.ISO8859-2', + 'slovenian': 'sl_SI.ISO8859-2', + 'sp': 'sp_YU.ISO8859-5', + 'sp_yu': 'sp_YU.ISO8859-5', + 'spanish': 
'es_ES.ISO8859-1', + 'spanish.iso88591': 'es_ES.ISO8859-1', + 'spanish_spain': 'es_ES.ISO8859-1', + 'spanish_spain.8859': 'es_ES.ISO8859-1', + 'sq': 'sq_AL.ISO8859-2', + 'sq_al': 'sq_AL.ISO8859-2', + 'sq_al.iso88592': 'sq_AL.ISO8859-2', + 'sr': 'sr_YU.ISO8859-5', + 'sr at cyrillic': 'sr_YU.ISO8859-5', + 'sr_sp': 'sr_SP.ISO8859-2', + 'sr_yu': 'sr_YU.ISO8859-5', + 'sr_yu.cp1251 at cyrillic': 'sr_YU.CP1251', + 'sr_yu.iso88592': 'sr_YU.ISO8859-2', + 'sr_yu.iso88595': 'sr_YU.ISO8859-5', + 'sr_yu.iso88595 at cyrillic': 'sr_YU.ISO8859-5', + 'sr_yu.microsoftcp1251 at cyrillic': 'sr_YU.CP1251', + 'sr_yu.utf8 at cyrillic': 'sr_YU.UTF-8', + 'sr_yu at cyrillic': 'sr_YU.ISO8859-5', + 'sv': 'sv_SE.ISO8859-1', + 'sv_fi': 'sv_FI.ISO8859-1', + 'sv_fi.iso88591': 'sv_FI.ISO8859-1', + 'sv_fi.iso885915': 'sv_FI.ISO8859-15', + 'sv_fi at euro': 'sv_FI.ISO8859-15', + 'sv_se': 'sv_SE.ISO8859-1', + 'sv_se.88591': 'sv_SE.ISO8859-1', + 'sv_se.iso88591': 'sv_SE.ISO8859-1', + 'sv_se.iso885915': 'sv_SE.ISO8859-15', + 'sv_se at euro': 'sv_SE.ISO8859-15', + 'swedish': 'sv_SE.ISO8859-1', + 'swedish.iso88591': 'sv_SE.ISO8859-1', + 'ta': 'ta_IN.TSCII-0', + 'ta_in': 'ta_IN.TSCII-0', + 'ta_in.tscii': 'ta_IN.TSCII-0', + 'ta_in.tscii0': 'ta_IN.TSCII-0', + 'tg': 'tg_TJ.KOI8-C', + 'tg_tj': 'tg_TJ.KOI8-C', + 'tg_tj.koi8c': 'tg_TJ.KOI8-C', + 'th': 'th_TH.ISO8859-11', + 'th_th': 'th_TH.ISO8859-11', + 'th_th.iso885911': 'th_TH.ISO8859-11', + 'th_th.tactis': 'th_TH.TIS620', + 'th_th.tis620': 'th_TH.TIS620', + 'thai': 'th_TH.ISO8859-11', + 'tl': 'tl_PH.ISO8859-1', + 'tl_ph': 'tl_PH.ISO8859-1', + 'tl_ph.iso88591': 'tl_PH.ISO8859-1', + 'tr': 'tr_TR.ISO8859-9', + 'tr_tr': 'tr_TR.ISO8859-9', + 'tr_tr.iso88599': 'tr_TR.ISO8859-9', + 'tt': 'tt_RU.TATAR-CYR', + 'tt_ru': 'tt_RU.TATAR-CYR', + 'tt_ru.koi8c': 'tt_RU.KOI8-C', + 'tt_ru.tatarcyr': 'tt_RU.TATAR-CYR', + 'turkish': 'tr_TR.ISO8859-9', + 'turkish.iso88599': 'tr_TR.ISO8859-9', + 'uk': 'uk_UA.KOI8-U', + 'uk_ua': 'uk_UA.KOI8-U', + 'uk_ua.cp1251': 'uk_UA.CP1251', + 'uk_ua.iso88595': 'uk_UA.ISO8859-5', + 'uk_ua.koi8u': 'uk_UA.KOI8-U', + 'uk_ua.microsoftcp1251': 'uk_UA.CP1251', + 'univ': 'en_US.utf', + 'universal': 'en_US.utf', + 'universal.utf8 at ucs4': 'en_US.UTF-8', + 'ur': 'ur_PK.CP1256', + 'ur_pk': 'ur_PK.CP1256', + 'ur_pk.cp1256': 'ur_PK.CP1256', + 'ur_pk.microsoftcp1256': 'ur_PK.CP1256', + 'uz': 'uz_UZ.UTF-8', + 'uz_uz': 'uz_UZ.UTF-8', + 'vi': 'vi_VN.TCVN', + 'vi_vn': 'vi_VN.TCVN', + 'vi_vn.tcvn': 'vi_VN.TCVN', + 'vi_vn.tcvn5712': 'vi_VN.TCVN', + 'vi_vn.viscii': 'vi_VN.VISCII', + 'vi_vn.viscii111': 'vi_VN.VISCII', + 'wa': 'wa_BE.ISO8859-1', + 'wa_be': 'wa_BE.ISO8859-1', + 'wa_be.iso88591': 'wa_BE.ISO8859-1', + 'wa_be.iso885915': 'wa_BE.ISO8859-15', + 'wa_be at euro': 'wa_BE.ISO8859-15', + 'yi': 'yi_US.CP1255', + 'yi_us': 'yi_US.CP1255', + 'yi_us.cp1255': 'yi_US.CP1255', + 'yi_us.microsoftcp1255': 'yi_US.CP1255', + 'zh': 'zh_CN.eucCN', + 'zh_cn': 'zh_CN.gb2312', + 'zh_cn.big5': 'zh_TW.big5', + 'zh_cn.euc': 'zh_CN.eucCN', + 'zh_cn.gb18030': 'zh_CN.gb18030', + 'zh_cn.gb2312': 'zh_CN.gb2312', + 'zh_cn.gbk': 'zh_CN.gbk', + 'zh_hk': 'zh_HK.big5hkscs', + 'zh_hk.big5': 'zh_HK.big5', + 'zh_hk.big5hkscs': 'zh_HK.big5hkscs', + 'zh_tw': 'zh_TW.big5', + 'zh_tw.big5': 'zh_TW.big5', + 'zh_tw.euc': 'zh_TW.eucTW', +} + +# +# This maps Windows language identifiers to locale strings. +# +# This list has been updated from +# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/intl/nls_238z.asp +# to include every locale up to Windows XP. +# +# NOTE: this mapping is incomplete. 
If your language is missing, please +# submit a bug report to Python bug manager, which you can find via: +# http://www.python.org/dev/ +# Make sure you include the missing language identifier and the suggested +# locale code. +# + +windows_locale = { + 0x0436: "af_ZA", # Afrikaans + 0x041c: "sq_AL", # Albanian + 0x0401: "ar_SA", # Arabic - Saudi Arabia + 0x0801: "ar_IQ", # Arabic - Iraq + 0x0c01: "ar_EG", # Arabic - Egypt + 0x1001: "ar_LY", # Arabic - Libya + 0x1401: "ar_DZ", # Arabic - Algeria + 0x1801: "ar_MA", # Arabic - Morocco + 0x1c01: "ar_TN", # Arabic - Tunisia + 0x2001: "ar_OM", # Arabic - Oman + 0x2401: "ar_YE", # Arabic - Yemen + 0x2801: "ar_SY", # Arabic - Syria + 0x2c01: "ar_JO", # Arabic - Jordan + 0x3001: "ar_LB", # Arabic - Lebanon + 0x3401: "ar_KW", # Arabic - Kuwait + 0x3801: "ar_AE", # Arabic - United Arab Emirates + 0x3c01: "ar_BH", # Arabic - Bahrain + 0x4001: "ar_QA", # Arabic - Qatar + 0x042b: "hy_AM", # Armenian + 0x042c: "az_AZ", # Azeri Latin + 0x082c: "az_AZ", # Azeri - Cyrillic + 0x042d: "eu_ES", # Basque + 0x0423: "be_BY", # Belarusian + 0x0445: "bn_IN", # Begali + 0x201a: "bs_BA", # Bosnian + 0x141a: "bs_BA", # Bosnian - Cyrillic + 0x047e: "br_FR", # Breton - France + 0x0402: "bg_BG", # Bulgarian + 0x0403: "ca_ES", # Catalan + 0x0004: "zh_CHS",# Chinese - Simplified + 0x0404: "zh_TW", # Chinese - Taiwan + 0x0804: "zh_CN", # Chinese - PRC + 0x0c04: "zh_HK", # Chinese - Hong Kong S.A.R. + 0x1004: "zh_SG", # Chinese - Singapore + 0x1404: "zh_MO", # Chinese - Macao S.A.R. + 0x7c04: "zh_CHT",# Chinese - Traditional + 0x041a: "hr_HR", # Croatian + 0x101a: "hr_BA", # Croatian - Bosnia + 0x0405: "cs_CZ", # Czech + 0x0406: "da_DK", # Danish + 0x048c: "gbz_AF",# Dari - Afghanistan + 0x0465: "div_MV",# Divehi - Maldives + 0x0413: "nl_NL", # Dutch - The Netherlands + 0x0813: "nl_BE", # Dutch - Belgium + 0x0409: "en_US", # English - United States + 0x0809: "en_GB", # English - United Kingdom + 0x0c09: "en_AU", # English - Australia + 0x1009: "en_CA", # English - Canada + 0x1409: "en_NZ", # English - New Zealand + 0x1809: "en_IE", # English - Ireland + 0x1c09: "en_ZA", # English - South Africa + 0x2009: "en_JA", # English - Jamaica + 0x2409: "en_CB", # English - Carribbean + 0x2809: "en_BZ", # English - Belize + 0x2c09: "en_TT", # English - Trinidad + 0x3009: "en_ZW", # English - Zimbabwe + 0x3409: "en_PH", # English - Phillippines + 0x0425: "et_EE", # Estonian + 0x0438: "fo_FO", # Faroese + 0x0464: "fil_PH",# Filipino + 0x040b: "fi_FI", # Finnish + 0x040c: "fr_FR", # French - France + 0x080c: "fr_BE", # French - Belgium + 0x0c0c: "fr_CA", # French - Canada + 0x100c: "fr_CH", # French - Switzerland + 0x140c: "fr_LU", # French - Luxembourg + 0x180c: "fr_MC", # French - Monaco + 0x0462: "fy_NL", # Frisian - Netherlands + 0x0456: "gl_ES", # Galician + 0x0437: "ka_GE", # Georgian + 0x0407: "de_DE", # German - Germany + 0x0807: "de_CH", # German - Switzerland + 0x0c07: "de_AT", # German - Austria + 0x1007: "de_LU", # German - Luxembourg + 0x1407: "de_LI", # German - Liechtenstein + 0x0408: "el_GR", # Greek + 0x0447: "gu_IN", # Gujarati + 0x040d: "he_IL", # Hebrew + 0x0439: "hi_IN", # Hindi + 0x040e: "hu_HU", # Hungarian + 0x040f: "is_IS", # Icelandic + 0x0421: "id_ID", # Indonesian + 0x045d: "iu_CA", # Inuktitut + 0x085d: "iu_CA", # Inuktitut - Latin + 0x083c: "ga_IE", # Irish - Ireland + 0x0434: "xh_ZA", # Xhosa - South Africa + 0x0435: "zu_ZA", # Zulu + 0x0410: "it_IT", # Italian - Italy + 0x0810: "it_CH", # Italian - Switzerland + 0x0411: "ja_JP", # Japanese + 0x044b: 
"kn_IN", # Kannada - India + 0x043f: "kk_KZ", # Kazakh + 0x0457: "kok_IN",# Konkani + 0x0412: "ko_KR", # Korean + 0x0440: "ky_KG", # Kyrgyz + 0x0426: "lv_LV", # Latvian + 0x0427: "lt_LT", # Lithuanian + 0x046e: "lb_LU", # Luxembourgish + 0x042f: "mk_MK", # FYRO Macedonian + 0x043e: "ms_MY", # Malay - Malaysia + 0x083e: "ms_BN", # Malay - Brunei + 0x044c: "ml_IN", # Malayalam - India + 0x043a: "mt_MT", # Maltese + 0x0481: "mi_NZ", # Maori + 0x047a: "arn_CL",# Mapudungun + 0x044e: "mr_IN", # Marathi + 0x047c: "moh_CA",# Mohawk - Canada + 0x0450: "mn_MN", # Mongolian + 0x0461: "ne_NP", # Nepali + 0x0414: "nb_NO", # Norwegian - Bokmal + 0x0814: "nn_NO", # Norwegian - Nynorsk + 0x0482: "oc_FR", # Occitan - France + 0x0448: "or_IN", # Oriya - India + 0x0463: "ps_AF", # Pashto - Afghanistan + 0x0429: "fa_IR", # Persian + 0x0415: "pl_PL", # Polish + 0x0416: "pt_BR", # Portuguese - Brazil + 0x0816: "pt_PT", # Portuguese - Portugal + 0x0446: "pa_IN", # Punjabi + 0x046b: "quz_BO",# Quechua (Bolivia) + 0x086b: "quz_EC",# Quechua (Ecuador) + 0x0c6b: "quz_PE",# Quechua (Peru) + 0x0418: "ro_RO", # Romanian - Romania + 0x0417: "rm_CH", # Raeto-Romanese + 0x0419: "ru_RU", # Russian + 0x243b: "smn_FI",# Sami Finland + 0x103b: "smj_NO",# Sami Norway + 0x143b: "smj_SE",# Sami Sweden + 0x043b: "se_NO", # Sami Northern Norway + 0x083b: "se_SE", # Sami Northern Sweden + 0x0c3b: "se_FI", # Sami Northern Finland + 0x203b: "sms_FI",# Sami Skolt + 0x183b: "sma_NO",# Sami Southern Norway + 0x1c3b: "sma_SE",# Sami Southern Sweden + 0x044f: "sa_IN", # Sanskrit + 0x0c1a: "sr_SP", # Serbian - Cyrillic + 0x1c1a: "sr_BA", # Serbian - Bosnia Cyrillic + 0x081a: "sr_SP", # Serbian - Latin + 0x181a: "sr_BA", # Serbian - Bosnia Latin + 0x046c: "ns_ZA", # Northern Sotho + 0x0432: "tn_ZA", # Setswana - Southern Africa + 0x041b: "sk_SK", # Slovak + 0x0424: "sl_SI", # Slovenian + 0x040a: "es_ES", # Spanish - Spain + 0x080a: "es_MX", # Spanish - Mexico + 0x0c0a: "es_ES", # Spanish - Spain (Modern) + 0x100a: "es_GT", # Spanish - Guatemala + 0x140a: "es_CR", # Spanish - Costa Rica + 0x180a: "es_PA", # Spanish - Panama + 0x1c0a: "es_DO", # Spanish - Dominican Republic + 0x200a: "es_VE", # Spanish - Venezuela + 0x240a: "es_CO", # Spanish - Colombia + 0x280a: "es_PE", # Spanish - Peru + 0x2c0a: "es_AR", # Spanish - Argentina + 0x300a: "es_EC", # Spanish - Ecuador + 0x340a: "es_CL", # Spanish - Chile + 0x380a: "es_UR", # Spanish - Uruguay + 0x3c0a: "es_PY", # Spanish - Paraguay + 0x400a: "es_BO", # Spanish - Bolivia + 0x440a: "es_SV", # Spanish - El Salvador + 0x480a: "es_HN", # Spanish - Honduras + 0x4c0a: "es_NI", # Spanish - Nicaragua + 0x500a: "es_PR", # Spanish - Puerto Rico + 0x0441: "sw_KE", # Swahili + 0x041d: "sv_SE", # Swedish - Sweden + 0x081d: "sv_FI", # Swedish - Finland + 0x045a: "syr_SY",# Syriac + 0x0449: "ta_IN", # Tamil + 0x0444: "tt_RU", # Tatar + 0x044a: "te_IN", # Telugu + 0x041e: "th_TH", # Thai + 0x041f: "tr_TR", # Turkish + 0x0422: "uk_UA", # Ukrainian + 0x0420: "ur_PK", # Urdu + 0x0820: "ur_IN", # Urdu - India + 0x0443: "uz_UZ", # Uzbek - Latin + 0x0843: "uz_UZ", # Uzbek - Cyrillic + 0x042a: "vi_VN", # Vietnamese + 0x0452: "cy_GB", # Welsh +} + +def _print_locale(): + + """ Test function. 
+ """ + categories = {} + def _init_categories(categories=categories): + for k,v in globals().items(): + if k[:3] == 'LC_': + categories[k] = v + _init_categories() + del categories['LC_ALL'] + + print 'Locale defaults as determined by getdefaultlocale():' + print '-'*72 + lang, enc = getdefaultlocale() + print 'Language: ', lang or '(undefined)' + print 'Encoding: ', enc or '(undefined)' + print + + print 'Locale settings on startup:' + print '-'*72 + for name,category in categories.items(): + print name, '...' + lang, enc = getlocale(category) + print ' Language: ', lang or '(undefined)' + print ' Encoding: ', enc or '(undefined)' + print + + print + print 'Locale settings after calling resetlocale():' + print '-'*72 + resetlocale() + for name,category in categories.items(): + print name, '...' + lang, enc = getlocale(category) + print ' Language: ', lang or '(undefined)' + print ' Encoding: ', enc or '(undefined)' + print + + try: + setlocale(LC_ALL, "") + except: + print 'NOTE:' + print 'setlocale(LC_ALL, "") does not support the default locale' + print 'given in the OS environment variables.' + else: + print + print 'Locale settings after calling setlocale(LC_ALL, ""):' + print '-'*72 + for name,category in categories.items(): + print name, '...' + lang, enc = getlocale(category) + print ' Language: ', lang or '(undefined)' + print ' Encoding: ', enc or '(undefined)' + print + +### + +try: + LC_MESSAGES +except NameError: + pass +else: + __all__.append("LC_MESSAGES") + +if __name__=='__main__': + print 'Locale aliasing:' + print + _print_locale() + print + print 'Number formatting:' + print + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,185 @@ + +""" +opcode module - potentially shared between dis and other modules which +operate on bytecodes (e.g. peephole optimizers). 
+""" + +__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs", + "haslocal", "hascompare", "hasfree", "opname", "opmap", + "HAVE_ARGUMENT", "EXTENDED_ARG"] + +cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is', + 'is not', 'exception match', 'BAD') + +hasconst = [] +hasname = [] +hasjrel = [] +hasjabs = [] +haslocal = [] +hascompare = [] +hasfree = [] + +opmap = {} +opname = [''] * 256 +for op in range(256): opname[op] = '<%r>' % (op,) +del op + +def def_op(name, op): + opname[op] = name + opmap[name] = op + +def name_op(name, op): + def_op(name, op) + hasname.append(op) + +def jrel_op(name, op): + def_op(name, op) + hasjrel.append(op) + +def jabs_op(name, op): + def_op(name, op) + hasjabs.append(op) + +# Instruction opcodes for compiled code +# Blank lines correspond to available opcodes + +def_op('STOP_CODE', 0) +def_op('POP_TOP', 1) +def_op('ROT_TWO', 2) +def_op('ROT_THREE', 3) +def_op('DUP_TOP', 4) +def_op('ROT_FOUR', 5) + +def_op('NOP', 9) +def_op('UNARY_POSITIVE', 10) +def_op('UNARY_NEGATIVE', 11) +def_op('UNARY_NOT', 12) +def_op('UNARY_CONVERT', 13) + +def_op('UNARY_INVERT', 15) + +def_op('LIST_APPEND', 18) +def_op('BINARY_POWER', 19) +def_op('BINARY_MULTIPLY', 20) +def_op('BINARY_DIVIDE', 21) +def_op('BINARY_MODULO', 22) +def_op('BINARY_ADD', 23) +def_op('BINARY_SUBTRACT', 24) +def_op('BINARY_SUBSCR', 25) +def_op('BINARY_FLOOR_DIVIDE', 26) +def_op('BINARY_TRUE_DIVIDE', 27) +def_op('INPLACE_FLOOR_DIVIDE', 28) +def_op('INPLACE_TRUE_DIVIDE', 29) +def_op('SLICE+0', 30) +def_op('SLICE+1', 31) +def_op('SLICE+2', 32) +def_op('SLICE+3', 33) + +def_op('STORE_SLICE+0', 40) +def_op('STORE_SLICE+1', 41) +def_op('STORE_SLICE+2', 42) +def_op('STORE_SLICE+3', 43) + +def_op('DELETE_SLICE+0', 50) +def_op('DELETE_SLICE+1', 51) +def_op('DELETE_SLICE+2', 52) +def_op('DELETE_SLICE+3', 53) + +def_op('INPLACE_ADD', 55) +def_op('INPLACE_SUBTRACT', 56) +def_op('INPLACE_MULTIPLY', 57) +def_op('INPLACE_DIVIDE', 58) +def_op('INPLACE_MODULO', 59) +def_op('STORE_SUBSCR', 60) +def_op('DELETE_SUBSCR', 61) +def_op('BINARY_LSHIFT', 62) +def_op('BINARY_RSHIFT', 63) +def_op('BINARY_AND', 64) +def_op('BINARY_XOR', 65) +def_op('BINARY_OR', 66) +def_op('INPLACE_POWER', 67) +def_op('GET_ITER', 68) + +def_op('PRINT_EXPR', 70) +def_op('PRINT_ITEM', 71) +def_op('PRINT_NEWLINE', 72) +def_op('PRINT_ITEM_TO', 73) +def_op('PRINT_NEWLINE_TO', 74) +def_op('INPLACE_LSHIFT', 75) +def_op('INPLACE_RSHIFT', 76) +def_op('INPLACE_AND', 77) +def_op('INPLACE_XOR', 78) +def_op('INPLACE_OR', 79) +def_op('BREAK_LOOP', 80) +def_op('WITH_CLEANUP', 81) +def_op('LOAD_LOCALS', 82) +def_op('RETURN_VALUE', 83) +def_op('IMPORT_STAR', 84) +def_op('EXEC_STMT', 85) +def_op('YIELD_VALUE', 86) +def_op('POP_BLOCK', 87) +def_op('END_FINALLY', 88) +def_op('BUILD_CLASS', 89) + +HAVE_ARGUMENT = 90 # Opcodes from here have an argument: + +name_op('STORE_NAME', 90) # Index in name list +name_op('DELETE_NAME', 91) # "" +def_op('UNPACK_SEQUENCE', 92) # Number of tuple items +jrel_op('FOR_ITER', 93) + +name_op('STORE_ATTR', 95) # Index in name list +name_op('DELETE_ATTR', 96) # "" +name_op('STORE_GLOBAL', 97) # "" +name_op('DELETE_GLOBAL', 98) # "" +def_op('DUP_TOPX', 99) # number of items to duplicate +def_op('LOAD_CONST', 100) # Index in const list +hasconst.append(100) +name_op('LOAD_NAME', 101) # Index in name list +def_op('BUILD_TUPLE', 102) # Number of tuple items +def_op('BUILD_LIST', 103) # Number of list items +def_op('BUILD_MAP', 104) # Always zero for now +name_op('LOAD_ATTR', 105) # Index in name list +def_op('COMPARE_OP', 106) 
# Comparison operator +hascompare.append(106) +name_op('IMPORT_NAME', 107) # Index in name list +name_op('IMPORT_FROM', 108) # Index in name list + +jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip +jrel_op('JUMP_IF_FALSE', 111) # "" +jrel_op('JUMP_IF_TRUE', 112) # "" +jabs_op('JUMP_ABSOLUTE', 113) # Target byte offset from beginning of code + +name_op('LOAD_GLOBAL', 116) # Index in name list + +jabs_op('CONTINUE_LOOP', 119) # Target address +jrel_op('SETUP_LOOP', 120) # Distance to target address +jrel_op('SETUP_EXCEPT', 121) # "" +jrel_op('SETUP_FINALLY', 122) # "" + +def_op('LOAD_FAST', 124) # Local variable number +haslocal.append(124) +def_op('STORE_FAST', 125) # Local variable number +haslocal.append(125) +def_op('DELETE_FAST', 126) # Local variable number +haslocal.append(126) + +def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3) +def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8) +def_op('MAKE_FUNCTION', 132) # Number of args with default values +def_op('BUILD_SLICE', 133) # Number of items +def_op('MAKE_CLOSURE', 134) +def_op('LOAD_CLOSURE', 135) +hasfree.append(135) +def_op('LOAD_DEREF', 136) +hasfree.append(136) +def_op('STORE_DEREF', 137) +hasfree.append(137) + +def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8) +def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8) +def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8) +def_op('EXTENDED_ARG', 143) +EXTENDED_ARG = 143 + +del def_op, name_op, jrel_op, jabs_op Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,1435 @@ +"""Create portable serialized representations of Python objects. + +See module cPickle for a (much) faster implementation. +See module copy_reg for a mechanism for registering custom picklers. +See module pickletools source for extensive comments. + +Classes: + + Pickler + Unpickler + +Functions: + + dump(object, file) + dumps(object) -> string + load(file) -> object + loads(string) -> object + +Misc variables: + + __version__ + format_version + compatible_formats + +""" + +__version__ = "$Revision: 38432 $" # Code version + +from types import * +from copy_reg import dispatch_table +from copy_reg import _extension_registry, _inverted_registry, _extension_cache +import marshal +import sys +import struct + +__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler", + "Unpickler", "dump", "dumps", "load", "loads"] + +# These are purely informational; no code uses these. +format_version = "2.0" # File format version we write +compatible_formats = ["1.0", # Original protocol 0 + "1.1", # Protocol 0 with INST added + "1.2", # Original protocol 1 + "1.3", # Protocol 1 with BINFLOAT added + "2.0", # Protocol 2 + ] # Old format versions we can read + +# Keep in synch with cPickle. This is the highest protocol number we +# know how to read. +HIGHEST_PROTOCOL = 2 + +# Why use struct.pack() for pickling but marshal.loads() for +# unpickling? struct.pack() is 40% faster than marshal.dumps(), but +# marshal.loads() is twice as fast as struct.unpack()! +mloads = marshal.loads + +class PickleError(Exception): + """A common base class for the other pickling exceptions.""" + pass + +class PicklingError(PickleError): + """This exception is raised when an unpicklable object is passed to the + dump() method. 
+ + """ + pass + +class UnpicklingError(PickleError): + """This exception is raised when there is a problem unpickling an object, + such as a security violation. + + Note that other exceptions may also be raised during unpickling, including + (but not necessarily limited to) AttributeError, EOFError, ImportError, + and IndexError. + + """ + pass + +# An instance of _Stop is raised by Unpickler.load_stop() in response to +# the STOP opcode, passing the object that is the result of unpickling. +class _Stop(Exception): + def __init__(self, value): + self.value = value + +# Jython has PyStringMap; it's a dict subclass with string keys +try: + from org.python.core import PyStringMap +except ImportError: + PyStringMap = None + +# UnicodeType may or may not be exported (normally imported from types) +try: + UnicodeType +except NameError: + UnicodeType = None + +# Pickle opcodes. See pickletools.py for extensive docs. The listing +# here is in kind-of alphabetical order of 1-character pickle code. +# pickletools groups them by purpose. + +MARK = '(' # push special markobject on stack +STOP = '.' # every pickle ends with STOP +POP = '0' # discard topmost stack item +POP_MARK = '1' # discard stack top through topmost markobject +DUP = '2' # duplicate top stack item +FLOAT = 'F' # push float object; decimal string argument +INT = 'I' # push integer or bool; decimal string argument +BININT = 'J' # push four-byte signed int +BININT1 = 'K' # push 1-byte unsigned int +LONG = 'L' # push long; decimal string argument +BININT2 = 'M' # push 2-byte unsigned int +NONE = 'N' # push None +PERSID = 'P' # push persistent object; id is taken from string arg +BINPERSID = 'Q' # " " " ; " " " " stack +REDUCE = 'R' # apply callable to argtuple, both on stack +STRING = 'S' # push string; NL-terminated string argument +BINSTRING = 'T' # push string; counted binary string argument +SHORT_BINSTRING = 'U' # " " ; " " " " < 256 bytes +UNICODE = 'V' # push Unicode string; raw-unicode-escaped'd argument +BINUNICODE = 'X' # " " " ; counted UTF-8 string argument +APPEND = 'a' # append stack top to list below it +BUILD = 'b' # call __setstate__ or __dict__.update() +GLOBAL = 'c' # push self.find_class(modname, name); 2 string args +DICT = 'd' # build a dict from stack items +EMPTY_DICT = '}' # push empty dict +APPENDS = 'e' # extend list on stack by topmost stack slice +GET = 'g' # push item from memo on stack; index is string arg +BINGET = 'h' # " " " " " " ; " " 1-byte arg +INST = 'i' # build & push class instance +LONG_BINGET = 'j' # push item from memo on stack; index is 4-byte arg +LIST = 'l' # build list from topmost stack items +EMPTY_LIST = ']' # push empty list +OBJ = 'o' # build & push class instance +PUT = 'p' # store stack top in memo; index is string arg +BINPUT = 'q' # " " " " " ; " " 1-byte arg +LONG_BINPUT = 'r' # " " " " " ; " " 4-byte arg +SETITEM = 's' # add key+value pair to dict +TUPLE = 't' # build tuple from topmost stack items +EMPTY_TUPLE = ')' # push empty tuple +SETITEMS = 'u' # modify dict by adding topmost key+value pairs +BINFLOAT = 'G' # push float; arg is 8-byte float encoding + +TRUE = 'I01\n' # not an opcode; see INT docs in pickletools.py +FALSE = 'I00\n' # not an opcode; see INT docs in pickletools.py + +# Protocol 2 + +PROTO = '\x80' # identify pickle protocol +NEWOBJ = '\x81' # build object by applying cls.__new__ to argtuple +EXT1 = '\x82' # push object from extension registry; 1-byte index +EXT2 = '\x83' # ditto, but 2-byte index +EXT4 = '\x84' # ditto, but 4-byte index +TUPLE1 = '\x85' # 
build 1-tuple from stack top +TUPLE2 = '\x86' # build 2-tuple from two topmost stack items +TUPLE3 = '\x87' # build 3-tuple from three topmost stack items +NEWTRUE = '\x88' # push True +NEWFALSE = '\x89' # push False +LONG1 = '\x8a' # push long from < 256 bytes +LONG4 = '\x8b' # push really big long + +_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3] + + +__all__.extend([x for x in dir() if x[0].isalpha() and x == x.upper()]) +del x + + +# Pickling machinery + +class Pickler: + + def __init__(self, file, protocol=None): + """This takes a file-like object for writing a pickle data stream. + + The optional protocol argument tells the pickler to use the + given protocol; supported protocols are 0, 1, 2. The default + protocol is 0, to be backwards compatible. (Protocol 0 is the + only protocol that can be written to a file opened in text + mode and read back successfully. When using a protocol higher + than 0, make sure the file is opened in binary mode, both when + pickling and unpickling.) + + Protocol 1 is more efficient than protocol 0; protocol 2 is + more efficient than protocol 1. + + Specifying a negative protocol version selects the highest + protocol version supported. The higher the protocol used, the + more recent the version of Python needed to read the pickle + produced. + + The file parameter must have a write() method that accepts a single + string argument. It can thus be an open file object, a StringIO + object, or any other custom object that meets this interface. + + """ + if protocol is None: + protocol = 0 + if protocol < 0: + protocol = HIGHEST_PROTOCOL + elif not 0 <= protocol <= HIGHEST_PROTOCOL: + raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL) + self.write = file.write + self.memo = {} + self.proto = int(protocol) + self.bin = protocol >= 1 + self.fast = 0 + + def _pickle_moduledict(self, obj): + try: + modict = self.module_dict_ids + except AttributeError: + modict = {} + from sys import modules + for mod in modules.values(): + if isinstance(mod, ModuleType): + try: + modict[id(mod.__dict__)] = mod + except KeyboardInterrupt: + raise + except: # obscure: the above can fail for + # arbitrary reasons, because of the py lib + pass + self.module_dict_ids = modict + + thisid = id(obj) + try: + themodule = modict[thisid] + except KeyError: + return None + from __builtin__ import getattr + return getattr, (themodule, '__dict__') + + def clear_memo(self): + """Clears the pickler's "memo". + + The memo is the data structure that remembers which objects the + pickler has already seen, so that shared or recursive objects are + pickled by reference and not by value. This method is useful when + re-using picklers. + + """ + self.memo.clear() + + def dump(self, obj): + """Write a pickled representation of obj to the open file.""" + if self.proto >= 2: + self.write(PROTO + chr(self.proto)) + self.save(obj) + self.write(STOP) + + def memoize(self, obj): + """Store an object in the memo.""" + + # The Pickler memo is a dictionary mapping object ids to 2-tuples + # that contain the Unpickler memo key and the object being memoized. + # The memo key is written to the pickle and will become + # the key in the Unpickler's memo. The object is stored in the + # Pickler memo so that transient objects are kept alive during + # pickling. + + # The use of the Unpickler memo length as the memo key is just a + # convention. The only requirement is that the memo values be unique. 
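        # (Editor's aside, not part of the original commit: a rough sketch of
        #  what the memo buys, assuming only the stdlib pickle module.  A
        #  shared sub-object is written once; later references become memo
        #  lookups, so object identity survives a round trip:
        #
        #      >>> import pickle
        #      >>> shared = [1, 2, 3]
        #      >>> a, b = pickle.loads(pickle.dumps([shared, shared]))
        #      >>> a is b
        #      True
        #  )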
+ # But there appears no advantage to any other scheme, and this + # scheme allows the Unpickler memo to be implemented as a plain (but + # growable) array, indexed by memo key. + if self.fast: + return + assert id(obj) not in self.memo + memo_len = len(self.memo) + self.write(self.put(memo_len)) + self.memo[id(obj)] = memo_len, obj + + # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i. + def put(self, i, pack=struct.pack): + if self.bin: + if i < 256: + return BINPUT + chr(i) + else: + return LONG_BINPUT + pack("= 2 and getattr(func, "__name__", "") == "__newobj__": + # A __reduce__ implementation can direct protocol 2 to + # use the more efficient NEWOBJ opcode, while still + # allowing protocol 0 and 1 to work normally. For this to + # work, the function returned by __reduce__ should be + # called __newobj__, and its first argument should be a + # new-style class. The implementation for __newobj__ + # should be as follows, although pickle has no way to + # verify this: + # + # def __newobj__(cls, *args): + # return cls.__new__(cls, *args) + # + # Protocols 0 and 1 will pickle a reference to __newobj__, + # while protocol 2 (and above) will pickle a reference to + # cls, the remaining args tuple, and the NEWOBJ code, + # which calls cls.__new__(cls, *args) at unpickling time + # (see load_newobj below). If __reduce__ returns a + # three-tuple, the state from the third tuple item will be + # pickled regardless of the protocol, calling __setstate__ + # at unpickling time (see load_build below). + # + # Note that no standard __newobj__ implementation exists; + # you have to provide your own. This is to enforce + # compatibility with Python 2.2 (pickles written using + # protocol 0 or 1 in Python 2.3 should be unpicklable by + # Python 2.2). + cls = args[0] + if not hasattr(cls, "__new__"): + raise PicklingError( + "args[0] from __newobj__ args has no __new__") + if obj is not None and cls is not obj.__class__: + raise PicklingError( + "args[0] from __newobj__ args has the wrong class") + args = args[1:] + save(cls) + save(args) + write(NEWOBJ) + else: + save(func) + save(args) + write(REDUCE) + + if obj is not None: + self.memoize(obj) + + # More new special cases (that work with older protocols as + # well): when __reduce__ returns a tuple with 4 or 5 items, + # the 4th and 5th item should be iterators that provide list + # items and dict items (as (key, value) tuples), or None. + + if listitems is not None: + self._batch_appends(listitems) + + if dictitems is not None: + self._batch_setitems(dictitems) + + if state is not None: + save(state) + write(BUILD) + + # Methods below this point are dispatched through the dispatch table + + dispatch = {} + + def save_none(self, obj): + self.write(NONE) + dispatch[NoneType] = save_none + + def save_bool(self, obj): + if self.proto >= 2: + self.write(obj and NEWTRUE or NEWFALSE) + else: + self.write(obj and TRUE or FALSE) + dispatch[bool] = save_bool + + def save_int(self, obj, pack=struct.pack): + if self.bin: + # If the int is small enough to fit in a signed 4-byte 2's-comp + # format, we can store it more efficiently than the general + # case. 
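            # (Editor's aside, not part of the original commit: an
            #  illustrative doctest-style sketch, assuming the stdlib pickle
            #  module, of which opcode each size class gets under protocol 1;
            #  the first byte of the pickle is the opcode character:
            #
            #      >>> import pickle
            #      >>> pickle.dumps(200, 1)[0]      # fits in one byte  -> BININT1
            #      'K'
            #      >>> pickle.dumps(60000, 1)[0]    # fits in two bytes -> BININT2
            #      'M'
            #      >>> pickle.dumps(100000, 1)[0]   # needs four bytes  -> BININT
            #      'J'
            #  )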
+ # First one- and two-byte unsigned ints: + if obj >= 0: + if obj <= 0xff: + self.write(BININT1 + chr(obj)) + return + if obj <= 0xffff: + self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8)) + return + # Next check for 4-byte signed ints: + high_bits = obj >> 31 # note that Python shift sign-extends + if high_bits == 0 or high_bits == -1: + # All high bits are copies of bit 2**31, so the value + # fits in a 4-byte signed int. + self.write(BININT + pack("= 2: + bytes = encode_long(obj) + n = len(bytes) + if n < 256: + self.write(LONG1 + chr(n) + bytes) + else: + self.write(LONG4 + pack("d', obj)) + else: + self.write(FLOAT + repr(obj) + '\n') + dispatch[FloatType] = save_float + + def save_string(self, obj, pack=struct.pack): + if self.bin: + n = len(obj) + if n < 256: + self.write(SHORT_BINSTRING + chr(n) + obj) + else: + self.write(BINSTRING + pack("= 2: + for element in obj: + save(element) + # Subtle. Same as in the big comment below. + if id(obj) in memo: + get = self.get(memo[id(obj)][0]) + write(POP * n + get) + else: + write(_tuplesize2code[n]) + self.memoize(obj) + return + + # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple + # has more than 3 elements. + write(MARK) + for element in obj: + save(element) + + if id(obj) in memo: + # Subtle. d was not in memo when we entered save_tuple(), so + # the process of saving the tuple's elements must have saved + # the tuple itself: the tuple is recursive. The proper action + # now is to throw away everything we put on the stack, and + # simply GET the tuple (it's already constructed). This check + # could have been done in the "for element" loop instead, but + # recursive tuples are a rare thing. + get = self.get(memo[id(obj)][0]) + if proto: + write(POP_MARK + get) + else: # proto 0 -- POP_MARK not available + write(POP * (n+1) + get) + return + + # No recursion. + self.write(TUPLE) + self.memoize(obj) + + dispatch[TupleType] = save_tuple + + # save_empty_tuple() isn't used by anything in Python 2.3. However, I + # found a Pickler subclass in Zope3 that calls it, so it's not harmless + # to remove it. + def save_empty_tuple(self, obj): + self.write(EMPTY_TUPLE) + + def save_list(self, obj): + write = self.write + + if self.bin: + write(EMPTY_LIST) + else: # proto 0 -- can't use EMPTY_LIST + write(MARK + LIST) + + self.memoize(obj) + self._batch_appends(iter(obj)) + + dispatch[ListType] = save_list + + # Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets + # out of synch, though. 
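    # (Editor's aside, not part of the original commit: the batching done by
    #  _batch_appends / _batch_setitems below can be observed with the stdlib
    #  pickletools module -- for example
    #
    #      >>> import pickle, pickletools
    #      >>> pickletools.dis(pickle.dumps(range(3), 1))   # doctest: +SKIP
    #
    #  prints a single MARK ... APPENDS group covering all three list items
    #  rather than three separate APPEND opcodes.)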
+ _BATCHSIZE = 1000 + + def _batch_appends(self, items): + # Helper to batch up APPENDS sequences + save = self.save + write = self.write + + if not self.bin: + for x in items: + save(x) + write(APPEND) + return + + r = xrange(self._BATCHSIZE) + while items is not None: + tmp = [] + for i in r: + try: + x = items.next() + tmp.append(x) + except StopIteration: + items = None + break + n = len(tmp) + if n > 1: + write(MARK) + for x in tmp: + save(x) + write(APPENDS) + elif n: + save(tmp[0]) + write(APPEND) + # else tmp is empty, and we're done + + def save_dict(self, obj): + ## Stackless addition BEGIN + modict_saver = self._pickle_moduledict(obj) + if modict_saver is not None: + return self.save_reduce(*modict_saver) + ## Stackless addition END + + write = self.write + + if self.bin: + write(EMPTY_DICT) + else: # proto 0 -- can't use EMPTY_DICT + write(MARK + DICT) + + self.memoize(obj) + self._batch_setitems(obj.iteritems()) + + dispatch[DictionaryType] = save_dict + if not PyStringMap is None: + dispatch[PyStringMap] = save_dict + + def _batch_setitems(self, items): + # Helper to batch up SETITEMS sequences; proto >= 1 only + save = self.save + write = self.write + + if not self.bin: + for k, v in items: + save(k) + save(v) + write(SETITEM) + return + + r = xrange(self._BATCHSIZE) + while items is not None: + tmp = [] + for i in r: + try: + tmp.append(items.next()) + except StopIteration: + items = None + break + n = len(tmp) + if n > 1: + write(MARK) + for k, v in tmp: + save(k) + save(v) + write(SETITEMS) + elif n: + k, v = tmp[0] + save(k) + save(v) + write(SETITEM) + # else tmp is empty, and we're done + + def save_inst(self, obj): + cls = obj.__class__ + + memo = self.memo + write = self.write + save = self.save + + if hasattr(obj, '__getinitargs__'): + args = obj.__getinitargs__() + len(args) # XXX Assert it's a sequence + _keep_alive(args, memo) + else: + args = () + + write(MARK) + + if self.bin: + save(cls) + for arg in args: + save(arg) + write(OBJ) + else: + for arg in args: + save(arg) + write(INST + cls.__module__ + '\n' + cls.__name__ + '\n') + + self.memoize(obj) + + try: + getstate = obj.__getstate__ + except AttributeError: + stuff = obj.__dict__ + else: + stuff = getstate() + _keep_alive(stuff, memo) + save(stuff) + write(BUILD) + + dispatch[InstanceType] = save_inst + + def save_global(self, obj, name=None, pack=struct.pack): + write = self.write + memo = self.memo + + if name is None: + name = obj.__name__ + + module = getattr(obj, "__module__", None) + if module is None: + module = whichmodule(obj, name) + + try: + __import__(module) + mod = sys.modules[module] + klass = getattr(mod, name) + except (ImportError, KeyError, AttributeError): + raise PicklingError( + "Can't pickle %r: it's not found as %s.%s" % + (obj, module, name)) + else: + if klass is not obj: + raise PicklingError( + "Can't pickle %r: it's not the same object as %s.%s" % + (obj, module, name)) + + if self.proto >= 2: + code = _extension_registry.get((module, name)) + if code: + assert code > 0 + if code <= 0xff: + write(EXT1 + chr(code)) + elif code <= 0xffff: + write("%c%c%c" % (EXT2, code&0xff, code>>8)) + else: + write(EXT4 + pack("d', self.read(8))[0]) + dispatch[BINFLOAT] = load_binfloat + + def load_string(self): + rep = self.readline()[:-1] + for q in "\"'": # double or single quote + if rep.startswith(q): + if not rep.endswith(q): + raise ValueError, "insecure string pickle" + rep = rep[len(q):-len(q)] + break + else: + raise ValueError, "insecure string pickle" + 
self.append(rep.decode("string-escape")) + dispatch[STRING] = load_string + + def load_binstring(self): + len = mloads('i' + self.read(4)) + self.append(self.read(len)) + dispatch[BINSTRING] = load_binstring + + def load_unicode(self): + self.append(unicode(self.readline()[:-1],'raw-unicode-escape')) + dispatch[UNICODE] = load_unicode + + def load_binunicode(self): + len = mloads('i' + self.read(4)) + self.append(unicode(self.read(len),'utf-8')) + dispatch[BINUNICODE] = load_binunicode + + def load_short_binstring(self): + len = ord(self.read(1)) + self.append(self.read(len)) + dispatch[SHORT_BINSTRING] = load_short_binstring + + def load_tuple(self): + k = self.marker() + self.stack[k:] = [tuple(self.stack[k+1:])] + dispatch[TUPLE] = load_tuple + + def load_empty_tuple(self): + self.stack.append(()) + dispatch[EMPTY_TUPLE] = load_empty_tuple + + def load_tuple1(self): + self.stack[-1] = (self.stack[-1],) + dispatch[TUPLE1] = load_tuple1 + + def load_tuple2(self): + self.stack[-2:] = [(self.stack[-2], self.stack[-1])] + dispatch[TUPLE2] = load_tuple2 + + def load_tuple3(self): + self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])] + dispatch[TUPLE3] = load_tuple3 + + def load_empty_list(self): + self.stack.append([]) + dispatch[EMPTY_LIST] = load_empty_list + + def load_empty_dictionary(self): + self.stack.append({}) + dispatch[EMPTY_DICT] = load_empty_dictionary + + def load_list(self): + k = self.marker() + self.stack[k:] = [self.stack[k+1:]] + dispatch[LIST] = load_list + + def load_dict(self): + k = self.marker() + d = {} + items = self.stack[k+1:] + for i in range(0, len(items), 2): + key = items[i] + value = items[i+1] + d[key] = value + self.stack[k:] = [d] + dispatch[DICT] = load_dict + + # INST and OBJ differ only in how they get a class object. It's not + # only sensible to do the rest in a common routine, the two routines + # previously diverged and grew different bugs. + # klass is the class to instantiate, and k points to the topmost mark + # object, following which are the arguments for klass.__init__. + def _instantiate(self, klass, k): + args = tuple(self.stack[k+1:]) + del self.stack[k:] + instantiated = 0 + if (not args and + type(klass) is ClassType and + not hasattr(klass, "__getinitargs__")): + try: + value = _EmptyClass() + value.__class__ = klass + instantiated = 1 + except RuntimeError: + # In restricted execution, assignment to inst.__class__ is + # prohibited + pass + if not instantiated: + try: + value = klass(*args) + except TypeError, err: + raise TypeError, "in constructor for %s: %s" % ( + klass.__name__, str(err)), sys.exc_info()[2] + self.append(value) + + def load_inst(self): + module = self.readline()[:-1] + name = self.readline()[:-1] + klass = self.find_class(module, name) + self._instantiate(klass, self.marker()) + dispatch[INST] = load_inst + + def load_obj(self): + # Stack is ... markobject classobject arg1 arg2 ... 
+ k = self.marker() + klass = self.stack.pop(k+1) + self._instantiate(klass, k) + dispatch[OBJ] = load_obj + + def load_newobj(self): + args = self.stack.pop() + cls = self.stack[-1] + obj = cls.__new__(cls, *args) + self.stack[-1] = obj + dispatch[NEWOBJ] = load_newobj + + def load_global(self): + module = self.readline()[:-1] + name = self.readline()[:-1] + klass = self.find_class(module, name) + self.append(klass) + dispatch[GLOBAL] = load_global + + def load_ext1(self): + code = ord(self.read(1)) + self.get_extension(code) + dispatch[EXT1] = load_ext1 + + def load_ext2(self): + code = mloads('i' + self.read(2) + '\000\000') + self.get_extension(code) + dispatch[EXT2] = load_ext2 + + def load_ext4(self): + code = mloads('i' + self.read(4)) + self.get_extension(code) + dispatch[EXT4] = load_ext4 + + def get_extension(self, code): + nil = [] + obj = _extension_cache.get(code, nil) + if obj is not nil: + self.append(obj) + return + key = _inverted_registry.get(code) + if not key: + raise ValueError("unregistered extension code %d" % code) + obj = self.find_class(*key) + _extension_cache[code] = obj + self.append(obj) + + def find_class(self, module, name): + # Subclasses may override this + __import__(module) + mod = sys.modules[module] + klass = getattr(mod, name) + return klass + + def load_reduce(self): + stack = self.stack + args = stack.pop() + func = stack[-1] + value = func(*args) + stack[-1] = value + dispatch[REDUCE] = load_reduce + + def load_pop(self): + del self.stack[-1] + dispatch[POP] = load_pop + + def load_pop_mark(self): + k = self.marker() + del self.stack[k:] + dispatch[POP_MARK] = load_pop_mark + + def load_dup(self): + self.append(self.stack[-1]) + dispatch[DUP] = load_dup + + def load_get(self): + self.append(self.memo[self.readline()[:-1]]) + dispatch[GET] = load_get + + def load_binget(self): + i = ord(self.read(1)) + self.append(self.memo[repr(i)]) + dispatch[BINGET] = load_binget + + def load_long_binget(self): + i = mloads('i' + self.read(4)) + self.append(self.memo[repr(i)]) + dispatch[LONG_BINGET] = load_long_binget + + def load_put(self): + self.memo[self.readline()[:-1]] = self.stack[-1] + dispatch[PUT] = load_put + + def load_binput(self): + i = ord(self.read(1)) + self.memo[repr(i)] = self.stack[-1] + dispatch[BINPUT] = load_binput + + def load_long_binput(self): + i = mloads('i' + self.read(4)) + self.memo[repr(i)] = self.stack[-1] + dispatch[LONG_BINPUT] = load_long_binput + + def load_append(self): + stack = self.stack + value = stack.pop() + list = stack[-1] + list.append(value) + dispatch[APPEND] = load_append + + def load_appends(self): + stack = self.stack + mark = self.marker() + list = stack[mark - 1] + list.extend(stack[mark + 1:]) + del stack[mark:] + dispatch[APPENDS] = load_appends + + def load_setitem(self): + stack = self.stack + value = stack.pop() + key = stack.pop() + dict = stack[-1] + dict[key] = value + dispatch[SETITEM] = load_setitem + + def load_setitems(self): + stack = self.stack + mark = self.marker() + dict = stack[mark - 1] + for i in range(mark + 1, len(stack), 2): + dict[stack[i]] = stack[i + 1] + + del stack[mark:] + dispatch[SETITEMS] = load_setitems + + def load_build(self): + stack = self.stack + state = stack.pop() + inst = stack[-1] + setstate = getattr(inst, "__setstate__", None) + if setstate: + setstate(state) + return + slotstate = None + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + if state: + try: + inst.__dict__.update(state) + except RuntimeError: + # XXX In restricted execution, 
the instance's __dict__ + # is not accessible. Use the old way of unpickling + # the instance variables. This is a semantic + # difference when unpickling in restricted + # vs. unrestricted modes. + # Note, however, that cPickle has never tried to do the + # .update() business, and always uses + # PyObject_SetItem(inst.__dict__, key, value) in a + # loop over state.items(). + for k, v in state.items(): + setattr(inst, k, v) + if slotstate: + for k, v in slotstate.items(): + setattr(inst, k, v) + dispatch[BUILD] = load_build + + def load_mark(self): + self.append(self.mark) + dispatch[MARK] = load_mark + + def load_stop(self): + value = self.stack.pop() + raise _Stop(value) + dispatch[STOP] = load_stop + +# Helper class for load_inst/load_obj + +class _EmptyClass: + pass + +# Encode/decode longs in linear time. + +import binascii as _binascii + +def encode_long(x): + r"""Encode a long to a two's complement little-endian binary string. + Note that 0L is a special case, returning an empty string, to save a + byte in the LONG1 pickling context. + + >>> encode_long(0L) + '' + >>> encode_long(255L) + '\xff\x00' + >>> encode_long(32767L) + '\xff\x7f' + >>> encode_long(-256L) + '\x00\xff' + >>> encode_long(-32768L) + '\x00\x80' + >>> encode_long(-128L) + '\x80' + >>> encode_long(127L) + '\x7f' + >>> + """ + + if x == 0: + return '' + if x > 0: + ashex = hex(x) + assert ashex.startswith("0x") + njunkchars = 2 + ashex.endswith('L') + nibbles = len(ashex) - njunkchars + if nibbles & 1: + # need an even # of nibbles for unhexlify + ashex = "0x0" + ashex[2:] + elif int(ashex[2], 16) >= 8: + # "looks negative", so need a byte of sign bits + ashex = "0x00" + ashex[2:] + else: + # Build the 256's-complement: (1L << nbytes) + x. The trick is + # to find the number of bytes in linear time (although that should + # really be a constant-time task). + ashex = hex(-x) + assert ashex.startswith("0x") + njunkchars = 2 + ashex.endswith('L') + nibbles = len(ashex) - njunkchars + if nibbles & 1: + # Extend to a full byte. + nibbles += 1 + nbits = nibbles * 4 + x += 1L << nbits + assert x > 0 + ashex = hex(x) + njunkchars = 2 + ashex.endswith('L') + newnibbles = len(ashex) - njunkchars + if newnibbles < nibbles: + ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:] + if int(ashex[2], 16) < 8: + # "looks positive", so need a byte of sign bits + ashex = "0xff" + ashex[2:] + + if ashex.endswith('L'): + ashex = ashex[2:-1] + else: + ashex = ashex[2:] + assert len(ashex) & 1 == 0, (x, ashex) + binary = _binascii.unhexlify(ashex) + return binary[::-1] + +def decode_long(data): + r"""Decode a long from a two's complement little-endian binary string. 
+ + >>> decode_long('') + 0L + >>> decode_long("\xff\x00") + 255L + >>> decode_long("\xff\x7f") + 32767L + >>> decode_long("\x00\xff") + -256L + >>> decode_long("\x00\x80") + -32768L + >>> decode_long("\x80") + -128L + >>> decode_long("\x7f") + 127L + """ + + nbytes = len(data) + if nbytes == 0: + return 0L + ashex = _binascii.hexlify(data[::-1]) + n = long(ashex, 16) # quadratic time before Python 2.3; linear now + if data[-1] >= '\x80': + n -= 1L << (nbytes * 8) + return n + +# Shorthands + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +def dump(obj, file, protocol=None): + Pickler(file, protocol).dump(obj) + +def dumps(obj, protocol=None): + file = StringIO() + Pickler(file, protocol).dump(obj) + return file.getvalue() + +def load(file): + return Unpickler(file).load() + +def loads(str): + file = StringIO(str) + return Unpickler(file).load() + +# Doctest + +def _test(): + import doctest + return doctest.testmod() + +if __name__ == "__main__": + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,2245 @@ +'''"Executable documentation" for the pickle module. + +Extensive comments about the pickle protocols and pickle-machine opcodes +can be found here. Some functions meant for external use: + +genops(pickle) + Generate all the opcodes in a pickle, as (opcode, arg, position) triples. + +dis(pickle, out=None, memo=None, indentlevel=4) + Print a symbolic disassembly of a pickle. +''' + +__all__ = ['dis', + 'genops', + ] + +# Other ideas: +# +# - A pickle verifier: read a pickle and check it exhaustively for +# well-formedness. dis() does a lot of this already. +# +# - A protocol identifier: examine a pickle and return its protocol number +# (== the highest .proto attr value among all the opcodes in the pickle). +# dis() already prints this info at the end. +# +# - A pickle optimizer: for example, tuple-building code is sometimes more +# elaborate than necessary, catering for the possibility that the tuple +# is recursive. Or lots of times a PUT is generated that's never accessed +# by a later GET. + + +""" +"A pickle" is a program for a virtual pickle machine (PM, but more accurately +called an unpickling machine). It's a sequence of opcodes, interpreted by the +PM, building an arbitrarily complex Python object. + +For the most part, the PM is very simple: there are no looping, testing, or +conditional instructions, no arithmetic and no function calls. Opcodes are +executed once each, from first to last, until a STOP opcode is reached. + +The PM has two data areas, "the stack" and "the memo". + +Many opcodes push Python objects onto the stack; e.g., INT pushes a Python +integer object on the stack, whose value is gotten from a decimal string +literal immediately following the INT opcode in the pickle bytestream. Other +opcodes take Python objects off the stack. The result of unpickling is +whatever object is left on the stack when the final STOP opcode is executed. + +The memo is simply an array of objects, or it can be implemented as a dict +mapping little integers to objects. The memo serves as the PM's "long term +memory", and the little integers indexing the memo are akin to variable +names. 
Some opcodes pop a stack object into the memo at a given index, +and others push a memo object at a given index onto the stack again. + +At heart, that's all the PM has. Subtleties arise for these reasons: + ++ Object identity. Objects can be arbitrarily complex, and subobjects + may be shared (for example, the list [a, a] refers to the same object a + twice). It can be vital that unpickling recreate an isomorphic object + graph, faithfully reproducing sharing. + ++ Recursive objects. For example, after "L = []; L.append(L)", L is a + list, and L[0] is the same list. This is related to the object identity + point, and some sequences of pickle opcodes are subtle in order to + get the right result in all cases. + ++ Things pickle doesn't know everything about. Examples of things pickle + does know everything about are Python's builtin scalar and container + types, like ints and tuples. They generally have opcodes dedicated to + them. For things like module references and instances of user-defined + classes, pickle's knowledge is limited. Historically, many enhancements + have been made to the pickle protocol in order to do a better (faster, + and/or more compact) job on those. + ++ Backward compatibility and micro-optimization. As explained below, + pickle opcodes never go away, not even when better ways to do a thing + get invented. The repertoire of the PM just keeps growing over time. + For example, protocol 0 had two opcodes for building Python integers (INT + and LONG), protocol 1 added three more for more-efficient pickling of short + integers, and protocol 2 added two more for more-efficient pickling of + long integers (before protocol 2, the only ways to pickle a Python long + took time quadratic in the number of digits, for both pickling and + unpickling). "Opcode bloat" isn't so much a subtlety as a source of + wearying complication. + + +Pickle protocols: + +For compatibility, the meaning of a pickle opcode never changes. Instead new +pickle opcodes get added, and each version's unpickler can handle all the +pickle opcodes in all protocol versions to date. So old pickles continue to +be readable forever. The pickler can generally be told to restrict itself to +the subset of opcodes available under previous protocol versions too, so that +users can create pickles under the current version readable by older +versions. However, a pickle does not contain its version number embedded +within it. If an older unpickler tries to read a pickle using a later +protocol, the result is most likely an exception due to seeing an unknown (in +the older unpickler) opcode. + +The original pickle used what's now called "protocol 0", and what was called +"text mode" before Python 2.3. The entire pickle bytestream is made up of +printable 7-bit ASCII characters, plus the newline character, in protocol 0. +That's why it was called text mode. Protocol 0 is small and elegant, but +sometimes painfully inefficient. + +The second major set of additions is now called "protocol 1", and was called +"binary mode" before Python 2.3. This added many opcodes with arguments +consisting of arbitrary bytes, including NUL bytes and unprintable "high bit" +bytes. Binary mode pickles can be substantially smaller than equivalent +text mode pickles, and sometimes faster too; e.g., BININT represents a 4-byte +int as 4 bytes following the opcode, which is cheaper to unpickle than the +(perhaps) 11-character decimal string attached to INT. 
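(Editorial aside, not part of the original file: the size difference described
here is easy to check with the stdlib pickle and pickletools modules --

    >>> import pickle, pickletools
    >>> len(pickle.dumps(100000, 0)) > len(pickle.dumps(100000, 1))
    True
    >>> pickletools.dis(pickle.dumps(100000, 1))   # doctest: +SKIP

the protocol 0 form spells the integer out as a decimal string after INT,
while the protocol 1 form is the two-opcode program BININT, STOP with a fixed
4-byte argument.)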
Protocol 1 also added +a number of opcodes that operate on many stack elements at once (like APPENDS +and SETITEMS), and "shortcut" opcodes (like EMPTY_DICT and EMPTY_TUPLE). + +The third major set of additions came in Python 2.3, and is called "protocol +2". This added: + +- A better way to pickle instances of new-style classes (NEWOBJ). + +- A way for a pickle to identify its protocol (PROTO). + +- Time- and space- efficient pickling of long ints (LONG{1,4}). + +- Shortcuts for small tuples (TUPLE{1,2,3}}. + +- Dedicated opcodes for bools (NEWTRUE, NEWFALSE). + +- The "extension registry", a vector of popular objects that can be pushed + efficiently by index (EXT{1,2,4}). This is akin to the memo and GET, but + the registry contents are predefined (there's nothing akin to the memo's + PUT). + +Another independent change with Python 2.3 is the abandonment of any +pretense that it might be safe to load pickles received from untrusted +parties -- no sufficient security analysis has been done to guarantee +this and there isn't a use case that warrants the expense of such an +analysis. + +To this end, all tests for __safe_for_unpickling__ or for +copy_reg.safe_constructors are removed from the unpickling code. +References to these variables in the descriptions below are to be seen +as describing unpickling in Python 2.2 and before. +""" + +# Meta-rule: Descriptions are stored in instances of descriptor objects, +# with plain constructors. No meta-language is defined from which +# descriptors could be constructed. If you want, e.g., XML, write a little +# program to generate XML from the objects. + +############################################################################## +# Some pickle opcodes have an argument, following the opcode in the +# bytestream. An argument is of a specific type, described by an instance +# of ArgumentDescriptor. These are not to be confused with arguments taken +# off the stack -- ArgumentDescriptor applies only to arguments embedded in +# the opcode stream, immediately following an opcode. + +# Represents the number of bytes consumed by an argument delimited by the +# next newline character. +UP_TO_NEWLINE = -1 + +# Represents the number of bytes consumed by a two-argument opcode where +# the first argument gives the number of bytes in the second argument. 
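# (Editor's aside, not part of the original file: SHORT_BINSTRING is a handy
#  example of such an opcode -- assuming the stdlib pickle module,
#  pickle.dumps('abc', 1) starts with 'U' (SHORT_BINSTRING), then the length
#  byte chr(3), then the three string bytes, so the size of the second
#  embedded argument is taken from the first.)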
+TAKEN_FROM_ARGUMENT1 = -2 # num bytes is 1-byte unsigned int +TAKEN_FROM_ARGUMENT4 = -3 # num bytes is 4-byte signed little-endian int + +class ArgumentDescriptor(object): + __slots__ = ( + # name of descriptor record, also a module global name; a string + 'name', + + # length of argument, in bytes; an int; UP_TO_NEWLINE and + # TAKEN_FROM_ARGUMENT{1,4} are negative values for variable-length + # cases + 'n', + + # a function taking a file-like object, reading this kind of argument + # from the object at the current position, advancing the current + # position by n bytes, and returning the value of the argument + 'reader', + + # human-readable docs for this arg descriptor; a string + 'doc', + ) + + def __init__(self, name, n, reader, doc): + assert isinstance(name, str) + self.name = name + + assert isinstance(n, int) and (n >= 0 or + n in (UP_TO_NEWLINE, + TAKEN_FROM_ARGUMENT1, + TAKEN_FROM_ARGUMENT4)) + self.n = n + + self.reader = reader + + assert isinstance(doc, str) + self.doc = doc + +from struct import unpack as _unpack + +def read_uint1(f): + r""" + >>> import StringIO + >>> read_uint1(StringIO.StringIO('\xff')) + 255 + """ + + data = f.read(1) + if data: + return ord(data) + raise ValueError("not enough data in stream to read uint1") + +uint1 = ArgumentDescriptor( + name='uint1', + n=1, + reader=read_uint1, + doc="One-byte unsigned integer.") + + +def read_uint2(f): + r""" + >>> import StringIO + >>> read_uint2(StringIO.StringIO('\xff\x00')) + 255 + >>> read_uint2(StringIO.StringIO('\xff\xff')) + 65535 + """ + + data = f.read(2) + if len(data) == 2: + return _unpack(">> import StringIO + >>> read_int4(StringIO.StringIO('\xff\x00\x00\x00')) + 255 + >>> read_int4(StringIO.StringIO('\x00\x00\x00\x80')) == -(2**31) + True + """ + + data = f.read(4) + if len(data) == 4: + return _unpack(">> import StringIO + >>> read_stringnl(StringIO.StringIO("'abcd'\nefg\n")) + 'abcd' + + >>> read_stringnl(StringIO.StringIO("\n")) + Traceback (most recent call last): + ... + ValueError: no string quotes around '' + + >>> read_stringnl(StringIO.StringIO("\n"), stripquotes=False) + '' + + >>> read_stringnl(StringIO.StringIO("''\n")) + '' + + >>> read_stringnl(StringIO.StringIO('"abcd"')) + Traceback (most recent call last): + ... + ValueError: no newline found when trying to read stringnl + + Embedded escapes are undone in the result. + >>> read_stringnl(StringIO.StringIO(r"'a\n\\b\x00c\td'" + "\n'e'")) + 'a\n\\b\x00c\td' + """ + + data = f.readline() + if not data.endswith('\n'): + raise ValueError("no newline found when trying to read stringnl") + data = data[:-1] # lose the newline + + if stripquotes: + for q in "'\"": + if data.startswith(q): + if not data.endswith(q): + raise ValueError("strinq quote %r not found at both " + "ends of %r" % (q, data)) + data = data[1:-1] + break + else: + raise ValueError("no string quotes around %r" % data) + + # I'm not sure when 'string_escape' was added to the std codecs; it's + # crazy not to use it if it's there. + if decode: + data = data.decode('string_escape') + return data + +stringnl = ArgumentDescriptor( + name='stringnl', + n=UP_TO_NEWLINE, + reader=read_stringnl, + doc="""A newline-terminated string. + + This is a repr-style string, with embedded escapes, and + bracketing quotes. + """) + +def read_stringnl_noescape(f): + return read_stringnl(f, decode=False, stripquotes=False) + +stringnl_noescape = ArgumentDescriptor( + name='stringnl_noescape', + n=UP_TO_NEWLINE, + reader=read_stringnl_noescape, + doc="""A newline-terminated string. 
+ + This is a str-style string, without embedded escapes, + or bracketing quotes. It should consist solely of + printable ASCII characters. + """) + +def read_stringnl_noescape_pair(f): + r""" + >>> import StringIO + >>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk")) + 'Queue Empty' + """ + + return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f)) + +stringnl_noescape_pair = ArgumentDescriptor( + name='stringnl_noescape_pair', + n=UP_TO_NEWLINE, + reader=read_stringnl_noescape_pair, + doc="""A pair of newline-terminated strings. + + These are str-style strings, without embedded + escapes, or bracketing quotes. They should + consist solely of printable ASCII characters. + The pair is returned as a single string, with + a single blank separating the two strings. + """) + +def read_string4(f): + r""" + >>> import StringIO + >>> read_string4(StringIO.StringIO("\x00\x00\x00\x00abc")) + '' + >>> read_string4(StringIO.StringIO("\x03\x00\x00\x00abcdef")) + 'abc' + >>> read_string4(StringIO.StringIO("\x00\x00\x00\x03abcdef")) + Traceback (most recent call last): + ... + ValueError: expected 50331648 bytes in a string4, but only 6 remain + """ + + n = read_int4(f) + if n < 0: + raise ValueError("string4 byte count < 0: %d" % n) + data = f.read(n) + if len(data) == n: + return data + raise ValueError("expected %d bytes in a string4, but only %d remain" % + (n, len(data))) + +string4 = ArgumentDescriptor( + name="string4", + n=TAKEN_FROM_ARGUMENT4, + reader=read_string4, + doc="""A counted string. + + The first argument is a 4-byte little-endian signed int giving + the number of bytes in the string, and the second argument is + that many bytes. + """) + + +def read_string1(f): + r""" + >>> import StringIO + >>> read_string1(StringIO.StringIO("\x00")) + '' + >>> read_string1(StringIO.StringIO("\x03abcdef")) + 'abc' + """ + + n = read_uint1(f) + assert n >= 0 + data = f.read(n) + if len(data) == n: + return data + raise ValueError("expected %d bytes in a string1, but only %d remain" % + (n, len(data))) + +string1 = ArgumentDescriptor( + name="string1", + n=TAKEN_FROM_ARGUMENT1, + reader=read_string1, + doc="""A counted string. + + The first argument is a 1-byte unsigned int giving the number + of bytes in the string, and the second argument is that many + bytes. + """) + + +def read_unicodestringnl(f): + r""" + >>> import StringIO + >>> read_unicodestringnl(StringIO.StringIO("abc\uabcd\njunk")) + u'abc\uabcd' + """ + + data = f.readline() + if not data.endswith('\n'): + raise ValueError("no newline found when trying to read " + "unicodestringnl") + data = data[:-1] # lose the newline + return unicode(data, 'raw-unicode-escape') + +unicodestringnl = ArgumentDescriptor( + name='unicodestringnl', + n=UP_TO_NEWLINE, + reader=read_unicodestringnl, + doc="""A newline-terminated Unicode string. + + This is raw-unicode-escape encoded, so consists of + printable ASCII characters, and may contain embedded + escape sequences. + """) + +def read_unicodestring4(f): + r""" + >>> import StringIO + >>> s = u'abcd\uabcd' + >>> enc = s.encode('utf-8') + >>> enc + 'abcd\xea\xaf\x8d' + >>> n = chr(len(enc)) + chr(0) * 3 # little-endian 4-byte length + >>> t = read_unicodestring4(StringIO.StringIO(n + enc + 'junk')) + >>> s == t + True + + >>> read_unicodestring4(StringIO.StringIO(n + enc[:-1])) + Traceback (most recent call last): + ... 
+ ValueError: expected 7 bytes in a unicodestring4, but only 6 remain + """ + + n = read_int4(f) + if n < 0: + raise ValueError("unicodestring4 byte count < 0: %d" % n) + data = f.read(n) + if len(data) == n: + return unicode(data, 'utf-8') + raise ValueError("expected %d bytes in a unicodestring4, but only %d " + "remain" % (n, len(data))) + +unicodestring4 = ArgumentDescriptor( + name="unicodestring4", + n=TAKEN_FROM_ARGUMENT4, + reader=read_unicodestring4, + doc="""A counted Unicode string. + + The first argument is a 4-byte little-endian signed int + giving the number of bytes in the string, and the second + argument-- the UTF-8 encoding of the Unicode string -- + contains that many bytes. + """) + + +def read_decimalnl_short(f): + r""" + >>> import StringIO + >>> read_decimalnl_short(StringIO.StringIO("1234\n56")) + 1234 + + >>> read_decimalnl_short(StringIO.StringIO("1234L\n56")) + Traceback (most recent call last): + ... + ValueError: trailing 'L' not allowed in '1234L' + """ + + s = read_stringnl(f, decode=False, stripquotes=False) + if s.endswith("L"): + raise ValueError("trailing 'L' not allowed in %r" % s) + + # It's not necessarily true that the result fits in a Python short int: + # the pickle may have been written on a 64-bit box. There's also a hack + # for True and False here. + if s == "00": + return False + elif s == "01": + return True + + try: + return int(s) + except OverflowError: + return long(s) + +def read_decimalnl_long(f): + r""" + >>> import StringIO + + >>> read_decimalnl_long(StringIO.StringIO("1234\n56")) + Traceback (most recent call last): + ... + ValueError: trailing 'L' required in '1234' + + Someday the trailing 'L' will probably go away from this output. + + >>> read_decimalnl_long(StringIO.StringIO("1234L\n56")) + 1234L + + >>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6")) + 123456789012345678901234L + """ + + s = read_stringnl(f, decode=False, stripquotes=False) + if not s.endswith("L"): + raise ValueError("trailing 'L' required in %r" % s) + return long(s) + + +decimalnl_short = ArgumentDescriptor( + name='decimalnl_short', + n=UP_TO_NEWLINE, + reader=read_decimalnl_short, + doc="""A newline-terminated decimal integer literal. + + This never has a trailing 'L', and the integer fit + in a short Python int on the box where the pickle + was written -- but there's no guarantee it will fit + in a short Python int on the box where the pickle + is read. + """) + +decimalnl_long = ArgumentDescriptor( + name='decimalnl_long', + n=UP_TO_NEWLINE, + reader=read_decimalnl_long, + doc="""A newline-terminated decimal integer literal. + + This has a trailing 'L', and can represent integers + of any size. + """) + + +def read_floatnl(f): + r""" + >>> import StringIO + >>> read_floatnl(StringIO.StringIO("-1.25\n6")) + -1.25 + """ + s = read_stringnl(f, decode=False, stripquotes=False) + return float(s) + +floatnl = ArgumentDescriptor( + name='floatnl', + n=UP_TO_NEWLINE, + reader=read_floatnl, + doc="""A newline-terminated decimal floating literal. + + In general this requires 17 significant digits for roundtrip + identity, and pickling then unpickling infinities, NaNs, and + minus zero doesn't work across boxes, or on some boxes even + on itself (e.g., Windows can't read the strings it produces + for infinities or NaNs). 
+ """) + +def read_float8(f): + r""" + >>> import StringIO, struct + >>> raw = struct.pack(">d", -1.25) + >>> raw + '\xbf\xf4\x00\x00\x00\x00\x00\x00' + >>> read_float8(StringIO.StringIO(raw + "\n")) + -1.25 + """ + + data = f.read(8) + if len(data) == 8: + return _unpack(">d", data)[0] + raise ValueError("not enough data in stream to read float8") + + +float8 = ArgumentDescriptor( + name='float8', + n=8, + reader=read_float8, + doc="""An 8-byte binary representation of a float, big-endian. + + The format is unique to Python, and shared with the struct + module (format string '>d') "in theory" (the struct and cPickle + implementations don't share the code -- they should). It's + strongly related to the IEEE-754 double format, and, in normal + cases, is in fact identical to the big-endian 754 double format. + On other boxes the dynamic range is limited to that of a 754 + double, and "add a half and chop" rounding is used to reduce + the precision to 53 bits. However, even on a 754 box, + infinities, NaNs, and minus zero may not be handled correctly + (may not survive roundtrip pickling intact). + """) + +# Protocol 2 formats + +from pickle import decode_long + +def read_long1(f): + r""" + >>> import StringIO + >>> read_long1(StringIO.StringIO("\x00")) + 0L + >>> read_long1(StringIO.StringIO("\x02\xff\x00")) + 255L + >>> read_long1(StringIO.StringIO("\x02\xff\x7f")) + 32767L + >>> read_long1(StringIO.StringIO("\x02\x00\xff")) + -256L + >>> read_long1(StringIO.StringIO("\x02\x00\x80")) + -32768L + """ + + n = read_uint1(f) + data = f.read(n) + if len(data) != n: + raise ValueError("not enough data in stream to read long1") + return decode_long(data) + +long1 = ArgumentDescriptor( + name="long1", + n=TAKEN_FROM_ARGUMENT1, + reader=read_long1, + doc="""A binary long, little-endian, using 1-byte size. + + This first reads one byte as an unsigned size, then reads that + many bytes and interprets them as a little-endian 2's-complement long. + If the size is 0, that's taken as a shortcut for the long 0L. + """) + +def read_long4(f): + r""" + >>> import StringIO + >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00")) + 255L + >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f")) + 32767L + >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff")) + -256L + >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80")) + -32768L + >>> read_long1(StringIO.StringIO("\x00\x00\x00\x00")) + 0L + """ + + n = read_int4(f) + if n < 0: + raise ValueError("long4 byte count < 0: %d" % n) + data = f.read(n) + if len(data) != n: + raise ValueError("not enough data in stream to read long4") + return decode_long(data) + +long4 = ArgumentDescriptor( + name="long4", + n=TAKEN_FROM_ARGUMENT4, + reader=read_long4, + doc="""A binary representation of a long, little-endian. + + This first reads four bytes as a signed size (but requires the + size to be >= 0), then reads that many bytes and interprets them + as a little-endian 2's-complement long. If the size is 0, that's taken + as a shortcut for the long 0L, although LONG1 should really be used + then instead (and in any case where # of bytes < 256). + """) + + +############################################################################## +# Object descriptors. The stack used by the pickle machine holds objects, +# and in the stack_before and stack_after attributes of OpcodeInfo +# descriptors we need names to describe the various types of objects that can +# appear on the stack. 
+ +class StackObject(object): + __slots__ = ( + # name of descriptor record, for info only + 'name', + + # type of object, or tuple of type objects (meaning the object can + # be of any type in the tuple) + 'obtype', + + # human-readable docs for this kind of stack object; a string + 'doc', + ) + + def __init__(self, name, obtype, doc): + assert isinstance(name, str) + self.name = name + + assert isinstance(obtype, type) or isinstance(obtype, tuple) + if isinstance(obtype, tuple): + for contained in obtype: + assert isinstance(contained, type) + self.obtype = obtype + + assert isinstance(doc, str) + self.doc = doc + + def __repr__(self): + return self.name + + +pyint = StackObject( + name='int', + obtype=int, + doc="A short (as opposed to long) Python integer object.") + +pylong = StackObject( + name='long', + obtype=long, + doc="A long (as opposed to short) Python integer object.") + +pyinteger_or_bool = StackObject( + name='int_or_bool', + obtype=(int, long, bool), + doc="A Python integer object (short or long), or " + "a Python bool.") + +pybool = StackObject( + name='bool', + obtype=(bool,), + doc="A Python bool object.") + +pyfloat = StackObject( + name='float', + obtype=float, + doc="A Python float object.") + +pystring = StackObject( + name='str', + obtype=str, + doc="A Python string object.") + +pyunicode = StackObject( + name='unicode', + obtype=unicode, + doc="A Python Unicode string object.") + +pynone = StackObject( + name="None", + obtype=type(None), + doc="The Python None object.") + +pytuple = StackObject( + name="tuple", + obtype=tuple, + doc="A Python tuple object.") + +pylist = StackObject( + name="list", + obtype=list, + doc="A Python list object.") + +pydict = StackObject( + name="dict", + obtype=dict, + doc="A Python dict object.") + +anyobject = StackObject( + name='any', + obtype=object, + doc="Any kind of object whatsoever.") + +markobject = StackObject( + name="mark", + obtype=StackObject, + doc="""'The mark' is a unique object. + + Opcodes that operate on a variable number of objects + generally don't embed the count of objects in the opcode, + or pull it off the stack. Instead the MARK opcode is used + to push a special marker object on the stack, and then + some other opcodes grab all the objects from the top of + the stack down to (but not including) the topmost marker + object. + """) + +stackslice = StackObject( + name="stackslice", + obtype=StackObject, + doc="""An object representing a contiguous slice of the stack. + + This is used in conjuction with markobject, to represent all + of the stack following the topmost markobject. For example, + the POP_MARK opcode changes the stack from + + [..., markobject, stackslice] + to + [...] + + No matter how many object are on the stack after the topmost + markobject, POP_MARK gets rid of all of them (including the + topmost markobject too). + """) + +############################################################################## +# Descriptors for pickle opcodes. + +class OpcodeInfo(object): + + __slots__ = ( + # symbolic name of opcode; a string + 'name', + + # the code used in a bytestream to represent the opcode; a + # one-character string + 'code', + + # If the opcode has an argument embedded in the byte string, an + # instance of ArgumentDescriptor specifying its type. Note that + # arg.reader(s) can be used to read and decode the argument from + # the bytestream s, and arg.doc documents the format of the raw + # argument bytes. 
If the opcode doesn't have an argument embedded + # in the bytestream, arg should be None. + 'arg', + + # what the stack looks like before this opcode runs; a list + 'stack_before', + + # what the stack looks like after this opcode runs; a list + 'stack_after', + + # the protocol number in which this opcode was introduced; an int + 'proto', + + # human-readable docs for this opcode; a string + 'doc', + ) + + def __init__(self, name, code, arg, + stack_before, stack_after, proto, doc): + assert isinstance(name, str) + self.name = name + + assert isinstance(code, str) + assert len(code) == 1 + self.code = code + + assert arg is None or isinstance(arg, ArgumentDescriptor) + self.arg = arg + + assert isinstance(stack_before, list) + for x in stack_before: + assert isinstance(x, StackObject) + self.stack_before = stack_before + + assert isinstance(stack_after, list) + for x in stack_after: + assert isinstance(x, StackObject) + self.stack_after = stack_after + + assert isinstance(proto, int) and 0 <= proto <= 2 + self.proto = proto + + assert isinstance(doc, str) + self.doc = doc + +I = OpcodeInfo +opcodes = [ + + # Ways to spell integers. + + I(name='INT', + code='I', + arg=decimalnl_short, + stack_before=[], + stack_after=[pyinteger_or_bool], + proto=0, + doc="""Push an integer or bool. + + The argument is a newline-terminated decimal literal string. + + The intent may have been that this always fit in a short Python int, + but INT can be generated in pickles written on a 64-bit box that + require a Python long on a 32-bit box. The difference between this + and LONG then is that INT skips a trailing 'L', and produces a short + int whenever possible. + + Another difference is due to that, when bool was introduced as a + distinct type in 2.3, builtin names True and False were also added to + 2.2.2, mapping to ints 1 and 0. For compatibility in both directions, + True gets pickled as INT + "I01\\n", and False as INT + "I00\\n". + Leading zeroes are never produced for a genuine integer. The 2.3 + (and later) unpicklers special-case these and return bool instead; + earlier unpicklers ignore the leading "0" and return the int. + """), + + I(name='BININT', + code='J', + arg=int4, + stack_before=[], + stack_after=[pyint], + proto=1, + doc="""Push a four-byte signed integer. + + This handles the full range of Python (short) integers on a 32-bit + box, directly as binary bytes (1 for the opcode and 4 for the integer). + If the integer is non-negative and fits in 1 or 2 bytes, pickling via + BININT1 or BININT2 saves space. + """), + + I(name='BININT1', + code='K', + arg=uint1, + stack_before=[], + stack_after=[pyint], + proto=1, + doc="""Push a one-byte unsigned integer. + + This is a space optimization for pickling very small non-negative ints, + in range(256). + """), + + I(name='BININT2', + code='M', + arg=uint2, + stack_before=[], + stack_after=[pyint], + proto=1, + doc="""Push a two-byte unsigned integer. + + This is a space optimization for pickling small positive ints, in + range(256, 2**16). Integers in range(256) can also be pickled via + BININT2, but BININT1 instead saves a byte. + """), + + I(name='LONG', + code='L', + arg=decimalnl_long, + stack_before=[], + stack_after=[pylong], + proto=0, + doc="""Push a long integer. + + The same as INT, except that the literal ends with 'L', and always + unpickles to a Python long. There doesn't seem a real purpose to the + trailing 'L'. 
+
+    Note that LONG takes time quadratic in the number of digits when
+    unpickling (this is simply due to the nature of decimal->binary
+    conversion). Proto 2 added linear-time (in C; still quadratic-time
+    in Python) LONG1 and LONG4 opcodes.
+    """),
+
+    I(name="LONG1",
+      code='\x8a',
+      arg=long1,
+      stack_before=[],
+      stack_after=[pylong],
+      proto=2,
+      doc="""Long integer using one-byte length.
+
+      A more efficient encoding of a Python long; the long1 encoding
+      says it all."""),
+
+    I(name="LONG4",
+      code='\x8b',
+      arg=long4,
+      stack_before=[],
+      stack_after=[pylong],
+      proto=2,
+      doc="""Long integer using four-byte length.
+
+      A more efficient encoding of a Python long; the long4 encoding
+      says it all."""),
+
+    # Ways to spell strings (8-bit, not Unicode).
+
+    I(name='STRING',
+      code='S',
+      arg=stringnl,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=0,
+      doc="""Push a Python string object.
+
+      The argument is a repr-style string, with bracketing quote characters,
+      and perhaps embedded escapes. The argument extends until the next
+      newline character.
+      """),
+
+    I(name='BINSTRING',
+      code='T',
+      arg=string4,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=1,
+      doc="""Push a Python string object.
+
+      There are two arguments: the first is a 4-byte little-endian signed int
+      giving the number of bytes in the string, and the second is that many
+      bytes, which are taken literally as the string content.
+      """),
+
+    I(name='SHORT_BINSTRING',
+      code='U',
+      arg=string1,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=1,
+      doc="""Push a Python string object.
+
+      There are two arguments: the first is a 1-byte unsigned int giving
+      the number of bytes in the string, and the second is that many bytes,
+      which are taken literally as the string content.
+      """),
+
+    # Ways to spell None.
+
+    I(name='NONE',
+      code='N',
+      arg=None,
+      stack_before=[],
+      stack_after=[pynone],
+      proto=0,
+      doc="Push None on the stack."),
+
+    # Ways to spell bools, starting with proto 2. See INT for how this was
+    # done before proto 2.
+
+    I(name='NEWTRUE',
+      code='\x88',
+      arg=None,
+      stack_before=[],
+      stack_after=[pybool],
+      proto=2,
+      doc="""True.
+
+      Push True onto the stack."""),
+
+    I(name='NEWFALSE',
+      code='\x89',
+      arg=None,
+      stack_before=[],
+      stack_after=[pybool],
+      proto=2,
+      doc="""False.
+
+      Push False onto the stack."""),
+
+    # Ways to spell Unicode strings.
+
+    I(name='UNICODE',
+      code='V',
+      arg=unicodestringnl,
+      stack_before=[],
+      stack_after=[pyunicode],
+      proto=0,  # this may be pure-text, but it's a later addition
+      doc="""Push a Python Unicode string object.
+
+      The argument is a raw-unicode-escape encoding of a Unicode string,
+      and so may contain embedded escape sequences. The argument extends
+      until the next newline character.
+      """),
+
+    I(name='BINUNICODE',
+      code='X',
+      arg=unicodestring4,
+      stack_before=[],
+      stack_after=[pyunicode],
+      proto=1,
+      doc="""Push a Python Unicode string object.
+
+      There are two arguments: the first is a 4-byte little-endian signed int
+      giving the number of bytes in the string. The second is that many
+      bytes, and is the UTF-8 encoding of the Unicode string.
+      """),
+
+    # Ways to spell floats.
+
+    I(name='FLOAT',
+      code='F',
+      arg=floatnl,
+      stack_before=[],
+      stack_after=[pyfloat],
+      proto=0,
+      doc="""Newline-terminated decimal float literal.
+ + The argument is repr(a_float), and in general requires 17 significant + digits for roundtrip conversion to be an identity (this is so for + IEEE-754 double precision values, which is what Python float maps to + on most boxes). + + In general, FLOAT cannot be used to transport infinities, NaNs, or + minus zero across boxes (or even on a single box, if the platform C + library can't read the strings it produces for such things -- Windows + is like that), but may do less damage than BINFLOAT on boxes with + greater precision or dynamic range than IEEE-754 double. + """), + + I(name='BINFLOAT', + code='G', + arg=float8, + stack_before=[], + stack_after=[pyfloat], + proto=1, + doc="""Float stored in binary form, with 8 bytes of data. + + This generally requires less than half the space of FLOAT encoding. + In general, BINFLOAT cannot be used to transport infinities, NaNs, or + minus zero, raises an exception if the exponent exceeds the range of + an IEEE-754 double, and retains no more than 53 bits of precision (if + there are more than that, "add a half and chop" rounding is used to + cut it back to 53 significant bits). + """), + + # Ways to build lists. + + I(name='EMPTY_LIST', + code=']', + arg=None, + stack_before=[], + stack_after=[pylist], + proto=1, + doc="Push an empty list."), + + I(name='APPEND', + code='a', + arg=None, + stack_before=[pylist, anyobject], + stack_after=[pylist], + proto=0, + doc="""Append an object to a list. + + Stack before: ... pylist anyobject + Stack after: ... pylist+[anyobject] + + although pylist is really extended in-place. + """), + + I(name='APPENDS', + code='e', + arg=None, + stack_before=[pylist, markobject, stackslice], + stack_after=[pylist], + proto=1, + doc="""Extend a list by a slice of stack objects. + + Stack before: ... pylist markobject stackslice + Stack after: ... pylist+stackslice + + although pylist is really extended in-place. + """), + + I(name='LIST', + code='l', + arg=None, + stack_before=[markobject, stackslice], + stack_after=[pylist], + proto=0, + doc="""Build a list out of the topmost stack slice, after markobject. + + All the stack entries following the topmost markobject are placed into + a single Python list, which single list object replaces all of the + stack from the topmost markobject onward. For example, + + Stack before: ... markobject 1 2 3 'abc' + Stack after: ... [1, 2, 3, 'abc'] + """), + + # Ways to build tuples. + + I(name='EMPTY_TUPLE', + code=')', + arg=None, + stack_before=[], + stack_after=[pytuple], + proto=1, + doc="Push an empty tuple."), + + I(name='TUPLE', + code='t', + arg=None, + stack_before=[markobject, stackslice], + stack_after=[pytuple], + proto=0, + doc="""Build a tuple out of the topmost stack slice, after markobject. + + All the stack entries following the topmost markobject are placed into + a single Python tuple, which single tuple object replaces all of the + stack from the topmost markobject onward. For example, + + Stack before: ... markobject 1 2 3 'abc' + Stack after: ... (1, 2, 3, 'abc') + """), + + I(name='TUPLE1', + code='\x85', + arg=None, + stack_before=[anyobject], + stack_after=[pytuple], + proto=2, + doc="""One-tuple. + + This code pops one value off the stack and pushes a tuple of + length 1 whose one item is that value back onto it. IOW: + + stack[-1] = tuple(stack[-1:]) + """), + + I(name='TUPLE2', + code='\x86', + arg=None, + stack_before=[anyobject, anyobject], + stack_after=[pytuple], + proto=2, + doc="""One-tuple. 
+ + This code pops two values off the stack and pushes a tuple + of length 2 whose items are those values back onto it. IOW: + + stack[-2:] = [tuple(stack[-2:])] + """), + + I(name='TUPLE3', + code='\x87', + arg=None, + stack_before=[anyobject, anyobject, anyobject], + stack_after=[pytuple], + proto=2, + doc="""One-tuple. + + This code pops three values off the stack and pushes a tuple + of length 3 whose items are those values back onto it. IOW: + + stack[-3:] = [tuple(stack[-3:])] + """), + + # Ways to build dicts. + + I(name='EMPTY_DICT', + code='}', + arg=None, + stack_before=[], + stack_after=[pydict], + proto=1, + doc="Push an empty dict."), + + I(name='DICT', + code='d', + arg=None, + stack_before=[markobject, stackslice], + stack_after=[pydict], + proto=0, + doc="""Build a dict out of the topmost stack slice, after markobject. + + All the stack entries following the topmost markobject are placed into + a single Python dict, which single dict object replaces all of the + stack from the topmost markobject onward. The stack slice alternates + key, value, key, value, .... For example, + + Stack before: ... markobject 1 2 3 'abc' + Stack after: ... {1: 2, 3: 'abc'} + """), + + I(name='SETITEM', + code='s', + arg=None, + stack_before=[pydict, anyobject, anyobject], + stack_after=[pydict], + proto=0, + doc="""Add a key+value pair to an existing dict. + + Stack before: ... pydict key value + Stack after: ... pydict + + where pydict has been modified via pydict[key] = value. + """), + + I(name='SETITEMS', + code='u', + arg=None, + stack_before=[pydict, markobject, stackslice], + stack_after=[pydict], + proto=1, + doc="""Add an arbitrary number of key+value pairs to an existing dict. + + The slice of the stack following the topmost markobject is taken as + an alternating sequence of keys and values, added to the dict + immediately under the topmost markobject. Everything at and after the + topmost markobject is popped, leaving the mutated dict at the top + of the stack. + + Stack before: ... pydict markobject key_1 value_1 ... key_n value_n + Stack after: ... pydict + + where pydict has been modified via pydict[key_i] = value_i for i in + 1, 2, ..., n, and in that order. + """), + + # Stack manipulation. + + I(name='POP', + code='0', + arg=None, + stack_before=[anyobject], + stack_after=[], + proto=0, + doc="Discard the top stack item, shrinking the stack by one item."), + + I(name='DUP', + code='2', + arg=None, + stack_before=[anyobject], + stack_after=[anyobject, anyobject], + proto=0, + doc="Push the top stack item onto the stack again, duplicating it."), + + I(name='MARK', + code='(', + arg=None, + stack_before=[], + stack_after=[markobject], + proto=0, + doc="""Push markobject onto the stack. + + markobject is a unique object, used by other opcodes to identify a + region of the stack containing a variable number of objects for them + to work on. See markobject.doc for more detail. + """), + + I(name='POP_MARK', + code='1', + arg=None, + stack_before=[markobject, stackslice], + stack_after=[], + proto=0, + doc="""Pop all the stack objects at and above the topmost markobject. + + When an opcode using a variable number of stack objects is done, + POP_MARK is used to remove those objects, and to remove the markobject + that delimited their starting position on the stack. + """), + + # Memo manipulation. There are really only two operations (get and put), + # each in all-text, "short binary", and "long binary" flavors. 
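Before the individual memo opcodes, a small editorial illustration (not part of
the committed file) of when they show up in practice, assuming CPython 2.x and
that this file is importable as pickletools; dis() is defined further down in
this same file:

    import pickle, pickletools

    shared = [1, 2]
    # The first occurrence of 'shared' is stored in the memo (PUT/BINPUT);
    # the second occurrence is fetched back by memo index (GET/BINGET)
    # instead of being pickled again.
    pickletools.dis(pickle.dumps([shared, shared], 0))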
+ + I(name='GET', + code='g', + arg=decimalnl_short, + stack_before=[], + stack_after=[anyobject], + proto=0, + doc="""Read an object from the memo and push it on the stack. + + The index of the memo object to push is given by the newline-teriminated + decimal string following. BINGET and LONG_BINGET are space-optimized + versions. + """), + + I(name='BINGET', + code='h', + arg=uint1, + stack_before=[], + stack_after=[anyobject], + proto=1, + doc="""Read an object from the memo and push it on the stack. + + The index of the memo object to push is given by the 1-byte unsigned + integer following. + """), + + I(name='LONG_BINGET', + code='j', + arg=int4, + stack_before=[], + stack_after=[anyobject], + proto=1, + doc="""Read an object from the memo and push it on the stack. + + The index of the memo object to push is given by the 4-byte signed + little-endian integer following. + """), + + I(name='PUT', + code='p', + arg=decimalnl_short, + stack_before=[], + stack_after=[], + proto=0, + doc="""Store the stack top into the memo. The stack is not popped. + + The index of the memo location to write into is given by the newline- + terminated decimal string following. BINPUT and LONG_BINPUT are + space-optimized versions. + """), + + I(name='BINPUT', + code='q', + arg=uint1, + stack_before=[], + stack_after=[], + proto=1, + doc="""Store the stack top into the memo. The stack is not popped. + + The index of the memo location to write into is given by the 1-byte + unsigned integer following. + """), + + I(name='LONG_BINPUT', + code='r', + arg=int4, + stack_before=[], + stack_after=[], + proto=1, + doc="""Store the stack top into the memo. The stack is not popped. + + The index of the memo location to write into is given by the 4-byte + signed little-endian integer following. + """), + + # Access the extension registry (predefined objects). Akin to the GET + # family. + + I(name='EXT1', + code='\x82', + arg=uint1, + stack_before=[], + stack_after=[anyobject], + proto=2, + doc="""Extension code. + + This code and the similar EXT2 and EXT4 allow using a registry + of popular objects that are pickled by name, typically classes. + It is envisioned that through a global negotiation and + registration process, third parties can set up a mapping between + ints and object names. + + In order to guarantee pickle interchangeability, the extension + code registry ought to be global, although a range of codes may + be reserved for private use. + + EXT1 has a 1-byte integer argument. This is used to index into the + extension registry, and the object at that index is pushed on the stack. + """), + + I(name='EXT2', + code='\x83', + arg=uint2, + stack_before=[], + stack_after=[anyobject], + proto=2, + doc="""Extension code. + + See EXT1. EXT2 has a two-byte integer argument. + """), + + I(name='EXT4', + code='\x84', + arg=int4, + stack_before=[], + stack_after=[anyobject], + proto=2, + doc="""Extension code. + + See EXT1. EXT4 has a four-byte integer argument. + """), + + # Push a class object, or module function, on the stack, via its module + # and name. + + I(name='GLOBAL', + code='c', + arg=stringnl_noescape_pair, + stack_before=[], + stack_after=[anyobject], + proto=0, + doc="""Push a global object (module.attr) on the stack. + + Two newline-terminated strings follow the GLOBAL opcode. The first is + taken as a module name, and the second as a class name. The class + object module.class is pushed on the stack. 
More accurately, the + object returned by self.find_class(module, class) is pushed on the + stack, so unpickling subclasses can override this form of lookup. + """), + + # Ways to build objects of classes pickle doesn't know about directly + # (user-defined classes). I despair of documenting this accurately + # and comprehensibly -- you really have to read the pickle code to + # find all the special cases. + + I(name='REDUCE', + code='R', + arg=None, + stack_before=[anyobject, anyobject], + stack_after=[anyobject], + proto=0, + doc="""Push an object built from a callable and an argument tuple. + + The opcode is named to remind of the __reduce__() method. + + Stack before: ... callable pytuple + Stack after: ... callable(*pytuple) + + The callable and the argument tuple are the first two items returned + by a __reduce__ method. Applying the callable to the argtuple is + supposed to reproduce the original object, or at least get it started. + If the __reduce__ method returns a 3-tuple, the last component is an + argument to be passed to the object's __setstate__, and then the REDUCE + opcode is followed by code to create setstate's argument, and then a + BUILD opcode to apply __setstate__ to that argument. + + If type(callable) is not ClassType, REDUCE complains unless the + callable has been registered with the copy_reg module's + safe_constructors dict, or the callable has a magic + '__safe_for_unpickling__' attribute with a true value. I'm not sure + why it does this, but I've sure seen this complaint often enough when + I didn't want to . + """), + + I(name='BUILD', + code='b', + arg=None, + stack_before=[anyobject, anyobject], + stack_after=[anyobject], + proto=0, + doc="""Finish building an object, via __setstate__ or dict update. + + Stack before: ... anyobject argument + Stack after: ... anyobject + + where anyobject may have been mutated, as follows: + + If the object has a __setstate__ method, + + anyobject.__setstate__(argument) + + is called. + + Else the argument must be a dict, the object must have a __dict__, and + the object is updated via + + anyobject.__dict__.update(argument) + + This may raise RuntimeError in restricted execution mode (which + disallows access to __dict__ directly); in that case, the object + is updated instead via + + for k, v in argument.items(): + anyobject[k] = v + """), + + I(name='INST', + code='i', + arg=stringnl_noescape_pair, + stack_before=[markobject, stackslice], + stack_after=[anyobject], + proto=0, + doc="""Build a class instance. + + This is the protocol 0 version of protocol 1's OBJ opcode. + INST is followed by two newline-terminated strings, giving a + module and class name, just as for the GLOBAL opcode (and see + GLOBAL for more details about that). self.find_class(module, name) + is used to get a class object. + + In addition, all the objects on the stack following the topmost + markobject are gathered into a tuple and popped (along with the + topmost markobject), just as for the TUPLE opcode. + + Now it gets complicated. If all of these are true: + + + The argtuple is empty (markobject was at the top of the stack + at the start). + + + It's an old-style class object (the type of the class object is + ClassType). + + + The class object does not have a __getinitargs__ attribute. + + then we want to create an old-style class instance without invoking + its __init__() method (pickle has waffled on this over the years; not + calling __init__() is current wisdom). 
In this case, an instance of + an old-style dummy class is created, and then we try to rebind its + __class__ attribute to the desired class object. If this succeeds, + the new instance object is pushed on the stack, and we're done. In + restricted execution mode it can fail (assignment to __class__ is + disallowed), and I'm not really sure what happens then -- it looks + like the code ends up calling the class object's __init__ anyway, + via falling into the next case. + + Else (the argtuple is not empty, it's not an old-style class object, + or the class object does have a __getinitargs__ attribute), the code + first insists that the class object have a __safe_for_unpickling__ + attribute. Unlike as for the __safe_for_unpickling__ check in REDUCE, + it doesn't matter whether this attribute has a true or false value, it + only matters whether it exists (XXX this is a bug; cPickle + requires the attribute to be true). If __safe_for_unpickling__ + doesn't exist, UnpicklingError is raised. + + Else (the class object does have a __safe_for_unpickling__ attr), + the class object obtained from INST's arguments is applied to the + argtuple obtained from the stack, and the resulting instance object + is pushed on the stack. + + NOTE: checks for __safe_for_unpickling__ went away in Python 2.3. + """), + + I(name='OBJ', + code='o', + arg=None, + stack_before=[markobject, anyobject, stackslice], + stack_after=[anyobject], + proto=1, + doc="""Build a class instance. + + This is the protocol 1 version of protocol 0's INST opcode, and is + very much like it. The major difference is that the class object + is taken off the stack, allowing it to be retrieved from the memo + repeatedly if several instances of the same class are created. This + can be much more efficient (in both time and space) than repeatedly + embedding the module and class names in INST opcodes. + + Unlike INST, OBJ takes no arguments from the opcode stream. Instead + the class object is taken off the stack, immediately above the + topmost markobject: + + Stack before: ... markobject classobject stackslice + Stack after: ... new_instance_object + + As for INST, the remainder of the stack above the markobject is + gathered into an argument tuple, and then the logic seems identical, + except that no __safe_for_unpickling__ check is done (XXX this is + a bug; cPickle does test __safe_for_unpickling__). See INST for + the gory details. + + NOTE: In Python 2.3, INST and OBJ are identical except for how they + get the class object. That was always the intent; the implementations + had diverged for accidental reasons. + """), + + I(name='NEWOBJ', + code='\x81', + arg=None, + stack_before=[anyobject, anyobject], + stack_after=[anyobject], + proto=2, + doc="""Build an object instance. + + The stack before should be thought of as containing a class + object followed by an argument tuple (the tuple being the stack + top). Call these cls and args. They are popped off the stack, + and the value returned by cls.__new__(cls, *args) is pushed back + onto the stack. + """), + + # Machine control. + + I(name='PROTO', + code='\x80', + arg=uint1, + stack_before=[], + stack_after=[], + proto=2, + doc="""Protocol version indicator. + + For protocol 2 and above, a pickle must start with this opcode. + The argument is the protocol version, an int in range(2, 256). + """), + + I(name='STOP', + code='.', + arg=None, + stack_before=[anyobject], + stack_after=[], + proto=0, + doc="""Stop the unpickling machine. + + Every pickle ends with this opcode. 
The object at the top of the stack + is popped, and that's the result of unpickling. The stack should be + empty then. + """), + + # Ways to deal with persistent IDs. + + I(name='PERSID', + code='P', + arg=stringnl_noescape, + stack_before=[], + stack_after=[anyobject], + proto=0, + doc="""Push an object identified by a persistent ID. + + The pickle module doesn't define what a persistent ID means. PERSID's + argument is a newline-terminated str-style (no embedded escapes, no + bracketing quote characters) string, which *is* "the persistent ID". + The unpickler passes this string to self.persistent_load(). Whatever + object that returns is pushed on the stack. There is no implementation + of persistent_load() in Python's unpickler: it must be supplied by an + unpickler subclass. + """), + + I(name='BINPERSID', + code='Q', + arg=None, + stack_before=[anyobject], + stack_after=[anyobject], + proto=1, + doc="""Push an object identified by a persistent ID. + + Like PERSID, except the persistent ID is popped off the stack (instead + of being a string embedded in the opcode bytestream). The persistent + ID is passed to self.persistent_load(), and whatever object that + returns is pushed on the stack. See PERSID for more detail. + """), +] +del I + +# Verify uniqueness of .name and .code members. +name2i = {} +code2i = {} + +for i, d in enumerate(opcodes): + if d.name in name2i: + raise ValueError("repeated name %r at indices %d and %d" % + (d.name, name2i[d.name], i)) + if d.code in code2i: + raise ValueError("repeated code %r at indices %d and %d" % + (d.code, code2i[d.code], i)) + + name2i[d.name] = i + code2i[d.code] = i + +del name2i, code2i, i, d + +############################################################################## +# Build a code2op dict, mapping opcode characters to OpcodeInfo records. +# Also ensure we've got the same stuff as pickle.py, although the +# introspection here is dicey. + +code2op = {} +for d in opcodes: + code2op[d.code] = d +del d + +def assure_pickle_consistency(verbose=False): + import pickle, re + + copy = code2op.copy() + for name in pickle.__all__: + if not re.match("[A-Z][A-Z0-9_]+$", name): + if verbose: + print "skipping %r: it doesn't look like an opcode name" % name + continue + picklecode = getattr(pickle, name) + if not isinstance(picklecode, str) or len(picklecode) != 1: + if verbose: + print ("skipping %r: value %r doesn't look like a pickle " + "code" % (name, picklecode)) + continue + if picklecode in copy: + if verbose: + print "checking name %r w/ code %r for consistency" % ( + name, picklecode) + d = copy[picklecode] + if d.name != name: + raise ValueError("for pickle code %r, pickle.py uses name %r " + "but we're using name %r" % (picklecode, + name, + d.name)) + # Forget this one. Any left over in copy at the end are a problem + # of a different kind. + del copy[picklecode] + else: + raise ValueError("pickle.py appears to have a pickle opcode with " + "name %r and code %r, but we don't" % + (name, picklecode)) + if copy: + msg = ["we appear to have pickle opcodes that pickle.py doesn't have:"] + for code, d in copy.items(): + msg.append(" name %r with code %r" % (d.name, code)) + raise ValueError("\n".join(msg)) + +assure_pickle_consistency() +del assure_pickle_consistency + +############################################################################## +# A pickle opcode generator. + +def genops(pickle): + """Generate all the opcodes in a pickle. + + 'pickle' is a file-like object, or string, containing the pickle. 
+ + Each opcode in the pickle is generated, from the current pickle position, + stopping after a STOP opcode is delivered. A triple is generated for + each opcode: + + opcode, arg, pos + + opcode is an OpcodeInfo record, describing the current opcode. + + If the opcode has an argument embedded in the pickle, arg is its decoded + value, as a Python object. If the opcode doesn't have an argument, arg + is None. + + If the pickle has a tell() method, pos was the value of pickle.tell() + before reading the current opcode. If the pickle is a string object, + it's wrapped in a StringIO object, and the latter's tell() result is + used. Else (the pickle doesn't have a tell(), and it's not obvious how + to query its current position) pos is None. + """ + + import cStringIO as StringIO + + if isinstance(pickle, str): + pickle = StringIO.StringIO(pickle) + + if hasattr(pickle, "tell"): + getpos = pickle.tell + else: + getpos = lambda: None + + while True: + pos = getpos() + code = pickle.read(1) + opcode = code2op.get(code) + if opcode is None: + if code == "": + raise ValueError("pickle exhausted before seeing STOP") + else: + raise ValueError("at position %s, opcode %r unknown" % ( + pos is None and "" or pos, + code)) + if opcode.arg is None: + arg = None + else: + arg = opcode.arg.reader(pickle) + yield opcode, arg, pos + if code == '.': + assert opcode.name == 'STOP' + break + +############################################################################## +# A symbolic pickle disassembler. + +def dis(pickle, out=None, memo=None, indentlevel=4): + """Produce a symbolic disassembly of a pickle. + + 'pickle' is a file-like object, or string, containing a (at least one) + pickle. The pickle is disassembled from the current position, through + the first STOP opcode encountered. + + Optional arg 'out' is a file-like object to which the disassembly is + printed. It defaults to sys.stdout. + + Optional arg 'memo' is a Python dict, used as the pickle's memo. It + may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes. + Passing the same memo object to another dis() call then allows disassembly + to proceed across multiple pickles that were all created by the same + pickler with the same memo. Ordinarily you don't need to worry about this. + + Optional arg indentlevel is the number of blanks by which to indent + a new MARK level. It defaults to 4. + + In addition to printing the disassembly, some sanity checks are made: + + + All embedded opcode arguments "make sense". + + + Explicit and implicit pop operations have enough items on the stack. + + + When an opcode implicitly refers to a markobject, a markobject is + actually on the stack. + + + A memo entry isn't referenced before it's defined. + + + The markobject isn't stored in the memo. + + + A memo entry isn't redefined. + """ + + # Most of the hair here is for sanity checks, but most of it is needed + # anyway to detect when a protocol 0 POP takes a MARK off the stack + # (which in turn is needed to indent MARK blocks correctly). 
+ + stack = [] # crude emulation of unpickler stack + if memo is None: + memo = {} # crude emulation of unpicker memo + maxproto = -1 # max protocol number seen + markstack = [] # bytecode positions of MARK opcodes + indentchunk = ' ' * indentlevel + errormsg = None + for opcode, arg, pos in genops(pickle): + if pos is not None: + print >> out, "%5d:" % pos, + + line = "%-4s %s%s" % (repr(opcode.code)[1:-1], + indentchunk * len(markstack), + opcode.name) + + maxproto = max(maxproto, opcode.proto) + before = opcode.stack_before # don't mutate + after = opcode.stack_after # don't mutate + numtopop = len(before) + + # See whether a MARK should be popped. + markmsg = None + if markobject in before or (opcode.name == "POP" and + stack and + stack[-1] is markobject): + assert markobject not in after + if __debug__: + if markobject in before: + assert before[-1] is stackslice + if markstack: + markpos = markstack.pop() + if markpos is None: + markmsg = "(MARK at unknown opcode offset)" + else: + markmsg = "(MARK at %d)" % markpos + # Pop everything at and after the topmost markobject. + while stack[-1] is not markobject: + stack.pop() + stack.pop() + # Stop later code from popping too much. + try: + numtopop = before.index(markobject) + except ValueError: + assert opcode.name == "POP" + numtopop = 0 + else: + errormsg = markmsg = "no MARK exists on stack" + + # Check for correct memo usage. + if opcode.name in ("PUT", "BINPUT", "LONG_BINPUT"): + assert arg is not None + if arg in memo: + errormsg = "memo key %r already defined" % arg + elif not stack: + errormsg = "stack is empty -- can't store into memo" + elif stack[-1] is markobject: + errormsg = "can't store markobject in the memo" + else: + memo[arg] = stack[-1] + + elif opcode.name in ("GET", "BINGET", "LONG_BINGET"): + if arg in memo: + assert len(after) == 1 + after = [memo[arg]] # for better stack emulation + else: + errormsg = "memo key %r has never been stored into" % arg + + if arg is not None or markmsg: + # make a mild effort to align arguments + line += ' ' * (10 - len(opcode.name)) + if arg is not None: + line += ' ' + repr(arg) + if markmsg: + line += ' ' + markmsg + print >> out, line + + if errormsg: + # Note that we delayed complaining until the offending opcode + # was printed. + raise ValueError(errormsg) + + # Emulate the stack effects. + if len(stack) < numtopop: + raise ValueError("tries to pop %d items from stack with " + "only %d items" % (numtopop, len(stack))) + if numtopop: + del stack[-numtopop:] + if markobject in after: + assert markobject not in before + markstack.append(pos) + + stack.extend(after) + + print >> out, "highest protocol among opcodes =", maxproto + if stack: + raise ValueError("stack not empty after STOP: %r" % stack) + +# For use in the doctest, simply as an example of a class to pickle. +class _Example: + def __init__(self, value): + self.value = value + +_dis_test = r""" +>>> import pickle +>>> x = [1, 2, (3, 4), {'abc': u"def"}] +>>> pkl = pickle.dumps(x, 0) +>>> dis(pkl) + 0: ( MARK + 1: l LIST (MARK at 0) + 2: p PUT 0 + 5: I INT 1 + 8: a APPEND + 9: I INT 2 + 12: a APPEND + 13: ( MARK + 14: I INT 3 + 17: I INT 4 + 20: t TUPLE (MARK at 13) + 21: p PUT 1 + 24: a APPEND + 25: ( MARK + 26: d DICT (MARK at 25) + 27: p PUT 2 + 30: S STRING 'abc' + 37: p PUT 3 + 40: V UNICODE u'def' + 45: p PUT 4 + 48: s SETITEM + 49: a APPEND + 50: . STOP +highest protocol among opcodes = 0 + +Try again with a "binary" pickle. 
+ +>>> pkl = pickle.dumps(x, 1) +>>> dis(pkl) + 0: ] EMPTY_LIST + 1: q BINPUT 0 + 3: ( MARK + 4: K BININT1 1 + 6: K BININT1 2 + 8: ( MARK + 9: K BININT1 3 + 11: K BININT1 4 + 13: t TUPLE (MARK at 8) + 14: q BINPUT 1 + 16: } EMPTY_DICT + 17: q BINPUT 2 + 19: U SHORT_BINSTRING 'abc' + 24: q BINPUT 3 + 26: X BINUNICODE u'def' + 34: q BINPUT 4 + 36: s SETITEM + 37: e APPENDS (MARK at 3) + 38: . STOP +highest protocol among opcodes = 1 + +Exercise the INST/OBJ/BUILD family. + +>>> dis(pickle.dumps(zip, 0)) + 0: c GLOBAL '__builtin__ zip' + 17: p PUT 0 + 20: . STOP +highest protocol among opcodes = 0 + +>>> from pickletools import _Example +>>> x = [_Example(42)] * 2 +>>> dis(pickle.dumps(x, 0)) + 0: ( MARK + 1: l LIST (MARK at 0) + 2: p PUT 0 + 5: ( MARK + 6: i INST 'pickletools _Example' (MARK at 5) + 28: p PUT 1 + 31: ( MARK + 32: d DICT (MARK at 31) + 33: p PUT 2 + 36: S STRING 'value' + 45: p PUT 3 + 48: I INT 42 + 52: s SETITEM + 53: b BUILD + 54: a APPEND + 55: g GET 1 + 58: a APPEND + 59: . STOP +highest protocol among opcodes = 0 + +>>> dis(pickle.dumps(x, 1)) + 0: ] EMPTY_LIST + 1: q BINPUT 0 + 3: ( MARK + 4: ( MARK + 5: c GLOBAL 'pickletools _Example' + 27: q BINPUT 1 + 29: o OBJ (MARK at 4) + 30: q BINPUT 2 + 32: } EMPTY_DICT + 33: q BINPUT 3 + 35: U SHORT_BINSTRING 'value' + 42: q BINPUT 4 + 44: K BININT1 42 + 46: s SETITEM + 47: b BUILD + 48: h BINGET 2 + 50: e APPENDS (MARK at 3) + 51: . STOP +highest protocol among opcodes = 1 + +Try "the canonical" recursive-object test. + +>>> L = [] +>>> T = L, +>>> L.append(T) +>>> L[0] is T +True +>>> T[0] is L +True +>>> L[0][0] is L +True +>>> T[0][0] is T +True +>>> dis(pickle.dumps(L, 0)) + 0: ( MARK + 1: l LIST (MARK at 0) + 2: p PUT 0 + 5: ( MARK + 6: g GET 0 + 9: t TUPLE (MARK at 5) + 10: p PUT 1 + 13: a APPEND + 14: . STOP +highest protocol among opcodes = 0 + +>>> dis(pickle.dumps(L, 1)) + 0: ] EMPTY_LIST + 1: q BINPUT 0 + 3: ( MARK + 4: h BINGET 0 + 6: t TUPLE (MARK at 3) + 7: q BINPUT 1 + 9: a APPEND + 10: . STOP +highest protocol among opcodes = 1 + +Note that, in the protocol 0 pickle of the recursive tuple, the disassembler +has to emulate the stack in order to realize that the POP opcode at 16 gets +rid of the MARK at 0. + +>>> dis(pickle.dumps(T, 0)) + 0: ( MARK + 1: ( MARK + 2: l LIST (MARK at 1) + 3: p PUT 0 + 6: ( MARK + 7: g GET 0 + 10: t TUPLE (MARK at 6) + 11: p PUT 1 + 14: a APPEND + 15: 0 POP + 16: 0 POP (MARK at 0) + 17: g GET 1 + 20: . STOP +highest protocol among opcodes = 0 + +>>> dis(pickle.dumps(T, 1)) + 0: ( MARK + 1: ] EMPTY_LIST + 2: q BINPUT 0 + 4: ( MARK + 5: h BINGET 0 + 7: t TUPLE (MARK at 4) + 8: q BINPUT 1 + 10: a APPEND + 11: 1 POP_MARK (MARK at 0) + 12: h BINGET 1 + 14: . STOP +highest protocol among opcodes = 1 + +Try protocol 2. + +>>> dis(pickle.dumps(L, 2)) + 0: \x80 PROTO 2 + 2: ] EMPTY_LIST + 3: q BINPUT 0 + 5: h BINGET 0 + 7: \x85 TUPLE1 + 8: q BINPUT 1 + 10: a APPEND + 11: . STOP +highest protocol among opcodes = 2 + +>>> dis(pickle.dumps(T, 2)) + 0: \x80 PROTO 2 + 2: ] EMPTY_LIST + 3: q BINPUT 0 + 5: h BINGET 0 + 7: \x85 TUPLE1 + 8: q BINPUT 1 + 10: a APPEND + 11: 0 POP + 12: h BINGET 1 + 14: . 
STOP +highest protocol among opcodes = 2 +""" + +_memo_test = r""" +>>> import pickle +>>> from StringIO import StringIO +>>> f = StringIO() +>>> p = pickle.Pickler(f, 2) +>>> x = [1, 2, 3] +>>> p.dump(x) +>>> p.dump(x) +>>> f.seek(0) +>>> memo = {} +>>> dis(f, memo=memo) + 0: \x80 PROTO 2 + 2: ] EMPTY_LIST + 3: q BINPUT 0 + 5: ( MARK + 6: K BININT1 1 + 8: K BININT1 2 + 10: K BININT1 3 + 12: e APPENDS (MARK at 5) + 13: . STOP +highest protocol among opcodes = 2 +>>> dis(f, memo=memo) + 14: \x80 PROTO 2 + 16: h BINGET 0 + 18: . STOP +highest protocol among opcodes = 2 +""" + +__test__ = {'disassembler_test': _dis_test, + 'disassembler_memo_test': _memo_test, + } + +def _test(): + import doctest + return doctest.testmod() + +if __name__ == "__main__": + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,252 @@ +"""Spawn a command with pipes to its stdin, stdout, and optionally stderr. + +The normal os.popen(cmd, mode) call spawns a shell command and provides a +file interface to just the input or output of the process depending on +whether mode is 'r' or 'w'. This module provides the functions popen2(cmd) +and popen3(cmd) which return two or three pipes to the spawned command. +""" + +import os +import sys + +__all__ = ["popen2", "popen3", "popen4"] + +try: + MAXFD = os.sysconf('SC_OPEN_MAX') +except (AttributeError, ValueError): + MAXFD = 256 + +_active = [] + +def _cleanup(): + for inst in _active[:]: + if inst.poll(_deadstate=sys.maxint) >= 0: + try: + _active.remove(inst) + except ValueError: + # This can happen if two threads create a new Popen instance. + # It's harmless that it was already removed, so ignore. + pass + +class Popen3: + """Class representing a child process. Normally instances are created + by the factory functions popen2() and popen3().""" + + sts = -1 # Child not completed yet + + def __init__(self, cmd, capturestderr=False, bufsize=-1): + """The parameter 'cmd' is the shell command to execute in a + sub-process. On UNIX, 'cmd' may be a sequence, in which case arguments + will be passed directly to the program without shell intervention (as + with os.spawnv()). If 'cmd' is a string it will be passed to the shell + (as with os.system()). The 'capturestderr' flag, if true, specifies + that the object should capture standard error output of the child + process. The default is false. If the 'bufsize' parameter is + specified, it specifies the size of the I/O buffers to/from the child + process.""" + _cleanup() + self.cmd = cmd + p2cread, p2cwrite = os.pipe() + c2pread, c2pwrite = os.pipe() + if capturestderr: + errout, errin = os.pipe() + gc.disable_finalizers() + try: + self.pid = os.fork() + if self.pid == 0: + # Child + os.dup2(p2cread, 0) + os.dup2(c2pwrite, 1) + if capturestderr: + os.dup2(errin, 2) + self._run_child(cmd) + finally: + gc.enable_finalizers() + os.close(p2cread) + self.tochild = os.fdopen(p2cwrite, 'w', bufsize) + os.close(c2pwrite) + self.fromchild = os.fdopen(c2pread, 'r', bufsize) + if capturestderr: + os.close(errin) + self.childerr = os.fdopen(errout, 'r', bufsize) + else: + self.childerr = None + + def __del__(self): + # In case the child hasn't been waited on, check if it's done. + self.poll(_deadstate=sys.maxint) + if self.sts < 0: + if _active is not None: + # Child is still running, keep us alive until we can wait on it. 
+ _active.append(self) + + def _run_child(self, cmd): + if isinstance(cmd, basestring): + cmd = ['/bin/sh', '-c', cmd] + for i in xrange(3, MAXFD): + try: + os.close(i) + except OSError: + pass + try: + os.execvp(cmd[0], cmd) + finally: + os._exit(1) + + def poll(self, _deadstate=None): + """Return the exit status of the child process if it has finished, + or -1 if it hasn't finished yet.""" + if self.sts < 0: + try: + pid, sts = os.waitpid(self.pid, os.WNOHANG) + # pid will be 0 if self.pid hasn't terminated + if pid == self.pid: + self.sts = sts + except os.error: + if _deadstate is not None: + self.sts = _deadstate + return self.sts + + def wait(self): + """Wait for and return the exit status of the child process.""" + if self.sts < 0: + pid, sts = os.waitpid(self.pid, 0) + # This used to be a test, but it is believed to be + # always true, so I changed it to an assertion - mvl + assert pid == self.pid + self.sts = sts + return self.sts + + +class Popen4(Popen3): + childerr = None + + def __init__(self, cmd, bufsize=-1): + _cleanup() + self.cmd = cmd + p2cread, p2cwrite = os.pipe() + c2pread, c2pwrite = os.pipe() + gc.disable_finalizers() + try: + self.pid = os.fork() + if self.pid == 0: + # Child + os.dup2(p2cread, 0) + os.dup2(c2pwrite, 1) + os.dup2(c2pwrite, 2) + self._run_child(cmd) + finally: + gc.enable_finalizers() + os.close(p2cread) + self.tochild = os.fdopen(p2cwrite, 'w', bufsize) + os.close(c2pwrite) + self.fromchild = os.fdopen(c2pread, 'r', bufsize) + + +if sys.platform[:3] == "win" or sys.platform == "os2emx": + # Some things don't make sense on non-Unix platforms. + del Popen3, Popen4 + + def popen2(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout, child_stdin) are returned.""" + w, r = os.popen2(cmd, mode, bufsize) + return r, w + + def popen3(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout, child_stdin, child_stderr) are returned.""" + w, r, e = os.popen3(cmd, mode, bufsize) + return r, w, e + + def popen4(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout_stderr, child_stdin) are returned.""" + w, r = os.popen4(cmd, mode, bufsize) + return r, w +else: + def popen2(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). 
If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout, child_stdin) are returned.""" + inst = Popen3(cmd, False, bufsize) + return inst.fromchild, inst.tochild + + def popen3(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout, child_stdin, child_stderr) are returned.""" + inst = Popen3(cmd, True, bufsize) + return inst.fromchild, inst.tochild, inst.childerr + + def popen4(cmd, bufsize=-1, mode='t'): + """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may + be a sequence, in which case arguments will be passed directly to the + program without shell intervention (as with os.spawnv()). If 'cmd' is a + string it will be passed to the shell (as with os.system()). If + 'bufsize' is specified, it sets the buffer size for the I/O pipes. The + file objects (child_stdout_stderr, child_stdin) are returned.""" + inst = Popen4(cmd, bufsize) + return inst.fromchild, inst.tochild + + __all__.extend(["Popen3", "Popen4"]) + +def _test(): + # When the test runs, there shouldn't be any open pipes + _cleanup() + assert not _active, "Active pipes when test starts " + repr([c.cmd for c in _active]) + cmd = "cat" + teststr = "ab cd\n" + if os.name == "nt": + cmd = "more" + # "more" doesn't act the same way across Windows flavors, + # sometimes adding an extra newline at the start or the + # end. So we strip whitespace off both ends for comparison. + expected = teststr.strip() + print "testing popen2..." + r, w = popen2(cmd) + w.write(teststr) + w.close() + got = r.read() + if got.strip() != expected: + raise ValueError("wrote %r read %r" % (teststr, got)) + print "testing popen3..." + try: + r, w, e = popen3([cmd]) + except: + r, w, e = popen3(cmd) + w.write(teststr) + w.close() + got = r.read() + if got.strip() != expected: + raise ValueError("wrote %r read %r" % (teststr, got)) + got = e.read() + if got: + raise ValueError("unexpected %r on stderr" % (got,)) + for inst in _active[:]: + inst.wait() + _cleanup() + if _active: + raise ValueError("_active not empty") + print "All OK" + +if __name__ == '__main__': + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,315 @@ +# Author: Fred L. Drake, Jr. +# fdrake at acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. + +"""Support to pretty-print lists, tuples, & dictionaries recursively. + +Very simple, but useful, especially in debugging data structures. + +Classes +------- + +PrettyPrinter() + Handle pretty-printing operations onto a stream using a configured + set of formatting parameters. 
+ +Functions +--------- + +pformat() + Format a Python object into a pretty-printed representation. + +pprint() + Pretty-print a Python object to a stream [default is sys.stdout]. + +saferepr() + Generate a 'standard' repr()-like value, but protect against recursive + data structures. + +""" + +import sys as _sys + +from cStringIO import StringIO as _StringIO + +__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", + "PrettyPrinter"] + +# cache these for faster access: +_commajoin = ", ".join +_id = id +_len = len +_type = type + + +def pprint(object, stream=None, indent=1, width=80, depth=None): + """Pretty-print a Python object to a stream [default is sys.stdout].""" + printer = PrettyPrinter( + stream=stream, indent=indent, width=width, depth=depth) + printer.pprint(object) + +def pformat(object, indent=1, width=80, depth=None): + """Format a Python object into a pretty-printed representation.""" + return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object) + +def saferepr(object): + """Version of repr() which can handle recursive data structures.""" + return _safe_repr(object, {}, None, 0)[0] + +def isreadable(object): + """Determine if saferepr(object) is readable by eval().""" + return _safe_repr(object, {}, None, 0)[1] + +def isrecursive(object): + """Determine if object requires a recursive representation.""" + return _safe_repr(object, {}, None, 0)[2] + +class PrettyPrinter: + def __init__(self, indent=1, width=80, depth=None, stream=None): + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. 
+ + """ + indent = int(indent) + width = int(width) + assert indent >= 0, "indent must be >= 0" + assert depth is None or depth > 0, "depth must be > 0" + assert width, "width must be != 0" + self._depth = depth + self._indent_per_level = indent + self._width = width + if stream is not None: + self._stream = stream + else: + self._stream = _sys.stdout + + def pprint(self, object): + self._format(object, self._stream, 0, 0, {}, 0) + self._stream.write("\n") + + def pformat(self, object): + sio = _StringIO() + self._format(object, sio, 0, 0, {}, 0) + return sio.getvalue() + + def isrecursive(self, object): + return self.format(object, {}, 0, 0)[2] + + def isreadable(self, object): + s, readable, recursive = self.format(object, {}, 0, 0) + return readable and not recursive + + def _format(self, object, stream, indent, allowance, context, level): + level = level + 1 + objid = _id(object) + if objid in context: + stream.write(_recursion(object)) + self._recursive = True + self._readable = False + return + rep = self._repr(object, context, level - 1) + typ = _type(object) + sepLines = _len(rep) > (self._width - 1 - indent - allowance) + write = stream.write + + if sepLines: + r = getattr(typ, "__repr__", None) + if issubclass(typ, dict) and r == dict.__repr__: + write('{') + if self._indent_per_level > 1: + write((self._indent_per_level - 1) * ' ') + length = _len(object) + if length: + context[objid] = 1 + indent = indent + self._indent_per_level + items = object.items() + items.sort() + key, ent = items[0] + rep = self._repr(key, context, level) + write(rep) + write(': ') + self._format(ent, stream, indent + _len(rep) + 2, + allowance + 1, context, level) + if length > 1: + for key, ent in items[1:]: + rep = self._repr(key, context, level) + write(',\n%s%s: ' % (' '*indent, rep)) + self._format(ent, stream, indent + _len(rep) + 2, + allowance + 1, context, level) + indent = indent - self._indent_per_level + del context[objid] + write('}') + return + + if (issubclass(typ, list) and r == list.__repr__) or \ + (issubclass(typ, tuple) and r == tuple.__repr__): + if issubclass(typ, list): + write('[') + endchar = ']' + else: + write('(') + endchar = ')' + if self._indent_per_level > 1: + write((self._indent_per_level - 1) * ' ') + length = _len(object) + if length: + context[objid] = 1 + indent = indent + self._indent_per_level + self._format(object[0], stream, indent, allowance + 1, + context, level) + if length > 1: + for ent in object[1:]: + write(',\n' + ' '*indent) + self._format(ent, stream, indent, + allowance + 1, context, level) + indent = indent - self._indent_per_level + del context[objid] + if issubclass(typ, tuple) and length == 1: + write(',') + write(endchar) + return + + write(rep) + + def _repr(self, object, context, level): + repr, readable, recursive = self.format(object, context.copy(), + self._depth, level) + if not readable: + self._readable = False + if recursive: + self._recursive = True + return repr + + def format(self, object, context, maxlevels, level): + """Format object for a specific context, returning a string + and flags indicating whether the representation is 'readable' + and whether the object represents a recursive construct. + """ + return _safe_repr(object, context, maxlevels, level) + + +# Return triple (repr_string, isreadable, isrecursive). 
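Before the module-level _safe_repr helper that the comment above introduces, a
short usage sketch of the public interface defined earlier (editorial, not part
of the committed file; it assumes the file is importable as pprint under
CPython 2.x):

    from pprint import PrettyPrinter, saferepr

    data = {'numbers': range(10), 'nested': {'pair': (1, 2)}}
    printer = PrettyPrinter(indent=2, width=30)
    print printer.pformat(data)   # multi-line, indented representation
    print saferepr(data)          # single-line repr, safe on self-referential data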
+
+def _safe_repr(object, context, maxlevels, level):
+    typ = _type(object)
+    if typ is str:
+        if 'locale' not in _sys.modules:
+            return repr(object), True, False
+        if "'" in object and '"' not in object:
+            closure = '"'
+            quotes = {'"': '\\"'}
+        else:
+            closure = "'"
+            quotes = {"'": "\\'"}
+        qget = quotes.get
+        sio = _StringIO()
+        write = sio.write
+        for char in object:
+            if char.isalpha():
+                write(char)
+            else:
+                write(qget(char, repr(char)[1:-1]))
+        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False
+
+    r = getattr(typ, "__repr__", None)
+    if issubclass(typ, dict) and r == dict.__repr__:
+        if not object:
+            return "{}", True, False
+        objid = _id(object)
+        if maxlevels and level > maxlevels:
+            return "{...}", False, objid in context
+        if objid in context:
+            return _recursion(object), False, True
+        context[objid] = 1
+        readable = True
+        recursive = False
+        components = []
+        append = components.append
+        level += 1
+        saferepr = _safe_repr
+        for k, v in sorted(object.items()):
+            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
+            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
+            append("%s: %s" % (krepr, vrepr))
+            readable = readable and kreadable and vreadable
+            if krecur or vrecur:
+                recursive = True
+        del context[objid]
+        return "{%s}" % _commajoin(components), readable, recursive
+
+    if (issubclass(typ, list) and r == list.__repr__) or \
+       (issubclass(typ, tuple) and r == tuple.__repr__):
+        if issubclass(typ, list):
+            if not object:
+                return "[]", True, False
+            format = "[%s]"
+        elif _len(object) == 1:
+            format = "(%s,)"
+        else:
+            if not object:
+                return "()", True, False
+            format = "(%s)"
+        objid = _id(object)
+        if maxlevels and level > maxlevels:
+            return format % "...", False, objid in context
+        if objid in context:
+            return _recursion(object), False, True
+        context[objid] = 1
+        readable = True
+        recursive = False
+        components = []
+        append = components.append
+        level += 1
+        for o in object:
+            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
+            append(orepr)
+            if not oreadable:
+                readable = False
+            if orecur:
+                recursive = True
+        del context[objid]
+        return format % _commajoin(components), readable, recursive
+
+    rep = repr(object)
+    return rep, (rep and not rep.startswith('<')), False
+
+
+def _recursion(object):
+    return ("<Recursion on %s with id=%s>"
+            % (_type(object).__name__, _id(object)))
+
+
+def _perfcheck(object=None):
+    import time
+    if object is None:
+        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
+    p = PrettyPrinter()
+    t1 = time.time()
+    _safe_repr(object, {}, None, 0)
+    t2 = time.time()
+    p.pformat(object)
+    t3 = time.time()
+    print "_safe_repr:", t2 - t1
+    print "pformat:", t3 - t2
+
+if __name__ == "__main__":
+    _perfcheck()

Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py
==============================================================================
--- (empty file)
+++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py	Mon Aug 11 22:10:30 2008
@@ -0,0 +1,2259 @@
+#!/usr/bin/env python
+# -*- coding: Latin-1 -*-
+"""Generate Python documentation in HTML or text for interactive use.
+
+In the Python interpreter, do "from pydoc import help" to provide online
+help. Calling help(thing) on a Python object documents the object.
+
+Or, at the shell command line outside of Python:
+
+Run "pydoc <name>" to show documentation on something. <name> may be
+the name of a function, module, package, or a dotted reference to a
+class or function within a module or module in a package. If the
+argument contains a path segment delimiter (e.g. slash on Unix,
+backslash on Windows) it is treated as the path to a Python source file.
+
+Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
+of all available modules.
+
+Run "pydoc -p <port>" to start an HTTP server on a given port on the
+local machine to generate documentation web pages.
+
+For platforms without a command line, "pydoc -g" starts the HTTP server
+and also pops up a little window for controlling it.
+
+Run "pydoc -w <name>" to write out the HTML documentation for a module
+to a file named "<name>.html".
+
+Module docs for core modules are assumed to be in
+
+    http://www.python.org/doc/current/lib/
+
+This can be overridden by setting the PYTHONDOCS environment variable
+to a different URL or to a local directory containing the Library
+Reference Manual pages.
+"""
+
+__author__ = "Ka-Ping Yee <ping@lfw.org>"
+__date__ = "26 February 2001"
+
+__version__ = "$Revision: 54366 $"
+__credits__ = """Guido van Rossum, for an excellent programming language.
+Tommy Burnette, the original creator of manpy.
+Paul Prescod, for all his work on onlinehelp.
+Richard Chamberlain, for the first implementation of textdoc.
+"""
+
+# Known bugs that can't be fixed here:
+#   - imp.load_module() cannot be prevented from clobbering existing
+#     loaded modules, so calling synopsis() on a binary module file
+#     changes the contents of any existing module with the same name.
+#   - If the __file__ attribute on a module is a relative path and
+#     the current directory is changed with os.chdir(), an incorrect
+#     path will be displayed.
+
+import sys, imp, os, re, types, inspect, __builtin__, pkgutil
+from repr import Repr
+from string import expandtabs, find, join, lower, split, strip, rfind, rstrip
+try:
+    from collections import deque
+except ImportError:
+    # Python 2.3 compatibility
+    class deque(list):
+        def popleft(self):
+            return self.pop(0)
+
+# --------------------------------------------------------- common routines
+
+def pathdirs():
+    """Convert sys.path into a list of absolute, existing, unique paths."""
+    dirs = []
+    normdirs = []
+    for dir in sys.path:
+        dir = os.path.abspath(dir or '.')
+        normdir = os.path.normcase(dir)
+        if normdir not in normdirs and os.path.isdir(dir):
+            dirs.append(dir)
+            normdirs.append(normdir)
+    return dirs
+
+def getdoc(object):
+    """Get the doc string or comments for an object."""
+    result = inspect.getdoc(object) or inspect.getcomments(object)
+    return result and re.sub('^ *\n', '', rstrip(result)) or ''
+
+def splitdoc(doc):
+    """Split a doc string into a synopsis line (if any) and the rest."""
+    lines = split(strip(doc), '\n')
+    if len(lines) == 1:
+        return lines[0], ''
+    elif len(lines) >= 2 and not rstrip(lines[1]):
+        return lines[0], join(lines[2:], '\n')
+    return '', join(lines, '\n')
+
+def classname(object, modname):
+    """Get a class name and qualify it with a module name if necessary."""
+    name = object.__name__
+    if object.__module__ != modname:
+        name = object.__module__ + '.'
+ name + return name + +def isdata(object): + """Check if an object is of a type that probably means it's data.""" + return not (inspect.ismodule(object) or inspect.isclass(object) or + inspect.isroutine(object) or inspect.isframe(object) or + inspect.istraceback(object) or inspect.iscode(object)) + +def replace(text, *pairs): + """Do a series of global replacements on a string.""" + while pairs: + text = join(split(text, pairs[0]), pairs[1]) + pairs = pairs[2:] + return text + +def cram(text, maxlen): + """Omit part of a string if needed to make it fit in a maximum length.""" + if len(text) > maxlen: + pre = max(0, (maxlen-3)//2) + post = max(0, maxlen-3-pre) + return text[:pre] + '...' + text[len(text)-post:] + return text + +_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE) +def stripid(text): + """Remove the hexadecimal id from a Python object representation.""" + # The behaviour of %p is implementation-dependent in terms of case. + if _re_stripid.search(repr(Exception)): + return _re_stripid.sub(r'\1', text) + return text + +def _is_some_method(obj): + return inspect.ismethod(obj) or inspect.ismethoddescriptor(obj) + +def allmethods(cl): + methods = {} + for key, value in inspect.getmembers(cl, _is_some_method): + methods[key] = 1 + for base in cl.__bases__: + methods.update(allmethods(base)) # all your base are belong to us + for key in methods.keys(): + methods[key] = getattr(cl, key) + return methods + +def _split_list(s, predicate): + """Split sequence s via predicate, and return pair ([true], [false]). + + The return value is a 2-tuple of lists, + ([x for x in s if predicate(x)], + [x for x in s if not predicate(x)]) + """ + + yes = [] + no = [] + for x in s: + if predicate(x): + yes.append(x) + else: + no.append(x) + return yes, no + +def visiblename(name, all=None): + """Decide whether to show documentation on a variable.""" + # Certain special names are redundant. + if name in ('__builtins__', '__doc__', '__file__', '__path__', + '__module__', '__name__', '__slots__'): return 0 + # Private names are hidden, but special names are displayed. 
+ if name.startswith('__') and name.endswith('__'): return 1 + if all is not None: + # only document that which the programmer exported in __all__ + return name in all + else: + return not name.startswith('_') + +def classify_class_attrs(object): + """Wrap inspect.classify_class_attrs, with fixup for data descriptors.""" + def fixup((name, kind, cls, value)): + if inspect.isdatadescriptor(value): + kind = 'data descriptor' + return name, kind, cls, value + return map(fixup, inspect.classify_class_attrs(object)) + +# ----------------------------------------------------- module manipulation + +def ispackage(path): + """Guess whether a path refers to a package directory.""" + if os.path.isdir(path): + for ext in ('.py', '.pyc', '.pyo'): + if os.path.isfile(os.path.join(path, '__init__' + ext)): + return True + return False + +def source_synopsis(file): + line = file.readline() + while line[:1] == '#' or not strip(line): + line = file.readline() + if not line: break + line = strip(line) + if line[:4] == 'r"""': line = line[1:] + if line[:3] == '"""': + line = line[3:] + if line[-1:] == '\\': line = line[:-1] + while not strip(line): + line = file.readline() + if not line: break + result = strip(split(line, '"""')[0]) + else: result = None + return result + +def synopsis(filename, cache={}): + """Get the one-line summary out of a module file.""" + mtime = os.stat(filename).st_mtime + lastupdate, result = cache.get(filename, (0, None)) + if lastupdate < mtime: + info = inspect.getmoduleinfo(filename) + try: + file = open(filename) + except IOError: + # module can't be opened, so skip it + return None + if info and 'b' in info[2]: # binary modules have to be imported + try: module = imp.load_module('__temp__', file, filename, info[1:]) + except: return None + result = (module.__doc__ or '').splitlines()[0] + del sys.modules['__temp__'] + else: # text modules can be directly examined + result = source_synopsis(file) + file.close() + cache[filename] = (mtime, result) + return result + +class ErrorDuringImport(Exception): + """Errors that occurred while trying to import something to document it.""" + def __init__(self, filename, (exc, value, tb)): + self.filename = filename + self.exc = exc + self.value = value + self.tb = tb + + def __str__(self): + exc = self.exc + if type(exc) is types.ClassType: + exc = exc.__name__ + return 'problem in %s - %s: %s' % (self.filename, exc, self.value) + +def importfile(path): + """Import a Python source file or compiled file given its path.""" + magic = imp.get_magic() + file = open(path, 'r') + if file.read(len(magic)) == magic: + kind = imp.PY_COMPILED + else: + kind = imp.PY_SOURCE + file.close() + filename = os.path.basename(path) + name, ext = os.path.splitext(filename) + file = open(path, 'r') + try: + module = imp.load_module(name, file, path, (ext, 'r', kind)) + except: + raise ErrorDuringImport(path, sys.exc_info()) + file.close() + return module + +def safeimport(path, forceload=0, cache={}): + """Import a module; handle errors; return None if the module isn't found. + + If the module *is* found but an exception occurs, it's wrapped in an + ErrorDuringImport exception and reraised. Unlike __import__, if a + package path is specified, the module at the end of the path is returned, + not the package at the beginning. If the optional 'forceload' argument + is 1, we reload the module from disk (unless it's a dynamic extension).""" + try: + # If forceload is 1 and the module has been previously loaded from + # disk, we always have to reload the module. 
Checking the file's + # mtime isn't good enough (e.g. the module could contain a class + # that inherits from another module that has changed). + if forceload and path in sys.modules: + if path not in sys.builtin_module_names: + # Avoid simply calling reload() because it leaves names in + # the currently loaded module lying around if they're not + # defined in the new source file. Instead, remove the + # module from sys.modules and re-import. Also remove any + # submodules because they won't appear in the newly loaded + # module's namespace if they're already in sys.modules. + subs = [m for m in sys.modules if m.startswith(path + '.')] + for key in [path] + subs: + # Prevent garbage collection. + cache[key] = sys.modules[key] + del sys.modules[key] + module = __import__(path) + except: + # Did the error occur before or after the module was found? + (exc, value, tb) = info = sys.exc_info() + if path in sys.modules: + # An error occurred while executing the imported module. + raise ErrorDuringImport(sys.modules[path].__file__, info) + elif exc is SyntaxError: + # A SyntaxError occurred before we could execute the module. + raise ErrorDuringImport(value.filename, info) + elif exc is ImportError and \ + split(lower(str(value)))[:2] == ['no', 'module']: + # The module was not found. + return None + else: + # Some other error occurred during the importing process. + raise ErrorDuringImport(path, sys.exc_info()) + for part in split(path, '.')[1:]: + try: module = getattr(module, part) + except AttributeError: return None + return module + +# ---------------------------------------------------- formatter base class + +class Doc: + def document(self, object, name=None, *args): + """Generate documentation for an object.""" + args = (object, name) + args + # 'try' clause is to attempt to handle the possibility that inspect + # identifies something in a way that pydoc itself has issues handling; + # think 'super' and how it is a descriptor (which raises the exception + # by lacking a __name__ attribute) and an instance. 
+ if inspect.isgetsetdescriptor(object): return self.docdata(*args) + if inspect.ismemberdescriptor(object): return self.docdata(*args) + try: + if inspect.ismodule(object): return self.docmodule(*args) + if inspect.isclass(object): return self.docclass(*args) + if inspect.isroutine(object): return self.docroutine(*args) + except AttributeError: + pass + if isinstance(object, property): return self.docproperty(*args) + return self.docother(*args) + + def fail(self, object, name=None, *args): + """Raise an exception for unimplemented types.""" + message = "don't know how to document object%s of type %s" % ( + name and ' ' + repr(name), type(object).__name__) + raise TypeError, message + + docmodule = docclass = docroutine = docother = docproperty = docdata = fail + + def getdocloc(self, object): + """Return the location of module docs or None""" + + try: + file = inspect.getabsfile(object) + except TypeError: + file = '(built-in)' + + docloc = os.environ.get("PYTHONDOCS", + "http://www.python.org/doc/current/lib") + basedir = os.path.join(sys.exec_prefix, "lib", + "python"+sys.version[0:3]) + if (isinstance(object, type(os)) and + (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', + 'marshal', 'posix', 'signal', 'sys', + 'thread', 'zipimport') or + (file.startswith(basedir) and + not file.startswith(os.path.join(basedir, 'site-packages'))))): + htmlfile = "module-%s.html" % object.__name__ + if docloc.startswith("http://"): + docloc = "%s/%s" % (docloc.rstrip("/"), htmlfile) + else: + docloc = os.path.join(docloc, htmlfile) + else: + docloc = None + return docloc + +# -------------------------------------------- HTML documentation generator + +class HTMLRepr(Repr): + """Class for safely making an HTML representation of a Python object.""" + def __init__(self): + Repr.__init__(self) + self.maxlist = self.maxtuple = 20 + self.maxdict = 10 + self.maxstring = self.maxother = 100 + + def escape(self, text): + return replace(text, '&', '&', '<', '<', '>', '>') + + def repr(self, object): + return Repr.repr(self, object) + + def repr1(self, x, level): + if hasattr(type(x), '__name__'): + methodname = 'repr_' + join(split(type(x).__name__), '_') + if hasattr(self, methodname): + return getattr(self, methodname)(x, level) + return self.escape(cram(stripid(repr(x)), self.maxother)) + + def repr_string(self, x, level): + test = cram(x, self.maxstring) + testrepr = repr(test) + if '\\' in test and '\\' not in replace(testrepr, r'\\', ''): + # Backslashes are only literal in the string and are never + # needed to make any special characters, so show a raw string. + return 'r' + testrepr[0] + self.escape(test) + testrepr[0] + return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)', + r'\1', + self.escape(testrepr)) + + repr_str = repr_string + + def repr_instance(self, x, level): + try: + return self.escape(cram(stripid(repr(x)), self.maxstring)) + except: + return self.escape('<%s instance>' % x.__class__.__name__) + + repr_unicode = repr_string + +class HTMLDoc(Doc): + """Formatter class for HTML documentation.""" + + # ------------------------------------------- HTML formatting utilities + + _repr_instance = HTMLRepr() + repr = _repr_instance.repr + escape = _repr_instance.escape + + def page(self, title, contents): + """Format an HTML page.""" + return ''' + +Python: %s + +%s +''' % (title, contents) + + def heading(self, title, fgcol, bgcol, extras=''): + """Format a page heading.""" + return ''' + + +
 
+ 
%s
%s
+ ''' % (bgcol, fgcol, title, fgcol, extras or ' ') + + def section(self, title, fgcol, bgcol, contents, width=6, + prelude='', marginalia=None, gap=' '): + """Format a section with a heading.""" + if marginalia is None: + marginalia = '' + ' ' * width + '' + result = '''

+ + + + ''' % (bgcol, fgcol, title) + if prelude: + result = result + ''' + + +''' % (bgcol, marginalia, prelude, gap) + else: + result = result + ''' +''' % (bgcol, marginalia, gap) + + return result + '\n
 
+%s
%s%s
%s
%s%s%s
' % contents + + def bigsection(self, title, *args): + """Format a section with a big heading.""" + title = '%s' % title + return self.section(title, *args) + + def preformat(self, text): + """Format literal preformatted text.""" + text = self.escape(expandtabs(text)) + return replace(text, '\n\n', '\n \n', '\n\n', '\n \n', + ' ', ' ', '\n', '
\n') + + def multicolumn(self, list, format, cols=4): + """Format a list of items into a multi-column list.""" + result = '' + rows = (len(list)+cols-1)/cols + for col in range(cols): + result = result + '' % (100/cols) + for i in range(rows*col, rows*col+rows): + if i < len(list): + result = result + format(list[i]) + '
\n' + result = result + '' + return '%s
' % result + + def grey(self, text): return '%s' % text + + def namelink(self, name, *dicts): + """Make a link for an identifier, given name-to-URL mappings.""" + for dict in dicts: + if name in dict: + return '%s' % (dict[name], name) + return name + + def classlink(self, object, modname): + """Make a link for a class.""" + name, module = object.__name__, sys.modules.get(object.__module__) + if hasattr(module, name) and getattr(module, name) is object: + return '%s' % ( + module.__name__, name, classname(object, modname)) + return classname(object, modname) + + def modulelink(self, object): + """Make a link for a module.""" + return '%s' % (object.__name__, object.__name__) + + def modpkglink(self, (name, path, ispackage, shadowed)): + """Make a link for a module or package to display in an index.""" + if shadowed: + return self.grey(name) + if path: + url = '%s.%s.html' % (path, name) + else: + url = '%s.html' % name + if ispackage: + text = '%s (package)' % name + else: + text = name + return '%s' % (url, text) + + def markup(self, text, escape=None, funcs={}, classes={}, methods={}): + """Mark up some plain text, given a context of symbols to look for. + Each context dictionary maps object names to anchor names.""" + escape = escape or self.escape + results = [] + here = 0 + pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|' + r'RFC[- ]?(\d+)|' + r'PEP[- ]?(\d+)|' + r'(self\.)?(\w+))') + while True: + match = pattern.search(text, here) + if not match: break + start, end = match.span() + results.append(escape(text[here:start])) + + all, scheme, rfc, pep, selfdot, name = match.groups() + if scheme: + url = escape(all).replace('"', '"') + results.append('%s' % (url, url)) + elif rfc: + url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc) + results.append('%s' % (url, escape(all))) + elif pep: + url = 'http://www.python.org/peps/pep-%04d.html' % int(pep) + results.append('%s' % (url, escape(all))) + elif text[end:end+1] == '(': + results.append(self.namelink(name, methods, funcs, classes)) + elif selfdot: + results.append('self.%s' % name) + else: + results.append(self.namelink(name, classes)) + here = end + results.append(escape(text[here:])) + return join(results, '') + + # ---------------------------------------------- type-specific routines + + def formattree(self, tree, modname, parent=None): + """Produce HTML for a class tree as given by inspect.getclasstree().""" + result = '' + for entry in tree: + if type(entry) is type(()): + c, bases = entry + result = result + '

' + result = result + self.classlink(c, modname) + if bases and bases != (parent,): + parents = [] + for base in bases: + parents.append(self.classlink(base, modname)) + result = result + '(' + join(parents, ', ') + ')' + result = result + '\n
' + elif type(entry) is type([]): + result = result + '
\n%s
\n' % self.formattree( + entry, modname, c) + return '
\n%s
\n' % result + + def docmodule(self, object, name=None, mod=None, *ignored): + """Produce HTML documentation for a module object.""" + name = object.__name__ # ignore the passed-in name + try: + all = object.__all__ + except AttributeError: + all = None + parts = split(name, '.') + links = [] + for i in range(len(parts)-1): + links.append( + '%s' % + (join(parts[:i+1], '.'), parts[i])) + linkedname = join(links + parts[-1:], '.') + head = '%s' % linkedname + try: + path = inspect.getabsfile(object) + url = path + if sys.platform == 'win32': + import nturl2path + url = nturl2path.pathname2url(path) + filelink = '%s' % (url, path) + except TypeError: + filelink = '(built-in)' + info = [] + if hasattr(object, '__version__'): + version = str(object.__version__) + if version[:11] == '$' + 'Revision: ' and version[-1:] == '$': + version = strip(version[11:-1]) + info.append('version %s' % self.escape(version)) + if hasattr(object, '__date__'): + info.append(self.escape(str(object.__date__))) + if info: + head = head + ' (%s)' % join(info, ', ') + docloc = self.getdocloc(object) + if docloc is not None: + docloc = '
Module Docs' % locals() + else: + docloc = '' + result = self.heading( + head, '#ffffff', '#7799ee', + 'index
' + filelink + docloc) + + modules = inspect.getmembers(object, inspect.ismodule) + + classes, cdict = [], {} + for key, value in inspect.getmembers(object, inspect.isclass): + # if __all__ exists, believe it. Otherwise use old heuristic. + if (all is not None or + (inspect.getmodule(value) or object) is object): + if visiblename(key, all): + classes.append((key, value)) + cdict[key] = cdict[value] = '#' + key + for key, value in classes: + for base in value.__bases__: + key, modname = base.__name__, base.__module__ + module = sys.modules.get(modname) + if modname != name and module and hasattr(module, key): + if getattr(module, key) is base: + if not key in cdict: + cdict[key] = cdict[base] = modname + '.html#' + key + funcs, fdict = [], {} + for key, value in inspect.getmembers(object, inspect.isroutine): + # if __all__ exists, believe it. Otherwise use old heuristic. + if (all is not None or + inspect.isbuiltin(value) or inspect.getmodule(value) is object): + if visiblename(key, all): + funcs.append((key, value)) + fdict[key] = '#-' + key + if inspect.isfunction(value): fdict[value] = fdict[key] + data = [] + for key, value in inspect.getmembers(object, isdata): + if visiblename(key, all): + data.append((key, value)) + + doc = self.markup(getdoc(object), self.preformat, fdict, cdict) + doc = doc and '%s' % doc + result = result + '

%s

\n' % doc + + if hasattr(object, '__path__'): + modpkgs = [] + for importer, modname, ispkg in pkgutil.iter_modules(object.__path__): + modpkgs.append((modname, name, ispkg, 0)) + modpkgs.sort() + contents = self.multicolumn(modpkgs, self.modpkglink) + result = result + self.bigsection( + 'Package Contents', '#ffffff', '#aa55cc', contents) + elif modules: + contents = self.multicolumn( + modules, lambda (key, value), s=self: s.modulelink(value)) + result = result + self.bigsection( + 'Modules', '#fffff', '#aa55cc', contents) + + if classes: + classlist = map(lambda (key, value): value, classes) + contents = [ + self.formattree(inspect.getclasstree(classlist, 1), name)] + for key, value in classes: + contents.append(self.document(value, key, name, fdict, cdict)) + result = result + self.bigsection( + 'Classes', '#ffffff', '#ee77aa', join(contents)) + if funcs: + contents = [] + for key, value in funcs: + contents.append(self.document(value, key, name, fdict, cdict)) + result = result + self.bigsection( + 'Functions', '#ffffff', '#eeaa77', join(contents)) + if data: + contents = [] + for key, value in data: + contents.append(self.document(value, key)) + result = result + self.bigsection( + 'Data', '#ffffff', '#55aa55', join(contents, '
\n')) + if hasattr(object, '__author__'): + contents = self.markup(str(object.__author__), self.preformat) + result = result + self.bigsection( + 'Author', '#ffffff', '#7799ee', contents) + if hasattr(object, '__credits__'): + contents = self.markup(str(object.__credits__), self.preformat) + result = result + self.bigsection( + 'Credits', '#ffffff', '#7799ee', contents) + + return result + + def docclass(self, object, name=None, mod=None, funcs={}, classes={}, + *ignored): + """Produce HTML documentation for a class object.""" + realname = object.__name__ + name = name or realname + bases = object.__bases__ + + contents = [] + push = contents.append + + # Cute little class to pump out a horizontal rule between sections. + class HorizontalRule: + def __init__(self): + self.needone = 0 + def maybe(self): + if self.needone: + push('
\n') + self.needone = 1 + hr = HorizontalRule() + + # List the mro, if non-trivial. + mro = deque(inspect.getmro(object)) + if len(mro) > 2: + hr.maybe() + push('
<dl><dt>Method resolution order:</dt>
\n') + for base in mro: + push('
<dd>%s</dd>
\n' % self.classlink(base, + object.__module__)) + push('</dl>
\n') + + def spill(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + push(self.document(getattr(object, name), name, mod, + funcs, classes, mdict, object)) + push('\n') + return attrs + + def spilldescriptors(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + push(self._docdescriptor(name, value, mod)) + return attrs + + def spilldata(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + base = self.docother(getattr(object, name), name, mod) + if callable(value) or inspect.isdatadescriptor(value): + doc = getattr(value, "__doc__", None) + else: + doc = None + if doc is None: + push('
<dl><dt>%s</dl>
\n' % base) + else: + doc = self.markup(getdoc(value), self.preformat, + funcs, classes, mdict) + doc = '
<dd><tt>%s</tt>' % doc + push('
<dl><dt>%s%s</dl>
\n' % (base, doc)) + push('\n') + return attrs + + attrs = filter(lambda (name, kind, cls, value): visiblename(name), + classify_class_attrs(object)) + mdict = {} + for key, kind, homecls, value in attrs: + mdict[key] = anchor = '#' + name + '-' + key + value = getattr(object, key) + try: + # The value may not be hashable (e.g., a data attr with + # a dict or list value). + mdict[value] = anchor + except TypeError: + pass + + while attrs: + if mro: + thisclass = mro.popleft() + else: + thisclass = attrs[0][2] + attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass) + + if thisclass is __builtin__.object: + attrs = inherited + continue + elif thisclass is object: + tag = 'defined here' + else: + tag = 'inherited from %s' % self.classlink(thisclass, + object.__module__) + tag += ':
\n' + + # Sort attrs by name. + try: + attrs.sort(key=lambda t: t[0]) + except TypeError: + attrs.sort(lambda t1, t2: cmp(t1[0], t2[0])) # 2.3 compat + + # Pump out the attrs, segregated by kind. + attrs = spill('Methods %s' % tag, attrs, + lambda t: t[1] == 'method') + attrs = spill('Class methods %s' % tag, attrs, + lambda t: t[1] == 'class method') + attrs = spill('Static methods %s' % tag, attrs, + lambda t: t[1] == 'static method') + attrs = spilldescriptors('Data descriptors %s' % tag, attrs, + lambda t: t[1] == 'data descriptor') + attrs = spilldata('Data and other attributes %s' % tag, attrs, + lambda t: t[1] == 'data') + assert attrs == [] + attrs = inherited + + contents = ''.join(contents) + + if name == realname: + title = 'class %s' % ( + name, realname) + else: + title = '%s = class %s' % ( + name, name, realname) + if bases: + parents = [] + for base in bases: + parents.append(self.classlink(base, object.__module__)) + title = title + '(%s)' % join(parents, ', ') + doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict) + doc = doc and '%s
 
' % doc + + return self.section(title, '#000000', '#ffc8d8', contents, 3, doc) + + def formatvalue(self, object): + """Format an argument default value as text.""" + return self.grey('=' + self.repr(object)) + + def docroutine(self, object, name=None, mod=None, + funcs={}, classes={}, methods={}, cl=None): + """Produce HTML documentation for a function or method object.""" + realname = object.__name__ + name = name or realname + anchor = (cl and cl.__name__ or '') + '-' + name + note = '' + skipdocs = 0 + if inspect.ismethod(object): + imclass = object.im_class + if cl: + if imclass is not cl: + note = ' from ' + self.classlink(imclass, mod) + else: + if object.im_self is not None: + note = ' method of %s instance' % self.classlink( + object.im_self.__class__, mod) + else: + note = ' unbound %s method' % self.classlink(imclass,mod) + object = object.im_func + + if name == realname: + title = '%s' % (anchor, realname) + else: + if (cl and realname in cl.__dict__ and + cl.__dict__[realname] is object): + reallink = '%s' % ( + cl.__name__ + '-' + realname, realname) + skipdocs = 1 + else: + reallink = realname + title = '%s = %s' % ( + anchor, name, reallink) + if inspect.isfunction(object) or ( + inspect.isbuiltin(object) and hasattr(object, 'func_code')): + # PyPy extension: the code below works for built-in functions too + args, varargs, varkw, defaults = inspect.getargspec(object) + argspec = inspect.formatargspec( + args, varargs, varkw, defaults, formatvalue=self.formatvalue) + if realname == '': + title = '%s lambda ' % name + argspec = argspec[1:-1] # remove parentheses + else: + argspec = '(...)' + + decl = title + argspec + (note and self.grey( + '%s' % note)) + + if skipdocs: + return '
<dl><dt>%s</dt></dl>
\n' % decl + else: + doc = self.markup( + getdoc(object), self.preformat, funcs, classes, methods) + doc = doc and '
<dd><tt>%s</tt></dd>
' % doc + return '
<dl><dt>%s</dt>%s</dl>
\n' % (decl, doc) + + def _docdescriptor(self, name, value, mod): + results = [] + push = results.append + + if name: + push('
<dl><dt><strong>%s</strong></dt>
\n' % name) + if value.__doc__ is not None: + doc = self.markup(getdoc(value), self.preformat) + push('
<dd><tt>%s</tt></dd>
\n' % doc) + push('</dl>
\n') + + return ''.join(results) + + def docproperty(self, object, name=None, mod=None, cl=None): + """Produce html documentation for a property.""" + return self._docdescriptor(name, object, mod) + + def docother(self, object, name=None, mod=None, *ignored): + """Produce HTML documentation for a data object.""" + lhs = name and '%s = ' % name or '' + return lhs + self.repr(object) + + def docdata(self, object, name=None, mod=None, cl=None): + """Produce html documentation for a data descriptor.""" + return self._docdescriptor(name, object, mod) + + def index(self, dir, shadowed=None): + """Generate an HTML index for a directory of modules.""" + modpkgs = [] + if shadowed is None: shadowed = {} + for importer, name, ispkg in pkgutil.iter_modules([dir]): + modpkgs.append((name, '', ispkg, name in shadowed)) + shadowed[name] = 1 + + modpkgs.sort() + contents = self.multicolumn(modpkgs, self.modpkglink) + return self.bigsection(dir, '#ffffff', '#ee77aa', contents) + +# -------------------------------------------- text documentation generator + +class TextRepr(Repr): + """Class for safely making a text representation of a Python object.""" + def __init__(self): + Repr.__init__(self) + self.maxlist = self.maxtuple = 20 + self.maxdict = 10 + self.maxstring = self.maxother = 100 + + def repr1(self, x, level): + if hasattr(type(x), '__name__'): + methodname = 'repr_' + join(split(type(x).__name__), '_') + if hasattr(self, methodname): + return getattr(self, methodname)(x, level) + return cram(stripid(repr(x)), self.maxother) + + def repr_string(self, x, level): + test = cram(x, self.maxstring) + testrepr = repr(test) + if '\\' in test and '\\' not in replace(testrepr, r'\\', ''): + # Backslashes are only literal in the string and are never + # needed to make any special characters, so show a raw string. 
+ return 'r' + testrepr[0] + test + testrepr[0] + return testrepr + + repr_str = repr_string + + def repr_instance(self, x, level): + try: + return cram(stripid(repr(x)), self.maxstring) + except: + return '<%s instance>' % x.__class__.__name__ + +class TextDoc(Doc): + """Formatter class for text documentation.""" + + # ------------------------------------------- text formatting utilities + + _repr_instance = TextRepr() + repr = _repr_instance.repr + + def bold(self, text): + """Format a string in bold by overstriking.""" + return join(map(lambda ch: ch + '\b' + ch, text), '') + + def indent(self, text, prefix=' '): + """Indent text by prepending a given prefix to each line.""" + if not text: return '' + lines = split(text, '\n') + lines = map(lambda line, prefix=prefix: prefix + line, lines) + if lines: lines[-1] = rstrip(lines[-1]) + return join(lines, '\n') + + def section(self, title, contents): + """Format a section with a given heading.""" + return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n' + + # ---------------------------------------------- type-specific routines + + def formattree(self, tree, modname, parent=None, prefix=''): + """Render in text a class tree as returned by inspect.getclasstree().""" + result = '' + for entry in tree: + if type(entry) is type(()): + c, bases = entry + result = result + prefix + classname(c, modname) + if bases and bases != (parent,): + parents = map(lambda c, m=modname: classname(c, m), bases) + result = result + '(%s)' % join(parents, ', ') + result = result + '\n' + elif type(entry) is type([]): + result = result + self.formattree( + entry, modname, c, prefix + ' ') + return result + + def docmodule(self, object, name=None, mod=None): + """Produce text documentation for a given module object.""" + name = object.__name__ # ignore the passed-in name + synop, desc = splitdoc(getdoc(object)) + result = self.section('NAME', name + (synop and ' - ' + synop)) + + try: + all = object.__all__ + except AttributeError: + all = None + + try: + file = inspect.getabsfile(object) + except TypeError: + file = '(built-in)' + result = result + self.section('FILE', file) + + docloc = self.getdocloc(object) + if docloc is not None: + result = result + self.section('MODULE DOCS', docloc) + + if desc: + result = result + self.section('DESCRIPTION', desc) + + classes = [] + for key, value in inspect.getmembers(object, inspect.isclass): + # if __all__ exists, believe it. Otherwise use old heuristic. + if (all is not None + or (inspect.getmodule(value) or object) is object): + if visiblename(key, all): + classes.append((key, value)) + funcs = [] + for key, value in inspect.getmembers(object, inspect.isroutine): + # if __all__ exists, believe it. Otherwise use old heuristic. 
+ if (all is not None or + inspect.isbuiltin(value) or inspect.getmodule(value) is object): + if visiblename(key, all): + funcs.append((key, value)) + data = [] + for key, value in inspect.getmembers(object, isdata): + if visiblename(key, all): + data.append((key, value)) + + if hasattr(object, '__path__'): + modpkgs = [] + for importer, modname, ispkg in pkgutil.iter_modules(object.__path__): + if ispkg: + modpkgs.append(modname + ' (package)') + else: + modpkgs.append(modname) + + modpkgs.sort() + result = result + self.section( + 'PACKAGE CONTENTS', join(modpkgs, '\n')) + + if classes: + classlist = map(lambda (key, value): value, classes) + contents = [self.formattree( + inspect.getclasstree(classlist, 1), name)] + for key, value in classes: + contents.append(self.document(value, key, name)) + result = result + self.section('CLASSES', join(contents, '\n')) + + if funcs: + contents = [] + for key, value in funcs: + contents.append(self.document(value, key, name)) + result = result + self.section('FUNCTIONS', join(contents, '\n')) + + if data: + contents = [] + for key, value in data: + contents.append(self.docother(value, key, name, maxlen=70)) + result = result + self.section('DATA', join(contents, '\n')) + + if hasattr(object, '__version__'): + version = str(object.__version__) + if version[:11] == '$' + 'Revision: ' and version[-1:] == '$': + version = strip(version[11:-1]) + result = result + self.section('VERSION', version) + if hasattr(object, '__date__'): + result = result + self.section('DATE', str(object.__date__)) + if hasattr(object, '__author__'): + result = result + self.section('AUTHOR', str(object.__author__)) + if hasattr(object, '__credits__'): + result = result + self.section('CREDITS', str(object.__credits__)) + return result + + def docclass(self, object, name=None, mod=None): + """Produce text documentation for a given class object.""" + realname = object.__name__ + name = name or realname + bases = object.__bases__ + + def makename(c, m=object.__module__): + return classname(c, m) + + if name == realname: + title = 'class ' + self.bold(realname) + else: + title = self.bold(name) + ' = class ' + realname + if bases: + parents = map(makename, bases) + title = title + '(%s)' % join(parents, ', ') + + doc = getdoc(object) + contents = doc and [doc + '\n'] or [] + push = contents.append + + # List the mro, if non-trivial. + mro = deque(inspect.getmro(object)) + if len(mro) > 2: + push("Method resolution order:") + for base in mro: + push(' ' + makename(base)) + push('') + + # Cute little class to pump out a horizontal rule between sections. 
+ class HorizontalRule: + def __init__(self): + self.needone = 0 + def maybe(self): + if self.needone: + push('-' * 70) + self.needone = 1 + hr = HorizontalRule() + + def spill(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + push(self.document(getattr(object, name), + name, mod, object)) + return attrs + + def spilldescriptors(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + push(self._docdescriptor(name, value, mod)) + return attrs + + def spilldata(msg, attrs, predicate): + ok, attrs = _split_list(attrs, predicate) + if ok: + hr.maybe() + push(msg) + for name, kind, homecls, value in ok: + if callable(value) or inspect.isdatadescriptor(value): + doc = getdoc(value) + else: + doc = None + push(self.docother(getattr(object, name), + name, mod, maxlen=70, doc=doc) + '\n') + return attrs + + attrs = filter(lambda (name, kind, cls, value): visiblename(name), + classify_class_attrs(object)) + while attrs: + if mro: + thisclass = mro.popleft() + else: + thisclass = attrs[0][2] + attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass) + + if thisclass is __builtin__.object: + attrs = inherited + continue + elif thisclass is object: + tag = "defined here" + else: + tag = "inherited from %s" % classname(thisclass, + object.__module__) + filter(lambda t: not t[0].startswith('_'), attrs) + + # Sort attrs by name. + attrs.sort() + + # Pump out the attrs, segregated by kind. + attrs = spill("Methods %s:\n" % tag, attrs, + lambda t: t[1] == 'method') + attrs = spill("Class methods %s:\n" % tag, attrs, + lambda t: t[1] == 'class method') + attrs = spill("Static methods %s:\n" % tag, attrs, + lambda t: t[1] == 'static method') + attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs, + lambda t: t[1] == 'data descriptor') + attrs = spilldata("Data and other attributes %s:\n" % tag, attrs, + lambda t: t[1] == 'data') + assert attrs == [] + attrs = inherited + + contents = '\n'.join(contents) + if not contents: + return title + '\n' + return title + '\n' + self.indent(rstrip(contents), ' | ') + '\n' + + def formatvalue(self, object): + """Format an argument default value as text.""" + return '=' + self.repr(object) + + def docroutine(self, object, name=None, mod=None, cl=None): + """Produce text documentation for a function or method object.""" + realname = object.__name__ + name = name or realname + note = '' + skipdocs = 0 + if inspect.ismethod(object): + imclass = object.im_class + if cl: + if imclass is not cl: + note = ' from ' + classname(imclass, mod) + else: + if object.im_self is not None: + note = ' method of %s instance' % classname( + object.im_self.__class__, mod) + else: + note = ' unbound %s method' % classname(imclass,mod) + object = object.im_func + + if name == realname: + title = self.bold(realname) + else: + if (cl and realname in cl.__dict__ and + cl.__dict__[realname] is object): + skipdocs = 1 + title = self.bold(name) + ' = ' + realname + if (inspect.isfunction(object) or + inspect.isbuiltin(object) and hasattr(object, 'func_code')): + # PyPy extension: the code below works for built-in functions too + args, varargs, varkw, defaults = inspect.getargspec(object) + argspec = inspect.formatargspec( + args, varargs, varkw, defaults, formatvalue=self.formatvalue) + if realname == '': + title = self.bold(name) + ' lambda ' + argspec = argspec[1:-1] # remove parentheses + else: + 
argspec = '(...)' + decl = title + argspec + note + + if skipdocs: + return decl + '\n' + else: + doc = getdoc(object) or '' + return decl + '\n' + (doc and rstrip(self.indent(doc)) + '\n') + + def _docdescriptor(self, name, value, mod): + results = [] + push = results.append + + if name: + push(self.bold(name)) + push('\n') + doc = getdoc(value) or '' + if doc: + push(self.indent(doc)) + push('\n') + return ''.join(results) + + def docproperty(self, object, name=None, mod=None, cl=None): + """Produce text documentation for a property.""" + return self._docdescriptor(name, object, mod) + + def docdata(self, object, name=None, mod=None, cl=None): + """Produce text documentation for a data descriptor.""" + return self._docdescriptor(name, object, mod) + + def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None): + """Produce text documentation for a data object.""" + repr = self.repr(object) + if maxlen: + line = (name and name + ' = ' or '') + repr + chop = maxlen - len(line) + if chop < 0: repr = repr[:chop] + '...' + line = (name and self.bold(name) + ' = ' or '') + repr + if doc is not None: + line += '\n' + self.indent(str(doc)) + return line + +# --------------------------------------------------------- user interfaces + +def pager(text): + """The first time this is called, determine what kind of pager to use.""" + global pager + pager = getpager() + pager(text) + +def getpager(): + """Decide what method to use for paging through text.""" + if type(sys.stdout) is not types.FileType: + return plainpager + if not sys.stdin.isatty() or not sys.stdout.isatty(): + return plainpager + if 'PAGER' in os.environ: + if sys.platform == 'win32': # pipes completely broken in Windows + return lambda text: tempfilepager(plain(text), os.environ['PAGER']) + elif os.environ.get('TERM') in ('dumb', 'emacs'): + return lambda text: pipepager(plain(text), os.environ['PAGER']) + else: + return lambda text: pipepager(text, os.environ['PAGER']) + if os.environ.get('TERM') in ('dumb', 'emacs'): + return plainpager + if sys.platform == 'win32' or sys.platform.startswith('os2'): + return lambda text: tempfilepager(plain(text), 'more <') + if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: + return lambda text: pipepager(text, 'less') + + import tempfile + (fd, filename) = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, 'system') and os.system('more %s' % filename) == 0: + return lambda text: pipepager(text, 'more') + else: + return ttypager + finally: + os.unlink(filename) + +def plain(text): + """Remove boldface formatting from text.""" + return re.sub('.\b', '', text) + +def pipepager(text, cmd): + """Page through text by feeding it to another program.""" + pipe = os.popen(cmd, 'w') + try: + pipe.write(text) + pipe.close() + except IOError: + pass # Ignore broken pipes caused by quitting the pager program. 
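A small sketch of how the overstrike bold formatting and the plain()/pager helpers above fit together (assuming this file is importable as the standard pydoc module; the object passed to document() below is arbitrary):

import pydoc

# TextDoc.bold() overstrikes each character (char, backspace, char);
# plain() strips that markup again, which the simpler pagers rely on.
bolded = pydoc.text.bold('spam')        # pydoc.text is the TextDoc() instance created further down
assert bolded == 's\bsp\bpa\bam\bm'
assert pydoc.plain(bolded) == 'spam'

# Render plain-text documentation for an object without starting a pager.
page = pydoc.plain(pydoc.text.document(pydoc.plain))
print page.splitlines()[0]              # first line of the rendered text docs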
+ +def tempfilepager(text, cmd): + """Page through text by invoking a program on a temporary file.""" + import tempfile + filename = tempfile.mktemp() + file = open(filename, 'w') + file.write(text) + file.close() + try: + os.system(cmd + ' ' + filename) + finally: + os.unlink(filename) + +def ttypager(text): + """Page through text on a text terminal.""" + lines = split(plain(text), '\n') + try: + import tty + fd = sys.stdin.fileno() + old = tty.tcgetattr(fd) + tty.setcbreak(fd) + getchar = lambda: sys.stdin.read(1) + except (ImportError, AttributeError): + tty = None + getchar = lambda: sys.stdin.readline()[:-1][:1] + + try: + r = inc = os.environ.get('LINES', 25) - 1 + sys.stdout.write(join(lines[:inc], '\n') + '\n') + while lines[r:]: + sys.stdout.write('-- more --') + sys.stdout.flush() + c = getchar() + + if c in ('q', 'Q'): + sys.stdout.write('\r \r') + break + elif c in ('\r', '\n'): + sys.stdout.write('\r \r' + lines[r] + '\n') + r = r + 1 + continue + if c in ('b', 'B', '\x1b'): + r = r - inc - inc + if r < 0: r = 0 + sys.stdout.write('\n' + join(lines[r:r+inc], '\n') + '\n') + r = r + inc + + finally: + if tty: + tty.tcsetattr(fd, tty.TCSAFLUSH, old) + +def plainpager(text): + """Simply print unformatted text. This is the ultimate fallback.""" + sys.stdout.write(plain(text)) + +def describe(thing): + """Produce a short description of the given thing.""" + if inspect.ismodule(thing): + if thing.__name__ in sys.builtin_module_names: + return 'built-in module ' + thing.__name__ + if hasattr(thing, '__path__'): + return 'package ' + thing.__name__ + else: + return 'module ' + thing.__name__ + if inspect.isbuiltin(thing): + return 'built-in function ' + thing.__name__ + if inspect.isgetsetdescriptor(thing): + return 'getset descriptor %s.%s.%s' % ( + thing.__objclass__.__module__, thing.__objclass__.__name__, + thing.__name__) + if inspect.ismemberdescriptor(thing): + return 'member descriptor %s.%s.%s' % ( + thing.__objclass__.__module__, thing.__objclass__.__name__, + thing.__name__) + if inspect.isclass(thing): + return 'class ' + thing.__name__ + if inspect.isfunction(thing): + return 'function ' + thing.__name__ + if inspect.ismethod(thing): + return 'method ' + thing.__name__ + if type(thing) is types.InstanceType: + return 'instance of ' + thing.__class__.__name__ + return type(thing).__name__ + +def locate(path, forceload=0): + """Locate an object by name or dotted path, importing as necessary.""" + parts = [part for part in split(path, '.') if part] + module, n = None, 0 + while n < len(parts): + nextmodule = safeimport(join(parts[:n+1], '.'), forceload) + if nextmodule: module, n = nextmodule, n + 1 + else: break + if module: + object = module + for part in parts[n:]: + try: object = getattr(object, part) + except AttributeError: return None + return object + else: + if hasattr(__builtin__, path): + return getattr(__builtin__, path) + +# --------------------------------------- interactive interpreter interface + +text = TextDoc() +html = HTMLDoc() + +def resolve(thing, forceload=0): + """Given an object or a path to an object, get the object and its name.""" + if isinstance(thing, str): + object = locate(thing, forceload) + if not object: + raise ImportError, 'no Python documentation found for %r' % thing + return object, thing + else: + return thing, getattr(thing, '__name__', None) + +def doc(thing, title='Python Library Documentation: %s', forceload=0): + """Display text documentation, given an object or a path to an object.""" + try: + object, name = resolve(thing, 
forceload) + desc = describe(object) + module = inspect.getmodule(object) + if name and '.' in name: + desc += ' in ' + name[:name.rfind('.')] + elif module and module is not object: + desc += ' in module ' + module.__name__ + if not (inspect.ismodule(object) or + inspect.isclass(object) or + inspect.isroutine(object) or + inspect.isgetsetdescriptor(object) or + inspect.ismemberdescriptor(object) or + isinstance(object, property)): + # If the passed object is a piece of data or an instance, + # document its available methods instead of its value. + object = type(object) + desc += ' object' + pager(title % desc + '\n\n' + text.document(object, name)) + except (ImportError, ErrorDuringImport), value: + print value + +def writedoc(thing, forceload=0): + """Write HTML documentation to a file in the current directory.""" + try: + object, name = resolve(thing, forceload) + page = html.page(describe(object), html.document(object, name)) + file = open(name + '.html', 'w') + file.write(page) + file.close() + print 'wrote', name + '.html' + except (ImportError, ErrorDuringImport), value: + print value + +def writedocs(dir, pkgpath='', done=None): + """Write out HTML documentation for all modules in a directory tree.""" + if done is None: done = {} + for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath): + writedoc(modname) + return + +class Helper: + keywords = { + 'and': 'BOOLEAN', + 'as': 'with', + 'assert': ('ref/assert', ''), + 'break': ('ref/break', 'while for'), + 'class': ('ref/class', 'CLASSES SPECIALMETHODS'), + 'continue': ('ref/continue', 'while for'), + 'def': ('ref/function', ''), + 'del': ('ref/del', 'BASICMETHODS'), + 'elif': 'if', + 'else': ('ref/if', 'while for'), + 'except': 'try', + 'exec': ('ref/exec', ''), + 'finally': 'try', + 'for': ('ref/for', 'break continue while'), + 'from': 'import', + 'global': ('ref/global', 'NAMESPACES'), + 'if': ('ref/if', 'TRUTHVALUE'), + 'import': ('ref/import', 'MODULES'), + 'in': ('ref/comparisons', 'SEQUENCEMETHODS2'), + 'is': 'COMPARISON', + 'lambda': ('ref/lambdas', 'FUNCTIONS'), + 'not': 'BOOLEAN', + 'or': 'BOOLEAN', + 'pass': ('ref/pass', ''), + 'print': ('ref/print', ''), + 'raise': ('ref/raise', 'EXCEPTIONS'), + 'return': ('ref/return', 'FUNCTIONS'), + 'try': ('ref/try', 'EXCEPTIONS'), + 'while': ('ref/while', 'break continue if TRUTHVALUE'), + 'with': ('ref/with', 'CONTEXTMANAGERS EXCEPTIONS yield'), + 'yield': ('ref/yield', ''), + } + + topics = { + 'TYPES': ('ref/types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS FUNCTIONS CLASSES MODULES FILES inspect'), + 'STRINGS': ('ref/strings', 'str UNICODE SEQUENCES STRINGMETHODS FORMATTING TYPES'), + 'STRINGMETHODS': ('lib/string-methods', 'STRINGS FORMATTING'), + 'FORMATTING': ('lib/typesseq-strings', 'OPERATORS'), + 'UNICODE': ('ref/strings', 'encodings unicode SEQUENCES STRINGMETHODS FORMATTING TYPES'), + 'NUMBERS': ('ref/numbers', 'INTEGER FLOAT COMPLEX TYPES'), + 'INTEGER': ('ref/integers', 'int range'), + 'FLOAT': ('ref/floating', 'float math'), + 'COMPLEX': ('ref/imaginary', 'complex cmath'), + 'SEQUENCES': ('lib/typesseq', 'STRINGMETHODS FORMATTING xrange LISTS'), + 'MAPPINGS': 'DICTIONARIES', + 'FUNCTIONS': ('lib/typesfunctions', 'def TYPES'), + 'METHODS': ('lib/typesmethods', 'class def CLASSES TYPES'), + 'CODEOBJECTS': ('lib/bltin-code-objects', 'compile FUNCTIONS TYPES'), + 'TYPEOBJECTS': ('lib/bltin-type-objects', 'types TYPES'), + 'FRAMEOBJECTS': 'TYPES', + 'TRACEBACKS': 'TYPES', + 'NONE': ('lib/bltin-null-object', ''), + 'ELLIPSIS': ('lib/bltin-ellipsis-object', 
'SLICINGS'), + 'FILES': ('lib/bltin-file-objects', ''), + 'SPECIALATTRIBUTES': ('lib/specialattrs', ''), + 'CLASSES': ('ref/types', 'class SPECIALMETHODS PRIVATENAMES'), + 'MODULES': ('lib/typesmodules', 'import'), + 'PACKAGES': 'import', + 'EXPRESSIONS': ('ref/summary', 'lambda or and not in is BOOLEAN COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES LISTS DICTIONARIES BACKQUOTES'), + 'OPERATORS': 'EXPRESSIONS', + 'PRECEDENCE': 'EXPRESSIONS', + 'OBJECTS': ('ref/objects', 'TYPES'), + 'SPECIALMETHODS': ('ref/specialnames', 'BASICMETHODS ATTRIBUTEMETHODS CALLABLEMETHODS SEQUENCEMETHODS1 MAPPINGMETHODS SEQUENCEMETHODS2 NUMBERMETHODS CLASSES'), + 'BASICMETHODS': ('ref/customization', 'cmp hash repr str SPECIALMETHODS'), + 'ATTRIBUTEMETHODS': ('ref/attribute-access', 'ATTRIBUTES SPECIALMETHODS'), + 'CALLABLEMETHODS': ('ref/callable-types', 'CALLS SPECIALMETHODS'), + 'SEQUENCEMETHODS1': ('ref/sequence-types', 'SEQUENCES SEQUENCEMETHODS2 SPECIALMETHODS'), + 'SEQUENCEMETHODS2': ('ref/sequence-methods', 'SEQUENCES SEQUENCEMETHODS1 SPECIALMETHODS'), + 'MAPPINGMETHODS': ('ref/sequence-types', 'MAPPINGS SPECIALMETHODS'), + 'NUMBERMETHODS': ('ref/numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT SPECIALMETHODS'), + 'EXECUTION': ('ref/execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'), + 'NAMESPACES': ('ref/naming', 'global ASSIGNMENT DELETION DYNAMICFEATURES'), + 'DYNAMICFEATURES': ('ref/dynamic-features', ''), + 'SCOPING': 'NAMESPACES', + 'FRAMES': 'NAMESPACES', + 'EXCEPTIONS': ('ref/exceptions', 'try except finally raise'), + 'COERCIONS': ('ref/coercion-rules','CONVERSIONS'), + 'CONVERSIONS': ('ref/conversions', 'COERCIONS'), + 'IDENTIFIERS': ('ref/identifiers', 'keywords SPECIALIDENTIFIERS'), + 'SPECIALIDENTIFIERS': ('ref/id-classes', ''), + 'PRIVATENAMES': ('ref/atom-identifiers', ''), + 'LITERALS': ('ref/atom-literals', 'STRINGS BACKQUOTES NUMBERS TUPLELITERALS LISTLITERALS DICTIONARYLITERALS'), + 'TUPLES': 'SEQUENCES', + 'TUPLELITERALS': ('ref/exprlists', 'TUPLES LITERALS'), + 'LISTS': ('lib/typesseq-mutable', 'LISTLITERALS'), + 'LISTLITERALS': ('ref/lists', 'LISTS LITERALS'), + 'DICTIONARIES': ('lib/typesmapping', 'DICTIONARYLITERALS'), + 'DICTIONARYLITERALS': ('ref/dict', 'DICTIONARIES LITERALS'), + 'BACKQUOTES': ('ref/string-conversions', 'repr str STRINGS LITERALS'), + 'ATTRIBUTES': ('ref/attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'), + 'SUBSCRIPTS': ('ref/subscriptions', 'SEQUENCEMETHODS1'), + 'SLICINGS': ('ref/slicings', 'SEQUENCEMETHODS2'), + 'CALLS': ('ref/calls', 'EXPRESSIONS'), + 'POWER': ('ref/power', 'EXPRESSIONS'), + 'UNARY': ('ref/unary', 'EXPRESSIONS'), + 'BINARY': ('ref/binary', 'EXPRESSIONS'), + 'SHIFTING': ('ref/shifting', 'EXPRESSIONS'), + 'BITWISE': ('ref/bitwise', 'EXPRESSIONS'), + 'COMPARISON': ('ref/comparisons', 'EXPRESSIONS BASICMETHODS'), + 'BOOLEAN': ('ref/Booleans', 'EXPRESSIONS TRUTHVALUE'), + 'ASSERTION': 'assert', + 'ASSIGNMENT': ('ref/assignment', 'AUGMENTEDASSIGNMENT'), + 'AUGMENTEDASSIGNMENT': ('ref/augassign', 'NUMBERMETHODS'), + 'DELETION': 'del', + 'PRINTING': 'print', + 'RETURNING': 'return', + 'IMPORTING': 'import', + 'CONDITIONAL': 'if', + 'LOOPING': ('ref/compound', 'for while break continue'), + 'TRUTHVALUE': ('lib/truth', 'if while and or not BASICMETHODS'), + 'DEBUGGING': ('lib/module-pdb', 'pdb'), + 'CONTEXTMANAGERS': ('ref/context-managers', 'with'), + } + + def __init__(self, input, output): + self.input = input + self.output = output + self.docdir = None + execdir = 
os.path.dirname(sys.executable) + homedir = os.environ.get('PYTHONHOME') + for dir in [os.environ.get('PYTHONDOCS'), + homedir and os.path.join(homedir, 'doc'), + os.path.join(execdir, 'doc'), + '/usr/doc/python-docs-' + split(sys.version)[0], + '/usr/doc/python-' + split(sys.version)[0], + '/usr/doc/python-docs-' + sys.version[:3], + '/usr/doc/python-' + sys.version[:3], + os.path.join(sys.prefix, 'Resources/English.lproj/Documentation')]: + if dir and os.path.isdir(os.path.join(dir, 'lib')): + self.docdir = dir + + def __repr__(self): + if inspect.stack()[1][3] == '?': + self() + return '' + return '' + + def __call__(self, request=None): + if request is not None: + self.help(request) + else: + self.intro() + self.interact() + self.output.write(''' +You are now leaving help and returning to the Python interpreter. +If you want to ask for help on a particular object directly from the +interpreter, you can type "help(object)". Executing "help('string')" +has the same effect as typing a particular string at the help> prompt. +''') + + def interact(self): + self.output.write('\n') + while True: + try: + request = self.getline('help> ') + if not request: break + except (KeyboardInterrupt, EOFError): + break + request = strip(replace(request, '"', '', "'", '')) + if lower(request) in ('q', 'quit'): break + self.help(request) + + def getline(self, prompt): + """Read one line, using raw_input when available.""" + if self.input is sys.stdin: + return raw_input(prompt) + else: + self.output.write(prompt) + self.output.flush() + return self.input.readline() + + def help(self, request): + if type(request) is type(''): + if request == 'help': self.intro() + elif request == 'keywords': self.listkeywords() + elif request == 'topics': self.listtopics() + elif request == 'modules': self.listmodules() + elif request[:8] == 'modules ': + self.listmodules(split(request)[1]) + elif request in self.keywords: self.showtopic(request) + elif request in self.topics: self.showtopic(request) + elif request: doc(request, 'Help on %s:') + elif isinstance(request, Helper): self() + else: doc(request, 'Help on %s:') + self.output.write('\n') + + def intro(self): + self.output.write(''' +Welcome to Python %s! This is the online help utility. + +If this is your first time using Python, you should definitely check out +the tutorial on the Internet at http://www.python.org/doc/tut/. + +Enter the name of any module, keyword, or topic to get help on writing +Python programs and using Python modules. To quit this help utility and +return to the interpreter, just type "quit". + +To get a list of available modules, keywords, or topics, type "modules", +"keywords", or "topics". Each module also comes with a one-line summary +of what it does; to list the modules whose summaries contain a given word +such as "spam", type "modules spam". +''' % sys.version[:3]) + + def list(self, items, columns=4, width=80): + items = items[:] + items.sort() + colw = width / columns + rows = (len(items) + columns - 1) / columns + for row in range(rows): + for col in range(columns): + i = col * rows + row + if i < len(items): + self.output.write(items[i]) + if col < columns - 1: + self.output.write(' ' + ' ' * (colw-1 - len(items[i]))) + self.output.write('\n') + + def listkeywords(self): + self.output.write(''' +Here is a list of the Python keywords. Enter any keyword to get more help. + +''') + self.list(self.keywords.keys()) + + def listtopics(self): + self.output.write(''' +Here is a list of available topics. 
Enter any topic name to get more help. + +''') + self.list(self.topics.keys()) + + def showtopic(self, topic): + if not self.docdir: + self.output.write(''' +Sorry, topic and keyword documentation is not available because the Python +HTML documentation files could not be found. If you have installed them, +please set the environment variable PYTHONDOCS to indicate their location. + +On the Microsoft Windows operating system, the files can be built by +running "hh -decompile . PythonNN.chm" in the C:\PythonNN\Doc> directory. +''') + return + target = self.topics.get(topic, self.keywords.get(topic)) + if not target: + self.output.write('no documentation found for %s\n' % repr(topic)) + return + if type(target) is type(''): + return self.showtopic(target) + + filename, xrefs = target + filename = self.docdir + '/' + filename + '.html' + try: + file = open(filename) + except: + self.output.write('could not read docs from %s\n' % filename) + return + + divpat = re.compile(']*navigat.*?', re.I | re.S) + addrpat = re.compile('.*?', re.I | re.S) + document = re.sub(addrpat, '', re.sub(divpat, '', file.read())) + file.close() + + import htmllib, formatter, StringIO + buffer = StringIO.StringIO() + parser = htmllib.HTMLParser( + formatter.AbstractFormatter(formatter.DumbWriter(buffer))) + parser.start_table = parser.do_p + parser.end_table = lambda parser=parser: parser.do_p({}) + parser.start_tr = parser.do_br + parser.start_td = parser.start_th = lambda a, b=buffer: b.write('\t') + parser.feed(document) + buffer = replace(buffer.getvalue(), '\xa0', ' ', '\n', '\n ') + pager(' ' + strip(buffer) + '\n') + if xrefs: + buffer = StringIO.StringIO() + formatter.DumbWriter(buffer).send_flowing_data( + 'Related help topics: ' + join(split(xrefs), ', ') + '\n') + self.output.write('\n%s\n' % buffer.getvalue()) + + def listmodules(self, key=''): + if key: + self.output.write(''' +Here is a list of matching modules. Enter any module name to get more help. + +''') + apropos(key) + else: + self.output.write(''' +Please wait a moment while I gather a list of all available modules... + +''') + modules = {} + def callback(path, modname, desc, modules=modules): + if modname and modname[-9:] == '.__init__': + modname = modname[:-9] + ' (package)' + if find(modname, '.') < 0: + modules[modname] = 1 + ModuleScanner().run(callback) + self.list(modules.keys()) + self.output.write(''' +Enter any module name to get more help. Or, type "modules spam" to search +for modules whose descriptions contain the word "spam". 
+''') + +help = Helper(sys.stdin, sys.stdout) + +class Scanner: + """A generic tree iterator.""" + def __init__(self, roots, children, descendp): + self.roots = roots[:] + self.state = [] + self.children = children + self.descendp = descendp + + def next(self): + if not self.state: + if not self.roots: + return None + root = self.roots.pop(0) + self.state = [(root, self.children(root))] + node, children = self.state[-1] + if not children: + self.state.pop() + return self.next() + child = children.pop(0) + if self.descendp(child): + self.state.append((child, self.children(child))) + return child + + +class ModuleScanner: + """An interruptible scanner that searches module synopses.""" + + def run(self, callback, key=None, completer=None): + if key: key = lower(key) + self.quit = False + seen = {} + + for modname in sys.builtin_module_names: + if modname != '__main__': + seen[modname] = 1 + if key is None: + callback(None, modname, '') + else: + desc = split(__import__(modname).__doc__ or '', '\n')[0] + if find(lower(modname + ' - ' + desc), key) >= 0: + callback(None, modname, desc) + + for importer, modname, ispkg in pkgutil.walk_packages(): + if self.quit: + break + if key is None: + callback(None, modname, '') + else: + loader = importer.find_module(modname) + if hasattr(loader,'get_source'): + import StringIO + desc = source_synopsis( + StringIO.StringIO(loader.get_source(modname)) + ) or '' + if hasattr(loader,'get_filename'): + path = loader.get_filename(modname) + else: + path = None + else: + module = loader.load_module(modname) + desc = (module.__doc__ or '').splitlines()[0] + path = getattr(module,'__file__',None) + if find(lower(modname + ' - ' + desc), key) >= 0: + callback(path, modname, desc) + + if completer: + completer() + +def apropos(key): + """Print all the one-line module summaries that contain a substring.""" + def callback(path, modname, desc): + if modname[-9:] == '.__init__': + modname = modname[:-9] + ' (package)' + print modname, desc and '- ' + desc + try: import warnings + except ImportError: pass + else: warnings.filterwarnings('ignore') # ignore problems during import + ModuleScanner().run(callback, key) + +# --------------------------------------------------- web browser interface + +def serve(port, callback=None, completer=None): + import BaseHTTPServer, mimetools, select + + # Patch up mimetools.Message so it doesn't break if rfc822 is reloaded. 
+ class Message(mimetools.Message): + def __init__(self, fp, seekable=1): + Message = self.__class__ + Message.__bases__[0].__bases__[0].__init__(self, fp, seekable) + self.encodingheader = self.getheader('content-transfer-encoding') + self.typeheader = self.getheader('content-type') + self.parsetype() + self.parseplist() + + class DocHandler(BaseHTTPServer.BaseHTTPRequestHandler): + def send_document(self, title, contents): + try: + self.send_response(200) + self.send_header('Content-Type', 'text/html') + self.end_headers() + self.wfile.write(html.page(title, contents)) + except IOError: pass + + def do_GET(self): + path = self.path + if path[-5:] == '.html': path = path[:-5] + if path[:1] == '/': path = path[1:] + if path and path != '.': + try: + obj = locate(path, forceload=1) + except ErrorDuringImport, value: + self.send_document(path, html.escape(str(value))) + return + if obj: + self.send_document(describe(obj), html.document(obj, path)) + else: + self.send_document(path, +'no Python documentation found for %s' % repr(path)) + else: + heading = html.heading( +'Python: Index of Modules', +'#ffffff', '#7799ee') + def bltinlink(name): + return '%s' % (name, name) + names = filter(lambda x: x != '__main__', + sys.builtin_module_names) + contents = html.multicolumn(names, bltinlink) + indices = ['

' + html.bigsection( + 'Built-in Modules', '#ffffff', '#ee77aa', contents)] + + seen = {} + for dir in sys.path: + indices.append(html.index(dir, seen)) + contents = heading + join(indices) + '''

+ +pydoc by Ka-Ping Yee <ping at lfw.org>''' + self.send_document('Index of Modules', contents) + + def log_message(self, *args): pass + + class DocServer(BaseHTTPServer.HTTPServer): + def __init__(self, port, callback): + host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost' + self.address = ('', port) + self.url = 'http://%s:%d/' % (host, port) + self.callback = callback + self.base.__init__(self, self.address, self.handler) + + def serve_until_quit(self): + import select + self.quit = False + while not self.quit: + rd, wr, ex = select.select([self.socket.fileno()], [], [], 1) + if rd: self.handle_request() + + def server_activate(self): + self.base.server_activate(self) + if self.callback: self.callback(self) + + DocServer.base = BaseHTTPServer.HTTPServer + DocServer.handler = DocHandler + DocHandler.MessageClass = Message + try: + try: + DocServer(port, callback).serve_until_quit() + except (KeyboardInterrupt, select.error): + pass + finally: + if completer: completer() + +# ----------------------------------------------------- graphical interface + +def gui(): + """Graphical interface (starts web server and pops up a control window).""" + class GUI: + def __init__(self, window, port=7464): + self.window = window + self.server = None + self.scanner = None + + import Tkinter + self.server_frm = Tkinter.Frame(window) + self.title_lbl = Tkinter.Label(self.server_frm, + text='Starting server...\n ') + self.open_btn = Tkinter.Button(self.server_frm, + text='open browser', command=self.open, state='disabled') + self.quit_btn = Tkinter.Button(self.server_frm, + text='quit serving', command=self.quit, state='disabled') + + self.search_frm = Tkinter.Frame(window) + self.search_lbl = Tkinter.Label(self.search_frm, text='Search for') + self.search_ent = Tkinter.Entry(self.search_frm) + self.search_ent.bind('', self.search) + self.stop_btn = Tkinter.Button(self.search_frm, + text='stop', pady=0, command=self.stop, state='disabled') + if sys.platform == 'win32': + # Trying to hide and show this button crashes under Windows. 
+ self.stop_btn.pack(side='right') + + self.window.title('pydoc') + self.window.protocol('WM_DELETE_WINDOW', self.quit) + self.title_lbl.pack(side='top', fill='x') + self.open_btn.pack(side='left', fill='x', expand=1) + self.quit_btn.pack(side='right', fill='x', expand=1) + self.server_frm.pack(side='top', fill='x') + + self.search_lbl.pack(side='left') + self.search_ent.pack(side='right', fill='x', expand=1) + self.search_frm.pack(side='top', fill='x') + self.search_ent.focus_set() + + font = ('helvetica', sys.platform == 'win32' and 8 or 10) + self.result_lst = Tkinter.Listbox(window, font=font, height=6) + self.result_lst.bind('', self.select) + self.result_lst.bind('', self.goto) + self.result_scr = Tkinter.Scrollbar(window, + orient='vertical', command=self.result_lst.yview) + self.result_lst.config(yscrollcommand=self.result_scr.set) + + self.result_frm = Tkinter.Frame(window) + self.goto_btn = Tkinter.Button(self.result_frm, + text='go to selected', command=self.goto) + self.hide_btn = Tkinter.Button(self.result_frm, + text='hide results', command=self.hide) + self.goto_btn.pack(side='left', fill='x', expand=1) + self.hide_btn.pack(side='right', fill='x', expand=1) + + self.window.update() + self.minwidth = self.window.winfo_width() + self.minheight = self.window.winfo_height() + self.bigminheight = (self.server_frm.winfo_reqheight() + + self.search_frm.winfo_reqheight() + + self.result_lst.winfo_reqheight() + + self.result_frm.winfo_reqheight()) + self.bigwidth, self.bigheight = self.minwidth, self.bigminheight + self.expanded = 0 + self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight)) + self.window.wm_minsize(self.minwidth, self.minheight) + self.window.tk.willdispatch() + + import threading + threading.Thread( + target=serve, args=(port, self.ready, self.quit)).start() + + def ready(self, server): + self.server = server + self.title_lbl.config( + text='Python documentation server at\n' + server.url) + self.open_btn.config(state='normal') + self.quit_btn.config(state='normal') + + def open(self, event=None, url=None): + url = url or self.server.url + try: + import webbrowser + webbrowser.open(url) + except ImportError: # pre-webbrowser.py compatibility + if sys.platform == 'win32': + os.system('start "%s"' % url) + elif sys.platform == 'mac': + try: import ic + except ImportError: pass + else: ic.launchurl(url) + else: + rc = os.system('netscape -remote "openURL(%s)" &' % url) + if rc: os.system('netscape "%s" &' % url) + + def quit(self, event=None): + if self.server: + self.server.quit = 1 + self.window.quit() + + def search(self, event=None): + key = self.search_ent.get() + self.stop_btn.pack(side='right') + self.stop_btn.config(state='normal') + self.search_lbl.config(text='Searching for "%s"...' 
% key) + self.search_ent.forget() + self.search_lbl.pack(side='left') + self.result_lst.delete(0, 'end') + self.goto_btn.config(state='disabled') + self.expand() + + import threading + if self.scanner: + self.scanner.quit = 1 + self.scanner = ModuleScanner() + threading.Thread(target=self.scanner.run, + args=(self.update, key, self.done)).start() + + def update(self, path, modname, desc): + if modname[-9:] == '.__init__': + modname = modname[:-9] + ' (package)' + self.result_lst.insert('end', + modname + ' - ' + (desc or '(no description)')) + + def stop(self, event=None): + if self.scanner: + self.scanner.quit = 1 + self.scanner = None + + def done(self): + self.scanner = None + self.search_lbl.config(text='Search for') + self.search_lbl.pack(side='left') + self.search_ent.pack(side='right', fill='x', expand=1) + if sys.platform != 'win32': self.stop_btn.forget() + self.stop_btn.config(state='disabled') + + def select(self, event=None): + self.goto_btn.config(state='normal') + + def goto(self, event=None): + selection = self.result_lst.curselection() + if selection: + modname = split(self.result_lst.get(selection[0]))[0] + self.open(url=self.server.url + modname + '.html') + + def collapse(self): + if not self.expanded: return + self.result_frm.forget() + self.result_scr.forget() + self.result_lst.forget() + self.bigwidth = self.window.winfo_width() + self.bigheight = self.window.winfo_height() + self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight)) + self.window.wm_minsize(self.minwidth, self.minheight) + self.expanded = 0 + + def expand(self): + if self.expanded: return + self.result_frm.pack(side='bottom', fill='x') + self.result_scr.pack(side='right', fill='y') + self.result_lst.pack(side='top', fill='both', expand=1) + self.window.wm_geometry('%dx%d' % (self.bigwidth, self.bigheight)) + self.window.wm_minsize(self.minwidth, self.bigminheight) + self.expanded = 1 + + def hide(self, event=None): + self.stop() + self.collapse() + + import Tkinter + try: + root = Tkinter.Tk() + # Tk will crash if pythonw.exe has an XP .manifest + # file and the root has is not destroyed explicitly. + # If the problem is ever fixed in Tk, the explicit + # destroy can go. + try: + gui = GUI(root) + root.mainloop() + finally: + root.destroy() + except KeyboardInterrupt: + pass + +# -------------------------------------------------- command-line interface + +def ispath(x): + return isinstance(x, str) and find(x, os.sep) >= 0 + +def cli(): + """Command-line interface (looks at sys.argv to decide what to do).""" + import getopt + class BadUsage: pass + + # Scripts don't get the current directory in their path by default. 
+ scriptdir = os.path.dirname(sys.argv[0]) + if scriptdir in sys.path: + sys.path.remove(scriptdir) + sys.path.insert(0, '.') + + try: + opts, args = getopt.getopt(sys.argv[1:], 'gk:p:w') + writing = 0 + + for opt, val in opts: + if opt == '-g': + gui() + return + if opt == '-k': + apropos(val) + return + if opt == '-p': + try: + port = int(val) + except ValueError: + raise BadUsage + def ready(server): + print 'pydoc server ready at %s' % server.url + def stopped(): + print 'pydoc server stopped' + serve(port, ready, stopped) + return + if opt == '-w': + writing = 1 + + if not args: raise BadUsage + for arg in args: + if ispath(arg) and not os.path.exists(arg): + print 'file %r does not exist' % arg + break + try: + if ispath(arg) and os.path.isfile(arg): + arg = importfile(arg) + if writing: + if ispath(arg) and os.path.isdir(arg): + writedocs(arg) + else: + writedoc(arg) + else: + help.help(arg) + except ErrorDuringImport, value: + print value + + except (getopt.error, BadUsage): + cmd = os.path.basename(sys.argv[0]) + print """pydoc - the Python documentation tool + +%s ... + Show text documentation on something. may be the name of a + Python keyword, topic, function, module, or package, or a dotted + reference to a class or function within a module or module in a + package. If contains a '%s', it is used as the path to a + Python source file to document. If name is 'keywords', 'topics', + or 'modules', a listing of these things is displayed. + +%s -k + Search for a keyword in the synopsis lines of all available modules. + +%s -p + Start an HTTP server on the given port on the local machine. + +%s -g + Pop up a graphical interface for finding and serving documentation. + +%s -w ... + Write out the HTML documentation for a module to a file in the current + directory. If contains a '%s', it is treated as a filename; if + it names a directory, documentation is written for all the contents. +""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep) + +if __name__ == '__main__': cli() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,437 @@ +"""Append module search paths for third-party packages to sys.path. + +**************************************************************** +* This module is automatically imported during initialization. * +**************************************************************** + +In earlier versions of Python (up to 1.5a3), scripts or modules that +needed to use site-specific modules would place ``import site'' +somewhere near the top of their code. Because of the automatic +import, this is no longer necessary (but code that does it still +works). + +This will append site-specific paths to the module search path. On +Unix (including Mac OSX), it starts with sys.prefix and +sys.exec_prefix (if different) and appends +lib/python/site-packages as well as lib/site-python. +On other platforms (such as Windows), it tries each of the +prefixes directly, as well as with lib/site-packages appended. The +resulting directories, if they exist, are appended to sys.path, and +also inspected for path configuration files. + +A path configuration file is a file whose name has the form +.pth; its contents are additional directories (one per line) +to be added to sys.path. 
Non-existing directories (or +non-directories) are never added to sys.path; no directory is added to +sys.path more than once. Blank lines and lines beginning with +'#' are skipped. Lines starting with 'import' are executed. + +For example, suppose sys.prefix and sys.exec_prefix are set to +/usr/local and there is a directory /usr/local/lib/python2.5/site-packages +with three subdirectories, foo, bar and spam, and two path +configuration files, foo.pth and bar.pth. Assume foo.pth contains the +following: + + # foo package configuration + foo + bar + bletch + +and bar.pth contains: + + # bar package configuration + bar + +Then the following directories are added to sys.path, in this order: + + /usr/local/lib/python2.5/site-packages/bar + /usr/local/lib/python2.5/site-packages/foo + +Note that bletch is omitted because it doesn't exist; bar precedes foo +because bar.pth comes alphabetically before foo.pth; and spam is +omitted because it is not mentioned in either path configuration file. + +After these path manipulations, an attempt is made to import a module +named sitecustomize, which can perform arbitrary additional +site-specific customizations. If this import fails with an +ImportError exception, it is silently ignored. + +""" + +import sys +import os +import __builtin__ + + +def makepath(*paths): + dir = os.path.abspath(os.path.join(*paths)) + return dir, os.path.normcase(dir) + +def abs__file__(): + """Set all module' __file__ attribute to an absolute path""" + for m in sys.modules.values(): + if hasattr(m, '__loader__'): + continue # don't mess with a PEP 302-supplied __file__ + try: + prev = m.__file__ + new = os.path.abspath(m.__file__) + if prev != new: + m.__file__ = new + except AttributeError: + continue + +def removeduppaths(): + """ Remove duplicate entries from sys.path along with making them + absolute""" + # This ensures that the initial path provided by the interpreter contains + # only absolute pathnames, even if we're running from the build directory. + L = [] + known_paths = set() + for dir in sys.path: + # Filter out duplicate paths (on case-insensitive file systems also + # if they only differ in case); turn relative paths into absolute + # paths. + dir, dircase = makepath(dir) + if not dircase in known_paths: + L.append(dir) + known_paths.add(dircase) + sys.path[:] = L + return known_paths + +# XXX This should not be part of site.py, since it is needed even when +# using the -S option for Python. See http://www.python.org/sf/586680 +def addbuilddir(): + """Append ./build/lib. 
in case we're running in the build dir + (especially for Guido :-)""" + from distutils.util import get_platform + s = "build/lib.%s-%.3s" % (get_platform(), sys.version) + s = os.path.join(os.path.dirname(sys.path[-1]), s) + sys.path.append(s) + +def _init_pathinfo(): + """Return a set containing all existing directory entries from sys.path""" + d = set() + for dir in sys.path: + try: + if os.path.isdir(dir): + dir, dircase = makepath(dir) + d.add(dircase) + except TypeError: + continue + return d + +def addpackage(sitedir, name, known_paths): + """Add a new path to known_paths by combining sitedir and 'name' or execute + sitedir if it starts with 'import'""" + if known_paths is None: + _init_pathinfo() + reset = 1 + else: + reset = 0 + fullname = os.path.join(sitedir, name) + try: + f = open(fullname, "rU") + except IOError: + return + try: + for line in f: + if line.startswith("#"): + continue + if line.startswith("import"): + exec line + continue + line = line.rstrip() + dir, dircase = makepath(sitedir, line) + if not dircase in known_paths and os.path.exists(dir): + sys.path.append(dir) + known_paths.add(dircase) + finally: + f.close() + if reset: + known_paths = None + return known_paths + +def addsitedir(sitedir, known_paths=None): + """Add 'sitedir' argument to sys.path if missing and handle .pth files in + 'sitedir'""" + if known_paths is None: + known_paths = _init_pathinfo() + reset = 1 + else: + reset = 0 + sitedir, sitedircase = makepath(sitedir) + if not sitedircase in known_paths: + sys.path.append(sitedir) # Add path component + try: + names = os.listdir(sitedir) + except os.error: + return + names.sort() + for name in names: + if name.endswith(os.extsep + "pth"): + addpackage(sitedir, name, known_paths) + if reset: + known_paths = None + return known_paths + +def addsitepackages(known_paths): + """Add site-packages (and possibly site-python) to sys.path""" + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs = [os.path.join(prefix, "Lib", "site-packages")] + elif os.sep == '/': + sitedirs = [os.path.join(prefix, + "lib", + "python" + sys.version[:3], + "site-packages"), + os.path.join(prefix, "lib", "site-python")] + else: + sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. Currently only per-user, but /Library and + # /Network/Library could be added too + if 'Python.framework' in prefix: + home = os.environ.get('HOME') + if home: + sitedirs.append( + os.path.join(home, + 'Library', + 'Python', + sys.version[:3], + 'site-packages')) + for sitedir in sitedirs: + if os.path.isdir(sitedir): + addsitedir(sitedir, known_paths) + return None + + +def setBEGINLIBPATH(): + """The OS/2 EMX port has optional extension modules that do double duty + as DLLs (and must use the .DLL file extension) for other extensions. + The library search path needs to be amended so these will be found + during module import. Use BEGINLIBPATH so that these are at the start + of the library search path. + + """ + dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") + libpath = os.environ['BEGINLIBPATH'].split(';') + if libpath[-1]: + libpath.append(dllpath) + else: + libpath[-1] = dllpath + os.environ['BEGINLIBPATH'] = ';'.join(libpath) + + +def setquit(): + """Define new built-ins 'quit' and 'exit'. 
+ These are simply strings that display a hint on how to exit. + + """ + if os.sep == ':': + eof = 'Cmd-Q' + elif os.sep == '\\': + eof = 'Ctrl-Z plus Return' + else: + eof = 'Ctrl-D (i.e. EOF)' + + class Quitter(object): + def __init__(self, name): + self.name = name + def __repr__(self): + return 'Use %s() or %s to exit' % (self.name, eof) + def __call__(self, code=None): + # Shells like IDLE catch the SystemExit, but listen when their + # stdin wrapper is closed. + try: + sys.stdin.close() + except: + pass + raise SystemExit(code) + __builtin__.quit = Quitter('quit') + __builtin__.exit = Quitter('exit') + + +class _Printer(object): + """interactive prompt objects for printing the license text, a list of + contributors and the copyright notice.""" + + MAXLINES = 23 + + def __init__(self, name, data, files=(), dirs=()): + self.__name = name + self.__data = data + self.__files = files + self.__dirs = dirs + self.__lines = None + + def __setup(self): + if self.__lines: + return + data = None + for dir in self.__dirs: + for filename in self.__files: + filename = os.path.join(dir, filename) + try: + fp = file(filename, "rU") + data = fp.read() + fp.close() + break + except IOError: + pass + if data: + break + if not data: + data = self.__data + self.__lines = data.split('\n') + self.__linecnt = len(self.__lines) + + def __repr__(self): + self.__setup() + if len(self.__lines) <= self.MAXLINES: + return "\n".join(self.__lines) + else: + return "Type %s() to see the full %s text" % ((self.__name,)*2) + + def __call__(self): + self.__setup() + prompt = 'Hit Return for more, or q (and Return) to quit: ' + lineno = 0 + while 1: + try: + for i in range(lineno, lineno + self.MAXLINES): + print self.__lines[i] + except IndexError: + break + else: + lineno += self.MAXLINES + key = None + while key is None: + key = raw_input(prompt) + if key not in ('', 'q'): + key = None + if key == 'q': + break + +##def setcopyright(): +## """Set 'copyright' and 'credits' in __builtin__""" +## __builtin__.copyright = _Printer("copyright", sys.copyright) +## if sys.platform[:4] == 'java': +## __builtin__.credits = _Printer( +## "credits", +## "Jython is maintained by the Jython developers (www.jython.org).") +## else: +## __builtin__.credits = _Printer("credits", """\ +## Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands +## for supporting Python development. See www.python.org for more information.""") +## here = os.path.dirname(os.__file__) +## __builtin__.license = _Printer( +## "license", "See http://www.python.org/%.3s/license.html" % sys.version, +## ["LICENSE.txt", "LICENSE"], +## [os.path.join(here, os.pardir), here, os.curdir]) + +def setcopyright(): + # XXX this is the PyPy-specific version. Should be unified with the above. + __builtin__.credits = _Printer( + "credits", + "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy") + __builtin__.license = _Printer( + "license", + "See http://codespeak.net/svn/pypy/dist/LICENSE") + + + +class _Helper(object): + """Define the built-in 'help'. + This is a wrapper around pydoc.help (with a twist). + + """ + + def __repr__(self): + return "Type help() for interactive help, " \ + "or help(object) for help about object." + def __call__(self, *args, **kwds): + import pydoc + return pydoc.help(*args, **kwds) + +def sethelper(): + __builtin__.help = _Helper() + +def aliasmbcs(): + """On Windows, some default encodings are not provided by Python, + while they are always available as "mbcs" in each locale. 
Make + them usable by aliasing to "mbcs" in such a case.""" + if sys.platform == 'win32': + import locale, codecs + enc = locale.getdefaultlocale()[1] + if enc is not None and enc.startswith('cp'): # "cp***" ? + try: + codecs.lookup(enc) + except LookupError: + import encodings + encodings._cache[enc] = encodings._unknown + encodings.aliases.aliases[enc] = 'mbcs' + +def setencoding(): + """Set the string encoding used by the Unicode implementation. The + default is 'ascii', but if you're willing to experiment, you can + change this.""" + encoding = "ascii" # Default value set by _PyUnicode_Init() + if 0: + # Enable to support locale aware default string encodings. + import locale + loc = locale.getdefaultlocale() + if loc[1]: + encoding = loc[1] + if 0: + # Enable to switch off string to Unicode coercion and implicit + # Unicode to string conversion. + encoding = "undefined" + if encoding != "ascii": + # On Non-Unicode builds this will raise an AttributeError... + sys.setdefaultencoding(encoding) # Needs Python Unicode build ! + + +def execsitecustomize(): + """Run custom site specific code, if available.""" + try: + import sitecustomize + except ImportError: + pass + + +def main(): + abs__file__() + paths_in_sys = removeduppaths() + if (os.name == "posix" and sys.path and + os.path.basename(sys.path[-1]) == "Modules"): + addbuilddir() + paths_in_sys = addsitepackages(paths_in_sys) + if sys.platform == 'os2emx': + setBEGINLIBPATH() + setquit() + setcopyright() + sethelper() + aliasmbcs() + setencoding() + execsitecustomize() + # Remove sys.setdefaultencoding() so that users cannot change the + # encoding after initialization. The test for presence is needed when + # this module is run as a script, because this code is executed twice. + if hasattr(sys, "setdefaultencoding"): + del sys.setdefaultencoding + +main() + +def _test(): + print "sys.path = [" + for dir in sys.path: + print " %r," % (dir,) + print "]" + +if __name__ == '__main__': + _test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,428 @@ +# Wrapper module for _socket, providing some additional facilities +# implemented in Python. + +"""\ +This module provides socket operations and some related functions. +On Unix, it supports IP (Internet Protocol) and Unix domain sockets. +On other systems, it only supports IP. Functions specific for a +socket are available as methods of the socket object. + +Functions: + +socket() -- create a new socket object +socketpair() -- create a pair of new socket objects [*] +fromfd() -- create a socket object from an open file descriptor [*] +gethostname() -- return the current hostname +gethostbyname() -- map a hostname to its IP number +gethostbyaddr() -- map an IP number or hostname to DNS info +getservbyname() -- map a service name and a protocol name to a port number +getprotobyname() -- mape a protocol name (e.g. 
'tcp') to a number +ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order +htons(), htonl() -- convert 16, 32 bit int from host to network byte order +inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format +inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) +ssl() -- secure socket layer support (only available if configured) +socket.getdefaulttimeout() -- get the default timeout value +socket.setdefaulttimeout() -- set the default timeout value + + [*] not available on all platforms! + +Special objects: + +SocketType -- type object for socket objects +error -- exception raised for I/O errors +has_ipv6 -- boolean value indicating if IPv6 is supported + +Integer constants: + +AF_INET, AF_UNIX -- socket domains (first argument to socket() call) +SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) + +Many other constants may be defined; these may be used in calls to +the setsockopt() and getsockopt() methods. +""" + +import _socket +from _socket import * + +_have_ssl = False +try: + import _ssl + from _ssl import * + _have_ssl = True +except ImportError: + pass + +import os, sys + +try: + from errno import EBADF +except ImportError: + EBADF = 9 + +__all__ = ["getfqdn"] +__all__.extend(os._get_exports_list(_socket)) +if _have_ssl: + __all__.extend(os._get_exports_list(_ssl)) + +_realsocket = socket +if _have_ssl: + _realssl = ssl + def ssl(sock, keyfile=None, certfile=None): + if hasattr(sock, "_sock"): + sock = sock._sock + return _realssl(sock, keyfile, certfile) + +# WSA error codes +if sys.platform.lower().startswith("win"): + errorTab = {} + errorTab[10004] = "The operation was interrupted." + errorTab[10009] = "A bad file handle was passed." + errorTab[10013] = "Permission denied." + errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT + errorTab[10022] = "An invalid operation was attempted." + errorTab[10035] = "The socket operation would block" + errorTab[10036] = "A blocking operation is already in progress." + errorTab[10048] = "The network address is in use." + errorTab[10054] = "The connection has been reset." + errorTab[10058] = "The network has been shut down." + errorTab[10060] = "The operation timed out." + errorTab[10061] = "Connection refused." + errorTab[10063] = "The name is too long." + errorTab[10064] = "The host is down." + errorTab[10065] = "The host is unreachable." + __all__.append("errorTab") + + + +def getfqdn(name=''): + """Get fully qualified domain name from name. + + An empty argument is interpreted as meaning the local host. + + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available, hostname + from gethostname() is returned. + """ + name = name.strip() + if not name or name == '0.0.0.0': + name = gethostname() + try: + hostname, aliases, ipaddrs = gethostbyaddr(name) + except error: + pass + else: + aliases.insert(0, hostname) + for name in aliases: + if '.' in name: + break + else: + name = hostname + return name + + +_socketmethods = ( + 'bind', 'connect', 'connect_ex', 'fileno', 'listen', + 'getpeername', 'getsockname', 'getsockopt', 'setsockopt', + 'sendall', 'setblocking', + 'settimeout', 'gettimeout', 'shutdown') + +if sys.platform == "riscos": + _socketmethods = _socketmethods + ('sleeptaskw',) + +# All the method names that must be delegated to either the real socket +# object or the _closedsocket object. 
+_delegate_methods = ("recv", "recvfrom", "recv_into", "recvfrom_into", + "send", "sendto") + +class _closedsocket(object): + __slots__ = [] + def _dummy(*args): + raise error(EBADF, 'Bad file descriptor') + def _drop(self): + pass + def _reuse(self): + pass + # All _delegate_methods must also be initialized here. + send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy + __getattr__ = _dummy + +class _socketobject(object): + + __doc__ = _realsocket.__doc__ + + __slots__ = ["_sock", "__weakref__"] + list(_delegate_methods) + + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None): + if _sock is None: + _sock = _realsocket(family, type, proto) + self._sock = _sock + for method in _delegate_methods: + setattr(self, method, getattr(_sock, method)) + + def __del__(self): + self.close() + + + + + def close(self): + self._sock._drop() + self._sock = _closedsocket() + dummy = self._sock._dummy + for method in _delegate_methods: + setattr(self, method, dummy) + close.__doc__ = _realsocket.close.__doc__ + + def accept(self): + sock, addr = self._sock.accept() + return _socketobject(_sock=sock), addr + accept.__doc__ = _realsocket.accept.__doc__ + + def dup(self): + """dup() -> socket object + + Return a new socket object connected to the same system resource.""" + self._sock._reuse() + return _socketobject(_sock=self._sock) + + def makefile(self, mode='r', bufsize=-1): + """makefile([mode[, bufsize]]) -> file object + + Return a regular file object corresponding to the socket. The mode + and bufsize arguments are as for the built-in open() function.""" + self._sock._reuse() + return _fileobject(self._sock, mode, bufsize) + + family = property(lambda self: self._sock.family, doc="the socket family") + type = property(lambda self: self._sock.type, doc="the socket type") + proto = property(lambda self: self._sock.proto, doc="the socket protocol") + + _s = ("def %s(self, *args): return self._sock.%s(*args)\n\n" + "%s.__doc__ = _realsocket.%s.__doc__\n") + for _m in _socketmethods: + exec _s % (_m, _m, _m, _m) + del _m, _s + +socket = SocketType = _socketobject + +class _fileobject(object): + """Faux file object attached to a socket object.""" + + default_bufsize = 8192 + name = "" + + __slots__ = ["mode", "bufsize", "softspace", + # "closed" is a property, see below + "_sock", "_rbufsize", "_wbufsize", "_rbuf", "_wbuf", + "_close"] + + def __init__(self, sock, mode='rb', bufsize=-1, close=False): + self._sock = sock + self.mode = mode # Not actually used in this version + if bufsize < 0: + bufsize = self.default_bufsize + self.bufsize = bufsize + self.softspace = False + if bufsize == 0: + self._rbufsize = 1 + elif bufsize == 1: + self._rbufsize = self.default_bufsize + else: + self._rbufsize = bufsize + self._wbufsize = bufsize + self._rbuf = "" # A string + self._wbuf = [] # A list of strings + self._close = close + + def _getclosed(self): + return self._sock is None + closed = property(_getclosed, doc="True if the file is closed") + + def close(self): + if self._sock: + try: + self.flush() + finally: + if self._sock: + s = self._sock + self._sock = None + s._drop() + + def __del__(self): + try: + self.close() + except: + # close() may fail if __init__ didn't complete + pass + + def flush(self): + if self._wbuf: + buffer = "".join(self._wbuf) + self._wbuf = [] + self._sock.sendall(buffer) + + def fileno(self): + return self._sock.fileno() + + def write(self, data): + data = str(data) # XXX Should really reject non-string non-buffers + if not data: + return + 
self._wbuf.append(data) + if (self._wbufsize == 0 or + self._wbufsize == 1 and '\n' in data or + self._get_wbuf_len() >= self._wbufsize): + self.flush() + + def writelines(self, list): + # XXX We could do better here for very long lists + # XXX Should really reject non-string non-buffers + self._wbuf.extend(filter(None, map(str, list))) + if (self._wbufsize <= 1 or + self._get_wbuf_len() >= self._wbufsize): + self.flush() + + def _get_wbuf_len(self): + buf_len = 0 + for x in self._wbuf: + buf_len += len(x) + return buf_len + + def read(self, size=-1): + data = self._rbuf + if size < 0: + # Read until EOF + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + if self._rbufsize <= 1: + recv_size = self.default_bufsize + else: + recv_size = self._rbufsize + while True: + data = self._sock.recv(recv_size) + if not data: + break + buffers.append(data) + return "".join(buffers) + else: + # Read until size bytes or EOF seen, whichever comes first + buf_len = len(data) + if buf_len >= size: + self._rbuf = data[size:] + return data[:size] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + left = size - buf_len + recv_size = max(self._rbufsize, left) + data = self._sock.recv(recv_size) + if not data: + break + buffers.append(data) + n = len(data) + if n >= left: + self._rbuf = data[left:] + buffers[-1] = data[:left] + break + buf_len += n + return "".join(buffers) + + def readline(self, size=-1): + data = self._rbuf + if size < 0: + # Read until \n or EOF, whichever comes first + if self._rbufsize <= 1: + # Speed up unbuffered case + assert data == "" + buffers = [] + recv = self._sock.recv + while data != "\n": + data = recv(1) + if not data: + break + buffers.append(data) + return "".join(buffers) + nl = data.find('\n') + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + return data[:nl] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + data = self._sock.recv(self._rbufsize) + if not data: + break + buffers.append(data) + nl = data.find('\n') + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + buffers[-1] = data[:nl] + break + return "".join(buffers) + else: + # Read until size bytes or \n or EOF seen, whichever comes first + nl = data.find('\n', 0, size) + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + return data[:nl] + buf_len = len(data) + if buf_len >= size: + self._rbuf = data[size:] + return data[:size] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + data = self._sock.recv(self._rbufsize) + if not data: + break + buffers.append(data) + left = size - buf_len + nl = data.find('\n', 0, left) + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + buffers[-1] = data[:nl] + break + n = len(data) + if n >= left: + self._rbuf = data[left:] + buffers[-1] = data[:left] + break + buf_len += n + return "".join(buffers) + + def readlines(self, sizehint=0): + total = 0 + list = [] + while True: + line = self.readline() + if not line: + break + list.append(line) + total += len(line) + if sizehint and total >= sizehint: + break + return list + + # Iterator protocols + + def __iter__(self): + return self + + def next(self): + line = self.readline() + if not line: + raise StopIteration + return line Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,532 @@ +# +# Secret Labs' 
Regular Expression Engine +# +# convert template to internal format +# +# Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. +# +# See the sre.py file for information on usage and redistribution. +# + +"""Internal support module for sre""" + +import _sre, sys + +from sre_constants import * + +# XXX see PyPy hack in sre_constants to support both the 2.3 and 2.4 _sre.c implementation. +#assert _sre.MAGIC == MAGIC, "SRE module mismatch" + +if _sre.CODESIZE == 2: + MAXCODE = 65535 +else: + MAXCODE = 0xFFFFFFFFL + +def _identityfunction(x): + return x + +def set(seq): + s = {} + for elem in seq: + s[elem] = 1 + return s + +_LITERAL_CODES = set([LITERAL, NOT_LITERAL]) +_REPEATING_CODES = set([REPEAT, MIN_REPEAT, MAX_REPEAT]) +_SUCCESS_CODES = set([SUCCESS, FAILURE]) +_ASSERT_CODES = set([ASSERT, ASSERT_NOT]) + +def _compile(code, pattern, flags): + # internal: compile a (sub)pattern + emit = code.append + _len = len + LITERAL_CODES = _LITERAL_CODES + REPEATING_CODES = _REPEATING_CODES + SUCCESS_CODES = _SUCCESS_CODES + ASSERT_CODES = _ASSERT_CODES + for op, av in pattern: + if op in LITERAL_CODES: + if flags & SRE_FLAG_IGNORECASE: + emit(OPCODES[OP_IGNORE[op]]) + emit(_sre.getlower(av, flags)) + else: + emit(OPCODES[op]) + emit(av) + elif op is IN: + if flags & SRE_FLAG_IGNORECASE: + emit(OPCODES[OP_IGNORE[op]]) + def fixup(literal, flags=flags): + return _sre.getlower(literal, flags) + else: + emit(OPCODES[op]) + fixup = _identityfunction + skip = _len(code); emit(0) + _compile_charset(av, flags, code, fixup) + code[skip] = _len(code) - skip + elif op is ANY: + if flags & SRE_FLAG_DOTALL: + emit(OPCODES[ANY_ALL]) + else: + emit(OPCODES[ANY]) + elif op in REPEATING_CODES: + if flags & SRE_FLAG_TEMPLATE: + raise error, "internal: unsupported template operator" + emit(OPCODES[REPEAT]) + skip = _len(code); emit(0) + emit(av[0]) + emit(av[1]) + _compile(code, av[2], flags) + emit(OPCODES[SUCCESS]) + code[skip] = _len(code) - skip + elif _simple(av) and op is not REPEAT: + if op is MAX_REPEAT: + emit(OPCODES[REPEAT_ONE]) + else: + emit(OPCODES[MIN_REPEAT_ONE]) + skip = _len(code); emit(0) + emit(av[0]) + emit(av[1]) + _compile(code, av[2], flags) + emit(OPCODES[SUCCESS]) + code[skip] = _len(code) - skip + else: + emit(OPCODES[REPEAT]) + skip = _len(code); emit(0) + emit(av[0]) + emit(av[1]) + _compile(code, av[2], flags) + code[skip] = _len(code) - skip + if op is MAX_REPEAT: + emit(OPCODES[MAX_UNTIL]) + else: + emit(OPCODES[MIN_UNTIL]) + elif op is SUBPATTERN: + if av[0]: + emit(OPCODES[MARK]) + emit((av[0]-1)*2) + # _compile_info(code, av[1], flags) + _compile(code, av[1], flags) + if av[0]: + emit(OPCODES[MARK]) + emit((av[0]-1)*2+1) + elif op in SUCCESS_CODES: + emit(OPCODES[op]) + elif op in ASSERT_CODES: + emit(OPCODES[op]) + skip = _len(code); emit(0) + if av[0] >= 0: + emit(0) # look ahead + else: + lo, hi = av[1].getwidth() + if lo != hi: + raise error, "look-behind requires fixed-width pattern" + emit(lo) # look behind + _compile(code, av[1], flags) + emit(OPCODES[SUCCESS]) + code[skip] = _len(code) - skip + elif op is CALL: + emit(OPCODES[op]) + skip = _len(code); emit(0) + _compile(code, av, flags) + emit(OPCODES[SUCCESS]) + code[skip] = _len(code) - skip + elif op is AT: + emit(OPCODES[op]) + if flags & SRE_FLAG_MULTILINE: + av = AT_MULTILINE.get(av, av) + if flags & SRE_FLAG_LOCALE: + av = AT_LOCALE.get(av, av) + elif flags & SRE_FLAG_UNICODE: + av = AT_UNICODE.get(av, av) + emit(ATCODES[av]) + elif op is BRANCH: + emit(OPCODES[op]) + tail = [] + tailappend = tail.append + 
for av in av[1]: + skip = _len(code); emit(0) + # _compile_info(code, av, flags) + _compile(code, av, flags) + emit(OPCODES[JUMP]) + tailappend(_len(code)); emit(0) + code[skip] = _len(code) - skip + emit(0) # end of branch + for tail in tail: + code[tail] = _len(code) - tail + elif op is CATEGORY: + emit(OPCODES[op]) + if flags & SRE_FLAG_LOCALE: + av = CH_LOCALE[av] + elif flags & SRE_FLAG_UNICODE: + av = CH_UNICODE[av] + emit(CHCODES[av]) + elif op is GROUPREF: + if flags & SRE_FLAG_IGNORECASE: + emit(OPCODES[OP_IGNORE[op]]) + else: + emit(OPCODES[op]) + emit(av-1) + elif op is GROUPREF_EXISTS: + emit(OPCODES[op]) + emit(av[0]-1) + skipyes = _len(code); emit(0) + _compile(code, av[1], flags) + if av[2]: + emit(OPCODES[JUMP]) + skipno = _len(code); emit(0) + code[skipyes] = _len(code) - skipyes + 1 + _compile(code, av[2], flags) + code[skipno] = _len(code) - skipno + else: + code[skipyes] = _len(code) - skipyes + 1 + else: + raise ValueError, ("unsupported operand type", op) + +def _compile_charset(charset, flags, code, fixup=None): + # compile charset subprogram + emit = code.append + if fixup is None: + fixup = _identityfunction + for op, av in _optimize_charset(charset, fixup): + emit(OPCODES[op]) + if op is NEGATE: + pass + elif op is LITERAL: + emit(fixup(av)) + elif op is RANGE: + emit(fixup(av[0])) + emit(fixup(av[1])) + elif op is CHARSET: + code.extend(av) + elif op is BIGCHARSET: + code.extend(av) + elif op is CATEGORY: + if flags & SRE_FLAG_LOCALE: + emit(CHCODES[CH_LOCALE[av]]) + elif flags & SRE_FLAG_UNICODE: + emit(CHCODES[CH_UNICODE[av]]) + else: + emit(CHCODES[av]) + else: + raise error, "internal: unsupported set operator" + emit(OPCODES[FAILURE]) + +def _optimize_charset(charset, fixup): + # internal: optimize character set + out = [] + outappend = out.append + charmap = [0]*256 + try: + for op, av in charset: + if op is NEGATE: + outappend((op, av)) + elif op is LITERAL: + charmap[fixup(av)] = 1 + elif op is RANGE: + for i in range(fixup(av[0]), fixup(av[1])+1): + charmap[i] = 1 + elif op is CATEGORY: + # XXX: could append to charmap tail + return charset # cannot compress + except IndexError: + # character set contains unicode characters + return _optimize_unicode(charset, fixup) + # compress character map + i = p = n = 0 + runs = [] + runsappend = runs.append + for c in charmap: + if c: + if n == 0: + p = i + n = n + 1 + elif n: + runsappend((p, n)) + n = 0 + i = i + 1 + if n: + runsappend((p, n)) + if len(runs) <= 2: + # use literal/range + for p, n in runs: + if n == 1: + outappend((LITERAL, p)) + else: + outappend((RANGE, (p, p+n-1))) + if len(out) < len(charset): + return out + else: + # use bitmap + data = _mk_bitmap(charmap) + outappend((CHARSET, data)) + return out + return charset + +def _mk_bitmap(bits): + data = [] + dataappend = data.append + if _sre.CODESIZE == 2: + start = (1, 0) + else: + start = (1L, 0L) + m, v = start + for c in bits: + if c: + v = v + m + m = m + m + if m > MAXCODE: + dataappend(v) + m, v = start + return data + +# To represent a big charset, first a bitmap of all characters in the +# set is constructed. Then, this bitmap is sliced into chunks of 256 +# characters, duplicate chunks are eliminitated, and each chunk is +# given a number. In the compiled expression, the charset is +# represented by a 16-bit word sequence, consisting of one word for +# the number of different chunks, a sequence of 256 bytes (128 words) +# of chunk numbers indexed by their original chunk position, and a +# sequence of chunks (16 words each). 
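# Editor's sketch (not part of the original source): with the layout described
# above and 16-bit code words, testing whether a BMP character ch belongs to a
# compiled BIGCHARSET block [count][256-byte mapping][chunks...] amounts to:
#
#     hi, lo = ord(ch) >> 8, ord(ch) & 0xff
#     chunk = mapping[hi]                   # chunk number for the high byte
#     word = chunks[chunk * 16 + lo // 16]  # each chunk is 16 words of 16 bits
#     matched = (word >> (lo % 16)) & 1
#
# The names mapping/chunks stand for the corresponding slices of the emitted
# data and are illustrative only; the real lookup is done by the C engine
# in _sre.c.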
+ +# Compression is normally good: in a typical charset, large ranges of +# Unicode will be either completely excluded (e.g. if only cyrillic +# letters are to be matched), or completely included (e.g. if large +# subranges of Kanji match). These ranges will be represented by +# chunks of all one-bits or all zero-bits. + +# Matching can be also done efficiently: the more significant byte of +# the Unicode character is an index into the chunk number, and the +# less significant byte is a bit index in the chunk (just like the +# CHARSET matching). + +# In UCS-4 mode, the BIGCHARSET opcode still supports only subsets +# of the basic multilingual plane; an efficient representation +# for all of UTF-16 has not yet been developed. This means, +# in particular, that negated charsets cannot be represented as +# bigcharsets. + +def _optimize_unicode(charset, fixup): + try: + import array + except ImportError: + return charset + charmap = [0]*65536 + negate = 0 + try: + for op, av in charset: + if op is NEGATE: + negate = 1 + elif op is LITERAL: + charmap[fixup(av)] = 1 + elif op is RANGE: + for i in xrange(fixup(av[0]), fixup(av[1])+1): + charmap[i] = 1 + elif op is CATEGORY: + # XXX: could expand category + return charset # cannot compress + except IndexError: + # non-BMP characters + return charset + if negate: + if sys.maxunicode != 65535: + # XXX: negation does not work with big charsets + return charset + for i in xrange(65536): + charmap[i] = not charmap[i] + comps = {} + mapping = [0]*256 + block = 0 + data = [] + for i in xrange(256): + chunk = tuple(charmap[i*256:(i+1)*256]) + new = comps.setdefault(chunk, block) + mapping[i] = new + if new == block: + block = block + 1 + data = data + _mk_bitmap(chunk) + header = [block] + if _sre.CODESIZE == 2: + code = 'H' + else: + code = 'I' + # Convert block indices to byte array of 256 bytes + mapping = array.array('b', mapping).tostring() + # Convert byte array to word array + mapping = array.array(code, mapping) + assert mapping.itemsize == _sre.CODESIZE + header = header + mapping.tolist() + data[0:0] = header + return [(BIGCHARSET, data)] + +def _simple(av): + # check if av is a "simple" operator + lo, hi = av[2].getwidth() + if lo == 0 and hi == MAXREPEAT: + raise error, "nothing to repeat" + return lo == hi == 1 and av[2][0][0] != SUBPATTERN + +def _compile_info(code, pattern, flags): + # internal: compile an info block. 
in the current version, + # this contains min/max pattern width, and an optional literal + # prefix or a character map + lo, hi = pattern.getwidth() + if lo == 0: + return # not worth it + # look for a literal prefix + prefix = [] + prefixappend = prefix.append + prefix_skip = 0 + charset = [] # not used + charsetappend = charset.append + if not (flags & SRE_FLAG_IGNORECASE): + # look for literal prefix + for op, av in pattern.data: + if op is LITERAL: + if len(prefix) == prefix_skip: + prefix_skip = prefix_skip + 1 + prefixappend(av) + elif op is SUBPATTERN and len(av[1]) == 1: + op, av = av[1][0] + if op is LITERAL: + prefixappend(av) + else: + break + else: + break + # if no prefix, look for charset prefix + if not prefix and pattern.data: + op, av = pattern.data[0] + if op is SUBPATTERN and av[1]: + op, av = av[1][0] + if op is LITERAL: + charsetappend((op, av)) + elif op is BRANCH: + c = [] + cappend = c.append + for p in av[1]: + if not p: + break + op, av = p[0] + if op is LITERAL: + cappend((op, av)) + else: + break + else: + charset = c + elif op is BRANCH: + c = [] + cappend = c.append + for p in av[1]: + if not p: + break + op, av = p[0] + if op is LITERAL: + cappend((op, av)) + else: + break + else: + charset = c + elif op is IN: + charset = av +## if prefix: +## print "*** PREFIX", prefix, prefix_skip +## if charset: +## print "*** CHARSET", charset + # add an info block + emit = code.append + emit(OPCODES[INFO]) + skip = len(code); emit(0) + # literal flag + mask = 0 + if prefix: + mask = SRE_INFO_PREFIX + if len(prefix) == prefix_skip == len(pattern.data): + mask = mask + SRE_INFO_LITERAL + elif charset: + mask = mask + SRE_INFO_CHARSET + emit(mask) + # pattern length + if lo < MAXCODE: + emit(lo) + else: + emit(MAXCODE) + prefix = prefix[:MAXCODE] + if hi < MAXCODE: + emit(hi) + else: + emit(0) + # add literal prefix + if prefix: + emit(len(prefix)) # length + emit(prefix_skip) # skip + code.extend(prefix) + # generate overlap table + table = [-1] + ([0]*len(prefix)) + for i in xrange(len(prefix)): + table[i+1] = table[i]+1 + while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]: + table[i+1] = table[table[i+1]-1]+1 + code.extend(table[1:]) # don't store first entry + elif charset: + _compile_charset(charset, flags, code) + code[skip] = len(code) - skip + +try: + unicode +except NameError: + STRING_TYPES = (type(""),) +else: + STRING_TYPES = (type(""), type(unicode(""))) + +def isstring(obj): + for tp in STRING_TYPES: + if isinstance(obj, tp): + return 1 + return 0 + +def _code(p, flags): + + flags = p.pattern.flags | flags + code = [] + + # compile info block + _compile_info(code, p, flags) + + # compile the pattern + _compile(code, p.data, flags) + + code.append(OPCODES[SUCCESS]) + + return code + +def compile(p, flags=0): + # internal: convert pattern list to internal format + + if isstring(p): + import sre_parse + pattern = p + p = sre_parse.parse(p, flags) + else: + pattern = None + + code = _code(p, flags) + + # print code + + # XXX: get rid of this limitation! 
+ if p.pattern.groups > 100: + raise AssertionError( + "sorry, but this version only supports 100 named groups" + ) + + # map in either direction + groupindex = p.pattern.groupdict + indexgroup = [None] * p.pattern.groups + for k, i in groupindex.items(): + indexgroup[i] = k + + return _sre.compile( + pattern, flags, code, + p.pattern.groups-1, + groupindex, indexgroup + ) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,268 @@ +# +# Secret Labs' Regular Expression Engine +# +# various symbols used by the regular expression engine. +# run this script to update the _sre include files! +# +# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. +# +# See the sre.py file for information on usage and redistribution. +# + +"""Internal support module for sre""" + +# update when constants are added or removed + +MAGIC = 20031017 + +# max code word in this release + +MAXREPEAT = 65535 + +# SRE standard exception (access as sre.error) +# should this really be here? + +class error(Exception): + pass + +# operators + +FAILURE = "failure" +SUCCESS = "success" + +ANY = "any" +ANY_ALL = "any_all" +ASSERT = "assert" +ASSERT_NOT = "assert_not" +AT = "at" +BIGCHARSET = "bigcharset" +BRANCH = "branch" +CALL = "call" +CATEGORY = "category" +CHARSET = "charset" +GROUPREF = "groupref" +GROUPREF_IGNORE = "groupref_ignore" +GROUPREF_EXISTS = "groupref_exists" +IN = "in" +IN_IGNORE = "in_ignore" +INFO = "info" +JUMP = "jump" +LITERAL = "literal" +LITERAL_IGNORE = "literal_ignore" +MARK = "mark" +MAX_REPEAT = "max_repeat" +MAX_UNTIL = "max_until" +MIN_REPEAT = "min_repeat" +MIN_UNTIL = "min_until" +NEGATE = "negate" +NOT_LITERAL = "not_literal" +NOT_LITERAL_IGNORE = "not_literal_ignore" +RANGE = "range" +REPEAT = "repeat" +REPEAT_ONE = "repeat_one" +SUBPATTERN = "subpattern" +MIN_REPEAT_ONE = "min_repeat_one" + +# positions +AT_BEGINNING = "at_beginning" +AT_BEGINNING_LINE = "at_beginning_line" +AT_BEGINNING_STRING = "at_beginning_string" +AT_BOUNDARY = "at_boundary" +AT_NON_BOUNDARY = "at_non_boundary" +AT_END = "at_end" +AT_END_LINE = "at_end_line" +AT_END_STRING = "at_end_string" +AT_LOC_BOUNDARY = "at_loc_boundary" +AT_LOC_NON_BOUNDARY = "at_loc_non_boundary" +AT_UNI_BOUNDARY = "at_uni_boundary" +AT_UNI_NON_BOUNDARY = "at_uni_non_boundary" + +# categories +CATEGORY_DIGIT = "category_digit" +CATEGORY_NOT_DIGIT = "category_not_digit" +CATEGORY_SPACE = "category_space" +CATEGORY_NOT_SPACE = "category_not_space" +CATEGORY_WORD = "category_word" +CATEGORY_NOT_WORD = "category_not_word" +CATEGORY_LINEBREAK = "category_linebreak" +CATEGORY_NOT_LINEBREAK = "category_not_linebreak" +CATEGORY_LOC_WORD = "category_loc_word" +CATEGORY_LOC_NOT_WORD = "category_loc_not_word" +CATEGORY_UNI_DIGIT = "category_uni_digit" +CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit" +CATEGORY_UNI_SPACE = "category_uni_space" +CATEGORY_UNI_NOT_SPACE = "category_uni_not_space" +CATEGORY_UNI_WORD = "category_uni_word" +CATEGORY_UNI_NOT_WORD = "category_uni_not_word" +CATEGORY_UNI_LINEBREAK = "category_uni_linebreak" +CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak" + +OPCODES = [ + + # failure=0 success=1 (just because it looks better that way :-) + FAILURE, SUCCESS, + + ANY, ANY_ALL, + ASSERT, ASSERT_NOT, + AT, + BRANCH, + CALL, + CATEGORY, + CHARSET, BIGCHARSET, + GROUPREF, GROUPREF_EXISTS, 
GROUPREF_IGNORE, + IN, IN_IGNORE, + INFO, + JUMP, + LITERAL, LITERAL_IGNORE, + MARK, + MAX_UNTIL, + MIN_UNTIL, + NOT_LITERAL, NOT_LITERAL_IGNORE, + NEGATE, + RANGE, + REPEAT, + REPEAT_ONE, + SUBPATTERN, + MIN_REPEAT_ONE + +] + +# PyPy hack to make the sre_*.py files from 2.4.1 work on the _sre +# engine of 2.3. +import _sre +if _sre.MAGIC < 20031017: + OPCODES.remove(GROUPREF_EXISTS) +del _sre + +ATCODES = [ + AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY, + AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING, + AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY, + AT_UNI_NON_BOUNDARY +] + +CHCODES = [ + CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE, + CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD, + CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD, + CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT, + CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD, + CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK, + CATEGORY_UNI_NOT_LINEBREAK +] + +def makedict(list): + d = {} + i = 0 + for item in list: + d[item] = i + i = i + 1 + return d + +OPCODES = makedict(OPCODES) +ATCODES = makedict(ATCODES) +CHCODES = makedict(CHCODES) + +# replacement operations for "ignore case" mode +OP_IGNORE = { + GROUPREF: GROUPREF_IGNORE, + IN: IN_IGNORE, + LITERAL: LITERAL_IGNORE, + NOT_LITERAL: NOT_LITERAL_IGNORE +} + +AT_MULTILINE = { + AT_BEGINNING: AT_BEGINNING_LINE, + AT_END: AT_END_LINE +} + +AT_LOCALE = { + AT_BOUNDARY: AT_LOC_BOUNDARY, + AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY +} + +AT_UNICODE = { + AT_BOUNDARY: AT_UNI_BOUNDARY, + AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY +} + +CH_LOCALE = { + CATEGORY_DIGIT: CATEGORY_DIGIT, + CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT, + CATEGORY_SPACE: CATEGORY_SPACE, + CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE, + CATEGORY_WORD: CATEGORY_LOC_WORD, + CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD, + CATEGORY_LINEBREAK: CATEGORY_LINEBREAK, + CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK +} + +CH_UNICODE = { + CATEGORY_DIGIT: CATEGORY_UNI_DIGIT, + CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT, + CATEGORY_SPACE: CATEGORY_UNI_SPACE, + CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE, + CATEGORY_WORD: CATEGORY_UNI_WORD, + CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD, + CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK, + CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK +} + +# flags +SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking) +SRE_FLAG_IGNORECASE = 2 # case insensitive +SRE_FLAG_LOCALE = 4 # honour system locale +SRE_FLAG_MULTILINE = 8 # treat target as multiline string +SRE_FLAG_DOTALL = 16 # treat target as a single string +SRE_FLAG_UNICODE = 32 # use unicode locale +SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments +SRE_FLAG_DEBUG = 128 # debugging + +# flags for INFO primitive +SRE_INFO_PREFIX = 1 # has prefix +SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix) +SRE_INFO_CHARSET = 4 # pattern starts with character from given set + +if __name__ == "__main__": + def dump(f, d, prefix): + items = d.items() + items.sort(key=lambda a: a[1]) + for k, v in items: + f.write("#define %s_%s %s\n" % (prefix, k.upper(), v)) + f = open("sre_constants.h", "w") + f.write("""\ +/* + * Secret Labs' Regular Expression Engine + * + * regular expression matching engine + * + * NOTE: This file is generated by sre_constants.py. If you need + * to change anything in here, edit sre_constants.py and run it. + * + * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. 
+ * + * See the _sre.c file for information on usage and redistribution. + */ + +""") + + f.write("#define SRE_MAGIC %d\n" % MAGIC) + + dump(f, OPCODES, "SRE_OP") + dump(f, ATCODES, "SRE") + dump(f, CHCODES, "SRE") + + f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE) + f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE) + f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE) + f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE) + f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL) + f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE) + f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE) + + f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX) + f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL) + f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET) + + f.close() + print "done" Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,1253 @@ +# subprocess - Subprocesses with accessible I/O streams +# +# For more information about this module, see PEP 324. +# +# This module should remain compatible with Python 2.2, see PEP 291. +# +# Copyright (c) 2003-2005 by Peter Astrand +# +# Licensed to PSF under a Contributor Agreement. +# See http://www.python.org/2.4/license for licensing details. + +r"""subprocess - Subprocesses with accessible I/O streams + +This module allows you to spawn processes, connect to their +input/output/error pipes, and obtain their return codes. This module +intends to replace several other, older modules and functions, like: + +os.system +os.spawn* +os.popen* +popen2.* +commands.* + +Information about how the subprocess module can be used to replace these +modules and functions can be found below. + + + +Using the subprocess module +=========================== +This module defines one class called Popen: + +class Popen(args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + + +Arguments are: + +args should be a string, or a sequence of program arguments. The +program to execute is normally the first item in the args sequence or +string, but can be explicitly set by using the executable argument. + +On UNIX, with shell=False (default): In this case, the Popen class +uses os.execvp() to execute the child program. args should normally +be a sequence. A string will be treated as a sequence with the string +as the only item (the program to execute). + +On UNIX, with shell=True: If args is a string, it specifies the +command string to execute through the shell. If args is a sequence, +the first item specifies the command string, and any additional items +will be treated as additional shell arguments. + +On Windows: the Popen class uses CreateProcess() to execute the child +program, which operates on strings. If args is a sequence, it will be +converted to a string using the list2cmdline method. Please note that +not all MS Windows applications interpret the command line the same +way: The list2cmdline is designed for applications using the same +rules as the MS C runtime. 
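A minimal sketch of the two argument forms described above (illustrative only; assumes a POSIX system with /bin/ls and /bin/sh available):

    from subprocess import Popen

    # args as a sequence: the first item names the program, run via os.execvp()
    p = Popen(["ls", "-l", "/tmp"])
    p.wait()

    # args as a single string with shell=True: the string is handed to /bin/sh -c
    p = Popen("ls -l /tmp | wc -l", shell=True)
    p.wait()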
+ +bufsize, if given, has the same meaning as the corresponding argument +to the built-in open() function: 0 means unbuffered, 1 means line +buffered, any other positive value means use a buffer of +(approximately) that size. A negative bufsize means to use the system +default, which usually means fully buffered. The default value for +bufsize is 0 (unbuffered). + +stdin, stdout and stderr specify the executed programs' standard +input, standard output and standard error file handles, respectively. +Valid values are PIPE, an existing file descriptor (a positive +integer), an existing file object, and None. PIPE indicates that a +new pipe to the child should be created. With None, no redirection +will occur; the child's file handles will be inherited from the +parent. Additionally, stderr can be STDOUT, which indicates that the +stderr data from the applications should be captured into the same +file handle as for stdout. + +If preexec_fn is set to a callable object, this object will be called +in the child process just before the child is executed. + +If close_fds is true, all file descriptors except 0, 1 and 2 will be +closed before the child process is executed. + +if shell is true, the specified command will be executed through the +shell. + +If cwd is not None, the current directory will be changed to cwd +before the child is executed. + +If env is not None, it defines the environment variables for the new +process. + +If universal_newlines is true, the file objects stdout and stderr are +opened as a text files, but lines may be terminated by any of '\n', +the Unix end-of-line convention, '\r', the Macintosh convention or +'\r\n', the Windows convention. All of these external representations +are seen as '\n' by the Python program. Note: This feature is only +available if Python is built with universal newline support (the +default). Also, the newlines attribute of the file objects stdout, +stdin and stderr are not updated by the communicate() method. + +The startupinfo and creationflags, if given, will be passed to the +underlying CreateProcess() function. They can specify things such as +appearance of the main window and priority for the new process. +(Windows only) + + +This module also defines two shortcut functions: + +call(*popenargs, **kwargs): + Run command with arguments. Wait for command to complete, then + return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + +check_call(*popenargs, **kwargs): + Run command with arguments. Wait for command to complete. If the + exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + check_call(["ls", "-l"]) + +Exceptions +---------- +Exceptions raised in the child process, before the new program has +started to execute, will be re-raised in the parent. Additionally, +the exception object will have one extra attribute called +'child_traceback', which is a string containing traceback information +from the childs point of view. + +The most common exception raised is OSError. This occurs, for +example, when trying to execute a non-existent file. Applications +should prepare for OSErrors. + +A ValueError will be raised if Popen is called with invalid arguments. + +check_call() will raise CalledProcessError, if the called process +returns a non-zero return code. 
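A minimal sketch of the exception behaviour described above (illustrative only; assumes a POSIX system where the `false` utility exits with status 1 and the given path does not exist):

    from subprocess import call, check_call, CalledProcessError

    try:
        check_call(["false"])              # non-zero exit status -> CalledProcessError
    except CalledProcessError, e:
        print "command failed with return code", e.returncode

    try:
        call(["/no/such/program"])         # exec fails in the child, OSError re-raised here
    except OSError, e:
        print "execution failed:", e
        print e.child_traceback            # traceback from the child's point of view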
+ + +Security +-------- +Unlike some other popen functions, this implementation will never call +/bin/sh implicitly. This means that all characters, including shell +metacharacters, can safely be passed to child processes. + + +Popen objects +============= +Instances of the Popen class have the following methods: + +poll() + Check if child process has terminated. Returns returncode + attribute. + +wait() + Wait for child process to terminate. Returns returncode attribute. + +communicate(input=None) + Interact with process: Send data to stdin. Read data from stdout + and stderr, until end-of-file is reached. Wait for process to + terminate. The optional input argument should be a string to be + sent to the child process, or None, if no data should be sent to + the child. + + communicate() returns a tuple (stdout, stderr). + + Note: The data read is buffered in memory, so do not use this + method if the data size is large or unlimited. + +The following attributes are also available: + +stdin + If the stdin argument is PIPE, this attribute is a file object + that provides input to the child process. Otherwise, it is None. + +stdout + If the stdout argument is PIPE, this attribute is a file object + that provides output from the child process. Otherwise, it is + None. + +stderr + If the stderr argument is PIPE, this attribute is file object that + provides error output from the child process. Otherwise, it is + None. + +pid + The process ID of the child process. + +returncode + The child return code. A None value indicates that the process + hasn't terminated yet. A negative value -N indicates that the + child was terminated by signal N (UNIX only). + + +Replacing older functions with the subprocess module +==================================================== +In this section, "a ==> b" means that b can be used as a replacement +for a. + +Note: All functions in this section fail (more or less) silently if +the executed program cannot be found; this module raises an OSError +exception. + +In the following examples, we assume that the subprocess module is +imported with "from subprocess import *". + + +Replacing /bin/sh shell backquote +--------------------------------- +output=`mycmd myarg` +==> +output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] + + +Replacing shell pipe line +------------------------- +output=`dmesg | grep hda` +==> +p1 = Popen(["dmesg"], stdout=PIPE) +p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) +output = p2.communicate()[0] + + +Replacing os.system() +--------------------- +sts = os.system("mycmd" + " myarg") +==> +p = Popen("mycmd" + " myarg", shell=True) +pid, sts = os.waitpid(p.pid, 0) + +Note: + +* Calling the program through the shell is usually not required. + +* It's easier to look at the returncode attribute than the + exitstatus. 
+ +A more real-world example would look like this: + +try: + retcode = call("mycmd" + " myarg", shell=True) + if retcode < 0: + print >>sys.stderr, "Child was terminated by signal", -retcode + else: + print >>sys.stderr, "Child returned", retcode +except OSError, e: + print >>sys.stderr, "Execution failed:", e + + +Replacing os.spawn* +------------------- +P_NOWAIT example: + +pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") +==> +pid = Popen(["/bin/mycmd", "myarg"]).pid + + +P_WAIT example: + +retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") +==> +retcode = call(["/bin/mycmd", "myarg"]) + + +Vector example: + +os.spawnvp(os.P_NOWAIT, path, args) +==> +Popen([path] + args[1:]) + + +Environment example: + +os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) +==> +Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) + + +Replacing os.popen* +------------------- +pipe = os.popen(cmd, mode='r', bufsize) +==> +pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout + +pipe = os.popen(cmd, mode='w', bufsize) +==> +pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin + + +(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdin, child_stdout) = (p.stdin, p.stdout) + + +(child_stdin, + child_stdout, + child_stderr) = os.popen3(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) +(child_stdin, + child_stdout, + child_stderr) = (p.stdin, p.stdout, p.stderr) + + +(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) +(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout) + + +Replacing popen2.* +------------------ +Note: If the cmd argument to popen2 functions is a string, the command +is executed through /bin/sh. If it is a list, the command is directly +executed. + +(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode) +==> +p = Popen(["somestring"], shell=True, bufsize=bufsize + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdout, child_stdin) = (p.stdout, p.stdin) + + +(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode) +==> +p = Popen(["mycmd", "myarg"], bufsize=bufsize, + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdout, child_stdin) = (p.stdout, p.stdin) + +The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen, +except that: + +* subprocess.Popen raises an exception if the execution fails +* the capturestderr argument is replaced with the stderr argument. +* stdin=PIPE and stdout=PIPE must be specified. +* popen2 closes all filedescriptors by default, but you have to specify + close_fds=True with subprocess.Popen. + + +""" + +import sys +mswindows = (sys.platform == "win32") + +import os +import types +import traceback + +# Exception classes used by this module. +class CalledProcessError(Exception): + """This exception is raised when a process run by check_call() returns + a non-zero exit status. 
The exit status will be stored in the + returncode attribute.""" + def __init__(self, returncode, cmd): + self.returncode = returncode + self.cmd = cmd + def __str__(self): + return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) + + +if mswindows: + import threading + import msvcrt + if 0: # <-- change this to use pywin32 instead of the _subprocess driver + import pywintypes + from win32api import GetStdHandle, STD_INPUT_HANDLE, \ + STD_OUTPUT_HANDLE, STD_ERROR_HANDLE + from win32api import GetCurrentProcess, DuplicateHandle, \ + GetModuleFileName, GetVersion + from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE + from win32pipe import CreatePipe + from win32process import CreateProcess, STARTUPINFO, \ + GetExitCodeProcess, STARTF_USESTDHANDLES, \ + STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE + from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 + else: + from _subprocess import * + class STARTUPINFO: + dwFlags = 0 + hStdInput = None + hStdOutput = None + hStdError = None + wShowWindow = 0 + class pywintypes: + error = IOError +else: + import select + import errno + import fcntl + import pickle + +__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"] + +try: + MAXFD = os.sysconf("SC_OPEN_MAX") +except: + MAXFD = 256 + +# True/False does not exist on 2.2.0 +try: + False +except NameError: + False = 0 + True = 1 + +_active = [] + +def _cleanup(): + for inst in _active[:]: + if inst.poll(_deadstate=sys.maxint) >= 0: + try: + _active.remove(inst) + except ValueError: + # This can happen if two threads create a new Popen instance. + # It's harmless that it was already removed, so ignore. + pass + +PIPE = -1 +STDOUT = -2 + + +def call(*popenargs, **kwargs): + """Run command with arguments. Wait for command to complete, then + return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + """ + return Popen(*popenargs, **kwargs).wait() + + +def check_call(*popenargs, **kwargs): + """Run command with arguments. Wait for command to complete. If + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + check_call(["ls", "-l"]) + """ + retcode = call(*popenargs, **kwargs) + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + if retcode: + raise CalledProcessError(retcode, cmd) + return retcode + + +def list2cmdline(seq): + """ + Translate a sequence of arguments into a command line + string, using the same rules as the MS C runtime: + + 1) Arguments are delimited by white space, which is either a + space or a tab. + + 2) A string surrounded by double quotation marks is + interpreted as a single argument, regardless of white space + contained within. A quoted string can be embedded in an + argument. + + 3) A double quotation mark preceded by a backslash is + interpreted as a literal double quotation mark. + + 4) Backslashes are interpreted literally, unless they + immediately precede a double quotation mark. + + 5) If backslashes immediately precede a double quotation mark, + every pair of backslashes is interpreted as a literal + backslash. If the number of backslashes is odd, the last + backslash escapes the next double quotation mark as + described in rule 3. 
+ """ + + # See + # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp + result = [] + needquote = False + for arg in seq: + bs_buf = [] + + # Add a space to separate this argument from the others + if result: + result.append(' ') + + needquote = (" " in arg) or ("\t" in arg) or arg == "" + if needquote: + result.append('"') + + for c in arg: + if c == '\\': + # Don't know if we need to double yet. + bs_buf.append(c) + elif c == '"': + # Double backspaces. + result.append('\\' * len(bs_buf)*2) + bs_buf = [] + result.append('\\"') + else: + # Normal char + if bs_buf: + result.extend(bs_buf) + bs_buf = [] + result.append(c) + + # Add remaining backspaces, if any. + if bs_buf: + result.extend(bs_buf) + + if needquote: + result.extend(bs_buf) + result.append('"') + + return ''.join(result) + + +class Popen(object): + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + """Create new Popen instance.""" + _cleanup() + + self._child_created = False + if not isinstance(bufsize, (int, long)): + raise TypeError("bufsize must be an integer") + + if mswindows: + if preexec_fn is not None: + raise ValueError("preexec_fn is not supported on Windows " + "platforms") + if close_fds: + raise ValueError("close_fds is not supported on Windows " + "platforms") + else: + # POSIX + if startupinfo is not None: + raise ValueError("startupinfo is only supported on Windows " + "platforms") + if creationflags != 0: + raise ValueError("creationflags is only supported on Windows " + "platforms") + + self.stdin = None + self.stdout = None + self.stderr = None + self.pid = None + self.returncode = None + self.universal_newlines = universal_newlines + + # Input and output objects. The general principle is like + # this: + # + # Parent Child + # ------ ----- + # p2cwrite ---stdin---> p2cread + # c2pread <--stdout--- c2pwrite + # errread <--stderr--- errwrite + # + # On POSIX, the child objects are file descriptors. On + # Windows, these are Windows file handles. The parent objects + # are file descriptors on both platforms. The parent objects + # are None when not using PIPEs. The child objects are None + # when not redirecting. + + (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) = self._get_handles(stdin, stdout, stderr) + + self._execute_child(args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + # On Windows, you cannot just redirect one or two handles: You + # either have to redirect all three or none. If the subprocess + # user has only redirected one or two handles, we are + # automatically creating PIPEs for the rest. We should close + # these after the process is started. See bug #1124861. 
+ if mswindows: + if stdin is None and p2cwrite is not None: + os.close(p2cwrite) + p2cwrite = None + if stdout is None and c2pread is not None: + os.close(c2pread) + c2pread = None + if stderr is None and errread is not None: + os.close(errread) + errread = None + + if p2cwrite: + self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) + if c2pread: + if universal_newlines: + self.stdout = os.fdopen(c2pread, 'rU', bufsize) + else: + self.stdout = os.fdopen(c2pread, 'rb', bufsize) + if errread: + if universal_newlines: + self.stderr = os.fdopen(errread, 'rU', bufsize) + else: + self.stderr = os.fdopen(errread, 'rb', bufsize) + + + def _translate_newlines(self, data): + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + return data + + + def __del__(self): + if not self._child_created: + # We didn't get to successfully create a child process. + return + # In case the child hasn't been waited on, check if it's done. + self.poll(_deadstate=sys.maxint) + if self.returncode is None and _active is not None: + # Child is still running, keep us alive until we can wait on it. + _active.append(self) + + + def communicate(self, input=None): + """Interact with process: Send data to stdin. Read data from + stdout and stderr, until end-of-file is reached. Wait for + process to terminate. The optional input argument should be a + string to be sent to the child process, or None, if no data + should be sent to the child. + + communicate() returns a tuple (stdout, stderr).""" + + # Optimization: If we are only using one pipe, or no pipe at + # all, using select() or threads is unnecessary. + if [self.stdin, self.stdout, self.stderr].count(None) >= 2: + stdout = None + stderr = None + if self.stdin: + if input: + self.stdin.write(input) + self.stdin.close() + elif self.stdout: + stdout = self.stdout.read() + elif self.stderr: + stderr = self.stderr.read() + self.wait() + return (stdout, stderr) + + return self._communicate(input) + + + if mswindows: + # + # Windows methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + if stdin is None and stdout is None and stderr is None: + return (None, None, None, None, None, None) + + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + p2cread = GetStdHandle(STD_INPUT_HANDLE) + if p2cread is not None: + pass + elif stdin is None or stdin == PIPE: + p2cread, p2cwrite = CreatePipe(None, 0) + # Detach and turn into fd + p2cwrite = p2cwrite.Detach() + p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) + elif isinstance(stdin, int): + p2cread = msvcrt.get_osfhandle(stdin) + else: + # Assuming file-like object + p2cread = msvcrt.get_osfhandle(stdin.fileno()) + p2cread = self._make_inheritable(p2cread) + + if stdout is None: + c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) + if c2pwrite is not None: + pass + elif stdout is None or stdout == PIPE: + c2pread, c2pwrite = CreatePipe(None, 0) + # Detach and turn into fd + c2pread = c2pread.Detach() + c2pread = msvcrt.open_osfhandle(c2pread, 0) + elif isinstance(stdout, int): + c2pwrite = msvcrt.get_osfhandle(stdout) + else: + # Assuming file-like object + c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite = self._make_inheritable(c2pwrite) + + if stderr is None: + errwrite = GetStdHandle(STD_ERROR_HANDLE) + if errwrite is not None: + pass + elif stderr is None or stderr == PIPE: + errread, errwrite = CreatePipe(None, 0) + # Detach and 
turn into fd + errread = errread.Detach() + errread = msvcrt.open_osfhandle(errread, 0) + elif stderr == STDOUT: + errwrite = c2pwrite + elif isinstance(stderr, int): + errwrite = msvcrt.get_osfhandle(stderr) + else: + # Assuming file-like object + errwrite = msvcrt.get_osfhandle(stderr.fileno()) + errwrite = self._make_inheritable(errwrite) + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _make_inheritable(self, handle): + """Return a duplicate of handle, which is inheritable""" + return DuplicateHandle(GetCurrentProcess(), handle, + GetCurrentProcess(), 0, 1, + DUPLICATE_SAME_ACCESS) + + + def _find_w9xpopen(self): + """Find and return absolut path to w9xpopen.exe""" + w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + # Eeek - file-not-found - possibly an embedding + # situation - see if we can locate it in sys.exec_prefix + w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + raise RuntimeError("Cannot locate w9xpopen.exe, which is " + "needed for Popen to work with your " + "shell or platform.") + return w9xpopen + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (MS Windows version)""" + + if not isinstance(args, types.StringTypes): + args = list2cmdline(args) + + # Process startup details + if startupinfo is None: + startupinfo = STARTUPINFO() + if None not in (p2cread, c2pwrite, errwrite): + startupinfo.dwFlags |= STARTF_USESTDHANDLES + startupinfo.hStdInput = p2cread + startupinfo.hStdOutput = c2pwrite + startupinfo.hStdError = errwrite + + if shell: + startupinfo.dwFlags |= STARTF_USESHOWWINDOW + startupinfo.wShowWindow = SW_HIDE + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args + if (GetVersion() >= 0x80000000L or + os.path.basename(comspec).lower() == "command.com"): + # Win9x, or using command.com on NT. We need to + # use the w9xpopen intermediate program. For more + # information, see KB Q150956 + # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) + w9xpopen = self._find_w9xpopen() + args = '"%s" %s' % (w9xpopen, args) + # Not passing CREATE_NEW_CONSOLE has been known to + # cause random failures on win9x. Specifically a + # dialog: "Your program accessed mem currently in + # use at xxx" and a hopeful warning about the + # stability of your system. Cost is Ctrl+C wont + # kill children. + creationflags |= CREATE_NEW_CONSOLE + + # Start the process + try: + hp, ht, pid, tid = CreateProcess(executable, args, + # no special security + None, None, + # must inherit handles to pass std + # handles + 1, + creationflags, + env, + cwd, + startupinfo) + except pywintypes.error, e: + # Translate pywintypes.error to WindowsError, which is + # a subclass of OSError. FIXME: We should really + # translate errno using _sys_errlist (or simliar), but + # how can this be done from Python? + raise WindowsError(*e.args) + + # Retain the process handle, but close the thread handle + self._child_created = True + self._handle = hp + self.pid = pid + ht.Close() + + # Child is launched. Close the parent's copy of those pipe + # handles that only the child should have open. 
You need + # to make sure that no handles to the write end of the + # output pipe are maintained in this process or else the + # pipe will not close when the child process exits and the + # ReadFile will hang. + if p2cread is not None: + p2cread.Close() + if c2pwrite is not None: + c2pwrite.Close() + if errwrite is not None: + errwrite.Close() + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def wait(self): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + obj = WaitForSingleObject(self._handle, INFINITE) + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def _readerthread(self, fh, buffer): + buffer.append(fh.read()) + + + def _communicate(self, input): + stdout = None # Return + stderr = None # Return + + if self.stdout: + stdout = [] + stdout_thread = threading.Thread(target=self._readerthread, + args=(self.stdout, stdout)) + stdout_thread.setDaemon(True) + stdout_thread.start() + if self.stderr: + stderr = [] + stderr_thread = threading.Thread(target=self._readerthread, + args=(self.stderr, stderr)) + stderr_thread.setDaemon(True) + stderr_thread.start() + + if self.stdin: + if input is not None: + self.stdin.write(input) + self.stdin.close() + + if self.stdout: + stdout_thread.join() + if self.stderr: + stderr_thread.join() + + # All data exchanged. Translate lists into strings. + if stdout is not None: + stdout = stdout[0] + if stderr is not None: + stderr = stderr[0] + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). 
+ if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + else: + # + # POSIX methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + pass + elif stdin == PIPE: + p2cread, p2cwrite = os.pipe() + elif isinstance(stdin, int): + p2cread = stdin + else: + # Assuming file-like object + p2cread = stdin.fileno() + + if stdout is None: + pass + elif stdout == PIPE: + c2pread, c2pwrite = os.pipe() + elif isinstance(stdout, int): + c2pwrite = stdout + else: + # Assuming file-like object + c2pwrite = stdout.fileno() + + if stderr is None: + pass + elif stderr == PIPE: + errread, errwrite = os.pipe() + elif stderr == STDOUT: + errwrite = c2pwrite + elif isinstance(stderr, int): + errwrite = stderr + else: + # Assuming file-like object + errwrite = stderr.fileno() + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _set_cloexec_flag(self, fd): + try: + cloexec_flag = fcntl.FD_CLOEXEC + except AttributeError: + cloexec_flag = 1 + + old = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) + + + def _close_fds(self, but): + for i in xrange(3, MAXFD): + if i == but: + continue + try: + os.close(i) + except: + pass + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (POSIX version)""" + + if isinstance(args, types.StringTypes): + args = [args] + else: + args = list(args) + + if shell: + args = ["/bin/sh", "-c"] + args + + if executable is None: + executable = args[0] + + # For transferring possible exec failure from child to parent + # The first char specifies the exception type: 0 means + # OSError, 1 means some other error. + errpipe_read, errpipe_write = os.pipe() + self._set_cloexec_flag(errpipe_write) + + in_child = False + gc.disable_finalizers() + try: + self.pid = os.fork() + in_child = (self.pid == 0) + finally: + if not in_child: + gc.enable_finalizers() + self._child_created = True + if self.pid == 0: + # Child + try: + # Close parent's pipe ends + if p2cwrite: + os.close(p2cwrite) + if c2pread: + os.close(c2pread) + if errread: + os.close(errread) + os.close(errpipe_read) + + # Dup fds for child + if p2cread: + os.dup2(p2cread, 0) + if c2pwrite: + os.dup2(c2pwrite, 1) + if errwrite: + os.dup2(errwrite, 2) + + # Close pipe fds. Make sure we don't close the same + # fd more than once, or standard fds. 
+ if p2cread and p2cread not in (0,): + os.close(p2cread) + if c2pwrite and c2pwrite not in (p2cread, 1): + os.close(c2pwrite) + if errwrite and errwrite not in (p2cread, c2pwrite, 2): + os.close(errwrite) + + # Close all other fds, if asked for + if close_fds: + self._close_fds(but=errpipe_write) + + if cwd is not None: + os.chdir(cwd) + + if preexec_fn: + apply(preexec_fn) + + if env is None: + os.execvp(executable, args) + else: + os.execvpe(executable, args, env) + + except: + exc_type, exc_value, tb = sys.exc_info() + # Save the traceback and attach it to the exception object + exc_lines = traceback.format_exception(exc_type, + exc_value, + tb) + exc_value.child_traceback = ''.join(exc_lines) + os.write(errpipe_write, pickle.dumps(exc_value)) + + # This exitcode won't be reported to applications, so it + # really doesn't matter what we return. + os._exit(255) + + # Parent + os.close(errpipe_write) + if p2cread and p2cwrite: + os.close(p2cread) + if c2pwrite and c2pread: + os.close(c2pwrite) + if errwrite and errread: + os.close(errwrite) + + # Wait for exec to fail or succeed; possibly raising exception + data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB + os.close(errpipe_read) + if data != "": + os.waitpid(self.pid, 0) + child_exception = pickle.loads(data) + raise child_exception + + + def _handle_exitstatus(self, sts): + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + elif os.WIFEXITED(sts): + self.returncode = os.WEXITSTATUS(sts) + else: + # Should never happen + raise RuntimeError("Unknown child exit status!") + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + try: + pid, sts = os.waitpid(self.pid, os.WNOHANG) + if pid == self.pid: + self._handle_exitstatus(sts) + except os.error: + if _deadstate is not None: + self.returncode = _deadstate + return self.returncode + + + def wait(self): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + pid, sts = os.waitpid(self.pid, 0) + self._handle_exitstatus(sts) + return self.returncode + + + def _communicate(self, input): + read_set = [] + write_set = [] + stdout = None # Return + stderr = None # Return + + if self.stdin: + # Flush stdio buffer. This might block, if the user has + # been writing to .stdin in an uncontrolled fashion. + self.stdin.flush() + if input: + write_set.append(self.stdin) + else: + self.stdin.close() + if self.stdout: + read_set.append(self.stdout) + stdout = [] + if self.stderr: + read_set.append(self.stderr) + stderr = [] + + input_offset = 0 + while read_set or write_set: + rlist, wlist, xlist = select.select(read_set, write_set, []) + + if self.stdin in wlist: + # When select has indicated that the file is writable, + # we can write up to PIPE_BUF bytes without risk + # blocking. POSIX defines PIPE_BUF >= 512 + bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512)) + input_offset += bytes_written + if input_offset >= len(input): + self.stdin.close() + write_set.remove(self.stdin) + + if self.stdout in rlist: + data = os.read(self.stdout.fileno(), 1024) + if data == "": + self.stdout.close() + read_set.remove(self.stdout) + stdout.append(data) + + if self.stderr in rlist: + data = os.read(self.stderr.fileno(), 1024) + if data == "": + self.stderr.close() + read_set.remove(self.stderr) + stderr.append(data) + + # All data exchanged. Translate lists into strings. 
+ if stdout is not None: + stdout = ''.join(stdout) + if stderr is not None: + stderr = ''.join(stderr) + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). + if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + +def _demo_posix(): + # + # Example 1: Simple redirection: Get process list + # + plist = Popen(["ps"], stdout=PIPE).communicate()[0] + print "Process list:" + print plist + + # + # Example 2: Change uid before executing child + # + if os.getuid() == 0: + p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) + p.wait() + + # + # Example 3: Connecting several subprocesses + # + print "Looking for 'hda'..." + p1 = Popen(["dmesg"], stdout=PIPE) + p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 4: Catch execution error + # + print + print "Trying a weird file..." + try: + print Popen(["/this/path/does/not/exist"]).communicate() + except OSError, e: + if e.errno == errno.ENOENT: + print "The file didn't exist. I thought so..." + print "Child traceback:" + print e.child_traceback + else: + print "Error", e.errno + else: + print >>sys.stderr, "Gosh. No error." + + +def _demo_windows(): + # + # Example 1: Connecting several subprocesses + # + print "Looking for 'PROMPT' in set output..." + p1 = Popen("set", stdout=PIPE, shell=True) + p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 2: Simple execution of program + # + print "Executing calc..." + p = Popen("calc") + p.wait() + + +if __name__ == "__main__": + if mswindows: + _demo_windows() + else: + _demo_posix() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/tarfile.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/tarfile.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,2176 @@ +#!/usr/bin/env python +# -*- coding: iso-8859-1 -*- +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gust?bel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +"""Read from and write to tar format archives. +""" + +__version__ = "$Revision: 53162 $" +# $Source$ + +version = "0.8.0" +__author__ = "Lars Gust?bel (lars at gustaebel.de)" +__date__ = "$Date: 2006-12-27 21:36:58 +1100 (Wed, 27 Dec 2006) $" +__cvsid__ = "$Id: tarfile.py 53162 2006-12-27 10:36:58Z lars.gustaebel $" +__credits__ = "Gustavo Niemeyer, Niels Gust?bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import shutil +import stat +import errno +import time +import struct +import copy + +if sys.platform == 'mac': + # This module needs work for MacOS9, especially in the area of pathname + # handling. In many places it is assumed a simple substitution of / by the + # local os.path.sep is good enough to convert pathnames, but this does not + # work with the mac rooted:path:name versus :nonrooted:path:name syntax + raise ImportError, "tarfile does not work for platform==mac" + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = "\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +MAGIC = "ustar" # magic tar string +VERSION = "00" # version number + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field +MAXSIZE_MEMBER = 077777777777L # maximum size of a file (11 octal digits) + +REGTYPE = "0" # regular file +AREGTYPE = "\0" # regular file +LNKTYPE = "1" # link (inside tarfile) +SYMTYPE = "2" # symbolic link +CHRTYPE = "3" # character special device +BLKTYPE = "4" # block special device +DIRTYPE = "5" # directory +FIFOTYPE = "6" # fifo special device +CONTTYPE = "7" # contiguous file + +GNUTYPE_LONGNAME = "L" # GNU tar extension for longnames +GNUTYPE_LONGLINK = "K" # GNU tar extension for longlink +GNUTYPE_SPARSE = "S" # GNU tar extension for sparse file + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, # file types that tarfile + SYMTYPE, DIRTYPE, FIFOTYPE, # can cope with. + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +REGULAR_TYPES = (REGTYPE, AREGTYPE, # file types that somehow + CONTTYPE, GNUTYPE_SPARSE) # represent regular files + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. 
+#--------------------------------------------------------- +S_IFLNK = 0120000 # symbolic link +S_IFREG = 0100000 # regular file +S_IFBLK = 0060000 # block device +S_IFDIR = 0040000 # directory +S_IFCHR = 0020000 # character device +S_IFIFO = 0010000 # fifo + +TSUID = 04000 # set UID on execution +TSGID = 02000 # set GID on execution +TSVTX = 01000 # reserved + +TUREAD = 0400 # read by owner +TUWRITE = 0200 # write by owner +TUEXEC = 0100 # execute/search by owner +TGREAD = 0040 # read by group +TGWRITE = 0020 # write by group +TGEXEC = 0010 # execute/search by group +TOREAD = 0004 # read by other +TOWRITE = 0002 # write by other +TOEXEC = 0001 # execute/search by other + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length): + """Convert a python string to a null-terminated string buffer. + """ + return s[:length] + (length - len(s)) * NUL + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0200): + n = int(s.rstrip(NUL + " ") or "0", 8) + else: + n = 0L + for i in xrange(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, posix=False): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = "%0*o" % (digits - 1, n) + NUL + else: + if posix: + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = "" + for i in xrange(digits - 1): + s = chr(n & 0377) + s + n >>= 8 + s = chr(0200) + s + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. + """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. 
+ """ + if length == 0: + return + if length is None: + shutil.copyfileobj(src, dst) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in xrange(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +if os.sep != "/": + normpath = lambda path: os.path.normpath(path).replace(os.sep, "/") +else: + normpath = os.path.normpath + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadble tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile: + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream: + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. + + _Stream is intended to be used only internally. + """ + + def __init__(self, name, mode, comptype, fileobj, bufsize): + """Construct a _Stream object. 
+ """ + self._extfileobj = True + if fileobj is None: + fileobj = _LowLevelFile(name, mode) + self._extfileobj = False + + if comptype == '*': + # Enable transparent compression detection for the + # stream interface + fileobj = _StreamProxy(fileobj) + comptype = fileobj.getcomptype() + + self.name = name or "" + self.mode = mode + self.comptype = comptype + self.fileobj = fileobj + self.bufsize = bufsize + self.buf = "" + self.pos = 0L + self.closed = False + + if comptype == "gz": + try: + import zlib + except ImportError: + raise CompressionError("zlib module is not available") + self.zlib = zlib + self.crc = zlib.crc32("") + if mode == "r": + self._init_read_gz() + else: + self._init_write_gz() + + if comptype == "bz2": + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + if mode == "r": + self.dbuf = "" + self.cmp = bz2.BZ2Decompressor() + else: + self.cmp = bz2.BZ2Compressor() + + def __del__(self): + if hasattr(self, "closed") and not self.closed: + self.close() + + def _init_write_gz(self): + """Initialize for writing with gzip compression. + """ + self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) + timestamp = struct.pack(" self.bufsize: + self.fileobj.write(self.buf[:self.bufsize]) + self.buf = self.buf[self.bufsize:] + + def close(self): + """Close the _Stream object. No operation should be + done on it afterwards. + """ + if self.closed: + return + + if self.mode == "w" and self.comptype != "tar": + self.buf += self.cmp.flush() + + if self.mode == "w" and self.buf: + self.fileobj.write(self.buf) + self.buf = "" + if self.comptype == "gz": + # The native zlib crc is an unsigned 32-bit integer, but + # the Python wrapper implicitly casts that to a signed C + # long. So, on a 32-bit box self.crc may "look negative", + # while the same crc on a 64-bit box may "look positive". + # To avoid irksome warnings from the `struct` module, force + # it to look positive on all boxes. + self.fileobj.write(struct.pack("= 0: + blocks, remainder = divmod(pos - self.pos, self.bufsize) + for i in xrange(blocks): + self.read(self.bufsize) + self.read(remainder) + else: + raise StreamError("seeking backwards is not allowed") + return self.pos + + def read(self, size=None): + """Return the next size number of bytes from the stream. + If size is not defined, return all bytes of the stream + up to EOF. + """ + if size is None: + t = [] + while True: + buf = self._read(self.bufsize) + if not buf: + break + t.append(buf) + buf = "".join(t) + else: + buf = self._read(size) + self.pos += len(buf) + return buf + + def _read(self, size): + """Return size bytes from the stream. + """ + if self.comptype == "tar": + return self.__read(size) + + c = len(self.dbuf) + t = [self.dbuf] + while c < size: + buf = self.__read(self.bufsize) + if not buf: + break + buf = self.cmp.decompress(buf) + t.append(buf) + c += len(buf) + t = "".join(t) + self.dbuf = t[size:] + return t[:size] + + def __read(self, size): + """Return size bytes from stream. If internal buffer is empty, + read another block from the stream. + """ + c = len(self.buf) + t = [self.buf] + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + t.append(buf) + c += len(buf) + t = "".join(t) + self.buf = t[size:] + return t[:size] +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). 
+ """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith("\037\213\010"): + return "gz" + if self.buf.startswith("BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = "" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + b = [self.buf] + x = len(self.buf) + while x < size: + try: + raw = self.fileobj.read(self.blocksize) + data = self.bz2obj.decompress(raw) + b.append(data) + except EOFError: + break + x += len(data) + self.buf = "".join(b) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) + self.fileobj.close() +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, sparse=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.sparse = sparse + self.position = 0 + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. + """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + if self.sparse is None: + return self.readnormal(size) + else: + return self.readsparse(size) + + def readnormal(self, size): + """Read operation for regular files. + """ + self.fileobj.seek(self.offset + self.position) + self.position += size + return self.fileobj.read(size) + + def readsparse(self, size): + """Read operation for sparse files. + """ + data = [] + while size > 0: + buf = self.readsparsesection(size) + if not buf: + break + size -= len(buf) + data.append(buf) + return "".join(data) + + def readsparsesection(self, size): + """Read a single section of a sparse file. 
+ """ + section = self.sparse.find(self.position) + + if section is None: + return "" + + size = min(size, section.offset + section.size - self.position) + + if isinstance(section, _data): + realpos = section.realpos + self.position - section.offset + self.fileobj.seek(self.offset + realpos) + self.position += size + return self.fileobj.read(size) + else: + self.position += size + return NUL * size +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + getattr(tarinfo, "sparse", None)) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = "" + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = "" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = "" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if "\n" in self.buffer: + pos = self.buffer.find("\n") + 1 + else: + buffers = [self.buffer] + while True: + buf = self.fileobj.read(self.blocksize) + buffers.append(buf) + if not buf or "\n" in buf: + self.buffer = "".join(buffers) + pos = self.buffer.find("\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = "" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. + """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. 
+ TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name (dirnames must end with '/') + self.mode = 0666 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "user" # user name + self.gname = "group" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + @classmethod + def frombuf(cls, buf): + """Construct a TarInfo object from a 512 byte string buffer. + """ + if len(buf) != BLOCKSIZE: + raise ValueError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise ValueError("empty header") + + tarinfo = cls() + tarinfo.buf = buf + tarinfo.name = buf[0:100].rstrip(NUL) + tarinfo.mode = nti(buf[100:108]) + tarinfo.uid = nti(buf[108:116]) + tarinfo.gid = nti(buf[116:124]) + tarinfo.size = nti(buf[124:136]) + tarinfo.mtime = nti(buf[136:148]) + tarinfo.chksum = nti(buf[148:156]) + tarinfo.type = buf[156:157] + tarinfo.linkname = buf[157:257].rstrip(NUL) + tarinfo.uname = buf[265:297].rstrip(NUL) + tarinfo.gname = buf[297:329].rstrip(NUL) + tarinfo.devmajor = nti(buf[329:337]) + tarinfo.devminor = nti(buf[337:345]) + prefix = buf[345:500].rstrip(NUL) + + if prefix and not tarinfo.issparse(): + tarinfo.name = prefix + "/" + tarinfo.name + + if tarinfo.chksum not in calc_chksums(buf): + raise ValueError("invalid header") + return tarinfo + + def tobuf(self, posix=False): + """Return a tar header as a string of 512 byte blocks. + """ + buf = "" + type = self.type + prefix = "" + + if self.name.endswith("/"): + type = DIRTYPE + + if type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + # Prevent "././@LongLink" from being normalized. + name = self.name + else: + name = normpath(self.name) + + if type == DIRTYPE: + # directories should end with '/' + name += "/" + + linkname = self.linkname + if linkname: + # if linkname is empty we end up with a '.' 
+ linkname = normpath(linkname) + + if posix: + if self.size > MAXSIZE_MEMBER: + raise ValueError("file is too large (>= 8 GB)") + + if len(self.linkname) > LENGTH_LINK: + raise ValueError("linkname is too long (>%d)" % (LENGTH_LINK)) + + if len(name) > LENGTH_NAME: + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + + else: + if len(self.linkname) > LENGTH_LINK: + buf += self._create_gnulong(self.linkname, GNUTYPE_LONGLINK) + + if len(name) > LENGTH_NAME: + buf += self._create_gnulong(name, GNUTYPE_LONGNAME) + + parts = [ + stn(name, 100), + itn(self.mode & 07777, 8, posix), + itn(self.uid, 8, posix), + itn(self.gid, 8, posix), + itn(self.size, 12, posix), + itn(self.mtime, 12, posix), + " ", # checksum field + type, + stn(self.linkname, 100), + stn(MAGIC, 6), + stn(VERSION, 2), + stn(self.uname, 32), + stn(self.gname, 32), + itn(self.devmajor, 8, posix), + itn(self.devminor, 8, posix), + stn(prefix, 155) + ] + + buf += struct.pack("%ds" % BLOCKSIZE, "".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + "%06o\0" % chksum + buf[-357:] + self.buf = buf + return buf + + def _create_gnulong(self, name, type): + """Create a GNU longname/longlink header from name. + It consists of an extended tar header, with the length + of the longname as size, followed by data blocks, + which contain the longname as a null terminated string. + """ + name += NUL + + tarinfo = self.__class__() + tarinfo.name = "././@LongLink" + tarinfo.type = type + tarinfo.mode = 0 + tarinfo.size = len(name) + + # create extended header + buf = tarinfo.tobuf() + # create name blocks + buf += name + blocks, remainder = divmod(len(name), BLOCKSIZE) + if remainder > 0: + buf += (BLOCKSIZE - remainder) * NUL + return buf + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.type == GNUTYPE_SPARSE + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 0 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + posix = False # If True, generates POSIX.1-1990-compliant + # archives (no GNU extensions!) + + fileobject = ExFileObject + + def __init__(self, name=None, mode="r", fileobj=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. 
+ `fileobj' is not closed, when TarFile is closed. + """ + self.name = os.path.abspath(name) + + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self._mode = mode + self.mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + fileobj = file(self.name, self.mode) + self._extfileobj = False + else: + if self.name is None and hasattr(fileobj, "name"): + self.name = os.path.abspath(fileobj.name) + if hasattr(fileobj, "mode"): + self.mode = fileobj.mode + self._extfileobj = True + self.fileobj = fileobj + + # Init datastructures + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = 0L # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + if self._mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self._mode == "a": + # Move to the end of the archive, + # before the first empty block. + self.firstmember = None + while True: + try: + tarinfo = self.next() + except ReadError: + self.fileobj.seek(0) + break + if tarinfo is None: + self.fileobj.seek(- BLOCKSIZE, 1) + break + + if self._mode in "aw": + self._loaded = True + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=20*512): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. 
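# Illustrative usage sketch for the mode strings documented above; the file
# names and the sock_file object are hypothetical examples only:
#   tar = TarFile.open("example.tar")                    # 'r'/'r:*': autodetect
#   tar = TarFile.open("example.tar.gz", "r:gz")         # explicit gzip
#   tar = TarFile.open("example.tar.bz2", "w:bz2")       # write bzip2
#   tar = TarFile.open(fileobj=sock_file, mode="r|gz")   # non-seekable stream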
+ for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj) + except (ReadError, CompressionError): + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + t = cls(name, filemode, + _Stream(name, filemode, comptype, fileobj, bufsize)) + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + if fileobj is None: + fileobj = file(name, mode + "b") + + try: + t = cls.taropen(name, mode, + gzip.GzipFile(name, mode, compresslevel, fileobj)) + except IOError: + raise ReadError("not a gzip file") + t._extfileobj = False + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9): + """Open bzip2 compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj) + except IOError: + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. 
+ """ + if self.closed: + return + + if self._mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + arcname = normpath(arcname) + drv, arcname = os.path.splitdrive(arcname) + while arcname[0:1] == "/": + arcname = arcname[1:] + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = TarInfo() + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. + if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and \ + statres.st_nlink > 1 and inode in self.inodes: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + if arcname[-1:] != "/": + arcname += "/" + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. 
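# Illustrative caller-side sketch for gettarinfo()/addfile() (hypothetical
# file names, not part of the library code):
#   ti = tar.gettarinfo("data.bin", arcname="pkg/data.bin")
#   ti.uname = "builder"          # attributes may be edited before adding
#   f = file("data.bin", "rb")
#   tar.addfile(ti, f)
#   f.close()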
+ tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if stat.S_ISREG(stmd): + tarinfo.size = statres.st_size + else: + tarinfo.size = 0L + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. + """ + self._check() + + for tarinfo in self: + if verbose: + print filemode(tarinfo.mode), + print "%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), + if tarinfo.ischr() or tarinfo.isblk(): + print "%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), + else: + print "%10d" % tarinfo.size, + print "%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], + + print tarinfo.name, + + if verbose: + if tarinfo.issym(): + print "->", tarinfo.linkname, + if tarinfo.islnk(): + print "link to", tarinfo.linkname, + print + + def add(self, name, arcname=None, recursive=True): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting `recursive' to False. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Skip if somebody tries to archive the archive... + if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + # Special case: The user wants to add the current + # working directory. + if name == ".": + if recursive: + if arcname == ".": + arcname = "" + for f in os.listdir("."): + self.add(f, os.path.join(arcname, f)) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = file(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f)) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.posix) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. 
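# Worked example of the padding arithmetic applied just below (assuming
# BLOCKSIZE == 512, the standard tar block size):
#   tarinfo.size == 1000  ->  divmod(1000, 512) == (1, 488)
#   488 bytes of the second block are data, 512 - 488 == 24 NUL bytes pad it,
#   so the member's data occupies 2 * 512 == 1024 bytes of the archive.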
+ if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directory with a safe mode, so that + # all files below can be extracted as well. + try: + os.makedirs(os.path.join(path, tarinfo.name), 0777) + except EnvironmentError: + pass + directories.append(tarinfo) + else: + self.extract(tarinfo, path) + + # Reverse sort directories. + directories.sort(lambda a, b: cmp(a.name, b.name)) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + path = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, path) + self.utime(tarinfo, path) + self.chmod(tarinfo, path) + except ExtractError, e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path=""): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. + """ + self._check("r") + + if isinstance(member, TarInfo): + tarinfo = member + else: + tarinfo = self.getmember(member) + + # Prepare the link target for makelink(). + if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name)) + except EnvironmentError, e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError, e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, TarInfo): + tarinfo = member + else: + tarinfo = self.getmember(member) + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. 
+ raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._getmember(tarinfo.linkname, + tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + if targetpath[-1:] == "/": + targetpath = targetpath[:-1] + targetpath = os.path.normpath(targetpath) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + ti = TarInfo() + ti.name = upperdirs + ti.type = DIRTYPE + ti.mode = 0777 + ti.mtime = tarinfo.mtime + ti.uid = tarinfo.uid + ti.gid = tarinfo.gid + ti.uname = tarinfo.uname + ti.gname = tarinfo.gname + try: + self._extract_member(ti, ti.name) + except: + pass + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. + + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + os.mkdir(targetpath) + except EnvironmentError, e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.extractfile(tarinfo) + target = file(targetpath, "wb") + copyfileobj(source, target) + source.close() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. 
If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + linkpath = tarinfo.linkname + try: + if tarinfo.issym(): + os.symlink(linkpath, targetpath) + else: + # See extract(). + os.link(tarinfo._link_target, targetpath) + except AttributeError: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + linkpath) + linkpath = normpath(linkpath) + + try: + self._extract_member(self.getmember(linkpath), targetpath) + except (EnvironmentError, KeyError), e: + linkpath = os.path.normpath(linkpath) + try: + shutil.copy2(linkpath, targetpath) + except EnvironmentError, e: + raise IOError("link could not be created") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + try: + g = grp.getgrgid(tarinfo.gid)[2] + except KeyError: + g = os.getgid() + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + try: + u = pwd.getpwuid(tarinfo.uid)[2] + except KeyError: + u = os.getuid() + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError, e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError, e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. + """ + if not hasattr(os, 'utime'): + return + if sys.platform == "win32" and tarinfo.isdir(): + # According to msdn.microsoft.com, it is an error (EACCES) + # to use utime() on directories. + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError, e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + while True: + buf = self.fileobj.read(BLOCKSIZE) + if not buf: + return None + + try: + tarinfo = TarInfo.frombuf(buf) + + # Set the TarInfo object's offset to the current position of the + # TarFile and set self.offset to the position where the data blocks + # should begin. + tarinfo.offset = self.offset + self.offset += BLOCKSIZE + + tarinfo = self.proc_member(tarinfo) + + except ValueError, e: + if self.ignore_zeros: + self._dbg(2, "0x%X: empty or invalid block: %s" % + (self.offset, e)) + self.offset += BLOCKSIZE + continue + else: + if self.offset == 0: + raise ReadError("empty, unreadable or compressed " + "file: %s" % e) + return None + break + + # Some old tar programs represent a directory as a regular + # file with a trailing slash. + if tarinfo.isreg() and tarinfo.name.endswith("/"): + tarinfo.type = DIRTYPE + + # Directory names should have a '/' at the end. 
+ if tarinfo.isdir(): + tarinfo.name += "/" + + self.members.append(tarinfo) + return tarinfo + + #-------------------------------------------------------------------------- + # The following are methods that are called depending on the type of a + # member. The entry point is proc_member() which is called with a TarInfo + # object created from the header block from the current offset. The + # proc_member() method can be overridden in a subclass to add custom + # proc_*() methods. A proc_*() method MUST implement the following + # operations: + # 1. Set tarinfo.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set self.offset to the position where the next member's header will + # begin. + # 3. Return tarinfo or another valid TarInfo object. + def proc_member(self, tarinfo): + """Choose the right processing method for tarinfo depending + on its type and call it. + """ + if tarinfo.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self.proc_gnulong(tarinfo) + elif tarinfo.type == GNUTYPE_SPARSE: + return self.proc_sparse(tarinfo) + else: + return self.proc_builtin(tarinfo) + + def proc_builtin(self, tarinfo): + """Process a builtin type member or an unknown member + which will be treated as a regular file. + """ + tarinfo.offset_data = self.offset + if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + self.offset += self._block(tarinfo.size) + return tarinfo + + def proc_gnulong(self, tarinfo): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = "" + count = tarinfo.size + while count > 0: + block = self.fileobj.read(BLOCKSIZE) + buf += block + self.offset += BLOCKSIZE + count -= BLOCKSIZE + + # Fetch the next header and process it. + b = self.fileobj.read(BLOCKSIZE) + t = TarInfo.frombuf(b) + t.offset = self.offset + self.offset += BLOCKSIZE + next = self.proc_member(t) + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = tarinfo.offset + if tarinfo.type == GNUTYPE_LONGNAME: + next.name = buf.rstrip(NUL) + elif tarinfo.type == GNUTYPE_LONGLINK: + next.linkname = buf.rstrip(NUL) + + return next + + def proc_sparse(self, tarinfo): + """Process a GNU sparse header plus extra headers. + """ + buf = tarinfo.buf + sp = _ringbuffer() + pos = 386 + lastpos = 0L + realpos = 0L + # There are 4 possible sparse structs in the + # first header. + for i in xrange(4): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset > lastpos: + sp.append(_hole(lastpos, offset - lastpos)) + sp.append(_data(offset, numbytes, realpos)) + realpos += numbytes + lastpos = offset + numbytes + pos += 24 + + isextended = ord(buf[482]) + origsize = nti(buf[483:495]) + + # If the isextended flag is given, + # there are extra headers to process. 
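# Reading aid (illustrative, derived from the offsets used above and below):
# a GNU sparse header carries up to 4 (offset, numbytes) pairs starting at
# byte 386, each pair being two 12-byte octal fields; byte 482 holds the
# "isextended" flag and bytes 483:495 hold the real (unsparsified) file size.
# Each extension block that follows holds up to 21 more pairs, with its own
# "isextended" flag at byte 504.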
+ while isextended == 1: + buf = self.fileobj.read(BLOCKSIZE) + self.offset += BLOCKSIZE + pos = 0 + for i in xrange(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset > lastpos: + sp.append(_hole(lastpos, offset - lastpos)) + sp.append(_data(offset, numbytes, realpos)) + realpos += numbytes + lastpos = offset + numbytes + pos += 24 + isextended = ord(buf[504]) + + if lastpos < origsize: + sp.append(_hole(lastpos, origsize - lastpos)) + + tarinfo.sparse = sp + + tarinfo.offset_data = self.offset + self.offset += self._block(tarinfo.size) + tarinfo.size = origsize + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def _getmember(self, name, tarinfo=None): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + if tarinfo is None: + end = len(members) + else: + end = members.index(tarinfo) + + for i in xrange(end - 1, -1, -1): + if name == members[i].name: + return members[i] + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self._mode not in mode: + raise IOError("bad operation for mode %r" % self._mode) + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. + """ + if level <= self.debug: + print >> sys.stderr, msg +# class TarFile + +class TarIter: + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + def next(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + +# Helper classes for sparse file support +class _section: + """Base class for _data and _hole. + """ + def __init__(self, offset, size): + self.offset = offset + self.size = size + def __contains__(self, offset): + return self.offset <= offset < self.offset + self.size + +class _data(_section): + """Represent a data section in a sparse file. 
+ """ + def __init__(self, offset, size, realpos): + _section.__init__(self, offset, size) + self.realpos = realpos + +class _hole(_section): + """Represent a hole section in a sparse file. + """ + pass + +class _ringbuffer(list): + """Ringbuffer class which increases performance + over a regular list. + """ + def __init__(self): + self.idx = 0 + def find(self, offset): + idx = self.idx + while True: + item = self[idx] + if offset in item: + break + idx += 1 + if idx == len(self): + idx = 0 + if idx == self.idx: + # End of File + return None + self.idx = idx + return item + +#--------------------------------------------- +# zipfile compatible TarFile class +#--------------------------------------------- +TAR_PLAIN = 0 # zipfile.ZIP_STORED +TAR_GZIPPED = 8 # zipfile.ZIP_DEFLATED +class TarFileCompat: + """TarFile class compatible with standard module zipfile's + ZipFile class. + """ + def __init__(self, file, mode="r", compression=TAR_PLAIN): + if compression == TAR_PLAIN: + self.tarfile = TarFile.taropen(file, mode) + elif compression == TAR_GZIPPED: + self.tarfile = TarFile.gzopen(file, mode) + else: + raise ValueError("unknown compression constant") + if mode[0:1] == "r": + members = self.tarfile.getmembers() + for m in members: + m.filename = m.name + m.file_size = m.size + m.date_time = time.gmtime(m.mtime)[:6] + def namelist(self): + return map(lambda m: m.name, self.infolist()) + def infolist(self): + return filter(lambda m: m.type in REGULAR_TYPES, + self.tarfile.getmembers()) + def printdir(self): + self.tarfile.list() + def testzip(self): + return + def getinfo(self, name): + return self.tarfile.getmember(name) + def read(self, name): + return self.tarfile.extractfile(self.tarfile.getmember(name)).read() + def write(self, filename, arcname=None, compress_type=None): + self.tarfile.add(filename, arcname) + def writestr(self, zinfo, bytes): + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + import calendar + zinfo.name = zinfo.filename + zinfo.size = zinfo.file_size + zinfo.mtime = calendar.timegm(zinfo.date_time) + self.tarfile.addfile(zinfo, StringIO(bytes)) + def close(self): + self.tarfile.close() +#class TarFileCompat + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. 
+ """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +open = TarFile.open Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/traceback.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/traceback.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,312 @@ +"""Extract, format and print information about Python stack traces.""" + +import linecache +import sys +import types + +__all__ = ['extract_stack', 'extract_tb', 'format_exception', + 'format_exception_only', 'format_list', 'format_stack', + 'format_tb', 'print_exc', 'format_exc', 'print_exception', + 'print_last', 'print_stack', 'print_tb', 'tb_lineno'] + +def _print(file, str='', terminator='\n'): + file.write(str+terminator) + + +def print_list(extracted_list, file=None): + """Print the list of tuples as returned by extract_tb() or + extract_stack() as a formatted stack trace to the given file.""" + if file is None: + file = sys.stderr + for filename, lineno, name, line in extracted_list: + _print(file, + ' File "%s", line %d, in %s' % (filename,lineno,name)) + if line: + _print(file, ' %s' % line.strip()) + +def format_list(extracted_list): + """Format a list of traceback entry tuples for printing. + + Given a list of tuples as returned by extract_tb() or + extract_stack(), return a list of strings ready for printing. + Each string in the resulting list corresponds to the item with the + same index in the argument list. Each string ends in a newline; + the strings may contain internal newlines as well, for those items + whose source text line is not None. + """ + list = [] + for filename, lineno, name, line in extracted_list: + item = ' File "%s", line %d, in %s\n' % (filename,lineno,name) + if line: + item = item + ' %s\n' % line.strip() + list.append(item) + return list + + +def print_tb(tb, limit=None, file=None): + """Print up to 'limit' stack trace entries from the traceback 'tb'. + + If 'limit' is omitted or None, all entries are printed. If 'file' + is omitted or None, the output goes to sys.stderr; otherwise + 'file' should be an open file or file-like object with a write() + method. + """ + if file is None: + file = sys.stderr + if limit is None: + if hasattr(sys, 'tracebacklimit'): + limit = sys.tracebacklimit + n = 0 + while tb is not None and (limit is None or n < limit): + f = tb.tb_frame + lineno = tb.tb_lineno + co = f.f_code + filename = co.co_filename + name = co.co_name + _print(file, + ' File "%s", line %d, in %s' % (filename,lineno,name)) + linecache.checkcache(filename) + line = linecache.getline(filename, lineno, f.f_globals) + if line: _print(file, ' ' + line.strip()) + tb = tb.tb_next + n = n+1 + +def format_tb(tb, limit = None): + """A shorthand for 'format_list(extract_stack(f, limit)).""" + return format_list(extract_tb(tb, limit)) + +def extract_tb(tb, limit = None): + """Return list of up to limit pre-processed entries from traceback. + + This is useful for alternate formatting of stack traces. If + 'limit' is omitted or None, all entries are extracted. A + pre-processed stack trace entry is a quadruple (filename, line + number, function name, text) representing the information that is + usually printed for a stack trace. The text is a string with + leading and trailing whitespace stripped; if the source is not + available it is None. 
+ """ + if limit is None: + if hasattr(sys, 'tracebacklimit'): + limit = sys.tracebacklimit + list = [] + n = 0 + while tb is not None and (limit is None or n < limit): + f = tb.tb_frame + lineno = tb.tb_lineno + co = f.f_code + filename = co.co_filename + name = co.co_name + linecache.checkcache(filename) + line = linecache.getline(filename, lineno, f.f_globals) + if line: line = line.strip() + else: line = None + list.append((filename, lineno, name, line)) + tb = tb.tb_next + n = n+1 + return list + + +def print_exception(etype, value, tb, limit=None, file=None): + """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. + + This differs from print_tb() in the following ways: (1) if + traceback is not None, it prints a header "Traceback (most recent + call last):"; (2) it prints the exception type and value after the + stack trace; (3) if type is SyntaxError and value has the + appropriate format, it prints the line where the syntax error + occurred with a caret on the next line indicating the approximate + position of the error. + """ + if file is None: + file = sys.stderr + if tb: + _print(file, 'Traceback (most recent call last):') + print_tb(tb, limit, file) + lines = format_exception_only(etype, value) + for line in lines[:-1]: + _print(file, line, ' ') + _print(file, lines[-1], '') + +def format_exception(etype, value, tb, limit = None): + """Format a stack trace and the exception information. + + The arguments have the same meaning as the corresponding arguments + to print_exception(). The return value is a list of strings, each + ending in a newline and some containing internal newlines. When + these lines are concatenated and printed, exactly the same text is + printed as does print_exception(). + """ + if tb: + list = ['Traceback (most recent call last):\n'] + list = list + format_tb(tb, limit) + else: + list = [] + list = list + format_exception_only(etype, value) + return list + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. + if (isinstance(etype, BaseException) or + isinstance(etype, types.InstanceType) or + etype is None or type(etype) is str): + return [_format_final_exc_line(etype, value)] + + stype = etype.__name__ + + if not issubclass(etype, SyntaxError): + return [_format_final_exc_line(stype, value)] + + # It was a syntax error; show exactly where the problem was found. 
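# Illustrative sketch of the output shape produced below for a SyntaxError
# (approximate, with a hypothetical file name):
#   File "example.py", line 1
#     print 1 +
#              ^
#   SyntaxError: invalid syntax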
+ lines = [] + try: + msg, (filename, lineno, offset, badline) = value + except Exception: + pass + else: + filename = filename or "" + lines.append(' File "%s", line %d\n' % (filename, lineno)) + if badline is not None: + lines.append(' %s\n' % badline.strip()) + if offset is not None: + caretspace = badline[:offset].lstrip() + # non-space whitespace (likes tabs) must be kept for alignment + caretspace = ((c.isspace() and c or ' ') for c in caretspace) + # only three spaces to account for offset1 == pos 0 + lines.append(' %s^\n' % ''.join(caretspace)) + value = msg + + lines.append(_format_final_exc_line(stype, value)) + return lines + +def _format_final_exc_line(etype, value): + """Return a list of a single line -- normal case for format_exception_only""" + valuestr = _some_str(value) + if value is None or not valuestr: + line = "%s\n" % etype + else: + line = "%s: %s\n" % (etype, valuestr) + return line + +def _some_str(value): + try: + return str(value) + except: + return '' % type(value).__name__ + + +def print_exc(limit=None, file=None): + """Shorthand for 'print_exception(sys.exc_type, sys.exc_value, sys.exc_traceback, limit, file)'. + (In fact, it uses sys.exc_info() to retrieve the same information + in a thread-safe way.)""" + if file is None: + file = sys.stderr + try: + etype, value, tb = sys.exc_info() + print_exception(etype, value, tb, limit, file) + finally: + etype = value = tb = None + + +def format_exc(limit=None): + """Like print_exc() but return a string.""" + try: + etype, value, tb = sys.exc_info() + return ''.join(format_exception(etype, value, tb, limit)) + finally: + etype = value = tb = None + + +def print_last(limit=None, file=None): + """This is a shorthand for 'print_exception(sys.last_type, + sys.last_value, sys.last_traceback, limit, file)'.""" + if file is None: + file = sys.stderr + print_exception(sys.last_type, sys.last_value, sys.last_traceback, + limit, file) + + +def print_stack(f=None, limit=None, file=None): + """Print a stack trace from its invocation point. + + The optional 'f' argument can be used to specify an alternate + stack frame at which to start. The optional 'limit' and 'file' + arguments have the same meaning as for print_exception(). + """ + if f is None: + try: + raise ZeroDivisionError + except ZeroDivisionError: + f = sys.exc_info()[2].tb_frame.f_back + print_list(extract_stack(f, limit), file) + +def format_stack(f=None, limit=None): + """Shorthand for 'format_list(extract_stack(f, limit))'.""" + if f is None: + try: + raise ZeroDivisionError + except ZeroDivisionError: + f = sys.exc_info()[2].tb_frame.f_back + return format_list(extract_stack(f, limit)) + +def extract_stack(f=None, limit = None): + """Extract the raw traceback from the current stack frame. + + The return value has the same format as for extract_tb(). The + optional 'f' and 'limit' arguments have the same meaning as for + print_stack(). Each item in the list is a quadruple (filename, + line number, function name, text), and the entries are in order + from oldest to newest stack frame. 
+ """ + if f is None: + try: + raise ZeroDivisionError + except ZeroDivisionError: + f = sys.exc_info()[2].tb_frame.f_back + if limit is None: + if hasattr(sys, 'tracebacklimit'): + limit = sys.tracebacklimit + list = [] + n = 0 + while f is not None and (limit is None or n < limit): + lineno = f.f_lineno + co = f.f_code + filename = co.co_filename + name = co.co_name + linecache.checkcache(filename) + line = linecache.getline(filename, lineno, f.f_globals) + if line: line = line.strip() + else: line = None + list.append((filename, lineno, name, line)) + f = f.f_back + n = n+1 + list.reverse() + return list + +def tb_lineno(tb): + """Calculate correct line number of traceback given in tb. + + Obsolete in 2.3. + """ + return tb.tb_lineno Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,101 @@ +"""Define names for all type symbols known in the standard interpreter. + +Types that are part of optional modules (e.g. array) are not listed. +""" +import sys + +# Iterators in Python aren't a matter of type but of protocol. A large +# and changing number of builtin types implement *some* flavor of +# iterator. Don't check the type! Use hasattr to check for both +# "__iter__" and "next" attributes instead. + +NoneType = type(None) +TypeType = type +ObjectType = object + +IntType = int +LongType = long +FloatType = float +BooleanType = bool +try: + ComplexType = complex +except NameError: + pass + +StringType = str + +# StringTypes is already outdated. Instead of writing "type(x) in +# types.StringTypes", you should use "isinstance(x, basestring)". But +# we keep around for compatibility with Python 2.2. +try: + UnicodeType = unicode + StringTypes = (StringType, UnicodeType) +except NameError: + StringTypes = (StringType,) + +BufferType = buffer + +TupleType = tuple +ListType = list +DictType = DictionaryType = dict + +def _f(): pass +FunctionType = type(_f) +LambdaType = type(lambda: None) # Same as FunctionType +try: + CodeType = type(_f.func_code) +except RuntimeError: + # Execution in restricted environment + pass + +def _g(): + yield 1 +GeneratorType = type(_g()) + +class _C: + def _m(self): pass +ClassType = _classobj # PyPy-specific, from __builtin__ +UnboundMethodType = type(_C._m) # Same as MethodType +_x = _C() +InstanceType = _instance # PyPy-specific, from __builtin__ +MethodType = type(_x._m) + +BuiltinFunctionType = type(len) +BuiltinMethodType = type([].append) # Same as BuiltinFunctionType + +ModuleType = type(sys) +FileType = file +XRangeType = xrange + +try: + raise TypeError +except TypeError: + try: + tb = sys.exc_info()[2] + TracebackType = type(tb) + FrameType = type(tb.tb_frame) + except AttributeError: + # In the restricted environment, exc_info returns (None, None, + # None) Then, tb.tb_frame gives an attribute error + pass + tb = None; del tb + +SliceType = slice +EllipsisType = type(Ellipsis) + +DictProxyType = type(TypeType.__dict__) +NotImplementedType = type(NotImplemented) + +# Extension types defined in a C helper module. XXX There may be no +# equivalent in implementations other than CPython, so it seems better to +# leave them undefined then to set them to e.g. None. 
+try: + import _types +except ImportError: + pass +else: + GetSetDescriptorType = type(_types.Helper.getter) + MemberDescriptorType = type(_types.Helper.member) + del _types + +del sys, _f, _g, _C, _x # Not for export Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,186 @@ +#! /usr/bin/env python + +# Copyright 1994 by Lance Ellinghouse +# Cathedral City, California Republic, United States of America. +# All Rights Reserved +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appear in all copies and that +# both that copyright notice and this permission notice appear in +# supporting documentation, and that the name of Lance Ellinghouse +# not be used in advertising or publicity pertaining to distribution +# of the software without specific, written prior permission. +# LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +# FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE +# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +# +# Modified by Jack Jansen, CWI, July 1995: +# - Use binascii module to do the actual line-by-line conversion +# between ascii and binary. This results in a 1000-fold speedup. The C +# version is still 5 times faster, though. +# - Arguments more compliant with python standard + +"""Implementation of the UUencode and UUdecode functions. + +encode(in_file, out_file [,name, mode]) +decode(in_file [, out_file, mode]) +""" + +import binascii +import os +import sys + +__all__ = ["Error", "encode", "decode"] + +class Error(Exception): + pass + +def encode(in_file, out_file, name=None, mode=None): + """Uuencode file""" + # + # If in_file is a pathname open it and change defaults + # + if in_file == '-': + in_file = sys.stdin + elif isinstance(in_file, basestring): + if name is None: + name = os.path.basename(in_file) + if mode is None: + try: + mode = os.stat(in_file).st_mode + except AttributeError: + pass + in_file = open(in_file, 'rb') + # + # Open out_file if it is a pathname + # + if out_file == '-': + out_file = sys.stdout + elif isinstance(out_file, basestring): + out_file = open(out_file, 'w') + # + # Set defaults for name and mode + # + if name is None: + name = '-' + if mode is None: + mode = 0666 + # + # Write the data + # + out_file.write('begin %o %s\n' % ((mode&0777),name)) + data = in_file.read(45) + while len(data) > 0: + out_file.write(binascii.b2a_uu(data)) + data = in_file.read(45) + out_file.write(' \nend\n') + + +def decode(in_file, out_file=None, mode=None, quiet=0): + """Decode uuencoded file""" + # + # Open the input file, if needed. 
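# Illustrative note (hedged): encode() above writes a header line such as
#   begin 644 hello.txt
# followed by lines that each encode up to 45 input bytes via
# binascii.b2a_uu, and a trailing " \nend\n"; the loop below scans the
# input for that "begin" line before decoding.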
+ # + if in_file == '-': + in_file = sys.stdin + elif isinstance(in_file, basestring): + in_file = open(in_file) + # + # Read until a begin is encountered or we've exhausted the file + # + while True: + hdr = in_file.readline() + if not hdr: + raise Error('No valid begin line found in input file') + if not hdr.startswith('begin'): + continue + hdrfields = hdr.split(' ', 2) + if len(hdrfields) == 3 and hdrfields[0] == 'begin': + try: + int(hdrfields[1], 8) + break + except ValueError: + pass + if out_file is None: + out_file = hdrfields[2].rstrip() + if os.path.exists(out_file): + raise Error('Cannot overwrite existing file: %s' % out_file) + if mode is None: + mode = int(hdrfields[1], 8) + # + # Open the output file + # + opened = False + if out_file == '-': + out_file = sys.stdout + elif isinstance(out_file, basestring): + fp = open(out_file, 'wb') + try: + os.chmod(out_file, mode) + except AttributeError: + pass + out_file = fp + opened = True + # + # Main decoding loop + # + s = in_file.readline() + while s and s.strip() != 'end': + try: + data = binascii.a2b_uu(s) + except binascii.Error, v: + # Workaround for broken uuencoders by /Fredrik Lundh + nbytes = (((ord(s[0])-32) & 63) * 4 + 5) // 3 + data = binascii.a2b_uu(s[:nbytes]) + if not quiet: + sys.stderr.write("Warning: %s\n" % v) + out_file.write(data) + s = in_file.readline() + if not s: + raise Error('Truncated input file') + if opened: + out_file.close() + +def test(): + """uuencode/uudecode main program""" + + import optparse + parser = optparse.OptionParser(usage='usage: %prog [-d] [-t] [input [output]]') + parser.add_option('-d', '--decode', dest='decode', help='Decode (instead of encode)?', default=False, action='store_true') + parser.add_option('-t', '--text', dest='text', help='data is text, encoded format unix-compatible text?', default=False, action='store_true') + + (options, args) = parser.parse_args() + if len(args) > 2: + parser.error('incorrect number of arguments') + sys.exit(1) + + input = sys.stdin + output = sys.stdout + if len(args) > 0: + input = args[0] + if len(args) > 1: + output = args[1] + + if options.decode: + if options.text: + if isinstance(output, basestring): + output = open(output, 'w') + else: + print sys.argv[0], ': cannot do -t to stdout' + sys.exit(1) + decode(input, output) + else: + if options.text: + if isinstance(input, basestring): + input = open(input, 'r') + else: + print sys.argv[0], ': cannot do -t from stdin' + sys.exit(1) + encode(input, output) + +if __name__ == '__main__': + test() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py Mon Aug 11 22:10:30 2008 @@ -0,0 +1,264 @@ +"""Python part of the warnings subsystem.""" + +# Note: function level imports should *not* be used +# in this module as it may cause import lock deadlock. +# See bug 683658. 
+import sys, types +import linecache + +__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings", + "resetwarnings"] + +# filters contains a sequence of filter 5-tuples +# The components of the 5-tuple are: +# - an action: error, ignore, always, default, module, or once +# - a compiled regex that must match the warning message +# - a class representing the warning category +# - a compiled regex that must match the module that is being warned +# - a line number for the line being warning, or 0 to mean any line +# If either if the compiled regexs are None, match anything. +filters = [] +defaultaction = "default" +onceregistry = {} + +def warn(message, category=None, stacklevel=1): + """Issue a warning, or maybe ignore it or raise an exception.""" + # Check if message is already a Warning object + if isinstance(message, Warning): + category = message.__class__ + # Check category argument + if category is None: + category = UserWarning + assert issubclass(category, Warning) + # Get context information + try: + caller = sys._getframe(stacklevel) + except ValueError: + globals = sys.__dict__ + lineno = 1 + else: + globals = caller.f_globals + lineno = caller.f_lineno + if '__name__' in globals: + module = globals['__name__'] + else: + module = "" + filename = globals.get('__file__') + if filename: + fnl = filename.lower() + if fnl.endswith((".pyc", ".pyo")): + filename = filename[:-1] + else: + if module == "__main__": + try: + filename = sys.argv[0] + except AttributeError: + # embedded interpreters don't have sys.argv, see bug #839151 + filename = '__main__' + if not filename: + filename = module + registry = globals.setdefault("__warningregistry__", {}) + warn_explicit(message, category, filename, lineno, module, registry, + globals) + +def warn_explicit(message, category, filename, lineno, + module=None, registry=None, module_globals=None): + if module is None: + module = filename or "" + if module[-3:].lower() == ".py": + module = module[:-3] # XXX What about leading pathname? + if registry is None: + registry = {} + if isinstance(message, Warning): + text = str(message) + category = message.__class__ + else: + text = message + message = category(message) + key = (text, category, lineno) + # Quick test for common case + if registry.get(key): + return + # Search the filters + for item in filters: + action, msg, cat, mod, ln = item + if ((msg is None or msg.match(text)) and + issubclass(category, cat) and + (mod is None or mod.match(module)) and + (ln == 0 or lineno == ln)): + break + else: + action = defaultaction + # Early exit actions + if action == "ignore": + registry[key] = 1 + return + + # Prime the linecache for formatting, in case the + # "file" is actually in a zipfile or something. 
+ linecache.getlines(filename, module_globals) + + if action == "error": + raise message + # Other actions + if action == "once": + registry[key] = 1 + oncekey = (text, category) + if onceregistry.get(oncekey): + return + onceregistry[oncekey] = 1 + elif action == "always": + pass + elif action == "module": + registry[key] = 1 + altkey = (text, category, 0) + if registry.get(altkey): + return + registry[altkey] = 1 + elif action == "default": + registry[key] = 1 + else: + # Unrecognized actions are errors + raise RuntimeError( + "Unrecognized action (%r) in warnings.filters:\n %s" % + (action, item)) + # Print message and context + showwarning(message, category, filename, lineno) + +def showwarning(message, category, filename, lineno, file=None): + """Hook to write a warning to a file; replace if you like.""" + if file is None: + file = sys.stderr + try: + file.write(formatwarning(message, category, filename, lineno)) + except IOError: + pass # the file (probably stderr) is invalid - this warning gets lost. + +def formatwarning(message, category, filename, lineno): + """Function to format a warning the standard way.""" + s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message) + line = linecache.getline(filename, lineno).strip() + if line: + s = s + " " + line + "\n" + return s + +def filterwarnings(action, message="", category=Warning, module="", lineno=0, + append=0): + """Insert an entry into the list of warnings filters (at the front). + + Use assertions to check that all arguments have the right type.""" + import re + assert action in ("error", "ignore", "always", "default", "module", + "once"), "invalid action: %r" % (action,) + assert isinstance(message, basestring), "message must be a string" +# assert isinstance(category, (type, types.ClassType)), \ +# "category must be a class" + assert issubclass(category, Warning), "category must be a Warning subclass" + assert isinstance(module, basestring), "module must be a string" + assert isinstance(lineno, int) and lineno >= 0, \ + "lineno must be an int >= 0" + item = (action, re.compile(message, re.I), category, + re.compile(module), lineno) + if append: + filters.append(item) + else: + filters.insert(0, item) + +def simplefilter(action, category=Warning, lineno=0, append=0): + """Insert a simple entry into the list of warnings filters (at the front). + + A simple filter matches all modules and messages. 
+ """ + assert action in ("error", "ignore", "always", "default", "module", + "once"), "invalid action: %r" % (action,) + assert isinstance(lineno, int) and lineno >= 0, \ + "lineno must be an int >= 0" + item = (action, None, category, None, lineno) + if append: + filters.append(item) + else: + filters.insert(0, item) + +def resetwarnings(): + """Clear the list of warning filters, so that no filters are active.""" + filters[:] = [] + +class _OptionError(Exception): + """Exception used by option processing helpers.""" + pass + +# Helper to process -W options passed via sys.warnoptions +def _processoptions(args): + for arg in args: + try: + _setoption(arg) + except _OptionError, msg: + print >>sys.stderr, "Invalid -W option ignored:", msg + +# Helper for _processoptions() +def _setoption(arg): + import re + parts = arg.split(':') + if len(parts) > 5: + raise _OptionError("too many fields (max 5): %r" % (arg,)) + while len(parts) < 5: + parts.append('') + action, message, category, module, lineno = [s.strip() + for s in parts] + action = _getaction(action) + message = re.escape(message) + category = _getcategory(category) + module = re.escape(module) + if module: + module = module + '$' + if lineno: + try: + lineno = int(lineno) + if lineno < 0: + raise ValueError + except (ValueError, OverflowError): + raise _OptionError("invalid lineno %r" % (lineno,)) + else: + lineno = 0 + filterwarnings(action, message, category, module, lineno) + +# Helper for _setoption() +def _getaction(action): + if not action: + return "default" + if action == "all": return "always" # Alias + for a in ('default', 'always', 'ignore', 'module', 'once', 'error'): + if a.startswith(action): + return a + raise _OptionError("invalid action: %r" % (action,)) + +# Helper for _setoption() +def _getcategory(category): + import re + if not category: + return Warning + if re.match("^[a-zA-Z0-9_]+$", category): + try: + cat = eval(category) + except NameError: + raise _OptionError("unknown warning category: %r" % (category,)) + else: + i = category.rfind(".") + module = category[:i] + klass = category[i+1:] + try: + m = __import__(module, None, None, [klass]) + except ImportError: + raise _OptionError("invalid module name: %r" % (module,)) + try: + cat = getattr(m, klass) + except AttributeError: + raise _OptionError("unknown warning category: %r" % (category,)) + if not issubclass(cat, Warning): + raise _OptionError("invalid warning category: %r" % (category,)) + return cat + +# Module initialization +_processoptions(sys.warnoptions) +simplefilter("ignore", category=PendingDeprecationWarning, append=1) +simplefilter("ignore", category=ImportWarning, append=1) From arigo at codespeak.net Tue Aug 12 13:49:21 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 12 Aug 2008 13:49:21 +0200 (CEST) Subject: [pypy-svn] r57204 - in pypy/dist/pypy: interpreter module/gc module/gc/test rlib rpython rpython/lltypesystem rpython/memory rpython/memory/gc rpython/memory/gctransform rpython/memory/test translator/c/test translator/cli translator/jvm Message-ID: <20080812114921.C050716A1DB@codespeak.net> Author: arigo Date: Tue Aug 12 13:49:19 2008 New Revision: 57204 Modified: pypy/dist/pypy/interpreter/executioncontext.py pypy/dist/pypy/module/gc/interp_gc.py pypy/dist/pypy/module/gc/test/test_gc.py pypy/dist/pypy/rlib/rgc.py pypy/dist/pypy/rpython/llinterp.py pypy/dist/pypy/rpython/lltypesystem/llheap.py pypy/dist/pypy/rpython/lltypesystem/lloperation.py pypy/dist/pypy/rpython/memory/gc/base.py 
pypy/dist/pypy/rpython/memory/gc/semispace.py pypy/dist/pypy/rpython/memory/gctransform/framework.py pypy/dist/pypy/rpython/memory/gcwrapper.py pypy/dist/pypy/rpython/memory/test/snippet.py pypy/dist/pypy/translator/c/test/test_boehm.py pypy/dist/pypy/translator/cli/opcodes.py pypy/dist/pypy/translator/jvm/opcodes.py Log: Remove the gc__enable_finalizers and gc__disable_finalizers low-level operations. It seems that we don't really need them, as the locking can be nicely implemented at app-level. That's a whole bunch of places with a bit less code, so that's probably good. Modified: pypy/dist/pypy/interpreter/executioncontext.py ============================================================================== --- pypy/dist/pypy/interpreter/executioncontext.py (original) +++ pypy/dist/pypy/interpreter/executioncontext.py Tue Aug 12 13:49:19 2008 @@ -400,12 +400,15 @@ def __init__(self, space): AsyncAction.__init__(self, space) self.dying_objects_w = [] + self.finalizers_lock_count = 0 def register_dying_object(self, w_obj): self.dying_objects_w.append(w_obj) self.fire() def perform(self, executioncontext): + if self.finalizers_lock_count > 0: + return # Each call to perform() first grabs the self.dying_objects_w # and replaces it with an empty list. We do this to try to # avoid too deep recursions of the kind of __del__ being called Modified: pypy/dist/pypy/module/gc/interp_gc.py ============================================================================== --- pypy/dist/pypy/module/gc/interp_gc.py (original) +++ pypy/dist/pypy/module/gc/interp_gc.py Tue Aug 12 13:49:19 2008 @@ -9,25 +9,16 @@ collect.unwrap_spec = [ObjSpace] -class State: - def __init__(self, space): - self.finalizers_lock_count = 0 -def getstate(space): - return space.fromcache(State) - def enable_finalizers(space): - state = getstate(space) - if state.finalizers_lock_count == 0: + if space.user_del_action.finalizers_lock_count == 0: raise OperationError(space.w_ValueError, space.wrap("finalizers are already enabled")) - state.finalizers_lock_count -= 1 - rgc.enable_finalizers() + space.user_del_action.finalizers_lock_count -= 1 + space.user_del_action.fire() enable_finalizers.unwrap_spec = [ObjSpace] def disable_finalizers(space): - state = getstate(space) - rgc.disable_finalizers() - state.finalizers_lock_count += 1 + space.user_del_action.finalizers_lock_count += 1 disable_finalizers.unwrap_spec = [ObjSpace] # ____________________________________________________________ Modified: pypy/dist/pypy/module/gc/test/test_gc.py ============================================================================== --- pypy/dist/pypy/module/gc/test/test_gc.py (original) +++ pypy/dist/pypy/module/gc/test/test_gc.py Tue Aug 12 13:49:19 2008 @@ -4,18 +4,43 @@ gc.collect() # mostly a "does not crash" kind of test def test_disable_finalizers(self): - # on top of PyPy we can't easily test this, except by using - # obsure hacks, so for now we'll live with a "does not crash" - # kind of test import gc + class X(object): + created = 0 + deleted = 0 + def __init__(self): + X.created += 1 + def __del__(self): + X.deleted += 1 + def runtest(should_be_enabled): + gc.collect() + if should_be_enabled: + assert X.deleted == X.created + else: + old_deleted = X.deleted + X(); X(); X() + gc.collect() + if should_be_enabled: + assert X.deleted == X.created + else: + assert X.deleted == old_deleted + + runtest(True) gc.disable_finalizers() + runtest(False) + runtest(False) gc.enable_finalizers() - # we can test that nesting appears to work + runtest(True) + # 
test nesting gc.disable_finalizers() gc.disable_finalizers() + runtest(False) gc.enable_finalizers() + runtest(False) gc.enable_finalizers() + runtest(True) raises(ValueError, gc.enable_finalizers) + runtest(True) def test_estimate_heap_size(self): import sys, gc Modified: pypy/dist/pypy/rlib/rgc.py ============================================================================== --- pypy/dist/pypy/rlib/rgc.py (original) +++ pypy/dist/pypy/rlib/rgc.py Tue Aug 12 13:49:19 2008 @@ -11,15 +11,6 @@ """ pass -def disable_finalizers(): - """Prevent __del__ methods from running. - Calls to disable_finalizers/enable_finalizers can be nested. - """ - gc.disable() # rough approximation on top of CPython - -def enable_finalizers(): - gc.enable() # rough approximation on top of CPython - # ____________________________________________________________ # Framework GC features @@ -174,18 +165,6 @@ return hop.genop('gc_set_max_heap_size', [v_nbytes], resulttype=lltype.Void) -class CollectEntry(ExtRegistryEntry): - _about_ = (disable_finalizers, enable_finalizers) - - def compute_result_annotation(self): - from pypy.annotation import model as annmodel - return annmodel.s_None - - def specialize_call(self, hop): - opname = 'gc__' + self.instance.__name__ - hop.exception_cannot_occur() - return hop.genop(opname, [], resulttype=hop.r_result) - def can_move(p): return True Modified: pypy/dist/pypy/rpython/llinterp.py ============================================================================== --- pypy/dist/pypy/rpython/llinterp.py (original) +++ pypy/dist/pypy/rpython/llinterp.py Tue Aug 12 13:49:19 2008 @@ -764,12 +764,6 @@ def op_gc__collect(self): self.heap.collect() - def op_gc__disable_finalizers(self): - self.heap.disable_finalizers() - - def op_gc__enable_finalizers(self): - self.heap.enable_finalizers() - def op_gc_can_move(self, ptr): addr = llmemory.cast_ptr_to_adr(ptr) return self.heap.can_move(addr) Modified: pypy/dist/pypy/rpython/lltypesystem/llheap.py ============================================================================== --- pypy/dist/pypy/rpython/lltypesystem/llheap.py (original) +++ pypy/dist/pypy/rpython/lltypesystem/llheap.py Tue Aug 12 13:49:19 2008 @@ -5,7 +5,7 @@ setfield = setattr from operator import setitem as setarrayitem -from pypy.rlib.rgc import collect, disable_finalizers, enable_finalizers +from pypy.rlib.rgc import collect from pypy.rlib.rgc import can_move def setinterior(toplevelcontainer, inneraddr, INNERTYPE, newvalue): Modified: pypy/dist/pypy/rpython/lltypesystem/lloperation.py ============================================================================== --- pypy/dist/pypy/rpython/lltypesystem/lloperation.py (original) +++ pypy/dist/pypy/rpython/lltypesystem/lloperation.py Tue Aug 12 13:49:19 2008 @@ -393,8 +393,6 @@ # __________ GC operations __________ 'gc__collect': LLOp(canunwindgc=True), - 'gc__disable_finalizers': LLOp(), - 'gc__enable_finalizers': LLOp(canunwindgc=True), 'gc_free': LLOp(), 'gc_fetch_exception': LLOp(), 'gc_restore_exception': LLOp(), Modified: pypy/dist/pypy/rpython/memory/gc/base.py ============================================================================== --- pypy/dist/pypy/rpython/memory/gc/base.py (original) +++ pypy/dist/pypy/rpython/memory/gc/base.py Tue Aug 12 13:49:19 2008 @@ -106,12 +106,6 @@ def x_clone(self, clonedata): raise RuntimeError("no support for x_clone in the GC") - def disable_finalizers(self): - pass # xxx this should really be implemented by all subclasses - - def enable_finalizers(self): - pass # xxx this should 
really be implemented by all subclasses - def trace(self, obj, callback, arg): """Enumerate the locations inside the given obj that can contain GC pointers. For each such location, callback(pointer, arg) is Modified: pypy/dist/pypy/rpython/memory/gc/semispace.py ============================================================================== --- pypy/dist/pypy/rpython/memory/gc/semispace.py (original) +++ pypy/dist/pypy/rpython/memory/gc/semispace.py Tue Aug 12 13:49:19 2008 @@ -72,14 +72,6 @@ self.objects_with_weakrefs = self.AddressStack() self.objects_with_id = self.AddressDict() - def disable_finalizers(self): - self.finalizer_lock_count += 1 - - def enable_finalizers(self): - self.finalizer_lock_count -= 1 - if self.run_finalizers.non_empty(): - self.execute_finalizers() - # This class only defines the malloc_{fixed,var}size_clear() methods # because the spaces are filled with zeroes in advance. Modified: pypy/dist/pypy/rpython/memory/gctransform/framework.py ============================================================================== --- pypy/dist/pypy/rpython/memory/gctransform/framework.py (original) +++ pypy/dist/pypy/rpython/memory/gctransform/framework.py Tue Aug 12 13:49:19 2008 @@ -227,10 +227,6 @@ + [annmodel.SomeBool(), annmodel.SomeBool()], s_gcref) self.collect_ptr = getfn(GCClass.collect.im_func, [s_gc], annmodel.s_None) - self.disable_finalizers_ptr = getfn(GCClass.disable_finalizers.im_func, - [s_gc], annmodel.s_None) - self.enable_finalizers_ptr = getfn(GCClass.enable_finalizers.im_func, - [s_gc], annmodel.s_None) self.can_move_ptr = getfn(GCClass.can_move.im_func, [s_gc, annmodel.SomeAddress()], annmodel.SomeBool()) @@ -567,21 +563,6 @@ self.pop_roots(hop, livevars) return v_result - def gct_gc__disable_finalizers(self, hop): - # cannot collect() - op = hop.spaceop - hop.genop("direct_call", [self.disable_finalizers_ptr, - self.c_const_gc], - resultvar=op.result) - - def gct_gc__enable_finalizers(self, hop): - # can collect() because it typically calls pending finalizers - op = hop.spaceop - livevars = self.push_roots(hop) - hop.genop("direct_call", [self.enable_finalizers_ptr, self.c_const_gc], - resultvar=op.result) - self.pop_roots(hop, livevars) - def gct_gc_x_swap_pool(self, hop): op = hop.spaceop [v_malloced] = op.args Modified: pypy/dist/pypy/rpython/memory/gcwrapper.py ============================================================================== --- pypy/dist/pypy/rpython/memory/gcwrapper.py (original) +++ pypy/dist/pypy/rpython/memory/gcwrapper.py Tue Aug 12 13:49:19 2008 @@ -102,12 +102,6 @@ def collect(self): self.gc.collect() - def disable_finalizers(self): - self.gc.disable_finalizers() - - def enable_finalizers(self): - self.gc.enable_finalizers() - def can_move(self, addr): return self.gc.can_move(addr) Modified: pypy/dist/pypy/rpython/memory/test/snippet.py ============================================================================== --- pypy/dist/pypy/rpython/memory/test/snippet.py (original) +++ pypy/dist/pypy/rpython/memory/test/snippet.py Tue Aug 12 13:49:19 2008 @@ -125,53 +125,3 @@ print summary print msg py.test.fail(msg) - - def test_disable_finalizers(self): - from pypy.rlib import rgc - if self.large_tests_ok: - MULTIPLY = 50 - else: - MULTIPLY = 1 - - tmpfilepath = udir.join('test_disable_finalizers') - - class State: - pass - state = State() - state.tmpfilename = str(tmpfilepath) - state.fd = -1 - - class X(object): - def __init__(self, x): - self.x = str(x) - def __del__(self): - if state.fd >= 0: - os.write(state.fd, self.x) - - def 
do_stuff(): - lst = [X(n) for n in range(7*MULTIPLY)] - return len(lst) - - def f(): - fd = os.open(state.tmpfilename, - os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - 0644) - state.fd = fd - for i in range(10*MULTIPLY): - do_stuff() - rgc.disable_finalizers() - os.write(fd, '-') - do_stuff() - os.write(fd, '+') - rgc.enable_finalizers() - state.fd = -1 - os.close(fd) - return 'ok' - - self.run_ok(f) - buf = tmpfilepath.read() - assert buf.count('-') == buf.count('+') - assert buf.count('-') + buf.count('+') < len(buf) - for i in range(len(buf)): - if buf[i] == '-': - assert buf[i+1] == '+' Modified: pypy/dist/pypy/translator/c/test/test_boehm.py ============================================================================== --- pypy/dist/pypy/translator/c/test/test_boehm.py (original) +++ pypy/dist/pypy/translator/c/test/test_boehm.py Tue Aug 12 13:49:19 2008 @@ -420,6 +420,3 @@ c_fn = self.getcompiled(wrapper, []) res = c_fn() assert res == 1 - - test_disable_finalizers = ( - snippet.SemiSpaceGCTests.test_disable_finalizers.im_func) Modified: pypy/dist/pypy/translator/cli/opcodes.py ============================================================================== --- pypy/dist/pypy/translator/cli/opcodes.py (original) +++ pypy/dist/pypy/translator/cli/opcodes.py Tue Aug 12 13:49:19 2008 @@ -68,8 +68,6 @@ 'cast_ptr_to_weakadr': [PushAllArgs, 'newobj instance void class %s::.ctor(object)' % WEAKREF], 'gc__collect': 'call void class [mscorlib]System.GC::Collect()', 'gc_set_max_heap_size': Ignore, - 'gc__enable_finalizers': Ignore, - 'gc__disable_finalizers': Ignore, 'resume_point': Ignore, 'debug_assert': Ignore, 'keepalive': Ignore, Modified: pypy/dist/pypy/translator/jvm/opcodes.py ============================================================================== --- pypy/dist/pypy/translator/jvm/opcodes.py (original) +++ pypy/dist/pypy/translator/jvm/opcodes.py Tue Aug 12 13:49:19 2008 @@ -93,8 +93,6 @@ 'gc__collect': jvm.SYSTEMGC, 'gc_set_max_heap_size': Ignore, - 'gc__enable_finalizers': Ignore, - 'gc__disable_finalizers': Ignore, 'resume_point': Ignore, 'debug_assert': [], # TODO: implement? From arigo at codespeak.net Tue Aug 12 13:52:28 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 12 Aug 2008 13:52:28 +0200 (CEST) Subject: [pypy-svn] r57205 - in pypy/dist/pypy: interpreter module/__builtin__ module/gc/test Message-ID: <20080812115228.A49DA16A23E@codespeak.net> Author: arigo Date: Tue Aug 12 13:52:28 2008 New Revision: 57205 Modified: pypy/dist/pypy/interpreter/baseobjspace.py pypy/dist/pypy/interpreter/typedef.py pypy/dist/pypy/module/__builtin__/interp_classobj.py pypy/dist/pypy/module/gc/test/test_gc.py Log: Fix the __del__ handling of old-style instances to match the __del__ handling of new-style instances, in particular regarding the usage of a queue to avoid crashes if the app-level method is called immediately. Modified: pypy/dist/pypy/interpreter/baseobjspace.py ============================================================================== --- pypy/dist/pypy/interpreter/baseobjspace.py (original) +++ pypy/dist/pypy/interpreter/baseobjspace.py Tue Aug 12 13:52:28 2008 @@ -138,6 +138,23 @@ self.setweakref(lifeline.space, None) lifeline.clear_all_weakrefs() + __already_enqueued_for_destruction = False + + def _enqueue_for_destruction(self, space): + """Put the object in the destructor queue of the space. + At a later, safe point in time, UserDelAction will use + space.userdel() to call the object's app-level __del__ method. 
+ """ + # this function always resurect the object, so when + # running on top of CPython we must manually ensure that + # we enqueue it only once + if not we_are_translated(): + if self.__already_enqueued_for_destruction: + return + self.__already_enqueued_for_destruction = True + self.clear_all_weakrefs() + space.user_del_action.register_dying_object(self) + def _call_builtin_destructor(self): pass # method overridden in typedef.py Modified: pypy/dist/pypy/interpreter/typedef.py ============================================================================== --- pypy/dist/pypy/interpreter/typedef.py (original) +++ pypy/dist/pypy/interpreter/typedef.py Tue Aug 12 13:52:28 2008 @@ -9,7 +9,7 @@ DescrMismatch from pypy.interpreter.error import OperationError from pypy.tool.sourcetools import compile2, func_with_new_name -from pypy.rlib.objectmodel import instantiate, we_are_translated +from pypy.rlib.objectmodel import instantiate from pypy.rlib.rarithmetic import intmask class TypeDef: @@ -248,17 +248,8 @@ if "del" in features: class Proto(object): - _del_was_called = False def __del__(self): - # the logic below always resurect the objects, so when - # running on top of CPython we must manually ensure that - # we do it only once - if not we_are_translated(): - if self._del_was_called: - return - self._del_was_called = True - self.clear_all_weakrefs() - self.space.user_del_action.register_dying_object(self) + self._enqueue_for_destruction(self.space) # if the base class needs its own interp-level __del__, # we override the _call_builtin_destructor() method to invoke it # after the app-level destructor. Modified: pypy/dist/pypy/module/__builtin__/interp_classobj.py ============================================================================== --- pypy/dist/pypy/module/__builtin__/interp_classobj.py (original) +++ pypy/dist/pypy/module/__builtin__/interp_classobj.py Tue Aug 12 13:52:28 2008 @@ -630,6 +630,13 @@ space.wrap("instance has no next() method")) return space.call_function(w_func) + def descr_del(self, space): + # Note that this is called from executioncontext.UserDelAction + # via the space.userdel() method. 
+ w_func = self.getattr(space, space.wrap('__del__'), False) + if w_func is not None: + space.call_function(w_func) + rawdict = {} # unary operations @@ -719,20 +726,11 @@ next = interp2app(W_InstanceObject.descr_next, unwrap_spec=['self', ObjSpace]), __weakref__ = make_weakref_descr(W_InstanceObject), + __del__ = interp2app(W_InstanceObject.descr_del, + unwrap_spec=['self', ObjSpace]), **rawdict ) class W_InstanceObjectWithDel(W_InstanceObject): def __del__(self): - self.clear_all_weakrefs() - try: - self.descr_del() - except OperationError, e: - e.write_unraisable(self.space, 'method __del__ of ', self) - e.clear(self.space) # break up reference cycles - - def descr_del(self): - space = self.space - w_func = self.getattr(space, space.wrap('__del__'), False) - if w_func is not None: - space.call_function(w_func) + self._enqueue_for_destruction(self.space) Modified: pypy/dist/pypy/module/gc/test/test_gc.py ============================================================================== --- pypy/dist/pypy/module/gc/test/test_gc.py (original) +++ pypy/dist/pypy/module/gc/test/test_gc.py Tue Aug 12 13:52:28 2008 @@ -5,6 +5,7 @@ def test_disable_finalizers(self): import gc + class X(object): created = 0 deleted = 0 @@ -12,18 +13,31 @@ X.created += 1 def __del__(self): X.deleted += 1 + + class OldX: + created = 0 + deleted = 0 + def __init__(self): + OldX.created += 1 + def __del__(self): + OldX.deleted += 1 + def runtest(should_be_enabled): + runtest1(should_be_enabled, X) + runtest1(should_be_enabled, OldX) + + def runtest1(should_be_enabled, Cls): gc.collect() if should_be_enabled: - assert X.deleted == X.created + assert Cls.deleted == Cls.created else: - old_deleted = X.deleted - X(); X(); X() + old_deleted = Cls.deleted + Cls(); Cls(); Cls() gc.collect() if should_be_enabled: - assert X.deleted == X.created + assert Cls.deleted == Cls.created else: - assert X.deleted == old_deleted + assert Cls.deleted == old_deleted runtest(True) gc.disable_finalizers() From bgola at codespeak.net Wed Aug 13 15:50:37 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Wed, 13 Aug 2008 15:50:37 +0200 (CEST) Subject: [pypy-svn] r57223 - pypy/branch/2.5-features/pypy/module/sys Message-ID: <20080813135037.91E1F169FB0@codespeak.net> Author: bgola Date: Wed Aug 13 15:50:35 2008 New Revision: 57223 Modified: pypy/branch/2.5-features/pypy/module/sys/version.py Log: updating Cpython version Modified: pypy/branch/2.5-features/pypy/module/sys/version.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/sys/version.py (original) +++ pypy/branch/2.5-features/pypy/module/sys/version.py Wed Aug 13 15:50:35 2008 @@ -4,7 +4,7 @@ import os -CPYTHON_VERSION = (2, 4, 1, "alpha", 42) +CPYTHON_VERSION = (2, 5, 1, "alpha", 42) CPYTHON_API_VERSION = 1012 PYPY_VERSION = (1, 0, 0, "alpha", '?') From bgola at codespeak.net Wed Aug 13 15:54:16 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Wed, 13 Aug 2008 15:54:16 +0200 (CEST) Subject: [pypy-svn] r57224 - pypy/branch/2.5-features/pypy/module/_codecs Message-ID: <20080813135416.A2CD7169FB3@codespeak.net> Author: bgola Date: Wed Aug 13 15:54:16 2008 New Revision: 57224 Modified: pypy/branch/2.5-features/pypy/module/_codecs/interp_codecs.py Log: new behavior for codec search functions Modified: pypy/branch/2.5-features/pypy/module/_codecs/interp_codecs.py ============================================================================== --- 
pypy/branch/2.5-features/pypy/module/_codecs/interp_codecs.py (original) +++ pypy/branch/2.5-features/pypy/module/_codecs/interp_codecs.py Wed Aug 13 15:54:16 2008 @@ -96,7 +96,7 @@ w_result = space.call_function(w_search, space.wrap(normalized_encoding)) if not space.is_w(w_result, space.w_None): - if not (space.is_true(space.is_(space.type(w_result), + if not (space.is_true(space.isinstance(w_result, space.w_tuple)) and space.int_w(space.len(w_result)) == 4): raise OperationError( From witulski at codespeak.net Wed Aug 13 17:46:25 2008 From: witulski at codespeak.net (witulski at codespeak.net) Date: Wed, 13 Aug 2008 17:46:25 +0200 (CEST) Subject: [pypy-svn] r57226 - in pypy/branch/oo-jit/pypy/jit/codegen/x86_64: . test Message-ID: <20080813154625.D0808169FD2@codespeak.net> Author: witulski Date: Wed Aug 13 17:46:22 2008 New Revision: 57226 Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Log: (witulski) ADD with an immediate32 operand works now, but doesn't support values greater than 255. "make_two_operand_instr" now differs depending on the operands (reg64/imm32). There are assembler methods for each operand type. Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py Wed Aug 13 17:46:22 2008 @@ -1,3 +1,5 @@ +from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 + #Mapping from register to coding (Rex.W or Rex.B , ModRM) REGISTER_MAP = { "rax": (0, 0), @@ -16,21 +18,41 @@ "r13": (1, 5), "r14": (1, 6), "r15": (1, 7), - } + } -def make_two_quadreg_instr(opcode): - # XXX for now, arg1 and arg2 are registers +# This method wirtes the bitencodings into +# the memory. 
imm32 is used when the operation +# has an constant as operand +def make_two_operand_instr(opcode,imm32_mod=None): def quadreg_instr(self, arg1, arg2): - rexR, modrm1 = self.get_register_bits(arg1) - rexB, modrm2 = self.get_register_bits(arg2) - #rexW(1) = 64bitMode rexX(0) = doesn't matter + # Todo: other cases e.g memory as operand + if isinstance(arg1,Register64): + rexR, modrm1 = self.get_register_bits(arg1.reg) + + if isinstance(arg2,Register64): + rexB, modrm2 = self.get_register_bits(arg2.reg) + if isinstance(arg2,Constant32): # e.g IMMEDIATE32 + rexB = 0 + + # rexW(1) = 64bitMode rexX(0) = doesn't matter # exchange the two arguments (rexB/rexR) (modrm2/modrm1) - self.write_rex_byte(1, rexB, 0, rexR) - self.write(opcode) - self.write_modRM_byte(3, modrm2, modrm1) + if isinstance(arg2,Constant32): + self.write_rex_byte(1, rexB, 0, rexR) + self.write(opcode) + self.write_modRM_byte(3, imm32_mod, modrm1) + # FIXME: Bad solution + # TODO: support values > 255 + if(arg2.value<256): + self.write(chr(arg2.value)) + self.write(chr(0)) + self.write(chr(0)) + self.write(chr(0)) + else: + self.write_rex_byte(1, rexB, 0, rexR) + self.write(opcode) + self.write_modRM_byte(3, modrm2, modrm1) return quadreg_instr - - + class X86_64CodeBuilder(object): """ creats x86_64 opcodes""" def write(self, data): @@ -41,9 +63,14 @@ """ tells the current position in memory""" raise NotImplementedError - ADD = make_two_quadreg_instr("\x00") - MOV = make_two_quadreg_instr("\x89") - SUB = make_two_quadreg_instr("\x28") + # The opcodes differs depending on the operands + ADD_QWREG_IMM32 = make_two_operand_instr("\x81",2) + ADD_QWREG_QWREG = make_two_operand_instr("\x00") + + MOV_QWREG_IMM32 = make_two_operand_instr("\xC7",0) + MOV_QWREG_QWREG = make_two_operand_instr("\x89") + + SUB_QWREG_QWREG = make_two_operand_instr("\x28") def RET(self): self.write("\xC3") @@ -51,6 +78,7 @@ def get_register_bits(self, register): return REGISTER_MAP[register] + # Rex-Prefix 4WRXB see AMD vol3 page 45 def write_rex_byte(self, rexW, rexR, rexX, rexB): byte = (4 << 4) | (rexW << 3) | (rexR << 2) | (rexX << 1) | rexB Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py Wed Aug 13 17:46:22 2008 @@ -1,10 +1,11 @@ from pypy.jit.codegen import model +# Wrapper Classes - -class IntVar(model.GenVar): +class Register64(model.GenVar): def __init__(self, reg): self.reg = reg -class Const(model.GenConst): +# TODO: support 64-bit Constants +class Constant32(model.GenConst): def __init__(self, value): self.value = value \ No newline at end of file Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Wed Aug 13 17:46:22 2008 @@ -1,16 +1,46 @@ from pypy.jit.codegen import model from pypy.rlib.objectmodel import specialize -from pypy.jit.codegen.x86_64.objmodel import IntVar, Const +from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 from pypy.jit.codegen.x86_64.codebuf import InMemoryCodeBuilder +#TODO: understand llTypesystem +from pypy.rpython.lltypesystem import llmemory, lltype +from pypy.jit.codegen.ia32.objmodel import LL_TO_GENVAR + + +# TODO: support zero or one arg. 
+# This method calles the assembler to generate code. +# It saves the operands in the helpregister gv_z +# and determine the Type of the operands, +# to choose the right method in assembler.py def make_two_argument_method(name): def op_int(self, gv_x, gv_y): gv_z = self.allocate_register() - self.mc.MOV(gv_z.reg, gv_x.reg) - method = getattr(self.mc, name) - method(gv_z.reg, gv_y.reg) + self.mc.MOV_QWREG_QWREG(gv_z, gv_x) + method = getattr(self.mc, name+typeToString(gv_x)+typeToString(gv_y)) + method(gv_z, gv_y) return gv_z return op_int + + + +# helper of "make_two_argument_method" to choose +# the right assembler method +def typeToString(parseMe): + if isinstance(parseMe,Constant32): + return "_IMM32" + if isinstance(parseMe,Register64): + return "_QWREG" + + + +# a small helper that provides correct type signature +def map_arg(arg): + if isinstance(arg, lltype.Ptr): + arg = llmemory.Address + if isinstance(arg, (lltype.Array, lltype.Struct)): + arg = lltype.Void + return LL_TO_GENVAR[arg] class Builder(model.GenBuilder): @@ -36,7 +66,7 @@ # "r14":None, # "r15":None, } - + @specialize.arg(1) def genop1(self, opname, gv_arg): genmethod = getattr(self, 'op_' + opname) @@ -52,31 +82,44 @@ def finish_and_return(self, sigtoken, gv_returnvar): #self.mc.write("\xB8\x0F\x00\x00\x00") - self.mc.MOV("rax", gv_returnvar.reg) + self.mc.MOV_QWREG_QWREG(Register64("rax"), gv_returnvar) self.mc.RET() def allocate_register(self, register=None): if register is None: - return IntVar(self.freeregisters.popitem()[0]) + return Register64(self.freeregisters.popitem()[0]) else: del self.freeregisters[register] - return IntVar(register) + return Register64(register) + + def end(self): + pass class RX86_64GenOp(model.AbstractRGenOp): - - + @staticmethod @specialize.memo() def sigToken(FUNCTYPE): - return None - + return ([map_arg(arg) for arg in FUNCTYPE.ARGS if arg + is not lltype.Void], map_arg(FUNCTYPE.RESULT)) + + # wrappes a integer value + def genconst(self, llvalue): + T = lltype.typeOf(llvalue) + # TODO: other cases(?) 
+ if T is lltype.Signed: + return Constant32(llvalue) + def newgraph(self, sigtoken, name): - # XXX for now assume that all functions take two ints and return an int + arg_tokens, res_token = sigtoken + inputargs_gv = [] builder = Builder() - inputargs_gv = [builder.allocate_register("rdi"), - builder.allocate_register("rsi")] - #XXX - entrypoint = Const(builder.mc.tell()) - return builder, entrypoint, inputargs_gv + # TODO: Builder._open() + entrypoint = builder.mc.tell() + # TODO: support more than two reg + register_list = ["rdi","rsi"] + inputargs_gv = [builder.allocate_register(register_list[i]) + for i in range(len(arg_tokens))] + return builder,Constant32(entrypoint), inputargs_gv Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py Wed Aug 13 17:46:22 2008 @@ -1,4 +1,5 @@ from pypy.jit.codegen.x86_64 import assembler +from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 class AsmTest(assembler.X86_64CodeBuilder): def __init__(self): @@ -12,12 +13,12 @@ def test_add(): mc = AsmTest() - mc.ADD("rax", "r11") + mc.ADD_QWREG_QWREG(Register64("rax"), Register64("r11")) assert mc.get_as_string() == "\x4C\x00\xD8" - mc.ADD("rbx", "rbx") + mc.ADD_QWREG_QWREG(Register64("rbx"), Register64("rbx")) assert mc.get_as_string() == "\x4C\x00\xD8\x48\x00\xDB" def test_mov(): mc = AsmTest() - mc.MOV("r15","rsp") + mc.MOV_QWREG_QWREG(Register64("r15"),Register64("rsp")) assert mc.get_as_string() == "\x49\x89\xE7" \ No newline at end of file Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Wed Aug 13 17:46:22 2008 @@ -1,9 +1,11 @@ from pypy.jit.codegen.x86_64.rgenop import RX86_64GenOp from pypy.rpython.lltypesystem import lltype from ctypes import cast, c_void_p, CFUNCTYPE, c_long, c_double +from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 + +rgenop = RX86_64GenOp() def make_testbuilder(): - rgenop = RX86_64GenOp() FUNC = lltype.FuncType([lltype.Signed, lltype.Signed], lltype.Signed) #the funtiontype(arguments,returntype) of the graph we will create token = rgenop.sigToken(FUNC) builder, entrypoint, inputargs_gv = rgenop.newgraph(token, "test") @@ -12,7 +14,7 @@ fp = cast(c_void_p(entrypoint.value), CFUNCTYPE(c_long, *ctypestypes)) return builder, fp, inputargs_gv, token - + def test_add(): builder, fp, inputargs_gv, token = make_testbuilder() genv0 = inputargs_gv[0] #the first argument "place" From witulski at codespeak.net Wed Aug 13 17:48:48 2008 From: witulski at codespeak.net (witulski at codespeak.net) Date: Wed, 13 Aug 2008 17:48:48 +0200 (CEST) Subject: [pypy-svn] r57227 - pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test Message-ID: <20080813154848.953BE16A059@codespeak.net> Author: witulski Date: Wed Aug 13 17:48:48 2008 New Revision: 57227 Added: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py Log: (witulski) Only the Add immediate test passes. 
All other tests are skiped now Added: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py ============================================================================== --- (empty file) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py Wed Aug 13 17:48:48 2008 @@ -0,0 +1,69 @@ +import py +from pypy.rpython.lltypesystem import lltype +from pypy.jit.codegen.x86_64.rgenop import RX86_64GenOp +from pypy.jit.codegen.test.rgenop_tests import AbstractRGenOpTestsDirect +#from pypy.jit.codegen.test.rgenop_tests import AbstractRGenOpTestsCompile + +# for the individual tests see +# ====> ../../test/rgenop_tests.py + +def skip(self): + py.test.skip("not implemented yet") + +class TestRGenopDirect(AbstractRGenOpTestsDirect): + RGenOp = RX86_64GenOp + test_directtesthelper_direct = skip + test_dummy_compile = skip + test_cast_raising = skip + test_float_adder = skip + test_float_call = skip + test_float_loop_direct = skip + test_dummy_direct = skip + test_largedummy_direct = skip + test_branching_direct = skip + test_goto_direct = skip + test_if_direct = skip + test_switch_direct = skip + test_large_switch_direct = skip + test_fact_direct = skip + test_calling_pause_direct = skip + test_longwinded_and_direct = skip + test_condition_result_cross_link_direct = skip + test_multiple_cmps = skip + test_flipped_cmp_with_immediate = skip + test_tight_loop = skip + test_jump_to_block_with_many_vars = skip + test_same_as = skip + test_pause_and_resume_direct = skip + test_like_residual_red_call_with_exc_direct = skip + test_call_functions_with_different_signatures_direct = skip + test_defaultonly_switch = skip + test_bool_not_direct = skip + test_read_frame_var_direct = skip + test_read_frame_var_float_direct = skip + test_genconst_from_frame_var_direct = skip + test_write_frame_place_direct = skip + test_write_frame_place_float_direct = skip + test_write_lots_of_frame_places_direct = skip + test_read_frame_place_direct = skip + test_read_float_frame_place_direct = skip + test_frame_vars_like_the_frontend_direct = skip + test_unaliasing_variables_direct = skip + test_from_random_direct = skip + test_from_random_2_direct = skip + test_from_random_3_direct = skip + test_from_random_4_direct = skip + test_from_random_5_direct = skip + test_genzeroconst = skip + test_ovfcheck_adder_direct = skip + test_ovfcheck1_direct = skip + test_ovfcheck2_direct = skip + test_cast_direct = skip + test_array_of_ints = skip + test_interior_access = skip + test_fieldaccess = skip + test_interior_access = skip + test_interior_access_float = skip + test_void_return = skip + test_demo_f1_direct = skip + test_red_switch = skip \ No newline at end of file From antocuni at codespeak.net Wed Aug 13 20:56:27 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Wed, 13 Aug 2008 20:56:27 +0200 (CEST) Subject: [pypy-svn] r57231 - in pypy/branch/oo-jit/pypy: jit/codegen/cli jit/codegen/test translator/cli/src Message-ID: <20080813185627.05A7A16A07C@codespeak.net> Author: antocuni Date: Wed Aug 13 20:56:25 2008 New Revision: 57231 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/args_manager.py pypy/branch/oo-jit/pypy/jit/codegen/test/rgenop_tests.py pypy/branch/oo-jit/pypy/translator/cli/src/pypylib.cs Log: - add a test that passes "many" (==4) arguments across non-local links - change completetly the way InputArgs is implemented, by using a chain of struct Pair. 
As proven by user/antocuni/cli-bench/inputargs.cs, this has little impact on performances Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/args_manager.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/args_manager.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/args_manager.py Wed Aug 13 20:56:25 2008 @@ -4,13 +4,39 @@ Assembly = System.Reflection.Assembly OpCodes = System.Reflection.Emit.OpCodes +def new_type_array(types): + array = dotnet.new_array(System.Type, len(types)) + for i in range(len(types)): + array[i] = types[i] + return array + +def MakeGenericType(clitype, paramtypes): + array = new_type_array(paramtypes) + return clitype.MakeGenericType(array) + class ArgsManager: def __init__(self): self.type_counter = {} + self.type_index = {} self.clitype = None - self.fieldtypes = None - self.slots = None + self._init_types() + + def _load_pypylib(self): + from pypy.translator.cli.query import pypylib, pypylib2 + assembly = None + for name in [pypylib, pypylib2]: + assembly = Assembly.LoadWithPartialName(name) + if assembly: + break + assert assembly is not None + return assembly + + def _init_types(self): + pypylib = self._load_pypylib() + self.clitype_InputArgs = pypylib.GetType('pypy.runtime.InputArgs`1') + self.clitype_Void = pypylib.GetType('pypy.runtime.Void') + self.clitype_Pair = pypylib.GetType('pypy.runtime.Pair`2') def is_open(self): return self.clitype is None @@ -19,11 +45,10 @@ assert not self.is_open() return self.clitype - def register(self, args_gv): + def register_types(self, types): assert self.is_open() newcounter = {} - for gv_arg in args_gv: - clitype = gv_arg.getCliType() + for clitype in types: newcount = newcounter.get(clitype, 0) newcounter[clitype] = newcount+1 @@ -32,99 +57,86 @@ maxcount = max(oldcount, newcount) self.type_counter[clitype] = maxcount + def register(self, args_gv): + types = [gv_arg.getCliType() for gv_arg in args_gv] + self.register_types(types) + def close(self): assert self.is_open() - templates = self._get_templates() - - self.fieldtypes = fieldtypes = [] - self.slots = slots = {} + fieldtypes = [] for clitype, count in self.type_counter.iteritems(): - start = len(fieldtypes) - end = start+count - fieldtypes += [clitype]*count - slots[clitype] = self._myrange(start, end) - numfields = len(fieldtypes) - - if numfields <= len(templates): - template = templates[numfields-1] - array = dotnet.new_array(System.Type, numfields) - for i in range(numfields): - array[i] = fieldtypes[i] - self.clitype = template.MakeGenericType(array) - else: - assert False, 'TODO' + self.type_index[clitype] = len(fieldtypes) + fieldtypes += [clitype] * count + + pairtype = self.clitype_Void + # iterate over reversed(fieldtypes) + i = len(fieldtypes)-1 + while True: + if i < 0: + break + fieldtype = fieldtypes[i] + pairtype = MakeGenericType(self.clitype_Pair, [fieldtype, pairtype]) + i-=1 + +## for fieldtype in fieldtypes[::-1]: +## pairtype = MakeGenericType(self.clitype_Pair, [fieldtype, pairtype]) + self.clitype = MakeGenericType(self.clitype_InputArgs, [pairtype]) + + def _store_by_index(self, meth, gv_arg, i): + head_info = self._load_nth_head(meth, i) + gv_arg.load(meth) + meth.il.Emit(OpCodes.Stfld, head_info) + + def _load_by_index(self, meth, i): + head_info = self._load_nth_head(meth, i) + meth.il.Emit(OpCodes.Ldfld, head_info) + + def _load_nth_head(self, meth, n): + il = meth.il + fields_info = self.clitype.GetField("fields") + meth.gv_inputargs.load(meth) + 
il.Emit(OpCodes.Ldflda, fields_info) + + lastfield_info = fields_info + for _ in range(n): + fieldtype = lastfield_info.get_FieldType() + lastfield_info = fieldtype.GetField("tail") + il.Emit(OpCodes.Ldflda, lastfield_info) + fieldtype = lastfield_info.get_FieldType() + return fieldtype.GetField("head") def copy_to_inputargs(self, meth, args_gv): "copy args_gv into the appropriate fields of inputargs" assert not self.is_open() - il = meth.il - gv_inputargs = meth.gv_inputargs - fields = self._get_fields(args_gv) - assert len(args_gv) == len(fields) - for i in range(len(args_gv)): + fieldtypes = [gv_arg.getCliType() for gv_arg in args_gv] + indexes = self._get_indexes(fieldtypes) + assert len(indexes) == len(fieldtypes) + for i in range(len(indexes)): + n = indexes[i] gv_arg = args_gv[i] - fieldinfo = fields[i] - gv_inputargs.load(meth) - gv_arg.load(meth) - il.Emit(OpCodes.Stfld, fieldinfo) + self._store_by_index(meth, gv_arg, n) def copy_from_inputargs(self, meth, args_gv): "copy the appropriate fields of inputargs into args_gv" assert not self.is_open() - il = meth.il - gv_inputargs = meth.gv_inputargs - fields = self._get_fields(args_gv) - assert len(args_gv) == len(fields) - for i in range(len(args_gv)): + fieldtypes = [gv_arg.getCliType() for gv_arg in args_gv] + indexes = self._get_indexes(fieldtypes) + assert len(indexes) == len(fieldtypes) + for i in range(len(indexes)): + n = indexes[i] gv_arg = args_gv[i] - fieldinfo = fields[i] - gv_inputargs.load(meth) - il.Emit(OpCodes.Ldfld, fieldinfo) + self._load_by_index(meth, n) gv_arg.store(meth) - def _myrange(self, start, end): - length = (end - start) - res = [0] * length - for i in range(start, end): - res[i] = i - return res - - def _load_pypylib(self): - from pypy.translator.cli.query import pypylib, pypylib2 - assembly = None - for name in [pypylib, pypylib2]: - assembly = Assembly.LoadWithPartialName(name) - if assembly: - break - assert assembly is not None - return assembly - - def _get_templates(self): - pypylib = self._load_pypylib() - templates = [] - i = 1 - while True: - typename = 'pypy.runtime.InputArgs`%d' % i - clitype = pypylib.GetType(typename) - if not clitype: - break - templates.append(clitype) - i += 1 - return templates - - def _copy_slots(self): - 'Deepcopy self.slots' - slots = {} - for key, value in self.slots.iteritems(): - slots[key] = value[:] - return slots - - def _get_fields(self, args_gv): - slots = self._copy_slots() - types = [gv_arg.getCliType() for gv_arg in args_gv] - fields = [] - for clitype in types: - slot = slots[clitype].pop() - fieldinfo = self.clitype.GetField('field%d' % slot) - fields.append(fieldinfo) - return fields + def _get_indexes(self, fieldtypes): + indexes = [] + curtype = None + curidx = -1 + for fieldtype in fieldtypes: + if fieldtype != curtype: + curidx = self.type_index[fieldtype] + curtype = fieldtype + else: + curidx += 1 + indexes.append(curidx) + return indexes Modified: pypy/branch/oo-jit/pypy/jit/codegen/test/rgenop_tests.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/test/rgenop_tests.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/test/rgenop_tests.py Wed Aug 13 20:56:25 2008 @@ -404,6 +404,53 @@ return res return switch_runner +def make_switch_many_args(T, rgenop): + """ + def f(v0, v1, v2, v3, v4): + if v0 == 0: # switch + return 21*v1 + elif v0 == 1: + return 21+v1+v2+v3+v4 + else: + return v1 + """ + sigtoken = rgenop.sigToken(T.FUNC5) + builder, gv_switch, [gv0, gv1, gv2, gv3, gv4] 
= rgenop.newgraph(sigtoken, "switch_many_args") + builder.start_writing() + + flexswitch, default_builder = builder.flexswitch(gv0, [gv1, gv2, gv3, gv4]) + const21 = rgenop.genconst(21) + + # default + default_builder.finish_and_return(sigtoken, gv1) + # case == 0 + const0 = rgenop.genconst(0) + case_builder = flexswitch.add_case(const0) + gv_res_case0 = case_builder.genop2('int_mul', const21, gv1) + case_builder.finish_and_return(sigtoken, gv_res_case0) + # case == 1 + const1 = rgenop.genconst(1) + case_builder = flexswitch.add_case(const1) + gv_tmp1 = case_builder.genop2('int_add', const21, gv1) + gv_tmp2 = case_builder.genop2('int_add', gv_tmp1, gv2) + gv_tmp3 = case_builder.genop2('int_add', gv_tmp2, gv3) + gv_res_case1 = case_builder.genop2('int_add', gv_tmp3, gv3) + case_builder.finish_and_return(sigtoken, gv_res_case1) + + builder.end() + return gv_switch + +def get_switch_many_args_runner(T, RGenOp): + def switch_runner(x, y, z, w, k): + rgenop = RGenOp() + gv_switchfn = make_switch_many_args(T, rgenop) + switchfn = gv_switchfn.revealconst(T.Ptr(T.FUNC5)) + res = switchfn(x, y, z, w, k) + keepalive_until_here(rgenop) # to keep the code blocks alive + return res + return switch_runner + + def make_large_switch(T, rgenop): """ def f(v0, v1): @@ -1101,6 +1148,15 @@ res = fn(42, 18) assert res == 18 + def test_switch_many_args_compile(self): + fn = self.compile(get_switch_many_args_runner(self.T, self.RGenOp), [int, int, int, int, int]) + res = fn(0, 2, 3, 4, 5) + assert res == 42 + res = fn(1, 4, 5, 6, 6) + assert res == 42 + res = fn(42, 16, 3, 4, 5) + assert res == 16 + def test_large_switch_compile(self): fn = self.compile(get_large_switch_runner(self.T, self.RGenOp), [int, int]) res = fn(0, 2) @@ -1340,6 +1396,17 @@ res = fnptr(42, 16) assert res == 16 + def test_switch_many_args_direct(self): + rgenop = self.RGenOp() + gv_switchfn = make_switch_many_args(self.T, rgenop) + fnptr = self.cast(gv_switchfn, 5) + res = fnptr(0, 2, 3, 4, 5) + assert res == 42 + res = fnptr(1, 4, 5, 6, 6) + assert res == 42 + res = fnptr(42, 16, 3, 4, 5) + assert res == 16 + def test_large_switch_direct(self): rgenop = self.RGenOp() gv_switchfn = make_large_switch(self.T, rgenop) Modified: pypy/branch/oo-jit/pypy/translator/cli/src/pypylib.cs ============================================================================== --- pypy/branch/oo-jit/pypy/translator/cli/src/pypylib.cs (original) +++ pypy/branch/oo-jit/pypy/translator/cli/src/pypylib.cs Wed Aug 13 20:56:25 2008 @@ -105,13 +105,20 @@ namespace pypy.runtime { - // XXX: these classes should be automatically generated by the JIT backend - public delegate int FlexSwitchCase(int block, object args); - public class InputArgs { - public int Int32_0; - public int Int32_1; + public struct Void { + } + + public struct Pair { + public T0 head; + public T1 tail; } + public class InputArgs { + public T fields; + } + + public delegate int FlexSwitchCase(int block, object args); + public class LowLevelFlexSwitch { public int default_blockid = -1; From fijal at codespeak.net Thu Aug 14 09:50:28 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 14 Aug 2008 09:50:28 +0200 (CEST) Subject: [pypy-svn] r57236 - pypy/dist/pypy/translator/microbench Message-ID: <20080814075028.04A12169FCF@codespeak.net> Author: fijal Date: Thu Aug 14 09:50:26 2008 New Revision: 57236 Modified: pypy/dist/pypy/translator/microbench/microbench.py pypy/dist/pypy/translator/microbench/test_dispatch.py Log: this rather harmless changes fixes issue385 Modified: 
pypy/dist/pypy/translator/microbench/microbench.py ============================================================================== --- pypy/dist/pypy/translator/microbench/microbench.py (original) +++ pypy/dist/pypy/translator/microbench/microbench.py Thu Aug 14 09:50:26 2008 @@ -23,7 +23,7 @@ this_dir = os.path.dirname(sys.argv[0]) microbenches = [] -for fname in os.listdir('.'): +for fname in os.listdir(this_dir): if not fname.startswith('test_') or not fname.endswith('.py'): continue microbench = fname[:-3] @@ -74,7 +74,9 @@ for n, exe in enumerate(executables): print 'exe:', exe - data = [s for s in os.popen(exe + ' microbench.py -Fr %s 2>&1' % limit).readlines() if not s.startswith('debug:')] + data = [s for s in os.popen('%s %s -Fr %s 2>&1' % + (exe, os.path.join(this_dir, 'microbench.py'), limit)).readlines() + if not s.startswith('debug:')] benchdata = {} for d in data: try: Modified: pypy/dist/pypy/translator/microbench/test_dispatch.py ============================================================================== --- pypy/dist/pypy/translator/microbench/test_dispatch.py (original) +++ pypy/dist/pypy/translator/microbench/test_dispatch.py Thu Aug 14 09:50:26 2008 @@ -1,9 +1,10 @@ try: import dis + import new except ImportError: pass else: - import new, sys + import sys N_NOPS = 10**7 N = int(50) From fijal at codespeak.net Thu Aug 14 11:16:45 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 14 Aug 2008 11:16:45 +0200 (CEST) Subject: [pypy-svn] r57238 - pypy/dist/pypy/translator/js/test Message-ID: <20080814091645.AF37D16A069@codespeak.net> Author: fijal Date: Thu Aug 14 11:16:44 2008 New Revision: 57238 Modified: pypy/dist/pypy/translator/js/test/test_rclass.py Log: Skip those tests as JS backend is not going anywhere :-( Modified: pypy/dist/pypy/translator/js/test/test_rclass.py ============================================================================== --- pypy/dist/pypy/translator/js/test/test_rclass.py (original) +++ pypy/dist/pypy/translator/js/test/test_rclass.py Thu Aug 14 11:16:44 2008 @@ -67,10 +67,19 @@ assert isinstance(res[0], int) def test_hash_preservation(self): - py.test.skip("WIP") + py.test.skip("Broken") def test_issubclass_type(self): - py.test.skip("WIP") + py.test.skip("Broken") + + def test___class___attribute(self): + py.test.skip("Broken") + + def test_circular_hash_initialization(self): + py.test.skip("Broken") + + def test_type(self): + py.test.skip("Broken") #def test_isinstance(self): # py.test.skip("WIP") From fijal at codespeak.net Thu Aug 14 11:25:12 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 14 Aug 2008 11:25:12 +0200 (CEST) Subject: [pypy-svn] r57239 - pypy/dist/pypy/translator/js/test Message-ID: <20080814092512.B2E9016A081@codespeak.net> Author: fijal Date: Thu Aug 14 11:25:12 2008 New Revision: 57239 Modified: pypy/dist/pypy/translator/js/test/test_rfloat.py Log: Skip partly this test. 
Needs more testing infrastructure adaptation Modified: pypy/dist/pypy/translator/js/test/test_rfloat.py ============================================================================== --- pypy/dist/pypy/translator/js/test/test_rfloat.py (original) +++ pypy/dist/pypy/translator/js/test/test_rfloat.py Thu Aug 14 11:25:12 2008 @@ -23,5 +23,24 @@ y = fn(x) assert fn(x) == 9223372026854775808 + def test_float2str(self): + def fn(f): + return str(f) + + res = self.interpret(fn, [1.5]) + assert float(self.ll_to_string(res)) == 1.5 + res = self.interpret(fn, [-1.5]) + assert float(self.ll_to_string(res)) == -1.5 + py.test.skip("Partly works, needs adapting testing infrastructure") + inf = 1e200 * 1e200 + nan = inf/inf + res = self.interpret(fn, [inf]) + assert self.ll_to_string(res) == self.inf + res = self.interpret(fn, [-inf]) + assert self.ll_to_string(res) == self.minus_inf + res = self.interpret(fn, [nan]) + assert self.ll_to_string(res) == self.nan + + def test_r_singlefloat(self): py.test.skip("not implemented: single-precision floats") From witulski at codespeak.net Thu Aug 14 12:55:19 2008 From: witulski at codespeak.net (witulski at codespeak.net) Date: Thu, 14 Aug 2008 12:55:19 +0200 (CEST) Subject: [pypy-svn] r57240 - in pypy/branch/oo-jit/pypy/jit/codegen/x86_64: . test Message-ID: <20080814105519.F375616A015@codespeak.net> Author: witulski Date: Thu Aug 14 12:55:17 2008 New Revision: 57240 Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Log: (witulski) The Backend now supports the INC instruction. Added a new test for this. Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py Thu Aug 14 12:55:17 2008 @@ -52,6 +52,22 @@ self.write(opcode) self.write_modRM_byte(3, modrm2, modrm1) return quadreg_instr + + +# This method wirtes the bitencodings into +# the memory. mod is operation specific +def make_one_operand_instr(opcode,mod = None): + def quadreg_instr(self, arg1): + # Todo: other cases e.g memory as operand + if isinstance(arg1,Register64): + rexB, modrm1 = self.get_register_bits(arg1.reg) + rexX = 0 + + # rexW(1) = 64bitMode + self.write_rex_byte(1, 0, rexX, rexB) + self.write(opcode) + self.write_modRM_byte(3, mod, modrm1) + return quadreg_instr class X86_64CodeBuilder(object): """ creats x86_64 opcodes""" @@ -67,6 +83,8 @@ ADD_QWREG_IMM32 = make_two_operand_instr("\x81",2) ADD_QWREG_QWREG = make_two_operand_instr("\x00") + INC_QWREG = make_one_operand_instr("\xFF",0) + MOV_QWREG_IMM32 = make_two_operand_instr("\xC7",0) MOV_QWREG_QWREG = make_two_operand_instr("\x89") Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Thu Aug 14 12:55:17 2008 @@ -8,7 +8,8 @@ -# TODO: support zero or one arg. +# TODO: support zero arg. + # This method calles the assembler to generate code. 
# It saves the operands in the helpregister gv_z # and determine the Type of the operands, @@ -22,6 +23,13 @@ return gv_z return op_int +def make_one_argument_method(name): + def op_int(self, gv_x): + method = getattr(self.mc, name+typeToString(gv_x)) + method(gv_x) + return gv_x + return op_int + # helper of "make_two_argument_method" to choose @@ -31,6 +39,7 @@ return "_IMM32" if isinstance(parseMe,Register64): return "_QWREG" + @@ -79,6 +88,7 @@ op_int_add = make_two_argument_method("ADD") op_int_sub = make_two_argument_method("SUB") + op_int_inc = make_one_argument_method("INC") def finish_and_return(self, sigtoken, gv_returnvar): #self.mc.write("\xB8\x0F\x00\x00\x00") Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py Thu Aug 14 12:55:17 2008 @@ -9,9 +9,26 @@ def skip(self): py.test.skip("not implemented yet") + +def make_inc(rgenop): + sigtoken = rgenop.sigToken(lltype.FuncType([lltype.Signed], lltype.Signed)) + builder, gv_inc, gv_x = rgenop.newgraph(sigtoken, "inc") + builder.start_writing() + gv_result = builder.genop1("int_inc", gv_x[0]) + builder.finish_and_return(sigtoken, gv_result) + builder.end() + return gv_inc class TestRGenopDirect(AbstractRGenOpTestsDirect): RGenOp = RX86_64GenOp + + def test_inc(self): + rgenop = self.RGenOp() + inc_result = make_inc(rgenop) + fnptr = self.cast(inc_result,1) + res = fnptr(0) + assert res == 1 + test_directtesthelper_direct = skip test_dummy_compile = skip test_cast_raising = skip Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Thu Aug 14 12:55:17 2008 @@ -1,7 +1,10 @@ +import py from pypy.jit.codegen.x86_64.rgenop import RX86_64GenOp from pypy.rpython.lltypesystem import lltype from ctypes import cast, c_void_p, CFUNCTYPE, c_long, c_double from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 +from pypy.jit.codegen.test.rgenop_tests import AbstractTestBase +from pypy.jit.codegen.test.rgenop_tests import AbstractRGenOpTestsDirect rgenop = RX86_64GenOp() @@ -14,7 +17,7 @@ fp = cast(c_void_p(entrypoint.value), CFUNCTYPE(c_long, *ctypestypes)) return builder, fp, inputargs_gv, token - + def test_add(): builder, fp, inputargs_gv, token = make_testbuilder() genv0 = inputargs_gv[0] #the first argument "place" From cfbolz at codespeak.net Thu Aug 14 13:04:21 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Thu, 14 Aug 2008 13:04:21 +0200 (CEST) Subject: [pypy-svn] r57241 - pypy/branch/oo-jit/pypy/jit/codegen/x86_64 Message-ID: <20080814110421.5982C16A0A2@codespeak.net> Author: cfbolz Date: Thu Aug 14 13:04:20 2008 New Revision: 57241 Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Log: some cosmetic stuff Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py Thu Aug 14 13:04:20 2008 @@ -2,10 +2,12 @@ # Wrapper Classes class 
Register64(model.GenVar): + _dispatchname = "_QWREG" def __init__(self, reg): self.reg = reg # TODO: support 64-bit Constants class Constant32(model.GenConst): + _dispatchname = "_IMM32" def __init__(self, value): - self.value = value \ No newline at end of file + self.value = value Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Thu Aug 14 13:04:20 2008 @@ -18,14 +18,14 @@ def op_int(self, gv_x, gv_y): gv_z = self.allocate_register() self.mc.MOV_QWREG_QWREG(gv_z, gv_x) - method = getattr(self.mc, name+typeToString(gv_x)+typeToString(gv_y)) + method = getattr(self.mc, name + type_to_string(gv_x)+type_to_string(gv_y)) method(gv_z, gv_y) return gv_z return op_int def make_one_argument_method(name): def op_int(self, gv_x): - method = getattr(self.mc, name+typeToString(gv_x)) + method = getattr(self.mc, name+type_to_string(gv_x)) method(gv_x) return gv_x return op_int @@ -34,13 +34,8 @@ # helper of "make_two_argument_method" to choose # the right assembler method -def typeToString(parseMe): - if isinstance(parseMe,Constant32): - return "_IMM32" - if isinstance(parseMe,Register64): - return "_QWREG" - - +def type_to_string(parse_me): + return parse_me._dispatchname # a small helper that provides correct type signature From cfbolz at codespeak.net Thu Aug 14 13:07:41 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Thu, 14 Aug 2008 13:07:41 +0200 (CEST) Subject: [pypy-svn] r57242 - pypy/branch/oo-jit/pypy/jit/codegen/x86_64 Message-ID: <20080814110741.85F42169F94@codespeak.net> Author: cfbolz Date: Thu Aug 14 13:07:40 2008 New Revision: 57242 Added: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/conftest.py (contents, props changed) Log: skip the tests on the wrong architecture Added: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/conftest.py ============================================================================== --- (empty file) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/conftest.py Thu Aug 14 13:07:40 2008 @@ -0,0 +1,16 @@ +import py +from pypy.jit.codegen import detect_cpu + + +class Directory(py.test.collect.Directory): + + def run(self): + try: + processor = detect_cpu.autodetect() + except detect_cpu.ProcessorAutodetectError, e: + py.test.skip(str(e)) + else: + if processor != 'x86_64': + py.test.skip('detected a %r CPU' % (processor,)) + + return super(Directory, self).run() From arigo at codespeak.net Thu Aug 14 13:15:47 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 14 Aug 2008 13:15:47 +0200 (CEST) Subject: [pypy-svn] r57243 - pypy/dist/pypy/lib/_ctypes Message-ID: <20080814111547.9BB6F16A036@codespeak.net> Author: arigo Date: Thu Aug 14 13:15:47 2008 New Revision: 57243 Modified: pypy/dist/pypy/lib/_ctypes/primitive.py Log: Use proper quoting in the repr of SimpleCData. 
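A minimal, self-contained sketch (not taken from the patch below; the class name is made up) of what the one-character %s -> %r change buys: the repr now quotes and escapes the stored value, so a None value, an empty string and a string containing odd bytes all stay distinguishable.

    class FakeSimpleCData(object):            # hypothetical stand-in for a ctypes simple type
        def __init__(self, value):
            self.value = value
        def __repr__(self):
            return "%s(%r)" % (type(self).__name__, self.value)

    print FakeSimpleCData("a\x00b")           # -> FakeSimpleCData('a\x00b')
    print FakeSimpleCData(None)               # -> FakeSimpleCData(None)
    # with "%s(%s)" the NUL byte would be written out raw, and
    # FakeSimpleCData("None") would print exactly like FakeSimpleCData(None)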
Modified: pypy/dist/pypy/lib/_ctypes/primitive.py ============================================================================== --- pypy/dist/pypy/lib/_ctypes/primitive.py (original) +++ pypy/dist/pypy/lib/_ctypes/primitive.py Thu Aug 14 13:15:47 2008 @@ -250,7 +250,7 @@ return self.value def __repr__(self): - return "%s(%s)" % (type(self).__name__, self.value) + return "%s(%r)" % (type(self).__name__, self.value) def __nonzero__(self): return self._buffer[0] not in (0, '\x00') From cfbolz at codespeak.net Thu Aug 14 13:25:12 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Thu, 14 Aug 2008 13:25:12 +0200 (CEST) Subject: [pypy-svn] r57244 - pypy/branch/oo-jit/pypy/jit/codegen/x86_64 Message-ID: <20080814112512.0A7CA169FE6@codespeak.net> Author: cfbolz Date: Thu Aug 14 13:25:12 2008 New Revision: 57244 Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Log: clearer exception Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Thu Aug 14 13:25:12 2008 @@ -94,6 +94,8 @@ if register is None: return Register64(self.freeregisters.popitem()[0]) else: + if not self.freeregisters: + raise NotImplementedError("spilling not implemented") del self.freeregisters[register] return Register64(register) From fijal at codespeak.net Thu Aug 14 13:44:12 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 14 Aug 2008 13:44:12 +0200 (CEST) Subject: [pypy-svn] r57246 - in pypy/dist/pypy/lib: . test2 Message-ID: <20080814114412.0E916169FB2@codespeak.net> Author: fijal Date: Thu Aug 14 13:44:08 2008 New Revision: 57246 Added: pypy/dist/pypy/lib/test2/test_hashlib.py (contents, props changed) Modified: pypy/dist/pypy/lib/_hashlib.py Log: (fijal, arigo) Implement other objects passing to _hashlib routines. Modified: pypy/dist/pypy/lib/_hashlib.py ============================================================================== --- pypy/dist/pypy/lib/_hashlib.py (original) +++ pypy/dist/pypy/lib/_hashlib.py Thu Aug 14 13:44:08 2008 @@ -5,6 +5,11 @@ libpath = ctypes.util.find_library('ssl') lib = CDLL(libpath) # Linux, OS X +def bufferstr(x): + if isinstance(x, basestring): + return str(x) + else: + return buffer(x)[:] # FIXME do we really need this anywhere here? class ENV_MD(Structure): @@ -112,6 +117,7 @@ def update(self, string): "Update this hash object's state with the provided string." 
+ string = bufferstr(string) lib.EVP_DigestUpdate(byref(self._obj), c_char_p(string), c_uint(len(string))) def new(name, string=''): @@ -131,11 +137,9 @@ ctx = _new_ENV_MD() lib.EVP_DigestInit(pointer(ctx), digest) - + h = hash(_get_digest(ctx), name) if string: - if not isinstance(string, str): - raise ValueError("hash content is not string") h.update(string) return hash(ctx, name) Added: pypy/dist/pypy/lib/test2/test_hashlib.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/lib/test2/test_hashlib.py Thu Aug 14 13:44:08 2008 @@ -0,0 +1,19 @@ + +import py + +class AppTestHashLib: + def setup_class(cls): + from pypy.conftest import option, gettestobjspace + if option.runappdirect: + try: + import __pypy__ + except ImportError: + py.test.skip("Whitebox tests") + cls.space = gettestobjspace(usemodules=('_rawffi','struct')) + + + def test_unicode(self): + import hashlib + import _hashlib + assert isinstance(hashlib.new('sha1', u'xxx'), _hashlib.hash) + From arigo at codespeak.net Thu Aug 14 16:45:55 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Thu, 14 Aug 2008 16:45:55 +0200 (CEST) Subject: [pypy-svn] r57250 - pypy/branch/jit-merging-logic/pypy/jit/timeshifter Message-ID: <20080814144555.0F28E169F70@codespeak.net> Author: arigo Date: Thu Aug 14 16:45:53 2008 New Revision: 57250 Modified: pypy/branch/jit-merging-logic/pypy/jit/timeshifter/rcontainer.py Log: Check this old change in before I remove my working copy. Modified: pypy/branch/jit-merging-logic/pypy/jit/timeshifter/rcontainer.py ============================================================================== --- pypy/branch/jit-merging-logic/pypy/jit/timeshifter/rcontainer.py (original) +++ pypy/branch/jit-merging-logic/pypy/jit/timeshifter/rcontainer.py Thu Aug 14 16:45:53 2008 @@ -1371,6 +1371,7 @@ searchindex = fielddesc.fieldindex for i in range(len(self.data)): if self.data[i][0] == searchindex: + # XXX I think we never arrive here anyway self.data[i] = searchindex, box return else: From bgola at codespeak.net Fri Aug 15 05:34:29 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Fri, 15 Aug 2008 05:34:29 +0200 (CEST) Subject: [pypy-svn] r57261 - pypy/branch/2.5-features/pypy/interpreter/astcompiler Message-ID: <20080815033429.DE8C7169E76@codespeak.net> Author: bgola Date: Fri Aug 15 05:34:27 2008 New Revision: 57261 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Log: fix the problem with lambda inside generator expressions (nestedscopes) Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Fri Aug 15 05:34:27 2008 @@ -265,7 +265,6 @@ co_code = self.co_code self._stackdepths = [UNREACHABLE] * len(co_code) self._stackdepths[0] = 0 - just_loaded_const = None consts_w = self.getConsts() finally_targets = {} largestsize = 0 @@ -293,7 +292,6 @@ i += 1 if curstackdepth == UNREACHABLE: - just_loaded_const = None continue # ignore unreachable instructions if opcode in DEPTH_OP_EFFECT_ALONG_JUMP: @@ -467,6 +465,8 @@ def depth_MAKE_FUNCTION(argc): return -argc def depth_MAKE_CLOSURE(argc): + if argc == 0: + return -1 return -argc def depth_BUILD_SLICE(argc): if argc == 2: From fijal at codespeak.net Fri Aug 15 14:32:51 2008 From: fijal at codespeak.net (fijal at 
codespeak.net) Date: Fri, 15 Aug 2008 14:32:51 +0200 (CEST) Subject: [pypy-svn] r57275 - in pypy/dist/pypy/rlib: . test Message-ID: <20080815123251.AA0EB169E6E@codespeak.net> Author: fijal Date: Fri Aug 15 14:32:50 2008 New Revision: 57275 Modified: pypy/dist/pypy/rlib/rmmap.py pypy/dist/pypy/rlib/test/test_rmmap.py Log: Add alloc/free pair to interface of rmmap, copied from codebuf_* in jit. Modified: pypy/dist/pypy/rlib/rmmap.py ============================================================================== --- pypy/dist/pypy/rlib/rmmap.py (original) +++ pypy/dist/pypy/rlib/rmmap.py Fri Aug 15 14:32:50 2008 @@ -64,7 +64,8 @@ elif _MS_WINDOWS: constant_names = ['PAGE_READONLY', 'PAGE_READWRITE', 'PAGE_WRITECOPY', 'FILE_MAP_READ', 'FILE_MAP_WRITE', 'FILE_MAP_COPY', - 'DUPLICATE_SAME_ACCESS'] + 'DUPLICATE_SAME_ACCESS', 'MEM_COMMIT', 'MEM_RESERVE', + 'MEM_RELEASE', 'PAGE_EXECUTE_READWRITE'] for name in constant_names: setattr(CConfig, name, rffi_platform.ConstantInteger(name)) @@ -176,7 +177,14 @@ # but it should not be so! _get_osfhandle = winexternal('_get_osfhandle', [INT], HANDLE) GetLastError = winexternal('GetLastError', [], DWORD) - + VirtualAlloc = winexternal('VirtualAlloc', + [rffi.VOIDP, rffi.SIZE_T, DWORD, DWORD], + rffi.VOIDP) + VirtualProtect = winexternal('VirtualProtect', + [rffi.VOIDP, rffi.SIZE_T, DWORD, DWORDP], BOOL) + VirtualFree = winexternal('VirtualFree', + [rffi.VOIDP, rffi.SIZE_T, DWORD], BOOL) + def _get_page_size(): try: @@ -610,6 +618,24 @@ m.setdata(res, map_size) return m + + # XXX is this really necessary? + class Hint: + pos = -0x4fff0000 # for reproducible results + hint = Hint() + + def alloc(map_size): + flags = MAP_PRIVATE | MAP_ANONYMOUS + prot = PROT_EXEC | PROT_READ | PROT_WRITE + hintp = rffi.cast(PTR, hint.pos) + res = c_mmap(hintp, map_size, prot, flags, -1, 0) + if res == rffi.cast(PTR, -1): + raise MemoryError + hint.pos += map_size + return res + + free = c_munmap + elif _MS_WINDOWS: def mmap(fileno, length, tagname="", access=_ACCESS_DEFAULT): # check size boundaries @@ -708,5 +734,20 @@ err = rffi.cast(lltype.Signed, dwErr) raise OSError(err, os.strerror(err)) + + def alloc(map_size): + null = lltype.nullptr(rffi.VOIDP.TO) + res = VirtualAlloc(null, map_size, MEM_COMMIT|MEM_RESERVE, + PAGE_EXECUTE_READWRITE) + if not res: + raise MemoryError + arg = lltype.malloc(DWORDP.TO, 1, zero=True, flavor='raw') + VirtualProtect(res, map_size, PAGE_EXECUTE_READWRITE, arg) + lltype.free(arg, flavor='raw') + # ignore errors, just try + return res + + def free(ptr, map_size): + VirtualFree(ptr, 0, MEM_RELEASE) # register_external here? 
Modified: pypy/dist/pypy/rlib/test/test_rmmap.py ============================================================================== --- pypy/dist/pypy/rlib/test/test_rmmap.py (original) +++ pypy/dist/pypy/rlib/test/test_rmmap.py Fri Aug 15 14:32:50 2008 @@ -2,7 +2,7 @@ import os from pypy.rpython.test.test_llinterp import interpret from pypy.rlib import rmmap as mmap -from pypy.rlib.rmmap import RTypeError, RValueError +from pypy.rlib.rmmap import RTypeError, RValueError, alloc, free import sys class TestMMap: @@ -382,3 +382,18 @@ return r compile(func, [int]) + +def test_alloc_free(): + map_size = 65536 + data = alloc(map_size) + for i in range(0, map_size, 171): + data[i] = chr(i & 0xff) + for i in range(0, map_size, 171): + assert data[i] == chr(i & 0xff) + free(data, map_size) + +def test_compile_alloc_free(): + from pypy.translator.c.test.test_genc import compile + + fn = compile(test_alloc_free, []) + fn() From fijal at codespeak.net Fri Aug 15 14:39:30 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Fri, 15 Aug 2008 14:39:30 +0200 (CEST) Subject: [pypy-svn] r57276 - pypy/dist/pypy/rlib Message-ID: <20080815123930.AAFFE168412@codespeak.net> Author: fijal Date: Fri Aug 15 14:39:29 2008 New Revision: 57276 Modified: pypy/dist/pypy/rlib/libffi.py Log: Strike this jit mention Modified: pypy/dist/pypy/rlib/libffi.py ============================================================================== --- pypy/dist/pypy/rlib/libffi.py (original) +++ pypy/dist/pypy/rlib/libffi.py Fri Aug 15 14:39:29 2008 @@ -9,6 +9,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.tool.autopath import pypydir from pypy.translator.tool.cbuild import ExternalCompilationInfo +from pypy.rlib.rmmap import alloc import py import os import sys @@ -327,9 +328,6 @@ userdata = rffi.cast(USERDATA_P, ll_userdata) userdata.callback(ll_args, ll_res, userdata) -# heap for closures -from pypy.jit.codegen.i386.codebuf import memhandler - CHUNK = 4096 CLOSURES = rffi.CArrayPtr(FFI_CLOSUREP.TO) @@ -339,7 +337,7 @@ self.free_list = lltype.nullptr(rffi.VOIDP.TO) def _more(self): - chunk = rffi.cast(CLOSURES, memhandler.alloc(CHUNK)) + chunk = rffi.cast(CLOSURES, alloc(CHUNK)) count = CHUNK//rffi.sizeof(FFI_CLOSUREP.TO) for i in range(count): rffi.cast(rffi.VOIDPP, chunk)[0] = self.free_list From fijal at codespeak.net Fri Aug 15 14:45:38 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Fri, 15 Aug 2008 14:45:38 +0200 (CEST) Subject: [pypy-svn] r57279 - in pypy/dist/pypy: jit translator translator/tool Message-ID: <20080815124538.01949169E6C@codespeak.net> Author: fijal Date: Fri Aug 15 14:45:38 2008 New Revision: 57279 Removed: pypy/dist/pypy/jit/ Modified: pypy/dist/pypy/translator/driver.py pypy/dist/pypy/translator/tool/graphpage.py Log: Remove the jit directory. The main reason is that this version is horribly outdated and does not resemble current status (also tests are failing). For more details on jit, look to jit branch: http://codespeak.net/svn/pypy/branch/oo-jit/ Signal a clean error in case someone tries to use it. 
Modified: pypy/dist/pypy/translator/driver.py ============================================================================== --- pypy/dist/pypy/translator/driver.py (original) +++ pypy/dist/pypy/translator/driver.py Fri Aug 15 14:45:38 2008 @@ -381,6 +381,7 @@ "Backendopt before Hint-annotate") def task_hintannotate_lltype(self): + raise NotImplementedError("JIT is not implemented on trunk, look at oo-jit branch instead") from pypy.jit.hintannotator.annotator import HintAnnotator from pypy.jit.hintannotator.model import OriginFlags from pypy.jit.hintannotator.model import SomeLLAbstractConstant @@ -409,6 +410,8 @@ "Hint-annotate") def task_timeshift_lltype(self): + raise NotImplementedError("JIT is not implemented on trunk, look at oo-jit branch instead") + from pypy.jit.timeshifter.hrtyper import HintRTyper from pypy.jit.codegen import detect_cpu cpu = detect_cpu.autodetect() Modified: pypy/dist/pypy/translator/tool/graphpage.py ============================================================================== --- pypy/dist/pypy/translator/tool/graphpage.py (original) +++ pypy/dist/pypy/translator/tool/graphpage.py Fri Aug 15 14:45:38 2008 @@ -125,9 +125,9 @@ self.annotator.binding_cause_history.get(var, [])) self.binding_history[var.name] = zip(history, cause_history) - from pypy.jit.hintannotator.annotator import HintAnnotator - if isinstance(self.annotator, HintAnnotator): - return + #from pypy.jit.hintannotator.annotator import HintAnnotator + #if isinstance(self.annotator, HintAnnotator): + # return vars = {} for graph in graphs: From arigo at codespeak.net Sat Aug 16 10:33:06 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 16 Aug 2008 10:33:06 +0200 (CEST) Subject: [pypy-svn] r57305 - pypy/branch/opt-option/pypy/config Message-ID: <20080816083306.8CC4416850B@codespeak.net> Author: arigo Date: Sat Aug 16 10:33:03 2008 New Revision: 57305 Modified: pypy/branch/opt-option/pypy/config/pypyoption.py Log: Include "thread" in allworkingmodules. 
Modified: pypy/branch/opt-option/pypy/config/pypyoption.py ============================================================================== --- pypy/branch/opt-option/pypy/config/pypyoption.py (original) +++ pypy/branch/opt-option/pypy/config/pypyoption.py Sat Aug 16 10:33:03 2008 @@ -27,7 +27,8 @@ ["_socket", "unicodedata", "mmap", "fcntl", "rctime" , "select", "zipimport", "_lsprof", "crypt", "signal", "dyngram", "_rawffi", "termios", "zlib", - "struct", "md5", "sha", "bz2", "_minimal_curses", "cStringIO"] + "struct", "md5", "sha", "bz2", "_minimal_curses", "cStringIO", + "thread"] )) if sys.platform == "win32": From arigo at codespeak.net Sat Aug 16 11:02:40 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 16 Aug 2008 11:02:40 +0200 (CEST) Subject: [pypy-svn] r57307 - in pypy/branch/opt-option/pypy: config doc doc/config translator/goal Message-ID: <20080816090240.0EA1B169E09@codespeak.net> Author: arigo Date: Sat Aug 16 11:02:39 2008 New Revision: 57307 Added: pypy/branch/opt-option/pypy/doc/config/opt.txt (contents, props changed) Removed: pypy/branch/opt-option/pypy/doc/config/objspace.std.allopts.txt Modified: pypy/branch/opt-option/pypy/config/makerestdoc.py pypy/branch/opt-option/pypy/doc/config/commandline.txt pypy/branch/opt-option/pypy/doc/config/confrest.py pypy/branch/opt-option/pypy/doc/config/index.txt pypy/branch/opt-option/pypy/doc/garbage_collection.txt pypy/branch/opt-option/pypy/doc/getting-started.txt pypy/branch/opt-option/pypy/doc/interpreter-optimizations.txt pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py Log: Documentation updates: document --opt and rewrite the getting-started section accordingly. Kill remaining mentions of --allopts. Don't crash if --allworkingmodules is specified together with an option that prevents threads. Modified: pypy/branch/opt-option/pypy/config/makerestdoc.py ============================================================================== --- pypy/branch/opt-option/pypy/config/makerestdoc.py (original) +++ pypy/branch/opt-option/pypy/config/makerestdoc.py Sat Aug 16 11:02:39 2008 @@ -178,10 +178,12 @@ return "Internal Options" return "" -def make_cmdline_overview(descr): - content = Rest( - Title("Overview of Command Line Options for '%s'" % (descr._name, ), - abovechar="=", belowchar="=")) +def make_cmdline_overview(descr, title=True): + content = Rest() + if title: + content.add( + Title("Overview of Command Line Options for '%s'" % (descr._name, ), + abovechar="=", belowchar="=")) cmdlines = [] config = Config(descr) for path in config.getpaths(include_groups=False): Modified: pypy/branch/opt-option/pypy/doc/config/commandline.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/config/commandline.txt (original) +++ pypy/branch/opt-option/pypy/doc/config/commandline.txt Sat Aug 16 11:02:39 2008 @@ -1,4 +1,33 @@ -.. intentionally empty, but contains the following comment to fool py.test: - overview of command line options for objspace - overview of command line options for translation + +.. contents:: + + +.. _objspace: +.. _`overview-of-command-line-options-for-objspace`: + +------------------------------- +PyPy Python interpreter options +------------------------------- + +The following options can be used after ``translate.py +targetpypystandalone`` or as options to ``py.py``. + +.. GENERATE: objspace + + +.. _translation: +.. 
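Condensed to its core, the last point amounts to catching the conflict instead of letting it abort the translation; the shape of the fix is roughly this (a sketch only, the real hunk is in the targetpypystandalone.py diff further down):

    from pypy.config.config import ConflictConfigError

    if config.objspace.usemodules.thread:
        try:
            config.translation.thread = True   # normally fine
        except ConflictConfigError:
            # another option (e.g. --sandbox, which requires translation.thread
            # to stay False) forbids threads: drop the thread module again, which
            # only succeeds if --allworkingmodules merely suggested it rather than
            # the user asking for it explicitly
            config.objspace.usemodules.thread = False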
_`overview-of-command-line-options-for-translation`: + +--------------------------- +General translation options +--------------------------- + +The following are options of ``translate.py``. They must be +given before the ``targetxxx`` on the command line. + +* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` + +.. __: opt.html + +.. GENERATE: translation Modified: pypy/branch/opt-option/pypy/doc/config/confrest.py ============================================================================== --- pypy/branch/opt-option/pypy/doc/config/confrest.py (original) +++ pypy/branch/opt-option/pypy/doc/config/confrest.py Sat Aug 16 11:02:39 2008 @@ -30,12 +30,13 @@ def get_content(self, txtpath, encoding): if txtpath.basename == "commandline.txt": - result = [".. contents::"] - for descr in all_optiondescrs: - result.append(".. %s_:\n" % (descr._name, )) - result.append(make_cmdline_overview(descr).text()) - result.append("") - result.append(txtpath.read()) + result = [] + for line in txtpath.read().splitlines(): + if line.startswith('.. GENERATE:'): + start = line[len('.. GENERATE:'):].strip() + descr = start_to_descr[start] + line = make_cmdline_overview(descr, title=False).text() + result.append(line) return "\n".join(result) fullpath = txtpath.purebasename start = fullpath.split(".")[0] Modified: pypy/branch/opt-option/pypy/doc/config/index.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/config/index.txt (original) +++ pypy/branch/opt-option/pypy/doc/config/index.txt Sat Aug 16 11:02:39 2008 @@ -44,9 +44,9 @@ .. image:: ../image/compat-matrix.png .. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#overview-of-command-line-options-for-objspace -.. _`object space options`: commandline.html#overview-of-command-line-options-for-objspace -.. _`translation options`: commandline.html#overview-of-command-line-options-for-translation +.. _`objspace options`: commandline.html#objspace +.. _`object space options`: commandline.html#objspace +.. _`translation options`: commandline.html#translation .. _`overview`: commandline.html .. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html .. _`What PyPy can do for your objects`: ../objspace-proxies.html Added: pypy/branch/opt-option/pypy/doc/config/opt.txt ============================================================================== --- (empty file) +++ pypy/branch/opt-option/pypy/doc/config/opt.txt Sat Aug 16 11:02:39 2008 @@ -0,0 +1,49 @@ +The ``--opt`` or ``-O`` translation option +========================================== + +This meta-option selects a default set of optimization +settings to use during a translation. Usage:: + + translate.py --opt=# + translate.py -O# + +where ``#`` is the desired optimization level. The valid choices are: + + ============= ======================================================== + Level Description + ============= ======================================================== + `--opt=0` all optimizations off; fastest translation `(*)`_ + `--opt=1` non-time-consuming optimizations on `(*)`_ + `--opt=size` minimize the size of the final executable `(*)`_ + `--opt=mem` minimize the run-time RAM consumption (in-progress) + `--opt=2` all optimizations on; good run-time performance + `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ + ============= ======================================================== + +.. 
_`(*)`: + +`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser +garbage collector`_ (Debian package ``libgc-dev``). The translation +itself is faster and consumes less memory; the final executable is +smaller but slower. The other levels use one of our built-in `custom +garbage collectors`_. + +.. _`(**)`: + +`(**)`: The level `3` enables gcc profile-driven recompilation when +translating PyPy. + +The exact set of optimizations enabled by each level depends +on the backend. Individual translation targets can also +select their own options based on the level: when translating +PyPy, the level `mem` enables the memory-saving object +implementations in the object space; levels `2` and `3` enable +the advanced object implementations that give an increase in +performance; level `3` also enables gcc profile-driven +recompilation. + +The default level is `3`. + + +.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ +.. _`custom garbage collectors`: ../garbage_collection.html Modified: pypy/branch/opt-option/pypy/doc/garbage_collection.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/garbage_collection.txt (original) +++ pypy/branch/opt-option/pypy/doc/garbage_collection.txt Sat Aug 16 11:02:39 2008 @@ -26,7 +26,7 @@ For more details, see the `overview of command line options for translation`_. -.. _`overview of command line options for translation`: config/commandline.html#overview-of-command-line-options-for-translation +.. _`overview of command line options for translation`: config/commandline.html#translation Mark and Sweep -------------- Modified: pypy/branch/opt-option/pypy/doc/getting-started.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/getting-started.txt (original) +++ pypy/branch/opt-option/pypy/doc/getting-started.txt Sat Aug 16 11:02:39 2008 @@ -546,44 +546,49 @@ .. _`windows document`: windows.html You can translate the whole of PyPy's Python interpreter to low level C -code. This is the largest and ultimate example of RPython program that -our translation toolchain can process. The most standard variant -nowadays is:: +code. (This is the largest and ultimate example of RPython program that +our translation toolchain can process.) - cd pypy/translator/goal - python translate.py --gc=hybrid --thread targetpypystandalone.py --allworkingmodules --allopts +1. Install dependencies. You need (these are Debian package names, + adapt as needed): -Dependencies: this will compile all supported built-in modules, some of -which have external dependencies. On a Debian Linux, for example, you -need to install the following packages: a full C compiler like gcc; -``python-dev, python-ctypes``; ``libffi-dev, libz-dev, libbz2-dev, -libncurses-dev``. - -This whole process will take some time and quite a lot of memory (around -1200 MB on 32 bit machines). It creates an executable ``pypy-c`` in -the current directory. -The ``--gc=hybrid`` option means that the ``pypy-c`` will use our own -exact generational garbage collector implementation, whose performance -is rather good nowadays. The ``--thread`` option enables the thread -module, which is still slightly experimental. -The ``--allopts`` option enables all the -worthwhile performance optimizations, but slows down the translation -itself. 
On Linux 32-bit Intel machines, if you don't need threads, you -can get some extra speed (and extra translation time) by removing -``--thread`` and replacing it with ``--gcrootfinder=asmgcc``. - -An alternative is to use the `Boehm-Demers-Weiser garbage -collector`_ instead of our own. For this, use ``--gc=boehm``. -Be sure to install Boehm before starting the translation (e.g. by running -``apt-get install libgc-dev`` on Debian or Ubuntu). Translating with Boehm -is somewhat faster and less memory-hungry than translating with our own GCs. - -In any case, as described above, you can find the produced executable under the -name ``pypy-c``. Type ``pypy-c --help`` to see the options it supports -- -mainly the same basic options as CPython. In addition, ``pypy-c --info`` -prints the translation options that where used to produce this particular -executable. This executable can be moved around or copied on other machines; -see Installation_ below. + * gcc + * ``python-dev`` + * ``python-ctypes`` + * ``libffi-dev`` + * ``libz-dev`` (for the optional ``zlib`` module) + * ``libbz2-dev`` (for the optional ``bz2`` module) + * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) + * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) + +2. Be warned that translation is time-consuming (30 min to + over one hour) and extremely RAM-hungry (kill it if it + starts swapping heavily). If you have less than 1.5 GB of + RAM (or a slow machine) you might want to pick the + `optimization level`_ `1` in the next step. The default + level `3` gives much better results, though. + +3. Run:: + + cd pypy/translator/goal + python translate.py --opt=3 targetpypystandalone.py --allworkingmodules + + possibly replacing ``--opt=3`` with ``--opt=1`` or another + `optimization level`_ of your choice. + + On Linux 32-bit Intel machines, if you don't need threads, you + can get some extra speed (and extra translation time) by adding + ``--gcrootfinder=asmgcc`` just after the ``--opt`` option. + +.. _`optimization level`: config/opt.html + +If everything works correctly this will create an executable +``pypy-c`` in the current directory. Type ``pypy-c --help`` +to see the options it supports -- mainly the same basic +options as CPython. In addition, ``pypy-c --info`` prints the +translation options that where used to produce this particular +executable. This executable can be moved around or copied on +other machines; see Installation_ below. The ``translate.py`` script takes a very large number of options controlling what to translate and how. See ``translate.py -h``. Some of the more Modified: pypy/branch/opt-option/pypy/doc/interpreter-optimizations.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/interpreter-optimizations.txt (original) +++ pypy/branch/opt-option/pypy/doc/interpreter-optimizations.txt Sat Aug 16 11:02:39 2008 @@ -384,6 +384,6 @@ .. waffles about ropes -You can build a pypy with all generally useful optimizations turned on by using -the :config:`objspace.std.allopts` option. Such a build is between 1.5 and 2.5 -times faster than the default, depending on the benchmark. +When building pypy, all generally useful optimizations are turned on by default +unless you explicitly lower the translation optimization level with the +``--opt`` option. 
Modified: pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/branch/opt-option/pypy/translator/goal/targetpypystandalone.py Sat Aug 16 11:02:39 2008 @@ -7,6 +7,7 @@ from pypy.interpreter.error import OperationError from pypy.translator.goal.ann_override import PyPyAnnotatorPolicy from pypy.config.config import Config, to_optparse, make_dict, SUPPRESS_USAGE +from pypy.config.config import ConflictConfigError from pypy.tool.option import make_objspace from pypy.translator.goal.nanos import setup_nanos @@ -134,7 +135,16 @@ if config.translation.thread: config.objspace.usemodules.thread = True elif config.objspace.usemodules.thread: - config.translation.thread = True + try: + config.translation.thread = True + except ConflictConfigError: + # If --allworkingmodules is given, we reach this point + # if threads cannot be enabled (e.g. they conflict with + # something else). In this case, we can try setting the + # usemodules.thread option to False again. It will + # cleanly fail if that option was set to True by the + # command-line directly instead of via --allworkingmodules. + config.objspace.usemodules.thread = False if config.translation.stackless: config.objspace.usemodules._stackless = True From arigo at codespeak.net Sat Aug 16 11:08:18 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sat, 16 Aug 2008 11:08:18 +0200 (CEST) Subject: [pypy-svn] r57308 - pypy/branch/opt-option/pypy/doc/config Message-ID: <20080816090818.94C13169E09@codespeak.net> Author: arigo Date: Sat Aug 16 11:08:17 2008 New Revision: 57308 Modified: pypy/branch/opt-option/pypy/doc/config/opt.txt Log: Fix. Modified: pypy/branch/opt-option/pypy/doc/config/opt.txt ============================================================================== --- pypy/branch/opt-option/pypy/doc/config/opt.txt (original) +++ pypy/branch/opt-option/pypy/doc/config/opt.txt Sat Aug 16 11:08:17 2008 @@ -42,7 +42,7 @@ performance; level `3` also enables gcc profile-driven recompilation. -The default level is `3`. +The default level is `2`. .. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ From hpk at codespeak.net Sun Aug 17 08:38:58 2008 From: hpk at codespeak.net (hpk at codespeak.net) Date: Sun, 17 Aug 2008 08:38:58 +0200 (CEST) Subject: [pypy-svn] r57339 - in pypy/extradoc/planning: 1.0 1.1 roadmap Message-ID: <20080817063858.74AC4169E9B@codespeak.net> Author: hpk Date: Sun Aug 17 08:38:55 2008 New Revision: 57339 Modified: pypy/extradoc/planning/1.0/TODO.txt pypy/extradoc/planning/1.0/docs-refactoring.txt pypy/extradoc/planning/1.1/plan.txt pypy/extradoc/planning/roadmap/task_bytecode_compiler.txt pypy/extradoc/planning/roadmap/task_integration_vm.txt pypy/extradoc/planning/roadmap/task_separate_compilation.txt pypy/extradoc/planning/roadmap/task_stdlib_modules.txt Log: fix various ReST issues Modified: pypy/extradoc/planning/1.0/TODO.txt ============================================================================== --- pypy/extradoc/planning/1.0/TODO.txt (original) +++ pypy/extradoc/planning/1.0/TODO.txt Sun Aug 17 08:38:55 2008 @@ -56,11 +56,8 @@ - integrate and test: - (arigo,pedronis,arre, IN-PROGRESS): wp08 docs & examples, getting-started - - (cfbolz) test wp09 getting-started examples: - - after some fixing things seem to work - - wp09 docs, examples and code - maybe what we have is good enough? 
- (christian looking into this currently) a bit of progress, - but no feedback/interaction with DFKI/Danish yet. + - (cfbolz) test wp09 getting-started examples: after some fixing things seem to work + - wp09 docs, examples and code - maybe what we have is good enough? (christian looking into this currently) a bit of progress, but no feedback/interaction with DFKI/Danish yet. - move pedronis/pypy-c-testing, pedronis/tarball-testing @@ -74,12 +71,10 @@ - (done by NNN: fijal, NNN) play1 issues: - - rebuild pypy's to use the new readline stub (fixing - ugly stdout/stdin interactions) + - rebuild pypy's to use the new readline stub (fixing ugly stdout/stdin interactions) - review/refine help (add examples where necessary) - make examples execute more cleanly - - fix linking (it gets easily confused if you click around), - make correct 404 and 500 pages + - fix linking (it gets easily confused if you click around), make correct 404 and 500 pages - discuss/do a JIT demo - (DROPPED for 1.0) include documentation/entry point for @@ -95,18 +90,14 @@ - (DONE) consider where/how to put special builtins ("pypymagic" might not be a good name), suggestions: - - names: not pypymagic, not pypybuiltin, holger and cf to propose - (done, result is __pypy__) + - names: not pypymagic, not pypybuiltin, holger and cf to propose (done, result is __pypy__) - document pypybuiltin module (add docstrings) (done) - - functions should only be there if the according option - is enabled (e.g. method cache) (done) - - functions only relating to py.py should not appear in - translated version (unless they work :) (done) - (cfbolz) + - functions should only be there if the according option is enabled (e.g. method cache) (done) + - functions only relating to py.py should not appear in translated version (unless they work :) (done) (cfbolz) - (done, still a bit of doc's/demo refinements maybe) - make transparent proxies more prominent - because they are a rather powerful feature) + make transparent proxies more prominent + because they are a rather powerful feature) - write a nice support module (pypy/lib/tputil.py - in progress) - write/review documentation (in-progress) - fix bugs if any Modified: pypy/extradoc/planning/1.0/docs-refactoring.txt ============================================================================== --- pypy/extradoc/planning/1.0/docs-refactoring.txt (original) +++ pypy/extradoc/planning/1.0/docs-refactoring.txt Sun Aug 17 08:38:55 2008 @@ -1,9 +1,9 @@ - - mission statement should emphasize VM - framework/translation and not mention project results + - mission statement should emphasize VM framework/translation and not mention project results - always clearly separate the talk about the VM translation framework and the Python implementation (and its features) for example "PyPy - implementation of python in python" as the first title is misleading. 
maybe something like: + a) PyPy is a VM/VHLL translation framework (or whatever we settle on) b) already contains a full Python implementation part of which is re-used from the translation framework Modified: pypy/extradoc/planning/1.1/plan.txt ============================================================================== --- pypy/extradoc/planning/1.1/plan.txt (original) +++ pypy/extradoc/planning/1.1/plan.txt Sun Aug 17 08:38:55 2008 @@ -25,8 +25,7 @@ trunk after all tests have passed - refactor documentation to have separated entry points: - for users of the mainline PyPy Python Interpreter - - for advanced users/developers/experimenters of the PyPy Python - Interpreter + - for advanced users/developers/experimenters of the PyPy Python Interpreter - all the rest - toolchain, prolog, squeak, javascript - revise and cleanup play1 - run more things through sandboxing? - debian packaging? Modified: pypy/extradoc/planning/roadmap/task_bytecode_compiler.txt ============================================================================== --- pypy/extradoc/planning/roadmap/task_bytecode_compiler.txt (original) +++ pypy/extradoc/planning/roadmap/task_bytecode_compiler.txt Sun Aug 17 08:38:55 2008 @@ -23,3 +23,5 @@ Dependencies: - `Port the JIT to ootype`_ + +.. _`Port the JIT to ootype`: task_jit_ootype.html Modified: pypy/extradoc/planning/roadmap/task_integration_vm.txt ============================================================================== --- pypy/extradoc/planning/roadmap/task_integration_vm.txt (original) +++ pypy/extradoc/planning/roadmap/task_integration_vm.txt Sun Aug 17 08:38:55 2008 @@ -28,3 +28,5 @@ Dependencies: - `Refactor RPython external objects interface`_ + +.. _`Refactor RPython external objects interface`: task_external_objects.html Modified: pypy/extradoc/planning/roadmap/task_separate_compilation.txt ============================================================================== --- pypy/extradoc/planning/roadmap/task_separate_compilation.txt (original) +++ pypy/extradoc/planning/roadmap/task_separate_compilation.txt Sun Aug 17 08:38:55 2008 @@ -17,4 +17,4 @@ dependencies: - - None \ No newline at end of file + - None Modified: pypy/extradoc/planning/roadmap/task_stdlib_modules.txt ============================================================================== --- pypy/extradoc/planning/roadmap/task_stdlib_modules.txt (original) +++ pypy/extradoc/planning/roadmap/task_stdlib_modules.txt Sun Aug 17 08:38:55 2008 @@ -32,3 +32,7 @@ .. _FePy: http://fepy.sourceforge.net/ + +.. _`Separate compilation`: task_separate_compilation.html +.. _`Refactor RPython external objects interface`: task_external_objects.html +.. 
_`Integration with the hosting virtual machine`: task_integration_vm.html From arigo at codespeak.net Sun Aug 17 15:15:53 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 17 Aug 2008 15:15:53 +0200 (CEST) Subject: [pypy-svn] r57354 - in pypy/dist/pypy: config config/test doc doc/config tool/bench/test translator translator/goal Message-ID: <20080817131553.408FC169E8F@codespeak.net> Author: arigo Date: Sun Aug 17 15:15:50 2008 New Revision: 57354 Added: pypy/dist/pypy/doc/config/objspace.std.multimethods.txt - copied unchanged from r57352, pypy/branch/opt-option/pypy/doc/config/objspace.std.multimethods.txt pypy/dist/pypy/doc/config/opt.txt - copied unchanged from r57352, pypy/branch/opt-option/pypy/doc/config/opt.txt Removed: pypy/dist/pypy/doc/config/objspace.std.allopts.txt pypy/dist/pypy/translator/goal/targetmultiplespaces.py Modified: pypy/dist/pypy/config/config.py pypy/dist/pypy/config/makerestdoc.py pypy/dist/pypy/config/pypyoption.py pypy/dist/pypy/config/test/test_config.py pypy/dist/pypy/config/test/test_pypyoption.py pypy/dist/pypy/config/translationoption.py pypy/dist/pypy/doc/config/commandline.txt pypy/dist/pypy/doc/config/confrest.py pypy/dist/pypy/doc/config/index.txt pypy/dist/pypy/doc/garbage_collection.txt pypy/dist/pypy/doc/getting-started.txt pypy/dist/pypy/doc/interpreter-optimizations.txt pypy/dist/pypy/tool/bench/test/test_pypyresult.py pypy/dist/pypy/translator/driver.py pypy/dist/pypy/translator/goal/bench-cronjob.py pypy/dist/pypy/translator/goal/targetprologstandalone.py pypy/dist/pypy/translator/goal/targetpypystandalone.py pypy/dist/pypy/translator/goal/translate.py Log: Remove the --allopts and --faassen options and replace them with a global translate.py option '--opt'. See pypy/doc/config/opt.txt for more information. Note that --opt is not a regular option, but is implemented in a pair of functions that can suggest options based on other information, e.g. the selected backend. This is a merge from the opt-option branch. Also contains: * clean up a bit the translation options and their defaults * updated bench-cronjob.py, tuatara might need updates too * updated getting-started.txt * small extension to the config system to support the kind of 'suggest' used by --opt. 
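The 'suggest' extension mentioned in the last bullet is what makes --opt composable with explicit user choices: a suggested value is applied only while an option still holds its default (or an earlier suggestion), and a conflicting suggestion is silently dropped instead of raising. A short sketch distilled from the tests in this patch (not itself part of the commit):

    from pypy.config.pypyoption import get_pypy_config, set_pypy_opt_level
    from pypy.config.translationoption import set_opt_level

    config = get_pypy_config()
    config.translation.thread = True           # an explicit user choice
    set_opt_level(config, '2')                 # suggests a modern GC plus extra backend opts
    set_pypy_opt_level(config, '2')            # suggests the interpreter-level optimizations
    assert config.translation.gc != 'boehm'    # the GC suggestion was taken up
    assert config.objspace.std.withmultidict   # ...and so were the objspace ones
    config.translation.suggest(thread=False)   # a mere suggestion never overrides the user
    assert config.translation.thread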
Modified: pypy/dist/pypy/config/config.py ============================================================================== --- pypy/dist/pypy/config/config.py (original) +++ pypy/dist/pypy/config/config.py Sun Aug 17 15:15:50 2008 @@ -13,6 +13,9 @@ class ConfigError(Exception): pass +class ConflictConfigError(ConfigError): + pass + class Config(object): _cfgimpl_frozen = False @@ -99,11 +102,23 @@ if oldvalue != value and oldowner not in ("default", "suggested"): if who in ("default", "suggested"): return - raise ConfigError('cannot override value to %s for option %s' % - (value, name)) + raise ConflictConfigError('cannot override value to %s for ' + 'option %s' % (value, name)) child.setoption(self, value, who) self._cfgimpl_value_owners[name] = who + def suggest(self, **kwargs): + for name, value in kwargs.items(): + self.suggestoption(name, value) + + def suggestoption(self, name, value): + try: + self.setoption(name, value, "suggested") + except ConflictConfigError: + # setting didn't work, but that is fine, since it is + # suggested only + pass + def set(self, **kwargs): all_paths = [p.split(".") for p in self.getpaths()] for key, value in kwargs.iteritems(): @@ -248,12 +263,7 @@ for path, reqvalue in self._suggests.get(value, []): toplevel = config._cfgimpl_get_toplevel() homeconfig, name = toplevel._cfgimpl_get_home_by_path(path) - try: - homeconfig.setoption(name, reqvalue, "suggested") - except ConfigError: - # setting didn't work, but that is fine, since it is - # suggested only - pass + homeconfig.suggestoption(name, reqvalue) super(ChoiceOption, self).setoption(config, value, who) def validate(self, value): @@ -298,12 +308,7 @@ for path, reqvalue in self._suggests: toplevel = config._cfgimpl_get_toplevel() homeconfig, name = toplevel._cfgimpl_get_home_by_path(path) - try: - homeconfig.setoption(name, reqvalue, "suggested") - except ConfigError: - # setting didn't work, but that is fine, since it is - # suggested - pass + homeconfig.suggestoption(name, reqvalue) super(BoolOption, self).setoption(config, value, who) Modified: pypy/dist/pypy/config/makerestdoc.py ============================================================================== --- pypy/dist/pypy/config/makerestdoc.py (original) +++ pypy/dist/pypy/config/makerestdoc.py Sun Aug 17 15:15:50 2008 @@ -178,10 +178,12 @@ return "Internal Options" return "" -def make_cmdline_overview(descr): - content = Rest( - Title("Overview of Command Line Options for '%s'" % (descr._name, ), - abovechar="=", belowchar="=")) +def make_cmdline_overview(descr, title=True): + content = Rest() + if title: + content.add( + Title("Overview of Command Line Options for '%s'" % (descr._name, ), + abovechar="=", belowchar="=")) cmdlines = [] config = Config(descr) for path in config.getpaths(include_groups=False): Modified: pypy/dist/pypy/config/pypyoption.py ============================================================================== --- pypy/dist/pypy/config/pypyoption.py (original) +++ pypy/dist/pypy/config/pypyoption.py Sun Aug 17 15:15:50 2008 @@ -3,7 +3,7 @@ import sys from pypy.config.config import OptionDescription, BoolOption, IntOption, ArbitraryOption from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config -from pypy.config.config import ConfigError +from pypy.config.config import ConflictConfigError modulepath = py.magic.autopath().dirpath().dirpath().join("module") all_modules = [p.basename for p in modulepath.listdir() @@ -27,7 +27,8 @@ ["_socket", "unicodedata", "mmap", "fcntl", "rctime" , "select", "zipimport", 
"_lsprof", "crypt", "signal", "dyngram", "_rawffi", "termios", "zlib", - "struct", "md5", "sha", "bz2", "_minimal_curses", "cStringIO"] + "struct", "md5", "sha", "bz2", "_minimal_curses", "cStringIO", + "thread"] )) if sys.platform == "win32": @@ -74,7 +75,8 @@ "The module %r is disabled\n" % (modname,) + "because importing %s raised %s\n" % (name, errcls) + str(e)) - raise ConfigError("--withmod-%s: %s" % (modname, errcls)) + raise ConflictConfigError("--withmod-%s: %s" % (modname, + errcls)) return validator else: return None @@ -165,11 +167,11 @@ BoolOption("withsmallint", "use tagged integers", default=False, - requires=[("translation.gc", "boehm")]), + requires=[("translation.gc", "boehm"), + ("objspace.std.withprebuiltint", False)]), BoolOption("withprebuiltint", "prebuild commonly used int objects", - default=False, - requires=[("objspace.std.withsmallint", False)]), + default=False), IntOption("prebuiltintfrom", "lowest integer which is prebuilt", default=-5, cmdline="--prebuiltintfrom"), @@ -304,45 +306,10 @@ "a instrumentation option: before exit, print the types seen by " "certain simpler bytecodes", default=False), - - BoolOption("allopts", - "enable all thought-to-be-working optimizations", - default=False, - suggests=[("objspace.opcodes.CALL_LIKELY_BUILTIN", True), - ("objspace.opcodes.CALL_METHOD", True), - ("translation.withsmallfuncsets", 5), - ("translation.profopt", - "-c 'from richards import main;main(); from test import pystone; pystone.main()'"), - ("objspace.std.withmultidict", True), -# ("objspace.std.withstrjoin", True), - ("objspace.std.withshadowtracking", True), -# ("objspace.std.withstrslice", True), -# ("objspace.std.withsmallint", True), - ("objspace.std.withrangelist", True), - ("objspace.std.withmethodcache", True), -# ("objspace.std.withfastslice", True), - ("objspace.std.withprebuiltchar", True), - ("objspace.std.builtinshortcut", True), - ("objspace.std.optimized_list_getitem", True), - ("objspace.std.getattributeshortcut", True), - ("translation.list_comprehension_operations",True), - ("translation.backendopt.remove_asserts",True), - ], - cmdline="--allopts --faassen", negation=False), - -## BoolOption("llvmallopts", -## "enable all optimizations, and use llvm compiled via C", -## default=False, -## requires=[("objspace.std.allopts", True), -## ("translation.llvm_via_c", True), -## ("translation.backend", "llvm")], -## cmdline="--llvm-faassen", negation=False), + ChoiceOption("multimethods", "the multimethod implementation to use", + ["doubledispatch", "mrd"], + default="mrd"), ]), - #BoolOption("lowmem", "Try to use less memory during translation", - # default=False, cmdline="--lowmem", - # requires=[("objspace.geninterp", False)]), - - ]) def get_pypy_config(overrides=None, translating=False): @@ -351,6 +318,51 @@ pypy_optiondescription, overrides=overrides, translating=translating) +def set_pypy_opt_level(config, level): + """Apply PyPy-specific optimization suggestions on the 'config'. + The optimizations depend on the selected level and possibly on the backend. + """ + # warning: during some tests, the type_system and the backend may be + # unspecified and we get None. It shouldn't occur in translate.py though. 
+ type_system = config.translation.type_system + backend = config.translation.backend + + # all the good optimizations for PyPy should be listed here + if level in ['2', '3']: + config.objspace.opcodes.suggest(CALL_LIKELY_BUILTIN=True) + config.objspace.opcodes.suggest(CALL_METHOD=True) + config.objspace.std.suggest(withmultidict=True) + config.objspace.std.suggest(withshadowtracking=True) + config.objspace.std.suggest(withrangelist=True) + config.objspace.std.suggest(withmethodcache=True) + config.objspace.std.suggest(withprebuiltchar=True) + config.objspace.std.suggest(builtinshortcut=True) + config.objspace.std.suggest(optimized_list_getitem=True) + config.objspace.std.suggest(getattributeshortcut=True) + + # extra costly optimizations only go in level 3 + if level == '3': + config.translation.suggest(profopt= + "-c 'from richards import main;main(); " + "from test import pystone; pystone.main()'") + + # memory-saving optimizations + if level == 'mem': + config.objspace.std.suggest(withprebuiltint=True) + config.objspace.std.suggest(withrangelist=True) + config.objspace.std.suggest(withprebuiltchar=True) + config.objspace.std.suggest(withsharingdict=True) + # xxx other options? ropes maybe? + + # completely disable geninterp in a level 0 translation + if level == '0': + config.objspace.suggest(geninterp=False) + + # some optimizations have different effects depending on the typesystem + if type_system == 'ootype': + config.objspace.std.suggest(multimethods="doubledispatch") + + if __name__ == '__main__': config = get_pypy_config() print config.getpaths() Modified: pypy/dist/pypy/config/test/test_config.py ============================================================================== --- pypy/dist/pypy/config/test/test_config.py (original) +++ pypy/dist/pypy/config/test/test_config.py Sun Aug 17 15:15:50 2008 @@ -530,6 +530,15 @@ assert not c.toplevel +def test_bogus_suggests(): + descr = OptionDescription("test", '', [ + BoolOption("toplevel", "", suggests=[("opt", "bogusvalue")]), + ChoiceOption("opt", "", ["a", "b", "c"], "a"), + ]) + c = Config(descr) + py.test.raises(ConfigError, "c.toplevel = True") + + def test_delattr(): descr = OptionDescription("opt", "", [ OptionDescription("s1", "", [ @@ -549,7 +558,7 @@ def my_validator_2(config): assert config is c - raise ConfigError + raise ConflictConfigError descr = OptionDescription("opt", "", [ BoolOption('booloption1', 'option test1', default=False, Modified: pypy/dist/pypy/config/test/test_pypyoption.py ============================================================================== --- pypy/dist/pypy/config/test/test_pypyoption.py (original) +++ pypy/dist/pypy/config/test/test_pypyoption.py Sun Aug 17 15:15:50 2008 @@ -1,6 +1,7 @@ import py -from pypy.config.pypyoption import get_pypy_config +from pypy.config.pypyoption import get_pypy_config, set_pypy_opt_level from pypy.config.config import Config, ConfigError +from pypy.config.translationoption import set_opt_level thisdir = py.magic.autopath().dirpath() @@ -29,10 +30,32 @@ conf.translation.gc = name assert conf.translation.gctransformer == "framework" +def test_set_opt_level(): + conf = get_pypy_config() + set_opt_level(conf, '0') + assert conf.translation.gc == 'boehm' + assert conf.translation.backendopt.none == True + conf = get_pypy_config() + set_opt_level(conf, '2') + assert conf.translation.gc != 'boehm' + assert not conf.translation.backendopt.none + conf = get_pypy_config() + set_opt_level(conf, 'mem') + assert conf.translation.gc == 'marksweep' + assert not 
conf.translation.backendopt.none + +def test_set_pypy_opt_level(): + conf = get_pypy_config() + set_pypy_opt_level(conf, '2') + assert conf.objspace.std.withmultidict + conf = get_pypy_config() + set_pypy_opt_level(conf, '0') + assert not conf.objspace.std.withmultidict + def test_rweakref_required(): conf = get_pypy_config() conf.translation.rweakref = False - conf.objspace.std.allopts = True + set_pypy_opt_level(conf, '3') assert not conf.objspace.std.withtypeversion assert not conf.objspace.std.withmethodcache Modified: pypy/dist/pypy/config/translationoption.py ============================================================================== --- pypy/dist/pypy/config/translationoption.py (original) +++ pypy/dist/pypy/config/translationoption.py Sun Aug 17 15:15:50 2008 @@ -2,12 +2,14 @@ import py, os from pypy.config.config import OptionDescription, BoolOption, IntOption, ArbitraryOption, FloatOption from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config +from pypy.config.config import ConfigError DEFL_INLINE_THRESHOLD = 32.4 # just enough to inline add__Int_Int() # and just small enough to prevend inlining of some rlist functions. DEFL_PROF_BASED_INLINE_THRESHOLD = 32.4 DEFL_CLEVER_MALLOC_REMOVAL_INLINE_THRESHOLD = 32.4 +DEFL_LOW_INLINE_THRESHOLD = DEFL_INLINE_THRESHOLD / 2.0 translation_optiondescription = OptionDescription( "translation", "Translation Options", [ @@ -39,6 +41,8 @@ BoolOption("llvm_via_c", "compile llvm via C", default=False, cmdline="--llvm-via-c", requires=[("translation.backend", "llvm")]), + + # gc ChoiceOption("gc", "Garbage Collection Strategy", ["boehm", "ref", "marksweep", "semispace", "statistics", "generation", "hybrid", "none"], @@ -82,18 +86,10 @@ "llvmgc": [("translation.gc", "generation")], "asmgcc": [("translation.gc", "generation")], }), + + # other noticeable options BoolOption("thread", "enable use of threading primitives", default=False, cmdline="--thread"), - BoolOption("verbose", "Print extra information", default=False), - BoolOption("debug", "Record extra annotation information", - cmdline="-d --debug", default=False), - BoolOption("insist", "Try hard to go on RTyping", default=False, - cmdline="--insist"), - IntOption("withsmallfuncsets", - "Represent groups of less funtions than this as indices into an array", - default=0), - BoolOption("countmallocs", "Count mallocs and frees", default=False, - cmdline=None), BoolOption("sandbox", "Produce a fully-sandboxed executable", default=False, cmdline="--sandbox", requires=[("translation.thread", False)]), @@ -101,6 +97,11 @@ default=True), # misc + BoolOption("verbose", "Print extra information", default=False), + BoolOption("debug", "Record extra annotation information", + cmdline="-d --debug", default=True), + BoolOption("insist", "Try hard to go on RTyping", default=False, + cmdline="--insist"), StrOption("cc", "Specify compiler to use for compiling generated C", cmdline="--cc"), StrOption("profopt", "Specify profile based optimization script", cmdline="--profopt"), @@ -108,6 +109,13 @@ default=False, cmdline="--no-profopt", negation=False), BoolOption("instrument", "internal: turn instrumentation on", default=False, cmdline=None), + BoolOption("countmallocs", "Count mallocs and frees", default=False, + cmdline=None), + ChoiceOption("fork_before", + "(UNIX) Create restartable checkpoint before step", + ["annotate", "rtype", "backendopt", "database", "source", + "hintannotate", "timeshift"], + default=None, cmdline="--fork-before"), ArbitraryOption("instrumentctl", "internal", 
default=None), @@ -140,11 +148,9 @@ "attempt to pre-allocate the list", default=False, cmdline='--listcompr'), - ChoiceOption("fork_before", - "(UNIX) Create restartable checkpoint before step", - ["annotate", "rtype", "backendopt", "database", "source", - "hintannotate", "timeshift"], - default=None, cmdline="--fork-before"), + IntOption("withsmallfuncsets", + "Represent groups of less funtions than this as indices into an array", + default=0), # options for ootype OptionDescription("ootype", "Object Oriented Typesystem options", [ @@ -273,3 +279,61 @@ value = getattr(existing_config, child._name) config._cfgimpl_values[child._name] = value return config + +# ____________________________________________________________ + +OPT_LEVELS = ['0', '1', 'size', 'mem', '2', '3'] +DEFAULT_OPT_LEVEL = '2' + +OPT_TABLE_DOC = { + '0': 'No optimization. Uses the Boehm GC.', + '1': 'Enable a default set of optimizations. Uses the Boehm GC.', + 'size': 'Optimize for the size of the executable. Uses the Boehm GC.', + 'mem': 'Optimize for run-time memory usage and use a memory-saving GC.', + '2': 'Enable most optimizations and use a high-performance GC.', + '3': 'Enable all optimizations and use a high-performance GC.', + } + +OPT_TABLE = { + #level: gc backend optimizations... + '0': 'boehm nobackendopt', + '1': 'boehm lowinline', + 'size': 'boehm lowinline remove_asserts', + 'mem': 'marksweep lowinline remove_asserts', + '2': 'hybrid extraopts', + '3': 'hybrid extraopts remove_asserts', + } + +def set_opt_level(config, level): + """Apply optimization suggestions on the 'config'. + The optimizations depend on the selected level and possibly on the backend. + """ + # warning: during some tests, the type_system and the backend may be + # unspecified and we get None. It shouldn't occur in translate.py though. + type_system = config.translation.type_system + backend = config.translation.backend + + try: + opts = OPT_TABLE[level] + except KeyError: + raise ConfigError("no such optimization level: %r" % (level,)) + words = opts.split() + gc = words.pop(0) + + # set the GC (only meaningful with lltype) + config.translation.suggest(gc=gc) + + # set the backendopts + for word in words: + if word == 'nobackendopt': + config.translation.backendopt.suggest(none=True) + elif word == 'lowinline': + config.translation.backendopt.suggest(inline_threshold= + DEFL_LOW_INLINE_THRESHOLD) + elif word == 'remove_asserts': + config.translation.backendopt.suggest(remove_asserts=True) + elif word == 'extraopts': + config.translation.suggest(withsmallfuncsets=5) + config.translation.suggest(list_comprehension_operations=True) + else: + raise ValueError(word) Modified: pypy/dist/pypy/doc/config/commandline.txt ============================================================================== --- pypy/dist/pypy/doc/config/commandline.txt (original) +++ pypy/dist/pypy/doc/config/commandline.txt Sun Aug 17 15:15:50 2008 @@ -1,4 +1,33 @@ -.. intentionally empty, but contains the following comment to fool py.test: - overview of command line options for objspace - overview of command line options for translation + +.. contents:: + + +.. _objspace: +.. _`overview-of-command-line-options-for-objspace`: + +------------------------------- +PyPy Python interpreter options +------------------------------- + +The following options can be used after ``translate.py +targetpypystandalone`` or as options to ``py.py``. + +.. GENERATE: objspace + + +.. _translation: +.. 
_`overview-of-command-line-options-for-translation`: + +--------------------------- +General translation options +--------------------------- + +The following are options of ``translate.py``. They must be +given before the ``targetxxx`` on the command line. + +* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` + +.. __: opt.html + +.. GENERATE: translation Modified: pypy/dist/pypy/doc/config/confrest.py ============================================================================== --- pypy/dist/pypy/doc/config/confrest.py (original) +++ pypy/dist/pypy/doc/config/confrest.py Sun Aug 17 15:15:50 2008 @@ -30,12 +30,13 @@ def get_content(self, txtpath, encoding): if txtpath.basename == "commandline.txt": - result = [".. contents::"] - for descr in all_optiondescrs: - result.append(".. %s_:\n" % (descr._name, )) - result.append(make_cmdline_overview(descr).text()) - result.append("") - result.append(txtpath.read()) + result = [] + for line in txtpath.read().splitlines(): + if line.startswith('.. GENERATE:'): + start = line[len('.. GENERATE:'):].strip() + descr = start_to_descr[start] + line = make_cmdline_overview(descr, title=False).text() + result.append(line) return "\n".join(result) fullpath = txtpath.purebasename start = fullpath.split(".")[0] Modified: pypy/dist/pypy/doc/config/index.txt ============================================================================== --- pypy/dist/pypy/doc/config/index.txt (original) +++ pypy/dist/pypy/doc/config/index.txt Sun Aug 17 15:15:50 2008 @@ -44,9 +44,9 @@ .. image:: ../image/compat-matrix.png .. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#overview-of-command-line-options-for-objspace -.. _`object space options`: commandline.html#overview-of-command-line-options-for-objspace -.. _`translation options`: commandline.html#overview-of-command-line-options-for-translation +.. _`objspace options`: commandline.html#objspace +.. _`object space options`: commandline.html#objspace +.. _`translation options`: commandline.html#translation .. _`overview`: commandline.html .. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html .. _`What PyPy can do for your objects`: ../objspace-proxies.html Modified: pypy/dist/pypy/doc/garbage_collection.txt ============================================================================== --- pypy/dist/pypy/doc/garbage_collection.txt (original) +++ pypy/dist/pypy/doc/garbage_collection.txt Sun Aug 17 15:15:50 2008 @@ -26,7 +26,7 @@ For more details, see the `overview of command line options for translation`_. -.. _`overview of command line options for translation`: config/commandline.html#overview-of-command-line-options-for-translation +.. _`overview of command line options for translation`: config/commandline.html#translation Mark and Sweep -------------- Modified: pypy/dist/pypy/doc/getting-started.txt ============================================================================== --- pypy/dist/pypy/doc/getting-started.txt (original) +++ pypy/dist/pypy/doc/getting-started.txt Sun Aug 17 15:15:50 2008 @@ -546,44 +546,49 @@ .. _`windows document`: windows.html You can translate the whole of PyPy's Python interpreter to low level C -code. This is the largest and ultimate example of RPython program that -our translation toolchain can process. The most standard variant -nowadays is:: +code. (This is the largest and ultimate example of RPython program that +our translation toolchain can process.) 
- cd pypy/translator/goal - python translate.py --gc=hybrid --thread targetpypystandalone.py --allworkingmodules --allopts +1. Install dependencies. You need (these are Debian package names, + adapt as needed): -Dependencies: this will compile all supported built-in modules, some of -which have external dependencies. On a Debian Linux, for example, you -need to install the following packages: a full C compiler like gcc; -``python-dev, python-ctypes``; ``libffi-dev, libz-dev, libbz2-dev, -libncurses-dev``. - -This whole process will take some time and quite a lot of memory (around -1200 MB on 32 bit machines). It creates an executable ``pypy-c`` in -the current directory. -The ``--gc=hybrid`` option means that the ``pypy-c`` will use our own -exact generational garbage collector implementation, whose performance -is rather good nowadays. The ``--thread`` option enables the thread -module, which is still slightly experimental. -The ``--allopts`` option enables all the -worthwhile performance optimizations, but slows down the translation -itself. On Linux 32-bit Intel machines, if you don't need threads, you -can get some extra speed (and extra translation time) by removing -``--thread`` and replacing it with ``--gcrootfinder=asmgcc``. - -An alternative is to use the `Boehm-Demers-Weiser garbage -collector`_ instead of our own. For this, use ``--gc=boehm``. -Be sure to install Boehm before starting the translation (e.g. by running -``apt-get install libgc-dev`` on Debian or Ubuntu). Translating with Boehm -is somewhat faster and less memory-hungry than translating with our own GCs. - -In any case, as described above, you can find the produced executable under the -name ``pypy-c``. Type ``pypy-c --help`` to see the options it supports -- -mainly the same basic options as CPython. In addition, ``pypy-c --info`` -prints the translation options that where used to produce this particular -executable. This executable can be moved around or copied on other machines; -see Installation_ below. + * gcc + * ``python-dev`` + * ``python-ctypes`` + * ``libffi-dev`` + * ``libz-dev`` (for the optional ``zlib`` module) + * ``libbz2-dev`` (for the optional ``bz2`` module) + * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) + * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) + +2. Be warned that translation is time-consuming (30 min to + over one hour) and extremely RAM-hungry (kill it if it + starts swapping heavily). If you have less than 1.5 GB of + RAM (or a slow machine) you might want to pick the + `optimization level`_ `1` in the next step. The default + level `3` gives much better results, though. + +3. Run:: + + cd pypy/translator/goal + python translate.py --opt=3 targetpypystandalone.py --allworkingmodules + + possibly replacing ``--opt=3`` with ``--opt=1`` or another + `optimization level`_ of your choice. + + On Linux 32-bit Intel machines, if you don't need threads, you + can get some extra speed (and extra translation time) by adding + ``--gcrootfinder=asmgcc`` just after the ``--opt`` option. + +.. _`optimization level`: config/opt.html + +If everything works correctly this will create an executable +``pypy-c`` in the current directory. Type ``pypy-c --help`` +to see the options it supports -- mainly the same basic +options as CPython. In addition, ``pypy-c --info`` prints the +translation options that where used to produce this particular +executable. This executable can be moved around or copied on +other machines; see Installation_ below. 
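An aside on the optimization levels referenced above: the reason ``libgc-dev`` is only
needed for ``--opt=0``, ``1`` or ``size`` is the ``OPT_TABLE`` added to
``translationoption.py`` earlier in this patch, which maps those levels to the Boehm
collector, ``mem`` to ``marksweep``, and ``2``/``3`` to the ``hybrid`` GC. A minimal
sketch of inspecting that mapping programmatically (Python 2, mirroring
``test_set_opt_level`` from this commit; assumes the PyPy source tree is on
``sys.path``):

    from pypy.config.pypyoption import get_pypy_config
    from pypy.config.translationoption import set_opt_level

    for level in ['0', '1', 'size', 'mem', '2', '3']:
        conf = get_pypy_config()      # fresh config for each level
        set_opt_level(conf, level)    # applies the OPT_TABLE suggestions
        # expected: 0/1/size -> boehm, mem -> marksweep, 2/3 -> hybrid
        print level, '->', conf.translation.gc
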
The ``translate.py`` script takes a very large number of options controlling what to translate and how. See ``translate.py -h``. Some of the more Modified: pypy/dist/pypy/doc/interpreter-optimizations.txt ============================================================================== --- pypy/dist/pypy/doc/interpreter-optimizations.txt (original) +++ pypy/dist/pypy/doc/interpreter-optimizations.txt Sun Aug 17 15:15:50 2008 @@ -384,6 +384,6 @@ .. waffles about ropes -You can build a pypy with all generally useful optimizations turned on by using -the :config:`objspace.std.allopts` option. Such a build is between 1.5 and 2.5 -times faster than the default, depending on the benchmark. +When building pypy, all generally useful optimizations are turned on by default +unless you explicitly lower the translation optimization level with the +``--opt`` option. Modified: pypy/dist/pypy/tool/bench/test/test_pypyresult.py ============================================================================== --- pypy/dist/pypy/tool/bench/test/test_pypyresult.py (original) +++ pypy/dist/pypy/tool/bench/test/test_pypyresult.py Sun Aug 17 15:15:50 2008 @@ -12,8 +12,8 @@ return cache[0] pp = tmpdir.join("testpickle") f = pp.open("wb") - pickle.dump({'./pypy-llvm-39474-faassen-c_richards': 5}, f) - pickle.dump({'./pypy-llvm-39474-faassen-c_richards': 42.0}, f) + pickle.dump({'./pypy-llvm-39474-O3-c_richards': 5}, f) + pickle.dump({'./pypy-llvm-39474-O3-c_richards': 42.0}, f) f.close() cache.append(pp) return pp @@ -38,9 +38,9 @@ assert res.besttime == 2.0 def test_BenchResult_pypy(): - res = BenchResult("pypy-llvm-39474-faassen-c_richards", + res = BenchResult("pypy-llvm-39474-O3-c_richards", besttime=2.0, numruns=3) - assert res.executable == "pypy-llvm-39474-faassen-c" + assert res.executable == "pypy-llvm-39474-O3-c" assert res.revision == 39474 assert res.name == "richards" assert res.numruns == 3 Modified: pypy/dist/pypy/translator/driver.py ============================================================================== --- pypy/dist/pypy/translator/driver.py (original) +++ pypy/dist/pypy/translator/driver.py Sun Aug 17 15:15:50 2008 @@ -15,22 +15,6 @@ log = py.log.Producer("translation") py.log.setconsumer("translation", ansi_log) -DEFAULTS = { - 'translation.gc': 'ref', - 'translation.cc': None, - 'translation.profopt': None, - - 'translation.thread': False, # influences GC policy - - 'translation.stackless': False, - 'translation.debug': True, - 'translation.insist': False, - 'translation.backend': 'c', - 'translation.fork_before': None, - 'translation.backendopt.raisingop2direct_call' : False, - 'translation.backendopt.merge_if_blocks': True, -} - def taskdef(taskfunc, deps, title, new_state=None, expected_states=[], idemp=False, earlycheck=None): @@ -93,7 +77,7 @@ if config is None: from pypy.config.pypyoption import get_pypy_config - config = get_pypy_config(DEFAULTS, translating=True) + config = get_pypy_config(translating=True) self.config = config if overrides is not None: self.config.override(overrides) Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/dist/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/dist/pypy/translator/goal/bench-cronjob.py Sun Aug 17 15:15:50 2008 @@ -181,15 +181,15 @@ if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ c - c--stackless--_faassen - c--_faassen--_allworkingmodules - 
c--thread--gc=hybrid--_faassen - c--gc=semispace--_faassen - c--gc=generation--_faassen - c--gc=hybrid--_faassen - cli--_faassen - jvm--_faassen - jvm--inline-threshold=0--_faassen + c--stackless--_O3 + c--_O3--_allworkingmodules + c--thread--gc=hybrid--_O3 + c--gc=semispace--_O3 + c--gc=generation--_O3 + c--gc=hybrid--_O3 + cli--_O3 + jvm--_O3 + jvm--inline-threshold=0--_O3 """.split('\n') if backend.strip() and not backend.strip().startswith('#')] print time.ctime() for backend in backends: Modified: pypy/dist/pypy/translator/goal/targetprologstandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetprologstandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetprologstandalone.py Sun Aug 17 15:15:50 2008 @@ -27,9 +27,7 @@ # _____ Define and setup target ___ -def handle_config(config): - return - config.translation.stackless = True +# XXX this should suggest --stackless somehow def target(driver, args): driver.exe_name = 'pyrolog-%(backend)s' Modified: pypy/dist/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetpypystandalone.py Sun Aug 17 15:15:50 2008 @@ -7,6 +7,7 @@ from pypy.interpreter.error import OperationError from pypy.translator.goal.ann_override import PyPyAnnotatorPolicy from pypy.config.config import Config, to_optparse, make_dict, SUPPRESS_USAGE +from pypy.config.config import ConflictConfigError from pypy.tool.option import make_objspace from pypy.translator.goal.nanos import setup_nanos @@ -90,26 +91,29 @@ parserkwargs={'usage': self.usage}) return parser - def handle_config(self, config): + def handle_config(self, config, translateconfig): + self.translateconfig = translateconfig + # set up the objspace optimizations based on the --opt argument + from pypy.config.pypyoption import set_pypy_opt_level + set_pypy_opt_level(config, translateconfig.opt) + # as of revision 27081, multimethod.py uses the InstallerVersion1 by default # because it is much faster both to initialize and run on top of CPython. # The InstallerVersion2 is optimized for making a translator-friendly # structure for low level backends. However, InstallerVersion1 is still # preferable for high level backends, so we patch here. + from pypy.objspace.std import multimethod - if config.translation.type_system == 'lltype': + if config.objspace.std.multimethods == 'mrd': assert multimethod.InstallerVersion1.instance_counter == 0,\ 'The wrong Installer version has already been instatiated' multimethod.Installer = multimethod.InstallerVersion2 - else: + elif config.objspace.std.multimethods == 'doubledispatch': # don't rely on the default, set again here assert multimethod.InstallerVersion2.instance_counter == 0,\ 'The wrong Installer version has already been instatiated' multimethod.Installer = multimethod.InstallerVersion1 - def handle_translate_config(self, translateconfig): - self.translateconfig = translateconfig - def print_help(self, config): self.opt_parser(config).print_help() @@ -131,7 +135,16 @@ if config.translation.thread: config.objspace.usemodules.thread = True elif config.objspace.usemodules.thread: - config.translation.thread = True + try: + config.translation.thread = True + except ConflictConfigError: + # If --allworkingmodules is given, we reach this point + # if threads cannot be enabled (e.g. 
they conflict with + # something else). In this case, we can try setting the + # usemodules.thread option to False again. It will + # cleanly fail if that option was set to True by the + # command-line directly instead of via --allworkingmodules. + config.objspace.usemodules.thread = False if config.translation.stackless: config.objspace.usemodules._stackless = True @@ -184,7 +197,7 @@ def interface(self, ns): for name in ['take_options', 'handle_config', 'print_help', 'target', - 'handle_translate_config', 'portal', + 'portal', 'get_additional_config_options']: ns[name] = getattr(self, name) Modified: pypy/dist/pypy/translator/goal/translate.py ============================================================================== --- pypy/dist/pypy/translator/goal/translate.py (original) +++ pypy/dist/pypy/translator/goal/translate.py Sun Aug 17 15:15:50 2008 @@ -12,6 +12,8 @@ ArbitraryOption, StrOption, IntOption, Config, \ ChoiceOption, OptHelpFormatter from pypy.config.translationoption import get_combined_translation_config +from pypy.config.translationoption import set_opt_level +from pypy.config.translationoption import OPT_LEVELS, DEFAULT_OPT_LEVEL GOALS= [ @@ -46,6 +48,9 @@ translate_optiondescr = OptionDescription("translate", "XXX", [ StrOption("targetspec", "XXX", default='targetpypystandalone', cmdline=None), + ChoiceOption("opt", + "optimization level", OPT_LEVELS, default=DEFAULT_OPT_LEVEL, + cmdline="--opt -O"), BoolOption("profile", "cProfile (to debug the speed of the translation process)", default=False, @@ -72,17 +77,7 @@ OVERRIDES = { 'translation.debug': False, - 'translation.insist': False, - - 'translation.gc': 'boehm', 'translation.backend': 'c', - 'translation.stackless': False, - 'translation.backendopt.raisingop2direct_call' : False, - 'translation.backendopt.merge_if_blocks': True, - - 'translation.cc': None, - 'translation.profopt': None, - 'translation.output': None, } import py @@ -162,13 +157,13 @@ existing_config=config, translating=True) + # apply the optimization level settings + set_opt_level(config, translateconfig.opt) + # let the target modify or prepare itself # based on the config if 'handle_config' in targetspec_dic: - targetspec_dic['handle_config'](config) - - if 'handle_translate_config' in targetspec_dic: - targetspec_dic['handle_translate_config'](translateconfig) + targetspec_dic['handle_config'](config, translateconfig) if translateconfig.help: opt_parser.print_help() From arigo at codespeak.net Sun Aug 17 15:16:07 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 17 Aug 2008 15:16:07 +0200 (CEST) Subject: [pypy-svn] r57355 - pypy/branch/opt-option Message-ID: <20080817131607.4431A169E8F@codespeak.net> Author: arigo Date: Sun Aug 17 15:16:05 2008 New Revision: 57355 Removed: pypy/branch/opt-option/ Log: Kill merged branch. From arigo at codespeak.net Sun Aug 17 16:57:51 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 17 Aug 2008 16:57:51 +0200 (CEST) Subject: [pypy-svn] r57356 - in pypy/dist/pypy: lang/prolog/interpreter/test module/pypyjit/test translator/goal Message-ID: <20080817145751.0B6CF169EBA@codespeak.net> Author: arigo Date: Sun Aug 17 16:57:48 2008 New Revision: 57356 Modified: pypy/dist/pypy/lang/prolog/interpreter/test/test_jit.py pypy/dist/pypy/module/pypyjit/test/test_jit_setup.py pypy/dist/pypy/module/pypyjit/test/test_newbool.py pypy/dist/pypy/translator/goal/targetprologstandalone.py Log: Skip or fix tests that fail because 'jit' was removed from trunk. 
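A note on the configuration machinery used throughout the changes above:
``set_opt_level`` and ``set_pypy_opt_level`` only *suggest* values, and the config
objects themselves police illegal combinations. A suggestion that names a value outside
an option's allowed choices is rejected with ``ConfigError`` when it is triggered, and
directly assigning conflicting values raises ``ConflictConfigError``, which is exactly
what ``handle_config`` above catches when it tries to turn on ``translation.thread``.
A small self-contained sketch of the rejection case, modelled on ``test_bogus_suggests``
added to ``test_config.py`` in this commit (Python 2, PyPy source tree on ``sys.path``):

    import py
    from pypy.config.config import OptionDescription, BoolOption, ChoiceOption
    from pypy.config.config import Config, ConfigError

    descr = OptionDescription("test", '', [
        # 'bogusvalue' is not among the allowed choices of 'opt', so
        # triggering the suggestion must fail with ConfigError.
        BoolOption("toplevel", "", suggests=[("opt", "bogusvalue")]),
        ChoiceOption("opt", "", ["a", "b", "c"], "a"),
    ])
    c = Config(descr)
    py.test.raises(ConfigError, "c.toplevel = True")
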
Modified: pypy/dist/pypy/lang/prolog/interpreter/test/test_jit.py ============================================================================== --- pypy/dist/pypy/lang/prolog/interpreter/test/test_jit.py (original) +++ pypy/dist/pypy/lang/prolog/interpreter/test/test_jit.py Sun Aug 17 16:57:48 2008 @@ -1,4 +1,5 @@ import py +py.test.skip("JIT disabled for now") from pypy.jit.timeshifter.test.test_portal import PortalTest, P_NOVIRTUAL from pypy.lang.prolog.interpreter import portal from pypy.lang.prolog.interpreter import engine, term Modified: pypy/dist/pypy/module/pypyjit/test/test_jit_setup.py ============================================================================== --- pypy/dist/pypy/module/pypyjit/test/test_jit_setup.py (original) +++ pypy/dist/pypy/module/pypyjit/test/test_jit_setup.py Sun Aug 17 16:57:48 2008 @@ -1,3 +1,5 @@ +import py +py.test.skip("JIT disabled for now") from pypy.conftest import gettestobjspace class AppTestPyPyJIT: Modified: pypy/dist/pypy/module/pypyjit/test/test_newbool.py ============================================================================== --- pypy/dist/pypy/module/pypyjit/test/test_newbool.py (original) +++ pypy/dist/pypy/module/pypyjit/test/test_newbool.py Sun Aug 17 16:57:48 2008 @@ -1,3 +1,5 @@ +import py +py.test.skip("JIT disabled for now") from pypy.rpython.lltypesystem import lltype from pypy.jit.hintannotator.annotator import HintAnnotatorPolicy from pypy.jit.timeshifter import rvalue Modified: pypy/dist/pypy/translator/goal/targetprologstandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetprologstandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetprologstandalone.py Sun Aug 17 16:57:48 2008 @@ -14,9 +14,9 @@ term.DEBUG = False def entry_point(argv): - from pypy.jit.codegen.hlinfo import highleveljitinfo - if highleveljitinfo.sys_executable is None: - highleveljitinfo.sys_executable = argv[0] + #from pypy.jit.codegen.hlinfo import highleveljitinfo + #if highleveljitinfo.sys_executable is None: + # highleveljitinfo.sys_executable = argv[0] if len(argv) == 2: execute(e, argv[1]) try: From arigo at codespeak.net Sun Aug 17 17:34:09 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Sun, 17 Aug 2008 17:34:09 +0200 (CEST) Subject: [pypy-svn] r57360 - in pypy/dist/pypy/doc: . jit tool Message-ID: <20080817153409.6E0DE168472@codespeak.net> Author: arigo Date: Sun Aug 17 17:34:07 2008 New Revision: 57360 Modified: pypy/dist/pypy/doc/_ref.txt pypy/dist/pypy/doc/index.txt pypy/dist/pypy/doc/jit/_ref.txt pypy/dist/pypy/doc/tool/makeref.py Log: Fix the doc links to the jit: make them point to http urls in the oo-jit branch for now. Modified: pypy/dist/pypy/doc/_ref.txt ============================================================================== --- pypy/dist/pypy/doc/_ref.txt (original) +++ pypy/dist/pypy/doc/_ref.txt Sun Aug 17 17:34:07 2008 @@ -31,11 +31,6 @@ .. _`interpreter/pyparser/`: .. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser .. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`jit/`: ../../pypy/jit -.. _`jit/codegen/`: ../../pypy/jit/codegen -.. _`jit/hintannotator/`: ../../pypy/jit/hintannotator -.. _`jit/timeshifter/`: ../../pypy/jit/timeshifter -.. _`jit/tl/`: ../../pypy/jit/tl .. _`lang/`: ../../pypy/lang .. _`lang/js/`: ../../pypy/lang/js .. _`lang/prolog/`: ../../pypy/lang/prolog @@ -103,4 +98,5 @@ .. _`translator/jvm/`: ../../pypy/translator/jvm .. 
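Before the diff itself, a condensed sketch of the link-resolution idea that the
``makeref.py`` change below implements: each ``linkname`` found in the docs is tried
against a list that now mixes local checkout directories (``py.path.local`` objects)
and remote branch URLs (plain strings); local candidates are checked on disk, URL
candidates are probed with ``urllib2``. Simplified illustration only (the real script
additionally rewrites local hits into relative ``../`` targets):

    import posixpath, urllib2

    def resolve(linkname, start_dirs):
        # start_dirs mixes py.path.local directories and 'http://...' strings,
        # like the possible_start_dirs list in the patch below.
        for startdir in start_dirs:
            if isinstance(startdir, str):
                target = posixpath.join(startdir, linkname)
                try:
                    urllib2.urlopen(target).close()   # probe the remote URL
                except urllib2.HTTPError:
                    continue
                return target
            cand = startdir.join(linkname)
            if cand.check():                          # file exists locally
                return cand
        return None                                   # probably a bogus link
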
_`translator/llvm/`: ../../pypy/translator/llvm .. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool \ No newline at end of file +.. _`translator/tool/`: ../../pypy/translator/tool +.. _`jit/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/ Modified: pypy/dist/pypy/doc/index.txt ============================================================================== --- pypy/dist/pypy/doc/index.txt (original) +++ pypy/dist/pypy/doc/index.txt Sun Aug 17 17:34:07 2008 @@ -221,15 +221,6 @@ `jit/`_ the `just-in-time compiler generator`_ -`jit/codegen/`_ `jit backends`_ for different architectures - -`jit/hintannotator/`_ the `hint-annotator`_ that analyzes an interpreter - -`jit/timeshifter/`_ the `timeshifter`_ that turns an interpreter into a JIT compiler - -`jit/tl/`_ interpreters for toy languages, with which we test the - JIT generator - `lang/`_ interpreters for non-Python languages, written in RPython_ `lang/js/`_ a JavaScript interpreter (in-progress) Modified: pypy/dist/pypy/doc/jit/_ref.txt ============================================================================== --- pypy/dist/pypy/doc/jit/_ref.txt (original) +++ pypy/dist/pypy/doc/jit/_ref.txt Sun Aug 17 17:34:07 2008 @@ -1,8 +1,8 @@ .. _`demo/jit/`: ../../../demo/jit .. _`demo/jit/f1.py`: ../../../demo/jit/f1.py -.. _`pypy/jit/codegen/model.py`: ../../../pypy/jit/codegen/model.py -.. _`pypy/jit/timeshifter/rvalue.py`: ../../../pypy/jit/timeshifter/rvalue.py -.. _`pypy/jit/tl/targettiny1.py`: ../../../pypy/jit/tl/targettiny1.py -.. _`pypy/jit/tl/tiny1.py`: ../../../pypy/jit/tl/tiny1.py -.. _`pypy/jit/tl/tiny2.py`: ../../../pypy/jit/tl/tiny2.py -.. _`rpython/rlist.py`: ../../../pypy/rpython/rlist.py \ No newline at end of file +.. _`rpython/rlist.py`: ../../../pypy/rpython/rlist.py +.. _`pypy/jit/codegen/model.py`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/codegen/model.py +.. _`pypy/jit/timeshifter/rvalue.py`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/timeshifter/rvalue.py +.. _`pypy/jit/tl/targettiny1.py`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/tl/targettiny1.py +.. _`pypy/jit/tl/tiny1.py`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/tl/tiny1.py +.. 
_`pypy/jit/tl/tiny2.py`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/jit/tl/tiny2.py Modified: pypy/dist/pypy/doc/tool/makeref.py ============================================================================== --- pypy/dist/pypy/doc/tool/makeref.py (original) +++ pypy/dist/pypy/doc/tool/makeref.py Sun Aug 17 17:34:07 2008 @@ -4,9 +4,19 @@ import pypy pypydir = py.path.local(pypy.__file__).dirpath() distdir = pypydir.dirpath() -dist_url = 'http://codespeak.net/svn/pypy/dist/' issue_url = 'http://codespeak.net/issue/pypy-dev/' +import urllib2, posixpath + + +possible_start_dirs = [ + distdir, + distdir.join('pypy'), + # for now, let the jit links point to the oo-jit branch + 'http://codespeak.net/svn/pypy/branch/oo-jit', + 'http://codespeak.net/svn/pypy/branch/oo-jit/pypy', + ] + def makeref(docdir): reffile = docdir.join('_ref.txt') @@ -25,19 +35,27 @@ continue for linkname in linkrex.findall(textfile.read()): if '/' in linkname: - for startloc in ('', 'pypy'): - cand = distdir.join(startloc, linkname) - if cand.check(): - rel = cand.relto(distdir) - assert docdir.relto(distdir) + for startdir in possible_start_dirs: + if isinstance(startdir, str): + assert startdir.startswith('http://') + target = posixpath.join(startdir, linkname) + try: + urllib2.urlopen(target).close() + except urllib2.HTTPError: + continue + else: + cand = startdir.join(linkname) + if not cand.check(): + continue + assert cand.relto(distdir) dotdots = 0 p = docdir while p != distdir: p = p.dirpath() dotdots += 1 target = '../' * dotdots + cand.relto(distdir) - addlink(linkname, target) - break + addlink(linkname, target) + break else: print "WARNING %s: link %r may be bogus" %(textfile, linkname) elif linkname.startswith('issue'): @@ -53,6 +71,7 @@ lines.append(".. _`%s`:" % linkname) lines.append(".. _`%s`: %s" %(linknamelist[-1], linktarget)) + lines.append('') reffile.write("\n".join(lines)) print "wrote %d references to %r" %(len(lines), reffile) #print "last ten lines" From cami at codespeak.net Sun Aug 17 19:49:03 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Sun, 17 Aug 2008 19:49:03 +0200 (CEST) Subject: [pypy-svn] r57364 - in pypy/dist/pypy/lang/gameboy: . 
test Message-ID: <20080817174903.04EBC168574@codespeak.net> Author: cami Date: Sun Aug 17 19:49:00 2008 New Revision: 57364 Modified: pypy/dist/pypy/lang/gameboy/cartridge.py pypy/dist/pypy/lang/gameboy/gameboy.py pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/test/test_cpu.py pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py Log: fixed bug in cartrdige mbc1 where reset() cleared the rom[] and ram[] by mistake added test for this bug Modified: pypy/dist/pypy/lang/gameboy/cartridge.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cartridge.py (original) +++ pypy/dist/pypy/lang/gameboy/cartridge.py Sun Aug 17 19:49:00 2008 @@ -12,6 +12,7 @@ #from pypy.rlib.rstr import str_replace import os +import pdb # HELPERS ---------------------------------------------------------------------- @@ -72,6 +73,8 @@ self.clock = clock self.cartridge = None self.mbc = None + self.rom = [0] + self.ram = [0] def reset(self): if not self.has_battery(): @@ -278,6 +281,8 @@ self.max_ram_bank_size = max_ram_bank_size self.rom_bank_size = rom_bank_size self.rom_bank = self.rom_bank_size + self.rom = [] + self.ram = [] self.reset() self.set_rom(rom) self.set_ram(ram) @@ -285,8 +290,6 @@ def reset(self): self.ram_bank = 0 self.ram_enable = False - self.rom = [] - self.ram = [] self.rom_size = 0 self.ram_size = 0 Modified: pypy/dist/pypy/lang/gameboy/gameboy.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy.py Sun Aug 17 19:49:00 2008 @@ -16,7 +16,8 @@ from pypy.lang.gameboy.video import * from pypy.lang.gameboy.cartridge import * - +import pdb + class GameBoy(object): def __init__(self): @@ -68,6 +69,7 @@ self.sound.stop() def reset(self): + print "python resetting gameboy" self.ram.reset() self.memory_bank_controller.reset() self.interrupt.reset() @@ -78,7 +80,7 @@ self.video.reset() self.sound.reset() self.cpu.set_rom(self.cartridge_manager.get_rom()) - #self.draw_logo() + self.draw_logo() def get_cycles(self): return min(min(min(min( self.video.get_cycles(), Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Sun Aug 17 19:49:00 2008 @@ -31,11 +31,12 @@ def mainLoop(self): + self.reset() try: isRunning = True while isRunning and self.handle_events(): self.emulate(constants.GAMEBOY_CLOCK >> 2) - #RSDL.Delay(1) + RSDL.Delay(1) except : lltype.free(self.event, flavor='raw') RSDL.Quit() @@ -94,7 +95,7 @@ #if y%2 == 0 or True: # px = self.get_pixel_color(x, y) # str += ["#", "%", "+", " ", " "][px] - pass + RSDL_helper.set_pixel(self.screen, x, y, self.get_pixel_color(x, y)) #print str; def pixel_map(self, x, y): Modified: pypy/dist/pypy/lang/gameboy/test/test_cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_cpu.py Sun Aug 17 19:49:00 2008 @@ -1037,7 +1037,7 @@ assert cpu.sp.get() == spValue+value # ldh_A_mem -def test_0xF0(): +def test_0xF0_store_memory_at_axpanded_fetch_address_in_a(): cpu = get_cpu() valueA = 0x11 value = 0x12 Modified: pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py 
============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py Sun Aug 17 19:49:00 2008 @@ -185,6 +185,24 @@ basic_read_write_test(mbc, 0, 0x7FFF) +def test_mbc1_reset(mbc=None): + if mbc==None: + mbc = get_mbc1() + mbc.rom_bank = constants.ROM_BANK_SIZE +1 + mbc.memory_model = 1 + mbc.ram_enable = True + mbc.ram_bank = 1 + mbc.rom = range(0, 128, 3) + mbc.ram = range(0, 128, 3) + mbc.reset() + assert mbc.rom_bank == constants.ROM_BANK_SIZE + assert mbc.memory_model == 0 + assert mbc.ram_enable == False + assert mbc.ram_bank == 0 + assert len(mbc.rom) > 0 + assert len(mbc.ram) > 0 + + def test_mbc1_write_ram_enable(mbc=None): if mbc is None: mbc = get_mbc1() From bgola at codespeak.net Sun Aug 17 23:48:17 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Sun, 17 Aug 2008 23:48:17 +0200 (CEST) Subject: [pypy-svn] r57380 - in pypy/branch/2.5-features/lib-python/modified-2.5.1/test: . output Message-ID: <20080817214817.3EF5F169EEA@codespeak.net> Author: bgola Date: Sun Aug 17 23:48:15 2008 New Revision: 57380 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/output/ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_copy.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py (contents, props changed) Log: some changes applied to tests (stdlib) to work properly in pypy Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,11 @@ +""" +This package only contains the tests that we have modified for PyPy. 
+It uses the 'official' hack to include the rest of the standard +'test' package from CPython. + +This assumes that sys.path is configured to contain +'lib-python/modified-2.5.1' before 'lib-python/2.5.1'. +""" + +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,523 @@ +""" +Tests common to list and UserList.UserList +""" + +import sys +import os + +import unittest +from test import test_support, seq_tests + +class CommonTest(seq_tests.CommonTest): + + def test_init(self): + # Iterable arg is optional + self.assertEqual(self.type2test([]), self.type2test()) + + # Init clears previous values + a = self.type2test([1, 2, 3]) + a.__init__() + self.assertEqual(a, self.type2test([])) + + # Init overwrites previous values + a = self.type2test([1, 2, 3]) + a.__init__([4, 5, 6]) + self.assertEqual(a, self.type2test([4, 5, 6])) + + # Mutables always return a new object + b = self.type2test(a) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + + def test_repr(self): + l0 = [] + l2 = [0, 1, 2] + a0 = self.type2test(l0) + a2 = self.type2test(l2) + + self.assertEqual(str(a0), str(l0)) + self.assertEqual(repr(a0), repr(l0)) + self.assertEqual(`a2`, `l2`) + self.assertEqual(str(a2), "[0, 1, 2]") + self.assertEqual(repr(a2), "[0, 1, 2]") + + a2.append(a2) + a2.append(3) + self.assertEqual(str(a2), "[0, 1, 2, [...], 3]") + self.assertEqual(repr(a2), "[0, 1, 2, [...], 3]") + + def test_print(self): + d = self.type2test(xrange(200)) + d.append(d) + d.extend(xrange(200,400)) + d.append(d) + d.append(400) + try: + fo = open(test_support.TESTFN, "wb") + print >> fo, d, + fo.close() + fo = open(test_support.TESTFN, "rb") + self.assertEqual(fo.read(), repr(d)) + finally: + fo.close() + os.remove(test_support.TESTFN) + + def test_set_subscript(self): + a = self.type2test(range(20)) + self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 0), [1,2,3]) + self.assertRaises(TypeError, a.__setitem__, slice(0, 10), 1) + self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 2), [1,2]) + self.assertRaises(TypeError, a.__getitem__, 'x', 1) + a[slice(2,10,3)] = [1,2,3] + self.assertEqual(a, self.type2test([0, 1, 1, 3, 4, 2, 6, 7, 3, + 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19])) + + def test_reversed(self): + a = self.type2test(range(20)) + r = reversed(a) + self.assertEqual(list(r), self.type2test(range(19, -1, -1))) + self.assertRaises(StopIteration, r.next) + self.assertEqual(list(reversed(self.type2test())), + self.type2test()) + + def test_setitem(self): + a = self.type2test([0, 1]) + a[0] = 0 + a[1] = 100 + self.assertEqual(a, self.type2test([0, 100])) + a[-1] = 200 + self.assertEqual(a, self.type2test([0, 200])) + a[-2] = 100 + self.assertEqual(a, self.type2test([100, 200])) + self.assertRaises(IndexError, a.__setitem__, -3, 200) + self.assertRaises(IndexError, a.__setitem__, 2, 200) + + a = self.type2test([]) + self.assertRaises(IndexError, a.__setitem__, 0, 200) + self.assertRaises(IndexError, a.__setitem__, -1, 200) + self.assertRaises(TypeError, a.__setitem__) + + a = self.type2test([0,1,2,3,4]) + a[0L] = 1 + a[1L] = 2 + a[2L] = 3 + self.assertEqual(a, self.type2test([1,2,3,3,4])) + a[0] = 5 + a[1] = 6 + a[2] = 7 + self.assertEqual(a, self.type2test([5,6,7,3,4])) + a[-2L] = 88 + a[-1L] = 
99 + self.assertEqual(a, self.type2test([5,6,7,88,99])) + a[-2] = 8 + a[-1] = 9 + self.assertEqual(a, self.type2test([5,6,7,8,9])) + + def test_delitem(self): + a = self.type2test([0, 1]) + del a[1] + self.assertEqual(a, [0]) + del a[0] + self.assertEqual(a, []) + + a = self.type2test([0, 1]) + del a[-2] + self.assertEqual(a, [1]) + del a[-1] + self.assertEqual(a, []) + + a = self.type2test([0, 1]) + self.assertRaises(IndexError, a.__delitem__, -3) + self.assertRaises(IndexError, a.__delitem__, 2) + + a = self.type2test([]) + self.assertRaises(IndexError, a.__delitem__, 0) + + self.assertRaises(TypeError, a.__delitem__) + + def test_setslice(self): + l = [0, 1] + a = self.type2test(l) + + for i in range(-3, 4): + a[:i] = l[:i] + self.assertEqual(a, l) + a2 = a[:] + a2[:i] = a[:i] + self.assertEqual(a2, a) + a[i:] = l[i:] + self.assertEqual(a, l) + a2 = a[:] + a2[i:] = a[i:] + self.assertEqual(a2, a) + for j in range(-3, 4): + a[i:j] = l[i:j] + self.assertEqual(a, l) + a2 = a[:] + a2[i:j] = a[i:j] + self.assertEqual(a2, a) + + aa2 = a2[:] + aa2[:0] = [-2, -1] + self.assertEqual(aa2, [-2, -1, 0, 1]) + aa2[0:] = [] + self.assertEqual(aa2, []) + + a = self.type2test([1, 2, 3, 4, 5]) + a[:-1] = a + self.assertEqual(a, self.type2test([1, 2, 3, 4, 5, 5])) + a = self.type2test([1, 2, 3, 4, 5]) + a[1:] = a + self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5])) + a = self.type2test([1, 2, 3, 4, 5]) + a[1:-1] = a + self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5, 5])) + + a = self.type2test([]) + a[:] = tuple(range(10)) + self.assertEqual(a, self.type2test(range(10))) + + if hasattr(a, '__setslice__'): + self.assertRaises(TypeError, a.__setslice__, 0, 1, 5) + + self.assertRaises(TypeError, a.__setslice__) + + def test_delslice(self): + a = self.type2test([0, 1]) + del a[1:2] + del a[0:1] + self.assertEqual(a, self.type2test([])) + + a = self.type2test([0, 1]) + del a[1L:2L] + del a[0L:1L] + self.assertEqual(a, self.type2test([])) + + a = self.type2test([0, 1]) + del a[-2:-1] + self.assertEqual(a, self.type2test([1])) + + a = self.type2test([0, 1]) + del a[-2L:-1L] + self.assertEqual(a, self.type2test([1])) + + a = self.type2test([0, 1]) + del a[1:] + del a[:1] + self.assertEqual(a, self.type2test([])) + + a = self.type2test([0, 1]) + del a[1L:] + del a[:1L] + self.assertEqual(a, self.type2test([])) + + a = self.type2test([0, 1]) + del a[-1:] + self.assertEqual(a, self.type2test([0])) + + a = self.type2test([0, 1]) + del a[-1L:] + self.assertEqual(a, self.type2test([0])) + + a = self.type2test([0, 1]) + del a[:] + self.assertEqual(a, self.type2test([])) + + def test_append(self): + a = self.type2test([]) + a.append(0) + a.append(1) + a.append(2) + self.assertEqual(a, self.type2test([0, 1, 2])) + + self.assertRaises(TypeError, a.append) + + def test_extend(self): + a1 = self.type2test([0]) + a2 = self.type2test((0, 1)) + a = a1[:] + a.extend(a2) + self.assertEqual(a, a1 + a2) + + a.extend(self.type2test([])) + self.assertEqual(a, a1 + a2) + + a.extend(a) + self.assertEqual(a, self.type2test([0, 0, 1, 0, 0, 1])) + + a = self.type2test("spam") + a.extend("eggs") + self.assertEqual(a, list("spameggs")) + + self.assertRaises(TypeError, a.extend, None) + + self.assertRaises(TypeError, a.extend) + + def test_insert(self): + a = self.type2test([0, 1, 2]) + a.insert(0, -2) + a.insert(1, -1) + a.insert(2, 0) + self.assertEqual(a, [-2, -1, 0, 0, 1, 2]) + + b = a[:] + b.insert(-2, "foo") + b.insert(-200, "left") + b.insert(200, "right") + self.assertEqual(b, 
self.type2test(["left",-2,-1,0,0,"foo",1,2,"right"])) + + self.assertRaises(TypeError, a.insert) + + def test_pop(self): + a = self.type2test([-1, 0, 1]) + a.pop() + self.assertEqual(a, [-1, 0]) + a.pop(0) + self.assertEqual(a, [0]) + self.assertRaises(IndexError, a.pop, 5) + a.pop(0) + self.assertEqual(a, []) + self.assertRaises(IndexError, a.pop) + self.assertRaises(TypeError, a.pop, 42, 42) + a = self.type2test([0, 10, 20, 30, 40]) + + def test_remove(self): + a = self.type2test([0, 0, 1]) + a.remove(1) + self.assertEqual(a, [0, 0]) + a.remove(0) + self.assertEqual(a, [0]) + a.remove(0) + self.assertEqual(a, []) + + self.assertRaises(ValueError, a.remove, 0) + + self.assertRaises(TypeError, a.remove) + + class BadExc(Exception): + pass + + class BadCmp: + def __eq__(self, other): + if other == 2: + raise BadExc() + return False + + a = self.type2test([0, 1, 2, 3]) + self.assertRaises(BadExc, a.remove, BadCmp()) + + class BadCmp2: + def __cmp__(self, other): + raise BadExc() + + d = self.type2test('abcdefghcij') + d.remove('c') + self.assertEqual(d, self.type2test('abdefghcij')) + d.remove('c') + self.assertEqual(d, self.type2test('abdefghij')) + self.assertRaises(ValueError, d.remove, 'c') + self.assertEqual(d, self.type2test('abdefghij')) + + # Handle comparison errors + d = self.type2test(['a', 'b', BadCmp2(), 'c']) + e = self.type2test(d) + self.assertRaises(BadExc, d.remove, 'c') + for x, y in zip(d, e): + # verify that original order and values are retained. + self.assert_(x is y) + + def test_count(self): + a = self.type2test([0, 1, 2])*3 + self.assertEqual(a.count(0), 3) + self.assertEqual(a.count(1), 3) + self.assertEqual(a.count(3), 0) + + self.assertRaises(TypeError, a.count) + + class BadExc(Exception): + pass + + class BadCmp: + def __eq__(self, other): + if other == 2: + raise BadExc() + return False + + self.assertRaises(BadExc, a.count, BadCmp()) + + def test_index(self): + u = self.type2test([0, 1]) + self.assertEqual(u.index(0), 0) + self.assertEqual(u.index(1), 1) + self.assertRaises(ValueError, u.index, 2) + + u = self.type2test([-2, -1, 0, 0, 1, 2]) + self.assertEqual(u.count(0), 2) + self.assertEqual(u.index(0), 2) + self.assertEqual(u.index(0, 2), 2) + self.assertEqual(u.index(-2, -10), 0) + self.assertEqual(u.index(0, 3), 3) + self.assertEqual(u.index(0, 3, 4), 3) + self.assertRaises(ValueError, u.index, 2, 0, -10) + + self.assertRaises(TypeError, u.index) + + class BadExc(Exception): + pass + + class BadCmp: + def __eq__(self, other): + if other == 2: + raise BadExc() + return False + + a = self.type2test([0, 1, 2, 3]) + self.assertRaises(BadExc, a.index, BadCmp()) + + a = self.type2test([-2, -1, 0, 0, 1, 2]) + self.assertEqual(a.index(0), 2) + self.assertEqual(a.index(0, 2), 2) + self.assertEqual(a.index(0, -4), 2) + self.assertEqual(a.index(-2, -10), 0) + self.assertEqual(a.index(0, 3), 3) + self.assertEqual(a.index(0, -3), 3) + self.assertEqual(a.index(0, 3, 4), 3) + self.assertEqual(a.index(0, -3, -2), 3) + self.assertEqual(a.index(0, -4*sys.maxint, 4*sys.maxint), 2) + self.assertRaises(ValueError, a.index, 0, 4*sys.maxint,-4*sys.maxint) + self.assertRaises(ValueError, a.index, 2, 0, -10) + a.remove(0) + self.assertRaises(ValueError, a.index, 2, 0, 4) + self.assertEqual(a, self.type2test([-2, -1, 0, 1, 2])) + + # Test modifying the list during index's iteration + class EvilCmp: + def __init__(self, victim): + self.victim = victim + def __eq__(self, other): + del self.victim[:] + return False + a = self.type2test() + a[:] = [EvilCmp(a) for _ in xrange(100)] + 
# This used to seg fault before patch #1005778 + self.assertRaises(ValueError, a.index, None) + + def test_reverse(self): + u = self.type2test([-2, -1, 0, 1, 2]) + u2 = u[:] + u.reverse() + self.assertEqual(u, [2, 1, 0, -1, -2]) + u.reverse() + self.assertEqual(u, u2) + + self.assertRaises(TypeError, u.reverse, 42) + + def test_sort(self): + u = self.type2test([1, 0]) + u.sort() + self.assertEqual(u, [0, 1]) + + u = self.type2test([2,1,0,-1,-2]) + u.sort() + self.assertEqual(u, self.type2test([-2,-1,0,1,2])) + + self.assertRaises(TypeError, u.sort, 42, 42) + + def revcmp(a, b): + return cmp(b, a) + u.sort(revcmp) + self.assertEqual(u, self.type2test([2,1,0,-1,-2])) + + # The following dumps core in unpatched Python 1.5: + def myComparison(x,y): + return cmp(x%3, y%7) + z = self.type2test(range(12)) + z.sort(myComparison) + + self.assertRaises(TypeError, z.sort, 2) + + def selfmodifyingComparison(x,y): + z.append(1) + return cmp(x, y) + self.assertRaises(ValueError, z.sort, selfmodifyingComparison) + + self.assertRaises(TypeError, z.sort, lambda x, y: 's') + + self.assertRaises(TypeError, z.sort, 42, 42, 42, 42) + + def test_slice(self): + u = self.type2test("spam") + u[:2] = "h" + self.assertEqual(u, list("ham")) + + def test_iadd(self): + super(CommonTest, self).test_iadd() + u = self.type2test([0, 1]) + u2 = u + u += [2, 3] + self.assert_(u is u2) + + u = self.type2test("spam") + u += "eggs" + self.assertEqual(u, self.type2test("spameggs")) + + self.assertRaises(TypeError, u.__iadd__, None) + + def test_imul(self): + u = self.type2test([0, 1]) + u *= 3 + self.assertEqual(u, self.type2test([0, 1, 0, 1, 0, 1])) + u *= 0 + self.assertEqual(u, self.type2test([])) + s = self.type2test([]) + oldid = id(s) + s *= 10 + self.assertEqual(id(s), oldid) + + def test_extendedslicing(self): + # subscript + a = self.type2test([0,1,2,3,4]) + + # deletion + del a[::2] + self.assertEqual(a, self.type2test([1,3])) + a = self.type2test(range(5)) + del a[1::2] + self.assertEqual(a, self.type2test([0,2,4])) + a = self.type2test(range(5)) + del a[1::-2] + self.assertEqual(a, self.type2test([0,2,3,4])) + a = self.type2test(range(10)) + del a[::1000] + self.assertEqual(a, self.type2test([1, 2, 3, 4, 5, 6, 7, 8, 9])) + # assignment + a = self.type2test(range(10)) + a[::2] = [-1]*5 + self.assertEqual(a, self.type2test([-1, 1, -1, 3, -1, 5, -1, 7, -1, 9])) + a = self.type2test(range(10)) + a[::-4] = [10]*3 + self.assertEqual(a, self.type2test([0, 10, 2, 3, 4, 10, 6, 7, 8 ,10])) + a = self.type2test(range(4)) + a[::-1] = a + self.assertEqual(a, self.type2test([3, 2, 1, 0])) + a = self.type2test(range(10)) + b = a[:] + c = a[:] + a[2:3] = self.type2test(["two", "elements"]) + b[slice(2,3)] = self.type2test(["two", "elements"]) + c[2:3:] = self.type2test(["two", "elements"]) + self.assertEqual(a, b) + self.assertEqual(a, c) + a = self.type2test(range(10)) + a[::2] = tuple(range(5)) + self.assertEqual(a, self.type2test([0, 1, 1, 3, 2, 5, 3, 7, 4, 9])) + + def test_constructor_exception_handling(self): + # Bug #1242657 + class F(object): + def __iter__(self): + yield 23 + def __len__(self): + raise KeyboardInterrupt + self.assertRaises(KeyboardInterrupt, list, F()) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,672 @@ +# tests common to dict and UserDict +import unittest +import 
UserDict + + +class BasicTestMappingProtocol(unittest.TestCase): + # This base class can be used to check that an object conforms to the + # mapping protocol + + # Functions that can be useful to override to adapt to dictionary + # semantics + type2test = None # which class is being tested (overwrite in subclasses) + + def _reference(self): + """Return a dictionary of values which are invariant by storage + in the object under test.""" + return {1:2, "key1":"value1", "key2":(1,2,3)} + def _empty_mapping(self): + """Return an empty mapping object""" + return self.type2test() + def _full_mapping(self, data): + """Return a mapping object with the value contained in data + dictionary""" + x = self._empty_mapping() + for key, value in data.items(): + x[key] = value + return x + + def __init__(self, *args, **kw): + unittest.TestCase.__init__(self, *args, **kw) + self.reference = self._reference().copy() + + # A (key, value) pair not in the mapping + key, value = self.reference.popitem() + self.other = {key:value} + + # A (key, value) pair in the mapping + key, value = self.reference.popitem() + self.inmapping = {key:value} + self.reference[key] = value + + def test_read(self): + # Test for read only operations on mapping + p = self._empty_mapping() + p1 = dict(p) #workaround for singleton objects + d = self._full_mapping(self.reference) + if d is p: + p = p1 + #Indexing + for key, value in self.reference.items(): + self.assertEqual(d[key], value) + knownkey = self.other.keys()[0] + self.failUnlessRaises(KeyError, lambda:d[knownkey]) + #len + self.assertEqual(len(p), 0) + self.assertEqual(len(d), len(self.reference)) + #has_key + for k in self.reference: + self.assert_(d.has_key(k)) + self.assert_(k in d) + for k in self.other: + self.failIf(d.has_key(k)) + self.failIf(k in d) + #cmp + self.assertEqual(cmp(p,p), 0) + self.assertEqual(cmp(d,d), 0) + self.assertEqual(cmp(p,d), -1) + self.assertEqual(cmp(d,p), 1) + #__non__zero__ + if p: self.fail("Empty mapping must compare to False") + if not d: self.fail("Full mapping must compare to True") + # keys(), items(), iterkeys() ... 
+ def check_iterandlist(iter, lst, ref): + self.assert_(hasattr(iter, 'next')) + self.assert_(hasattr(iter, '__iter__')) + x = list(iter) + self.assert_(set(x)==set(lst)==set(ref)) + check_iterandlist(d.iterkeys(), d.keys(), self.reference.keys()) + check_iterandlist(iter(d), d.keys(), self.reference.keys()) + check_iterandlist(d.itervalues(), d.values(), self.reference.values()) + check_iterandlist(d.iteritems(), d.items(), self.reference.items()) + #get + key, value = d.iteritems().next() + knownkey, knownvalue = self.other.iteritems().next() + self.assertEqual(d.get(key, knownvalue), value) + self.assertEqual(d.get(knownkey, knownvalue), knownvalue) + self.failIf(knownkey in d) + + def test_write(self): + # Test for write operations on mapping + p = self._empty_mapping() + #Indexing + for key, value in self.reference.items(): + p[key] = value + self.assertEqual(p[key], value) + for key in self.reference.keys(): + del p[key] + self.failUnlessRaises(KeyError, lambda:p[key]) + p = self._empty_mapping() + #update + p.update(self.reference) + self.assertEqual(dict(p), self.reference) + items = p.items() + p = self._empty_mapping() + p.update(items) + self.assertEqual(dict(p), self.reference) + d = self._full_mapping(self.reference) + #setdefault + key, value = d.iteritems().next() + knownkey, knownvalue = self.other.iteritems().next() + self.assertEqual(d.setdefault(key, knownvalue), value) + self.assertEqual(d[key], value) + self.assertEqual(d.setdefault(knownkey, knownvalue), knownvalue) + self.assertEqual(d[knownkey], knownvalue) + #pop + self.assertEqual(d.pop(knownkey), knownvalue) + self.failIf(knownkey in d) + self.assertRaises(KeyError, d.pop, knownkey) + default = 909 + d[knownkey] = knownvalue + self.assertEqual(d.pop(knownkey, default), knownvalue) + self.failIf(knownkey in d) + self.assertEqual(d.pop(knownkey, default), default) + #popitem + key, value = d.popitem() + self.failIf(key in d) + self.assertEqual(value, self.reference[key]) + p=self._empty_mapping() + self.assertRaises(KeyError, p.popitem) + + def test_constructor(self): + self.assertEqual(self._empty_mapping(), self._empty_mapping()) + + def test_bool(self): + self.assert_(not self._empty_mapping()) + self.assert_(self.reference) + self.assert_(bool(self._empty_mapping()) is False) + self.assert_(bool(self.reference) is True) + + def test_keys(self): + d = self._empty_mapping() + self.assertEqual(d.keys(), []) + d = self.reference + self.assert_(self.inmapping.keys()[0] in d.keys()) + self.assert_(self.other.keys()[0] not in d.keys()) + self.assertRaises(TypeError, d.keys, None) + + def test_values(self): + d = self._empty_mapping() + self.assertEqual(d.values(), []) + + self.assertRaises(TypeError, d.values, None) + + def test_items(self): + d = self._empty_mapping() + self.assertEqual(d.items(), []) + + self.assertRaises(TypeError, d.items, None) + + def test_len(self): + d = self._empty_mapping() + self.assertEqual(len(d), 0) + + def test_getitem(self): + d = self.reference + self.assertEqual(d[self.inmapping.keys()[0]], self.inmapping.values()[0]) + + self.assertRaises(TypeError, d.__getitem__) + + def test_update(self): + # mapping argument + d = self._empty_mapping() + d.update(self.other) + self.assertEqual(d.items(), self.other.items()) + + # No argument + d = self._empty_mapping() + d.update() + self.assertEqual(d, self._empty_mapping()) + + # item sequence + d = self._empty_mapping() + d.update(self.other.items()) + self.assertEqual(d.items(), self.other.items()) + + # Iterator + d = self._empty_mapping() + 
d.update(self.other.iteritems()) + self.assertEqual(d.items(), self.other.items()) + + # FIXME: Doesn't work with UserDict + # self.assertRaises((TypeError, AttributeError), d.update, None) + self.assertRaises((TypeError, AttributeError), d.update, 42) + + outerself = self + class SimpleUserDict: + def __init__(self): + self.d = outerself.reference + def keys(self): + return self.d.keys() + def __getitem__(self, i): + return self.d[i] + d.clear() + d.update(SimpleUserDict()) + i1 = d.items() + i2 = self.reference.items() + i1.sort() + i2.sort() + self.assertEqual(i1, i2) + + class Exc(Exception): pass + + d = self._empty_mapping() + class FailingUserDict: + def keys(self): + raise Exc + self.assertRaises(Exc, d.update, FailingUserDict()) + + d.clear() + + class FailingUserDict: + def keys(self): + class BogonIter: + def __init__(self): + self.i = 1 + def __iter__(self): + return self + def next(self): + if self.i: + self.i = 0 + return 'a' + raise Exc + return BogonIter() + def __getitem__(self, key): + return key + self.assertRaises(Exc, d.update, FailingUserDict()) + + class FailingUserDict: + def keys(self): + class BogonIter: + def __init__(self): + self.i = ord('a') + def __iter__(self): + return self + def next(self): + if self.i <= ord('z'): + rtn = chr(self.i) + self.i += 1 + return rtn + raise StopIteration + return BogonIter() + def __getitem__(self, key): + raise Exc + self.assertRaises(Exc, d.update, FailingUserDict()) + + d = self._empty_mapping() + class badseq(object): + def __iter__(self): + return self + def next(self): + raise Exc() + + self.assertRaises(Exc, d.update, badseq()) + + self.assertRaises(ValueError, d.update, [(1, 2, 3)]) + + # no test_fromkeys or test_copy as both os.environ and selves don't support it + + def test_get(self): + d = self._empty_mapping() + self.assert_(d.get(self.other.keys()[0]) is None) + self.assertEqual(d.get(self.other.keys()[0], 3), 3) + d = self.reference + self.assert_(d.get(self.other.keys()[0]) is None) + self.assertEqual(d.get(self.other.keys()[0], 3), 3) + self.assertEqual(d.get(self.inmapping.keys()[0]), self.inmapping.values()[0]) + self.assertEqual(d.get(self.inmapping.keys()[0], 3), self.inmapping.values()[0]) + self.assertRaises(TypeError, d.get) + self.assertRaises(TypeError, d.get, None, None, None) + + def test_setdefault(self): + d = self._empty_mapping() + self.assertRaises(TypeError, d.setdefault) + + def test_popitem(self): + d = self._empty_mapping() + self.assertRaises(KeyError, d.popitem) + self.assertRaises(TypeError, d.popitem, 42) + + def test_pop(self): + d = self._empty_mapping() + k, v = self.inmapping.items()[0] + d[k] = v + self.assertRaises(KeyError, d.pop, self.other.keys()[0]) + + self.assertEqual(d.pop(k), v) + self.assertEqual(len(d), 0) + + self.assertRaises(KeyError, d.pop, k) + + +class TestMappingProtocol(BasicTestMappingProtocol): + def test_constructor(self): + BasicTestMappingProtocol.test_constructor(self) + self.assert_(self._empty_mapping() is not self._empty_mapping()) + self.assertEqual(self.type2test(x=1, y=2), {"x": 1, "y": 2}) + + def test_bool(self): + BasicTestMappingProtocol.test_bool(self) + self.assert_(not self._empty_mapping()) + self.assert_(self._full_mapping({"x": "y"})) + self.assert_(bool(self._empty_mapping()) is False) + self.assert_(bool(self._full_mapping({"x": "y"})) is True) + + def test_keys(self): + BasicTestMappingProtocol.test_keys(self) + d = self._empty_mapping() + self.assertEqual(d.keys(), []) + d = self._full_mapping({'a': 1, 'b': 2}) + k = d.keys() + 
self.assert_('a' in k) + self.assert_('b' in k) + self.assert_('c' not in k) + + def test_values(self): + BasicTestMappingProtocol.test_values(self) + d = self._full_mapping({1:2}) + self.assertEqual(d.values(), [2]) + + def test_items(self): + BasicTestMappingProtocol.test_items(self) + + d = self._full_mapping({1:2}) + self.assertEqual(d.items(), [(1, 2)]) + + def test_has_key(self): + d = self._empty_mapping() + self.assert_(not d.has_key('a')) + d = self._full_mapping({'a': 1, 'b': 2}) + k = d.keys() + k.sort() + self.assertEqual(k, ['a', 'b']) + + self.assertRaises(TypeError, d.has_key) + + def test_contains(self): + d = self._empty_mapping() + self.assert_(not ('a' in d)) + self.assert_('a' not in d) + d = self._full_mapping({'a': 1, 'b': 2}) + self.assert_('a' in d) + self.assert_('b' in d) + self.assert_('c' not in d) + + self.assertRaises(TypeError, d.__contains__) + + def test_len(self): + BasicTestMappingProtocol.test_len(self) + d = self._full_mapping({'a': 1, 'b': 2}) + self.assertEqual(len(d), 2) + + def test_getitem(self): + BasicTestMappingProtocol.test_getitem(self) + d = self._full_mapping({'a': 1, 'b': 2}) + self.assertEqual(d['a'], 1) + self.assertEqual(d['b'], 2) + d['c'] = 3 + d['a'] = 4 + self.assertEqual(d['c'], 3) + self.assertEqual(d['a'], 4) + del d['b'] + self.assertEqual(d, {'a': 4, 'c': 3}) + + self.assertRaises(TypeError, d.__getitem__) + + def test_clear(self): + d = self._full_mapping({1:1, 2:2, 3:3}) + d.clear() + self.assertEqual(d, {}) + + self.assertRaises(TypeError, d.clear, None) + + def test_update(self): + BasicTestMappingProtocol.test_update(self) + # mapping argument + d = self._empty_mapping() + d.update({1:100}) + d.update({2:20}) + d.update({1:1, 2:2, 3:3}) + self.assertEqual(d, {1:1, 2:2, 3:3}) + + # no argument + d.update() + self.assertEqual(d, {1:1, 2:2, 3:3}) + + # keyword arguments + d = self._empty_mapping() + d.update(x=100) + d.update(y=20) + d.update(x=1, y=2, z=3) + self.assertEqual(d, {"x":1, "y":2, "z":3}) + + # item sequence + d = self._empty_mapping() + d.update([("x", 100), ("y", 20)]) + self.assertEqual(d, {"x":100, "y":20}) + + # Both item sequence and keyword arguments + d = self._empty_mapping() + d.update([("x", 100), ("y", 20)], x=1, y=2) + self.assertEqual(d, {"x":1, "y":2}) + + # iterator + d = self._full_mapping({1:3, 2:4}) + d.update(self._full_mapping({1:2, 3:4, 5:6}).iteritems()) + self.assertEqual(d, {1:2, 2:4, 3:4, 5:6}) + + class SimpleUserDict: + def __init__(self): + self.d = {1:1, 2:2, 3:3} + def keys(self): + return self.d.keys() + def __getitem__(self, i): + return self.d[i] + d.clear() + d.update(SimpleUserDict()) + self.assertEqual(d, {1:1, 2:2, 3:3}) + + def test_fromkeys(self): + self.assertEqual(self.type2test.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) + d = self._empty_mapping() + self.assert_(not(d.fromkeys('abc') is d)) + self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) + self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0}) + self.assertEqual(d.fromkeys([]), {}) + def g(): + yield 1 + self.assertEqual(d.fromkeys(g()), {1:None}) + self.assertRaises(TypeError, {}.fromkeys, 3) + class dictlike(self.type2test): pass + self.assertEqual(dictlike.fromkeys('a'), {'a':None}) + self.assertEqual(dictlike().fromkeys('a'), {'a':None}) + self.assert_(dictlike.fromkeys('a').__class__ is dictlike) + self.assert_(dictlike().fromkeys('a').__class__ is dictlike) + # FIXME: the following won't work with UserDict, because it's an old style class + # self.assert_(type(dictlike.fromkeys('a')) is 
dictlike) + class mydict(self.type2test): + def __new__(cls): + return UserDict.UserDict() + ud = mydict.fromkeys('ab') + self.assertEqual(ud, {'a':None, 'b':None}) + # FIXME: the following won't work with UserDict, because it's an old style class + # self.assert_(isinstance(ud, UserDict.UserDict)) + self.assertRaises(TypeError, dict.fromkeys) + + class Exc(Exception): pass + + class baddict1(self.type2test): + def __init__(self): + raise Exc() + + self.assertRaises(Exc, baddict1.fromkeys, [1]) + + class BadSeq(object): + def __iter__(self): + return self + def next(self): + raise Exc() + + self.assertRaises(Exc, self.type2test.fromkeys, BadSeq()) + + class baddict2(self.type2test): + def __setitem__(self, key, value): + raise Exc() + + self.assertRaises(Exc, baddict2.fromkeys, [1]) + + def test_copy(self): + d = self._full_mapping({1:1, 2:2, 3:3}) + self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) + d = self._empty_mapping() + self.assertEqual(d.copy(), d) + self.assert_(isinstance(d.copy(), d.__class__)) + self.assertRaises(TypeError, d.copy, None) + + def test_get(self): + BasicTestMappingProtocol.test_get(self) + d = self._empty_mapping() + self.assert_(d.get('c') is None) + self.assertEqual(d.get('c', 3), 3) + d = self._full_mapping({'a' : 1, 'b' : 2}) + self.assert_(d.get('c') is None) + self.assertEqual(d.get('c', 3), 3) + self.assertEqual(d.get('a'), 1) + self.assertEqual(d.get('a', 3), 1) + + def test_setdefault(self): + BasicTestMappingProtocol.test_setdefault(self) + d = self._empty_mapping() + self.assert_(d.setdefault('key0') is None) + d.setdefault('key0', []) + self.assert_(d.setdefault('key0') is None) + d.setdefault('key', []).append(3) + self.assertEqual(d['key'][0], 3) + d.setdefault('key', []).append(4) + self.assertEqual(len(d['key']), 2) + + def test_popitem(self): + BasicTestMappingProtocol.test_popitem(self) + for copymode in -1, +1: + # -1: b has same structure as a + # +1: b is a.copy() + for log2size in range(4): # XXX 12 too large for PyPy + size = 2**log2size + a = self._empty_mapping() + b = self._empty_mapping() + for i in range(size): + a[repr(i)] = i + if copymode < 0: + b[repr(i)] = i + if copymode > 0: + b = a.copy() + for i in range(size): + ka, va = ta = a.popitem() + self.assertEqual(va, int(ka)) + kb, vb = tb = b.popitem() + self.assertEqual(vb, int(kb)) + self.assert_(not(copymode < 0 and ta != tb)) + self.assert_(not a) + self.assert_(not b) + + def test_pop(self): + BasicTestMappingProtocol.test_pop(self) + + # Tests for pop with specified key + d = self._empty_mapping() + k, v = 'abc', 'def' + + # verify longs/ints get same value when key > 32 bits (for 64-bit archs) + # see SF bug #689659 + x = 4503599627370496L + y = 4503599627370496 + h = self._full_mapping({x: 'anything', y: 'something else'}) + self.assertEqual(h[x], h[y]) + + self.assertEqual(d.pop(k, v), v) + d[k] = v + self.assertEqual(d.pop(k, 1), v) + + +class TestHashMappingProtocol(TestMappingProtocol): + + def test_getitem(self): + TestMappingProtocol.test_getitem(self) + class Exc(Exception): pass + + class BadEq(object): + def __eq__(self, other): + raise Exc() + + d = self._empty_mapping() + d[BadEq()] = 42 + self.assertRaises(KeyError, d.__getitem__, 23) + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + d = self._empty_mapping() + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.__getitem__, x) + + def test_fromkeys(self): + TestMappingProtocol.test_fromkeys(self) + class mydict(self.type2test): + 
def __new__(cls): + return UserDict.UserDict() + ud = mydict.fromkeys('ab') + self.assertEqual(ud, {'a':None, 'b':None}) + self.assert_(isinstance(ud, UserDict.UserDict)) + + def test_pop(self): + TestMappingProtocol.test_pop(self) + + class Exc(Exception): pass + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + d = self._empty_mapping() + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.pop, x) + + def test_mutatingiteration(self): + d = self._empty_mapping() + d[1] = 1 + try: + for i in d: + d[i+1] = 1 + except RuntimeError: + pass + else: + self.fail("changing dict size during iteration doesn't raise Error") + + def test_repr(self): + d = self._empty_mapping() + self.assertEqual(repr(d), '{}') + d[1] = 2 + self.assertEqual(repr(d), '{1: 2}') + d = self._empty_mapping() + d[1] = d + self.assertEqual(repr(d), '{1: {...}}') + + class Exc(Exception): pass + + class BadRepr(object): + def __repr__(self): + raise Exc() + + d = self._full_mapping({1: BadRepr()}) + self.assertRaises(Exc, repr, d) + + def test_le(self): + self.assert_(not (self._empty_mapping() < self._empty_mapping())) + self.assert_(not (self._full_mapping({1: 2}) < self._full_mapping({1L: 2L}))) + + class Exc(Exception): pass + + class BadCmp(object): + def __cmp__(self, other): + raise Exc() + + d1 = self._full_mapping({BadCmp(): 1}) + d2 = self._full_mapping({1: 1}) + try: + d1 < d2 + except Exc: + pass + else: + self.fail("< didn't raise Exc") + + def test_setdefault(self): + TestMappingProtocol.test_setdefault(self) + + class Exc(Exception): pass + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + d = self._empty_mapping() + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.setdefault, x, []) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,1007 @@ +# Notes about changes in this file: +# a prefix of "dont_" means the test makes no sense, +# because we don't use cPickle at all. +# "xxx_" means it works and can be done, but takes ages. +# When PyPy gets really fast, we should remove "xxx_". + +import unittest +import pickle +import cPickle +import pickletools +import copy_reg + +from test.test_support import TestFailed, have_unicode, TESTFN, \ + run_with_locale + +# Tests that try a number of pickle protocols should have a +# for proto in protocols: +# kind of outer loop. +assert pickle.HIGHEST_PROTOCOL == cPickle.HIGHEST_PROTOCOL == 2 +protocols = range(pickle.HIGHEST_PROTOCOL + 1) + + +# Return True if opcode code appears in the pickle, else False. +def opcode_in_pickle(code, pickle): + for op, dummy, dummy in pickletools.genops(pickle): + if op.code == code: + return True + return False + +# Return the number of times opcode code appears in pickle. +def count_opcode(code, pickle): + n = 0 + for op, dummy, dummy in pickletools.genops(pickle): + if op.code == code: + n += 1 + return n + +# We can't very well test the extension registry without putting known stuff +# in it, but we have to be careful to restore its original state. 
Code +# should do this: +# +# e = ExtensionSaver(extension_code) +# try: +# fiddle w/ the extension registry's stuff for extension_code +# finally: +# e.restore() + +class ExtensionSaver: + # Remember current registration for code (if any), and remove it (if + # there is one). + def __init__(self, code): + self.code = code + if code in copy_reg._inverted_registry: + self.pair = copy_reg._inverted_registry[code] + copy_reg.remove_extension(self.pair[0], self.pair[1], code) + else: + self.pair = None + + # Restore previous registration for code. + def restore(self): + code = self.code + curpair = copy_reg._inverted_registry.get(code) + if curpair is not None: + copy_reg.remove_extension(curpair[0], curpair[1], code) + pair = self.pair + if pair is not None: + copy_reg.add_extension(pair[0], pair[1], code) + +class C: + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + +import __main__ +__main__.C = C +C.__module__ = "__main__" + +class myint(int): + def __init__(self, x): + self.str = str(x) + +class initarg(C): + + def __init__(self, a, b): + self.a = a + self.b = b + + def __getinitargs__(self): + return self.a, self.b + +class metaclass(type): + pass + +class use_metaclass(object): + __metaclass__ = metaclass + +# DATA0 .. DATA2 are the pickles we expect under the various protocols, for +# the object returned by create_data(). + +# break into multiple strings to avoid confusing font-lock-mode +DATA0 = """(lp1 +I0 +aL1L +aF2 +ac__builtin__ +complex +p2 +""" + \ +"""(F3 +F0 +tRp3 +aI1 +aI-1 +aI255 +aI-255 +aI-256 +aI65535 +aI-65535 +aI-65536 +aI2147483647 +aI-2147483647 +aI-2147483648 +a""" + \ +"""(S'abc' +p4 +g4 +""" + \ +"""(i__main__ +C +p5 +""" + \ +"""(dp6 +S'foo' +p7 +I1 +sS'bar' +p8 +I2 +sbg5 +tp9 +ag9 +aI5 +a. +""" + +# Disassembly of DATA0. +DATA0_DIS = """\ + 0: ( MARK + 1: l LIST (MARK at 0) + 2: p PUT 1 + 5: I INT 0 + 8: a APPEND + 9: L LONG 1L + 13: a APPEND + 14: F FLOAT 2.0 + 17: a APPEND + 18: c GLOBAL '__builtin__ complex' + 39: p PUT 2 + 42: ( MARK + 43: F FLOAT 3.0 + 46: F FLOAT 0.0 + 49: t TUPLE (MARK at 42) + 50: R REDUCE + 51: p PUT 3 + 54: a APPEND + 55: I INT 1 + 58: a APPEND + 59: I INT -1 + 63: a APPEND + 64: I INT 255 + 69: a APPEND + 70: I INT -255 + 76: a APPEND + 77: I INT -256 + 83: a APPEND + 84: I INT 65535 + 91: a APPEND + 92: I INT -65535 + 100: a APPEND + 101: I INT -65536 + 109: a APPEND + 110: I INT 2147483647 + 122: a APPEND + 123: I INT -2147483647 + 136: a APPEND + 137: I INT -2147483648 + 150: a APPEND + 151: ( MARK + 152: S STRING 'abc' + 159: p PUT 4 + 162: g GET 4 + 165: ( MARK + 166: i INST '__main__ C' (MARK at 165) + 178: p PUT 5 + 181: ( MARK + 182: d DICT (MARK at 181) + 183: p PUT 6 + 186: S STRING 'foo' + 193: p PUT 7 + 196: I INT 1 + 199: s SETITEM + 200: S STRING 'bar' + 207: p PUT 8 + 210: I INT 2 + 213: s SETITEM + 214: b BUILD + 215: g GET 5 + 218: t TUPLE (MARK at 151) + 219: p PUT 9 + 222: a APPEND + 223: g GET 9 + 226: a APPEND + 227: I INT 5 + 230: a APPEND + 231: . STOP +highest protocol among opcodes = 0 +""" + +DATA1 = (']q\x01(K\x00L1L\nG@\x00\x00\x00\x00\x00\x00\x00' + 'c__builtin__\ncomplex\nq\x02(G@\x08\x00\x00\x00\x00\x00' + '\x00G\x00\x00\x00\x00\x00\x00\x00\x00tRq\x03K\x01J\xff\xff' + '\xff\xffK\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xff' + 'J\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00' + '\x00\x80J\x00\x00\x00\x80(U\x03abcq\x04h\x04(c__main__\n' + 'C\nq\x05oq\x06}q\x07(U\x03fooq\x08K\x01U\x03barq\tK\x02ubh' + '\x06tq\nh\nK\x05e.' + ) + +# Disassembly of DATA1. 
+DATA1_DIS = """\ + 0: ] EMPTY_LIST + 1: q BINPUT 1 + 3: ( MARK + 4: K BININT1 0 + 6: L LONG 1L + 10: G BINFLOAT 2.0 + 19: c GLOBAL '__builtin__ complex' + 40: q BINPUT 2 + 42: ( MARK + 43: G BINFLOAT 3.0 + 52: G BINFLOAT 0.0 + 61: t TUPLE (MARK at 42) + 62: R REDUCE + 63: q BINPUT 3 + 65: K BININT1 1 + 67: J BININT -1 + 72: K BININT1 255 + 74: J BININT -255 + 79: J BININT -256 + 84: M BININT2 65535 + 87: J BININT -65535 + 92: J BININT -65536 + 97: J BININT 2147483647 + 102: J BININT -2147483647 + 107: J BININT -2147483648 + 112: ( MARK + 113: U SHORT_BINSTRING 'abc' + 118: q BINPUT 4 + 120: h BINGET 4 + 122: ( MARK + 123: c GLOBAL '__main__ C' + 135: q BINPUT 5 + 137: o OBJ (MARK at 122) + 138: q BINPUT 6 + 140: } EMPTY_DICT + 141: q BINPUT 7 + 143: ( MARK + 144: U SHORT_BINSTRING 'foo' + 149: q BINPUT 8 + 151: K BININT1 1 + 153: U SHORT_BINSTRING 'bar' + 158: q BINPUT 9 + 160: K BININT1 2 + 162: u SETITEMS (MARK at 143) + 163: b BUILD + 164: h BINGET 6 + 166: t TUPLE (MARK at 112) + 167: q BINPUT 10 + 169: h BINGET 10 + 171: K BININT1 5 + 173: e APPENDS (MARK at 3) + 174: . STOP +highest protocol among opcodes = 1 +""" + +DATA2 = ('\x80\x02]q\x01(K\x00\x8a\x01\x01G@\x00\x00\x00\x00\x00\x00\x00' + 'c__builtin__\ncomplex\nq\x02G@\x08\x00\x00\x00\x00\x00\x00G\x00' + '\x00\x00\x00\x00\x00\x00\x00\x86Rq\x03K\x01J\xff\xff\xff\xffK' + '\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xff' + 'J\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00' + '\x80(U\x03abcq\x04h\x04(c__main__\nC\nq\x05oq\x06}q\x07(U\x03foo' + 'q\x08K\x01U\x03barq\tK\x02ubh\x06tq\nh\nK\x05e.') + +# Disassembly of DATA2. +DATA2_DIS = """\ + 0: \x80 PROTO 2 + 2: ] EMPTY_LIST + 3: q BINPUT 1 + 5: ( MARK + 6: K BININT1 0 + 8: \x8a LONG1 1L + 11: G BINFLOAT 2.0 + 20: c GLOBAL '__builtin__ complex' + 41: q BINPUT 2 + 43: G BINFLOAT 3.0 + 52: G BINFLOAT 0.0 + 61: \x86 TUPLE2 + 62: R REDUCE + 63: q BINPUT 3 + 65: K BININT1 1 + 67: J BININT -1 + 72: K BININT1 255 + 74: J BININT -255 + 79: J BININT -256 + 84: M BININT2 65535 + 87: J BININT -65535 + 92: J BININT -65536 + 97: J BININT 2147483647 + 102: J BININT -2147483647 + 107: J BININT -2147483648 + 112: ( MARK + 113: U SHORT_BINSTRING 'abc' + 118: q BINPUT 4 + 120: h BINGET 4 + 122: ( MARK + 123: c GLOBAL '__main__ C' + 135: q BINPUT 5 + 137: o OBJ (MARK at 122) + 138: q BINPUT 6 + 140: } EMPTY_DICT + 141: q BINPUT 7 + 143: ( MARK + 144: U SHORT_BINSTRING 'foo' + 149: q BINPUT 8 + 151: K BININT1 1 + 153: U SHORT_BINSTRING 'bar' + 158: q BINPUT 9 + 160: K BININT1 2 + 162: u SETITEMS (MARK at 143) + 163: b BUILD + 164: h BINGET 6 + 166: t TUPLE (MARK at 112) + 167: q BINPUT 10 + 169: h BINGET 10 + 171: K BININT1 5 + 173: e APPENDS (MARK at 5) + 174: . STOP +highest protocol among opcodes = 2 +""" + +def create_data(): + c = C() + c.foo = 1 + c.bar = 2 + x = [0, 1L, 2.0, 3.0+0j] + # Append some integer test cases at cPickle.c's internal size + # cutoffs. + uint1max = 0xff + uint2max = 0xffff + int4max = 0x7fffffff + x.extend([1, -1, + uint1max, -uint1max, -uint1max-1, + uint2max, -uint2max, -uint2max-1, + int4max, -int4max, -int4max-1]) + y = ('abc', 'abc', c, c) + x.append(y) + x.append(y) + x.append(5) + return x + +class AbstractPickleTests(unittest.TestCase): + # Subclass must define self.dumps, self.loads, self.error. 
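The comment just below ("Subclass must define self.dumps, self.loads, self.error") is the whole contract of this abstract test class: it never provides a pickle implementation itself. As a rough, hypothetical sketch of how a concrete test module can bind those three names (assuming the pure-Python pickle module; the class name and the choice of error are illustrative and not part of the committed file):

    import pickle

    class PurePythonPickleTests(AbstractPickleTests):
        # Hypothetical choice: the exception the pure-Python loader
        # happens to raise for junk input such as the 'garyp' string
        # used in test_garyp().
        error = KeyError

        def dumps(self, arg, proto=0):
            # serialize with the requested protocol
            return pickle.dumps(arg, proto)

        def loads(self, buf):
            # round-trip the byte string back to an object
            return pickle.loads(buf)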
+ + _testdata = create_data() + + def setUp(self): + pass + + def test_misc(self): + # test various datatypes not tested by testdata + for proto in protocols: + x = myint(4) + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + + x = (1, ()) + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + + x = initarg(1, x) + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + + # XXX test __reduce__ protocol? + + def test_roundtrip_equality(self): + expected = self._testdata + for proto in protocols: + s = self.dumps(expected, proto) + got = self.loads(s) + self.assertEqual(expected, got) + + def test_load_from_canned_string(self): + expected = self._testdata + for canned in DATA0, DATA1, DATA2: + got = self.loads(canned) + self.assertEqual(expected, got) + + # There are gratuitous differences between pickles produced by + # pickle and cPickle, largely because cPickle starts PUT indices at + # 1 and pickle starts them at 0. See XXX comment in cPickle's put2() -- + # there's a comment with an exclamation point there whose meaning + # is a mystery. cPickle also suppresses PUT for objects with a refcount + # of 1. + def dont_test_disassembly(self): + from cStringIO import StringIO + from pickletools import dis + + for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS): + s = self.dumps(self._testdata, proto) + filelike = StringIO() + dis(s, out=filelike) + got = filelike.getvalue() + self.assertEqual(expected, got) + + def test_recursive_list(self): + l = [] + l.append(l) + for proto in protocols: + s = self.dumps(l, proto) + x = self.loads(s) + self.assertEqual(len(x), 1) + self.assert_(x is x[0]) + + def test_recursive_dict(self): + d = {} + d[1] = d + for proto in protocols: + s = self.dumps(d, proto) + x = self.loads(s) + self.assertEqual(x.keys(), [1]) + self.assert_(x[1] is x) + + def test_recursive_inst(self): + i = C() + i.attr = i + for proto in protocols: + s = self.dumps(i, 2) + x = self.loads(s) + self.assertEqual(dir(x), dir(i)) + self.assert_(x.attr is x) + + def test_recursive_multi(self): + l = [] + d = {1:l} + i = C() + i.attr = d + l.append(i) + for proto in protocols: + s = self.dumps(l, proto) + x = self.loads(s) + self.assertEqual(len(x), 1) + self.assertEqual(dir(x[0]), dir(i)) + self.assertEqual(x[0].attr.keys(), [1]) + self.assert_(x[0].attr[1] is x) + + def test_garyp(self): + self.assertRaises(self.error, self.loads, 'garyp') + + def test_insecure_strings(self): + insecure = ["abc", "2 + 2", # not quoted + #"'abc' + 'def'", # not a single quoted string + "'abc", # quote is not closed + "'abc\"", # open quote and close quote don't match + "'abc' ?", # junk after close quote + "'\\'", # trailing backslash + # some tests of the quoting rules + #"'abc\"\''", + #"'\\\\a\'\'\'\\\'\\\\\''", + ] + for s in insecure: + buf = "S" + s + "\012p0\012." + self.assertRaises(ValueError, self.loads, buf) + + if have_unicode: + def test_unicode(self): + endcases = [unicode(''), unicode('<\\u>'), unicode('<\\\u1234>'), + unicode('<\n>'), unicode('<\\>')] + for proto in protocols: + for u in endcases: + p = self.dumps(u, proto) + u2 = self.loads(p) + self.assertEqual(u2, u) + + def test_ints(self): + import sys + for proto in protocols: + n = sys.maxint + while n: + for expected in (-n, n): + s = self.dumps(expected, proto) + n2 = self.loads(s) + self.assertEqual(expected, n2) + n = n >> 1 + + def test_maxint64(self): + maxint64 = (1L << 63) - 1 + data = 'I' + str(maxint64) + '\n.' 
+ got = self.loads(data) + self.assertEqual(got, maxint64) + + # Try too with a bogus literal. + data = 'I' + str(maxint64) + 'JUNK\n.' + self.assertRaises(ValueError, self.loads, data) + + def xxx_test_long(self): + for proto in protocols: + # 256 bytes is where LONG4 begins. + for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257: + nbase = 1L << nbits + for npos in nbase-1, nbase, nbase+1: + for n in npos, -npos: + pickle = self.dumps(n, proto) + got = self.loads(pickle) + self.assertEqual(n, got) + # Try a monster. This is quadratic-time in protos 0 & 1, so don't + # bother with those. + nbase = long("deadbeeffeedface", 16) + nbase += nbase << 1000000 + for n in nbase, -nbase: + p = self.dumps(n, 2) + got = self.loads(p) + self.assertEqual(n, got) + + @run_with_locale('LC_ALL', 'de_DE', 'fr_FR') + def test_float_format(self): + # make sure that floats are formatted locale independent + self.assertEqual(self.dumps(1.2)[0:3], 'F1.') + + def test_reduce(self): + pass + + def test_getinitargs(self): + pass + + def test_metaclass(self): + a = use_metaclass() + for proto in protocols: + s = self.dumps(a, proto) + b = self.loads(s) + self.assertEqual(a.__class__, b.__class__) + + def test_structseq(self): + import time + import os + + t = time.localtime() + for proto in protocols: + s = self.dumps(t, proto) + u = self.loads(s) + self.assertEqual(t, u) + if hasattr(os, "stat"): + t = os.stat(os.curdir) + s = self.dumps(t, proto) + u = self.loads(s) + self.assertEqual(t, u) + if hasattr(os, "statvfs"): + t = os.statvfs(os.curdir) + s = self.dumps(t, proto) + u = self.loads(s) + self.assertEqual(t, u) + + # Tests for protocol 2 + + def test_proto(self): + build_none = pickle.NONE + pickle.STOP + for proto in protocols: + expected = build_none + if proto >= 2: + expected = pickle.PROTO + chr(proto) + expected + p = self.dumps(None, proto) + self.assertEqual(p, expected) + + oob = protocols[-1] + 1 # a future protocol + badpickle = pickle.PROTO + chr(oob) + build_none + try: + self.loads(badpickle) + except ValueError, detail: + self.failUnless(str(detail).startswith( + "unsupported pickle protocol")) + else: + self.fail("expected bad protocol number to raise ValueError") + + def test_long1(self): + x = 12345678910111213141516178920L + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2) + + def test_long4(self): + x = 12345678910111213141516178920L << (256*8) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2) + + def test_short_tuples(self): + # Map (proto, len(tuple)) to expected opcode. + expected_opcode = {(0, 0): pickle.TUPLE, + (0, 1): pickle.TUPLE, + (0, 2): pickle.TUPLE, + (0, 3): pickle.TUPLE, + (0, 4): pickle.TUPLE, + + (1, 0): pickle.EMPTY_TUPLE, + (1, 1): pickle.TUPLE, + (1, 2): pickle.TUPLE, + (1, 3): pickle.TUPLE, + (1, 4): pickle.TUPLE, + + (2, 0): pickle.EMPTY_TUPLE, + (2, 1): pickle.TUPLE1, + (2, 2): pickle.TUPLE2, + (2, 3): pickle.TUPLE3, + (2, 4): pickle.TUPLE, + } + a = () + b = (1,) + c = (1, 2) + d = (1, 2, 3) + e = (1, 2, 3, 4) + for proto in protocols: + for x in a, b, c, d, e: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y, (proto, x, s, y)) + expected = expected_opcode[proto, len(x)] + self.assertEqual(opcode_in_pickle(expected, s), True) + + def test_singletons(self): + # Map (proto, singleton) to expected opcode. 
+ expected_opcode = {(0, None): pickle.NONE, + (1, None): pickle.NONE, + (2, None): pickle.NONE, + + (0, True): pickle.INT, + (1, True): pickle.INT, + (2, True): pickle.NEWTRUE, + + (0, False): pickle.INT, + (1, False): pickle.INT, + (2, False): pickle.NEWFALSE, + } + for proto in protocols: + for x in None, False, True: + s = self.dumps(x, proto) + y = self.loads(s) + self.assert_(x is y, (proto, x, s, y)) + expected = expected_opcode[proto, x] + self.assertEqual(opcode_in_pickle(expected, s), True) + + def test_newobj_tuple(self): + x = MyTuple([1, 2, 3]) + x.foo = 42 + x.bar = "hello" + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(tuple(x), tuple(y)) + self.assertEqual(x.__dict__, y.__dict__) + + def test_newobj_list(self): + x = MyList([1, 2, 3]) + x.foo = 42 + x.bar = "hello" + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.__dict__, y.__dict__) + + def test_newobj_generic(self): + for proto in protocols: + for C in myclasses: + B = C.__base__ + x = C(C.sample) + x.foo = 42 + s = self.dumps(x, proto) + y = self.loads(s) + detail = (proto, C, B, x, y, type(y)) + self.assertEqual(B(x), B(y), detail) + self.assertEqual(x.__dict__, y.__dict__, detail) + + # Register a type with copy_reg, with extension code extcode. Pickle + # an object of that type. Check that the resulting pickle uses opcode + # (EXT[124]) under proto 2, and not in proto 1. + + def produce_global_ext(self, extcode, opcode): + e = ExtensionSaver(extcode) + try: + copy_reg.add_extension(__name__, "MyList", extcode) + x = MyList([1, 2, 3]) + x.foo = 42 + x.bar = "hello" + + # Dump using protocol 1 for comparison. + s1 = self.dumps(x, 1) + self.assert_(__name__ in s1) + self.assert_("MyList" in s1) + self.assertEqual(opcode_in_pickle(opcode, s1), False) + + y = self.loads(s1) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.__dict__, y.__dict__) + + # Dump using protocol 2 for test. 
+ s2 = self.dumps(x, 2) + self.assert_(__name__ not in s2) + self.assert_("MyList" not in s2) + self.assertEqual(opcode_in_pickle(opcode, s2), True) + + y = self.loads(s2) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.__dict__, y.__dict__) + + finally: + e.restore() + + def test_global_ext1(self): + self.produce_global_ext(0x00000001, pickle.EXT1) # smallest EXT1 code + self.produce_global_ext(0x000000ff, pickle.EXT1) # largest EXT1 code + + def test_global_ext2(self): + self.produce_global_ext(0x00000100, pickle.EXT2) # smallest EXT2 code + self.produce_global_ext(0x0000ffff, pickle.EXT2) # largest EXT2 code + self.produce_global_ext(0x0000abcd, pickle.EXT2) # check endianness + + def test_global_ext4(self): + self.produce_global_ext(0x00010000, pickle.EXT4) # smallest EXT4 code + self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code + self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness + + def xxx_test_list_chunking(self): + n = 10 # too small to chunk + x = range(n) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + num_appends = count_opcode(pickle.APPENDS, s) + self.assertEqual(num_appends, proto > 0) + + n = 2500 # expect at least two chunks when proto > 0 + x = range(n) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + num_appends = count_opcode(pickle.APPENDS, s) + if proto == 0: + self.assertEqual(num_appends, 0) + else: + self.failUnless(num_appends >= 2) + + def xxx_test_dict_chunking(self): + n = 10 # too small to chunk + x = dict.fromkeys(range(n)) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + num_setitems = count_opcode(pickle.SETITEMS, s) + self.assertEqual(num_setitems, proto > 0) + + n = 2500 # expect at least two chunks when proto > 0 + x = dict.fromkeys(range(n)) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) + self.assertEqual(x, y) + num_setitems = count_opcode(pickle.SETITEMS, s) + if proto == 0: + self.assertEqual(num_setitems, 0) + else: + self.failUnless(num_setitems >= 2) + + def test_simple_newobj(self): + x = object.__new__(SimpleNewObj) # avoid __init__ + x.abc = 666 + for proto in protocols: + s = self.dumps(x, proto) + self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s), proto >= 2) + y = self.loads(s) # will raise TypeError if __init__ called + self.assertEqual(y.abc, 666) + self.assertEqual(x.__dict__, y.__dict__) + + def test_newobj_list_slots(self): + x = SlotList([1, 2, 3]) + x.foo = 42 + x.bar = "hello" + s = self.dumps(x, 2) + y = self.loads(s) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.__dict__, y.__dict__) + self.assertEqual(x.foo, y.foo) + self.assertEqual(x.bar, y.bar) + + def test_reduce_overrides_default_reduce_ex(self): + for proto in 0, 1, 2: + x = REX_one() + self.assertEqual(x._reduce_called, 0) + s = self.dumps(x, proto) + self.assertEqual(x._reduce_called, 1) + y = self.loads(s) + self.assertEqual(y._reduce_called, 0) + + def test_reduce_ex_called(self): + for proto in 0, 1, 2: + x = REX_two() + self.assertEqual(x._proto, None) + s = self.dumps(x, proto) + self.assertEqual(x._proto, proto) + y = self.loads(s) + self.assertEqual(y._proto, None) + + def test_reduce_ex_overrides_reduce(self): + for proto in 0, 1, 2: + x = REX_three() + self.assertEqual(x._proto, None) + s = self.dumps(x, proto) + self.assertEqual(x._proto, proto) + y = self.loads(s) + self.assertEqual(y._proto, None) + + def 
test_reduce_ex_calls_base(self): + for proto in 0, 1, 2: + x = REX_four() + self.assertEqual(x._proto, None) + s = self.dumps(x, proto) + self.assertEqual(x._proto, proto) + y = self.loads(s) + self.assertEqual(y._proto, proto) + + def test_reduce_calls_base(self): + for proto in 0, 1, 2: + x = REX_five() + self.assertEqual(x._reduce_called, 0) + s = self.dumps(x, proto) + self.assertEqual(x._reduce_called, 1) + y = self.loads(s) + self.assertEqual(y._reduce_called, 1) + +# Test classes for reduce_ex + +class REX_one(object): + _reduce_called = 0 + def __reduce__(self): + self._reduce_called = 1 + return REX_one, () + # No __reduce_ex__ here, but inheriting it from object + +class REX_two(object): + _proto = None + def __reduce_ex__(self, proto): + self._proto = proto + return REX_two, () + # No __reduce__ here, but inheriting it from object + +class REX_three(object): + _proto = None + def __reduce_ex__(self, proto): + self._proto = proto + return REX_two, () + def __reduce__(self): + raise TestFailed, "This __reduce__ shouldn't be called" + +class REX_four(object): + _proto = None + def __reduce_ex__(self, proto): + self._proto = proto + return object.__reduce_ex__(self, proto) + # Calling base class method should succeed + +class REX_five(object): + _reduce_called = 0 + def __reduce__(self): + self._reduce_called = 1 + return object.__reduce__(self) + # This one used to fail with infinite recursion + +# Test classes for newobj + +class MyInt(int): + sample = 1 + +class MyLong(long): + sample = 1L + +class MyFloat(float): + sample = 1.0 + +class MyComplex(complex): + sample = 1.0 + 0.0j + +class MyStr(str): + sample = "hello" + +class MyUnicode(unicode): + sample = u"hello \u1234" + +class MyTuple(tuple): + sample = (1, 2, 3) + +class MyList(list): + sample = [1, 2, 3] + +class MyDict(dict): + sample = {"a": 1, "b": 2} + +myclasses = [MyInt, MyLong, MyFloat, + MyComplex, + MyStr, MyUnicode, + MyTuple, MyList, MyDict] + + +class SlotList(MyList): + __slots__ = ["foo"] + +class SimpleNewObj(object): + def __init__(self, a, b, c): + # raise an error, to make sure this isn't called + raise TypeError("SimpleNewObj.__init__() didn't expect to get called") + +class AbstractPickleModuleTests(unittest.TestCase): + + def test_dump_closed_file(self): + import os + f = open(TESTFN, "w") + try: + f.close() + self.assertRaises(ValueError, self.module.dump, 123, f) + finally: + os.remove(TESTFN) + + def test_load_closed_file(self): + import os + f = open(TESTFN, "w") + try: + f.close() + self.assertRaises(ValueError, self.module.dump, 123, f) + finally: + os.remove(TESTFN) + + def test_highest_protocol(self): + # Of course this needs to be changed when HIGHEST_PROTOCOL changes. + self.assertEqual(self.module.HIGHEST_PROTOCOL, 2) + + def test_callapi(self): + from cStringIO import StringIO + f = StringIO() + # With and without keyword arguments + self.module.dump(123, f, -1) + self.module.dump(123, file=f, protocol=-1) + self.module.dumps(123, -1) + self.module.dumps(123, protocol=-1) + self.module.Pickler(f, -1) + self.module.Pickler(f, protocol=-1) + +class AbstractPersistentPicklerTests(unittest.TestCase): + + # This class defines persistent_id() and persistent_load() + # functions that should be used by the pickler. All even integers + # are pickled using persistent ids. 
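For orientation only (this sketch is not part of the file being added): the persistent_id() and persistent_load() hooks defined just below are meant to be routed into whatever Pickler/Unpickler pair the concrete test case constructs, for example by subclassing the pure-Python pickle classes. The class names and the use of cStringIO here are illustrative assumptions:

    import pickle
    from cStringIO import StringIO

    class PersistentPickleTests(AbstractPersistentPicklerTests):
        def dumps(self, arg, proto=0):
            outer = self
            class PersPickler(pickle.Pickler):
                def persistent_id(self, obj):
                    # delegate to the test case's persistent_id() below
                    return outer.persistent_id(obj)
            f = StringIO()
            PersPickler(f, proto).dump(arg)
            return f.getvalue()

        def loads(self, buf):
            outer = self
            class PersUnpickler(pickle.Unpickler):
                def persistent_load(self, pid):
                    # delegate to the test case's persistent_load() below
                    return outer.persistent_load(pid)
            return PersUnpickler(StringIO(buf)).load()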
+ + def persistent_id(self, object): + if isinstance(object, int) and object % 2 == 0: + self.id_count += 1 + return str(object) + else: + return None + + def persistent_load(self, oid): + self.load_count += 1 + object = int(oid) + assert object % 2 == 0 + return object + + def test_persistence(self): + self.id_count = 0 + self.load_count = 0 + L = range(10) + self.assertEqual(self.loads(self.dumps(L)), L) + self.assertEqual(self.id_count, 5) + self.assertEqual(self.load_count, 5) + + def test_bin_persistence(self): + self.id_count = 0 + self.load_count = 0 + L = range(10) + self.assertEqual(self.loads(self.dumps(L, 1)), L) + self.assertEqual(self.id_count, 5) + self.assertEqual(self.load_count, 5) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,325 @@ +""" +Tests common to tuple, list and UserList.UserList +""" + +import unittest +from test import test_support + +# Various iterables +# This is used for checking the constructor (here and in test_deque.py) +def iterfunc(seqn): + 'Regular generator' + for i in seqn: + yield i + +class Sequence: + 'Sequence using __getitem__' + def __init__(self, seqn): + self.seqn = seqn + def __getitem__(self, i): + return self.seqn[i] + +class IterFunc: + 'Sequence using iterator protocol' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class IterGen: + 'Sequence using iterator protocol defined with a generator' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + for val in self.seqn: + yield val + +class IterNextOnly: + 'Missing __getitem__ and __iter__' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class IterNoNext: + 'Iterator missing next()' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + +class IterGenExc: + 'Test propagation of exceptions' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + 3 // 0 + +class IterFuncStop: + 'Test immediate stop' + def __init__(self, seqn): + pass + def __iter__(self): + return self + def next(self): + raise StopIteration + +from itertools import chain, imap +def itermulti(seqn): + 'Test multiple tiers of iterators' + return chain(imap(lambda x:x, iterfunc(IterGen(Sequence(seqn))))) + +class CommonTest(unittest.TestCase): + # The type to be tested + type2test = None + + def test_constructors(self): + l0 = [] + l1 = [0] + l2 = [0, 1] + + u = self.type2test() + u0 = self.type2test(l0) + u1 = self.type2test(l1) + u2 = self.type2test(l2) + + uu = self.type2test(u) + uu0 = self.type2test(u0) + uu1 = self.type2test(u1) + uu2 = self.type2test(u2) + + v = self.type2test(tuple(u)) + class OtherSeq: + def __init__(self, initseq): + self.__data = initseq + def __len__(self): + return len(self.__data) + def __getitem__(self, i): + return self.__data[i] + s = OtherSeq(u0) + v0 = self.type2test(s) + self.assertEqual(len(v0), len(s)) + + s = "this is also a sequence" + vv = self.type2test(s) + self.assertEqual(len(vv), len(s)) + + # Create from various iteratables 
+ for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for g in (Sequence, IterFunc, IterGen, + itermulti, iterfunc): + self.assertEqual(self.type2test(g(s)), self.type2test(s)) + self.assertEqual(self.type2test(IterFuncStop(s)), self.type2test()) + self.assertEqual(self.type2test(c for c in "123"), self.type2test("123")) + self.assertRaises(TypeError, self.type2test, IterNextOnly(s)) + self.assertRaises(TypeError, self.type2test, IterNoNext(s)) + self.assertRaises(ZeroDivisionError, self.type2test, IterGenExc(s)) + + def test_truth(self): + self.assert_(not self.type2test()) + self.assert_(self.type2test([42])) + + def test_getitem(self): + u = self.type2test([0, 1, 2, 3, 4]) + for i in xrange(len(u)): + self.assertEqual(u[i], i) + self.assertEqual(u[long(i)], i) + for i in xrange(-len(u), -1): + self.assertEqual(u[i], len(u)+i) + self.assertEqual(u[long(i)], len(u)+i) + self.assertRaises(IndexError, u.__getitem__, -len(u)-1) + self.assertRaises(IndexError, u.__getitem__, len(u)) + self.assertRaises(ValueError, u.__getitem__, slice(0,10,0)) + + u = self.type2test() + self.assertRaises(IndexError, u.__getitem__, 0) + self.assertRaises(IndexError, u.__getitem__, -1) + + self.assertRaises(TypeError, u.__getitem__) + + a = self.type2test([10, 11]) + self.assertEqual(a[0], 10) + self.assertEqual(a[1], 11) + self.assertEqual(a[-2], 10) + self.assertEqual(a[-1], 11) + self.assertRaises(IndexError, a.__getitem__, -3) + self.assertRaises(IndexError, a.__getitem__, 3) + + def test_getslice(self): + l = [0, 1, 2, 3, 4] + u = self.type2test(l) + + self.assertEqual(u[0:0], self.type2test()) + self.assertEqual(u[1:2], self.type2test([1])) + self.assertEqual(u[-2:-1], self.type2test([3])) + self.assertEqual(u[-1000:1000], u) + self.assertEqual(u[1000:-1000], self.type2test([])) + self.assertEqual(u[:], u) + self.assertEqual(u[1:None], self.type2test([1, 2, 3, 4])) + self.assertEqual(u[None:3], self.type2test([0, 1, 2])) + + # Extended slices + self.assertEqual(u[::], u) + self.assertEqual(u[::2], self.type2test([0, 2, 4])) + self.assertEqual(u[1::2], self.type2test([1, 3])) + self.assertEqual(u[::-1], self.type2test([4, 3, 2, 1, 0])) + self.assertEqual(u[::-2], self.type2test([4, 2, 0])) + self.assertEqual(u[3::-2], self.type2test([3, 1])) + self.assertEqual(u[3:3:-2], self.type2test([])) + self.assertEqual(u[3:2:-2], self.type2test([3])) + self.assertEqual(u[3:1:-2], self.type2test([3])) + self.assertEqual(u[3:0:-2], self.type2test([3, 1])) + self.assertEqual(u[::-100], self.type2test([4])) + self.assertEqual(u[100:-100:], self.type2test([])) + self.assertEqual(u[-100:100:], u) + self.assertEqual(u[100:-100:-1], u[::-1]) + self.assertEqual(u[-100:100:-1], self.type2test([])) + self.assertEqual(u[-100L:100L:2L], self.type2test([0, 2, 4])) + + # Test extreme cases with long ints + a = self.type2test([0,1,2,3,4]) + self.assertEqual(a[ -pow(2,128L): 3 ], self.type2test([0,1,2])) + self.assertEqual(a[ 3: pow(2,145L) ], self.type2test([3,4])) + + if hasattr(u, '__getslice__'): + self.assertRaises(TypeError, u.__getslice__) + + def test_contains(self): + u = self.type2test([0, 1, 2]) + for i in u: + self.assert_(i in u) + for i in min(u)-1, max(u)+1: + self.assert_(i not in u) + + self.assertRaises(TypeError, u.__contains__) + + def test_contains_fake(self): + class AllEq: + # Sequences must use rich comparison against each item + # (unless "is" is true, or an earlier item answered) + # So instances of AllEq must be found in all non-empty sequences. 
+ def __eq__(self, other): + return True + def __hash__(self): + raise NotImplemented + self.assert_(AllEq() not in self.type2test([])) + self.assert_(AllEq() in self.type2test([1])) + + def test_contains_order(self): + # Sequences must test in-order. If a rich comparison has side + # effects, these will be visible to tests against later members. + # In this test, the "side effect" is a short-circuiting raise. + class DoNotTestEq(Exception): + pass + class StopCompares: + def __eq__(self, other): + raise DoNotTestEq + + checkfirst = self.type2test([1, StopCompares()]) + self.assert_(1 in checkfirst) + checklast = self.type2test([StopCompares(), 1]) + self.assertRaises(DoNotTestEq, checklast.__contains__, 1) + + def test_len(self): + self.assertEqual(len(self.type2test()), 0) + self.assertEqual(len(self.type2test([])), 0) + self.assertEqual(len(self.type2test([0])), 1) + self.assertEqual(len(self.type2test([0, 1, 2])), 3) + + def test_minmax(self): + u = self.type2test([0, 1, 2]) + self.assertEqual(min(u), 0) + self.assertEqual(max(u), 2) + + def test_addmul(self): + u1 = self.type2test([0]) + u2 = self.type2test([0, 1]) + self.assertEqual(u1, u1 + self.type2test()) + self.assertEqual(u1, self.type2test() + u1) + self.assertEqual(u1 + self.type2test([1]), u2) + self.assertEqual(self.type2test([-1]) + u1, self.type2test([-1, 0])) + self.assertEqual(self.type2test(), u2*0) + self.assertEqual(self.type2test(), 0*u2) + self.assertEqual(self.type2test(), u2*0L) + self.assertEqual(self.type2test(), 0L*u2) + self.assertEqual(u2, u2*1) + self.assertEqual(u2, 1*u2) + self.assertEqual(u2, u2*1L) + self.assertEqual(u2, 1L*u2) + self.assertEqual(u2+u2, u2*2) + self.assertEqual(u2+u2, 2*u2) + self.assertEqual(u2+u2, u2*2L) + self.assertEqual(u2+u2, 2L*u2) + self.assertEqual(u2+u2+u2, u2*3) + self.assertEqual(u2+u2+u2, 3*u2) + + class subclass(self.type2test): + pass + u3 = subclass([0, 1]) + self.assertEqual(u3, u3*1) + self.assert_(u3 is not u3*1) + + def test_iadd(self): + u = self.type2test([0, 1]) + u += self.type2test() + self.assertEqual(u, self.type2test([0, 1])) + u += self.type2test([2, 3]) + self.assertEqual(u, self.type2test([0, 1, 2, 3])) + u += self.type2test([4, 5]) + self.assertEqual(u, self.type2test([0, 1, 2, 3, 4, 5])) + + u = self.type2test("spam") + u += self.type2test("eggs") + self.assertEqual(u, self.type2test("spameggs")) + + def test_imul(self): + u = self.type2test([0, 1]) + u *= 3 + self.assertEqual(u, self.type2test([0, 1, 0, 1, 0, 1])) + + #def test_getitemoverwriteiter(self): + # # Verify that __getitem__ overrides are not recognized by __iter__ + # XXX PyPy behaves differently on this detail + # class T(self.type2test): + # def __getitem__(self, key): + # return str(key) + '!!!' 
+ # self.assertEqual(iter(T((1,2))).next(), 1) + + def test_repeat(self): + for m in xrange(4): + s = tuple(range(m)) + for n in xrange(-3, 5): + self.assertEqual(self.type2test(s*n), self.type2test(s)*n) + self.assertEqual(self.type2test(s)*(-4), self.type2test([])) + #self.assertEqual(id(s), id(s*1)) + + def test_subscript(self): + a = self.type2test([10, 11]) + self.assertEqual(a.__getitem__(0L), 10) + self.assertEqual(a.__getitem__(1L), 11) + self.assertEqual(a.__getitem__(-2L), 10) + self.assertEqual(a.__getitem__(-1L), 11) + self.assertRaises(IndexError, a.__getitem__, -3) + self.assertRaises(IndexError, a.__getitem__, 3) + self.assertEqual(a.__getitem__(slice(0,1)), self.type2test([10])) + self.assertEqual(a.__getitem__(slice(1,2)), self.type2test([11])) + self.assertEqual(a.__getitem__(slice(0,2)), self.type2test([10, 11])) + self.assertEqual(a.__getitem__(slice(0,3)), self.type2test([10, 11])) + self.assertEqual(a.__getitem__(slice(3,5)), self.type2test([])) + self.assertRaises(ValueError, a.__getitem__, slice(0, 10, 0)) + self.assertRaises(TypeError, a.__getitem__, 'x') Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,1219 @@ +""" +Common tests shared by test_str, test_unicode, test_userstring and test_string. +""" + +import unittest, string, sys, operator +from test import test_support +from UserList import UserList + +class Sequence: + def __init__(self, seq='wxyz'): self.seq = seq + def __len__(self): return len(self.seq) + def __getitem__(self, i): return self.seq[i] + +class BadSeq1(Sequence): + def __init__(self): self.seq = [7, 'hello', 123L] + +class BadSeq2(Sequence): + def __init__(self): self.seq = ['a', 'b', 'c'] + def __len__(self): return 8 + +class CommonTest(unittest.TestCase): + # This testcase contains test that can be used in all + # stringlike classes. Currently this is str, unicode + # UserString and the string module. + + # The type to be tested + # Change in subclasses to change the behaviour of fixtesttype() + type2test = None + + # All tests pass their arguments to the testing methods + # as str objects. 
fixtesttype() can be used to propagate + # these arguments to the appropriate type + def fixtype(self, obj): + if isinstance(obj, str): + return self.__class__.type2test(obj) + elif isinstance(obj, list): + return [self.fixtype(x) for x in obj] + elif isinstance(obj, tuple): + return tuple([self.fixtype(x) for x in obj]) + elif isinstance(obj, dict): + return dict([ + (self.fixtype(key), self.fixtype(value)) + for (key, value) in obj.iteritems() + ]) + else: + return obj + + # single this out, because UserString cannot cope with fixed args + fixargs = fixtype + subclasscheck = True + + # check that object.method(*args) returns result + def checkequal(self, result, object, methodname, *args): + result = self.fixtype(result) + object = self.fixtype(object) + args = self.fixargs(args) + realresult = getattr(object, methodname)(*args) + self.assertEqual( + result, + realresult + ) + # if the original is returned make sure that + # this doesn't happen with subclasses + if object == realresult and self.subclasscheck: + class subtype(self.__class__.type2test): + pass + object = subtype(object) + realresult = getattr(object, methodname)(*args) + self.assert_(object is not realresult) + + # check that op(*args) returns result + def checkop(self, result, op, *args): + result = self.fixtype(result) + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + realresult = op(object, *args) + self.assertEqual( + result, + realresult + ) + # if the original is returned make sure that + # this doesn't happen with subclasses + if object == realresult and self.subclasscheck: + class subtype(self.__class__.type2test): + pass + object = subtype(object) + realresult = op(object, *args) + self.assert_(object is not realresult) + + # check that object.method(*args) raises exc + def checkraises(self, exc, object, methodname, *args): + object = self.fixtype(object) + args = self.fixargs(args) + self.assertRaises( + exc, + getattr(object, methodname), + *args + ) + + # check that op(*args) raises exc + def checkopraises(self, exc, op, *args): + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + self.assertRaises( + exc, + op, + object, + *args + ) + + # call object.method(*args) without any checks + def checkcall(self, object, methodname, *args): + object = self.fixtype(object) + args = self.fixargs(args) + getattr(object, methodname)(*args) + + # call op(*args) without any checks + def checkopcall(self, op, *args): + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + op(object, *args) + + def test_hash(self): + # SF bug 1054139: += optimization was not invalidating cached hash value + a = self.type2test('DNSSEC') + b = self.type2test('') + for c in a: + b += c + hash(b) + self.assertEqual(hash(a), hash(b)) + + def test_capitalize(self): + self.checkequal(' hello ', ' hello ', 'capitalize') + self.checkequal('Hello ', 'Hello ','capitalize') + self.checkequal('Hello ', 'hello ','capitalize') + self.checkequal('Aaaa', 'aaaa', 'capitalize') + self.checkequal('Aaaa', 'AaAa', 'capitalize') + + self.checkraises(TypeError, 'hello', 'capitalize', 42) + + def test_count(self): + self.checkequal(3, 'aaa', 'count', 'a') + self.checkequal(0, 'aaa', 'count', 'b') + self.checkequal(3, 'aaa', 'count', 'a') + self.checkequal(0, 'aaa', 'count', 'b') + self.checkequal(3, 'aaa', 'count', 'a') + self.checkequal(0, 'aaa', 'count', 'b') + self.checkequal(0, 'aaa', 'count', 'b') + self.checkequal(2, 'aaa', 'count', 'a', 1) + self.checkequal(0, 'aaa', 'count', 'a', 10) + self.checkequal(1, 'aaa', 
'count', 'a', -1) + self.checkequal(3, 'aaa', 'count', 'a', -10) + self.checkequal(1, 'aaa', 'count', 'a', 0, 1) + self.checkequal(3, 'aaa', 'count', 'a', 0, 10) + self.checkequal(2, 'aaa', 'count', 'a', 0, -1) + self.checkequal(0, 'aaa', 'count', 'a', 0, -10) + self.checkequal(3, 'aaa', 'count', '', 1) + self.checkequal(1, 'aaa', 'count', '', 3) + self.checkequal(0, 'aaa', 'count', '', 10) + self.checkequal(2, 'aaa', 'count', '', -1) + self.checkequal(4, 'aaa', 'count', '', -10) + + self.checkraises(TypeError, 'hello', 'count') + self.checkraises(TypeError, 'hello', 'count', 42) + + # For a variety of combinations, + # verify that str.count() matches an equivalent function + # replacing all occurrences and then differencing the string lengths + charset = ['', 'a', 'b'] + digits = 7 + base = len(charset) + teststrings = set() + for i in xrange(base ** digits): + entry = [] + for j in xrange(digits): + i, m = divmod(i, base) + entry.append(charset[m]) + teststrings.add(''.join(entry)) + teststrings = list(teststrings) + for i in teststrings: + i = self.fixtype(i) + n = len(i) + for j in teststrings: + r1 = i.count(j) + if j: + r2, rem = divmod(n - len(i.replace(j, '')), len(j)) + else: + r2, rem = len(i)+1, 0 + if rem or r1 != r2: + self.assertEqual(rem, 0, '%s != 0 for %s' % (rem, i)) + self.assertEqual(r1, r2, '%s != %s for %s' % (r1, r2, i)) + + def test_find(self): + self.checkequal(0, 'abcdefghiabc', 'find', 'abc') + self.checkequal(9, 'abcdefghiabc', 'find', 'abc', 1) + self.checkequal(-1, 'abcdefghiabc', 'find', 'def', 4) + + self.checkequal(0, 'abc', 'find', '', 0) + self.checkequal(3, 'abc', 'find', '', 3) + self.checkequal(-1, 'abc', 'find', '', 4) + + self.checkraises(TypeError, 'hello', 'find') + self.checkraises(TypeError, 'hello', 'find', 42) + + # For a variety of combinations, + # verify that str.find() matches __contains__ + # and that the found substring is really at that location + charset = ['', 'a', 'b', 'c'] + digits = 5 + base = len(charset) + teststrings = set() + for i in xrange(base ** digits): + entry = [] + for j in xrange(digits): + i, m = divmod(i, base) + entry.append(charset[m]) + teststrings.add(''.join(entry)) + teststrings = list(teststrings) + for i in teststrings: + i = self.fixtype(i) + for j in teststrings: + loc = i.find(j) + r1 = (loc != -1) + r2 = j in i + if r1 != r2: + self.assertEqual(r1, r2) + if loc != -1: + self.assertEqual(i[loc:loc+len(j)], j) + + def test_rfind(self): + self.checkequal(9, 'abcdefghiabc', 'rfind', 'abc') + self.checkequal(12, 'abcdefghiabc', 'rfind', '') + self.checkequal(0, 'abcdefghiabc', 'rfind', 'abcd') + self.checkequal(-1, 'abcdefghiabc', 'rfind', 'abcz') + + self.checkequal(3, 'abc', 'rfind', '', 0) + self.checkequal(3, 'abc', 'rfind', '', 3) + self.checkequal(-1, 'abc', 'rfind', '', 4) + + self.checkraises(TypeError, 'hello', 'rfind') + self.checkraises(TypeError, 'hello', 'rfind', 42) + + def test_index(self): + self.checkequal(0, 'abcdefghiabc', 'index', '') + self.checkequal(3, 'abcdefghiabc', 'index', 'def') + self.checkequal(0, 'abcdefghiabc', 'index', 'abc') + self.checkequal(9, 'abcdefghiabc', 'index', 'abc', 1) + + self.checkraises(ValueError, 'abcdefghiabc', 'index', 'hib') + self.checkraises(ValueError, 'abcdefghiab', 'index', 'abc', 1) + self.checkraises(ValueError, 'abcdefghi', 'index', 'ghi', 8) + self.checkraises(ValueError, 'abcdefghi', 'index', 'ghi', -1) + + self.checkraises(TypeError, 'hello', 'index') + self.checkraises(TypeError, 'hello', 'index', 42) + + def test_rindex(self): + 
self.checkequal(12, 'abcdefghiabc', 'rindex', '') + self.checkequal(3, 'abcdefghiabc', 'rindex', 'def') + self.checkequal(9, 'abcdefghiabc', 'rindex', 'abc') + self.checkequal(0, 'abcdefghiabc', 'rindex', 'abc', 0, -1) + + self.checkraises(ValueError, 'abcdefghiabc', 'rindex', 'hib') + self.checkraises(ValueError, 'defghiabc', 'rindex', 'def', 1) + self.checkraises(ValueError, 'defghiabc', 'rindex', 'abc', 0, -1) + self.checkraises(ValueError, 'abcdefghi', 'rindex', 'ghi', 0, 8) + self.checkraises(ValueError, 'abcdefghi', 'rindex', 'ghi', 0, -1) + + self.checkraises(TypeError, 'hello', 'rindex') + self.checkraises(TypeError, 'hello', 'rindex', 42) + + def test_lower(self): + self.checkequal('hello', 'HeLLo', 'lower') + self.checkequal('hello', 'hello', 'lower') + self.checkraises(TypeError, 'hello', 'lower', 42) + + def test_upper(self): + self.checkequal('HELLO', 'HeLLo', 'upper') + self.checkequal('HELLO', 'HELLO', 'upper') + self.checkraises(TypeError, 'hello', 'upper', 42) + + def test_expandtabs(self): + self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs') + self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8) + self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 4) + self.checkequal('abc\r\nab def\ng hi', 'abc\r\nab\tdef\ng\thi', 'expandtabs', 4) + self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs') + self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8) + self.checkequal('abc\r\nab\r\ndef\ng\r\nhi', 'abc\r\nab\r\ndef\ng\r\nhi', 'expandtabs', 4) + + self.checkraises(TypeError, 'hello', 'expandtabs', 42, 42) + + def test_split(self): + self.checkequal(['this', 'is', 'the', 'split', 'function'], + 'this is the split function', 'split') + + # by whitespace + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d ', 'split') + self.checkequal(['a', 'b c d'], 'a b c d', 'split', None, 1) + self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, + sys.maxint-1) + self.checkequal(['a b c d'], 'a b c d', 'split', None, 0) + self.checkequal(['a b c d'], ' a b c d', 'split', None, 0) + self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2) + + self.checkequal([], ' ', 'split') + self.checkequal(['a'], ' a ', 'split') + self.checkequal(['a', 'b'], ' a b ', 'split') + self.checkequal(['a', 'b '], ' a b ', 'split', None, 1) + self.checkequal(['a', 'b c '], ' a b c ', 'split', None, 1) + self.checkequal(['a', 'b', 'c '], ' a b c ', 'split', None, 2) + self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'split') + aaa = ' a '*20 + self.checkequal(['a']*20, aaa, 'split') + self.checkequal(['a'] + [aaa[4:]], aaa, 'split', None, 1) + self.checkequal(['a']*19 + ['a '], aaa, 'split', None, 19) + + # by a char + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|') + self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0) + self.checkequal(['a', 'b|c|d'], 'a|b|c|d', 'split', '|', 1) + self.checkequal(['a', 'b', 'c|d'], 'a|b|c|d', 'split', '|', 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', + sys.maxint-2) + self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0) + self.checkequal(['a', '', 'b||c||d'], 'a||b||c||d', 'split', 
'|', 2) + self.checkequal(['endcase ', ''], 'endcase |', 'split', '|') + self.checkequal(['', ' startcase'], '| startcase', 'split', '|') + self.checkequal(['', 'bothcase', ''], '|bothcase|', 'split', '|') + self.checkequal(['a', '', 'b\x00c\x00d'], 'a\x00\x00b\x00c\x00d', 'split', '\x00', 2) + + self.checkequal(['a']*20, ('a|'*20)[:-1], 'split', '|') + self.checkequal(['a']*15 +['a|a|a|a|a'], + ('a|'*20)[:-1], 'split', '|', 15) + + # by string + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//') + self.checkequal(['a', 'b//c//d'], 'a//b//c//d', 'split', '//', 1) + self.checkequal(['a', 'b', 'c//d'], 'a//b//c//d', 'split', '//', 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', + sys.maxint-10) + self.checkequal(['a//b//c//d'], 'a//b//c//d', 'split', '//', 0) + self.checkequal(['a', '', 'b////c////d'], 'a////b////c////d', 'split', '//', 2) + self.checkequal(['endcase ', ''], 'endcase test', 'split', 'test') + self.checkequal(['', ' begincase'], 'test begincase', 'split', 'test') + self.checkequal(['', ' bothcase ', ''], 'test bothcase test', + 'split', 'test') + self.checkequal(['a', 'bc'], 'abbbc', 'split', 'bb') + self.checkequal(['', ''], 'aaa', 'split', 'aaa') + self.checkequal(['aaa'], 'aaa', 'split', 'aaa', 0) + self.checkequal(['ab', 'ab'], 'abbaab', 'split', 'ba') + self.checkequal(['aaaa'], 'aaaa', 'split', 'aab') + self.checkequal([''], '', 'split', 'aaa') + self.checkequal(['aa'], 'aa', 'split', 'aaa') + self.checkequal(['A', 'bobb'], 'Abbobbbobb', 'split', 'bbobb') + self.checkequal(['A', 'B', ''], 'AbbobbBbbobb', 'split', 'bbobb') + + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH') + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH', 19) + self.checkequal(['a']*18 + ['aBLAHa'], ('aBLAH'*20)[:-4], + 'split', 'BLAH', 18) + + # mixed use of str and unicode + self.checkequal([u'a', u'b', u'c d'], 'a b c d', 'split', u' ', 2) + + # argument type + self.checkraises(TypeError, 'hello', 'split', 42, 42, 42) + + # null case + self.checkraises(ValueError, 'hello', 'split', '') + self.checkraises(ValueError, 'hello', 'split', '', 0) + + def test_rsplit(self): + self.checkequal(['this', 'is', 'the', 'rsplit', 'function'], + 'this is the rsplit function', 'rsplit') + + # by whitespace + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d ', 'rsplit') + self.checkequal(['a b c', 'd'], 'a b c d', 'rsplit', None, 1) + self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, + sys.maxint-20) + self.checkequal(['a b c d'], 'a b c d', 'rsplit', None, 0) + self.checkequal(['a b c d'], 'a b c d ', 'rsplit', None, 0) + self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2) + + self.checkequal([], ' ', 'rsplit') + self.checkequal(['a'], ' a ', 'rsplit') + self.checkequal(['a', 'b'], ' a b ', 'rsplit') + self.checkequal([' a', 'b'], ' a b ', 'rsplit', None, 1) + self.checkequal([' a b','c'], ' a b c ', 'rsplit', + None, 1) + self.checkequal([' a', 'b', 'c'], ' a b c ', 'rsplit', + None, 2) + self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'rsplit', None, 88) + aaa = ' a '*20 + self.checkequal(['a']*20, aaa, 'rsplit') + self.checkequal([aaa[:-4]] + ['a'], aaa, 'rsplit', None, 1) + 
self.checkequal([' a a'] + ['a']*18, aaa, 'rsplit', None, 18) + + + # by a char + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|') + self.checkequal(['a|b|c', 'd'], 'a|b|c|d', 'rsplit', '|', 1) + self.checkequal(['a|b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', + sys.maxint-100) + self.checkequal(['a|b|c|d'], 'a|b|c|d', 'rsplit', '|', 0) + self.checkequal(['a||b||c', '', 'd'], 'a||b||c||d', 'rsplit', '|', 2) + self.checkequal(['', ' begincase'], '| begincase', 'rsplit', '|') + self.checkequal(['endcase ', ''], 'endcase |', 'rsplit', '|') + self.checkequal(['', 'bothcase', ''], '|bothcase|', 'rsplit', '|') + + self.checkequal(['a\x00\x00b', 'c', 'd'], 'a\x00\x00b\x00c\x00d', 'rsplit', '\x00', 2) + + self.checkequal(['a']*20, ('a|'*20)[:-1], 'rsplit', '|') + self.checkequal(['a|a|a|a|a']+['a']*15, + ('a|'*20)[:-1], 'rsplit', '|', 15) + + # by string + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//') + self.checkequal(['a//b//c', 'd'], 'a//b//c//d', 'rsplit', '//', 1) + self.checkequal(['a//b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 2) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 3) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', + sys.maxint-5) + self.checkequal(['a//b//c//d'], 'a//b//c//d', 'rsplit', '//', 0) + self.checkequal(['a////b////c', '', 'd'], 'a////b////c////d', 'rsplit', '//', 2) + self.checkequal(['', ' begincase'], 'test begincase', 'rsplit', 'test') + self.checkequal(['endcase ', ''], 'endcase test', 'rsplit', 'test') + self.checkequal(['', ' bothcase ', ''], 'test bothcase test', + 'rsplit', 'test') + self.checkequal(['ab', 'c'], 'abbbc', 'rsplit', 'bb') + self.checkequal(['', ''], 'aaa', 'rsplit', 'aaa') + self.checkequal(['aaa'], 'aaa', 'rsplit', 'aaa', 0) + self.checkequal(['ab', 'ab'], 'abbaab', 'rsplit', 'ba') + self.checkequal(['aaaa'], 'aaaa', 'rsplit', 'aab') + self.checkequal([''], '', 'rsplit', 'aaa') + self.checkequal(['aa'], 'aa', 'rsplit', 'aaa') + self.checkequal(['bbob', 'A'], 'bbobbbobbA', 'rsplit', 'bbobb') + self.checkequal(['', 'B', 'A'], 'bbobbBbbobbA', 'rsplit', 'bbobb') + + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH') + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH', 19) + self.checkequal(['aBLAHa'] + ['a']*18, ('aBLAH'*20)[:-4], + 'rsplit', 'BLAH', 18) + + # mixed use of str and unicode + self.checkequal([u'a b', u'c', u'd'], 'a b c d', 'rsplit', u' ', 2) + + # argument type + self.checkraises(TypeError, 'hello', 'rsplit', 42, 42, 42) + + # null case + self.checkraises(ValueError, 'hello', 'rsplit', '') + self.checkraises(ValueError, 'hello', 'rsplit', '', 0) + + def test_strip(self): + self.checkequal('hello', ' hello ', 'strip') + self.checkequal('hello ', ' hello ', 'lstrip') + self.checkequal(' hello', ' hello ', 'rstrip') + self.checkequal('hello', 'hello', 'strip') + + # strip/lstrip/rstrip with None arg + self.checkequal('hello', ' hello ', 'strip', None) + self.checkequal('hello ', ' hello ', 'lstrip', None) + self.checkequal(' hello', ' hello ', 'rstrip', None) + self.checkequal('hello', 'hello', 'strip', None) + + # strip/lstrip/rstrip with str arg + self.checkequal('hello', 'xyzzyhelloxyzzy', 'strip', 'xyz') + self.checkequal('helloxyzzy', 'xyzzyhelloxyzzy', 
'lstrip', 'xyz') + self.checkequal('xyzzyhello', 'xyzzyhelloxyzzy', 'rstrip', 'xyz') + self.checkequal('hello', 'hello', 'strip', 'xyz') + + # strip/lstrip/rstrip with unicode arg + if test_support.have_unicode: + self.checkequal(unicode('hello', 'ascii'), 'xyzzyhelloxyzzy', + 'strip', unicode('xyz', 'ascii')) + self.checkequal(unicode('helloxyzzy', 'ascii'), 'xyzzyhelloxyzzy', + 'lstrip', unicode('xyz', 'ascii')) + self.checkequal(unicode('xyzzyhello', 'ascii'), 'xyzzyhelloxyzzy', + 'rstrip', unicode('xyz', 'ascii')) + self.checkequal(unicode('hello', 'ascii'), 'hello', + 'strip', unicode('xyz', 'ascii')) + + self.checkraises(TypeError, 'hello', 'strip', 42, 42) + self.checkraises(TypeError, 'hello', 'lstrip', 42, 42) + self.checkraises(TypeError, 'hello', 'rstrip', 42, 42) + + def test_ljust(self): + self.checkequal('abc ', 'abc', 'ljust', 10) + self.checkequal('abc ', 'abc', 'ljust', 6) + self.checkequal('abc', 'abc', 'ljust', 3) + self.checkequal('abc', 'abc', 'ljust', 2) + self.checkequal('abc*******', 'abc', 'ljust', 10, '*') + self.checkraises(TypeError, 'abc', 'ljust') + + def test_rjust(self): + self.checkequal(' abc', 'abc', 'rjust', 10) + self.checkequal(' abc', 'abc', 'rjust', 6) + self.checkequal('abc', 'abc', 'rjust', 3) + self.checkequal('abc', 'abc', 'rjust', 2) + self.checkequal('*******abc', 'abc', 'rjust', 10, '*') + self.checkraises(TypeError, 'abc', 'rjust') + + def test_center(self): + self.checkequal(' abc ', 'abc', 'center', 10) + self.checkequal(' abc ', 'abc', 'center', 6) + self.checkequal('abc', 'abc', 'center', 3) + self.checkequal('abc', 'abc', 'center', 2) + self.checkequal('***abc****', 'abc', 'center', 10, '*') + self.checkraises(TypeError, 'abc', 'center') + + def test_swapcase(self): + self.checkequal('hEllO CoMPuTErS', 'HeLLo cOmpUteRs', 'swapcase') + + self.checkraises(TypeError, 'hello', 'swapcase', 42) + + def test_replace(self): + EQ = self.checkequal + + # Operations on the empty string + EQ("", "", "replace", "", "") + EQ("A", "", "replace", "", "A") + EQ("", "", "replace", "A", "") + EQ("", "", "replace", "A", "A") + EQ("", "", "replace", "", "", 100) + EQ("", "", "replace", "", "", sys.maxint) + + # interleave (from=="", 'to' gets inserted everywhere) + EQ("A", "A", "replace", "", "") + EQ("*A*", "A", "replace", "", "*") + EQ("*1A*1", "A", "replace", "", "*1") + EQ("*-#A*-#", "A", "replace", "", "*-#") + EQ("*-A*-A*-", "AA", "replace", "", "*-") + EQ("*-A*-A*-", "AA", "replace", "", "*-", -1) + EQ("*-A*-A*-", "AA", "replace", "", "*-", sys.maxint) + EQ("*-A*-A*-", "AA", "replace", "", "*-", 4) + EQ("*-A*-A*-", "AA", "replace", "", "*-", 3) + EQ("*-A*-A", "AA", "replace", "", "*-", 2) + EQ("*-AA", "AA", "replace", "", "*-", 1) + EQ("AA", "AA", "replace", "", "*-", 0) + + # single character deletion (from=="A", to=="") + EQ("", "A", "replace", "A", "") + EQ("", "AAA", "replace", "A", "") + EQ("", "AAA", "replace", "A", "", -1) + EQ("", "AAA", "replace", "A", "", sys.maxint) + EQ("", "AAA", "replace", "A", "", 4) + EQ("", "AAA", "replace", "A", "", 3) + EQ("A", "AAA", "replace", "A", "", 2) + EQ("AA", "AAA", "replace", "A", "", 1) + EQ("AAA", "AAA", "replace", "A", "", 0) + EQ("", "AAAAAAAAAA", "replace", "A", "") + EQ("BCD", "ABACADA", "replace", "A", "") + EQ("BCD", "ABACADA", "replace", "A", "", -1) + EQ("BCD", "ABACADA", "replace", "A", "", sys.maxint) + EQ("BCD", "ABACADA", "replace", "A", "", 5) + EQ("BCD", "ABACADA", "replace", "A", "", 4) + EQ("BCDA", "ABACADA", "replace", "A", "", 3) + EQ("BCADA", "ABACADA", "replace", "A", "", 2) + 
EQ("BACADA", "ABACADA", "replace", "A", "", 1) + EQ("ABACADA", "ABACADA", "replace", "A", "", 0) + EQ("BCD", "ABCAD", "replace", "A", "") + EQ("BCD", "ABCADAA", "replace", "A", "") + EQ("BCD", "BCD", "replace", "A", "") + EQ("*************", "*************", "replace", "A", "") + EQ("^A^", "^"+"A"*1000+"^", "replace", "A", "", 999) + + # substring deletion (from=="the", to=="") + EQ("", "the", "replace", "the", "") + EQ("ater", "theater", "replace", "the", "") + EQ("", "thethe", "replace", "the", "") + EQ("", "thethethethe", "replace", "the", "") + EQ("aaaa", "theatheatheathea", "replace", "the", "") + EQ("that", "that", "replace", "the", "") + EQ("thaet", "thaet", "replace", "the", "") + EQ("here and re", "here and there", "replace", "the", "") + EQ("here and re and re", "here and there and there", + "replace", "the", "", sys.maxint) + EQ("here and re and re", "here and there and there", + "replace", "the", "", -1) + EQ("here and re and re", "here and there and there", + "replace", "the", "", 3) + EQ("here and re and re", "here and there and there", + "replace", "the", "", 2) + EQ("here and re and there", "here and there and there", + "replace", "the", "", 1) + EQ("here and there and there", "here and there and there", + "replace", "the", "", 0) + EQ("here and re and re", "here and there and there", "replace", "the", "") + + EQ("abc", "abc", "replace", "the", "") + EQ("abcdefg", "abcdefg", "replace", "the", "") + + # substring deletion (from=="bob", to=="") + EQ("bob", "bbobob", "replace", "bob", "") + EQ("bobXbob", "bbobobXbbobob", "replace", "bob", "") + EQ("aaaaaaa", "aaaaaaabob", "replace", "bob", "") + EQ("aaaaaaa", "aaaaaaa", "replace", "bob", "") + + # single character replace in place (len(from)==len(to)==1) + EQ("Who goes there?", "Who goes there?", "replace", "o", "o") + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O") + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", sys.maxint) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", -1) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 3) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 2) + EQ("WhO goes there?", "Who goes there?", "replace", "o", "O", 1) + EQ("Who goes there?", "Who goes there?", "replace", "o", "O", 0) + + EQ("Who goes there?", "Who goes there?", "replace", "a", "q") + EQ("who goes there?", "Who goes there?", "replace", "W", "w") + EQ("wwho goes there?ww", "WWho goes there?WW", "replace", "W", "w") + EQ("Who goes there!", "Who goes there?", "replace", "?", "!") + EQ("Who goes there!!", "Who goes there??", "replace", "?", "!") + + EQ("Who goes there?", "Who goes there?", "replace", ".", "!") + + # substring replace in place (len(from)==len(to) > 1) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**") + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", sys.maxint) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", -1) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 4) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 3) + EQ("Th** ** a tissue", "This is a tissue", "replace", "is", "**", 2) + EQ("Th** is a tissue", "This is a tissue", "replace", "is", "**", 1) + EQ("This is a tissue", "This is a tissue", "replace", "is", "**", 0) + EQ("cobob", "bobob", "replace", "bob", "cob") + EQ("cobobXcobocob", "bobobXbobobob", "replace", "bob", "cob") + EQ("bobob", "bobob", "replace", "bot", "bot") + + # replace single character (len(from)==1, len(to)>1) + EQ("ReyKKjaviKK", 
"Reykjavik", "replace", "k", "KK") + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", -1) + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", sys.maxint) + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", 2) + EQ("ReyKKjavik", "Reykjavik", "replace", "k", "KK", 1) + EQ("Reykjavik", "Reykjavik", "replace", "k", "KK", 0) + EQ("A----B----C----", "A.B.C.", "replace", ".", "----") + + EQ("Reykjavik", "Reykjavik", "replace", "q", "KK") + + # replace substring (len(from)>1, len(to)!=len(from)) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham") + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", sys.maxint) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", -1) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", 4) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", 3) + EQ("ham, ham, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 2) + EQ("ham, spam, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 1) + EQ("spam, spam, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 0) + + EQ("bobob", "bobobob", "replace", "bobob", "bob") + EQ("bobobXbobob", "bobobobXbobobob", "replace", "bobob", "bob") + EQ("BOBOBOB", "BOBOBOB", "replace", "bob", "bobby") + + ba = buffer('a') + bb = buffer('b') + EQ("bbc", "abc", "replace", ba, bb) + EQ("aac", "abc", "replace", bb, ba) + + # + self.checkequal('one at two!three!', 'one!two!three!', 'replace', '!', '@', 1) + self.checkequal('onetwothree', 'one!two!three!', 'replace', '!', '') + self.checkequal('one at two@three!', 'one!two!three!', 'replace', '!', '@', 2) + self.checkequal('one at two@three@', 'one!two!three!', 'replace', '!', '@', 3) + self.checkequal('one at two@three@', 'one!two!three!', 'replace', '!', '@', 4) + self.checkequal('one!two!three!', 'one!two!three!', 'replace', '!', '@', 0) + self.checkequal('one at two@three@', 'one!two!three!', 'replace', '!', '@') + self.checkequal('one!two!three!', 'one!two!three!', 'replace', 'x', '@') + self.checkequal('one!two!three!', 'one!two!three!', 'replace', 'x', '@', 2) + self.checkequal('-a-b-c-', 'abc', 'replace', '', '-') + self.checkequal('-a-b-c', 'abc', 'replace', '', '-', 3) + self.checkequal('abc', 'abc', 'replace', '', '-', 0) + self.checkequal('', '', 'replace', '', '') + self.checkequal('abc', 'abc', 'replace', 'ab', '--', 0) + self.checkequal('abc', 'abc', 'replace', 'xy', '--') + # Next three for SF bug 422088: [OSF1 alpha] string.replace(); died with + # MemoryError due to empty result (platform malloc issue when requesting + # 0 bytes). 
+ self.checkequal('', '123', 'replace', '123', '') + self.checkequal('', '123123', 'replace', '123', '') + self.checkequal('x', '123x123', 'replace', '123', '') + + self.checkraises(TypeError, 'hello', 'replace') + self.checkraises(TypeError, 'hello', 'replace', 42) + self.checkraises(TypeError, 'hello', 'replace', 42, 'h') + self.checkraises(TypeError, 'hello', 'replace', 'h', 42) + + def test_replace_overflow(self): + # Check for overflow checking on 32 bit machines + if sys.maxint != 2147483647: + return + A2_16 = "A" * (2**16) + self.checkraises(OverflowError, A2_16, "replace", "", A2_16) + self.checkraises(OverflowError, A2_16, "replace", "A", A2_16) + self.checkraises(OverflowError, A2_16, "replace", "AA", A2_16+A2_16) + + def test_zfill(self): + self.checkequal('123', '123', 'zfill', 2) + self.checkequal('123', '123', 'zfill', 3) + self.checkequal('0123', '123', 'zfill', 4) + self.checkequal('+123', '+123', 'zfill', 3) + self.checkequal('+123', '+123', 'zfill', 4) + self.checkequal('+0123', '+123', 'zfill', 5) + self.checkequal('-123', '-123', 'zfill', 3) + self.checkequal('-123', '-123', 'zfill', 4) + self.checkequal('-0123', '-123', 'zfill', 5) + self.checkequal('000', '', 'zfill', 3) + self.checkequal('34', '34', 'zfill', 1) + self.checkequal('0034', '34', 'zfill', 4) + + self.checkraises(TypeError, '123', 'zfill') + +class MixinStrUnicodeUserStringTest: + # additional tests that only work for + # stringlike objects, i.e. str, unicode, UserString + # (but not the string module) + + def test_islower(self): + self.checkequal(False, '', 'islower') + self.checkequal(True, 'a', 'islower') + self.checkequal(False, 'A', 'islower') + self.checkequal(False, '\n', 'islower') + self.checkequal(True, 'abc', 'islower') + self.checkequal(False, 'aBc', 'islower') + self.checkequal(True, 'abc\n', 'islower') + self.checkraises(TypeError, 'abc', 'islower', 42) + + def test_isupper(self): + self.checkequal(False, '', 'isupper') + self.checkequal(False, 'a', 'isupper') + self.checkequal(True, 'A', 'isupper') + self.checkequal(False, '\n', 'isupper') + self.checkequal(True, 'ABC', 'isupper') + self.checkequal(False, 'AbC', 'isupper') + self.checkequal(True, 'ABC\n', 'isupper') + self.checkraises(TypeError, 'abc', 'isupper', 42) + + def test_istitle(self): + self.checkequal(False, '', 'istitle') + self.checkequal(False, 'a', 'istitle') + self.checkequal(True, 'A', 'istitle') + self.checkequal(False, '\n', 'istitle') + self.checkequal(True, 'A Titlecased Line', 'istitle') + self.checkequal(True, 'A\nTitlecased Line', 'istitle') + self.checkequal(True, 'A Titlecased, Line', 'istitle') + self.checkequal(False, 'Not a capitalized String', 'istitle') + self.checkequal(False, 'Not\ta Titlecase String', 'istitle') + self.checkequal(False, 'Not--a Titlecase String', 'istitle') + self.checkequal(False, 'NOT', 'istitle') + self.checkraises(TypeError, 'abc', 'istitle', 42) + + def test_isspace(self): + self.checkequal(False, '', 'isspace') + self.checkequal(False, 'a', 'isspace') + self.checkequal(True, ' ', 'isspace') + self.checkequal(True, '\t', 'isspace') + self.checkequal(True, '\r', 'isspace') + self.checkequal(True, '\n', 'isspace') + self.checkequal(True, ' \t\r\n', 'isspace') + self.checkequal(False, ' \t\r\na', 'isspace') + self.checkraises(TypeError, 'abc', 'isspace', 42) + + def test_isalpha(self): + self.checkequal(False, '', 'isalpha') + self.checkequal(True, 'a', 'isalpha') + self.checkequal(True, 'A', 'isalpha') + self.checkequal(False, '\n', 'isalpha') + self.checkequal(True, 'abc', 'isalpha') + 
self.checkequal(False, 'aBc123', 'isalpha') + self.checkequal(False, 'abc\n', 'isalpha') + self.checkraises(TypeError, 'abc', 'isalpha', 42) + + def test_isalnum(self): + self.checkequal(False, '', 'isalnum') + self.checkequal(True, 'a', 'isalnum') + self.checkequal(True, 'A', 'isalnum') + self.checkequal(False, '\n', 'isalnum') + self.checkequal(True, '123abc456', 'isalnum') + self.checkequal(True, 'a1b3c', 'isalnum') + self.checkequal(False, 'aBc000 ', 'isalnum') + self.checkequal(False, 'abc\n', 'isalnum') + self.checkraises(TypeError, 'abc', 'isalnum', 42) + + def test_isdigit(self): + self.checkequal(False, '', 'isdigit') + self.checkequal(False, 'a', 'isdigit') + self.checkequal(True, '0', 'isdigit') + self.checkequal(True, '0123456789', 'isdigit') + self.checkequal(False, '0123456789a', 'isdigit') + + self.checkraises(TypeError, 'abc', 'isdigit', 42) + + def test_title(self): + self.checkequal(' Hello ', ' hello ', 'title') + self.checkequal('Hello ', 'hello ', 'title') + self.checkequal('Hello ', 'Hello ', 'title') + self.checkequal('Format This As Title String', "fOrMaT thIs aS titLe String", 'title') + self.checkequal('Format,This-As*Title;String', "fOrMaT,thIs-aS*titLe;String", 'title', ) + self.checkequal('Getint', "getInt", 'title') + self.checkraises(TypeError, 'hello', 'title', 42) + + def test_splitlines(self): + self.checkequal(['abc', 'def', '', 'ghi'], "abc\ndef\n\rghi", 'splitlines') + self.checkequal(['abc', 'def', '', 'ghi'], "abc\ndef\n\r\nghi", 'splitlines') + self.checkequal(['abc', 'def', 'ghi'], "abc\ndef\r\nghi", 'splitlines') + self.checkequal(['abc', 'def', 'ghi'], "abc\ndef\r\nghi\n", 'splitlines') + self.checkequal(['abc', 'def', 'ghi', ''], "abc\ndef\r\nghi\n\r", 'splitlines') + self.checkequal(['', 'abc', 'def', 'ghi', ''], "\nabc\ndef\r\nghi\n\r", 'splitlines') + self.checkequal(['\n', 'abc\n', 'def\r\n', 'ghi\n', '\r'], "\nabc\ndef\r\nghi\n\r", 'splitlines', 1) + + self.checkraises(TypeError, 'abc', 'splitlines', 42, 42) + + def test_startswith(self): + self.checkequal(True, 'hello', 'startswith', 'he') + self.checkequal(True, 'hello', 'startswith', 'hello') + self.checkequal(False, 'hello', 'startswith', 'hello world') + self.checkequal(True, 'hello', 'startswith', '') + self.checkequal(False, 'hello', 'startswith', 'ello') + self.checkequal(True, 'hello', 'startswith', 'ello', 1) + self.checkequal(True, 'hello', 'startswith', 'o', 4) + self.checkequal(False, 'hello', 'startswith', 'o', 5) + self.checkequal(True, 'hello', 'startswith', '', 5) + self.checkequal(False, 'hello', 'startswith', 'lo', 6) + self.checkequal(True, 'helloworld', 'startswith', 'lowo', 3) + self.checkequal(True, 'helloworld', 'startswith', 'lowo', 3, 7) + self.checkequal(False, 'helloworld', 'startswith', 'lowo', 3, 6) + + # test negative indices + self.checkequal(True, 'hello', 'startswith', 'he', 0, -1) + self.checkequal(True, 'hello', 'startswith', 'he', -53, -1) + self.checkequal(False, 'hello', 'startswith', 'hello', 0, -1) + self.checkequal(False, 'hello', 'startswith', 'hello world', -1, -10) + self.checkequal(False, 'hello', 'startswith', 'ello', -5) + self.checkequal(True, 'hello', 'startswith', 'ello', -4) + self.checkequal(False, 'hello', 'startswith', 'o', -2) + self.checkequal(True, 'hello', 'startswith', 'o', -1) + self.checkequal(True, 'hello', 'startswith', '', -3, -3) + self.checkequal(False, 'hello', 'startswith', 'lo', -9) + + self.checkraises(TypeError, 'hello', 'startswith') + self.checkraises(TypeError, 'hello', 'startswith', 42) + + # test tuple arguments + 
self.checkequal(True, 'hello', 'startswith', ('he', 'ha')) + self.checkequal(False, 'hello', 'startswith', ('lo', 'llo')) + self.checkequal(True, 'hello', 'startswith', ('hellox', 'hello')) + self.checkequal(False, 'hello', 'startswith', ()) + self.checkequal(True, 'helloworld', 'startswith', ('hellowo', + 'rld', 'lowo'), 3) + self.checkequal(False, 'helloworld', 'startswith', ('hellowo', 'ello', + 'rld'), 3) + self.checkequal(True, 'hello', 'startswith', ('lo', 'he'), 0, -1) + self.checkequal(False, 'hello', 'startswith', ('he', 'hel'), 0, 1) + self.checkequal(True, 'hello', 'startswith', ('he', 'hel'), 0, 2) + + self.checkraises(TypeError, 'hello', 'startswith', (42,)) + + def test_endswith(self): + self.checkequal(True, 'hello', 'endswith', 'lo') + self.checkequal(False, 'hello', 'endswith', 'he') + self.checkequal(True, 'hello', 'endswith', '') + self.checkequal(False, 'hello', 'endswith', 'hello world') + self.checkequal(False, 'helloworld', 'endswith', 'worl') + self.checkequal(True, 'helloworld', 'endswith', 'worl', 3, 9) + self.checkequal(True, 'helloworld', 'endswith', 'world', 3, 12) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', 1, 7) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', 2, 7) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', 3, 7) + self.checkequal(False, 'helloworld', 'endswith', 'lowo', 4, 7) + self.checkequal(False, 'helloworld', 'endswith', 'lowo', 3, 8) + self.checkequal(False, 'ab', 'endswith', 'ab', 0, 1) + self.checkequal(False, 'ab', 'endswith', 'ab', 0, 0) + + # test negative indices + self.checkequal(True, 'hello', 'endswith', 'lo', -2) + self.checkequal(False, 'hello', 'endswith', 'he', -2) + self.checkequal(True, 'hello', 'endswith', '', -3, -3) + self.checkequal(False, 'hello', 'endswith', 'hello world', -10, -2) + self.checkequal(False, 'helloworld', 'endswith', 'worl', -6) + self.checkequal(True, 'helloworld', 'endswith', 'worl', -5, -1) + self.checkequal(True, 'helloworld', 'endswith', 'worl', -5, 9) + self.checkequal(True, 'helloworld', 'endswith', 'world', -7, 12) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', -99, -3) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', -8, -3) + self.checkequal(True, 'helloworld', 'endswith', 'lowo', -7, -3) + self.checkequal(False, 'helloworld', 'endswith', 'lowo', 3, -4) + self.checkequal(False, 'helloworld', 'endswith', 'lowo', -8, -2) + + self.checkraises(TypeError, 'hello', 'endswith') + self.checkraises(TypeError, 'hello', 'endswith', 42) + + # test tuple arguments + self.checkequal(False, 'hello', 'endswith', ('he', 'ha')) + self.checkequal(True, 'hello', 'endswith', ('lo', 'llo')) + self.checkequal(True, 'hello', 'endswith', ('hellox', 'hello')) + self.checkequal(False, 'hello', 'endswith', ()) + self.checkequal(True, 'helloworld', 'endswith', ('hellowo', + 'rld', 'lowo'), 3) + self.checkequal(False, 'helloworld', 'endswith', ('hellowo', 'ello', + 'rld'), 3, -1) + self.checkequal(True, 'hello', 'endswith', ('hell', 'ell'), 0, -1) + self.checkequal(False, 'hello', 'endswith', ('he', 'hel'), 0, 1) + self.checkequal(True, 'hello', 'endswith', ('he', 'hell'), 0, 4) + + self.checkraises(TypeError, 'hello', 'endswith', (42,)) + + def test___contains__(self): + self.checkop(True, operator.contains, '', '') # vereq('' in '', True) + self.checkop(True, operator.contains, 'abc', '') # vereq('' in 'abc', True) + self.checkop(False, operator.contains, 'abc', '\0') # vereq('\0' in 'abc', False) + self.checkop(True, operator.contains, '\0abc', '\0') # vereq('\0' in '\0abc', 
True) + self.checkop(True, operator.contains, 'abc\0', '\0') # vereq('\0' in 'abc\0', True) + self.checkop(True, operator.contains, '\0abc', 'a') # vereq('a' in '\0abc', True) + self.checkop(True, operator.contains, 'asdf', 'asdf') # vereq('asdf' in 'asdf', True) + self.checkop(False, operator.contains, 'asd', 'asdf') # vereq('asdf' in 'asd', False) + self.checkop(False, operator.contains, '', 'asdf') # vereq('asdf' in '', False) + + def test_subscript(self): + self.checkop(u'a', operator.getitem, 'abc', 0) + self.checkop(u'c', operator.getitem, 'abc', -1) + self.checkop(u'a', operator.getitem, 'abc', 0L) + self.checkop(u'abc', operator.getitem, 'abc', slice(0, 3)) + self.checkop(u'abc', operator.getitem, 'abc', slice(0, 1000)) + self.checkop(u'a', operator.getitem, 'abc', slice(0, 1)) + self.checkop(u'', operator.getitem, 'abc', slice(0, 0)) + # FIXME What about negative indizes? This is handled differently by [] and __getitem__(slice) + + self.checkopraises(TypeError, operator.getitem, 'abc', 'def') + + def test_slice(self): + self.checkop('abc', operator.getslice, 'abc', 0, 1000) + self.checkop('abc', operator.getslice, 'abc', 0, 3) + self.checkop('ab', operator.getslice, 'abc', 0, 2) + self.checkop('bc', operator.getslice, 'abc', 1, 3) + self.checkop('b', operator.getslice, 'abc', 1, 2) + self.checkop('', operator.getslice, 'abc', 2, 2) + self.checkop('', operator.getslice, 'abc', 1000, 1000) + self.checkop('', operator.getslice, 'abc', 2000, 1000) + self.checkop('', operator.getslice, 'abc', 2, 1) + # FIXME What about negative indizes? This is handled differently by [] and __getslice__ + + self.checkopraises(TypeError, operator.getslice, 'abc', 'def') + + def test_mul(self): + self.checkop('', operator.mul, 'abc', -1) + self.checkop('', operator.mul, 'abc', 0) + self.checkop('abc', operator.mul, 'abc', 1) + self.checkop('abcabcabc', operator.mul, 'abc', 3) + self.checkopraises(TypeError, operator.mul, 'abc') + self.checkopraises(TypeError, operator.mul, 'abc', '') + self.checkopraises(OverflowError, operator.mul, 10000*'abc', 2000000000) + # XXX: on a 64-bit system, this doesn't raise an overflow error, + # but either raises a MemoryError, or succeeds (if you have 54TiB) + #self.checkraises(OverflowError, 10000*'abc', '__mul__', 2000000000) + + def test_join(self): + # join now works with any sequence type + # moved here, because the argument order is + # different in string.join (see the test in + # test.test_string.StringTest.test_join) + self.checkequal('a b c d', ' ', 'join', ['a', 'b', 'c', 'd']) + self.checkequal('abcd', '', 'join', ('a', 'b', 'c', 'd')) + self.checkequal('bd', '', 'join', ('', 'b', '', 'd')) + self.checkequal('ac', '', 'join', ('a', '', 'c', '')) + self.checkequal('w x y z', ' ', 'join', Sequence()) + self.checkequal('abc', 'a', 'join', ('abc',)) + self.checkequal('z', 'a', 'join', UserList(['z'])) + if test_support.have_unicode: + self.checkequal(unicode('a.b.c'), unicode('.'), 'join', ['a', 'b', 'c']) + self.checkequal(unicode('a.b.c'), '.', 'join', [unicode('a'), 'b', 'c']) + self.checkequal(unicode('a.b.c'), '.', 'join', ['a', unicode('b'), 'c']) + self.checkequal(unicode('a.b.c'), '.', 'join', ['a', 'b', unicode('c')]) + self.checkraises(TypeError, '.', 'join', ['a', unicode('b'), 3]) + for i in [5, 25, 125]: + self.checkequal(((('a' * i) + '-') * i)[:-1], '-', 'join', + ['a' * i] * i) + self.checkequal(((('a' * i) + '-') * i)[:-1], '-', 'join', + ('a' * i,) * i) + + self.checkraises(TypeError, ' ', 'join', BadSeq1()) + self.checkequal('a b c', ' ', 
'join', BadSeq2()) + + self.checkraises(TypeError, ' ', 'join') + self.checkraises(TypeError, ' ', 'join', 7) + self.checkraises(TypeError, ' ', 'join', Sequence([7, 'hello', 123L])) + try: + def f(): + yield 4 + "" + self.fixtype(' ').join(f()) + except TypeError, e: + if '+' not in str(e): + self.fail('join() ate exception message') + else: + self.fail('exception not raised') + + def test_formatting(self): + self.checkop('+hello+', operator.mod, '+%s+', 'hello') + self.checkop('+10+', operator.mod, '+%d+', 10) + self.checkop('a', operator.mod, "%c", "a") + self.checkop('a', operator.mod, "%c", "a") + self.checkop('"', operator.mod, "%c", 34) + self.checkop('$', operator.mod, "%c", 36) + self.checkop('10', operator.mod, "%d", 10) + self.checkop('\x7f', operator.mod, "%c", 0x7f) + + for ordinal in (-100, 0x200000): + # unicode raises ValueError, str raises OverflowError + self.checkopraises((ValueError, OverflowError), operator.mod, '%c', ordinal) + + self.checkop(' 42', operator.mod, '%3ld', 42) + self.checkop('0042.00', operator.mod, '%07.2f', 42) + self.checkop('0042.00', operator.mod, '%07.2F', 42) + + self.checkopraises(TypeError, operator.mod, 'abc') + self.checkopraises(TypeError, operator.mod, '%(foo)s', 42) + self.checkopraises(TypeError, operator.mod, '%s%s', (42,)) + self.checkopraises(TypeError, operator.mod, '%c', (None,)) + self.checkopraises(ValueError, operator.mod, '%(foo', {}) + self.checkopraises(TypeError, operator.mod, '%(foo)s %(bar)s', ('foo', 42)) + + # argument names with properly nested brackets are supported + self.checkop('bar', operator.mod, '%((foo))s', {'(foo)': 'bar'}) + + # 100 is a magic number in PyUnicode_Format, this forces a resize + self.checkop(103*'a'+'x', operator.mod, '%sx', 103*'a') + + self.checkopraises(TypeError, operator.mod, '%*s', ('foo', 'bar')) + self.checkopraises(TypeError, operator.mod, '%10.*f', ('foo', 42.)) + self.checkopraises(ValueError, operator.mod, '%10', (42,)) + + def test_floatformatting(self): + # float formatting + # XXX changed for PyPy to be faster + for prec, value in [(0, 3.141592655), + (1, 0.01), + (2, 120394), + (5, 23.01958), + (20, 141414.51321098), + (49, 0.01), + (50, 1e50), + (99, 123)]: + format = '%%.%if' % prec + try: + self.checkopcall(operator.mod, format, value) + except OverflowError: + self.failUnless(abs(value) < 1e25 and prec >= 67, + "OverflowError on small examples") + + def test_inplace_rewrites(self): + # Check that strings don't copy and modify cached single-character strings + self.checkequal('a', 'A', 'lower') + self.checkequal(True, 'A', 'isupper') + self.checkequal('A', 'a', 'upper') + self.checkequal(True, 'a', 'islower') + + self.checkequal('a', 'A', 'replace', 'A', 'a') + self.checkequal(True, 'A', 'isupper') + + self.checkequal('A', 'a', 'capitalize') + self.checkequal(True, 'a', 'islower') + + self.checkequal('A', 'a', 'swapcase') + self.checkequal(True, 'a', 'islower') + + self.checkequal('A', 'a', 'title') + self.checkequal(True, 'a', 'islower') + + def test_partition(self): + + self.checkequal(('this is the par', 'ti', 'tion method'), + 'this is the partition method', 'partition', 'ti') + + # from raymond's original specification + S = 'http://www.python.org' + self.checkequal(('http', '://', 'www.python.org'), S, 'partition', '://') + self.checkequal(('http://www.python.org', '', ''), S, 'partition', '?') + self.checkequal(('', 'http://', 'www.python.org'), S, 'partition', 'http://') + self.checkequal(('http://www.python.', 'org', ''), S, 'partition', 'org') + + 
self.checkraises(ValueError, S, 'partition', '') + self.checkraises(TypeError, S, 'partition', None) + + def test_rpartition(self): + + self.checkequal(('this is the rparti', 'ti', 'on method'), + 'this is the rpartition method', 'rpartition', 'ti') + + # from raymond's original specification + S = 'http://www.python.org' + self.checkequal(('http', '://', 'www.python.org'), S, 'rpartition', '://') + self.checkequal(('', '', 'http://www.python.org'), S, 'rpartition', '?') + self.checkequal(('', 'http://', 'www.python.org'), S, 'rpartition', 'http://') + self.checkequal(('http://www.python.', 'org', ''), S, 'rpartition', 'org') + + self.checkraises(ValueError, S, 'rpartition', '') + self.checkraises(TypeError, S, 'rpartition', None) + + +class MixinStrStringUserStringTest: + # Additional tests for 8bit strings, i.e. str, UserString and + # the string module + + def test_maketrans(self): + self.assertEqual( + ''.join(map(chr, xrange(256))).replace('abc', 'xyz'), + string.maketrans('abc', 'xyz') + ) + self.assertRaises(ValueError, string.maketrans, 'abc', 'xyzw') + + def test_translate(self): + table = string.maketrans('abc', 'xyz') + self.checkequal('xyzxyz', 'xyzabcdef', 'translate', table, 'def') + + table = string.maketrans('a', 'A') + self.checkequal('Abc', 'abc', 'translate', table) + self.checkequal('xyz', 'xyz', 'translate', table) + self.checkequal('yz', 'xyz', 'translate', table, 'x') + self.checkraises(ValueError, 'xyz', 'translate', 'too short', 'strip') + self.checkraises(ValueError, 'xyz', 'translate', 'too short') + + +class MixinStrUserStringTest: + # Additional tests that only work with + # 8bit compatible object, i.e. str and UserString + + if test_support.have_unicode: + def test_encoding_decoding(self): + codecs = [('rot13', 'uryyb jbeyq'), + ('base64', 'aGVsbG8gd29ybGQ=\n'), + ('hex', '68656c6c6f20776f726c64'), + ('uu', 'begin 666 \n+:&5L;&\\@=V]R;&0 \n \nend\n')] + for encoding, data in codecs: + self.checkequal(data, 'hello world', 'encode', encoding) + self.checkequal('hello world', data, 'decode', encoding) + # zlib is optional, so we make the test optional too... + try: + import zlib + except ImportError: + pass + else: + data = 'x\x9c\xcbH\xcd\xc9\xc9W(\xcf/\xcaI\x01\x00\x1a\x0b\x04]' + self.checkequal(data, 'hello world', 'encode', 'zlib') + self.checkequal('hello world', data, 'decode', 'zlib') + + self.checkraises(TypeError, 'xyz', 'decode', 42) + self.checkraises(TypeError, 'xyz', 'encode', 42) + + +class MixinStrUnicodeTest: + # Additional tests that only work with str and unicode. + + def test_bug1001011(self): + # Make sure join returns a NEW object for single item sequences + # involving a subclass. + # Make sure that it is of the appropriate type. + # Check the optimisation still occurs for standard objects. + t = self.type2test + class subclass(t): + pass + s1 = subclass("abcd") + s2 = t().join([s1]) + self.assert_(s1 is not s2) + self.assert_(type(s2) is t) + + + # XXX impl. specific optimisation + #s1 = t("abcd") + #s2 = t().join([s1]) + #self.assert_(s1 is s2) + + # Should also test mixed-type join. + if t is unicode: + s1 = subclass("abcd") + s2 = "".join([s1]) + self.assert_(s1 is not s2) + self.assert_(type(s2) is t) + + # XXX impl. specific opt. + #s1 = t("abcd") + #s2 = "".join([s1]) + #self.assert_(s1 is s2) + + elif t is str: + s1 = subclass("abcd") + s2 = u"".join([s1]) + self.assert_(s1 is not s2) + self.assert_(type(s2) is unicode) # promotes! 
+ + s1 = t("abcd") + s2 = u"".join([s1]) + self.assert_(s1 is not s2) + self.assert_(type(s2) is unicode) # promotes! + + else: + self.fail("unexpected type for MixinStrUnicodeTest %r" % t) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,200 @@ +import unittest +from test import test_support + +from test.test_support import verify, verbose +import sys +import warnings + +warnings.filterwarnings("ignore", + "the gopherlib module is deprecated", + DeprecationWarning, + "") + +class AllTest(unittest.TestCase): + + def check_all(self, modname): + names = {} + original_sys_modules = sys.modules.copy() + try: + exec "import %s" % modname in names + except ImportError: + # Silent fail here seems the best route since some modules + # may not be available in all environments. + # We restore sys.modules to avoid leaving broken modules behind, + # but we must not remove built-in modules from sys.modules + # (because they can't be re-imported, typically) + for name in sys.modules.keys(): + if name in original_sys_modules: + continue + # XXX hackish + mod = sys.modules[name] + if not hasattr(mod, '__file__'): + continue + if (mod.__file__.lower().endswith('.py') or + mod.__file__.lower().endswith('.pyc') or + mod.__file__.lower().endswith('.pyo')): + del sys.modules[name] + return + verify(hasattr(sys.modules[modname], "__all__"), + "%s has no __all__ attribute" % modname) + names = {} + exec "from %s import *" % modname in names + if names.has_key("__builtins__"): + del names["__builtins__"] + keys = set(names) + all = set(sys.modules[modname].__all__) + verify(keys==all, "%s != %s" % (keys, all)) + + def test_all(self): + if not sys.platform.startswith('java'): + # In case _socket fails to build, make this test fail more gracefully + # than an AttributeError somewhere deep in CGIHTTPServer. 
+ import _socket + + self.check_all("BaseHTTPServer") + self.check_all("Bastion") + self.check_all("CGIHTTPServer") + self.check_all("ConfigParser") + self.check_all("Cookie") + self.check_all("MimeWriter") + self.check_all("Queue") + self.check_all("SimpleHTTPServer") + self.check_all("SocketServer") + self.check_all("StringIO") + self.check_all("UserString") + self.check_all("aifc") + self.check_all("atexit") + self.check_all("audiodev") + self.check_all("base64") + self.check_all("bdb") + self.check_all("binhex") + self.check_all("calendar") + self.check_all("cgi") + self.check_all("cmd") + self.check_all("code") + self.check_all("codecs") + self.check_all("codeop") + self.check_all("colorsys") + self.check_all("commands") + self.check_all("compileall") + self.check_all("copy") + self.check_all("copy_reg") + self.check_all("csv") + self.check_all("dbhash") + self.check_all("decimal") + self.check_all("difflib") + self.check_all("dircache") + self.check_all("dis") + self.check_all("doctest") + self.check_all("dummy_thread") + self.check_all("dummy_threading") + self.check_all("filecmp") + self.check_all("fileinput") + self.check_all("fnmatch") + self.check_all("fpformat") + self.check_all("ftplib") + self.check_all("getopt") + self.check_all("getpass") + self.check_all("gettext") + self.check_all("glob") + self.check_all("gopherlib") + self.check_all("gzip") + self.check_all("heapq") + self.check_all("htmllib") + self.check_all("httplib") + self.check_all("ihooks") + self.check_all("imaplib") + self.check_all("imghdr") + self.check_all("imputil") + self.check_all("keyword") + self.check_all("linecache") + self.check_all("locale") + self.check_all("macpath") + self.check_all("macurl2path") + self.check_all("mailbox") + self.check_all("mailcap") + self.check_all("mhlib") + self.check_all("mimetools") + self.check_all("mimetypes") + self.check_all("mimify") + self.check_all("multifile") + self.check_all("netrc") + self.check_all("nntplib") + self.check_all("ntpath") + self.check_all("opcode") + self.check_all("optparse") + self.check_all("os") + self.check_all("os2emxpath") + self.check_all("pdb") + self.check_all("pickle") + self.check_all("pickletools") + self.check_all("pipes") + self.check_all("popen2") + self.check_all("poplib") + self.check_all("posixpath") + self.check_all("pprint") + self.check_all("profile") + self.check_all("pstats") + self.check_all("pty") + self.check_all("py_compile") + self.check_all("pyclbr") + self.check_all("quopri") + self.check_all("random") + self.check_all("re") + self.check_all("repr") + self.check_all("rexec") + self.check_all("rfc822") + self.check_all("rlcompleter") + self.check_all("robotparser") + self.check_all("sched") + self.check_all("sets") + self.check_all("sgmllib") + self.check_all("shelve") + self.check_all("shlex") + self.check_all("shutil") + self.check_all("smtpd") + self.check_all("smtplib") + self.check_all("sndhdr") + self.check_all("socket") + self.check_all("_strptime") + self.check_all("symtable") + self.check_all("tabnanny") + self.check_all("tarfile") + self.check_all("telnetlib") + self.check_all("tempfile") + self.check_all("textwrap") + self.check_all("threading") + self.check_all("timeit") + self.check_all("toaiff") + self.check_all("tokenize") + self.check_all("traceback") + self.check_all("tty") + self.check_all("unittest") + self.check_all("urllib") + self.check_all("urlparse") + self.check_all("uu") + self.check_all("warnings") + self.check_all("wave") + self.check_all("weakref") + self.check_all("webbrowser") + 
self.check_all("xdrlib") + self.check_all("zipfile") + + # rlcompleter needs special consideration; it import readline which + # initializes GNU readline which calls setlocale(LC_CTYPE, "")... :-( + try: + self.check_all("rlcompleter") + finally: + try: + import locale + except ImportError: + pass + else: + locale.setlocale(locale.LC_CTYPE, 'C') + + +def test_main(): + test_support.run_unittest(AllTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,1002 @@ +#! /usr/bin/env python +"""Test the arraymodule. + Roger E. Masse +""" + +import unittest +from test import test_support +#from weakref import proxy +import array, cStringIO, math +from cPickle import loads, dumps + +# XXX as we use the struct module we get struct.error when trying to push +# objects of invalid types or out of range into an array. If this issue +# is fixed, remove all mentions of struct.error in the sequel. +import struct + +class ArraySubclass(array.array): + pass + +class ArraySubclassWithKwargs(array.array): + def __init__(self, typecode, newarg=None): + array.array.__init__(typecode) + +tests = [] # list to accumulate all tests +typecodes = "cubBhHiIlLfd" + +class BadConstructorTest(unittest.TestCase): + + def test_constructor(self): + self.assertRaises(TypeError, array.array) + self.assertRaises(TypeError, array.array, spam=42) + self.assertRaises(TypeError, array.array, 'xx') + self.assertRaises(ValueError, array.array, 'x') + +tests.append(BadConstructorTest) + +class BaseTest(unittest.TestCase): + # Required class attributes (provided by subclasses + # typecode: the typecode to test + # example: an initializer usable in the constructor for this type + # smallerexample: the same length as example, but smaller + # biggerexample: the same length as example, but bigger + # outside: An entry that is not in example + # minitemsize: the minimum guaranteed itemsize + + def assertEntryEqual(self, entry1, entry2): + self.assertEqual(entry1, entry2) + + def badtypecode(self): + # Return a typecode that is different from our own + return typecodes[(typecodes.index(self.typecode)+1) % len(typecodes)] + + def test_constructor(self): + a = array.array(self.typecode) + self.assertEqual(a.typecode, self.typecode) + self.assert_(a.itemsize>=self.minitemsize) + self.assertRaises(TypeError, array.array, self.typecode, None) + + def test_len(self): + a = array.array(self.typecode) + a.append(self.example[0]) + self.assertEqual(len(a), 1) + + a = array.array(self.typecode, self.example) + self.assertEqual(len(a), len(self.example)) + + def test_buffer_info(self): + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.buffer_info, 42) + bi = a.buffer_info() + self.assert_(isinstance(bi, tuple)) + self.assertEqual(len(bi), 2) + self.assert_(isinstance(bi[0], (int, long))) + self.assert_(isinstance(bi[1], int)) + self.assertEqual(bi[1], len(a)) + + def test_byteswap(self): + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.byteswap, 42) + if a.itemsize in (1, 2, 4, 8): + b = array.array(self.typecode, self.example) + b.byteswap() + if a.itemsize==1: + self.assertEqual(a, b) + else: + self.assertNotEqual(a, b) + b.byteswap() + self.assertEqual(a, b) + + def test_copy(self): + import copy + a = 
array.array(self.typecode, self.example) + b = copy.copy(a) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + + def test_deepcopy(self): + import copy + a = array.array(self.typecode, self.example) + b = copy.deepcopy(a) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + + def test_pickle(self): + for protocol in (0, 1, 2): + a = array.array(self.typecode, self.example) + b = loads(dumps(a, protocol)) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + + a = ArraySubclass(self.typecode, self.example) + a.x = 10 + b = loads(dumps(a, protocol)) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + self.assertEqual(a.x, b.x) + self.assertEqual(type(a), type(b)) + + def test_pickle_for_empty_array(self): + for protocol in (0, 1, 2): + a = array.array(self.typecode) + b = loads(dumps(a, protocol)) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + + a = ArraySubclass(self.typecode) + a.x = 10 + b = loads(dumps(a, protocol)) + self.assertNotEqual(id(a), id(b)) + self.assertEqual(a, b) + self.assertEqual(a.x, b.x) + self.assertEqual(type(a), type(b)) + + def test_insert(self): + a = array.array(self.typecode, self.example) + a.insert(0, self.example[0]) + self.assertEqual(len(a), 1+len(self.example)) + self.assertEqual(a[0], a[1]) + self.assertRaises(TypeError, a.insert) + self.assertRaises(TypeError, a.insert, None) + self.assertRaises((TypeError, struct.error), a.insert, 0, None) + + a = array.array(self.typecode, self.example) + a.insert(-1, self.example[0]) + self.assertEqual( + a, + array.array( + self.typecode, + self.example[:-1] + self.example[:1] + self.example[-1:] + ) + ) + + a = array.array(self.typecode, self.example) + a.insert(-1000, self.example[0]) + self.assertEqual( + a, + array.array(self.typecode, self.example[:1] + self.example) + ) + + a = array.array(self.typecode, self.example) + a.insert(1000, self.example[0]) + self.assertEqual( + a, + array.array(self.typecode, self.example + self.example[:1]) + ) + + def test_tofromfile(self): + a = array.array(self.typecode, 2*self.example) + self.assertRaises(TypeError, a.tofile) + self.assertRaises(TypeError, a.tofile, cStringIO.StringIO()) + f = open(test_support.TESTFN, 'wb') + try: + a.tofile(f) + f.close() + b = array.array(self.typecode) + f = open(test_support.TESTFN, 'rb') + self.assertRaises(TypeError, b.fromfile) + self.assertRaises( + TypeError, + b.fromfile, + cStringIO.StringIO(), len(self.example) + ) + b.fromfile(f, len(self.example)) + self.assertEqual(b, array.array(self.typecode, self.example)) + self.assertNotEqual(a, b) + b.fromfile(f, len(self.example)) + self.assertEqual(a, b) + self.assertRaises(EOFError, b.fromfile, f, 1) + f.close() + finally: + if not f.closed: + f.close() + test_support.unlink(test_support.TESTFN) + + def test_tofromlist(self): + a = array.array(self.typecode, 2*self.example) + b = array.array(self.typecode) + self.assertRaises(TypeError, a.tolist, 42) + self.assertRaises(TypeError, b.fromlist) + self.assertRaises(TypeError, b.fromlist, 42) + self.assertRaises((TypeError, struct.error), b.fromlist, [None]) + b.fromlist(a.tolist()) + self.assertEqual(a, b) + + def test_tofromstring(self): + a = array.array(self.typecode, 2*self.example) + b = array.array(self.typecode) + self.assertRaises(TypeError, a.tostring, 42) + self.assertRaises(TypeError, b.fromstring) + self.assertRaises(TypeError, b.fromstring, 42) + b.fromstring(a.tostring()) + self.assertEqual(a, b) + if a.itemsize>1: + self.assertRaises(ValueError, b.fromstring, "x") + + 
def test_repr(self): + a = array.array(self.typecode, 2*self.example) + self.assertEqual(a, eval(repr(a), {"array": array.array})) + + a = array.array(self.typecode) + self.assertEqual(repr(a), "array('%s')" % self.typecode) + + def test_str(self): + a = array.array(self.typecode, 2*self.example) + str(a) + + def test_cmp(self): + a = array.array(self.typecode, self.example) + self.assert_((a == 42) is False) + self.assert_((a != 42) is True) + + self.assert_((a == a) is True) + self.assert_((a != a) is False) + self.assert_((a < a) is False) + self.assert_((a <= a) is True) + self.assert_((a > a) is False) + self.assert_((a >= a) is True) + + al = array.array(self.typecode, self.smallerexample) + ab = array.array(self.typecode, self.biggerexample) + + self.assert_((a == 2*a) is False) + self.assert_((a != 2*a) is True) + self.assert_((a < 2*a) is True) + self.assert_((a <= 2*a) is True) + self.assert_((a > 2*a) is False) + self.assert_((a >= 2*a) is False) + + self.assert_((a == al) is False) + self.assert_((a != al) is True) + self.assert_((a < al) is False) + self.assert_((a <= al) is False) + self.assert_((a > al) is True) + self.assert_((a >= al) is True) + + self.assert_((a == ab) is False) + self.assert_((a != ab) is True) + self.assert_((a < ab) is True) + self.assert_((a <= ab) is True) + self.assert_((a > ab) is False) + self.assert_((a >= ab) is False) + + def test_add(self): + a = array.array(self.typecode, self.example) \ + + array.array(self.typecode, self.example[::-1]) + self.assertEqual( + a, + array.array(self.typecode, self.example + self.example[::-1]) + ) + + b = array.array(self.badtypecode()) + self.assertRaises(TypeError, a.__add__, b) + + self.assertRaises(TypeError, a.__add__, "bad") + + def test_iadd(self): + a = array.array(self.typecode, self.example[::-1]) + b = a + a += array.array(self.typecode, 2*self.example) + self.assert_(a is b) + self.assertEqual( + a, + array.array(self.typecode, self.example[::-1]+2*self.example) + ) + + b = array.array(self.badtypecode()) + self.assertRaises(TypeError, a.__add__, b) + + self.assertRaises(TypeError, a.__iadd__, "bad") + + def test_mul(self): + a = 5*array.array(self.typecode, self.example) + self.assertEqual( + a, + array.array(self.typecode, 5*self.example) + ) + + a = array.array(self.typecode, self.example)*5 + self.assertEqual( + a, + array.array(self.typecode, self.example*5) + ) + + a = 0*array.array(self.typecode, self.example) + self.assertEqual( + a, + array.array(self.typecode) + ) + + a = (-1)*array.array(self.typecode, self.example) + self.assertEqual( + a, + array.array(self.typecode) + ) + + self.assertRaises(TypeError, a.__mul__, "bad") + + def test_imul(self): + a = array.array(self.typecode, self.example) + b = a + + a *= 5 + self.assert_(a is b) + self.assertEqual( + a, + array.array(self.typecode, 5*self.example) + ) + + a *= 0 + self.assert_(a is b) + self.assertEqual(a, array.array(self.typecode)) + + a *= 1000 + self.assert_(a is b) + self.assertEqual(a, array.array(self.typecode)) + + a *= -1 + self.assert_(a is b) + self.assertEqual(a, array.array(self.typecode)) + + a = array.array(self.typecode, self.example) + a *= -1 + self.assertEqual(a, array.array(self.typecode)) + + self.assertRaises(TypeError, a.__imul__, "bad") + + def test_getitem(self): + a = array.array(self.typecode, self.example) + self.assertEntryEqual(a[0], self.example[0]) + self.assertEntryEqual(a[0L], self.example[0]) + self.assertEntryEqual(a[-1], self.example[-1]) + self.assertEntryEqual(a[-1L], self.example[-1]) + 
self.assertEntryEqual(a[len(self.example)-1], self.example[-1]) + self.assertEntryEqual(a[-len(self.example)], self.example[0]) + self.assertRaises(TypeError, a.__getitem__) + self.assertRaises(IndexError, a.__getitem__, len(self.example)) + self.assertRaises(IndexError, a.__getitem__, -len(self.example)-1) + + def test_setitem(self): + a = array.array(self.typecode, self.example) + a[0] = a[-1] + self.assertEntryEqual(a[0], a[-1]) + + a = array.array(self.typecode, self.example) + a[0L] = a[-1] + self.assertEntryEqual(a[0], a[-1]) + + a = array.array(self.typecode, self.example) + a[-1] = a[0] + self.assertEntryEqual(a[0], a[-1]) + + a = array.array(self.typecode, self.example) + a[-1L] = a[0] + self.assertEntryEqual(a[0], a[-1]) + + a = array.array(self.typecode, self.example) + a[len(self.example)-1] = a[0] + self.assertEntryEqual(a[0], a[-1]) + + a = array.array(self.typecode, self.example) + a[-len(self.example)] = a[-1] + self.assertEntryEqual(a[0], a[-1]) + + self.assertRaises(TypeError, a.__setitem__) + self.assertRaises(TypeError, a.__setitem__, None) + self.assertRaises((TypeError, struct.error), a.__setitem__, 0, None) + self.assertRaises( + IndexError, + a.__setitem__, + len(self.example), self.example[0] + ) + self.assertRaises( + IndexError, + a.__setitem__, + -len(self.example)-1, self.example[0] + ) + + def test_delitem(self): + a = array.array(self.typecode, self.example) + del a[0] + self.assertEqual( + a, + array.array(self.typecode, self.example[1:]) + ) + + a = array.array(self.typecode, self.example) + del a[-1] + self.assertEqual( + a, + array.array(self.typecode, self.example[:-1]) + ) + + a = array.array(self.typecode, self.example) + del a[len(self.example)-1] + self.assertEqual( + a, + array.array(self.typecode, self.example[:-1]) + ) + + a = array.array(self.typecode, self.example) + del a[-len(self.example)] + self.assertEqual( + a, + array.array(self.typecode, self.example[1:]) + ) + + self.assertRaises(TypeError, a.__delitem__) + self.assertRaises(TypeError, a.__delitem__, None) + self.assertRaises(IndexError, a.__delitem__, len(self.example)) + self.assertRaises(IndexError, a.__delitem__, -len(self.example)-1) + + def test_getslice(self): + a = array.array(self.typecode, self.example) + self.assertEqual(a[:], a) + + self.assertEqual( + a[1:], + array.array(self.typecode, self.example[1:]) + ) + + self.assertEqual( + a[:1], + array.array(self.typecode, self.example[:1]) + ) + + self.assertEqual( + a[:-1], + array.array(self.typecode, self.example[:-1]) + ) + + self.assertEqual( + a[-1:], + array.array(self.typecode, self.example[-1:]) + ) + + self.assertEqual( + a[-1:-1], + array.array(self.typecode) + ) + + self.assertEqual( + a[2:1], + array.array(self.typecode) + ) + + self.assertEqual( + a[1000:], + array.array(self.typecode) + ) + self.assertEqual(a[-1000:], a) + self.assertEqual(a[:1000], a) + self.assertEqual( + a[:-1000], + array.array(self.typecode) + ) + self.assertEqual(a[-1000:1000], a) + self.assertEqual( + a[2000:1000], + array.array(self.typecode) + ) + + def test_setslice(self): + a = array.array(self.typecode, self.example) + a[:1] = a + self.assertEqual( + a, + array.array(self.typecode, self.example + self.example[1:]) + ) + + a = array.array(self.typecode, self.example) + a[:-1] = a + self.assertEqual( + a, + array.array(self.typecode, self.example + self.example[-1:]) + ) + + a = array.array(self.typecode, self.example) + a[-1:] = a + self.assertEqual( + a, + array.array(self.typecode, self.example[:-1] + self.example) + ) + + a = 
array.array(self.typecode, self.example) + a[1:] = a + self.assertEqual( + a, + array.array(self.typecode, self.example[:1] + self.example) + ) + + a = array.array(self.typecode, self.example) + a[1:-1] = a + self.assertEqual( + a, + array.array( + self.typecode, + self.example[:1] + self.example + self.example[-1:] + ) + ) + + a = array.array(self.typecode, self.example) + a[1000:] = a + self.assertEqual( + a, + array.array(self.typecode, 2*self.example) + ) + + a = array.array(self.typecode, self.example) + a[-1000:] = a + self.assertEqual( + a, + array.array(self.typecode, self.example) + ) + + a = array.array(self.typecode, self.example) + a[:1000] = a + self.assertEqual( + a, + array.array(self.typecode, self.example) + ) + + a = array.array(self.typecode, self.example) + a[:-1000] = a + self.assertEqual( + a, + array.array(self.typecode, 2*self.example) + ) + + a = array.array(self.typecode, self.example) + a[1:0] = a + self.assertEqual( + a, + array.array(self.typecode, self.example[:1] + self.example + self.example[1:]) + ) + + a = array.array(self.typecode, self.example) + a[2000:1000] = a + self.assertEqual( + a, + array.array(self.typecode, 2*self.example) + ) + + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.__setslice__, 0, 0, None) + self.assertRaises(TypeError, a.__setitem__, slice(0, 1), None) + + b = array.array(self.badtypecode()) + self.assertRaises(TypeError, a.__setslice__, 0, 0, b) + self.assertRaises(TypeError, a.__setitem__, slice(0, 1), b) + + def test_index(self): + example = 2*self.example + a = array.array(self.typecode, example) + self.assertRaises(TypeError, a.index) + for x in example: + self.assertEqual(a.index(x), example.index(x)) + self.assertRaises(ValueError, a.index, None) + self.assertRaises(ValueError, a.index, self.outside) + + def test_count(self): + example = 2*self.example + a = array.array(self.typecode, example) + self.assertRaises(TypeError, a.count) + for x in example: + self.assertEqual(a.count(x), example.count(x)) + self.assertEqual(a.count(self.outside), 0) + self.assertEqual(a.count(None), 0) + + def test_remove(self): + for x in self.example: + example = 2*self.example + a = array.array(self.typecode, example) + pos = example.index(x) + example2 = example[:pos] + example[pos+1:] + a.remove(x) + self.assertEqual(a, array.array(self.typecode, example2)) + + a = array.array(self.typecode, self.example) + self.assertRaises(ValueError, a.remove, self.outside) + + self.assertRaises(ValueError, a.remove, None) + + def test_pop(self): + a = array.array(self.typecode) + self.assertRaises(IndexError, a.pop) + + a = array.array(self.typecode, 2*self.example) + self.assertRaises(TypeError, a.pop, 42, 42) + self.assertRaises(TypeError, a.pop, None) + self.assertRaises(IndexError, a.pop, len(a)) + self.assertRaises(IndexError, a.pop, -len(a)-1) + + self.assertEntryEqual(a.pop(0), self.example[0]) + self.assertEqual( + a, + array.array(self.typecode, self.example[1:]+self.example) + ) + self.assertEntryEqual(a.pop(1), self.example[2]) + self.assertEqual( + a, + array.array(self.typecode, self.example[1:2]+self.example[3:]+self.example) + ) + self.assertEntryEqual(a.pop(0), self.example[1]) + self.assertEntryEqual(a.pop(), self.example[-1]) + self.assertEqual( + a, + array.array(self.typecode, self.example[3:]+self.example[:-1]) + ) + + def test_reverse(self): + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.reverse, 42) + a.reverse() + self.assertEqual( + a, + array.array(self.typecode, 
self.example[::-1]) + ) + + def test_extend(self): + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.extend) + a.extend(array.array(self.typecode, self.example[::-1])) + self.assertEqual( + a, + array.array(self.typecode, self.example+self.example[::-1]) + ) + + b = array.array(self.badtypecode()) + self.assertRaises(TypeError, a.extend, b) + + a = array.array(self.typecode, self.example) + a.extend(self.example[::-1]) + self.assertEqual( + a, + array.array(self.typecode, self.example+self.example[::-1]) + ) + + def test_constructor_with_iterable_argument(self): + a = array.array(self.typecode, iter(self.example)) + b = array.array(self.typecode, self.example) + self.assertEqual(a, b) + + # non-iterable argument + self.assertRaises(TypeError, array.array, self.typecode, 10) + + # pass through errors raised in __iter__ + class A: + def __iter__(self): + raise UnicodeError + self.assertRaises(UnicodeError, array.array, self.typecode, A()) + + # pass through errors raised in next() + def B(): + raise UnicodeError + yield None + self.assertRaises(UnicodeError, array.array, self.typecode, B()) + + def test_coveritertraverse(self): + try: + import gc + except ImportError: + return + a = array.array(self.typecode) + l = [iter(a)] + l.append(l) + gc.collect() + + def test_buffer(self): + a = array.array(self.typecode, self.example) + b = buffer(a) + self.assertEqual(b[0], a.tostring()[0]) + + def DONOTtest_weakref(self): + # XXX disabled until PyPy grows weakref support + s = array.array(self.typecode, self.example) + p = proxy(s) + self.assertEqual(p.tostring(), s.tostring()) + s = None + self.assertRaises(ReferenceError, len, p) + + def test_bug_782369(self): + import sys + if hasattr(sys, "getrefcount"): + for i in range(10): + b = array.array('B', range(64)) + rc = sys.getrefcount(10) + for i in range(10): + b = array.array('B', range(64)) + self.assertEqual(rc, sys.getrefcount(10)) + + def test_subclass_with_kwargs(self): + # SF bug #1486663 -- this used to erroneously raise a TypeError + ArraySubclassWithKwargs('b', newarg=1) + + +class StringTest(BaseTest): + + def test_setitem(self): + super(StringTest, self).test_setitem() + a = array.array(self.typecode, self.example) + self.assertRaises((TypeError, struct.error), a.__setitem__, 0, self.example[:2]) + +class CharacterTest(StringTest): + typecode = 'c' + example = '\x01azAZ\x00\xfe' + smallerexample = '\x01azAY\x00\xfe' + biggerexample = '\x01azAZ\x00\xff' + outside = '\x33' + minitemsize = 1 + + def test_subbclassing(self): + class EditableString(array.array): + def __new__(cls, s, *args, **kwargs): + return array.array.__new__(cls, 'c', s) + + def __init__(self, s, color='blue'): + array.array.__init__(self, 'c', s) + self.color = color + + def strip(self): + self[:] = array.array('c', self.tostring().strip()) + + def __repr__(self): + return 'EditableString(%r)' % self.tostring() + + s = EditableString("\ttest\r\n") + s.strip() + self.assertEqual(s.tostring(), "test") + + self.assertEqual(s.color, "blue") + s.color = "red" + self.assertEqual(s.color, "red") + self.assertEqual(s.__dict__.keys(), ["color"]) + + def test_nounicode(self): + a = array.array(self.typecode, self.example) + self.assertRaises(ValueError, a.fromunicode, unicode('')) + self.assertRaises(ValueError, a.tounicode) + +tests.append(CharacterTest) + +if test_support.have_unicode: + class UnicodeTest(StringTest): + typecode = 'u' + example = unicode(r'\x01\u263a\x00\ufeff', 'unicode-escape') + smallerexample = 
unicode(r'\x01\u263a\x00\ufefe', 'unicode-escape') + biggerexample = unicode(r'\x01\u263a\x01\ufeff', 'unicode-escape') + outside = unicode('\x33') + minitemsize = 2 + + def test_unicode(self): + self.assertRaises((TypeError, struct.error), array.array, 'b', unicode('foo', 'ascii')) + + a = array.array('u', unicode(r'\xa0\xc2\u1234', 'unicode-escape')) + a.fromunicode(unicode(' ', 'ascii')) + a.fromunicode(unicode('', 'ascii')) + a.fromunicode(unicode('', 'ascii')) + a.fromunicode(unicode(r'\x11abc\xff\u1234', 'unicode-escape')) + s = a.tounicode() + self.assertEqual( + s, + unicode(r'\xa0\xc2\u1234 \x11abc\xff\u1234', 'unicode-escape') + ) + + s = unicode(r'\x00="\'a\\b\x80\xff\u0000\u0001\u1234', 'unicode-escape') + a = array.array('u', s) + self.assertEqual( + repr(a), + r"""array('u', u'\x00="\'a\\b\x80\xff\x00\x01\u1234')""" + ) + + self.assertRaises(TypeError, a.fromunicode) + + tests.append(UnicodeTest) + +class NumberTest(BaseTest): + + def test_extslice(self): + a = array.array(self.typecode, range(5)) + self.assertEqual(a[::], a) + self.assertEqual(a[::2], array.array(self.typecode, [0,2,4])) + self.assertEqual(a[1::2], array.array(self.typecode, [1,3])) + self.assertEqual(a[::-1], array.array(self.typecode, [4,3,2,1,0])) + self.assertEqual(a[::-2], array.array(self.typecode, [4,2,0])) + self.assertEqual(a[3::-2], array.array(self.typecode, [3,1])) + self.assertEqual(a[-100:100:], a) + self.assertEqual(a[100:-100:-1], a[::-1]) + self.assertEqual(a[-100L:100L:2L], array.array(self.typecode, [0,2,4])) + self.assertEqual(a[1000:2000:2], array.array(self.typecode, [])) + self.assertEqual(a[-1000:-2000:-2], array.array(self.typecode, [])) + + def test_delslice(self): + a = array.array(self.typecode, range(5)) + del a[::2] + self.assertEqual(a, array.array(self.typecode, [1,3])) + a = array.array(self.typecode, range(5)) + del a[1::2] + self.assertEqual(a, array.array(self.typecode, [0,2,4])) + a = array.array(self.typecode, range(5)) + del a[1::-2] + self.assertEqual(a, array.array(self.typecode, [0,2,3,4])) + a = array.array(self.typecode, range(10)) + del a[::1000] + self.assertEqual(a, array.array(self.typecode, [1,2,3,4,5,6,7,8,9])) + + def test_assignment(self): + a = array.array(self.typecode, range(10)) + a[::2] = array.array(self.typecode, [42]*5) + self.assertEqual(a, array.array(self.typecode, [42, 1, 42, 3, 42, 5, 42, 7, 42, 9])) + a = array.array(self.typecode, range(10)) + a[::-4] = array.array(self.typecode, [10]*3) + self.assertEqual(a, array.array(self.typecode, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10])) + a = array.array(self.typecode, range(4)) + a[::-1] = a + self.assertEqual(a, array.array(self.typecode, [3, 2, 1, 0])) + a = array.array(self.typecode, range(10)) + b = a[:] + c = a[:] + ins = array.array(self.typecode, range(2)) + a[2:3] = ins + b[slice(2,3)] = ins + c[2:3:] = ins + + def test_iterationcontains(self): + a = array.array(self.typecode, range(10)) + self.assertEqual(list(a), range(10)) + b = array.array(self.typecode, [20]) + self.assertEqual(a[-1] in a, True) + self.assertEqual(b[0] not in a, True) + + def check_overflow(self, lower, upper): + # method to be used by subclasses + + # should not overflow assigning lower limit + a = array.array(self.typecode, [lower]) + a[0] = lower + # should overflow assigning less than lower limit + self.assertRaises((OverflowError, struct.error, ValueError), array.array, self.typecode, [lower-1]) + self.assertRaises((OverflowError, struct.error, ValueError), a.__setitem__, 0, lower-1) + # should not overflow assigning upper 
limit + a = array.array(self.typecode, [upper]) + a[0] = upper + # should overflow assigning more than upper limit + self.assertRaises((OverflowError, struct.error), array.array, self.typecode, [upper+1]) + self.assertRaises((OverflowError, struct.error), a.__setitem__, 0, upper+1) + + def test_subclassing(self): + typecode = self.typecode + class ExaggeratingArray(array.array): + __slots__ = ['offset'] + + def __new__(cls, typecode, data, offset): + return array.array.__new__(cls, typecode, data) + + def __init__(self, typecode, data, offset): + self.offset = offset + + def __getitem__(self, i): + return array.array.__getitem__(self, i) + self.offset + + a = ExaggeratingArray(self.typecode, [3, 6, 7, 11], 4) + self.assertEntryEqual(a[0], 7) + + self.assertRaises(AttributeError, setattr, a, "color", "blue") + +class SignedNumberTest(NumberTest): + example = [-1, 0, 1, 42, 0x7f] + smallerexample = [-1, 0, 1, 42, 0x7e] + biggerexample = [-1, 0, 1, 43, 0x7f] + outside = 23 + + def test_overflow(self): + a = array.array(self.typecode) + lower = -1 * long(pow(2, a.itemsize * 8 - 1)) + upper = long(pow(2, a.itemsize * 8 - 1)) - 1L + self.check_overflow(lower, upper) + +class UnsignedNumberTest(NumberTest): + example = [0, 1, 17, 23, 42, 0xff] + smallerexample = [0, 1, 17, 23, 42, 0xfe] + biggerexample = [0, 1, 17, 23, 43, 0xff] + outside = 0xaa + + def test_overflow(self): + a = array.array(self.typecode) + lower = 0 + upper = long(pow(2, a.itemsize * 8)) - 1L + self.check_overflow(lower, upper) + + +class ByteTest(SignedNumberTest): + typecode = 'b' + minitemsize = 1 +tests.append(ByteTest) + +class UnsignedByteTest(UnsignedNumberTest): + typecode = 'B' + minitemsize = 1 +tests.append(UnsignedByteTest) + +class ShortTest(SignedNumberTest): + typecode = 'h' + minitemsize = 2 +tests.append(ShortTest) + +class UnsignedShortTest(UnsignedNumberTest): + typecode = 'H' + minitemsize = 2 +tests.append(UnsignedShortTest) + +class IntTest(SignedNumberTest): + typecode = 'i' + minitemsize = 2 +tests.append(IntTest) + +class UnsignedIntTest(UnsignedNumberTest): + typecode = 'I' + minitemsize = 2 +tests.append(UnsignedIntTest) + +class LongTest(SignedNumberTest): + typecode = 'l' + minitemsize = 4 +tests.append(LongTest) + +class UnsignedLongTest(UnsignedNumberTest): + typecode = 'L' + minitemsize = 4 +tests.append(UnsignedLongTest) + +class FPTest(NumberTest): + example = [-42.0, 0, 42, 1e5, -1e10] + smallerexample = [-42.0, 0, 42, 1e5, -2e10] + biggerexample = [-42.0, 0, 42, 1e5, 1e10] + outside = 23 + + def assertEntryEqual(self, entry1, entry2): + self.assertAlmostEqual(entry1, entry2) + + def test_byteswap(self): + a = array.array(self.typecode, self.example) + self.assertRaises(TypeError, a.byteswap, 42) + if a.itemsize in (1, 2, 4, 8): + b = array.array(self.typecode, self.example) + b.byteswap() + if a.itemsize==1: + self.assertEqual(a, b) + else: + # On alphas treating the byte swapped bit patters as + # floats/doubles results in floating point exceptions + # => compare the 8bit string values instead + self.assertNotEqual(a.tostring(), b.tostring()) + b.byteswap() + self.assertEqual(a, b) + +class FloatTest(FPTest): + typecode = 'f' + minitemsize = 4 +tests.append(FloatTest) + +class DoubleTest(FPTest): + typecode = 'd' + minitemsize = 8 +tests.append(DoubleTest) + +def test_main(verbose=None): + import sys + + test_support.run_unittest(*tests) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + 
test_support.run_unittest(*tests) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,208 @@ +import unittest +from test import test_support +import base64 + + + +class LegacyBase64TestCase(unittest.TestCase): + def test_encodestring(self): + eq = self.assertEqual + eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n") + eq(base64.encodestring("a"), "YQ==\n") + eq(base64.encodestring("ab"), "YWI=\n") + eq(base64.encodestring("abc"), "YWJj\n") + eq(base64.encodestring(""), "") + eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. []{}"), + "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") + + def test_decodestring(self): + eq = self.assertEqual + eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org") + eq(base64.decodestring("YQ==\n"), "a") + eq(base64.decodestring("YWI=\n"), "ab") + eq(base64.decodestring("YWJj\n"), "abc") + eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. []{}") + eq(base64.decodestring(''), '') + + def test_encode(self): + eq = self.assertEqual + from cStringIO import StringIO + infp = StringIO('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789!@#0^&*();:<>,. []{}') + outfp = StringIO() + base64.encode(infp, outfp) + eq(outfp.getvalue(), + 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' + 'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' + 'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') + + def test_decode(self): + from cStringIO import StringIO + infp = StringIO('d3d3LnB5dGhvbi5vcmc=') + outfp = StringIO() + base64.decode(infp, outfp) + self.assertEqual(outfp.getvalue(), 'www.python.org') + + + +class BaseXYTestCase(unittest.TestCase): + def test_b64encode(self): + eq = self.assertEqual + # Test default alphabet + eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") + eq(base64.b64encode('\x00'), 'AA==') + eq(base64.b64encode("a"), "YQ==") + eq(base64.b64encode("ab"), "YWI=") + eq(base64.b64encode("abc"), "YWJj") + eq(base64.b64encode(""), "") + eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. []{}"), + "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") + # Test with arbitrary alternative characters + eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') + # Test standard alphabet + eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") + eq(base64.standard_b64encode("a"), "YQ==") + eq(base64.standard_b64encode("ab"), "YWI=") + eq(base64.standard_b64encode("abc"), "YWJj") + eq(base64.standard_b64encode(""), "") + eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. 
[]{}"), + "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") + # Test with 'URL safe' alternative characters + eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') + + def test_b64decode(self): + eq = self.assertEqual + eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") + eq(base64.b64decode('AA=='), '\x00') + eq(base64.b64decode("YQ=="), "a") + eq(base64.b64decode("YWI="), "ab") + eq(base64.b64decode("YWJj"), "abc") + eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. []{}") + eq(base64.b64decode(''), '') + # Test with arbitrary alternative characters + eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') + # Test standard alphabet + eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") + eq(base64.standard_b64decode("YQ=="), "a") + eq(base64.standard_b64decode("YWI="), "ab") + eq(base64.standard_b64decode("YWJj"), "abc") + eq(base64.standard_b64decode(""), "") + eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" + "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" + "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789!@#0^&*();:<>,. []{}") + # Test with 'URL safe' alternative characters + eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') + + + # This test is testing an implementation specific detail. + # In our implementation, we make a best attempt to translate + # strings with broken encoding back to their original form. + # Since the user will most likely be interested in what remains + # of a message which has been broken in transmission, this seems to + # be the most reasonable thing to do. + # In pypy we disable this test! 
+ ''' + def test_b64decode_error(self): + self.assertRaises(TypeError, base64.b64decode, 'abc') + ''' + + def test_b32encode(self): + eq = self.assertEqual + eq(base64.b32encode(''), '') + eq(base64.b32encode('\x00'), 'AA======') + eq(base64.b32encode('a'), 'ME======') + eq(base64.b32encode('ab'), 'MFRA====') + eq(base64.b32encode('abc'), 'MFRGG===') + eq(base64.b32encode('abcd'), 'MFRGGZA=') + eq(base64.b32encode('abcde'), 'MFRGGZDF') + + def test_b32decode(self): + eq = self.assertEqual + eq(base64.b32decode(''), '') + eq(base64.b32decode('AA======'), '\x00') + eq(base64.b32decode('ME======'), 'a') + eq(base64.b32decode('MFRA===='), 'ab') + eq(base64.b32decode('MFRGG==='), 'abc') + eq(base64.b32decode('MFRGGZA='), 'abcd') + eq(base64.b32decode('MFRGGZDF'), 'abcde') + + def test_b32decode_casefold(self): + eq = self.assertEqual + eq(base64.b32decode('', True), '') + eq(base64.b32decode('ME======', True), 'a') + eq(base64.b32decode('MFRA====', True), 'ab') + eq(base64.b32decode('MFRGG===', True), 'abc') + eq(base64.b32decode('MFRGGZA=', True), 'abcd') + eq(base64.b32decode('MFRGGZDF', True), 'abcde') + # Lower cases + eq(base64.b32decode('me======', True), 'a') + eq(base64.b32decode('mfra====', True), 'ab') + eq(base64.b32decode('mfrgg===', True), 'abc') + eq(base64.b32decode('mfrggza=', True), 'abcd') + eq(base64.b32decode('mfrggzdf', True), 'abcde') + # Expected exceptions + self.assertRaises(TypeError, base64.b32decode, 'me======') + # Mapping zero and one + eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe') + eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe') + eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe') + + def test_b32decode_error(self): + self.assertRaises(TypeError, base64.b32decode, 'abc') + self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==') + + def test_b16encode(self): + eq = self.assertEqual + eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF') + eq(base64.b16encode('\x00'), '00') + + def test_b16decode(self): + eq = self.assertEqual + eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef') + eq(base64.b16decode('00'), '\x00') + # Lower case is not allowed without a flag + self.assertRaises(TypeError, base64.b16decode, '0102abcdef') + # Case fold + eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef') + + + +def suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(LegacyBase64TestCase)) + suite.addTest(unittest.makeSuite(BaseXYTestCase)) + return suite + + +def test_main(): + test_support.run_suite(suite()) + + +if __name__ == '__main__': + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,63 @@ +from test.test_support import verify, TestFailed, TESTFN + +# Simple test to ensure that optimizations in fileobject.c deliver +# the expected results. For best testing, run this under a debug-build +# Python too (to exercise asserts in the C code). + +# Repeat string 'pattern' as often as needed to reach total length +# 'length'. Then call try_one with that string, a string one larger +# than that, and a string one smaller than that. The main driver +# feeds this all small sizes and various powers of 2, so we exercise +# all likely stdio buffer sizes, and "off by one" errors on both +# sides. 
+def drive_one(pattern, length):
+    q, r = divmod(length, len(pattern))
+    teststring = pattern * q + pattern[:r]
+    verify(len(teststring) == length)
+    try_one(teststring)
+    try_one(teststring + "x")
+    try_one(teststring[:-1])
+
+# Write s + "\n" + s to file, then open it and ensure that successive
+# .readline()s deliver what we wrote.
+def try_one(s):
+    # Since C doesn't guarantee we can write/read arbitrary bytes in text
+    # files, use binary mode.
+    f = open(TESTFN, "wb")
+    # write once with \n and once without
+    f.write(s)
+    f.write("\n")
+    f.write(s)
+    f.close()
+    f = open(TESTFN, "rb")
+    line = f.readline()
+    if line != s + "\n":
+        raise TestFailed("Expected %r got %r" % (s + "\n", line))
+    line = f.readline()
+    if line != s:
+        raise TestFailed("Expected %r got %r" % (s, line))
+    line = f.readline()
+    if line:
+        raise TestFailed("Expected EOF but got %r" % line)
+    f.close()
+
+# A pattern with prime length, to avoid simple relationships with
+# stdio buffer sizes.
+primepat = "1234567890\00\01\02\03\04\05\06"
+
+nullpat = "\0" * 1000
+
+try:
+# Too slow for PyPy.
+#    for size in range(1, 257) + [512, 1000, 1024, 2048, 4096, 8192, 10000,
+#                                 16384, 32768, 65536, 1000000]:
+    for size in range(1, 9) + [9, 63, 64, 65, 128, 129, 254, 255, 256, 512,
+                               1000, 1024, 2048, 4096, 8192, 10000, 16384]:
+        drive_one(primepat, size)
+        drive_one(nullpat, size)
+finally:
+    try:
+        import os
+        os.unlink(TESTFN)
+    except:
+        pass

Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py
==============================================================================
--- (empty file)
+++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py	Sun Aug 17 23:48:15 2008
@@ -0,0 +1,1785 @@
+# Python test set -- built-in functions
+
+import test.test_support, unittest
+from test.test_support import fcmp, have_unicode, TESTFN, unlink, \
+                              run_unittest, run_with_locale
+from operator import neg
+
+import sys, warnings, cStringIO, random, UserDict
+warnings.filterwarnings("ignore", "hex../oct.. of negative int",
+                        FutureWarning, __name__)
+warnings.filterwarnings("ignore", "integer argument expected",
+                        DeprecationWarning, "unittest")
+
+# count the number of test runs.
+# used to skip running test_execfile() multiple times +numruns = 0 + +class Squares: + + def __init__(self, max): + self.max = max + self.sofar = [] + + def __len__(self): return len(self.sofar) + + def __getitem__(self, i): + if not 0 <= i < self.max: raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(n*n) + n += 1 + return self.sofar[i] + +class StrSquares: + + def __init__(self, max): + self.max = max + self.sofar = [] + + def __len__(self): + return len(self.sofar) + + def __getitem__(self, i): + if not 0 <= i < self.max: + raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(str(n*n)) + n += 1 + return self.sofar[i] + +class BitBucket: + def write(self, line): + pass + +L = [ + ('0', 0), + ('1', 1), + ('9', 9), + ('10', 10), + ('99', 99), + ('100', 100), + ('314', 314), + (' 314', 314), + ('314 ', 314), + (' \t\t 314 \t\t ', 314), + (repr(sys.maxint), sys.maxint), + (' 1x', ValueError), + (' 1 ', 1), + (' 1\02 ', ValueError), + ('', ValueError), + (' ', ValueError), + (' \t\t ', ValueError) +] +if have_unicode: + L += [ + (unicode('0'), 0), + (unicode('1'), 1), + (unicode('9'), 9), + (unicode('10'), 10), + (unicode('99'), 99), + (unicode('100'), 100), + (unicode('314'), 314), + (unicode(' 314'), 314), + (unicode('\u0663\u0661\u0664 ','raw-unicode-escape'), 314), + (unicode(' \t\t 314 \t\t '), 314), + (unicode(' 1x'), ValueError), + (unicode(' 1 '), 1), + (unicode(' 1\02 '), ValueError), + (unicode(''), ValueError), + (unicode(' '), ValueError), + (unicode(' \t\t '), ValueError), + (unichr(0x200), ValueError), +] + +class TestFailingBool: + def __nonzero__(self): + raise RuntimeError + +class TestFailingIter: + def __iter__(self): + raise RuntimeError + +class BuiltinTest(unittest.TestCase): + + def test_import(self): + __import__('sys') + __import__('time') + __import__('string') + self.assertRaises(ImportError, __import__, 'spamspam') + self.assertRaises(TypeError, __import__, 1, 2, 3, 4) + self.assertRaises(ValueError, __import__, '') + + def test_abs(self): + # int + self.assertEqual(abs(0), 0) + self.assertEqual(abs(1234), 1234) + self.assertEqual(abs(-1234), 1234) + self.assertTrue(abs(-sys.maxint-1) > 0) + # float + self.assertEqual(abs(0.0), 0.0) + self.assertEqual(abs(3.14), 3.14) + self.assertEqual(abs(-3.14), 3.14) + # long + self.assertEqual(abs(0L), 0L) + self.assertEqual(abs(1234L), 1234L) + self.assertEqual(abs(-1234L), 1234L) + # str + self.assertRaises(TypeError, abs, 'a') + + def test_all(self): + self.assertEqual(all([2, 4, 6]), True) + self.assertEqual(all([2, None, 6]), False) + self.assertRaises(RuntimeError, all, [2, TestFailingBool(), 6]) + self.assertRaises(RuntimeError, all, TestFailingIter()) + self.assertRaises(TypeError, all, 10) # Non-iterable + self.assertRaises(TypeError, all) # No args + self.assertRaises(TypeError, all, [2, 4, 6], []) # Too many args + self.assertEqual(all([]), True) # Empty iterator + S = [50, 60] + self.assertEqual(all(x > 42 for x in S), True) + S = [50, 40, 60] + self.assertEqual(all(x > 42 for x in S), False) + + def test_any(self): + self.assertEqual(any([None, None, None]), False) + self.assertEqual(any([None, 4, None]), True) + self.assertRaises(RuntimeError, any, [None, TestFailingBool(), 6]) + self.assertRaises(RuntimeError, all, TestFailingIter()) + self.assertRaises(TypeError, any, 10) # Non-iterable + self.assertRaises(TypeError, any) # No args + self.assertRaises(TypeError, any, [2, 4, 6], []) # Too many args + self.assertEqual(any([]), False) # Empty iterator + S = [40, 60, 
30] + self.assertEqual(any(x > 42 for x in S), True) + S = [10, 20, 30] + self.assertEqual(any(x > 42 for x in S), False) + + def test_neg(self): + x = -sys.maxint-1 + self.assert_(isinstance(x, int)) + self.assertEqual(-x, sys.maxint+1) + + def test_apply(self): + def f0(*args): + self.assertEqual(args, ()) + def f1(a1): + self.assertEqual(a1, 1) + def f2(a1, a2): + self.assertEqual(a1, 1) + self.assertEqual(a2, 2) + def f3(a1, a2, a3): + self.assertEqual(a1, 1) + self.assertEqual(a2, 2) + self.assertEqual(a3, 3) + apply(f0, ()) + apply(f1, (1,)) + apply(f2, (1, 2)) + apply(f3, (1, 2, 3)) + + # A PyCFunction that takes only positional parameters should allow an + # empty keyword dictionary to pass without a complaint, but raise a + # TypeError if the dictionary is non-empty. + apply(id, (1,), {}) + self.assertRaises(TypeError, apply, id, (1,), {"foo": 1}) + self.assertRaises(TypeError, apply) + self.assertRaises(TypeError, apply, id, 42) + self.assertRaises(TypeError, apply, id, (42,), 42) + + def test_callable(self): + self.assert_(callable(len)) + def f(): pass + self.assert_(callable(f)) + class C: + def meth(self): pass + self.assert_(callable(C)) + x = C() + self.assert_(callable(x.meth)) + self.assert_(not callable(x)) + class D(C): + def __call__(self): pass + y = D() + self.assert_(callable(y)) + y() + + def test_chr(self): + self.assertEqual(chr(32), ' ') + self.assertEqual(chr(65), 'A') + self.assertEqual(chr(97), 'a') + self.assertEqual(chr(0xff), '\xff') + self.assertRaises(ValueError, chr, 256) + self.assertRaises(TypeError, chr) + + def test_cmp(self): + self.assertEqual(cmp(-1, 1), -1) + self.assertEqual(cmp(1, -1), 1) + self.assertEqual(cmp(1, 1), 0) + # verify that circular objects are not handled + a = []; a.append(a) + b = []; b.append(b) + from UserList import UserList + c = UserList(); c.append(c) + self.assertRaises(RuntimeError, cmp, a, b) + self.assertRaises(RuntimeError, cmp, b, c) + self.assertRaises(RuntimeError, cmp, c, a) + self.assertRaises(RuntimeError, cmp, a, c) + # okay, now break the cycles + a.pop(); b.pop(); c.pop() + self.assertRaises(TypeError, cmp) + + def test_coerce(self): + self.assert_(not fcmp(coerce(1, 1.1), (1.0, 1.1))) + self.assertEqual(coerce(1, 1L), (1L, 1L)) + self.assert_(not fcmp(coerce(1L, 1.1), (1.0, 1.1))) + self.assertRaises(TypeError, coerce) + class BadNumber: + def __coerce__(self, other): + raise ValueError + self.assertRaises(ValueError, coerce, 42, BadNumber()) + self.assertRaises(OverflowError, coerce, 0.5, int("12345" * 1000)) + + def test_compile(self): + compile('print 1\n', '', 'exec') + bom = '\xef\xbb\xbf' + compile(bom + 'print 1\n', '', 'exec') + self.assertRaises(TypeError, compile) + self.assertRaises(ValueError, compile, 'print 42\n', '', 'badmode') + self.assertRaises(ValueError, compile, 'print 42\n', '', 'single', 0xff) + self.assertRaises(TypeError, compile, chr(0), 'f', 'exec') + if have_unicode: + compile(unicode('print u"\xc3\xa5"\n', 'utf8'), '', 'exec') + self.assertRaises(TypeError, compile, unichr(0), 'f', 'exec') + self.assertRaises(ValueError, compile, unicode('a = 1'), 'f', 'bad') + + def test_delattr(self): + import sys + sys.spam = 1 + delattr(sys, 'spam') + self.assertRaises(TypeError, delattr) + + def test_dir(self): + x = 1 + self.assert_('x' in dir()) + import sys + self.assert_('modules' in dir(sys)) + self.assertRaises(TypeError, dir, 42, 42) + + def test_divmod(self): + self.assertEqual(divmod(12, 7), (1, 5)) + self.assertEqual(divmod(-12, 7), (-2, 2)) + self.assertEqual(divmod(12, -7), (-2, 
-2)) + self.assertEqual(divmod(-12, -7), (1, -5)) + + self.assertEqual(divmod(12L, 7L), (1L, 5L)) + self.assertEqual(divmod(-12L, 7L), (-2L, 2L)) + self.assertEqual(divmod(12L, -7L), (-2L, -2L)) + self.assertEqual(divmod(-12L, -7L), (1L, -5L)) + + self.assertEqual(divmod(12, 7L), (1, 5L)) + self.assertEqual(divmod(-12, 7L), (-2, 2L)) + self.assertEqual(divmod(12L, -7), (-2L, -2)) + self.assertEqual(divmod(-12L, -7), (1L, -5)) + + self.assertEqual(divmod(-sys.maxint-1, -1), + (sys.maxint+1, 0)) + + self.assert_(not fcmp(divmod(3.25, 1.0), (3.0, 0.25))) + self.assert_(not fcmp(divmod(-3.25, 1.0), (-4.0, 0.75))) + self.assert_(not fcmp(divmod(3.25, -1.0), (-4.0, -0.75))) + self.assert_(not fcmp(divmod(-3.25, -1.0), (3.0, -0.25))) + + self.assertRaises(TypeError, divmod) + + def test_eval(self): + self.assertEqual(eval('1+1'), 2) + self.assertEqual(eval(' 1+1\n'), 2) + globals = {'a': 1, 'b': 2} + locals = {'b': 200, 'c': 300} + self.assertEqual(eval('a', globals) , 1) + self.assertEqual(eval('a', globals, locals), 1) + self.assertEqual(eval('b', globals, locals), 200) + self.assertEqual(eval('c', globals, locals), 300) + if have_unicode: + self.assertEqual(eval(unicode('1+1')), 2) + self.assertEqual(eval(unicode(' 1+1\n')), 2) + globals = {'a': 1, 'b': 2} + locals = {'b': 200, 'c': 300} + if have_unicode: + self.assertEqual(eval(unicode('a'), globals), 1) + self.assertEqual(eval(unicode('a'), globals, locals), 1) + self.assertEqual(eval(unicode('b'), globals, locals), 200) + self.assertEqual(eval(unicode('c'), globals, locals), 300) + bom = '\xef\xbb\xbf' + self.assertEqual(eval(bom + 'a', globals, locals), 1) + self.assertEqual(eval(unicode('u"\xc3\xa5"', 'utf8'), globals), + unicode('\xc3\xa5', 'utf8')) + self.assertRaises(TypeError, eval) + self.assertRaises(TypeError, eval, ()) + + def test_general_eval(self): + # Tests that general mappings can be used for the locals argument + + class M: + "Test mapping interface versus possible calls from eval()." + def __getitem__(self, key): + if key == 'a': + return 12 + raise KeyError + def keys(self): + return list('xyz') + + m = M() + g = globals() + self.assertEqual(eval('a', g, m), 12) + self.assertRaises(NameError, eval, 'b', g, m) + self.assertEqual(eval('dir()', g, m), list('xyz')) + self.assertEqual(eval('globals()', g, m), g) + self.assertEqual(eval('locals()', g, m), m) + # the following line checks a detail of CPython: the globals() of + # any frame must be a real dictionary + #self.assertRaises(TypeError, eval, 'a', m) + class A: + "Non-mapping" + pass + m = A() + self.assertRaises((TypeError, AttributeError) , eval, 'a', g, m) + + # Verify that dict subclasses work as well + class D(dict): + def __getitem__(self, key): + if key == 'a': + return 12 + return dict.__getitem__(self, key) + def keys(self): + return list('xyz') + + d = D() + self.assertEqual(eval('a', g, d), 12) + self.assertRaises(NameError, eval, 'b', g, d) + self.assertEqual(eval('dir()', g, d), list('xyz')) + self.assertEqual(eval('globals()', g, d), g) + self.assertEqual(eval('locals()', g, d), d) + + # Verify locals stores (used by list comps) + eval('[locals() for i in (2,3)]', g, d) + eval('[locals() for i in (2,3)]', g, UserDict.UserDict()) + + class SpreadSheet: + "Sample application showing nested, calculated lookups." 
+ _cells = {} + def __setitem__(self, key, formula): + self._cells[key] = formula + def __getitem__(self, key): + return eval(self._cells[key], globals(), self) + + ss = SpreadSheet() + ss['a1'] = '5' + ss['a2'] = 'a1*6' + ss['a3'] = 'a2*7' + self.assertEqual(ss['a3'], 210) + + # Verify that dir() catches a non-list returned by eval + # SF bug #1004669 + class C: + def __getitem__(self, item): + raise KeyError(item) + def keys(self): + return 'a' + self.assertRaises(TypeError, eval, 'dir()', globals(), C()) + + # Done outside of the method test_z to get the correct scope + z = 0 + f = open(TESTFN, 'w') + f.write('z = z+1\n') + f.write('z = z*2\n') + f.close() + execfile(TESTFN) + + def test_execfile(self): + global numruns + if numruns: + return + numruns += 1 + + globals = {'a': 1, 'b': 2} + locals = {'b': 200, 'c': 300} + + self.assertEqual(self.__class__.z, 2) + globals['z'] = 0 + execfile(TESTFN, globals) + self.assertEqual(globals['z'], 2) + locals['z'] = 0 + execfile(TESTFN, globals, locals) + self.assertEqual(locals['z'], 2) + + class M: + "Test mapping interface versus possible calls from execfile()." + def __init__(self): + self.z = 10 + def __getitem__(self, key): + if key == 'z': + return self.z + raise KeyError + def __setitem__(self, key, value): + if key == 'z': + self.z = value + return + raise KeyError + + locals = M() + locals['z'] = 0 + execfile(TESTFN, globals, locals) + self.assertEqual(locals['z'], 2) + + unlink(TESTFN) + self.assertRaises(TypeError, execfile) + self.assertRaises(TypeError, execfile, TESTFN, {}, ()) + import os + self.assertRaises(IOError, execfile, os.curdir) + self.assertRaises(IOError, execfile, "I_dont_exist") + + def test_filter(self): + self.assertEqual(filter(lambda c: 'a' <= c <= 'z', 'Hello World'), 'elloorld') + self.assertEqual(filter(None, [1, 'hello', [], [3], '', None, 9, 0]), [1, 'hello', [3], 9]) + self.assertEqual(filter(lambda x: x > 0, [1, -3, 9, 0, 2]), [1, 9, 2]) + self.assertEqual(filter(None, Squares(10)), [1, 4, 9, 16, 25, 36, 49, 64, 81]) + self.assertEqual(filter(lambda x: x%2, Squares(10)), [1, 9, 25, 49, 81]) + def identity(item): + return 1 + filter(identity, Squares(5)) + self.assertRaises(TypeError, filter) + class BadSeq(object): + def __getitem__(self, index): + if index<4: + return 42 + raise ValueError + self.assertRaises(ValueError, filter, lambda x: x, BadSeq()) + def badfunc(): + pass + self.assertRaises(TypeError, filter, badfunc, range(5)) + + # test bltinmodule.c::filtertuple() + self.assertEqual(filter(None, (1, 2)), (1, 2)) + self.assertEqual(filter(lambda x: x>=3, (1, 2, 3, 4)), (3, 4)) + self.assertRaises(TypeError, filter, 42, (1, 2)) + + # test bltinmodule.c::filterstring() + self.assertEqual(filter(None, "12"), "12") + self.assertEqual(filter(lambda x: x>="3", "1234"), "34") + self.assertRaises(TypeError, filter, 42, "12") + class badstr(str): + def __getitem__(self, index): + raise ValueError + self.assertRaises(ValueError, filter, lambda x: x >="3", badstr("1234")) + + class badstr2(str): + def __getitem__(self, index): + return 42 + self.assertRaises(TypeError, filter, lambda x: x >=42, badstr2("1234")) + + class weirdstr(str): + def __getitem__(self, index): + return weirdstr(2*str.__getitem__(self, index)) + self.assertEqual(filter(lambda x: x>="33", weirdstr("1234")), "3344") + + class shiftstr(str): + def __getitem__(self, index): + return chr(ord(str.__getitem__(self, index))+1) + self.assertEqual(filter(lambda x: x>="3", shiftstr("1234")), "345") + + if have_unicode: + # test 
bltinmodule.c::filterunicode() + self.assertEqual(filter(None, unicode("12")), unicode("12")) + self.assertEqual(filter(lambda x: x>="3", unicode("1234")), unicode("34")) + self.assertRaises(TypeError, filter, 42, unicode("12")) + self.assertRaises(ValueError, filter, lambda x: x >="3", badstr(unicode("1234"))) + + class badunicode(unicode): + def __getitem__(self, index): + return 42 + self.assertRaises(TypeError, filter, lambda x: x >=42, badunicode("1234")) + + class weirdunicode(unicode): + def __getitem__(self, index): + return weirdunicode(2*unicode.__getitem__(self, index)) + self.assertEqual( + filter(lambda x: x>=unicode("33"), weirdunicode("1234")), unicode("3344")) + + class shiftunicode(unicode): + def __getitem__(self, index): + return unichr(ord(unicode.__getitem__(self, index))+1) + self.assertEqual( + filter(lambda x: x>=unicode("3"), shiftunicode("1234")), + unicode("345") + ) + + def test_filter_subclasses(self): + # test that filter() never returns tuple, str or unicode subclasses + # and that the result always goes through __getitem__ + funcs = (None, bool, lambda x: True) + class tuple2(tuple): + def __getitem__(self, index): + return 2*tuple.__getitem__(self, index) + class str2(str): + def __getitem__(self, index): + return 2*str.__getitem__(self, index) + inputs = { + tuple2: {(): (), (1, 2, 3): (2, 4, 6)}, + str2: {"": "", "123": "112233"} + } + if have_unicode: + class unicode2(unicode): + def __getitem__(self, index): + return 2*unicode.__getitem__(self, index) + inputs[unicode2] = { + unicode(): unicode(), + unicode("123"): unicode("112233") + } + + for (cls, inps) in inputs.iteritems(): + for (inp, exp) in inps.iteritems(): + # make sure the output goes through __getitem__ + # even if func is None + self.assertEqual( + filter(funcs[0], cls(inp)), + filter(funcs[1], cls(inp)) + ) + for func in funcs: + outp = filter(func, cls(inp)) + self.assertEqual(outp, exp) + self.assert_(not isinstance(outp, cls)) + + def test_float(self): + self.assertEqual(float(3.14), 3.14) + self.assertEqual(float(314), 314.0) + self.assertEqual(float(314L), 314.0) + self.assertEqual(float(" 3.14 "), 3.14) + self.assertRaises(ValueError, float, " 0x3.1 ") + self.assertRaises(ValueError, float, " -0x3.p-1 ") + if have_unicode: + self.assertEqual(float(unicode(" 3.14 ")), 3.14) + self.assertEqual(float(unicode(" \u0663.\u0661\u0664 ",'raw-unicode-escape')), 3.14) + # Implementation limitation in PyFloat_FromString() + self.assertRaises(ValueError, float, unicode("1"*10000)) + + @run_with_locale('LC_NUMERIC', 'fr_FR', 'de_DE') + def test_float_with_comma(self): + # set locale to something that doesn't use '.' for the decimal point + # float must not accept the locale specific decimal point but + # it still has to accept the normal python syntac + import locale + if not locale.localeconv()['decimal_point'] == ',': + return + + self.assertEqual(float(" 3.14 "), 3.14) + self.assertEqual(float("+3.14 "), 3.14) + self.assertEqual(float("-3.14 "), -3.14) + self.assertEqual(float(".14 "), .14) + self.assertEqual(float("3. 
"), 3.0) + self.assertEqual(float("3.e3 "), 3000.0) + self.assertEqual(float("3.2e3 "), 3200.0) + self.assertEqual(float("2.5e-1 "), 0.25) + self.assertEqual(float("5e-1"), 0.5) + self.assertRaises(ValueError, float, " 3,14 ") + self.assertRaises(ValueError, float, " +3,14 ") + self.assertRaises(ValueError, float, " -3,14 ") + self.assertRaises(ValueError, float, " 0x3.1 ") + self.assertRaises(ValueError, float, " -0x3.p-1 ") + self.assertEqual(float(" 25.e-1 "), 2.5) + self.assertEqual(fcmp(float(" .25e-1 "), .025), 0) + + def test_floatconversion(self): + # Make sure that calls to __float__() work properly + class Foo0: + def __float__(self): + return 42. + + class Foo1(object): + def __float__(self): + return 42. + + class Foo2(float): + def __float__(self): + return 42. + + class Foo3(float): + def __new__(cls, value=0.): + return float.__new__(cls, 2*value) + + def __float__(self): + return self + + class Foo4(float): + def __float__(self): + return 42 + + self.assertAlmostEqual(float(Foo0()), 42.) + self.assertAlmostEqual(float(Foo1()), 42.) + self.assertAlmostEqual(float(Foo2()), 42.) + self.assertAlmostEqual(float(Foo3(21)), 42.) + self.assertRaises(TypeError, float, Foo4(42)) + + def test_getattr(self): + import sys + self.assert_(getattr(sys, 'stdout') is sys.stdout) + self.assertRaises(TypeError, getattr, sys, 1) + self.assertRaises(TypeError, getattr, sys, 1, "foo") + self.assertRaises(TypeError, getattr) + if have_unicode: + self.assertRaises(UnicodeError, getattr, sys, unichr(sys.maxunicode)) + + def test_hasattr(self): + import sys + self.assert_(hasattr(sys, 'stdout')) + self.assertRaises(TypeError, hasattr, sys, 1) + self.assertRaises(TypeError, hasattr) + if have_unicode: + self.assertRaises(UnicodeError, hasattr, sys, unichr(sys.maxunicode)) + + def test_hash(self): + hash(None) + self.assertEqual(hash(1), hash(1L)) + self.assertEqual(hash(1), hash(1.0)) + hash('spam') + if have_unicode: + self.assertEqual(hash('spam'), hash(unicode('spam'))) + hash((0,1,2,3)) + def f(): pass + self.assertRaises(TypeError, hash, []) + self.assertRaises(TypeError, hash, {}) + # Bug 1536021: Allow hash to return long objects + class X: + def __hash__(self): + return 2**100 + self.assertEquals(type(hash(X())), int) + class Y(object): + def __hash__(self): + return 2**100 + self.assertEquals(type(hash(Y())), int) + class Z(long): + def __hash__(self): + return self + self.assertEquals(hash(Z(42)), hash(42L)) + + def test_hex(self): + self.assertEqual(hex(16), '0x10') + self.assertEqual(hex(16L), '0x10L') + self.assertEqual(hex(-16), '-0x10') + self.assertEqual(hex(-16L), '-0x10L') + self.assertRaises(TypeError, hex, {}) + + def test_id(self): + id(None) + id(1) + id(1L) + id(1.0) + id('spam') + id((0,1,2,3)) + id([0,1,2,3]) + id({'spam': 1, 'eggs': 2, 'ham': 3}) + + # Test input() later, together with raw_input + + def test_int(self): + self.assertEqual(int(314), 314) + self.assertEqual(int(3.14), 3) + self.assertEqual(int(314L), 314) + # Check that conversion from float truncates towards zero + self.assertEqual(int(-3.14), -3) + self.assertEqual(int(3.9), 3) + self.assertEqual(int(-3.9), -3) + self.assertEqual(int(3.5), 3) + self.assertEqual(int(-3.5), -3) + # Different base: + self.assertEqual(int("10",16), 16L) + if have_unicode: + self.assertEqual(int(unicode("10"),16), 16L) + # Test conversion from strings and various anomalies + for s, v in L: + for sign in "", "+", "-": + for prefix in "", " ", "\t", " \t\t ": + ss = prefix + sign + s + vv = v + if sign == "-" and v is not ValueError: 
+ vv = -v + try: + self.assertEqual(int(ss), vv) + except v: + pass + + s = repr(-1-sys.maxint) + x = int(s) + self.assertEqual(x+1, -sys.maxint) + self.assert_(isinstance(x, int)) + # should return long + self.assertEqual(int(s[1:]), sys.maxint+1) + + # should return long + x = int(1e100) + self.assert_(isinstance(x, long)) + x = int(-1e100) + self.assert_(isinstance(x, long)) + + + # SF bug 434186: 0x80000000/2 != 0x80000000>>1. + # Worked by accident in Windows release build, but failed in debug build. + # Failed in all Linux builds. + x = -1-sys.maxint + self.assertEqual(x >> 1, x//2) + + self.assertRaises(ValueError, int, '123\0') + self.assertRaises(ValueError, int, '53', 40) + + # SF bug 1545497: embedded NULs were not detected with + # explicit base + self.assertRaises(ValueError, int, '123\0', 10) + self.assertRaises(ValueError, int, '123\x00 245', 20) + + x = int('1' * 600) + self.assert_(isinstance(x, long)) + + if have_unicode: + x = int(unichr(0x661) * 600) + self.assert_(isinstance(x, long)) + + self.assertRaises(TypeError, int, 1, 12) + + self.assertEqual(int('0123', 0), 83) + self.assertEqual(int('0x123', 16), 291) + + # SF bug 1334662: int(string, base) wrong answers + # Various representations of 2**32 evaluated to 0 + # rather than 2**32 in previous versions + + self.assertEqual(int('100000000000000000000000000000000', 2), 4294967296L) + self.assertEqual(int('102002022201221111211', 3), 4294967296L) + self.assertEqual(int('10000000000000000', 4), 4294967296L) + self.assertEqual(int('32244002423141', 5), 4294967296L) + self.assertEqual(int('1550104015504', 6), 4294967296L) + self.assertEqual(int('211301422354', 7), 4294967296L) + self.assertEqual(int('40000000000', 8), 4294967296L) + self.assertEqual(int('12068657454', 9), 4294967296L) + self.assertEqual(int('4294967296', 10), 4294967296L) + self.assertEqual(int('1904440554', 11), 4294967296L) + self.assertEqual(int('9ba461594', 12), 4294967296L) + self.assertEqual(int('535a79889', 13), 4294967296L) + self.assertEqual(int('2ca5b7464', 14), 4294967296L) + self.assertEqual(int('1a20dcd81', 15), 4294967296L) + self.assertEqual(int('100000000', 16), 4294967296L) + self.assertEqual(int('a7ffda91', 17), 4294967296L) + self.assertEqual(int('704he7g4', 18), 4294967296L) + self.assertEqual(int('4f5aff66', 19), 4294967296L) + self.assertEqual(int('3723ai4g', 20), 4294967296L) + self.assertEqual(int('281d55i4', 21), 4294967296L) + self.assertEqual(int('1fj8b184', 22), 4294967296L) + self.assertEqual(int('1606k7ic', 23), 4294967296L) + self.assertEqual(int('mb994ag', 24), 4294967296L) + self.assertEqual(int('hek2mgl', 25), 4294967296L) + self.assertEqual(int('dnchbnm', 26), 4294967296L) + self.assertEqual(int('b28jpdm', 27), 4294967296L) + self.assertEqual(int('8pfgih4', 28), 4294967296L) + self.assertEqual(int('76beigg', 29), 4294967296L) + self.assertEqual(int('5qmcpqg', 30), 4294967296L) + self.assertEqual(int('4q0jto4', 31), 4294967296L) + self.assertEqual(int('4000000', 32), 4294967296L) + self.assertEqual(int('3aokq94', 33), 4294967296L) + self.assertEqual(int('2qhxjli', 34), 4294967296L) + self.assertEqual(int('2br45qb', 35), 4294967296L) + self.assertEqual(int('1z141z4', 36), 4294967296L) + + # SF bug 1334662: int(string, base) wrong answers + # Checks for proper evaluation of 2**32 + 1 + self.assertEqual(int('100000000000000000000000000000001', 2), 4294967297L) + self.assertEqual(int('102002022201221111212', 3), 4294967297L) + self.assertEqual(int('10000000000000001', 4), 4294967297L) + self.assertEqual(int('32244002423142', 
5), 4294967297L) + self.assertEqual(int('1550104015505', 6), 4294967297L) + self.assertEqual(int('211301422355', 7), 4294967297L) + self.assertEqual(int('40000000001', 8), 4294967297L) + self.assertEqual(int('12068657455', 9), 4294967297L) + self.assertEqual(int('4294967297', 10), 4294967297L) + self.assertEqual(int('1904440555', 11), 4294967297L) + self.assertEqual(int('9ba461595', 12), 4294967297L) + self.assertEqual(int('535a7988a', 13), 4294967297L) + self.assertEqual(int('2ca5b7465', 14), 4294967297L) + self.assertEqual(int('1a20dcd82', 15), 4294967297L) + self.assertEqual(int('100000001', 16), 4294967297L) + self.assertEqual(int('a7ffda92', 17), 4294967297L) + self.assertEqual(int('704he7g5', 18), 4294967297L) + self.assertEqual(int('4f5aff67', 19), 4294967297L) + self.assertEqual(int('3723ai4h', 20), 4294967297L) + self.assertEqual(int('281d55i5', 21), 4294967297L) + self.assertEqual(int('1fj8b185', 22), 4294967297L) + self.assertEqual(int('1606k7id', 23), 4294967297L) + self.assertEqual(int('mb994ah', 24), 4294967297L) + self.assertEqual(int('hek2mgm', 25), 4294967297L) + self.assertEqual(int('dnchbnn', 26), 4294967297L) + self.assertEqual(int('b28jpdn', 27), 4294967297L) + self.assertEqual(int('8pfgih5', 28), 4294967297L) + self.assertEqual(int('76beigh', 29), 4294967297L) + self.assertEqual(int('5qmcpqh', 30), 4294967297L) + self.assertEqual(int('4q0jto5', 31), 4294967297L) + self.assertEqual(int('4000001', 32), 4294967297L) + self.assertEqual(int('3aokq95', 33), 4294967297L) + self.assertEqual(int('2qhxjlj', 34), 4294967297L) + self.assertEqual(int('2br45qc', 35), 4294967297L) + self.assertEqual(int('1z141z5', 36), 4294967297L) + + def test_intconversion(self): + # Test __int__() + class Foo0: + def __int__(self): + return 42 + + class Foo1(object): + def __int__(self): + return 42 + + class Foo2(int): + def __int__(self): + return 42 + + class Foo3(int): + def __int__(self): + return self + + class Foo4(int): + def __int__(self): + return 42L + + class Foo5(int): + def __int__(self): + return 42. + + self.assertEqual(int(Foo0()), 42) + self.assertEqual(int(Foo1()), 42) + self.assertEqual(int(Foo2()), 42) + self.assertEqual(int(Foo3()), 0) + self.assertEqual(int(Foo4()), 42L) + self.assertRaises(TypeError, int, Foo5()) + + def test_intern(self): + self.assertRaises(TypeError, intern) + s = "never interned before" + self.assert_(intern(s) is s) + s2 = s.swapcase().swapcase() + self.assert_(intern(s2) is s) + + # Subclasses of string can't be interned, because they + # provide too much opportunity for insane things to happen. + # We don't want them in the interned dict and if they aren't + # actually interned, we don't want to create the appearance + # that they are by allowing intern() to succeeed. + class S(str): + def __hash__(self): + return 123 + + self.assertRaises(TypeError, intern, S("abc")) + + # It's still safe to pass these strings to routines that + # call intern internally, e.g. PyObject_SetAttr(). 
+ s = S("abc") + setattr(s, s, s) + self.assertEqual(getattr(s, s), s) + + def test_iter(self): + self.assertRaises(TypeError, iter) + self.assertRaises(TypeError, iter, 42, 42) + lists = [("1", "2"), ["1", "2"], "12"] + if have_unicode: + lists.append(unicode("12")) + for l in lists: + i = iter(l) + self.assertEqual(i.next(), '1') + self.assertEqual(i.next(), '2') + self.assertRaises(StopIteration, i.next) + + def test_isinstance(self): + class C: + pass + class D(C): + pass + class E: + pass + c = C() + d = D() + e = E() + self.assert_(isinstance(c, C)) + self.assert_(isinstance(d, C)) + self.assert_(not isinstance(e, C)) + self.assert_(not isinstance(c, D)) + self.assert_(not isinstance('foo', E)) + self.assertRaises(TypeError, isinstance, E, 'foo') + self.assertRaises(TypeError, isinstance) + + def test_issubclass(self): + class C: + pass + class D(C): + pass + class E: + pass + c = C() + d = D() + e = E() + self.assert_(issubclass(D, C)) + self.assert_(issubclass(C, C)) + self.assert_(not issubclass(C, D)) + self.assertRaises(TypeError, issubclass, 'foo', E) + self.assertRaises(TypeError, issubclass, E, 'foo') + self.assertRaises(TypeError, issubclass) + + def test_len(self): + self.assertEqual(len('123'), 3) + self.assertEqual(len(()), 0) + self.assertEqual(len((1, 2, 3, 4)), 4) + self.assertEqual(len([1, 2, 3, 4]), 4) + self.assertEqual(len({}), 0) + self.assertEqual(len({'a':1, 'b': 2}), 2) + class BadSeq: + def __len__(self): + raise ValueError + self.assertRaises(ValueError, len, BadSeq()) + + def test_list(self): + self.assertEqual(list([]), []) + l0_3 = [0, 1, 2, 3] + l0_3_bis = list(l0_3) + self.assertEqual(l0_3, l0_3_bis) + self.assert_(l0_3 is not l0_3_bis) + self.assertEqual(list(()), []) + self.assertEqual(list((0, 1, 2, 3)), [0, 1, 2, 3]) + self.assertEqual(list(''), []) + self.assertEqual(list('spam'), ['s', 'p', 'a', 'm']) + + if sys.maxint == 0x7fffffff: + # This test can currently only work on 32-bit machines. + # XXX If/when PySequence_Length() returns a ssize_t, it should be + # XXX re-enabled. + # Verify clearing of bug #556025. + # This assumes that the max data size (sys.maxint) == max + # address size this also assumes that the address size is at + # least 4 bytes with 8 byte addresses, the bug is not well + # tested + # + # Note: This test is expected to SEGV under Cygwin 1.3.12 or + # earlier due to a newlib bug. 
See the following mailing list + # thread for the details: + + # http://sources.redhat.com/ml/newlib/2002/msg00369.html + self.assertRaises(MemoryError, [None].__mul__, sys.maxint // 2) + + # This code used to segfault in Py2.4a3 + x = [] + x.extend(-y for y in x) + self.assertEqual(x, []) + + def test_long(self): + self.assertEqual(long(314), 314L) + self.assertEqual(long(3.14), 3L) + self.assertEqual(long(314L), 314L) + # Check that conversion from float truncates towards zero + self.assertEqual(long(-3.14), -3L) + self.assertEqual(long(3.9), 3L) + self.assertEqual(long(-3.9), -3L) + self.assertEqual(long(3.5), 3L) + self.assertEqual(long(-3.5), -3L) + self.assertEqual(long("-3"), -3L) + if have_unicode: + self.assertEqual(long(unicode("-3")), -3L) + # Different base: + self.assertEqual(long("10",16), 16L) + if have_unicode: + self.assertEqual(long(unicode("10"),16), 16L) + # Check conversions from string (same test set as for int(), and then some) + LL = [ + ('1' + '0'*20, 10L**20), + ('1' + '0'*100, 10L**100) + ] + L2 = L[:] + if have_unicode: + L2 += [ + (unicode('1') + unicode('0')*20, 10L**20), + (unicode('1') + unicode('0')*100, 10L**100), + ] + for s, v in L2 + LL: + for sign in "", "+", "-": + for prefix in "", " ", "\t", " \t\t ": + ss = prefix + sign + s + vv = v + if sign == "-" and v is not ValueError: + vv = -v + try: + self.assertEqual(long(ss), long(vv)) + except v: + pass + + self.assertRaises(ValueError, long, '123\0') + self.assertRaises(ValueError, long, '53', 40) + self.assertRaises(TypeError, long, 1, 12) + + # SF patch #1638879: embedded NULs were not detected with + # explicit base + self.assertRaises(ValueError, long, '123\0', 10) + self.assertRaises(ValueError, long, '123\x00 245', 20) + + self.assertEqual(long('100000000000000000000000000000000', 2), + 4294967296) + self.assertEqual(long('102002022201221111211', 3), 4294967296) + self.assertEqual(long('10000000000000000', 4), 4294967296) + self.assertEqual(long('32244002423141', 5), 4294967296) + self.assertEqual(long('1550104015504', 6), 4294967296) + self.assertEqual(long('211301422354', 7), 4294967296) + self.assertEqual(long('40000000000', 8), 4294967296) + self.assertEqual(long('12068657454', 9), 4294967296) + self.assertEqual(long('4294967296', 10), 4294967296) + self.assertEqual(long('1904440554', 11), 4294967296) + self.assertEqual(long('9ba461594', 12), 4294967296) + self.assertEqual(long('535a79889', 13), 4294967296) + self.assertEqual(long('2ca5b7464', 14), 4294967296) + self.assertEqual(long('1a20dcd81', 15), 4294967296) + self.assertEqual(long('100000000', 16), 4294967296) + self.assertEqual(long('a7ffda91', 17), 4294967296) + self.assertEqual(long('704he7g4', 18), 4294967296) + self.assertEqual(long('4f5aff66', 19), 4294967296) + self.assertEqual(long('3723ai4g', 20), 4294967296) + self.assertEqual(long('281d55i4', 21), 4294967296) + self.assertEqual(long('1fj8b184', 22), 4294967296) + self.assertEqual(long('1606k7ic', 23), 4294967296) + self.assertEqual(long('mb994ag', 24), 4294967296) + self.assertEqual(long('hek2mgl', 25), 4294967296) + self.assertEqual(long('dnchbnm', 26), 4294967296) + self.assertEqual(long('b28jpdm', 27), 4294967296) + self.assertEqual(long('8pfgih4', 28), 4294967296) + self.assertEqual(long('76beigg', 29), 4294967296) + self.assertEqual(long('5qmcpqg', 30), 4294967296) + self.assertEqual(long('4q0jto4', 31), 4294967296) + self.assertEqual(long('4000000', 32), 4294967296) + self.assertEqual(long('3aokq94', 33), 4294967296) + self.assertEqual(long('2qhxjli', 34), 4294967296) + 
self.assertEqual(long('2br45qb', 35), 4294967296) + self.assertEqual(long('1z141z4', 36), 4294967296) + + self.assertEqual(long('100000000000000000000000000000001', 2), + 4294967297) + self.assertEqual(long('102002022201221111212', 3), 4294967297) + self.assertEqual(long('10000000000000001', 4), 4294967297) + self.assertEqual(long('32244002423142', 5), 4294967297) + self.assertEqual(long('1550104015505', 6), 4294967297) + self.assertEqual(long('211301422355', 7), 4294967297) + self.assertEqual(long('40000000001', 8), 4294967297) + self.assertEqual(long('12068657455', 9), 4294967297) + self.assertEqual(long('4294967297', 10), 4294967297) + self.assertEqual(long('1904440555', 11), 4294967297) + self.assertEqual(long('9ba461595', 12), 4294967297) + self.assertEqual(long('535a7988a', 13), 4294967297) + self.assertEqual(long('2ca5b7465', 14), 4294967297) + self.assertEqual(long('1a20dcd82', 15), 4294967297) + self.assertEqual(long('100000001', 16), 4294967297) + self.assertEqual(long('a7ffda92', 17), 4294967297) + self.assertEqual(long('704he7g5', 18), 4294967297) + self.assertEqual(long('4f5aff67', 19), 4294967297) + self.assertEqual(long('3723ai4h', 20), 4294967297) + self.assertEqual(long('281d55i5', 21), 4294967297) + self.assertEqual(long('1fj8b185', 22), 4294967297) + self.assertEqual(long('1606k7id', 23), 4294967297) + self.assertEqual(long('mb994ah', 24), 4294967297) + self.assertEqual(long('hek2mgm', 25), 4294967297) + self.assertEqual(long('dnchbnn', 26), 4294967297) + self.assertEqual(long('b28jpdn', 27), 4294967297) + self.assertEqual(long('8pfgih5', 28), 4294967297) + self.assertEqual(long('76beigh', 29), 4294967297) + self.assertEqual(long('5qmcpqh', 30), 4294967297) + self.assertEqual(long('4q0jto5', 31), 4294967297) + self.assertEqual(long('4000001', 32), 4294967297) + self.assertEqual(long('3aokq95', 33), 4294967297) + self.assertEqual(long('2qhxjlj', 34), 4294967297) + self.assertEqual(long('2br45qc', 35), 4294967297) + self.assertEqual(long('1z141z5', 36), 4294967297) + + + def test_longconversion(self): + # Test __long__() + class Foo0: + def __long__(self): + return 42L + + class Foo1(object): + def __long__(self): + return 42L + + class Foo2(long): + def __long__(self): + return 42L + + class Foo3(long): + def __long__(self): + return self + + class Foo4(long): + def __long__(self): + return 42 + + class Foo5(long): + def __long__(self): + return 42. 
+ + self.assertEqual(long(Foo0()), 42L) + self.assertEqual(long(Foo1()), 42L) + self.assertEqual(long(Foo2()), 42L) + self.assertEqual(long(Foo3()), 0) + self.assertEqual(long(Foo4()), 42) + self.assertRaises(TypeError, long, Foo5()) + + def test_map(self): + self.assertEqual( + map(None, 'hello world'), + ['h','e','l','l','o',' ','w','o','r','l','d'] + ) + self.assertEqual( + map(None, 'abcd', 'efg'), + [('a', 'e'), ('b', 'f'), ('c', 'g'), ('d', None)] + ) + self.assertEqual( + map(None, range(10)), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + ) + self.assertEqual( + map(lambda x: x*x, range(1,4)), + [1, 4, 9] + ) + try: + from math import sqrt + except ImportError: + def sqrt(x): + return pow(x, 0.5) + self.assertEqual( + map(lambda x: map(sqrt,x), [[16, 4], [81, 9]]), + [[4.0, 2.0], [9.0, 3.0]] + ) + self.assertEqual( + map(lambda x, y: x+y, [1,3,2], [9,1,4]), + [10, 4, 6] + ) + + def plus(*v): + accu = 0 + for i in v: accu = accu + i + return accu + self.assertEqual( + map(plus, [1, 3, 7]), + [1, 3, 7] + ) + self.assertEqual( + map(plus, [1, 3, 7], [4, 9, 2]), + [1+4, 3+9, 7+2] + ) + self.assertEqual( + map(plus, [1, 3, 7], [4, 9, 2], [1, 1, 0]), + [1+4+1, 3+9+1, 7+2+0] + ) + self.assertEqual( + map(None, Squares(10)), + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + ) + self.assertEqual( + map(int, Squares(10)), + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + ) + self.assertEqual( + map(None, Squares(3), Squares(2)), + [(0,0), (1,1), (4,None)] + ) + self.assertEqual( + map(max, Squares(3), Squares(2)), + [0, 1, 4] + ) + self.assertRaises(TypeError, map) + self.assertRaises(TypeError, map, lambda x: x, 42) + self.assertEqual(map(None, [42]), [42]) + class BadSeq: + def __getitem__(self, index): + raise ValueError + self.assertRaises(ValueError, map, lambda x: x, BadSeq()) + def badfunc(x): + raise RuntimeError + self.assertRaises(RuntimeError, map, badfunc, range(5)) + + def test_max(self): + self.assertEqual(max('123123'), '3') + self.assertEqual(max(1, 2, 3), 3) + self.assertEqual(max((1, 2, 3, 1, 2, 3)), 3) + self.assertEqual(max([1, 2, 3, 1, 2, 3]), 3) + + self.assertEqual(max(1, 2L, 3.0), 3.0) + self.assertEqual(max(1L, 2.0, 3), 3) + self.assertEqual(max(1.0, 2, 3L), 3L) + + for stmt in ( + "max(key=int)", # no args + "max(1, key=int)", # single arg not iterable + "max(1, 2, keystone=int)", # wrong keyword + "max(1, 2, key=int, abc=int)", # two many keywords + "max(1, 2, key=1)", # keyfunc is not callable + ): + try: + exec(stmt) in globals() + except TypeError: + pass + else: + self.fail(stmt) + + self.assertEqual(max((1,), key=neg), 1) # one elem iterable + self.assertEqual(max((1,2), key=neg), 1) # two elem iterable + self.assertEqual(max(1, 2, key=neg), 1) # two elems + + data = [random.randrange(200) for i in range(100)] + keys = dict((elem, random.randrange(50)) for elem in data) + f = keys.__getitem__ + self.assertEqual(max(data, key=f), + sorted(reversed(data), key=f)[-1]) + + def test_min(self): + self.assertEqual(min('123123'), '1') + self.assertEqual(min(1, 2, 3), 1) + self.assertEqual(min((1, 2, 3, 1, 2, 3)), 1) + self.assertEqual(min([1, 2, 3, 1, 2, 3]), 1) + + self.assertEqual(min(1, 2L, 3.0), 1) + self.assertEqual(min(1L, 2.0, 3), 1L) + self.assertEqual(min(1.0, 2, 3L), 1.0) + + self.assertRaises(TypeError, min) + self.assertRaises(TypeError, min, 42) + self.assertRaises(ValueError, min, ()) + class BadSeq: + def __getitem__(self, index): + raise ValueError + self.assertRaises(ValueError, min, BadSeq()) + class BadNumber: + def __cmp__(self, other): + raise ValueError + 
self.assertRaises(ValueError, min, (42, BadNumber())) + + for stmt in ( + "min(key=int)", # no args + "min(1, key=int)", # single arg not iterable + "min(1, 2, keystone=int)", # wrong keyword + "min(1, 2, key=int, abc=int)", # two many keywords + "min(1, 2, key=1)", # keyfunc is not callable + ): + try: + exec(stmt) in globals() + except TypeError: + pass + else: + self.fail(stmt) + + self.assertEqual(min((1,), key=neg), 1) # one elem iterable + self.assertEqual(min((1,2), key=neg), 2) # two elem iterable + self.assertEqual(min(1, 2, key=neg), 2) # two elems + + data = [random.randrange(200) for i in range(100)] + keys = dict((elem, random.randrange(50)) for elem in data) + f = keys.__getitem__ + self.assertEqual(min(data, key=f), + sorted(data, key=f)[0]) + + def test_oct(self): + self.assertEqual(oct(100), '0144') + self.assertEqual(oct(100L), '0144L') + self.assertEqual(oct(-100), '-0144') + self.assertEqual(oct(-100L), '-0144L') + self.assertRaises(TypeError, oct, ()) + + def write_testfile(self): + # NB the first 4 lines are also used to test input and raw_input, below + fp = open(TESTFN, 'w') + try: + fp.write('1+1\n') + fp.write('1+1\n') + fp.write('The quick brown fox jumps over the lazy dog') + fp.write('.\n') + fp.write('Dear John\n') + fp.write('XXX'*100) + fp.write('YYY'*100) + finally: + fp.close() + + def test_open(self): + self.write_testfile() + fp = open(TESTFN, 'r') + try: + self.assertEqual(fp.readline(4), '1+1\n') + self.assertEqual(fp.readline(4), '1+1\n') + self.assertEqual(fp.readline(), 'The quick brown fox jumps over the lazy dog.\n') + self.assertEqual(fp.readline(4), 'Dear') + self.assertEqual(fp.readline(100), ' John\n') + self.assertEqual(fp.read(300), 'XXX'*100) + self.assertEqual(fp.read(1000), 'YYY'*100) + finally: + fp.close() + unlink(TESTFN) + + def test_ord(self): + self.assertEqual(ord(' '), 32) + self.assertEqual(ord('A'), 65) + self.assertEqual(ord('a'), 97) + if have_unicode: + self.assertEqual(ord(unichr(sys.maxunicode)), sys.maxunicode) + self.assertRaises(TypeError, ord, 42) + if have_unicode: + self.assertRaises(TypeError, ord, unicode("12")) + + def test_pow(self): + self.assertEqual(pow(0,0), 1) + self.assertEqual(pow(0,1), 0) + self.assertEqual(pow(1,0), 1) + self.assertEqual(pow(1,1), 1) + + self.assertEqual(pow(2,0), 1) + self.assertEqual(pow(2,10), 1024) + self.assertEqual(pow(2,20), 1024*1024) + self.assertEqual(pow(2,30), 1024*1024*1024) + + self.assertEqual(pow(-2,0), 1) + self.assertEqual(pow(-2,1), -2) + self.assertEqual(pow(-2,2), 4) + self.assertEqual(pow(-2,3), -8) + + self.assertEqual(pow(0L,0), 1) + self.assertEqual(pow(0L,1), 0) + self.assertEqual(pow(1L,0), 1) + self.assertEqual(pow(1L,1), 1) + + self.assertEqual(pow(2L,0), 1) + self.assertEqual(pow(2L,10), 1024) + self.assertEqual(pow(2L,20), 1024*1024) + self.assertEqual(pow(2L,30), 1024*1024*1024) + + self.assertEqual(pow(-2L,0), 1) + self.assertEqual(pow(-2L,1), -2) + self.assertEqual(pow(-2L,2), 4) + self.assertEqual(pow(-2L,3), -8) + + self.assertAlmostEqual(pow(0.,0), 1.) + self.assertAlmostEqual(pow(0.,1), 0.) + self.assertAlmostEqual(pow(1.,0), 1.) + self.assertAlmostEqual(pow(1.,1), 1.) + + self.assertAlmostEqual(pow(2.,0), 1.) + self.assertAlmostEqual(pow(2.,10), 1024.) + self.assertAlmostEqual(pow(2.,20), 1024.*1024.) + self.assertAlmostEqual(pow(2.,30), 1024.*1024.*1024.) + + self.assertAlmostEqual(pow(-2.,0), 1.) + self.assertAlmostEqual(pow(-2.,1), -2.) + self.assertAlmostEqual(pow(-2.,2), 4.) + self.assertAlmostEqual(pow(-2.,3), -8.) 
+ + for x in 2, 2L, 2.0: + for y in 10, 10L, 10.0: + for z in 1000, 1000L, 1000.0: + if isinstance(x, float) or \ + isinstance(y, float) or \ + isinstance(z, float): + self.assertRaises(TypeError, pow, x, y, z) + else: + self.assertAlmostEqual(pow(x, y, z), 24.0) + + self.assertRaises(TypeError, pow, -1, -2, 3) + self.assertRaises(ValueError, pow, 1, 2, 0) + self.assertRaises(TypeError, pow, -1L, -2L, 3L) + self.assertRaises(ValueError, pow, 1L, 2L, 0L) + self.assertRaises(ValueError, pow, -342.43, 0.234) + + self.assertRaises(TypeError, pow) + + def test_range(self): + self.assertEqual(range(3), [0, 1, 2]) + self.assertEqual(range(1, 5), [1, 2, 3, 4]) + self.assertEqual(range(0), []) + self.assertEqual(range(-3), []) + self.assertEqual(range(1, 10, 3), [1, 4, 7]) + self.assertEqual(range(5, -5, -3), [5, 2, -1, -4]) + + # Now test range() with longs + self.assertEqual(range(-2**100), []) + self.assertEqual(range(0, -2**100), []) + self.assertEqual(range(0, 2**100, -1), []) + self.assertEqual(range(0, 2**100, -1), []) + + a = long(10 * sys.maxint) + b = long(100 * sys.maxint) + c = long(50 * sys.maxint) + + self.assertEqual(range(a, a+2), [a, a+1]) + self.assertEqual(range(a+2, a, -1L), [a+2, a+1]) + self.assertEqual(range(a+4, a, -2), [a+4, a+2]) + + seq = range(a, b, c) + self.assert_(a in seq) + self.assert_(b not in seq) + self.assertEqual(len(seq), 2) + + seq = range(b, a, -c) + self.assert_(b in seq) + self.assert_(a not in seq) + self.assertEqual(len(seq), 2) + + seq = range(-a, -b, -c) + self.assert_(-a in seq) + self.assert_(-b not in seq) + self.assertEqual(len(seq), 2) + + self.assertRaises(TypeError, range) + self.assertRaises(TypeError, range, 1, 2, 3, 4) + self.assertRaises(ValueError, range, 1, 2, 0) + self.assertRaises(ValueError, range, a, a + 1, long(0)) + + class badzero(int): + def __cmp__(self, other): + raise RuntimeError + self.assertRaises(RuntimeError, range, a, a + 1, badzero(1)) + + # Reject floats when it would require PyLongs to represent. + # (smaller floats still accepted, but deprecated) + self.assertRaises(TypeError, range, 1e100, 1e101, 1e101) + + self.assertRaises(TypeError, range, 0, "spam") + self.assertRaises(TypeError, range, 0, 42, "spam") + + self.assertRaises(OverflowError, range, -sys.maxint, sys.maxint) + self.assertRaises(OverflowError, range, 0, 2*sys.maxint) + + def test_input_and_raw_input(self): + self.write_testfile() + fp = open(TESTFN, 'r') + savestdin = sys.stdin + savestdout = sys.stdout # Eats the echo + try: + sys.stdin = fp + sys.stdout = BitBucket() + self.assertEqual(input(), 2) + self.assertEqual(input('testing\n'), 2) + self.assertEqual(raw_input(), 'The quick brown fox jumps over the lazy dog.') + self.assertEqual(raw_input('testing\n'), 'Dear John') + + # SF 1535165: don't segfault on closed stdin + # sys.stdout must be a regular file for triggering + sys.stdout = savestdout + sys.stdin.close() + self.assertRaises(ValueError, input) + + sys.stdout = BitBucket() + sys.stdin = cStringIO.StringIO("NULL\0") + self.assertRaises(TypeError, input, 42, 42) + sys.stdin = cStringIO.StringIO(" 'whitespace'") + self.assertEqual(input(), 'whitespace') + sys.stdin = cStringIO.StringIO() + self.assertRaises(EOFError, input) + + # SF 876178: make sure input() respect future options. 
+ sys.stdin = cStringIO.StringIO('1/2') + sys.stdout = cStringIO.StringIO() + exec compile('print input()', 'test_builtin_tmp', 'exec') + sys.stdin.seek(0, 0) + exec compile('from __future__ import division;print input()', + 'test_builtin_tmp', 'exec') + sys.stdin.seek(0, 0) + exec compile('print input()', 'test_builtin_tmp', 'exec') + # The result we expect depends on whether new division semantics + # are already in effect. + if 1/2 == 0: + # This test was compiled with old semantics. + expected = ['0', '0.5', '0'] + else: + # This test was compiled with new semantics (e.g., -Qnew + # was given on the command line. + expected = ['0.5', '0.5', '0.5'] + self.assertEqual(sys.stdout.getvalue().splitlines(), expected) + + del sys.stdout + self.assertRaises(RuntimeError, input, 'prompt') + del sys.stdin + self.assertRaises(RuntimeError, input, 'prompt') + finally: + sys.stdin = savestdin + sys.stdout = savestdout + fp.close() + unlink(TESTFN) + + def test_reduce(self): + self.assertEqual(reduce(lambda x, y: x+y, ['a', 'b', 'c'], ''), 'abc') + self.assertEqual( + reduce(lambda x, y: x+y, [['a', 'c'], [], ['d', 'w']], []), + ['a','c','d','w'] + ) + self.assertEqual(reduce(lambda x, y: x*y, range(2,8), 1), 5040) + self.assertEqual( + reduce(lambda x, y: x*y, range(2,21), 1L), + 2432902008176640000L + ) + self.assertEqual(reduce(lambda x, y: x+y, Squares(10)), 285) + self.assertEqual(reduce(lambda x, y: x+y, Squares(10), 0), 285) + self.assertEqual(reduce(lambda x, y: x+y, Squares(0), 0), 0) + self.assertRaises(TypeError, reduce) + self.assertRaises(TypeError, reduce, 42, 42) + self.assertRaises(TypeError, reduce, 42, 42, 42) + self.assertEqual(reduce(42, "1"), "1") # func is never called with one item + self.assertEqual(reduce(42, "", "1"), "1") # func is never called with one item + self.assertRaises(TypeError, reduce, 42, (42, 42)) + + class BadSeq: + def __getitem__(self, index): + raise ValueError + self.assertRaises(ValueError, reduce, 42, BadSeq()) + + def test_reload(self): + import marshal + reload(marshal) + import string + reload(string) + ## import sys + ## self.assertRaises(ImportError, reload, sys) + + def test_repr(self): + self.assertEqual(repr(''), '\'\'') + self.assertEqual(repr(0), '0') + self.assertEqual(repr(0L), '0L') + self.assertEqual(repr(()), '()') + self.assertEqual(repr([]), '[]') + self.assertEqual(repr({}), '{}') + a = [] + a.append(a) + self.assertEqual(repr(a), '[[...]]') + a = {} + a[0] = a + self.assertEqual(repr(a), '{0: {...}}') + + def test_round(self): + self.assertEqual(round(0.0), 0.0) + self.assertEqual(round(1.0), 1.0) + self.assertEqual(round(10.0), 10.0) + self.assertEqual(round(1000000000.0), 1000000000.0) + self.assertEqual(round(1e20), 1e20) + + self.assertEqual(round(-1.0), -1.0) + self.assertEqual(round(-10.0), -10.0) + self.assertEqual(round(-1000000000.0), -1000000000.0) + self.assertEqual(round(-1e20), -1e20) + + self.assertEqual(round(0.1), 0.0) + self.assertEqual(round(1.1), 1.0) + self.assertEqual(round(10.1), 10.0) + self.assertEqual(round(1000000000.1), 1000000000.0) + + self.assertEqual(round(-1.1), -1.0) + self.assertEqual(round(-10.1), -10.0) + self.assertEqual(round(-1000000000.1), -1000000000.0) + + self.assertEqual(round(0.9), 1.0) + self.assertEqual(round(9.9), 10.0) + self.assertEqual(round(999999999.9), 1000000000.0) + + self.assertEqual(round(-0.9), -1.0) + self.assertEqual(round(-9.9), -10.0) + self.assertEqual(round(-999999999.9), -1000000000.0) + + self.assertEqual(round(-8.0, -1), -10.0) + + # test new kwargs + 
self.assertEqual(round(number=-8.0, ndigits=-1), -10.0) + + self.assertRaises(TypeError, round) + + def test_setattr(self): + setattr(sys, 'spam', 1) + self.assertEqual(sys.spam, 1) + self.assertRaises(TypeError, setattr, sys, 1, 'spam') + self.assertRaises(TypeError, setattr) + + def test_str(self): + self.assertEqual(str(''), '') + self.assertEqual(str(0), '0') + self.assertEqual(str(0L), '0') + self.assertEqual(str(()), '()') + self.assertEqual(str([]), '[]') + self.assertEqual(str({}), '{}') + a = [] + a.append(a) + self.assertEqual(str(a), '[[...]]') + a = {} + a[0] = a + self.assertEqual(str(a), '{0: {...}}') + + def test_sum(self): + self.assertEqual(sum([]), 0) + self.assertEqual(sum(range(2,8)), 27) + self.assertEqual(sum(iter(range(2,8))), 27) + self.assertEqual(sum(Squares(10)), 285) + self.assertEqual(sum(iter(Squares(10))), 285) + self.assertEqual(sum([[1], [2], [3]], []), [1, 2, 3]) + + self.assertRaises(TypeError, sum) + self.assertRaises(TypeError, sum, 42) + self.assertRaises(TypeError, sum, ['a', 'b', 'c']) + self.assertRaises(TypeError, sum, ['a', 'b', 'c'], '') + self.assertRaises(TypeError, sum, [[1], [2], [3]]) + self.assertRaises(TypeError, sum, [{2:3}]) + self.assertRaises(TypeError, sum, [{2:3}]*2, {2:3}) + + class BadSeq: + def __getitem__(self, index): + raise ValueError + self.assertRaises(ValueError, sum, BadSeq()) + + def test_tuple(self): + self.assertEqual(tuple(()), ()) + t0_3 = (0, 1, 2, 3) + t0_3_bis = tuple(t0_3) + self.assert_(t0_3 is t0_3_bis) + self.assertEqual(tuple([]), ()) + self.assertEqual(tuple([0, 1, 2, 3]), (0, 1, 2, 3)) + self.assertEqual(tuple(''), ()) + self.assertEqual(tuple('spam'), ('s', 'p', 'a', 'm')) + + def test_type(self): + self.assertEqual(type(''), type('123')) + self.assertNotEqual(type(''), type(())) + + def test_unichr(self): + if have_unicode: + self.assertEqual(unichr(32), unicode(' ')) + self.assertEqual(unichr(65), unicode('A')) + self.assertEqual(unichr(97), unicode('a')) + self.assertEqual( + unichr(sys.maxunicode), + unicode('\\U%08x' % (sys.maxunicode), 'unicode-escape') + ) + self.assertRaises(ValueError, unichr, sys.maxunicode+1) + self.assertRaises(TypeError, unichr) + + # We don't want self in vars(), so these are static methods + + @staticmethod + def get_vars_f0(): + return vars() + + @staticmethod + def get_vars_f2(): + BuiltinTest.get_vars_f0() + a = 1 + b = 2 + return vars() + + def test_vars(self): + self.assertEqual(set(vars()), set(dir())) + import sys + self.assertEqual(set(vars(sys)), set(dir(sys))) + self.assertEqual(self.get_vars_f0(), {}) + self.assertEqual(self.get_vars_f2(), {'a': 1, 'b': 2}) + self.assertRaises(TypeError, vars, 42, 42) + self.assertRaises(TypeError, vars, 42) + + def test_zip(self): + a = (1, 2, 3) + b = (4, 5, 6) + t = [(1, 4), (2, 5), (3, 6)] + self.assertEqual(zip(a, b), t) + b = [4, 5, 6] + self.assertEqual(zip(a, b), t) + b = (4, 5, 6, 7) + self.assertEqual(zip(a, b), t) + class I: + def __getitem__(self, i): + if i < 0 or i > 2: raise IndexError + return i + 4 + self.assertEqual(zip(a, I()), t) + self.assertEqual(zip(), []) + self.assertEqual(zip(*[]), []) + self.assertRaises(TypeError, zip, None) + class G: + pass + self.assertRaises(TypeError, zip, a, G()) + + # Make sure zip doesn't try to allocate a billion elements for the + # result list when one of its arguments doesn't say how long it is. + # A MemoryError is the most likely failure mode. 
+ class SequenceWithoutALength: + def __getitem__(self, i): + if i == 5: + raise IndexError + else: + return i + self.assertEqual( + zip(SequenceWithoutALength(), xrange(2**30)), + list(enumerate(range(5))) + ) + + class BadSeq: + def __getitem__(self, i): + if i == 5: + raise ValueError + else: + return i + self.assertRaises(ValueError, zip, BadSeq(), BadSeq()) + +class TestSorted(unittest.TestCase): + + def test_basic(self): + data = range(100) + copy = data[:] + random.shuffle(copy) + self.assertEqual(data, sorted(copy)) + self.assertNotEqual(data, copy) + + data.reverse() + random.shuffle(copy) + self.assertEqual(data, sorted(copy, cmp=lambda x, y: cmp(y,x))) + self.assertNotEqual(data, copy) + random.shuffle(copy) + self.assertEqual(data, sorted(copy, key=lambda x: -x)) + self.assertNotEqual(data, copy) + random.shuffle(copy) + self.assertEqual(data, sorted(copy, reverse=1)) + self.assertNotEqual(data, copy) + + def test_inputtypes(self): + s = 'abracadabra' + types = [list, tuple] + if have_unicode: + types.insert(0, unicode) + for T in types: + self.assertEqual(sorted(s), sorted(T(s))) + + s = ''.join(dict.fromkeys(s).keys()) # unique letters only + types = [set, frozenset, list, tuple, dict.fromkeys] + if have_unicode: + types.insert(0, unicode) + for T in types: + self.assertEqual(sorted(s), sorted(T(s))) + + def test_baddecorator(self): + data = 'The quick Brown fox Jumped over The lazy Dog'.split() + self.assertRaises(TypeError, sorted, data, None, lambda x,y: 0) + +def test_main(verbose=None): + test_classes = (BuiltinTest, TestSorted) + + run_unittest(*test_classes) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + run_unittest(*test_classes) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,372 @@ +#!/usr/bin/python +from test import test_support +from test.test_support import TESTFN + +import unittest +from cStringIO import StringIO +import os +import popen2 +import sys + +import bz2 +from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor + +has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx", "riscos") + +class BaseTest(unittest.TestCase): + "Base for other testcases." 
+ TEXT = 'root:x:0:0:root:/root:/bin/bash\nbin:x:1:1:bin:/bin:\ndaemon:x:2:2:daemon:/sbin:\nadm:x:3:4:adm:/var/adm:\nlp:x:4:7:lp:/var/spool/lpd:\nsync:x:5:0:sync:/sbin:/bin/sync\nshutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\nhalt:x:7:0:halt:/sbin:/sbin/halt\nmail:x:8:12:mail:/var/spool/mail:\nnews:x:9:13:news:/var/spool/news:\nuucp:x:10:14:uucp:/var/spool/uucp:\noperator:x:11:0:operator:/root:\ngames:x:12:100:games:/usr/games:\ngopher:x:13:30:gopher:/usr/lib/gopher-data:\nftp:x:14:50:FTP User:/var/ftp:/bin/bash\nnobody:x:65534:65534:Nobody:/home:\npostfix:x:100:101:postfix:/var/spool/postfix:\nniemeyer:x:500:500::/home/niemeyer:/bin/bash\npostgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\nmysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\nwww:x:103:104::/var/www:/bin/false\n' + DATA = 'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`' + DATA_CRLF = 'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80' + + if has_cmdline_bunzip2: + def decompress(self, data): + pop = popen2.Popen3("bunzip2", capturestderr=1) + pop.tochild.write(data) + pop.tochild.close() + ret = pop.fromchild.read() + pop.fromchild.close() + if pop.wait() != 0: + ret = bz2.decompress(data) + return ret + + else: + # popen2.Popen3 doesn't exist on Windows, and even if it did, bunzip2 + # isn't available to run. + def decompress(self, data): + return bz2.decompress(data) + +class BZ2FileTest(BaseTest): + "Test BZ2File type miscellaneous methods." 
+ + def setUp(self): + self.filename = TESTFN + + def tearDown(self): + if os.path.isfile(self.filename): + os.unlink(self.filename) + + def createTempFile(self, crlf=0): + f = open(self.filename, "wb") + if crlf: + data = self.DATA_CRLF + else: + data = self.DATA + f.write(data) + f.close() + + def testRead(self): + # "Test BZ2File.read()" + self.createTempFile() + bz2f = BZ2File(self.filename) + self.assertRaises(TypeError, bz2f.read, None) + self.assertEqual(bz2f.read(), self.TEXT) + bz2f.close() + + def testReadChunk10(self): + # "Test BZ2File.read() in chunks of 10 bytes" + self.createTempFile() + bz2f = BZ2File(self.filename) + text = '' + while 1: + str = bz2f.read(10) + if not str: + break + text += str + self.assertEqual(text, text) + bz2f.close() + + def testRead100(self): + # "Test BZ2File.read(100)" + self.createTempFile() + bz2f = BZ2File(self.filename) + self.assertEqual(bz2f.read(100), self.TEXT[:100]) + bz2f.close() + + def testReadLine(self): + # "Test BZ2File.readline()" + self.createTempFile() + bz2f = BZ2File(self.filename) + self.assertRaises(TypeError, bz2f.readline, None) + sio = StringIO(self.TEXT) + for line in sio.readlines(): + self.assertEqual(bz2f.readline(), line) + bz2f.close() + + def testReadLines(self): + # "Test BZ2File.readlines()" + self.createTempFile() + bz2f = BZ2File(self.filename) + self.assertRaises(TypeError, bz2f.readlines, None) + sio = StringIO(self.TEXT) + self.assertEqual(bz2f.readlines(), sio.readlines()) + bz2f.close() + + def testIterator(self): + # "Test iter(BZ2File)" + self.createTempFile() + bz2f = BZ2File(self.filename) + sio = StringIO(self.TEXT) + self.assertEqual(list(iter(bz2f)), sio.readlines()) + bz2f.close() + + def testXReadLines(self): + # "Test BZ2File.xreadlines()" + self.createTempFile() + bz2f = BZ2File(self.filename) + sio = StringIO(self.TEXT) + self.assertEqual(list(bz2f.xreadlines()), sio.readlines()) + bz2f.close() + + def testUniversalNewlinesLF(self): + # "Test BZ2File.read() with universal newlines (\\n)" + self.createTempFile() + bz2f = BZ2File(self.filename, "rU") + self.assertEqual(bz2f.read(), self.TEXT) + self.assertEqual(bz2f.newlines, "\n") + bz2f.close() + + def testUniversalNewlinesCRLF(self): + # "Test BZ2File.read() with universal newlines (\\r\\n)" + self.createTempFile(crlf=1) + bz2f = BZ2File(self.filename, "rU") + self.assertEqual(bz2f.read(), self.TEXT) + self.assertEqual(bz2f.newlines, "\r\n") + bz2f.close() + + def testWrite(self): + # "Test BZ2File.write()" + bz2f = BZ2File(self.filename, "w") + self.assertRaises(TypeError, bz2f.write) + bz2f.write(self.TEXT) + bz2f.close() + f = open(self.filename, 'rb') + self.assertEqual(self.decompress(f.read()), self.TEXT) + f.close() + + def testWriteChunks10(self): + # "Test BZ2File.write() with chunks of 10 bytes" + bz2f = BZ2File(self.filename, "w") + n = 0 + while 1: + str = self.TEXT[n*10:(n+1)*10] + if not str: + break + bz2f.write(str) + n += 1 + bz2f.close() + f = open(self.filename, 'rb') + self.assertEqual(self.decompress(f.read()), self.TEXT) + f.close() + + def testWriteLines(self): + # "Test BZ2File.writelines()" + bz2f = BZ2File(self.filename, "w") + self.assertRaises(TypeError, bz2f.writelines) + sio = StringIO(self.TEXT) + bz2f.writelines(sio.readlines()) + bz2f.close() + # patch #1535500 + self.assertRaises(ValueError, bz2f.writelines, ["a"]) + f = open(self.filename, 'rb') + self.assertEqual(self.decompress(f.read()), self.TEXT) + f.close() + + def testWriteMethodsOnReadOnlyFile(self): + bz2f = BZ2File(self.filename, "w") + 
bz2f.write("abc") + bz2f.close() + + bz2f = BZ2File(self.filename, "r") + self.assertRaises(IOError, bz2f.write, "a") + self.assertRaises(IOError, bz2f.writelines, ["a"]) + + def testSeekForward(self): + # "Test BZ2File.seek(150, 0)" + self.createTempFile() + bz2f = BZ2File(self.filename) + self.assertRaises(TypeError, bz2f.seek) + bz2f.seek(150) + self.assertEqual(bz2f.read(), self.TEXT[150:]) + bz2f.close() + + def testSeekBackwards(self): + # "Test BZ2File.seek(-150, 1)" + self.createTempFile() + bz2f = BZ2File(self.filename) + bz2f.read(500) + bz2f.seek(-150, 1) + self.assertEqual(bz2f.read(), self.TEXT[500-150:]) + bz2f.close() + + def testSeekBackwardsFromEnd(self): + # "Test BZ2File.seek(-150, 2)" + self.createTempFile() + bz2f = BZ2File(self.filename) + bz2f.seek(-150, 2) + self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:]) + bz2f.close() + + def testSeekPostEnd(self): + # "Test BZ2File.seek(150000)" + self.createTempFile() + bz2f = BZ2File(self.filename) + bz2f.seek(150000) + self.assertEqual(bz2f.tell(), len(self.TEXT)) + self.assertEqual(bz2f.read(), "") + bz2f.close() + + def testSeekPostEndTwice(self): + # "Test BZ2File.seek(150000) twice" + self.createTempFile() + bz2f = BZ2File(self.filename) + bz2f.seek(150000) + bz2f.seek(150000) + self.assertEqual(bz2f.tell(), len(self.TEXT)) + self.assertEqual(bz2f.read(), "") + bz2f.close() + + def testSeekPreStart(self): + # "Test BZ2File.seek(-150, 0)" + self.createTempFile() + bz2f = BZ2File(self.filename) + bz2f.seek(-150) + self.assertEqual(bz2f.tell(), 0) + self.assertEqual(bz2f.read(), self.TEXT) + bz2f.close() + + def testOpenDel(self): + # "Test opening and deleting a file many times" + self.createTempFile() + for i in xrange(10000): + o = BZ2File(self.filename) + o.close() + del o + + def testOpenNonexistent(self): + # "Test opening a nonexistent file" + self.assertRaises(IOError, BZ2File, "/non/existent") + + def testModeU(self): + # Bug #1194181: bz2.BZ2File opened for write with mode "U" + self.createTempFile() + bz2f = BZ2File(self.filename, "U") + bz2f.close() + f = file(self.filename) + f.seek(0, 2) + self.assertEqual(f.tell(), len(self.DATA)) + f.close() + + def testBug1191043(self): + # readlines() for files containing no newline + data = 'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t' + f = open(self.filename, "wb") + f.write(data) + f.close() + bz2f = BZ2File(self.filename) + lines = bz2f.readlines() + bz2f.close() + self.assertEqual(lines, ['Test']) + bz2f = BZ2File(self.filename) + xlines = list(bz2f.xreadlines()) + bz2f.close() + self.assertEqual(xlines, ['Test']) + + +class BZ2CompressorTest(BaseTest): + def testCompress(self): + # "Test BZ2Compressor.compress()/flush()" + bz2c = BZ2Compressor() + self.assertRaises(TypeError, bz2c.compress) + data = bz2c.compress(self.TEXT) + data += bz2c.flush() + self.assertEqual(self.decompress(data), self.TEXT) + + def testCompressChunks10(self): + # "Test BZ2Compressor.compress()/flush() with chunks of 10 bytes" + bz2c = BZ2Compressor() + n = 0 + data = '' + while 1: + str = self.TEXT[n*10:(n+1)*10] + if not str: + break + data += bz2c.compress(str) + n += 1 + data += bz2c.flush() + self.assertEqual(self.decompress(data), self.TEXT) + +class BZ2DecompressorTest(BaseTest): + def test_Constructor(self): + self.assertRaises(TypeError, BZ2Decompressor, 42) + + def testDecompress(self): + # "Test BZ2Decompressor.decompress()" + bz2d = BZ2Decompressor() + self.assertRaises(TypeError, bz2d.decompress) + text = 
bz2d.decompress(self.DATA) + self.assertEqual(text, self.TEXT) + + def testDecompressChunks10(self): + # "Test BZ2Decompressor.decompress() with chunks of 10 bytes" + bz2d = BZ2Decompressor() + text = '' + n = 0 + while 1: + str = self.DATA[n*10:(n+1)*10] + if not str: + break + text += bz2d.decompress(str) + n += 1 + self.assertEqual(text, self.TEXT) + + def testDecompressUnusedData(self): + # "Test BZ2Decompressor.decompress() with unused data" + bz2d = BZ2Decompressor() + unused_data = "this is unused data" + text = bz2d.decompress(self.DATA+unused_data) + self.assertEqual(text, self.TEXT) + self.assertEqual(bz2d.unused_data, unused_data) + + def testEOFError(self): + # "Calling BZ2Decompressor.decompress() after EOS must raise EOFError" + bz2d = BZ2Decompressor() + text = bz2d.decompress(self.DATA) + self.assertRaises(EOFError, bz2d.decompress, "anything") + + +class FuncTest(BaseTest): + "Test module functions" + + def testCompress(self): + # "Test compress() function" + data = bz2.compress(self.TEXT) + self.assertEqual(self.decompress(data), self.TEXT) + + def testDecompress(self): + # "Test decompress() function" + text = bz2.decompress(self.DATA) + self.assertEqual(text, self.TEXT) + + def testDecompressEmpty(self): + # "Test decompress() function with empty string" + text = bz2.decompress("") + self.assertEqual(text, "") + + def testDecompressIncomplete(self): + # "Test decompress() function with incomplete data" + self.assertRaises(ValueError, bz2.decompress, self.DATA[:-10]) + +def test_main(): + test_support.run_unittest( + BZ2FileTest, + BZ2CompressorTest, + BZ2DecompressorTest, + FuncTest + ) + test_support.reap_children() + +if __name__ == '__main__': + test_main() + +# vim:ts=4:sw=4 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,415 @@ +"Test the functionality of Python classes implementing operators." + +from test.test_support import TestFailed + +testmeths = [ + +# Binary operations + "add", + "radd", + "sub", + "rsub", + "mul", + "rmul", + "div", + "rdiv", + "mod", + "rmod", + "divmod", + "rdivmod", + "pow", + "rpow", + "rshift", + "rrshift", + "lshift", + "rlshift", + "and", + "rand", + "or", + "ror", + "xor", + "rxor", + +# List/dict operations + "contains", + "getitem", + "getslice", + "setitem", + "setslice", + "delitem", + "delslice", + +# Unary operations + "neg", + "pos", + "abs", + +# generic operations + "init", + ] + +# These need to return something other than None +# "coerce", +# "hash", +# "str", +# "repr", +# "int", +# "long", +# "float", +# "oct", +# "hex", + +# These are separate because they can influence the test of other methods. 
+# "getattr", +# "setattr", +# "delattr", + +class AllTests: + def __coerce__(self, *args): + print "__coerce__:", args + return (self,) + args + + def __hash__(self, *args): + print "__hash__:", args + return hash(id(self)) + + def __str__(self, *args): + print "__str__:", args + return "AllTests" + + def __repr__(self, *args): + print "__repr__:", args + return "AllTests" + + def __int__(self, *args): + print "__int__:", args + return 1 + + def __float__(self, *args): + print "__float__:", args + return 1.0 + + def __long__(self, *args): + print "__long__:", args + return 1L + + def __oct__(self, *args): + print "__oct__:", args + return '01' + + def __hex__(self, *args): + print "__hex__:", args + return '0x1' + + def __cmp__(self, *args): + print "__cmp__:", args + return 0 + + def __del__(self, *args): + print "__del__:", args + +# Synthesize AllTests methods from the names in testmeths. + +method_template = """\ +def __%(method)s__(self, *args): + print "__%(method)s__:", args +""" + +for method in testmeths: + exec method_template % locals() in AllTests.__dict__ + +del method, method_template + +# this also tests __init__ of course. +testme = AllTests() + +# Binary operations + +testme + 1 +1 + testme + +testme - 1 +1 - testme + +testme * 1 +1 * testme + +if 1/2 == 0: + testme / 1 + 1 / testme +else: + # True division is in effect, so "/" doesn't map to __div__ etc; but + # the canned expected-output file requires that __div__ etc get called. + testme.__coerce__(1) + testme.__div__(1) + testme.__coerce__(1) + testme.__rdiv__(1) + +testme % 1 +1 % testme + +divmod(testme,1) +divmod(1, testme) + +testme ** 1 +1 ** testme + +testme >> 1 +1 >> testme + +testme << 1 +1 << testme + +testme & 1 +1 & testme + +testme | 1 +1 | testme + +testme ^ 1 +1 ^ testme + + +# List/dict operations + +class Empty: pass + +try: + 1 in Empty() + print 'failed, should have raised TypeError' +except TypeError: + pass + +1 in testme + +testme[1] +testme[1] = 1 +del testme[1] + +testme[:42] +testme[:42] = "The Answer" +del testme[:42] + +testme[2:1024:10] +testme[2:1024:10] = "A lot" +del testme[2:1024:10] + +testme[:42, ..., :24:, 24, 100] +testme[:42, ..., :24:, 24, 100] = "Strange" +del testme[:42, ..., :24:, 24, 100] + + +# Now remove the slice hooks to see if converting normal slices to slice +# object works. + +del AllTests.__getslice__ +del AllTests.__setslice__ +del AllTests.__delslice__ + +import sys +if sys.platform[:4] != 'java': + testme[:42] + testme[:42] = "The Answer" + del testme[:42] +else: + # This works under Jython, but the actual slice values are + # different. + print "__getitem__: (slice(0, 42, None),)" + print "__setitem__: (slice(0, 42, None), 'The Answer')" + print "__delitem__: (slice(0, 42, None),)" + +# Unary operations + +-testme ++testme +abs(testme) +int(testme) +long(testme) +float(testme) +oct(testme) +hex(testme) + +# And the rest... + +hash(testme) +repr(testme) +str(testme) + +testme == 1 +testme < 1 +testme > 1 +testme <> 1 +testme != 1 +1 == testme +1 < testme +1 > testme +1 <> testme +1 != testme + +# This test has to be last (duh.) 
+ +del testme +if sys.platform[:4] == 'java': + import java + java.lang.System.gc() +else: + import gc + gc.collect() + +# Interfering tests + +class ExtraTests: + def __getattr__(self, *args): + print "__getattr__:", args + return "SomeVal" + + def __setattr__(self, *args): + print "__setattr__:", args + + def __delattr__(self, *args): + print "__delattr__:", args + +testme = ExtraTests() +testme.spam +testme.eggs = "spam, spam, spam and ham" +del testme.cardinal + + +# return values of some method are type-checked +class BadTypeClass: + def __int__(self): + return None + __float__ = __int__ + __long__ = __int__ + __str__ = __int__ + __repr__ = __int__ + __oct__ = __int__ + __hex__ = __int__ + +def check_exc(stmt, exception): + """Raise TestFailed if executing 'stmt' does not raise 'exception' + """ + try: + exec stmt + except exception: + pass + else: + raise TestFailed, "%s should raise %s" % (stmt, exception) + +check_exc("int(BadTypeClass())", TypeError) +check_exc("float(BadTypeClass())", TypeError) +check_exc("long(BadTypeClass())", TypeError) +check_exc("str(BadTypeClass())", TypeError) +check_exc("repr(BadTypeClass())", TypeError) +check_exc("oct(BadTypeClass())", TypeError) +check_exc("hex(BadTypeClass())", TypeError) + +# mixing up ints and longs is okay +class IntLongMixClass: + def __int__(self): + return 0L + + def __long__(self): + return 0 + +try: + int(IntLongMixClass()) +except TypeError: + raise TestFailed, "TypeError should not be raised" + +try: + long(IntLongMixClass()) +except TypeError: + raise TestFailed, "TypeError should not be raised" + + +# Test correct errors from hash() on objects with comparisons but no __hash__ + +class C0: + pass + +hash(C0()) # This should work; the next two should raise TypeError + +class C1: + def __cmp__(self, other): return 0 + +check_exc("hash(C1())", TypeError) + +class C2: + def __eq__(self, other): return 1 + +check_exc("hash(C2())", TypeError) + +# Test for SF bug 532646 + +class A: + pass +A.__call__ = A() +a = A() +try: + a() # This should not segfault +except RuntimeError: + pass +else: + raise TestFailed, "how could this not have overflowed the stack?" + + +# Tests for exceptions raised in instance_getattr2(). 
+ +def booh(self): + raise AttributeError, "booh" + +class A: + a = property(booh) +try: + A().a # Raised AttributeError: A instance has no attribute 'a' +except AttributeError, x: + if str(x) != "booh": + print "attribute error for A().a got masked:", str(x) + +class E: + __eq__ = property(booh) +E() == E() # In debug mode, caused a C-level assert() to fail + +class I: + __init__ = property(booh) +try: + I() # In debug mode, printed XXX undetected error and raises AttributeError +except AttributeError, x: + pass +else: + print "attribute error for I.__init__ got masked" + + +# Test comparison and hash of methods +class A: + def __init__(self, x): + self.x = x + def f(self): + pass + def g(self): + pass + def __eq__(self, other): + return self.x == other.x + def __hash__(self): + return self.x +class B(A): + pass + +a1 = A(1) +a2 = A(2) +assert a1.f == a1.f +assert a1.f != a2.f +assert a1.f != a1.g +assert a1.f == A(1).f +assert hash(a1.f) == hash(a1.f) +assert hash(a1.f) == hash(A(1).f) + +assert A.f != a1.f +assert A.f != A.g +assert B.f == A.f +assert hash(B.f) == hash(A.f) + +# the following triggers a SystemError in 2.4 +a = A(hash(A.f.im_func)^(-1)) +hash(a.f) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,195 @@ +""" + Test cases for codeop.py + Nick Mathewson +""" +import unittest +from test.test_support import run_unittest, is_jython + +from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT + +if is_jython: + import sys + import cStringIO + + def unify_callables(d): + for n,v in d.items(): + if callable(v): + d[n] = callable + return d + +class CodeopTests(unittest.TestCase): + + def assertValid(self, str, symbol='single'): + '''succeed iff str is a valid piece of code''' + if is_jython: + code = compile_command(str, "", symbol) + self.assert_(code) + if symbol == "single": + d,r = {},{} + saved_stdout = sys.stdout + sys.stdout = cStringIO.StringIO() + try: + exec code in d + exec compile(str,"","single") in r + finally: + sys.stdout = saved_stdout + elif symbol == 'eval': + ctx = {'a': 2} + d = { 'value': eval(code,ctx) } + r = { 'value': eval(str,ctx) } + self.assertEquals(unify_callables(r),unify_callables(d)) + else: + expected = compile(str, "", symbol, PyCF_DONT_IMPLY_DEDENT) + self.assertEquals( compile_command(str, "", symbol), expected) + + def assertIncomplete(self, str, symbol='single'): + '''succeed iff str is the start of a valid piece of code''' + self.assertEquals( compile_command(str, symbol=symbol), None) + + def assertInvalid(self, str, symbol='single', is_syntax=1): + '''succeed iff str is the start of an invalid piece of code''' + try: + compile_command(str,symbol=symbol) + self.fail("No exception thrown for invalid code") + except SyntaxError: + self.assert_(is_syntax) + except OverflowError: + self.assert_(not is_syntax) + + def test_valid(self): + av = self.assertValid + + # special case + if not is_jython: + self.assertEquals(compile_command(""), + compile("pass", "", 'single', + PyCF_DONT_IMPLY_DEDENT)) + self.assertEquals(compile_command("\n"), + compile("pass", "", 'single', + PyCF_DONT_IMPLY_DEDENT)) + else: + av("") + av("\n") + + av("a = 1") + av("\na = 1") + av("a = 1\n") + av("a = 1\n\n") + av("\n\na = 1\n\n") + + av("def x():\n pass\n") + av("if 1:\n pass\n") + + av("\n\nif 1: pass\n") + av("\n\nif 1: 
pass\n\n") + + av("def x():\n\n pass\n") + av("def x():\n pass\n \n") + av("def x():\n pass\n \n") + + av("pass\n") + av("3**3\n") + + av("if 9==3:\n pass\nelse:\n pass\n") + av("if 1:\n pass\n if 1:\n pass\n else:\n pass\n") + + av("#a\n#b\na = 3\n") + av("#a\n\n \na=3\n") + av("a=3\n\n") + av("a = 9+ \\\n3") + + av("3**3","eval") + av("(lambda z: \n z**3)","eval") + + av("9+ \\\n3","eval") + av("9+ \\\n3\n","eval") + + av("\n\na**3","eval") + av("\n \na**3","eval") + av("#a\n#b\na**3","eval") + + def test_incomplete(self): + ai = self.assertIncomplete + + ai("(a **") + ai("(a,b,") + ai("(a,b,(") + ai("(a,b,(") + ai("a = (") + ai("a = {") + ai("b + {") + + ai("if 9==3:\n pass\nelse:") + ai("if 9==3:\n pass\nelse:\n") + ai("if 9==3:\n pass\nelse:\n pass") + ai("if 1:") + ai("if 1:\n") + ai("if 1:\n pass\n if 1:\n pass\n else:") + ai("if 1:\n pass\n if 1:\n pass\n else:\n") + ai("if 1:\n pass\n if 1:\n pass\n else:\n pass") + + ai("def x():") + ai("def x():\n") + ai("def x():\n\n") + + ai("def x():\n pass") + ai("def x():\n pass\n ") + ai("def x():\n pass\n ") + ai("\n\ndef x():\n pass") + + ai("a = 9+ \\") + ai("a = 'a\\") + ai("a = '''xy") + + ai("","eval") + ai("\n","eval") + ai("(","eval") + ai("(\n\n\n","eval") + ai("(9+","eval") + ai("9+ \\","eval") + ai("lambda z: \\","eval") + + def test_invalid(self): + ai = self.assertInvalid + ai("a b") + + ai("a @") + ai("a b @") + ai("a ** @") + + ai("a = ") + ai("a = 9 +") + + ai("def x():\n\npass\n") + + ai("\n\n if 1: pass\n\npass") + + ai("a = 9+ \\\n") + ai("a = 'a\\ ") + ai("a = 'a\\\n") + + # XXX for PyPy: disable these tests (using compile_command with eval + # doesn't raise a SyntaxError if it appears to be an + # incomplete multi-line stmt) + # ai("a = 1","eval") + # ai("a = (","eval") + # ai("]","eval") + # ai("())","eval") + # ai("[}","eval") + # ai("9+","eval") + # ai("lambda z:","eval") + # ai("a b","eval") + + def test_filename(self): + self.assertEquals(compile_command("a = 1\n", "abc").co_filename, + compile("a = 1\n", "abc", 'single').co_filename) + self.assertNotEquals(compile_command("a = 1\n", "abc").co_filename, + compile("a = 1\n", "def", 'single').co_filename) + + +def test_main(): + run_unittest(CodeopTests) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,402 @@ +import unittest +import warnings +import sys +from test import test_support + +class TestSpecifics(unittest.TestCase): + + def test_debug_assignment(self): + # catch assignments to __debug__ + self.assertRaises(SyntaxError, compile, '__debug__ = 1', '?', 'single') + import __builtin__ + prev = __builtin__.__debug__ + setattr(__builtin__, '__debug__', 'sure') + setattr(__builtin__, '__debug__', prev) + + def test_argument_handling(self): + # detect duplicate positional and keyword arguments + self.assertRaises(SyntaxError, eval, 'lambda a,a:0') + self.assertRaises(SyntaxError, eval, 'lambda a,a=1:0') + self.assertRaises(SyntaxError, eval, 'lambda a=1,a=1:0') + try: + exec 'def f(a, a): pass' + self.fail("duplicate arguments") + except SyntaxError: + pass + try: + exec 'def f(a = 0, a = 1): pass' + self.fail("duplicate keyword arguments") + except SyntaxError: + pass + try: + exec 'def f(a): global a; a = 1' + self.fail("variable is global and local") + except SyntaxError: + pass + + 
def test_syntax_error(self): + self.assertRaises(SyntaxError, compile, "1+*3", "filename", "exec") + + def test_duplicate_global_local(self): + try: + exec 'def f(a): global a; a = 1' + self.fail("variable is global and local") + except SyntaxError: + pass + + def test_exec_with_general_mapping_for_locals(self): + + class M: + "Test mapping interface versus possible calls from eval()." + def __getitem__(self, key): + if key == 'a': + return 12 + raise KeyError + def __setitem__(self, key, value): + self.results = (key, value) + def keys(self): + return list('xyz') + + m = M() + g = globals() + exec 'z = a' in g, m + self.assertEqual(m.results, ('z', 12)) + try: + exec 'z = b' in g, m + except NameError: + pass + else: + self.fail('Did not detect a KeyError') + exec 'z = dir()' in g, m + self.assertEqual(m.results, ('z', list('xyz'))) + exec 'z = globals()' in g, m + self.assertEqual(m.results, ('z', g)) + exec 'z = locals()' in g, m + self.assertEqual(m.results, ('z', m)) + try: + exec 'z = b' in m + except NameError: + pass + else: + self.fail('PyPy should accept not real dict globals ') + + class A: + "Non-mapping" + pass + m = A() + try: + exec 'z = a' in g, m + except TypeError: + pass + else: + self.fail('Did not validate locals as a mapping') + + # Verify that dict subclasses work as well + class D(dict): + def __getitem__(self, key): + if key == 'a': + return 12 + return dict.__getitem__(self, key) + d = D() + exec 'z = a' in g, d + self.assertEqual(d['z'], 12) + + def test_extended_arg(self): + longexpr = 'x = x or ' + '-x' * 2500 + code = ''' +def f(x): + %s + %s + %s + %s + %s + %s + %s + %s + %s + %s + # the expressions above have no effect, x == argument + while x: + x -= 1 + # EXTENDED_ARG/JUMP_ABSOLUTE here + return x +''' % ((longexpr,)*10) + exec code + self.assertEqual(f(5), 0) + + def test_complex_args(self): + + def comp_args((a, b)): + return a,b + self.assertEqual(comp_args((1, 2)), (1, 2)) + + def comp_args((a, b)=(3, 4)): + return a, b + self.assertEqual(comp_args((1, 2)), (1, 2)) + self.assertEqual(comp_args(), (3, 4)) + + def comp_args(a, (b, c)): + return a, b, c + self.assertEqual(comp_args(1, (2, 3)), (1, 2, 3)) + + def comp_args(a=2, (b, c)=(3, 4)): + return a, b, c + self.assertEqual(comp_args(1, (2, 3)), (1, 2, 3)) + self.assertEqual(comp_args(), (2, 3, 4)) + + def test_argument_order(self): + try: + exec 'def f(a=1, (b, c)): pass' + self.fail("non-default args after default") + except SyntaxError: + pass + + def test_float_literals(self): + # testing bad float literals + self.assertRaises(SyntaxError, eval, "2e") + self.assertRaises(SyntaxError, eval, "2.0e+") + self.assertRaises(SyntaxError, eval, "1e-") + self.assertRaises(SyntaxError, eval, "3-4e/21") + + def test_indentation(self): + # testing compile() of indented block w/o trailing newline" + s = """ +if 1: + if 2: + pass""" + compile(s, "", "exec") + + # This test is probably specific to CPython and may not generalize + # to other implementations. We are trying to ensure that when + # the first line of code starts after 256, correct line numbers + # in tracebacks are still produced. 
+ def test_leading_newlines(self): + s256 = "".join(["\n"] * 256 + ["spam"]) + co = compile(s256, 'fn', 'exec') + self.assertEqual(co.co_firstlineno, 257) + self.assertEqual(co.co_lnotab, '') + + def test_literals_with_leading_zeroes(self): + for arg in ["077787", "0xj", "0x.", "0e", "090000000000000", + "080000000000000", "000000000000009", "000000000000008"]: + self.assertRaises(SyntaxError, eval, arg) + + self.assertEqual(eval("0777"), 511) + self.assertEqual(eval("0777L"), 511) + self.assertEqual(eval("000777"), 511) + self.assertEqual(eval("0xff"), 255) + self.assertEqual(eval("0xffL"), 255) + self.assertEqual(eval("0XfF"), 255) + self.assertEqual(eval("0777."), 777) + self.assertEqual(eval("0777.0"), 777) + self.assertEqual(eval("000000000000000000000000000000000000000000000000000777e0"), 777) + self.assertEqual(eval("0777e1"), 7770) + self.assertEqual(eval("0e0"), 0) + self.assertEqual(eval("0000E-012"), 0) + self.assertEqual(eval("09.5"), 9.5) + self.assertEqual(eval("0777j"), 777j) + self.assertEqual(eval("00j"), 0j) + self.assertEqual(eval("00.0"), 0) + self.assertEqual(eval("0e3"), 0) + self.assertEqual(eval("090000000000000."), 90000000000000.) + self.assertEqual(eval("090000000000000.0000000000000000000000"), 90000000000000.) + self.assertEqual(eval("090000000000000e0"), 90000000000000.) + self.assertEqual(eval("090000000000000e-0"), 90000000000000.) + self.assertEqual(eval("090000000000000j"), 90000000000000j) + self.assertEqual(eval("000000000000007"), 7) + self.assertEqual(eval("000000000000008."), 8.) + self.assertEqual(eval("000000000000009."), 9.) + + def test_unary_minus(self): + # Verify treatment of unary minus on negative numbers SF bug #660455 + if sys.maxint == 2147483647: + # 32-bit machine + all_one_bits = '0xffffffff' + self.assertEqual(eval(all_one_bits), 4294967295L) + self.assertEqual(eval("-" + all_one_bits), -4294967295L) + elif sys.maxint == 9223372036854775807: + # 64-bit machine + all_one_bits = '0xffffffffffffffff' + self.assertEqual(eval(all_one_bits), 18446744073709551615L) + self.assertEqual(eval("-" + all_one_bits), -18446744073709551615L) + else: + self.fail("How many bits *does* this machine have???") + # Verify treatment of contant folding on -(sys.maxint+1) + # i.e. -2147483648 on 32 bit platforms. Should return int, not long. + self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 1)), int)) + self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 2)), long)) + + if sys.maxint == 9223372036854775807: + def test_32_63_bit_values(self): + a = +4294967296 # 1 << 32 + b = -4294967296 # 1 << 32 + c = +281474976710656 # 1 << 48 + d = -281474976710656 # 1 << 48 + e = +4611686018427387904 # 1 << 62 + f = -4611686018427387904 # 1 << 62 + g = +9223372036854775807 # 1 << 63 - 1 + h = -9223372036854775807 # 1 << 63 - 1 + + for variable in self.test_32_63_bit_values.func_code.co_consts: + if variable is not None: + self.assertTrue(isinstance(variable, int)) + + def test_sequence_unpacking_error(self): + # Verify sequence packing/unpacking with "or". 
SF bug #757818 + i,j = (1, -1) or (-1, 1) + self.assertEqual(i, 1) + self.assertEqual(j, -1) + + def test_none_assignment(self): + stmts = [ + 'None = 0', + 'None += 0', + '__builtins__.None = 0', + 'def None(): pass', + 'class None: pass', + '(a, None) = 0, 0', + 'for None in range(10): pass', + 'def f(None): pass', + ] + for stmt in stmts: + stmt += "\n" + self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'single') + self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec') + + def test_import(self): + succeed = [ + 'import sys', + 'import os, sys', + 'import os as bar', + 'import os.path as bar', + 'from __future__ import nested_scopes, generators', + 'from __future__ import (nested_scopes,\ngenerators)', + 'from __future__ import (nested_scopes,\ngenerators,)', + 'from sys import stdin, stderr, stdout', + 'from sys import (stdin, stderr,\nstdout)', + 'from sys import (stdin, stderr,\nstdout,)', + 'from sys import (stdin\n, stderr, stdout)', + 'from sys import (stdin\n, stderr, stdout,)', + 'from sys import stdin as si, stdout as so, stderr as se', + 'from sys import (stdin as si, stdout as so, stderr as se)', + 'from sys import (stdin as si, stdout as so, stderr as se,)', + ] + fail = [ + 'import (os, sys)', + 'import (os), (sys)', + 'import ((os), (sys))', + 'import (sys', + 'import sys)', + 'import (os,)', + 'import os As bar', + 'import os.path a bar', + 'from sys import stdin As stdout', + 'from sys import stdin a stdout', + 'from (sys) import stdin', + 'from __future__ import (nested_scopes', + 'from __future__ import nested_scopes)', + 'from __future__ import nested_scopes,\ngenerators', + 'from sys import (stdin', + 'from sys import stdin)', + 'from sys import stdin, stdout,\nstderr', + 'from sys import stdin si', + 'from sys import stdin,' + 'from sys import (*)', + 'from sys import (stdin,, stdout, stderr)', + 'from sys import (stdin, stdout),', + ] + for stmt in succeed: + compile(stmt, 'tmp', 'exec') + for stmt in fail: + self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec') + + def test_for_distinct_code_objects(self): + # SF bug 1048870 + def f(): + f1 = lambda x=1: x + f2 = lambda x=2: x + return f1, f2 + f1, f2 = f() + self.assertNotEqual(id(f1.func_code), id(f2.func_code)) + + def test_unicode_encoding(self): + code = u"# -*- coding: utf-8 -*-\npass\n" + self.assertRaises(SyntaxError, compile, code, "tmp", "exec") + + def test_subscripts(self): + # SF bug 1448804 + # Class to make testing subscript results easy + class str_map(object): + def __init__(self): + self.data = {} + def __getitem__(self, key): + return self.data[str(key)] + def __setitem__(self, key, value): + self.data[str(key)] = value + def __delitem__(self, key): + del self.data[str(key)] + def __contains__(self, key): + return str(key) in self.data + d = str_map() + # Index + d[1] = 1 + self.assertEqual(d[1], 1) + d[1] += 1 + self.assertEqual(d[1], 2) + del d[1] + self.assertEqual(1 in d, False) + # Tuple of indices + d[1, 1] = 1 + self.assertEqual(d[1, 1], 1) + d[1, 1] += 1 + self.assertEqual(d[1, 1], 2) + del d[1, 1] + self.assertEqual((1, 1) in d, False) + # Simple slice + d[1:2] = 1 + self.assertEqual(d[1:2], 1) + d[1:2] += 1 + self.assertEqual(d[1:2], 2) + del d[1:2] + self.assertEqual(slice(1, 2) in d, False) + # Tuple of simple slices + d[1:2, 1:2] = 1 + self.assertEqual(d[1:2, 1:2], 1) + d[1:2, 1:2] += 1 + self.assertEqual(d[1:2, 1:2], 2) + del d[1:2, 1:2] + self.assertEqual((slice(1, 2), slice(1, 2)) in d, False) + # Extended slice + d[1:2:3] = 1 + self.assertEqual(d[1:2:3], 1) + 
d[1:2:3] += 1 + self.assertEqual(d[1:2:3], 2) + del d[1:2:3] + self.assertEqual(slice(1, 2, 3) in d, False) + # Tuple of extended slices + d[1:2:3, 1:2:3] = 1 + self.assertEqual(d[1:2:3, 1:2:3], 1) + d[1:2:3, 1:2:3] += 1 + self.assertEqual(d[1:2:3, 1:2:3], 2) + del d[1:2:3, 1:2:3] + self.assertEqual((slice(1, 2, 3), slice(1, 2, 3)) in d, False) + # Ellipsis + d[...] = 1 + self.assertEqual(d[...], 1) + d[...] += 1 + self.assertEqual(d[...], 2) + del d[...] + self.assertEqual(Ellipsis in d, False) + # Tuple of Ellipses + d[..., ...] = 1 + self.assertEqual(d[..., ...], 1) + d[..., ...] += 1 + self.assertEqual(d[..., ...], 2) + del d[..., ...] + self.assertEqual((Ellipsis, Ellipsis) in d, False) + +def test_main(): + test_support.run_unittest(TestSpecifics) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,342 @@ +import unittest, os +from test import test_support + +import warnings +warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + message=".*complex divmod.*are deprecated" +) + +from random import random + +# These tests ensure that complex math does the right thing + +class ComplexTest(unittest.TestCase): + + def assertAlmostEqual(self, a, b): + if isinstance(a, complex): + if isinstance(b, complex): + unittest.TestCase.assertAlmostEqual(self, a.real, b.real) + unittest.TestCase.assertAlmostEqual(self, a.imag, b.imag) + else: + unittest.TestCase.assertAlmostEqual(self, a.real, b) + unittest.TestCase.assertAlmostEqual(self, a.imag, 0.) + else: + if isinstance(b, complex): + unittest.TestCase.assertAlmostEqual(self, a, b.real) + unittest.TestCase.assertAlmostEqual(self, 0., b.imag) + else: + unittest.TestCase.assertAlmostEqual(self, a, b) + + def assertCloseAbs(self, x, y, eps=1e-9): + """Return true iff floats x and y "are close\"""" + # put the one with larger magnitude second + if abs(x) > abs(y): + x, y = y, x + if y == 0: + return abs(x) < eps + if x == 0: + return abs(y) < eps + # check that relative difference < eps + self.assert_(abs((x-y)/y) < eps) + + def assertClose(self, x, y, eps=1e-9): + """Return true iff complexes x and y "are close\"""" + self.assertCloseAbs(x.real, y.real, eps) + self.assertCloseAbs(x.imag, y.imag, eps) + + def assertIs(self, a, b): + self.assert_(a is b) + + def check_div(self, x, y): + """Compute complex z=x*y, and check that z/x==y and z/y==x.""" + z = x * y + if x != 0: + q = z / x + self.assertClose(q, y) + q = z.__div__(x) + self.assertClose(q, y) + q = z.__truediv__(x) + self.assertClose(q, y) + if y != 0: + q = z / y + self.assertClose(q, x) + q = z.__div__(y) + self.assertClose(q, x) + q = z.__truediv__(y) + self.assertClose(q, x) + + def test_div(self): + # too slow for PyPy --- simple_real = [float(i) for i in xrange(-5, 6)] + simple_real = [-2.0, 0.0, 1.0] + simple_complex = [complex(x, y) for x in simple_real for y in simple_real] + for x in simple_complex: + for y in simple_complex: + self.check_div(x, y) + + # A naive complex division algorithm (such as in 2.0) is very prone to + # nonsense errors for these (overflows and underflows). + self.check_div(complex(1e200, 1e200), 1+0j) + self.check_div(complex(1e-200, 1e-200), 1+0j) + + # Just for fun. 
+ for i in xrange(100): + self.check_div(complex(random(), random()), + complex(random(), random())) + + self.assertRaises(ZeroDivisionError, complex.__div__, 1+1j, 0+0j) + # FIXME: The following currently crashes on Alpha + # self.assertRaises(OverflowError, pow, 1e200+1j, 1e200+1j) + + def test_truediv(self): + self.assertAlmostEqual(complex.__truediv__(2+0j, 1+1j), 1-1j) + self.assertRaises(ZeroDivisionError, complex.__truediv__, 1+1j, 0+0j) + + def test_floordiv(self): + self.assertAlmostEqual(complex.__floordiv__(3+0j, 1.5+0j), 2) + self.assertRaises(ZeroDivisionError, complex.__floordiv__, 3+0j, 0+0j) + + def test_coerce(self): + self.assertRaises(OverflowError, complex.__coerce__, 1+1j, 1L<<10000) + + def test_richcompare(self): + self.assertRaises(OverflowError, complex.__eq__, 1+1j, 1L<<10000) + self.assertEqual(complex.__lt__(1+1j, None), NotImplemented) + self.assertIs(complex.__eq__(1+1j, 1+1j), True) + self.assertIs(complex.__eq__(1+1j, 2+2j), False) + self.assertIs(complex.__ne__(1+1j, 1+1j), False) + self.assertIs(complex.__ne__(1+1j, 2+2j), True) + self.assertRaises(TypeError, complex.__lt__, 1+1j, 2+2j) + self.assertRaises(TypeError, complex.__le__, 1+1j, 2+2j) + self.assertRaises(TypeError, complex.__gt__, 1+1j, 2+2j) + self.assertRaises(TypeError, complex.__ge__, 1+1j, 2+2j) + + def test_mod(self): + self.assertRaises(ZeroDivisionError, (1+1j).__mod__, 0+0j) + + a = 3.33+4.43j + try: + a % 0 + except ZeroDivisionError: + pass + else: + self.fail("modulo parama can't be 0") + + def test_divmod(self): + self.assertRaises(ZeroDivisionError, divmod, 1+1j, 0+0j) + + def test_pow(self): + self.assertAlmostEqual(pow(1+1j, 0+0j), 1.0) + self.assertAlmostEqual(pow(0+0j, 2+0j), 0.0) + self.assertRaises(ZeroDivisionError, pow, 0+0j, 1j) + self.assertAlmostEqual(pow(1j, -1), 1/1j) + self.assertAlmostEqual(pow(1j, 200), 1) + self.assertRaises(ValueError, pow, 1+1j, 1+1j, 1+1j) + + a = 3.33+4.43j + self.assertEqual(a ** 0j, 1) + self.assertEqual(a ** 0.+0.j, 1) + + self.assertEqual(3j ** 0j, 1) + self.assertEqual(3j ** 0, 1) + + try: + 0j ** a + except ZeroDivisionError: + pass + else: + self.fail("should fail 0.0 to negative or complex power") + + try: + 0j ** (3-2j) + except ZeroDivisionError: + pass + else: + self.fail("should fail 0.0 to negative or complex power") + + # The following is used to exercise certain code paths + self.assertEqual(a ** 105, a ** 105) + self.assertEqual(a ** -105, a ** -105) + self.assertEqual(a ** -30, a ** -30) + + self.assertEqual(0.0j ** 0, 1) + + b = 5.1+2.3j + self.assertRaises(ValueError, pow, a, b, 0) + + def test_boolcontext(self): + for i in xrange(100): + self.assert_(complex(random() + 1e-6, random() + 1e-6)) + self.assert_(not complex(0.0, 0.0)) + + def test_conjugate(self): + self.assertClose(complex(5.3, 9.8).conjugate(), 5.3-9.8j) + + def test_constructor(self): + class OS: + def __init__(self, value): self.value = value + def __complex__(self): return self.value + class NS(object): + def __init__(self, value): self.value = value + def __complex__(self): return self.value + self.assertEqual(complex(OS(1+10j)), 1+10j) + self.assertEqual(complex(NS(1+10j)), 1+10j) + self.assertRaises(TypeError, complex, OS(None)) + self.assertRaises(TypeError, complex, NS(None)) + + self.assertAlmostEqual(complex("1+10j"), 1+10j) + self.assertAlmostEqual(complex(10), 10+0j) + self.assertAlmostEqual(complex(10.0), 10+0j) + self.assertAlmostEqual(complex(10L), 10+0j) + self.assertAlmostEqual(complex(10+0j), 10+0j) + self.assertAlmostEqual(complex(1,10), 
1+10j) + self.assertAlmostEqual(complex(1,10L), 1+10j) + self.assertAlmostEqual(complex(1,10.0), 1+10j) + self.assertAlmostEqual(complex(1L,10), 1+10j) + self.assertAlmostEqual(complex(1L,10L), 1+10j) + self.assertAlmostEqual(complex(1L,10.0), 1+10j) + self.assertAlmostEqual(complex(1.0,10), 1+10j) + self.assertAlmostEqual(complex(1.0,10L), 1+10j) + self.assertAlmostEqual(complex(1.0,10.0), 1+10j) + self.assertAlmostEqual(complex(3.14+0j), 3.14+0j) + self.assertAlmostEqual(complex(3.14), 3.14+0j) + self.assertAlmostEqual(complex(314), 314.0+0j) + self.assertAlmostEqual(complex(314L), 314.0+0j) + self.assertAlmostEqual(complex(3.14+0j, 0j), 3.14+0j) + self.assertAlmostEqual(complex(3.14, 0.0), 3.14+0j) + self.assertAlmostEqual(complex(314, 0), 314.0+0j) + self.assertAlmostEqual(complex(314L, 0L), 314.0+0j) + self.assertAlmostEqual(complex(0j, 3.14j), -3.14+0j) + self.assertAlmostEqual(complex(0.0, 3.14j), -3.14+0j) + self.assertAlmostEqual(complex(0j, 3.14), 3.14j) + self.assertAlmostEqual(complex(0.0, 3.14), 3.14j) + self.assertAlmostEqual(complex("1"), 1+0j) + self.assertAlmostEqual(complex("1j"), 1j) + self.assertAlmostEqual(complex(), 0) + self.assertAlmostEqual(complex("-1"), -1) + self.assertAlmostEqual(complex("+1"), +1) + + class complex2(complex): pass + self.assertAlmostEqual(complex(complex2(1+1j)), 1+1j) + self.assertAlmostEqual(complex(real=17, imag=23), 17+23j) + self.assertAlmostEqual(complex(real=17+23j), 17+23j) + self.assertAlmostEqual(complex(real=17+23j, imag=23), 17+46j) + self.assertAlmostEqual(complex(real=1+2j, imag=3+4j), -3+5j) + + c = 3.14 + 1j + self.assert_(complex(c) is c) + del c + + self.assertRaises(TypeError, complex, "1", "1") + self.assertRaises(TypeError, complex, 1, "1") + + self.assertEqual(complex(" 3.14+J "), 3.14+1j) + if test_support.have_unicode: + self.assertEqual(complex(unicode(" 3.14+J ")), 3.14+1j) + + # SF bug 543840: complex(string) accepts strings with \0 + # Fixed in 2.3. 
+ self.assertRaises(ValueError, complex, '1+1j\0j') + + self.assertRaises(TypeError, int, 5+3j) + self.assertRaises(TypeError, long, 5+3j) + self.assertRaises(TypeError, float, 5+3j) + self.assertRaises(ValueError, complex, "") + self.assertRaises(TypeError, complex, None) + self.assertRaises(ValueError, complex, "\0") + self.assertRaises(TypeError, complex, "1", "2") + self.assertRaises(TypeError, complex, "1", 42) + self.assertRaises(TypeError, complex, 1, "2") + self.assertRaises(ValueError, complex, "1+") + self.assertRaises(ValueError, complex, "1+1j+1j") + self.assertRaises(ValueError, complex, "--") + if test_support.have_unicode: + self.assertRaises(ValueError, complex, unicode("1"*500)) + self.assertRaises(ValueError, complex, unicode("x")) + + class EvilExc(Exception): + pass + + class evilcomplex: + def __complex__(self): + raise EvilExc + + self.assertRaises(EvilExc, complex, evilcomplex()) + + class float2: + def __init__(self, value): + self.value = value + def __float__(self): + return self.value + + self.assertAlmostEqual(complex(float2(42.)), 42) + self.assertAlmostEqual(complex(real=float2(17.), imag=float2(23.)), 17+23j) + self.assertRaises(TypeError, complex, float2(None)) + + class complex0(complex): + """Test usage of __complex__() when inheriting from 'complex'""" + def __complex__(self): + return 42j + + class complex1(complex): + """Test usage of __complex__() with a __new__() method""" + def __new__(self, value=0j): + return complex.__new__(self, 2*value) + def __complex__(self): + return self + + class complex2(complex): + """Make sure that __complex__() calls fail if anything other than a + complex is returned""" + def __complex__(self): + return None + + self.assertAlmostEqual(complex(complex0(1j)), 42j) + self.assertAlmostEqual(complex(complex1(1j)), 2j) + self.assertRaises(TypeError, complex, complex2(1j)) + + def test_hash(self): + for x in xrange(-30, 30): + self.assertEqual(hash(x), hash(complex(x, 0))) + x /= 3.0 # now check against floating point + self.assertEqual(hash(x), hash(complex(x, 0.))) + + def test_abs(self): + nums = [complex(x/3., y/7.) 
for x in xrange(-9,9) for y in xrange(-9,9)] + for num in nums: + self.assertAlmostEqual((num.real**2 + num.imag**2) ** 0.5, abs(num)) + + def test_repr(self): + self.assertEqual(repr(1+6j), '(1+6j)') + self.assertEqual(repr(1-6j), '(1-6j)') + + self.assertNotEqual(repr(-(1+0j)), '(-1+-0j)') + + def test_neg(self): + self.assertEqual(-(1+6j), -1-6j) + + def test_file(self): + a = 3.33+4.43j + b = 5.1+2.3j + + fo = None + try: + fo = open(test_support.TESTFN, "wb") + print >>fo, a, b + fo.close() + fo = open(test_support.TESTFN, "rb") + self.assertEqual(fo.read(), "%s %s\n" % (a, b)) + finally: + if (fo is not None) and (not fo.closed): + fo.close() + try: + os.remove(test_support.TESTFN) + except (OSError, IOError): + pass + +def test_main(): + test_support.run_unittest(ComplexTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_copy.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_copy.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,591 @@ +"""Unit tests for the copy module.""" + +import sys +import copy +import copy_reg + +import unittest +from test import test_support + +class TestCopy(unittest.TestCase): + + # Attempt full line coverage of copy.py from top to bottom + + def test_exceptions(self): + self.assert_(copy.Error is copy.error) + self.assert_(issubclass(copy.Error, Exception)) + + # The copy() method + + def test_copy_basic(self): + x = 42 + y = copy.copy(x) + self.assertEqual(x, y) + + def test_copy_copy(self): + class C(object): + def __init__(self, foo): + self.foo = foo + def __copy__(self): + return C(self.foo) + x = C(42) + y = copy.copy(x) + self.assertEqual(y.__class__, x.__class__) + self.assertEqual(y.foo, x.foo) + + def test_copy_registry(self): + class C(object): + def __new__(cls, foo): + obj = object.__new__(cls) + obj.foo = foo + return obj + def pickle_C(obj): + return (C, (obj.foo,)) + x = C(42) + self.assertRaises(TypeError, copy.copy, x) + copy_reg.pickle(C, pickle_C, C) + y = copy.copy(x) + + def test_copy_reduce_ex(self): + class C(object): + def __reduce_ex__(self, proto): + return "" + def __reduce__(self): + raise test_support.TestFailed, "shouldn't call this" + x = C() + y = copy.copy(x) + self.assert_(y is x) + + def test_copy_reduce(self): + class C(object): + def __reduce__(self): + return "" + x = C() + y = copy.copy(x) + self.assert_(y is x) + + def test_copy_cant(self): + class C(object): + def __getattribute__(self, name): + if name.startswith("__reduce"): + raise AttributeError, name + return object.__getattribute__(self, name) + x = C() + self.assertRaises(copy.Error, copy.copy, x) + + # Type-specific _copy_xxx() methods + + def test_copy_atomic(self): + class Classic: + pass + class NewStyle(object): + pass + def f(): + pass + tests = [None, 42, 2L**100, 3.14, True, False, 1j, + "hello", u"hello\u1234", f.func_code, + NewStyle, xrange(10), Classic, max] + for x in tests: + self.assert_(copy.copy(x) is x, repr(x)) + + def test_copy_list(self): + x = [1, 2, 3] + self.assertEqual(copy.copy(x), x) + + def test_copy_tuple(self): + x = (1, 2, 3) + self.assertEqual(copy.copy(x), x) + + def test_copy_dict(self): + x = {"foo": 1, "bar": 2} + self.assertEqual(copy.copy(x), x) + + def test_copy_inst_vanilla(self): + class C: + def __init__(self, foo): + self.foo = foo + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + def 
test_copy_inst_copy(self): + class C: + def __init__(self, foo): + self.foo = foo + def __copy__(self): + return C(self.foo) + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + def test_copy_inst_getinitargs(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getinitargs__(self): + return (self.foo,) + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + def test_copy_inst_getstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getstate__(self): + return {"foo": self.foo} + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + def test_copy_inst_setstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __setstate__(self, state): + self.foo = state["foo"] + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + def test_copy_inst_getstate_setstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getstate__(self): + return self.foo + def __setstate__(self, state): + self.foo = state + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C(42) + self.assertEqual(copy.copy(x), x) + + # The deepcopy() method + + def test_deepcopy_basic(self): + x = 42 + y = copy.deepcopy(x) + self.assertEqual(y, x) + + def test_deepcopy_memo(self): + # Tests of reflexive objects are under type-specific sections below. + # This tests only repetitions of objects. + x = [] + x = [x, x] + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + self.assert_(y[0] is not x[0]) + self.assert_(y[0] is y[1]) + + def test_deepcopy_issubclass(self): + # XXX Note: there's no way to test the TypeError coming out of + # issubclass() -- this can only happen when an extension + # module defines a "type" that doesn't formally inherit from + # type. 
+ class Meta(type): + pass + class C: + __metaclass__ = Meta + self.assertEqual(copy.deepcopy(C), C) + + def test_deepcopy_deepcopy(self): + class C(object): + def __init__(self, foo): + self.foo = foo + def __deepcopy__(self, memo=None): + return C(self.foo) + x = C(42) + y = copy.deepcopy(x) + self.assertEqual(y.__class__, x.__class__) + self.assertEqual(y.foo, x.foo) + + def test_deepcopy_registry(self): + class C(object): + def __new__(cls, foo): + obj = object.__new__(cls) + obj.foo = foo + return obj + def pickle_C(obj): + return (C, (obj.foo,)) + x = C(42) + self.assertRaises(TypeError, copy.deepcopy, x) + copy_reg.pickle(C, pickle_C, C) + y = copy.deepcopy(x) + + def test_deepcopy_reduce_ex(self): + class C(object): + def __reduce_ex__(self, proto): + return "" + def __reduce__(self): + raise test_support.TestFailed, "shouldn't call this" + x = C() + y = copy.deepcopy(x) + self.assert_(y is x) + + def test_deepcopy_reduce(self): + class C(object): + def __reduce__(self): + return "" + x = C() + y = copy.deepcopy(x) + self.assert_(y is x) + + def test_deepcopy_cant(self): + class C(object): + def __getattribute__(self, name): + if name.startswith("__reduce"): + raise AttributeError, name + return object.__getattribute__(self, name) + x = C() + self.assertRaises(copy.Error, copy.deepcopy, x) + + # Type-specific _deepcopy_xxx() methods + + def test_deepcopy_atomic(self): + class Classic: + pass + class NewStyle(object): + pass + def f(): + pass + tests = [None, 42, 2L**100, 3.14, True, False, 1j, + "hello", u"hello\u1234", f.func_code, + NewStyle, xrange(10), Classic, max] + for x in tests: + self.assert_(copy.deepcopy(x) is x, repr(x)) + + def test_deepcopy_list(self): + x = [[1, 2], 3] + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(x is not y) + self.assert_(x[0] is not y[0]) + + def test_deepcopy_reflexive_list(self): + x = [] + x.append(x) + y = copy.deepcopy(x) + self.assertRaises(RuntimeError, cmp, y, x) + self.assert_(y is not x) + self.assert_(y[0] is y) + self.assertEqual(len(y), 1) + + def test_deepcopy_tuple(self): + x = ([1, 2], 3) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(x is not y) + self.assert_(x[0] is not y[0]) + + def test_deepcopy_reflexive_tuple(self): + x = ([],) + x[0].append(x) + y = copy.deepcopy(x) + self.assertRaises(RuntimeError, cmp, y, x) + self.assert_(y is not x) + self.assert_(y[0] is not x[0]) + self.assert_(y[0][0] is y) + + def test_deepcopy_dict(self): + x = {"foo": [1, 2], "bar": 3} + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(x is not y) + self.assert_(x["foo"] is not y["foo"]) + + def test_deepcopy_reflexive_dict(self): + x = {} + x['foo'] = x + y = copy.deepcopy(x) + self.assertRaises(RuntimeError, cmp, y, x) + self.assert_(y is not x) + self.assert_(y['foo'] is y) + self.assertEqual(len(y), 1) + + def test_deepcopy_keepalive(self): + memo = {} + x = 42 + y = copy.deepcopy(x, memo) + self.assert_(memo[id(x)] is x) + + def test_deepcopy_inst_vanilla(self): + class C: + def __init__(self, foo): + self.foo = foo + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y.foo is not x.foo) + + def test_deepcopy_inst_deepcopy(self): + class C: + def __init__(self, foo): + self.foo = foo + def __deepcopy__(self, memo): + return C(copy.deepcopy(self.foo, memo)) + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + 
self.assert_(y.foo is not x.foo) + + def test_deepcopy_inst_getinitargs(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getinitargs__(self): + return (self.foo,) + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + self.assert_(y.foo is not x.foo) + + def test_deepcopy_inst_getstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getstate__(self): + return {"foo": self.foo} + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + self.assert_(y.foo is not x.foo) + + def test_deepcopy_inst_setstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __setstate__(self, state): + self.foo = state["foo"] + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + self.assert_(y.foo is not x.foo) + + def test_deepcopy_inst_getstate_setstate(self): + class C: + def __init__(self, foo): + self.foo = foo + def __getstate__(self): + return self.foo + def __setstate__(self, state): + self.foo = state + def __cmp__(self, other): + return cmp(self.foo, other.foo) + x = C([42]) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y is not x) + self.assert_(y.foo is not x.foo) + + def test_deepcopy_reflexive_inst(self): + class C: + pass + x = C() + x.foo = x + y = copy.deepcopy(x) + self.assert_(y is not x) + self.assert_(y.foo is y) + + # _reconstruct() + + def test_reconstruct_string(self): + class C(object): + def __reduce__(self): + return "" + x = C() + y = copy.copy(x) + self.assert_(y is x) + y = copy.deepcopy(x) + self.assert_(y is x) + + def test_reconstruct_nostate(self): + class C(object): + def __reduce__(self): + return (C, ()) + x = C() + x.foo = 42 + y = copy.copy(x) + self.assert_(y.__class__ is x.__class__) + y = copy.deepcopy(x) + self.assert_(y.__class__ is x.__class__) + + def test_reconstruct_state(self): + class C(object): + def __reduce__(self): + return (C, (), self.__dict__) + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + x = C() + x.foo = [42] + y = copy.copy(x) + self.assertEqual(y, x) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y.foo is not x.foo) + + def test_reconstruct_state_setstate(self): + class C(object): + def __reduce__(self): + return (C, (), self.__dict__) + def __setstate__(self, state): + self.__dict__.update(state) + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + x = C() + x.foo = [42] + y = copy.copy(x) + self.assertEqual(y, x) + y = copy.deepcopy(x) + self.assertEqual(y, x) + self.assert_(y.foo is not x.foo) + + def test_reconstruct_reflexive(self): + class C(object): + pass + x = C() + x.foo = x + y = copy.deepcopy(x) + self.assert_(y is not x) + self.assert_(y.foo is y) + + # Additions for Python 2.3 and pickle protocol 2 + + def test_reduce_4tuple(self): + class C(list): + def __reduce__(self): + return (C, (), self.__dict__, iter(self)) + def __cmp__(self, other): + return (cmp(list(self), list(other)) or + cmp(self.__dict__, other.__dict__)) + x = C([[1, 2], 3]) + y = copy.copy(x) + self.assertEqual(x, y) + self.assert_(x is not y) + self.assert_(x[0] is y[0]) + y = copy.deepcopy(x) + self.assertEqual(x, y) + self.assert_(x is not y) + self.assert_(x[0] is not y[0]) + + def test_reduce_5tuple(self): + class C(dict): + def 
__reduce__(self): + return (C, (), self.__dict__, None, self.iteritems()) + def __cmp__(self, other): + return (cmp(dict(self), list(dict)) or + cmp(self.__dict__, other.__dict__)) + x = C([("foo", [1, 2]), ("bar", 3)]) + y = copy.copy(x) + self.assertEqual(x, y) + self.assert_(x is not y) + self.assert_(x["foo"] is y["foo"]) + y = copy.deepcopy(x) + self.assertEqual(x, y) + self.assert_(x is not y) + self.assert_(x["foo"] is not y["foo"]) + + def test_copy_slots(self): + class C(object): + __slots__ = ["foo"] + x = C() + x.foo = [42] + y = copy.copy(x) + self.assert_(x.foo is y.foo) + + def test_deepcopy_slots(self): + class C(object): + __slots__ = ["foo"] + x = C() + x.foo = [42] + y = copy.deepcopy(x) + self.assertEqual(x.foo, y.foo) + self.assert_(x.foo is not y.foo) + + def test_copy_list_subclass(self): + class C(list): + pass + x = C([[1, 2], 3]) + x.foo = [4, 5] + y = copy.copy(x) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.foo, y.foo) + self.assert_(x[0] is y[0]) + self.assert_(x.foo is y.foo) + + def test_deepcopy_list_subclass(self): + class C(list): + pass + x = C([[1, 2], 3]) + x.foo = [4, 5] + y = copy.deepcopy(x) + self.assertEqual(list(x), list(y)) + self.assertEqual(x.foo, y.foo) + self.assert_(x[0] is not y[0]) + self.assert_(x.foo is not y.foo) + + def test_copy_tuple_subclass(self): + class C(tuple): + pass + x = C([1, 2, 3]) + self.assertEqual(tuple(x), (1, 2, 3)) + y = copy.copy(x) + self.assertEqual(tuple(y), (1, 2, 3)) + + def test_deepcopy_tuple_subclass(self): + class C(tuple): + pass + x = C([[1, 2], 3]) + self.assertEqual(tuple(x), ([1, 2], 3)) + y = copy.deepcopy(x) + self.assertEqual(tuple(y), ([1, 2], 3)) + self.assert_(x is not y) + self.assert_(x[0] is not y[0]) + + def test_getstate_exc(self): + class EvilState(object): + def __getstate__(self): + raise ValueError, "ain't got no stickin' state" + self.assertRaises(ValueError, copy.copy, EvilState()) + + def test_copy_function(self): + self.assertEqual(copy.copy(global_foo), global_foo) + def foo(x, y): return x+y + self.assertEqual(copy.copy(foo), foo) + bar = lambda: None + self.assertEqual(copy.copy(bar), bar) + + def test_deepcopy_function(self): + self.assertEqual(copy.deepcopy(global_foo), global_foo) + def foo(x, y): return x+y + self.assertEqual(copy.deepcopy(foo), foo) + bar = lambda: None + self.assertEqual(copy.deepcopy(bar), bar) + +def global_foo(x, y): return x+y + +def test_main(): + test_support.run_unittest(TestCopy) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py Sun Aug 17 23:48:15 2008 @@ -0,0 +1,103 @@ +import cPickle +import unittest +from cStringIO import StringIO +from test.pickletester import AbstractPickleTests, AbstractPickleModuleTests +from test import test_support + +class cPickleTests(AbstractPickleTests, AbstractPickleModuleTests): + + def setUp(self): + self.dumps = cPickle.dumps + self.loads = cPickle.loads + + error = cPickle.BadPickleGet + module = cPickle + +class cPicklePicklerTests(AbstractPickleTests): + + def dumps(self, arg, proto=0): + f = StringIO() + p = cPickle.Pickler(f, proto) + p.dump(arg) + f.seek(0) + return f.read() + + def loads(self, buf): + f = StringIO(buf) + p = cPickle.Unpickler(f) + return p.load() + + error = cPickle.BadPickleGet + +class cPickleListPicklerTests(AbstractPickleTests): + + def 
dumps(self, arg, proto=0): + p = cPickle.Pickler(proto) + p.dump(arg) + return p.getvalue() + + def loads(self, *args): + f = StringIO(args[0]) + p = cPickle.Unpickler(f) + return p.load() + + error = cPickle.BadPickleGet + +class cPickleFastPicklerTests(AbstractPickleTests): + + def dumps(self, arg, proto=0): + f = StringIO() + p = cPickle.Pickler(f, proto) + p.fast = 1 + p.dump(arg) + f.seek(0) + return f.read() + + def loads(self, *args): + f = StringIO(args[0]) + p = cPickle.Unpickler(f) + return p.load() + + error = cPickle.BadPickleGet + + def test_recursive_list(self): + self.assertRaises(ValueError, + AbstractPickleTests.test_recursive_list, + self) + + def test_recursive_inst(self): + self.assertRaises(ValueError, + AbstractPickleTests.test_recursive_inst, + self) + + def test_recursive_dict(self): + self.assertRaises(ValueError, + AbstractPickleTests.test_recursive_dict, + self) + + def test_recursive_multi(self): + self.assertRaises(ValueError, + AbstractPickleTests.test_recursive_multi, + self) + + def test_nonrecursive_deep(self): + # If it's not cyclic, it should pickle OK even if the nesting + # depth exceeds PY_CPICKLE_FAST_LIMIT. That happens to be + # 50 today. Jack Jansen reported stack overflow on Mac OS 9 + # at 64. + a = [] + for i in range(60): + a = [a] + b = self.loads(self.dumps(a)) + self.assertEqual(a, b) + +def test_main(): + test_support.run_unittest( + cPickleTests, + cPicklePicklerTests, + cPickleListPicklerTests, + # XXX undocumented, not supported by PyPy: cPickleFastPicklerTests + ) + +if __name__ == "__main__": + test_main() From bgola at codespeak.net Mon Aug 18 01:37:19 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 01:37:19 +0200 (CEST) Subject: [pypy-svn] r57382 - pypy/branch/2.5-features/lib-python/modified-2.5.1/test Message-ID: <20080817233719.9D129168501@codespeak.net> Author: bgola Date: Mon Aug 18 01:37:16 2008 New Revision: 57382 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py (contents, props changed) 
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py (contents, props changed) Log: more changes applied to tests (stdlib) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,632 @@ +from collections import deque +import unittest +from test import test_support, seq_tests +#from weakref import proxy +import copy +import cPickle as pickle +from cStringIO import StringIO +import random +import os + +BIG = 10 + +def fail(): + raise SyntaxError + yield 1 + +class BadCmp: + def __eq__(self, other): + raise RuntimeError + +class MutateCmp: + def __init__(self, deque, result): + self.deque = deque + self.result = result + def __eq__(self, other): + self.deque.clear() + return self.result + +class TestBasic(unittest.TestCase): + + def test_basics(self): + d = deque(xrange(100)) + d.__init__(xrange(100, 200)) + for i in xrange(200, 400): + d.append(i) + for i in reversed(xrange(-200, 0)): + d.appendleft(i) + self.assertEqual(list(d), range(-200, 400)) + self.assertEqual(len(d), 600) + + left = [d.popleft() for i in xrange(250)] + self.assertEqual(left, range(-200, 50)) + self.assertEqual(list(d), range(50, 400)) + + right = [d.pop() for i in xrange(250)] + right.reverse() + self.assertEqual(right, range(150, 400)) + self.assertEqual(list(d), range(50, 150)) + + def test_comparisons(self): + d = deque('xabc'); d.popleft() + for e in [d, deque('abc'), deque('ab'), deque(), list(d)]: + self.assertEqual(d==e, type(d)==type(e) and list(d)==list(e)) + self.assertEqual(d!=e, not(type(d)==type(e) and list(d)==list(e))) + + args = map(deque, ('', 'a', 'b', 'ab', 'ba', 'abc', 'xba', 'xabc', 'cba')) + for x in args: + for y in args: + self.assertEqual(x == y, list(x) == list(y), (x,y)) + self.assertEqual(x != y, list(x) != list(y), (x,y)) + self.assertEqual(x < y, list(x) < list(y), (x,y)) + self.assertEqual(x <= y, list(x) <= list(y), (x,y)) + self.assertEqual(x > y, list(x) > list(y), (x,y)) + self.assertEqual(x >= y, list(x) >= list(y), (x,y)) + self.assertEqual(cmp(x,y), cmp(list(x),list(y)), (x,y)) + + def test_extend(self): + d = deque('a') + self.assertRaises(TypeError, d.extend, 1) + d.extend('bcd') + self.assertEqual(list(d), list('abcd')) + + def test_extendleft(self): + d = deque('a') + self.assertRaises(TypeError, d.extendleft, 1) + d.extendleft('bcd') + self.assertEqual(list(d), list(reversed('abcd'))) + d = deque() + d.extendleft(range(1000)) + self.assertEqual(list(d), list(reversed(range(1000)))) + self.assertRaises(SyntaxError, d.extendleft, fail()) + + def test_getitem(self): + n = 10 + d = deque(xrange(n)) + l = range(n) + for i in xrange(n): + d.popleft() + l.pop(0) + if random.random() < 0.5: + d.append(i) + l.append(i) + for j in xrange(1-len(l), len(l)): + assert d[j] == l[j] + + d = deque('superman') + self.assertEqual(d[0], 's') + self.assertEqual(d[-1], 'n') + d = deque() + self.assertRaises(IndexError, d.__getitem__, 0) + self.assertRaises(IndexError, d.__getitem__, -1) + + def test_setitem(self): + n = 10 + d = deque(xrange(n)) + for i in xrange(n): + d[i] = 10 * i + self.assertEqual(list(d), [10*i for i in xrange(n)]) + l = list(d) + for i in xrange(1-n, 0, -1): + d[i] = 7*i + l[i] = 7*i + self.assertEqual(list(d), l) + + def test_delitem(self): + n = 10 # O(n**2) test, don't make this too big + d = deque(xrange(n)) + 
self.assertRaises(IndexError, d.__delitem__, -n-1) + self.assertRaises(IndexError, d.__delitem__, n) + for i in xrange(n): + self.assertEqual(len(d), n-i) + j = random.randrange(-len(d), len(d)) + val = d[j] + self.assert_(val in d) + del d[j] + self.assert_(val not in d) + self.assertEqual(len(d), 0) + + def test_rotate(self): + s = tuple('abcde') + n = len(s) + + d = deque(s) + d.rotate(1) # verify rot(1) + self.assertEqual(''.join(d), 'eabcd') + + d = deque(s) + d.rotate(-1) # verify rot(-1) + self.assertEqual(''.join(d), 'bcdea') + d.rotate() # check default to 1 + self.assertEqual(tuple(d), s) + + for i in xrange(n*3): + d = deque(s) + e = deque(d) + d.rotate(i) # check vs. rot(1) n times + for j in xrange(i): + e.rotate(1) + self.assertEqual(tuple(d), tuple(e)) + d.rotate(-i) # check that it works in reverse + self.assertEqual(tuple(d), s) + e.rotate(n-i) # check that it wraps forward + self.assertEqual(tuple(e), s) + + for i in xrange(n*3): + d = deque(s) + e = deque(d) + d.rotate(-i) + for j in xrange(i): + e.rotate(-1) # check vs. rot(-1) n times + self.assertEqual(tuple(d), tuple(e)) + d.rotate(i) # check that it works in reverse + self.assertEqual(tuple(d), s) + e.rotate(i-n) # check that it wraps backaround + self.assertEqual(tuple(e), s) + + d = deque(s) + e = deque(s) + e.rotate(BIG+17) # verify on long series of rotates + dr = d.rotate + for i in xrange(BIG+17): + dr() + self.assertEqual(tuple(d), tuple(e)) + + self.assertRaises(TypeError, d.rotate, 'x') # Wrong arg type + self.assertRaises(TypeError, d.rotate, 1, 10) # Too many args + + d = deque() + d.rotate() # rotate an empty deque + self.assertEqual(d, deque()) + + def test_len(self): + d = deque('ab') + self.assertEqual(len(d), 2) + d.popleft() + self.assertEqual(len(d), 1) + d.pop() + self.assertEqual(len(d), 0) + self.assertRaises(IndexError, d.pop) + self.assertEqual(len(d), 0) + d.append('c') + self.assertEqual(len(d), 1) + d.appendleft('d') + self.assertEqual(len(d), 2) + d.clear() + self.assertEqual(len(d), 0) + + def test_underflow(self): + d = deque() + self.assertRaises(IndexError, d.pop) + self.assertRaises(IndexError, d.popleft) + + def test_clear(self): + d = deque(xrange(100)) + self.assertEqual(len(d), 100) + d.clear() + self.assertEqual(len(d), 0) + self.assertEqual(list(d), []) + d.clear() # clear an emtpy deque + self.assertEqual(list(d), []) + + def test_remove(self): + d = deque('abcdefghcij') + d.remove('c') + self.assertEqual(d, deque('abdefghcij')) + d.remove('c') + self.assertEqual(d, deque('abdefghij')) + self.assertRaises(ValueError, d.remove, 'c') + self.assertEqual(d, deque('abdefghij')) + + # Handle comparison errors + d = deque(['a', 'b', BadCmp(), 'c']) + e = deque(d) + self.assertRaises(RuntimeError, d.remove, 'c') + for x, y in zip(d, e): + # verify that original order and values are retained. + self.assert_(x is y) + + # Handle evil mutator + for match in (True, False): + d = deque(['ab']) + d.extend([MutateCmp(d, match), 'c']) + self.assertRaises(IndexError, d.remove, 'c') + self.assertEqual(d, deque()) + + def test_repr(self): + d = deque(xrange(200)) + e = eval(repr(d)) + self.assertEqual(list(d), list(e)) + d.append(d) + self.assert_('...' 
in repr(d)) + + def test_print(self): + d = deque(xrange(200)) + d.append(d) + try: + fo = open(test_support.TESTFN, "wb") + print >> fo, d, + fo.close() + fo = open(test_support.TESTFN, "rb") + self.assertEqual(fo.read(), repr(d)) + finally: + fo.close() + os.remove(test_support.TESTFN) + + def test_init(self): + self.assertRaises(TypeError, deque, 'abc', 2); + self.assertRaises(TypeError, deque, 1); + + def test_hash(self): + self.assertRaises(TypeError, hash, deque('abc')) + + def test_long_steadystate_queue_popleft(self): + for size in (0, 1, 2, 9): + d = deque(xrange(size)) + append, pop = d.append, d.popleft + for i in xrange(size, BIG): + append(i) + x = pop() + if x != i - size: + self.assertEqual(x, i-size) + self.assertEqual(list(d), range(BIG-size, BIG)) + + def test_long_steadystate_queue_popright(self): + for size in (0, 1, 2, 9): + d = deque(reversed(xrange(size))) + append, pop = d.appendleft, d.pop + for i in xrange(size, BIG): + append(i) + x = pop() + if x != i - size: + self.assertEqual(x, i-size) + self.assertEqual(list(reversed(list(d))), range(BIG-size, BIG)) + + def test_big_queue_popleft(self): + pass + d = deque() + append, pop = d.append, d.popleft + for i in xrange(BIG): + append(i) + for i in xrange(BIG): + x = pop() + if x != i: + self.assertEqual(x, i) + + def test_big_queue_popright(self): + d = deque() + append, pop = d.appendleft, d.pop + for i in xrange(BIG): + append(i) + for i in xrange(BIG): + x = pop() + if x != i: + self.assertEqual(x, i) + + def test_big_stack_right(self): + d = deque() + append, pop = d.append, d.pop + for i in xrange(BIG): + append(i) + for i in reversed(xrange(BIG)): + x = pop() + if x != i: + self.assertEqual(x, i) + self.assertEqual(len(d), 0) + + def test_big_stack_left(self): + d = deque() + append, pop = d.appendleft, d.popleft + for i in xrange(BIG): + append(i) + for i in reversed(xrange(BIG)): + x = pop() + if x != i: + self.assertEqual(x, i) + self.assertEqual(len(d), 0) + + def test_roundtrip_iter_init(self): + d = deque(xrange(200)) + e = deque(d) + self.assertNotEqual(id(d), id(e)) + self.assertEqual(list(d), list(e)) + + def test_pickle(self): + d = deque(xrange(200)) + for i in (0, 1, 2): + s = pickle.dumps(d, i) + e = pickle.loads(s) + self.assertNotEqual(id(d), id(e)) + self.assertEqual(list(d), list(e)) + + def test_pickle_recursive(self): + d = deque('abc') + d.append(d) + for i in (0, 1, 2): + e = pickle.loads(pickle.dumps(d, i)) + self.assertNotEqual(id(d), id(e)) + self.assertEqual(id(e), id(e[-1])) + + def test_deepcopy(self): + mut = [10] + d = deque([mut]) + e = copy.deepcopy(d) + self.assertEqual(list(d), list(e)) + mut[0] = 11 + self.assertNotEqual(id(d), id(e)) + self.assertNotEqual(list(d), list(e)) + + def test_copy(self): + mut = [10] + d = deque([mut]) + e = copy.copy(d) + self.assertEqual(list(d), list(e)) + mut[0] = 11 + self.assertNotEqual(id(d), id(e)) + self.assertEqual(list(d), list(e)) + + def test_reversed(self): + for s in ('abcd', xrange(200)): + self.assertEqual(list(reversed(deque(s))), list(reversed(s))) + + def test_gc_doesnt_blowup(self): + import gc + # This used to assert-fail in deque_traverse() under a debug + # build, or run wild with a NULL pointer in a release build. 
+ d = deque() + for i in xrange(100): + d.append(1) + gc.collect() + +class TestVariousIteratorArgs(unittest.TestCase): + + def test_constructor(self): + for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for g in (seq_tests.Sequence, seq_tests.IterFunc, + seq_tests.IterGen, seq_tests.IterFuncStop, + seq_tests.itermulti, seq_tests.iterfunc): + self.assertEqual(list(deque(g(s))), list(g(s))) + self.assertRaises(TypeError, deque, seq_tests.IterNextOnly(s)) + self.assertRaises(TypeError, deque, seq_tests.IterNoNext(s)) + self.assertRaises(ZeroDivisionError, deque, seq_tests.IterGenExc(s)) + + def test_iter_with_altered_data(self): + d = deque('abcdefg') + it = iter(d) + d.pop() + self.assertRaises(RuntimeError, it.next) + + def test_runtime_error_on_empty_deque(self): + d = deque() + it = iter(d) + d.append(10) + self.assertRaises(RuntimeError, it.next) + +class Deque(deque): + pass + +class DequeWithBadIter(deque): + def __iter__(self): + raise TypeError + +class TestSubclass(unittest.TestCase): + + def test_basics(self): + d = Deque(xrange(100)) + d.__init__(xrange(100, 200)) + for i in xrange(200, 400): + d.append(i) + for i in reversed(xrange(-200, 0)): + d.appendleft(i) + self.assertEqual(list(d), range(-200, 400)) + self.assertEqual(len(d), 600) + + left = [d.popleft() for i in xrange(250)] + self.assertEqual(left, range(-200, 50)) + self.assertEqual(list(d), range(50, 400)) + + right = [d.pop() for i in xrange(250)] + right.reverse() + self.assertEqual(right, range(150, 400)) + self.assertEqual(list(d), range(50, 150)) + + d.clear() + self.assertEqual(len(d), 0) + + def test_copy_pickle(self): + + d = Deque('abc') + + e = d.__copy__() + self.assertEqual(type(d), type(e)) + self.assertEqual(list(d), list(e)) + + e = Deque(d) + self.assertEqual(type(d), type(e)) + self.assertEqual(list(d), list(e)) + + s = pickle.dumps(d) + e = pickle.loads(s) + self.assertNotEqual(id(d), id(e)) + self.assertEqual(type(d), type(e)) + self.assertEqual(list(d), list(e)) + + def test_pickle(self): + d = Deque('abc') + d.append(d) + + e = pickle.loads(pickle.dumps(d)) + self.assertNotEqual(id(d), id(e)) + self.assertEqual(type(d), type(e)) + dd = d.pop() + ee = e.pop() + self.assertEqual(id(e), id(ee)) + self.assertEqual(d, e) + + d.x = d + e = pickle.loads(pickle.dumps(d)) + self.assertEqual(id(e), id(e.x)) + + d = DequeWithBadIter('abc') + self.assertRaises(TypeError, pickle.dumps, d) + +# def test_weakref(self): +# d = deque('gallahad') +# p = proxy(d) +# self.assertEqual(str(p), str(d)) +# d = None +# self.assertRaises(ReferenceError, str, p) + + def test_strange_subclass(self): + class X(deque): + def __iter__(self): + return iter([]) + d1 = X([1,2,3]) + d2 = X([4,5,6]) + d1 == d2 # not clear if this is supposed to be True or False, + # but it used to give a SystemError + + +class SubclassWithKwargs(deque): + def __init__(self, newarg=1): + deque.__init__(self) + +class TestSubclassWithKwargs(unittest.TestCase): + def test_subclass_with_kwargs(self): + # SF bug #1486663 -- this used to erroneously raise a TypeError + SubclassWithKwargs(newarg=1) + +#============================================================================== + +libreftest = """ +Example from the Library Reference: Doc/lib/libcollections.tex + +>>> from collections import deque +>>> d = deque('ghi') # make a new deque with three items +>>> for elem in d: # iterate over the deque's elements +... 
print elem.upper() +G +H +I +>>> d.append('j') # add a new entry to the right side +>>> d.appendleft('f') # add a new entry to the left side +>>> d # show the representation of the deque +deque(['f', 'g', 'h', 'i', 'j']) +>>> d.pop() # return and remove the rightmost item +'j' +>>> d.popleft() # return and remove the leftmost item +'f' +>>> list(d) # list the contents of the deque +['g', 'h', 'i'] +>>> d[0] # peek at leftmost item +'g' +>>> d[-1] # peek at rightmost item +'i' +>>> list(reversed(d)) # list the contents of a deque in reverse +['i', 'h', 'g'] +>>> 'h' in d # search the deque +True +>>> d.extend('jkl') # add multiple elements at once +>>> d +deque(['g', 'h', 'i', 'j', 'k', 'l']) +>>> d.rotate(1) # right rotation +>>> d +deque(['l', 'g', 'h', 'i', 'j', 'k']) +>>> d.rotate(-1) # left rotation +>>> d +deque(['g', 'h', 'i', 'j', 'k', 'l']) +>>> deque(reversed(d)) # make a new deque in reverse order +deque(['l', 'k', 'j', 'i', 'h', 'g']) +>>> d.clear() # empty the deque +>>> d.pop() # cannot pop from an empty deque +Traceback (most recent call last): + File "", line 1, in -toplevel- + d.pop() +IndexError: pop from an empty deque + +>>> d.extendleft('abc') # extendleft() reverses the input order +>>> d +deque(['c', 'b', 'a']) + + + +>>> def delete_nth(d, n): +... d.rotate(-n) +... d.popleft() +... d.rotate(n) +... +>>> d = deque('abcdef') +>>> delete_nth(d, 2) # remove the entry at d[2] +>>> d +deque(['a', 'b', 'd', 'e', 'f']) + + + +>>> def roundrobin(*iterables): +... pending = deque(iter(i) for i in iterables) +... while pending: +... task = pending.popleft() +... try: +... yield task.next() +... except StopIteration: +... continue +... pending.append(task) +... + +>>> for value in roundrobin('abc', 'd', 'efgh'): +... print value +... +a +d +e +b +f +c +g +h + + +>>> def maketree(iterable): +... d = deque(iterable) +... while len(d) > 1: +... pair = [d.popleft(), d.popleft()] +... d.append(pair) +... return list(d) +... 
+>>> print maketree('abcdefgh') +[[[['a', 'b'], ['c', 'd']], [['e', 'f'], ['g', 'h']]]] + +""" + + +#============================================================================== + +__test__ = {'libreftest' : libreftest} + +def test_main(verbose=None): + import sys + test_classes = ( + TestBasic, + TestVariousIteratorArgs, + TestSubclass, + TestSubclassWithKwargs, + ) + + test_support.run_unittest(*test_classes) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*test_classes) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + + # doctests + from test import test_deque + test_support.run_doctest(test_deque, verbose) + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,4218 @@ +# Test enhancements related to descriptors and new-style classes + +from test.test_support import verify, vereq, verbose, TestFailed, TESTFN, get_original_stdout +from copy import deepcopy +import warnings +import gc + +warnings.filterwarnings("ignore", + r'complex divmod\(\), // and % are deprecated$', + DeprecationWarning, r'(|%s)$' % __name__) + +def veris(a, b): + if a is not b: + raise TestFailed, "%r is %r" % (a, b) + +def testunop(a, res, expr="len(a)", meth="__len__"): + if verbose: print "checking", expr + dict = {'a': a} + vereq(eval(expr, dict), res) + t = type(a) + m = getattr(t, meth) + while meth not in t.__dict__: + t = t.__bases__[0] + vereq(t.__dict__[meth](a), res) + vereq(m(a), res) + bm = getattr(a, meth) + vereq(bm(), res) + +def testbinop(a, b, res, expr="a+b", meth="__add__"): + if verbose: print "checking", expr + dict = {'a': a, 'b': b} + + # XXX Hack so this passes before 2.3 when -Qnew is specified. 
+ if meth == "__div__" and 1/2 == 0.5: + meth = "__truediv__" + + vereq(eval(expr, dict), res) + t = type(a) + m = getattr(t, meth) + while meth not in t.__dict__: + t = t.__bases__[0] + vereq(t.__dict__[meth](a, b), res) + vereq(m(a, b), res) + bm = getattr(a, meth) + vereq(bm(b), res) + +def testternop(a, b, c, res, expr="a[b:c]", meth="__getslice__"): + if verbose: print "checking", expr + dict = {'a': a, 'b': b, 'c': c} + vereq(eval(expr, dict), res) + t = type(a) + m = getattr(t, meth) + while meth not in t.__dict__: + t = t.__bases__[0] + vereq(m, t.__dict__[meth]) + vereq(m(a, b, c), res) + bm = getattr(a, meth) + vereq(bm(b, c), res) + +def testsetop(a, b, res, stmt="a+=b", meth="__iadd__"): + if verbose: print "checking", stmt + dict = {'a': deepcopy(a), 'b': b} + exec stmt in dict + vereq(dict['a'], res) + t = type(a) + m = getattr(t, meth) + while meth not in t.__dict__: + t = t.__bases__[0] + vereq(m, t.__dict__[meth]) + dict['a'] = deepcopy(a) + m(dict['a'], b) + vereq(dict['a'], res) + dict['a'] = deepcopy(a) + bm = getattr(dict['a'], meth) + bm(b) + vereq(dict['a'], res) + +def testset2op(a, b, c, res, stmt="a[b]=c", meth="__setitem__"): + if verbose: print "checking", stmt + dict = {'a': deepcopy(a), 'b': b, 'c': c} + exec stmt in dict + vereq(dict['a'], res) + t = type(a) + m = getattr(t, meth) + while meth not in t.__dict__: + t = t.__bases__[0] + vereq(m, t.__dict__[meth]) + dict['a'] = deepcopy(a) + m(dict['a'], b, c) + vereq(dict['a'], res) + dict['a'] = deepcopy(a) + bm = getattr(dict['a'], meth) + bm(b, c) + vereq(dict['a'], res) + +def testset3op(a, b, c, d, res, stmt="a[b:c]=d", meth="__setslice__"): + if verbose: print "checking", stmt + dict = {'a': deepcopy(a), 'b': b, 'c': c, 'd': d} + exec stmt in dict + vereq(dict['a'], res) + t = type(a) + while meth not in t.__dict__: + t = t.__bases__[0] + m = getattr(t, meth) + vereq(m, t.__dict__[meth]) + dict['a'] = deepcopy(a) + m(dict['a'], b, c, d) + vereq(dict['a'], res) + dict['a'] = deepcopy(a) + bm = getattr(dict['a'], meth) + bm(b, c, d) + vereq(dict['a'], res) + +def class_docstrings(): + class Classic: + "A classic docstring." + vereq(Classic.__doc__, "A classic docstring.") + vereq(Classic.__dict__['__doc__'], "A classic docstring.") + + class Classic2: + pass + verify(Classic2.__doc__ is None) + + class NewStatic(object): + "Another docstring." + vereq(NewStatic.__doc__, "Another docstring.") + vereq(NewStatic.__dict__['__doc__'], "Another docstring.") + + class NewStatic2(object): + pass + verify(NewStatic2.__doc__ is None) + + class NewDynamic(object): + "Another docstring." + vereq(NewDynamic.__doc__, "Another docstring.") + vereq(NewDynamic.__dict__['__doc__'], "Another docstring.") + + class NewDynamic2(object): + pass + verify(NewDynamic2.__doc__ is None) + +def lists(): + if verbose: print "Testing list operations..." 
+ testbinop([1], [2], [1,2], "a+b", "__add__") + testbinop([1,2,3], 2, 1, "b in a", "__contains__") + testbinop([1,2,3], 4, 0, "b in a", "__contains__") + testbinop([1,2,3], 1, 2, "a[b]", "__getitem__") + testternop([1,2,3], 0, 2, [1,2], "a[b:c]", "__getslice__") + testsetop([1], [2], [1,2], "a+=b", "__iadd__") + testsetop([1,2], 3, [1,2,1,2,1,2], "a*=b", "__imul__") + testunop([1,2,3], 3, "len(a)", "__len__") + testbinop([1,2], 3, [1,2,1,2,1,2], "a*b", "__mul__") + testbinop([1,2], 3, [1,2,1,2,1,2], "b*a", "__rmul__") + testset2op([1,2], 1, 3, [1,3], "a[b]=c", "__setitem__") + testset3op([1,2,3,4], 1, 3, [5,6], [1,5,6,4], "a[b:c]=d", "__setslice__") + +def dicts(): + if verbose: print "Testing dict operations..." + testbinop({1:2}, {2:1}, -1, "cmp(a,b)", "__cmp__") + testbinop({1:2,3:4}, 1, 1, "b in a", "__contains__") + testbinop({1:2,3:4}, 2, 0, "b in a", "__contains__") + testbinop({1:2,3:4}, 1, 2, "a[b]", "__getitem__") + d = {1:2,3:4} + l1 = [] + for i in d.keys(): l1.append(i) + l = [] + for i in iter(d): l.append(i) + vereq(l, l1) + l = [] + for i in d.__iter__(): l.append(i) + vereq(l, l1) + l = [] + for i in dict.__iter__(d): l.append(i) + vereq(l, l1) + d = {1:2, 3:4} + testunop(d, 2, "len(a)", "__len__") + vereq(eval(repr(d), {}), d) + vereq(eval(d.__repr__(), {}), d) + testset2op({1:2,3:4}, 2, 3, {1:2,2:3,3:4}, "a[b]=c", "__setitem__") + +def dict_constructor(): + if verbose: + print "Testing dict constructor ..." + d = dict() + vereq(d, {}) + d = dict({}) + vereq(d, {}) + d = dict({1: 2, 'a': 'b'}) + vereq(d, {1: 2, 'a': 'b'}) + vereq(d, dict(d.items())) + vereq(d, dict(d.iteritems())) + d = dict({'one':1, 'two':2}) + vereq(d, dict(one=1, two=2)) + vereq(d, dict(**d)) + vereq(d, dict({"one": 1}, two=2)) + vereq(d, dict([("two", 2)], one=1)) + vereq(d, dict([("one", 100), ("two", 200)], **d)) + verify(d is not dict(**d)) + for badarg in 0, 0L, 0j, "0", [0], (0,): + try: + dict(badarg) + except TypeError: + pass + except ValueError: + if badarg == "0": + # It's a sequence, and its elements are also sequences (gotta + # love strings ), but they aren't of length 2, so this + # one seemed better as a ValueError than a TypeError. + pass + else: + raise TestFailed("no TypeError from dict(%r)" % badarg) + else: + raise TestFailed("no TypeError from dict(%r)" % badarg) + + try: + dict({}, {}) + except TypeError: + pass + else: + raise TestFailed("no TypeError from dict({}, {})") + + class Mapping: + # Lacks a .keys() method; will be added later. + dict = {1:2, 3:4, 'a':1j} + + try: + dict(Mapping()) + except TypeError: + pass + else: + raise TestFailed("no TypeError from dict(incomplete mapping)") + + Mapping.keys = lambda self: self.dict.keys() + Mapping.__getitem__ = lambda self, i: self.dict[i] + d = dict(Mapping()) + vereq(d, Mapping.dict) + + # Init from sequence of iterable objects, each producing a 2-sequence. + class AddressBookEntry: + def __init__(self, first, last): + self.first = first + self.last = last + def __iter__(self): + return iter([self.first, self.last]) + + d = dict([AddressBookEntry('Tim', 'Warsaw'), + AddressBookEntry('Barry', 'Peters'), + AddressBookEntry('Tim', 'Peters'), + AddressBookEntry('Barry', 'Warsaw')]) + vereq(d, {'Barry': 'Warsaw', 'Tim': 'Peters'}) + + d = dict(zip(range(4), range(1, 5))) + vereq(d, dict([(i, i+1) for i in range(4)])) + + # Bad sequence lengths. 
+ for bad in [('tooshort',)], [('too', 'long', 'by 1')]: + try: + dict(bad) + except ValueError: + pass + else: + raise TestFailed("no ValueError from dict(%r)" % bad) + +def test_dir(): + if verbose: + print "Testing dir() ..." + junk = 12 + vereq(dir(), ['junk']) + del junk + + # Just make sure these don't blow up! + for arg in 2, 2L, 2j, 2e0, [2], "2", u"2", (2,), {2:2}, type, test_dir: + dir(arg) + + # Try classic classes. + class C: + Cdata = 1 + def Cmethod(self): pass + + cstuff = ['Cdata', 'Cmethod', '__doc__', '__module__'] + vereq(dir(C), cstuff) + verify('im_self' in dir(C.Cmethod)) + + c = C() # c.__doc__ is an odd thing to see here; ditto c.__module__. + vereq(dir(c), cstuff) + + c.cdata = 2 + c.cmethod = lambda self: 0 + vereq(dir(c), cstuff + ['cdata', 'cmethod']) + verify('im_self' in dir(c.Cmethod)) + + class A(C): + Adata = 1 + def Amethod(self): pass + + astuff = ['Adata', 'Amethod'] + cstuff + vereq(dir(A), astuff) + verify('im_self' in dir(A.Amethod)) + a = A() + vereq(dir(a), astuff) + verify('im_self' in dir(a.Amethod)) + a.adata = 42 + a.amethod = lambda self: 3 + vereq(dir(a), astuff + ['adata', 'amethod']) + + # The same, but with new-style classes. Since these have object as a + # base class, a lot more gets sucked in. + def interesting(strings): + return [s for s in strings if not s.startswith('_')] + + class C(object): + Cdata = 1 + def Cmethod(self): pass + + cstuff = ['Cdata', 'Cmethod'] + vereq(interesting(dir(C)), cstuff) + + c = C() + vereq(interesting(dir(c)), cstuff) + verify('im_self' in dir(C.Cmethod)) + + c.cdata = 2 + c.cmethod = lambda self: 0 + vereq(interesting(dir(c)), cstuff + ['cdata', 'cmethod']) + verify('im_self' in dir(c.Cmethod)) + + class A(C): + Adata = 1 + def Amethod(self): pass + + astuff = ['Adata', 'Amethod'] + cstuff + vereq(interesting(dir(A)), astuff) + verify('im_self' in dir(A.Amethod)) + a = A() + vereq(interesting(dir(a)), astuff) + a.adata = 42 + a.amethod = lambda self: 3 + vereq(interesting(dir(a)), astuff + ['adata', 'amethod']) + verify('im_self' in dir(a.Amethod)) + + # Try a module subclass. + import sys + class M(type(sys)): + pass + minstance = M("m") + minstance.b = 2 + minstance.a = 1 + names = [x for x in dir(minstance) if x not in ["__name__", "__doc__"]] + vereq(names, ['a', 'b']) + + class M2(M): + def getdict(self): + return "Not a dict!" + __dict__ = property(getdict) + + m2instance = M2("m2") + m2instance.b = 2 + m2instance.a = 1 + vereq(m2instance.__dict__, "Not a dict!") + try: + dir(m2instance) + except TypeError: + pass + + # Two essentially featureless objects, just inheriting stuff from + # object. NB. in PyPy, dir(None) additionally contains '__nonzero__'. 
+ vereq(dir(object()), dir(Ellipsis)) + + # Nasty test case for proxied objects + class Wrapper(object): + def __init__(self, obj): + self.__obj = obj + def __repr__(self): + return "Wrapper(%s)" % repr(self.__obj) + def __getitem__(self, key): + return Wrapper(self.__obj[key]) + def __len__(self): + return len(self.__obj) + def __getattr__(self, name): + return Wrapper(getattr(self.__obj, name)) + + class C(object): + def __getclass(self): + return Wrapper(type(self)) + __class__ = property(__getclass) + + dir(C()) # This used to segfault + +binops = { + 'add': '+', + 'sub': '-', + 'mul': '*', + 'div': '/', + 'mod': '%', + 'divmod': 'divmod', + 'pow': '**', + 'lshift': '<<', + 'rshift': '>>', + 'and': '&', + 'xor': '^', + 'or': '|', + 'cmp': 'cmp', + 'lt': '<', + 'le': '<=', + 'eq': '==', + 'ne': '!=', + 'gt': '>', + 'ge': '>=', + } + +for name, expr in binops.items(): + if expr.islower(): + expr = expr + "(a, b)" + else: + expr = 'a %s b' % expr + binops[name] = expr + +unops = { + 'pos': '+', + 'neg': '-', + 'abs': 'abs', + 'invert': '~', + 'int': 'int', + 'long': 'long', + 'float': 'float', + 'oct': 'oct', + 'hex': 'hex', + } + +for name, expr in unops.items(): + if expr.islower(): + expr = expr + "(a)" + else: + expr = '%s a' % expr + unops[name] = expr + +def numops(a, b, skip=[]): + dict = {'a': a, 'b': b} + for name, expr in binops.items(): + if name not in skip: + name = "__%s__" % name + if hasattr(a, name): + res = eval(expr, dict) + testbinop(a, b, res, expr, name) + for name, expr in unops.items(): + if name not in skip: + name = "__%s__" % name + if hasattr(a, name): + res = eval(expr, dict) + testunop(a, res, expr, name) + +def ints(): + if verbose: print "Testing int operations..." + numops(100, 3) + # The following crashes in Python 2.2 + vereq((1).__nonzero__(), 1) + vereq((0).__nonzero__(), 0) + # This returns 'NotImplemented' in Python 2.2 + class C(int): + def __add__(self, other): + return NotImplemented + vereq(C(5L), 5) + try: + C() + "" + except TypeError: + pass + else: + raise TestFailed, "NotImplemented should have caused TypeError" + import sys + try: + C(sys.maxint+1) + except OverflowError: + pass + else: + raise TestFailed, "should have raised OverflowError" + +def longs(): + if verbose: print "Testing long operations..." + numops(100L, 3L) + +def floats(): + if verbose: print "Testing float operations..." + numops(100.0, 3.0) + +def complexes(): + if verbose: print "Testing complex operations..." + numops(100.0j, 3.0j, skip=['lt', 'le', 'gt', 'ge', 'int', 'long', 'float']) + class Number(complex): + __slots__ = ['prec'] + def __new__(cls, *args, **kwds): + result = complex.__new__(cls, *args) + result.prec = kwds.get('prec', 12) + return result + def __repr__(self): + prec = self.prec + if self.imag == 0.0: + return "%.*g" % (prec, self.real) + if self.real == 0.0: + return "%.*gj" % (prec, self.imag) + return "(%.*g+%.*gj)" % (prec, self.real, prec, self.imag) + __str__ = __repr__ + + a = Number(3.14, prec=6) + vereq(repr(a), "3.14") + vereq(a.prec, 6) + + a = Number(a, prec=2) + vereq(repr(a), "3.1") + vereq(a.prec, 2) + + a = Number(234.5) + vereq(repr(a), "234.5") + vereq(a.prec, 12) + + +def pydicts(): + if verbose: print "Testing Python subclass of dict..." 
+ verify(issubclass(dict, dict)) + verify(isinstance({}, dict)) + d = dict() + vereq(d, {}) + verify(d.__class__ is dict) + verify(isinstance(d, dict)) + class C(dict): + state = -1 + def __init__(self, *a, **kw): + if a: + vereq(len(a), 1) + self.state = a[0] + if kw: + for k, v in kw.items(): self[v] = k + def __getitem__(self, key): + return self.get(key, 0) + def __setitem__(self, key, value): + verify(isinstance(key, type(0))) + dict.__setitem__(self, key, value) + def setstate(self, state): + self.state = state + def getstate(self): + return self.state + verify(issubclass(C, dict)) + a1 = C(12) + vereq(a1.state, 12) + a2 = C(foo=1, bar=2) + vereq(a2[1] == 'foo' and a2[2], 'bar') + a = C() + vereq(a.state, -1) + vereq(a.getstate(), -1) + a.setstate(0) + vereq(a.state, 0) + vereq(a.getstate(), 0) + a.setstate(10) + vereq(a.state, 10) + vereq(a.getstate(), 10) + vereq(a[42], 0) + a[42] = 24 + vereq(a[42], 24) + if verbose: print "pydict stress test ..." + N = 50 + for i in range(N): + a[i] = C() + for j in range(N): + a[i][j] = i*j + for i in range(N): + for j in range(N): + vereq(a[i][j], i*j) + +def pylists(): + if verbose: print "Testing Python subclass of list..." + class C(list): + def __getitem__(self, i): + return list.__getitem__(self, i) + 100 + def __getslice__(self, i, j): + return (i, j) + a = C() + a.extend([0,1,2]) + vereq(a[0], 100) + vereq(a[1], 101) + vereq(a[2], 102) + vereq(a[100:200], (100,200)) + +def metaclass(): + if verbose: print "Testing __metaclass__..." + class C: + __metaclass__ = type + def __init__(self): + self.__state = 0 + def getstate(self): + return self.__state + def setstate(self, state): + self.__state = state + a = C() + vereq(a.getstate(), 0) + a.setstate(10) + vereq(a.getstate(), 10) + class D: + class __metaclass__(type): + def myself(cls): return cls + vereq(D.myself(), D) + d = D() + verify(d.__class__ is D) + class M1(type): + def __new__(cls, name, bases, dict): + dict['__spam__'] = 1 + return type.__new__(cls, name, bases, dict) + class C: + __metaclass__ = M1 + vereq(C.__spam__, 1) + c = C() + vereq(c.__spam__, 1) + + class _instance(object): + pass + class M2(object): + @staticmethod + def __new__(cls, name, bases, dict): + self = object.__new__(cls) + self.name = name + self.bases = bases + self.dict = dict + return self + def __call__(self): + it = _instance() + # Early binding of methods + for key in self.dict: + if key.startswith("__"): + continue + setattr(it, key, self.dict[key].__get__(it, self)) + return it + class C: + __metaclass__ = M2 + def spam(self): + return 42 + vereq(C.name, 'C') + vereq(C.bases, ()) + verify('spam' in C.dict) + c = C() + vereq(c.spam(), 42) + + # More metaclass examples + + class autosuper(type): + # Automatically add __super to the class + # This trick only works for dynamic classes + def __new__(metaclass, name, bases, dict): + cls = super(autosuper, metaclass).__new__(metaclass, + name, bases, dict) + # Name mangling for __super removes leading underscores + while name[:1] == "_": + name = name[1:] + if name: + name = "_%s__super" % name + else: + name = "__super" + setattr(cls, name, super(cls)) + return cls + class A: + __metaclass__ = autosuper + def meth(self): + return "A" + class B(A): + def meth(self): + return "B" + self.__super.meth() + class C(A): + def meth(self): + return "C" + self.__super.meth() + class D(C, B): + def meth(self): + return "D" + self.__super.meth() + vereq(D().meth(), "DCBA") + class E(B, C): + def meth(self): + return "E" + self.__super.meth() + vereq(E().meth(), 
"EBCA") + + class autoproperty(type): + # Automatically create property attributes when methods + # named _get_x and/or _set_x are found + def __new__(metaclass, name, bases, dict): + hits = {} + for key, val in dict.iteritems(): + if key.startswith("_get_"): + key = key[5:] + get, set = hits.get(key, (None, None)) + get = val + hits[key] = get, set + elif key.startswith("_set_"): + key = key[5:] + get, set = hits.get(key, (None, None)) + set = val + hits[key] = get, set + for key, (get, set) in hits.iteritems(): + dict[key] = property(get, set) + return super(autoproperty, metaclass).__new__(metaclass, + name, bases, dict) + class A: + __metaclass__ = autoproperty + def _get_x(self): + return -self.__x + def _set_x(self, x): + self.__x = -x + a = A() + verify(not hasattr(a, "x")) + a.x = 12 + vereq(a.x, 12) + vereq(a._A__x, -12) + + class multimetaclass(autoproperty, autosuper): + # Merge of multiple cooperating metaclasses + pass + class A: + __metaclass__ = multimetaclass + def _get_x(self): + return "A" + class B(A): + def _get_x(self): + return "B" + self.__super._get_x() + class C(A): + def _get_x(self): + return "C" + self.__super._get_x() + class D(C, B): + def _get_x(self): + return "D" + self.__super._get_x() + vereq(D().x, "DCBA") + + # Make sure type(x) doesn't call x.__class__.__init__ + class T(type): + counter = 0 + def __init__(self, *args): + T.counter += 1 + class C: + __metaclass__ = T + vereq(T.counter, 1) + a = C() + vereq(type(a), C) + vereq(T.counter, 1) + + class C(object): pass + c = C() + try: c() + except TypeError: pass + else: raise TestFailed, "calling object w/o call method should raise TypeError" + +def pymods(): + if verbose: print "Testing Python subclass of module..." + log = [] + import sys + MT = type(sys) + class MM(MT): + def __init__(self, name): + MT.__init__(self, name) + def __getattribute__(self, name): + log.append(("getattr", name)) + return MT.__getattribute__(self, name) + def __setattr__(self, name, value): + log.append(("setattr", name, value)) + MT.__setattr__(self, name, value) + def __delattr__(self, name): + log.append(("delattr", name)) + MT.__delattr__(self, name) + a = MM("a") + a.foo = 12 + x = a.foo + del a.foo + vereq(log, [("setattr", "foo", 12), + ("getattr", "foo"), + ("delattr", "foo")]) + +def multi(): + if verbose: print "Testing multiple inheritance..." + class C(object): + def __init__(self): + self.__state = 0 + def getstate(self): + return self.__state + def setstate(self, state): + self.__state = state + a = C() + vereq(a.getstate(), 0) + a.setstate(10) + vereq(a.getstate(), 10) + class D(dict, C): + def __init__(self): + type({}).__init__(self) + C.__init__(self) + d = D() + vereq(d.keys(), []) + d["hello"] = "world" + vereq(d.items(), [("hello", "world")]) + vereq(d["hello"], "world") + vereq(d.getstate(), 0) + d.setstate(10) + vereq(d.getstate(), 10) + vereq(D.__mro__, (D, dict, C, object)) + + # SF bug #442833 + class Node(object): + def __int__(self): + return int(self.foo()) + def foo(self): + return "23" + class Frag(Node, list): + def foo(self): + return "42" + vereq(Node().__int__(), 23) + vereq(int(Node()), 23) + vereq(Frag().__int__(), 42) + vereq(int(Frag()), 42) + + # MI mixing classic and new-style classes. + + class A: + x = 1 + + class B(A): + pass + + class C(A): + x = 2 + + class D(B, C): + pass + vereq(D.x, 1) + + # Classic MRO is preserved for a classic base class. 
+ class E(D, object): + pass + vereq(E.__mro__, (E, D, B, A, C, object)) + vereq(E.x, 1) + + # But with a mix of classic bases, their MROs are combined using + # new-style MRO. + class F(B, C, object): + pass + vereq(F.__mro__, (F, B, C, A, object)) + vereq(F.x, 2) + + # Try something else. + class C: + def cmethod(self): + return "C a" + def all_method(self): + return "C b" + + class M1(C, object): + def m1method(self): + return "M1 a" + def all_method(self): + return "M1 b" + + vereq(M1.__mro__, (M1, C, object)) + m = M1() + vereq(m.cmethod(), "C a") + vereq(m.m1method(), "M1 a") + vereq(m.all_method(), "M1 b") + + class D(C): + def dmethod(self): + return "D a" + def all_method(self): + return "D b" + + class M2(D, object): + def m2method(self): + return "M2 a" + def all_method(self): + return "M2 b" + + vereq(M2.__mro__, (M2, D, C, object)) + m = M2() + vereq(m.cmethod(), "C a") + vereq(m.dmethod(), "D a") + vereq(m.m2method(), "M2 a") + vereq(m.all_method(), "M2 b") + + class M3(M1, M2, object): + def m3method(self): + return "M3 a" + def all_method(self): + return "M3 b" + vereq(M3.__mro__, (M3, M1, M2, D, C, object)) + m = M3() + vereq(m.cmethod(), "C a") + vereq(m.dmethod(), "D a") + vereq(m.m1method(), "M1 a") + vereq(m.m2method(), "M2 a") + vereq(m.m3method(), "M3 a") + vereq(m.all_method(), "M3 b") + + class Classic: + pass + try: + class New(Classic): + __metaclass__ = type + except TypeError: + pass + else: + raise TestFailed, "new class with only classic bases - shouldn't be" + +def diamond(): + if verbose: print "Testing multiple inheritance special cases..." + class A(object): + def spam(self): return "A" + vereq(A().spam(), "A") + class B(A): + def boo(self): return "B" + def spam(self): return "B" + vereq(B().spam(), "B") + vereq(B().boo(), "B") + class C(A): + def boo(self): return "C" + vereq(C().spam(), "A") + vereq(C().boo(), "C") + class D(B, C): pass + vereq(D().spam(), "B") + vereq(D().boo(), "B") + vereq(D.__mro__, (D, B, C, A, object)) + class E(C, B): pass + vereq(E().spam(), "B") + vereq(E().boo(), "C") + vereq(E.__mro__, (E, C, B, A, object)) + # MRO order disagreement + try: + class F(D, E): pass + except TypeError: + pass + else: + raise TestFailed, "expected MRO order disagreement (F)" + try: + class G(E, D): pass + except TypeError: + pass + else: + raise TestFailed, "expected MRO order disagreement (G)" + + +# see thread python-dev/2002-October/029035.html +def ex5(): + if verbose: print "Testing ex5 from C3 switch discussion..." + class A(object): pass + class B(object): pass + class C(object): pass + class X(A): pass + class Y(A): pass + class Z(X,B,Y,C): pass + vereq(Z.__mro__, (Z, X, B, Y, A, C, object)) + +# see "A Monotonic Superclass Linearization for Dylan", +# by Kim Barrett et al. (OOPSLA 1996) +def monotonicity(): + if verbose: print "Testing MRO monotonicity..." 
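The __mro__ tuples asserted on above come straight from the C3 linearization; a minimal way to inspect it is sketched below, with illustrative class names mirroring the ex5 shape.

    # Sketch: the C3 linearization is exposed both as cls.__mro__ and
    # through the cls.mro() method used by the altmro() test later on.
    class A(object): pass
    class B(object): pass
    class X(A): pass
    class Y(A): pass
    class Z(X, B, Y): pass

    print [c.__name__ for c in Z.__mro__]   # ['Z', 'X', 'B', 'Y', 'A', 'object']
    print Z.mro() == list(Z.__mro__)        # True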
+ class Boat(object): pass + class DayBoat(Boat): pass + class WheelBoat(Boat): pass + class EngineLess(DayBoat): pass + class SmallMultihull(DayBoat): pass + class PedalWheelBoat(EngineLess,WheelBoat): pass + class SmallCatamaran(SmallMultihull): pass + class Pedalo(PedalWheelBoat,SmallCatamaran): pass + + vereq(PedalWheelBoat.__mro__, + (PedalWheelBoat, EngineLess, DayBoat, WheelBoat, Boat, + object)) + vereq(SmallCatamaran.__mro__, + (SmallCatamaran, SmallMultihull, DayBoat, Boat, object)) + + vereq(Pedalo.__mro__, + (Pedalo, PedalWheelBoat, EngineLess, SmallCatamaran, + SmallMultihull, DayBoat, WheelBoat, Boat, object)) + +# see "A Monotonic Superclass Linearization for Dylan", +# by Kim Barrett et al. (OOPSLA 1996) +def consistency_with_epg(): + if verbose: print "Testing consistentcy with EPG..." + class Pane(object): pass + class ScrollingMixin(object): pass + class EditingMixin(object): pass + class ScrollablePane(Pane,ScrollingMixin): pass + class EditablePane(Pane,EditingMixin): pass + class EditableScrollablePane(ScrollablePane,EditablePane): pass + + vereq(EditableScrollablePane.__mro__, + (EditableScrollablePane, ScrollablePane, EditablePane, + Pane, ScrollingMixin, EditingMixin, object)) + +mro_err_msg = "cycle among base classes:" + +def mro_disagreement(): + if verbose: print "Testing error messages for MRO disagreement..." + def raises(exc, expected, callable, *args): + try: + callable(*args) + except exc, msg: + if not str(msg).startswith(expected): + raise TestFailed, "Message %r, expected %r" % (str(msg), + expected) + else: + raise TestFailed, "Expected %s" % exc + class A(object): pass + class B(A): pass + class C(object): pass + # Test some very simple errors + raises(TypeError, "duplicate base class A", + type, "X", (A, A), {}) + raises(TypeError, mro_err_msg, + type, "X", (A, B), {}) + raises(TypeError, mro_err_msg, + type, "X", (A, C, B), {}) + # Test a slightly more complex error + class GridLayout(object): pass + class HorizontalGrid(GridLayout): pass + class VerticalGrid(GridLayout): pass + class HVGrid(HorizontalGrid, VerticalGrid): pass + class VHGrid(VerticalGrid, HorizontalGrid): pass + raises(TypeError, mro_err_msg, + type, "ConfusedGrid", (HVGrid, VHGrid), {}) + +def objects(): + if verbose: print "Testing object class..." + a = object() + vereq(a.__class__, object) + vereq(type(a), object) + b = object() + verify(a is not b) + verify(not hasattr(a, "foo")) + try: + a.foo = 12 + except (AttributeError, TypeError): + pass + else: + verify(0, "object() should not allow setting a foo attribute") + verify(not hasattr(object(), "__dict__")) + + class Cdict(object): + pass + x = Cdict() + vereq(x.__dict__, {}) + x.foo = 1 + vereq(x.foo, 1) + vereq(x.__dict__, {'foo': 1}) + +def slots(): + if verbose: print "Testing __slots__..." 
+ class C0(object): + __slots__ = [] + x = C0() + verify(not hasattr(x, "__dict__")) + verify(not hasattr(x, "foo")) + + class C1(object): + __slots__ = ['a'] + x = C1() + verify(not hasattr(x, "__dict__")) + verify(not hasattr(x, "a")) + x.a = 1 + vereq(x.a, 1) + x.a = None + veris(x.a, None) + del x.a + verify(not hasattr(x, "a")) + + class C3(object): + __slots__ = ['a', 'b', 'c'] + x = C3() + verify(not hasattr(x, "__dict__")) + verify(not hasattr(x, 'a')) + verify(not hasattr(x, 'b')) + verify(not hasattr(x, 'c')) + x.a = 1 + x.b = 2 + x.c = 3 + vereq(x.a, 1) + vereq(x.b, 2) + vereq(x.c, 3) + + class C4(object): + """Validate name mangling""" + __slots__ = ['__a'] + def __init__(self, value): + self.__a = value + def get(self): + return self.__a + x = C4(5) + verify(not hasattr(x, '__dict__')) + verify(not hasattr(x, '__a')) + vereq(x.get(), 5) + try: + x.__a = 6 + except AttributeError: + pass + else: + raise TestFailed, "Double underscored names not mangled" + + # Make sure slot names are proper identifiers + try: + class C(object): + __slots__ = [None] + except TypeError: + pass + else: + raise TestFailed, "[None] slots not caught" + try: + class C(object): + __slots__ = ["foo bar"] + except TypeError: + pass + else: + raise TestFailed, "['foo bar'] slots not caught" + try: + class C(object): + __slots__ = ["foo\0bar"] + except TypeError: + pass + else: + raise TestFailed, "['foo\\0bar'] slots not caught" + try: + class C(object): + __slots__ = ["1"] + except TypeError: + pass + else: + raise TestFailed, "['1'] slots not caught" + try: + class C(object): + __slots__ = [""] + except TypeError: + pass + else: + raise TestFailed, "[''] slots not caught" + class C(object): + __slots__ = ["a", "a_b", "_a", "A0123456789Z"] + + # Test unicode slot names + try: + unichr + except NameError: + pass + else: + # _unicode_to_string used to modify slots in certain circumstances + slots = (unicode("foo"), unicode("bar")) + class C(object): + __slots__ = slots + x = C() + x.foo = 5 + vereq(x.foo, 5) + veris(type(slots[0]), unicode) + # this used to leak references + try: + class C(object): + __slots__ = [unichr(128)] + except (TypeError, UnicodeEncodeError): + pass + else: + raise TestFailed, "[unichr(128)] slots not caught" + + # Test leaks + class Counted(object): + counter = 0 # counts the number of instances alive + def __init__(self): + Counted.counter += 1 + def __del__(self): + Counted.counter -= 1 + class C(object): + __slots__ = ['a', 'b', 'c'] + x = C() + x.a = Counted() + x.b = Counted() + x.c = Counted() + vereq(Counted.counter, 3) + del x + vereq(Counted.counter, 0) + class D(C): + pass + x = D() + x.a = Counted() + x.z = Counted() + vereq(Counted.counter, 2) + del x + vereq(Counted.counter, 0) + class E(D): + __slots__ = ['e'] + x = E() + x.a = Counted() + x.z = Counted() + x.e = Counted() + vereq(Counted.counter, 3) + del x + vereq(Counted.counter, 0) + + # Test cyclical leaks [SF bug 519621] + class F(object): + __slots__ = ['a', 'b'] + log = [] + s = F() + s.a = [Counted(), s] + vereq(Counted.counter, 1) + s = None + gc.collect() + gc.collect() + gc.collect() + vereq(Counted.counter, 0) + + # Test lookup leaks [SF bug 572567] + import sys + class G(object): + def __cmp__(self, other): + return 0 + g = G() + orig_objects = len(gc.get_objects()) + for i in xrange(10): + g==g + new_objects = len(gc.get_objects()) + vereq(orig_objects, new_objects) + class H(object): + __slots__ = ['a', 'b'] + def __init__(self): + self.a = 1 + self.b = 2 + def __del__(self): + assert self.a == 1 + 
assert self.b == 2 + + save_stderr = sys.stderr + sys.stderr = sys.stdout + h = H() + try: + del h + finally: + sys.stderr = save_stderr + +def slotspecials(): + if verbose: print "Testing __dict__ and __weakref__ in __slots__..." + + class D(object): + __slots__ = ["__dict__"] + a = D() + verify(hasattr(a, "__dict__")) + verify(not hasattr(a, "__weakref__")) + a.foo = 42 + vereq(a.__dict__, {"foo": 42}) + + class W(object): + __slots__ = ["__weakref__"] + a = W() + verify(hasattr(a, "__weakref__")) + verify(not hasattr(a, "__dict__")) + try: + a.foo = 42 + except AttributeError: + pass + else: + raise TestFailed, "shouldn't be allowed to set a.foo" + + class C1(W, D): + __slots__ = [] + a = C1() + verify(hasattr(a, "__dict__")) + verify(hasattr(a, "__weakref__")) + a.foo = 42 + vereq(a.__dict__, {"foo": 42}) + + class C2(D, W): + __slots__ = [] + a = C2() + verify(hasattr(a, "__dict__")) + verify(hasattr(a, "__weakref__")) + a.foo = 42 + vereq(a.__dict__, {"foo": 42}) + +# MRO order disagreement +# +# class C3(C1, C2): +# __slots__ = [] +# +# class C4(C2, C1): +# __slots__ = [] + +def dynamics(): + if verbose: print "Testing class attribute propagation..." + class D(object): + pass + class E(D): + pass + class F(D): + pass + D.foo = 1 + vereq(D.foo, 1) + # Test that dynamic attributes are inherited + vereq(E.foo, 1) + vereq(F.foo, 1) + # Test dynamic instances + class C(object): + pass + a = C() + verify(not hasattr(a, "foobar")) + C.foobar = 2 + vereq(a.foobar, 2) + C.method = lambda self: 42 + vereq(a.method(), 42) + C.__repr__ = lambda self: "C()" + vereq(repr(a), "C()") + C.__int__ = lambda self: 100 + vereq(int(a), 100) + vereq(a.foobar, 2) + verify(not hasattr(a, "spam")) + def mygetattr(self, name): + if name == "spam": + return "spam" + raise AttributeError + C.__getattr__ = mygetattr + vereq(a.spam, "spam") + a.new = 12 + vereq(a.new, 12) + def mysetattr(self, name, value): + if name == "spam": + raise AttributeError + return object.__setattr__(self, name, value) + C.__setattr__ = mysetattr + try: + a.spam = "not spam" + except AttributeError: + pass + else: + verify(0, "expected AttributeError") + vereq(a.spam, "spam") + class D(C): + pass + d = D() + d.foo = 1 + vereq(d.foo, 1) + + # Test handling of int*seq and seq*int + class I(int): + pass + vereq("a"*I(2), "aa") + vereq(I(2)*"a", "aa") + vereq(2*I(3), 6) + vereq(I(3)*2, 6) + vereq(I(3)*I(2), 6) + + # Test handling of long*seq and seq*long + class L(long): + pass + vereq("a"*L(2L), "aa") + vereq(L(2L)*"a", "aa") + vereq(2*L(3), 6) + vereq(L(3)*2, 6) + vereq(L(3)*L(2), 6) + + # Test comparison of classes with dynamic metaclasses + class dynamicmetaclass(type): + pass + class someclass: + __metaclass__ = dynamicmetaclass + verify(someclass != object) + +def errors(): + if verbose: print "Testing errors..." 
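dynamics() above relies on the fact that attributes added to a new-style class after instances exist are immediately visible through those instances, special methods included. A reduced sketch with illustrative names, not from the test file.

    # Sketch: methods patched onto the class later are picked up by
    # existing instances, which is what the dynamics() checks exercise.
    class Thing(object):
        pass

    t = Thing()
    Thing.describe = lambda self: "a thing"
    print t.describe()      # a thing

    Thing.__int__ = lambda self: 7
    print int(t)            # 7 -- the conversion goes through the class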
+ + try: + class C(list, dict): + pass + except TypeError: + pass + else: + verify(0, "inheritance from both list and dict should be illegal") + + try: + class C(object, None): + pass + except TypeError: + pass + else: + verify(0, "inheritance from non-type should be illegal") + class Classic: + pass + + try: + class C(type(len)): + pass + except TypeError: + pass + else: + verify(0, "inheritance from CFunction should be illegal") + + try: + class C(object): + __slots__ = 1 + except TypeError: + pass + else: + verify(0, "__slots__ = 1 should be illegal") + + try: + class C(object): + __slots__ = [1] + except TypeError: + pass + else: + verify(0, "__slots__ = [1] should be illegal") + +def classmethods(): + if verbose: print "Testing class methods..." + class C(object): + def foo(*a): return a + goo = classmethod(foo) + c = C() + vereq(C.goo(1), (C, 1)) + vereq(c.goo(1), (C, 1)) + vereq(c.foo(1), (c, 1)) + class D(C): + pass + d = D() + vereq(D.goo(1), (D, 1)) + vereq(d.goo(1), (D, 1)) + vereq(d.foo(1), (d, 1)) + vereq(D.foo(d, 1), (d, 1)) + # Test for a specific crash (SF bug 528132) + def f(cls, arg): return (cls, arg) + ff = classmethod(f) + vereq(ff.__get__(0, int)(42), (int, 42)) + vereq(ff.__get__(0)(42), (int, 42)) + + # Test super() with classmethods (SF bug 535444) + veris(C.goo.im_self, C) + veris(D.goo.im_self, D) + veris(super(D,D).goo.im_self, D) + veris(super(D,d).goo.im_self, D) + vereq(super(D,D).goo(), (D,)) + vereq(super(D,d).goo(), (D,)) + + # Verify that argument is checked for callability (SF bug 753451) + try: + classmethod(1).__get__(1) + except TypeError: + pass + else: + raise TestFailed, "classmethod should check for callability" + # Verify that classmethod() doesn't allow keyword args + try: + classmethod(f, kw=1) + except TypeError: + pass + else: + raise TestFailed, "classmethod shouldn't accept keyword args" + + +def staticmethods(): + if verbose: print "Testing static methods..." + class C(object): + def foo(*a): return a + goo = staticmethod(foo) + c = C() + vereq(C.goo(1), (1,)) + vereq(c.goo(1), (1,)) + vereq(c.foo(1), (c, 1,)) + class D(C): + pass + d = D() + vereq(D.goo(1), (1,)) + vereq(d.goo(1), (1,)) + vereq(d.foo(1), (d, 1)) + vereq(D.foo(d, 1), (d, 1)) + + + class D(C): + pass + d = D() + vereq(D.goo(1), (1,)) + vereq(d.goo(1), (1,)) + vereq(d.foo(1), (d, 1)) + vereq(D.foo(d, 1), (d, 1)) + +def classic(): + if verbose: print "Testing classic classes..." + class C: + def foo(*a): return a + goo = classmethod(foo) + c = C() + vereq(C.goo(1), (C, 1)) + vereq(c.goo(1), (C, 1)) + vereq(c.foo(1), (c, 1)) + class D(C): + pass + d = D() + vereq(D.goo(1), (D, 1)) + vereq(d.goo(1), (D, 1)) + vereq(d.foo(1), (d, 1)) + vereq(D.foo(d, 1), (d, 1)) + class E: # *not* subclassing from C + foo = C.foo + vereq(E().foo, C.foo) # i.e., unbound + verify(C.foo.__get__(C()).im_self is not None) + +def compattr(): + if verbose: print "Testing computed attributes..." 
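The classmethod checks above call ff.__get__(0, int)(42) directly because classmethod and staticmethod are descriptors: all of the binding happens in their __get__. A short sketch of that, with illustrative names.

    # Sketch: classmethod/staticmethod are just descriptors; binding to a
    # class happens in __get__, which the ff.__get__(...) checks poke at.
    def pair(cls, arg):
        return (cls, arg)

    bound = classmethod(pair).__get__(None, dict)   # bind to the dict type
    print bound(42) == (dict, 42)                   # True

    class C(object):
        twice = staticmethod(lambda x: x * 2)

    print C.twice(21)       # 42 -- no implicit self or cls
    print C().twice(21)     # 42 -- same when reached via an instance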
+ class C(object): + class computed_attribute(object): + def __init__(self, get, set=None, delete=None): + self.__get = get + self.__set = set + self.__delete = delete + def __get__(self, obj, type=None): + return self.__get(obj) + def __set__(self, obj, value): + return self.__set(obj, value) + def __delete__(self, obj): + return self.__delete(obj) + def __init__(self): + self.__x = 0 + def __get_x(self): + x = self.__x + self.__x = x+1 + return x + def __set_x(self, x): + self.__x = x + def __delete_x(self): + del self.__x + x = computed_attribute(__get_x, __set_x, __delete_x) + a = C() + vereq(a.x, 0) + vereq(a.x, 1) + a.x = 10 + vereq(a.x, 10) + vereq(a.x, 11) + del a.x + vereq(hasattr(a, 'x'), 0) + +def newslot(): + if verbose: print "Testing __new__ slot override..." + class C(list): + def __new__(cls): + self = list.__new__(cls) + self.foo = 1 + return self + def __init__(self): + self.foo = self.foo + 2 + a = C() + vereq(a.foo, 3) + verify(a.__class__ is C) + class D(C): + pass + b = D() + vereq(b.foo, 3) + verify(b.__class__ is D) + +def altmro(): + if verbose: print "Testing mro() and overriding it..." + class A(object): + def f(self): return "A" + class B(A): + pass + class C(A): + def f(self): return "C" + class D(B, C): + pass + vereq(D.mro(), [D, B, C, A, object]) + vereq(D.__mro__, (D, B, C, A, object)) + vereq(D().f(), "C") + + class PerverseMetaType(type): + def mro(cls): + L = type.mro(cls) + L.reverse() + return L + class X(D,B,C,A): + __metaclass__ = PerverseMetaType + vereq(X.__mro__, (object, A, C, B, D, X)) + vereq(X().f(), "A") + + try: + class X(object): + class __metaclass__(type): + def mro(self): + return [self, dict, object] + except TypeError: + pass + else: + raise TestFailed, "devious mro() return not caught" + + try: + class X(object): + class __metaclass__(type): + def mro(self): + return [1] + except TypeError: + pass + else: + raise TestFailed, "non-class mro() return not caught" + + try: + class X(object): + class __metaclass__(type): + def mro(self): + return 1 + except TypeError: + pass + else: + raise TestFailed, "non-sequence mro() return not caught" + + +def overloading(): + if verbose: print "Testing operator overloading..." + + class B(object): + "Intermediate class because object doesn't have a __setattr__" + + class C(B): + + def __getattr__(self, name): + if name == "foo": + return ("getattr", name) + else: + raise AttributeError + def __setattr__(self, name, value): + if name == "foo": + self.setattr = (name, value) + else: + return B.__setattr__(self, name, value) + def __delattr__(self, name): + if name == "foo": + self.delattr = name + else: + return B.__delattr__(self, name) + + def __getitem__(self, key): + return ("getitem", key) + def __setitem__(self, key, value): + self.setitem = (key, value) + def __delitem__(self, key): + self.delitem = key + + def __getslice__(self, i, j): + return ("getslice", i, j) + def __setslice__(self, i, j, value): + self.setslice = (i, j, value) + def __delslice__(self, i, j): + self.delslice = (i, j) + + a = C() + vereq(a.foo, ("getattr", "foo")) + a.foo = 12 + vereq(a.setattr, ("foo", 12)) + del a.foo + vereq(a.delattr, "foo") + + vereq(a[12], ("getitem", 12)) + a[12] = 21 + vereq(a.setitem, (12, 21)) + del a[12] + vereq(a.delitem, 12) + + vereq(a[0:10], ("getslice", 0, 10)) + a[0:10] = "foo" + vereq(a.setslice, (0, 10, "foo")) + del a[0:10] + vereq(a.delslice, (0, 10)) + +def methods(): + if verbose: print "Testing methods..." 
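computed_attribute above is a hand-written data descriptor; the whole protocol is just __get__/__set__ (and optionally __delete__) on a class attribute. A minimal sketch with illustrative names.

    # Sketch: a tiny data descriptor -- the same protocol that
    # computed_attribute and property implement.
    class Doubled(object):
        def __init__(self, name):
            self.name = name
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            return obj.__dict__[self.name] * 2
        def __set__(self, obj, value):
            obj.__dict__[self.name] = value

    class Box(object):
        size = Doubled('size')

    b = Box()
    b.size = 5
    print b.size    # 10 -- __set__ stored 5, __get__ returns it doubled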
+ class C(object): + def __init__(self, x): + self.x = x + def foo(self): + return self.x + c1 = C(1) + vereq(c1.foo(), 1) + class D(C): + boo = C.foo + goo = c1.foo + d2 = D(2) + vereq(d2.foo(), 2) + vereq(d2.boo(), 2) + vereq(d2.goo(), 1) + class E(object): + foo = C.foo + vereq(E().foo, C.foo) # i.e., unbound + verify(repr(C.foo.__get__(C(1))).startswith("= 0) + vereq(str(c1), repr(c1)) + verify(-1 not in c1) + for i in range(10): + verify(i in c1) + verify(10 not in c1) + # Test the default behavior for dynamic classes + class D(object): + def __getitem__(self, i): + if 0 <= i < 10: return i + raise IndexError + d1 = D() + d2 = D() + verify(not not d1) + verify(id(d1) != id(d2)) + hash(d1) + hash(d2) + vereq(cmp(d1, d2), cmp(id(d1), id(d2))) + vereq(d1, d1) + verify(d1 != d2) + verify(not d1 != d1) + verify(not d1 == d2) + # Note that the module name appears in str/repr, and that varies + # depending on whether this test is run standalone or from a framework. + verify(str(d1).find('D object at ') >= 0) + vereq(str(d1), repr(d1)) + verify(-1 not in d1) + for i in range(10): + verify(i in d1) + verify(10 not in d1) + # Test overridden behavior for static classes + class Proxy(object): + def __init__(self, x): + self.x = x + def __nonzero__(self): + return not not self.x + def __hash__(self): + return hash(self.x) + def __eq__(self, other): + return self.x == other + def __ne__(self, other): + return self.x != other + def __cmp__(self, other): + return cmp(self.x, other.x) + def __str__(self): + return "Proxy:%s" % self.x + def __repr__(self): + return "Proxy(%r)" % self.x + def __contains__(self, value): + return value in self.x + p0 = Proxy(0) + p1 = Proxy(1) + p_1 = Proxy(-1) + verify(not p0) + verify(not not p1) + vereq(hash(p0), hash(0)) + vereq(p0, p0) + verify(p0 != p1) + verify(not p0 != p0) + vereq(not p0, p1) + vereq(cmp(p0, p1), -1) + vereq(cmp(p0, p0), 0) + vereq(cmp(p0, p_1), 1) + vereq(str(p0), "Proxy:0") + vereq(repr(p0), "Proxy(0)") + p10 = Proxy(range(10)) + verify(-1 not in p10) + for i in range(10): + verify(i in p10) + verify(10 not in p10) + # Test overridden behavior for dynamic classes + class DProxy(object): + def __init__(self, x): + self.x = x + def __nonzero__(self): + return not not self.x + def __hash__(self): + return hash(self.x) + def __eq__(self, other): + return self.x == other + def __ne__(self, other): + return self.x != other + def __cmp__(self, other): + return cmp(self.x, other.x) + def __str__(self): + return "DProxy:%s" % self.x + def __repr__(self): + return "DProxy(%r)" % self.x + def __contains__(self, value): + return value in self.x + p0 = DProxy(0) + p1 = DProxy(1) + p_1 = DProxy(-1) + verify(not p0) + verify(not not p1) + vereq(hash(p0), hash(0)) + vereq(p0, p0) + verify(p0 != p1) + verify(not p0 != p0) + vereq(not p0, p1) + vereq(cmp(p0, p1), -1) + vereq(cmp(p0, p0), 0) + vereq(cmp(p0, p_1), 1) + vereq(str(p0), "DProxy:0") + vereq(repr(p0), "DProxy(0)") + p10 = DProxy(range(10)) + verify(-1 not in p10) + for i in range(10): + verify(i in p10) + verify(10 not in p10) + # Safety test for __cmp__ + def unsafecmp(a, b): + try: + a.__class__.__cmp__(a, b) + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow %s.__cmp__(%r, %r)" % ( + a.__class__, a, b) + # unicode, int, float and long does not have a __cmp__ in PyPy + # unsafecmp(u"123", "123") + # unsafecmp("123", u"123") + # unsafecmp(1, 1.0) + # unsafecmp(1.0, 1) + # unsafecmp(1, 1L) + # unsafecmp(1L, 1) + + class Letter(str): + def __new__(cls, letter): + if letter == 
'EPS': + return str.__new__(cls) + return str.__new__(cls, letter) + def __str__(self): + if not self: + return 'EPS' + return self + + # sys.stdout needs to be the original to trigger the recursion bug + import sys + test_stdout = sys.stdout + sys.stdout = get_original_stdout() + try: + # nothing should actually be printed, this should raise an exception + print Letter('w') + except RuntimeError: + pass + else: + raise TestFailed, "expected a RuntimeError for print recursion" + sys.stdout = test_stdout + +def weakrefs(): + if verbose: print "Testing weak references..." + import weakref + class C(object): + pass + c = C() + r = weakref.ref(c) + verify(r() is c) + del c + gc.collect() + gc.collect() + gc.collect() + verify(r() is None) + del r + class NoWeak(object): + __slots__ = ['foo'] + no = NoWeak() + try: + weakref.ref(no) + except TypeError, msg: + verify(str(msg).find("weak reference") >= 0) + else: + verify(0, "weakref.ref(no) should be illegal") + class Weak(object): + __slots__ = ['foo', '__weakref__'] + yes = Weak() + r = weakref.ref(yes) + verify(r() is yes) + del yes + gc.collect() + gc.collect() + gc.collect() + verify(r() is None) + del r + +def properties(): + if verbose: print "Testing property..." + class C(object): + def getx(self): + return self.__x + def setx(self, value): + self.__x = value + def delx(self): + del self.__x + x = property(getx, setx, delx, doc="I'm the x property.") + a = C() + verify(not hasattr(a, "x")) + a.x = 42 + vereq(a._C__x, 42) + vereq(a.x, 42) + del a.x + verify(not hasattr(a, "x")) + verify(not hasattr(a, "_C__x")) + C.x.__set__(a, 100) + vereq(C.x.__get__(a), 100) + C.x.__delete__(a) + verify(not hasattr(a, "x")) + + raw = C.__dict__['x'] + verify(isinstance(raw, property)) + + attrs = dir(raw) + verify("__doc__" in attrs) + verify("fget" in attrs) + verify("fset" in attrs) + verify("fdel" in attrs) + + vereq(raw.__doc__, "I'm the x property.") + verify(raw.fget is C.__dict__['getx']) + verify(raw.fset is C.__dict__['setx']) + verify(raw.fdel is C.__dict__['delx']) + + for attr in "__doc__", "fget", "fset", "fdel": + try: + setattr(raw, attr, 42) + except TypeError, msg: + if str(msg).find('readonly') < 0: + raise TestFailed("when setting readonly attr %r on a " + "property, got unexpected TypeError " + "msg %r" % (attr, str(msg))) + else: + raise TestFailed("expected TypeError from trying to set " + "readonly %r attr on a property" % attr) + + class D(object): + __getitem__ = property(lambda s: 1/0) + + d = D() + try: + for i in d: + str(i) + except ZeroDivisionError: + pass + else: + raise TestFailed, "expected ZeroDivisionError from bad property" + + class E(object): + def getter(self): + "getter method" + return 0 + def setter(self, value): + "setter method" + pass + prop = property(getter) + vereq(prop.__doc__, "getter method") + prop2 = property(fset=setter) + vereq(prop2.__doc__, None) + + # this segfaulted in 2.5b2 + try: + import _testcapi + except ImportError: + pass + else: + class X(object): + p = property(_testcapi.test_with_docstring) + + +def supers(): + if verbose: print "Testing super..." 
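The Weak/NoWeak classes above exercise the rule repeated in the sketch below: a slotted new-style instance is only weakly referenceable if '__weakref__' appears in __slots__ somewhere in the hierarchy. Class names are illustrative.

    # Sketch: __slots__ suppresses the __weakref__ slot unless it is
    # listed explicitly, so weakref.ref() fails on the bare version.
    import weakref

    class NoRef(object):
        __slots__ = ['value']

    class WithRef(object):
        __slots__ = ['value', '__weakref__']

    w = WithRef()
    print weakref.ref(w)() is w     # True

    try:
        weakref.ref(NoRef())
    except TypeError:
        print "NoRef instances cannot be weakly referenced"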
+ + class A(object): + def meth(self, a): + return "A(%r)" % a + + vereq(A().meth(1), "A(1)") + + class B(A): + def __init__(self): + self.__super = super(B, self) + def meth(self, a): + return "B(%r)" % a + self.__super.meth(a) + + vereq(B().meth(2), "B(2)A(2)") + + class C(A): + def meth(self, a): + return "C(%r)" % a + self.__super.meth(a) + C._C__super = super(C) + + vereq(C().meth(3), "C(3)A(3)") + + class D(C, B): + def meth(self, a): + return "D(%r)" % a + super(D, self).meth(a) + + vereq(D().meth(4), "D(4)C(4)B(4)A(4)") + + # Test for subclassing super + + class mysuper(super): + def __init__(self, *args): + return super(mysuper, self).__init__(*args) + + class E(D): + def meth(self, a): + return "E(%r)" % a + mysuper(E, self).meth(a) + + vereq(E().meth(5), "E(5)D(5)C(5)B(5)A(5)") + + class F(E): + def meth(self, a): + s = self.__super # == mysuper(F, self) + return "F(%r)[%s]" % (a, s.__class__.__name__) + s.meth(a) + F._F__super = mysuper(F) + + vereq(F().meth(6), "F(6)[mysuper]E(6)D(6)C(6)B(6)A(6)") + + # Make sure certain errors are raised + + try: + super(D, 42) + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow super(D, 42)" + + try: + super(D, C()) + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow super(D, C())" + + try: + super(D).__get__(12) + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow super(D).__get__(12)" + + try: + super(D).__get__(C()) + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow super(D).__get__(C())" + + # Make sure data descriptors can be overridden and accessed via super + # (new feature in Python 2.3) + + class DDbase(object): + def getx(self): return 42 + x = property(getx) + + class DDsub(DDbase): + def getx(self): return "hello" + x = property(getx) + + dd = DDsub() + vereq(dd.x, "hello") + vereq(super(DDsub, dd).x, 42) + + # Ensure that super() lookup of descriptor from classmethod + # works (SF ID# 743627) + + class Base(object): + aProp = property(lambda self: "foo") + + class Sub(Base): + @classmethod + def test(klass): + return super(Sub,klass).aProp + + veris(Sub.test(), Base.aProp) + + # Verify that super() doesn't allow keyword args + try: + super(Base, kw=1) + except TypeError: + pass + else: + raise TestFailed, "super shouldn't accept keyword args" + +def inherits(): + if verbose: print "Testing inheritance from basic types..." + + class hexint(int): + def __repr__(self): + return hex(self) + def __add__(self, other): + return hexint(int.__add__(self, other)) + # (Note that overriding __radd__ doesn't work, + # because the int type gets first dibs.) + vereq(repr(hexint(7) + 9), "0x10") + vereq(repr(hexint(1000) + 7), "0x3ef") + a = hexint(12345) + vereq(a, 12345) + vereq(int(a), 12345) + verify(int(a).__class__ is int) + vereq(hash(a), hash(12345)) + verify((+a).__class__ is int) + verify((a >> 0).__class__ is int) + verify((a << 0).__class__ is int) + verify((hexint(0) << 12).__class__ is int) + verify((hexint(0) >> 12).__class__ is int) + + class octlong(long): + __slots__ = [] + def __str__(self): + s = oct(self) + if s[-1] == 'L': + s = s[:-1] + return s + def __add__(self, other): + return self.__class__(super(octlong, self).__add__(other)) + __radd__ = __add__ + vereq(str(octlong(3) + 5), "010") + # (Note that overriding __radd__ here only seems to work + # because the example uses a short int left argument.) 
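The B/C/D/E chain in supers() above is easier to see in a stripped-down form: each method calls the next class along the instance's MRO, so a diamond visits every base exactly once. The class names below are illustrative, not from the test file.

    # Sketch: cooperative super() calls walk the MRO of the actual
    # instance, so the diamond below reports every class once.
    class Root(object):
        def describe(self):
            return "Root"

    class Left(Root):
        def describe(self):
            return "Left+" + super(Left, self).describe()

    class Right(Root):
        def describe(self):
            return "Right+" + super(Right, self).describe()

    class Both(Left, Right):
        def describe(self):
            return "Both+" + super(Both, self).describe()

    print Both().describe()   # Both+Left+Right+Root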
+ vereq(str(5 + octlong(3000)), "05675") + a = octlong(12345) + vereq(a, 12345L) + vereq(long(a), 12345L) + vereq(hash(a), hash(12345L)) + verify(long(a).__class__ is long) + verify((+a).__class__ is long) + verify((-a).__class__ is long) + verify((-octlong(0)).__class__ is long) + verify((a >> 0).__class__ is long) + verify((a << 0).__class__ is long) + verify((a - 0).__class__ is long) + verify((a * 1).__class__ is long) + verify((a ** 1).__class__ is long) + verify((a // 1).__class__ is long) + verify((1 * a).__class__ is long) + verify((a | 0).__class__ is long) + verify((a ^ 0).__class__ is long) + verify((a & -1L).__class__ is long) + verify((octlong(0) << 12).__class__ is long) + verify((octlong(0) >> 12).__class__ is long) + verify(abs(octlong(0)).__class__ is long) + + # Because octlong overrides __add__, we can't check the absence of +0 + # optimizations using octlong. + class longclone(long): + pass + a = longclone(1) + verify((a + 0).__class__ is long) + verify((0 + a).__class__ is long) + + # Check that negative clones don't segfault + a = longclone(-1) + vereq(a.__dict__, {}) + vereq(long(a), -1) # verify PyNumber_Long() copies the sign bit + + class precfloat(float): + __slots__ = ['prec'] + def __init__(self, value=0.0, prec=12): + self.prec = int(prec) + float.__init__(self, value) + def __repr__(self): + return "%.*g" % (self.prec, self) + vereq(repr(precfloat(1.1)), "1.1") + a = precfloat(12345) + vereq(a, 12345.0) + vereq(float(a), 12345.0) + verify(float(a).__class__ is float) + vereq(hash(a), hash(12345.0)) + verify((+a).__class__ is float) + + class madcomplex(complex): + def __repr__(self): + return "%.17gj%+.17g" % (self.imag, self.real) + a = madcomplex(-3, 4) + vereq(repr(a), "4j-3") + base = complex(-3, 4) + veris(base.__class__, complex) + vereq(a, base) + vereq(complex(a), base) + veris(complex(a).__class__, complex) + a = madcomplex(a) # just trying another form of the constructor + vereq(repr(a), "4j-3") + vereq(a, base) + vereq(complex(a), base) + veris(complex(a).__class__, complex) + vereq(hash(a), hash(base)) + veris((+a).__class__, complex) + veris((a + 0).__class__, complex) + vereq(a + 0, base) + veris((a - 0).__class__, complex) + vereq(a - 0, base) + veris((a * 1).__class__, complex) + vereq(a * 1, base) + veris((a / 1).__class__, complex) + vereq(a / 1, base) + + class madtuple(tuple): + _rev = None + def rev(self): + if self._rev is not None: + return self._rev + L = list(self) + L.reverse() + self._rev = self.__class__(L) + return self._rev + a = madtuple((1,2,3,4,5,6,7,8,9,0)) + vereq(a, (1,2,3,4,5,6,7,8,9,0)) + vereq(a.rev(), madtuple((0,9,8,7,6,5,4,3,2,1))) + vereq(a.rev().rev(), madtuple((1,2,3,4,5,6,7,8,9,0))) + for i in range(10): + t = madtuple(range(i)) + u = t.rev() + v = u.rev() + vereq(v, t) + a = madtuple((1,2,3,4,5)) + vereq(tuple(a), (1,2,3,4,5)) + verify(tuple(a).__class__ is tuple) + vereq(hash(a), hash((1,2,3,4,5))) + verify(a[:].__class__ is tuple) + verify((a * 1).__class__ is tuple) + verify((a * 0).__class__ is tuple) + verify((a + ()).__class__ is tuple) + a = madtuple(()) + vereq(tuple(a), ()) + verify(tuple(a).__class__ is tuple) + verify((a + a).__class__ is tuple) + verify((a * 0).__class__ is tuple) + verify((a * 1).__class__ is tuple) + verify((a * 2).__class__ is tuple) + verify(a[:].__class__ is tuple) + + class madstring(str): + _rev = None + def rev(self): + if self._rev is not None: + return self._rev + L = list(self) + L.reverse() + self._rev = self.__class__("".join(L)) + return self._rev + s = 
madstring("abcdefghijklmnopqrstuvwxyz") + vereq(s, "abcdefghijklmnopqrstuvwxyz") + vereq(s.rev(), madstring("zyxwvutsrqponmlkjihgfedcba")) + vereq(s.rev().rev(), madstring("abcdefghijklmnopqrstuvwxyz")) + for i in range(256): + s = madstring("".join(map(chr, range(i)))) + t = s.rev() + u = t.rev() + vereq(u, s) + s = madstring("12345") + vereq(str(s), "12345") + verify(str(s).__class__ is str) + + base = "\x00" * 5 + s = madstring(base) + vereq(s, base) + vereq(str(s), base) + verify(str(s).__class__ is str) + vereq(hash(s), hash(base)) + vereq({s: 1}[base], 1) + vereq({base: 1}[s], 1) + verify((s + "").__class__ is str) + vereq(s + "", base) + verify(("" + s).__class__ is str) + vereq("" + s, base) + verify((s * 0).__class__ is str) + vereq(s * 0, "") + verify((s * 1).__class__ is str) + vereq(s * 1, base) + verify((s * 2).__class__ is str) + vereq(s * 2, base + base) + verify(s[:].__class__ is str) + vereq(s[:], base) + verify(s[0:0].__class__ is str) + vereq(s[0:0], "") + verify(s.strip().__class__ is str) + vereq(s.strip(), base) + verify(s.lstrip().__class__ is str) + vereq(s.lstrip(), base) + verify(s.rstrip().__class__ is str) + vereq(s.rstrip(), base) + identitytab = ''.join([chr(i) for i in range(256)]) + verify(s.translate(identitytab).__class__ is str) + vereq(s.translate(identitytab), base) + verify(s.translate(identitytab, "x").__class__ is str) + vereq(s.translate(identitytab, "x"), base) + vereq(s.translate(identitytab, "\x00"), "") + verify(s.replace("x", "x").__class__ is str) + vereq(s.replace("x", "x"), base) + verify(s.ljust(len(s)).__class__ is str) + vereq(s.ljust(len(s)), base) + verify(s.rjust(len(s)).__class__ is str) + vereq(s.rjust(len(s)), base) + verify(s.center(len(s)).__class__ is str) + vereq(s.center(len(s)), base) + verify(s.lower().__class__ is str) + vereq(s.lower(), base) + + class madunicode(unicode): + _rev = None + def rev(self): + if self._rev is not None: + return self._rev + L = list(self) + L.reverse() + self._rev = self.__class__(u"".join(L)) + return self._rev + u = madunicode("ABCDEF") + vereq(u, u"ABCDEF") + vereq(u.rev(), madunicode(u"FEDCBA")) + vereq(u.rev().rev(), madunicode(u"ABCDEF")) + base = u"12345" + u = madunicode(base) + vereq(unicode(u), base) + verify(unicode(u).__class__ is unicode) + vereq(hash(u), hash(base)) + vereq({u: 1}[base], 1) + vereq({base: 1}[u], 1) + verify(u.strip().__class__ is unicode) + vereq(u.strip(), base) + verify(u.lstrip().__class__ is unicode) + vereq(u.lstrip(), base) + verify(u.rstrip().__class__ is unicode) + vereq(u.rstrip(), base) + verify(u.replace(u"x", u"x").__class__ is unicode) + vereq(u.replace(u"x", u"x"), base) + verify(u.replace(u"xy", u"xy").__class__ is unicode) + vereq(u.replace(u"xy", u"xy"), base) + verify(u.center(len(u)).__class__ is unicode) + vereq(u.center(len(u)), base) + verify(u.ljust(len(u)).__class__ is unicode) + vereq(u.ljust(len(u)), base) + verify(u.rjust(len(u)).__class__ is unicode) + vereq(u.rjust(len(u)), base) + verify(u.lower().__class__ is unicode) + vereq(u.lower(), base) + verify(u.upper().__class__ is unicode) + vereq(u.upper(), base) + verify(u.capitalize().__class__ is unicode) + vereq(u.capitalize(), base) + verify(u.title().__class__ is unicode) + vereq(u.title(), base) + verify((u + u"").__class__ is unicode) + vereq(u + u"", base) + verify((u"" + u).__class__ is unicode) + vereq(u"" + u, base) + verify((u * 0).__class__ is unicode) + vereq(u * 0, u"") + verify((u * 1).__class__ is unicode) + vereq(u * 1, base) + verify((u * 2).__class__ is unicode) + 
vereq(u * 2, base + base) + verify(u[:].__class__ is unicode) + vereq(u[:], base) + verify(u[0:0].__class__ is unicode) + vereq(u[0:0], u"") + + class sublist(list): + pass + a = sublist(range(5)) + vereq(a, range(5)) + a.append("hello") + vereq(a, range(5) + ["hello"]) + a[5] = 5 + vereq(a, range(6)) + a.extend(range(6, 20)) + vereq(a, range(20)) + a[-5:] = [] + vereq(a, range(15)) + del a[10:15] + vereq(len(a), 10) + vereq(a, range(10)) + vereq(list(a), range(10)) + vereq(a[0], 0) + vereq(a[9], 9) + vereq(a[-10], 0) + vereq(a[-1], 9) + vereq(a[:5], range(5)) + + class CountedInput(file): + """Counts lines read by self.readline(). + + self.lineno is the 0-based ordinal of the last line read, up to + a maximum of one greater than the number of lines in the file. + + self.ateof is true if and only if the final "" line has been read, + at which point self.lineno stops incrementing, and further calls + to readline() continue to return "". + """ + + lineno = 0 + ateof = 0 + def readline(self): + if self.ateof: + return "" + s = file.readline(self) + # Next line works too. + # s = super(CountedInput, self).readline() + self.lineno += 1 + if s == "": + self.ateof = 1 + return s + + f = file(name=TESTFN, mode='w') + lines = ['a\n', 'b\n', 'c\n'] + try: + f.writelines(lines) + f.close() + f = CountedInput(TESTFN) + for (i, expected) in zip(range(1, 5) + [4], lines + 2 * [""]): + got = f.readline() + vereq(expected, got) + vereq(f.lineno, i) + vereq(f.ateof, (i > len(lines))) + f.close() + finally: + try: + f.close() + except: + pass + try: + import os + os.unlink(TESTFN) + except: + pass + +def keywords(): + if verbose: + print "Testing keyword args to basic type constructors ..." + vereq(int(x=1), 1) + vereq(float(x=2), 2.0) + vereq(long(x=3), 3L) + vereq(complex(imag=42, real=666), complex(666, 42)) + vereq(str(object=500), '500') + vereq(unicode(string='abc', errors='strict'), u'abc') + vereq(tuple(sequence=range(3)), (0, 1, 2)) + vereq(list(sequence=(0, 1, 2)), range(3)) + # note: as of Python 2.3, dict() no longer has an "items" keyword arg + + for constructor in (int, float, long, complex, str, unicode, + tuple, list, file): + try: + constructor(bogus_keyword_arg=1) + except TypeError: + pass + else: + raise TestFailed("expected TypeError from bogus keyword " + "argument to %r" % constructor) + +def restricted(): + # XXX This test is disabled because rexec is not deemed safe + return + import rexec + if verbose: + print "Testing interaction with restricted execution ..." + + sandbox = rexec.RExec() + + code1 = """f = open(%r, 'w')""" % TESTFN + code2 = """f = file(%r, 'w')""" % TESTFN + code3 = """\ +f = open(%r) +t = type(f) # a sneaky way to get the file() constructor +f.close() +f = t(%r, 'w') # rexec can't catch this by itself +""" % (TESTFN, TESTFN) + + f = open(TESTFN, 'w') # Create the file so code3 can find it. + f.close() + + try: + for code in code1, code2, code3: + try: + sandbox.r_exec(code) + except IOError, msg: + if str(msg).find("restricted") >= 0: + outcome = "OK" + else: + outcome = "got an exception, but not an expected one" + else: + outcome = "expected a restricted-execution exception" + + if outcome != "OK": + raise TestFailed("%s, in %r" % (outcome, code)) + + finally: + try: + import os + os.unlink(TESTFN) + except: + pass + +def str_subclass_as_dict_key(): + if verbose: + print "Testing a str subclass used as dict key .." + + class cistr(str): + """Sublcass of str that computes __eq__ case-insensitively. + + Also computes a hash code of the string in canonical form. 
+ """ + + def __init__(self, value): + self.canonical = value.lower() + self.hashcode = hash(self.canonical) + + def __eq__(self, other): + if not isinstance(other, cistr): + other = cistr(other) + return self.canonical == other.canonical + + def __hash__(self): + return self.hashcode + + vereq(cistr('ABC'), 'abc') + vereq('aBc', cistr('ABC')) + vereq(str(cistr('ABC')), 'ABC') + + d = {cistr('one'): 1, cistr('two'): 2, cistr('tHree'): 3} + vereq(d[cistr('one')], 1) + vereq(d[cistr('tWo')], 2) + vereq(d[cistr('THrEE')], 3) + verify(cistr('ONe') in d) + vereq(d.get(cistr('thrEE')), 3) + +def classic_comparisons(): + if verbose: print "Testing classic comparisons..." + class classic: + pass + for base in (classic, int, object): + if verbose: print " (base = %s)" % base + class C(base): + def __init__(self, value): + self.value = int(value) + def __cmp__(self, other): + if isinstance(other, C): + return cmp(self.value, other.value) + if isinstance(other, int) or isinstance(other, long): + return cmp(self.value, other) + return NotImplemented + c1 = C(1) + c2 = C(2) + c3 = C(3) + vereq(c1, 1) + c = {1: c1, 2: c2, 3: c3} + for x in 1, 2, 3: + for y in 1, 2, 3: + verify(cmp(c[x], c[y]) == cmp(x, y), "x=%d, y=%d" % (x, y)) + for op in "<", "<=", "==", "!=", ">", ">=": + verify(eval("c[x] %s c[y]" % op) == eval("x %s y" % op), + "x=%d, y=%d" % (x, y)) + verify(cmp(c[x], y) == cmp(x, y), "x=%d, y=%d" % (x, y)) + verify(cmp(x, c[y]) == cmp(x, y), "x=%d, y=%d" % (x, y)) + +def rich_comparisons(): + if verbose: + print "Testing rich comparisons..." + class Z(complex): + pass + z = Z(1) + vereq(z, 1+0j) + vereq(1+0j, z) + class ZZ(complex): + def __eq__(self, other): + try: + return abs(self - other) <= 1e-6 + except: + return NotImplemented + zz = ZZ(1.0000003) + vereq(zz, 1+0j) + vereq(1+0j, zz) + + class classic: + pass + for base in (classic, int, object, list): + if verbose: print " (base = %s)" % base + class C(base): + def __init__(self, value): + self.value = int(value) + def __cmp__(self, other): + raise TestFailed, "shouldn't call __cmp__" + def __eq__(self, other): + if isinstance(other, C): + return self.value == other.value + if isinstance(other, int) or isinstance(other, long): + return self.value == other + return NotImplemented + def __ne__(self, other): + if isinstance(other, C): + return self.value != other.value + if isinstance(other, int) or isinstance(other, long): + return self.value != other + return NotImplemented + def __lt__(self, other): + if isinstance(other, C): + return self.value < other.value + if isinstance(other, int) or isinstance(other, long): + return self.value < other + return NotImplemented + def __le__(self, other): + if isinstance(other, C): + return self.value <= other.value + if isinstance(other, int) or isinstance(other, long): + return self.value <= other + return NotImplemented + def __gt__(self, other): + if isinstance(other, C): + return self.value > other.value + if isinstance(other, int) or isinstance(other, long): + return self.value > other + return NotImplemented + def __ge__(self, other): + if isinstance(other, C): + return self.value >= other.value + if isinstance(other, int) or isinstance(other, long): + return self.value >= other + return NotImplemented + c1 = C(1) + c2 = C(2) + c3 = C(3) + vereq(c1, 1) + c = {1: c1, 2: c2, 3: c3} + for x in 1, 2, 3: + for y in 1, 2, 3: + for op in "<", "<=", "==", "!=", ">", ">=": + verify(eval("c[x] %s c[y]" % op) == eval("x %s y" % op), + "x=%d, y=%d" % (x, y)) + verify(eval("c[x] %s y" % op) == eval("x %s y" 
% op), + "x=%d, y=%d" % (x, y)) + verify(eval("x %s c[y]" % op) == eval("x %s y" % op), + "x=%d, y=%d" % (x, y)) + +def coercions(): + if verbose: print "Testing coercions..." + class I(int): pass + coerce(I(0), 0) + coerce(0, I(0)) + class L(long): pass + coerce(L(0), 0) + coerce(L(0), 0L) + coerce(0, L(0)) + coerce(0L, L(0)) + class F(float): pass + coerce(F(0), 0) + coerce(F(0), 0L) + coerce(F(0), 0.) + coerce(0, F(0)) + coerce(0L, F(0)) + coerce(0., F(0)) + class C(complex): pass + coerce(C(0), 0) + coerce(C(0), 0L) + coerce(C(0), 0.) + coerce(C(0), 0j) + coerce(0, C(0)) + coerce(0L, C(0)) + coerce(0., C(0)) + coerce(0j, C(0)) + +def descrdoc(): + if verbose: print "Testing descriptor doc strings..." + def check(descr, what): + vereq(descr.__doc__, what) + check(file.closed, "True if the file is closed") # getset descriptor + check(file.name, "file name") # member descriptor + +def setclass(): + if verbose: print "Testing __class__ assignment..." + class C(object): pass + class D(object): pass + class E(object): pass + class F(D, E): pass + for cls in C, D, E, F: + for cls2 in C, D, E, F: + x = cls() + x.__class__ = cls2 + verify(x.__class__ is cls2) + x.__class__ = cls + verify(x.__class__ is cls) + def cant(x, C): + try: + x.__class__ = C + except TypeError: + pass + else: + raise TestFailed, "shouldn't allow %r.__class__ = %r" % (x, C) + try: + delattr(x, "__class__") + except (TypeError, AttributeError): + pass + else: + raise TestFailed, "shouldn't allow del %r.__class__" % x + cant(C(), list) + cant(list(), C) + cant(C(), 1) + cant(C(), object) + cant(object(), list) + cant(list(), object) + class Int(int): __slots__ = [] + cant(2, Int) + cant(Int(), int) + cant(True, int) + cant(2, bool) + o = object() + cant(o, type(1)) + cant(o, type(None)) + del o + +def setdict(): + if verbose: print "Testing __dict__ assignment..." + class C(object): pass + a = C() + a.__dict__ = {'b': 1} + vereq(a.b, 1) + def cant(x, dict): + try: + x.__dict__ = dict + except (AttributeError, TypeError): + pass + else: + raise TestFailed, "shouldn't allow %r.__dict__ = %r" % (x, dict) + cant(a, None) + cant(a, []) + cant(a, 1) + del a.__dict__ # Deleting __dict__ is allowed + # Classes don't allow __dict__ assignment + cant(C, {}) + +def pickles(): + if verbose: + print "Testing pickling and copying new-style classes and objects..." 
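setclass() above checks both directions: __class__ assignment works between classes whose instances share the same layout and is refused otherwise. A compact sketch with illustrative names.

    # Sketch: __class__ assignment between layout-compatible classes; an
    # incompatible target is rejected with TypeError.
    class Red(object):
        def colour(self):
            return "red"

    class Blue(object):
        def colour(self):
            return "blue"

    x = Red()
    x.__class__ = Blue
    print x.colour()        # blue

    try:
        x.__class__ = int   # different instance layout
    except TypeError:
        print "rejected"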
+ import pickle, cPickle + + def sorteditems(d): + L = d.items() + L.sort() + return L + + global C + class C(object): + def __init__(self, a, b): + super(C, self).__init__() + self.a = a + self.b = b + def __repr__(self): + return "C(%r, %r)" % (self.a, self.b) + + global C1 + class C1(list): + def __new__(cls, a, b): + return super(C1, cls).__new__(cls) + def __getnewargs__(self): + return (self.a, self.b) + def __init__(self, a, b): + self.a = a + self.b = b + def __repr__(self): + return "C1(%r, %r)<%r>" % (self.a, self.b, list(self)) + + global C2 + class C2(int): + def __new__(cls, a, b, val=0): + return super(C2, cls).__new__(cls, val) + def __getnewargs__(self): + return (self.a, self.b, int(self)) + def __init__(self, a, b, val=0): + self.a = a + self.b = b + def __repr__(self): + return "C2(%r, %r)<%r>" % (self.a, self.b, int(self)) + + global C3 + class C3(object): + def __init__(self, foo): + self.foo = foo + def __getstate__(self): + return self.foo + def __setstate__(self, foo): + self.foo = foo + + global C4classic, C4 + class C4classic: # classic + pass + class C4(C4classic, object): # mixed inheritance + pass + + for p in pickle, cPickle: + for bin in 0, 1: + if verbose: + print p.__name__, ["text", "binary"][bin] + + for cls in C, C1, C2: + s = p.dumps(cls, bin) + cls2 = p.loads(s) + verify(cls2 is cls) + + a = C1(1, 2); a.append(42); a.append(24) + b = C2("hello", "world", 42) + s = p.dumps((a, b), bin) + x, y = p.loads(s) + vereq(x.__class__, a.__class__) + vereq(sorteditems(x.__dict__), sorteditems(a.__dict__)) + vereq(y.__class__, b.__class__) + vereq(sorteditems(y.__dict__), sorteditems(b.__dict__)) + vereq(repr(x), repr(a)) + vereq(repr(y), repr(b)) + if verbose: + print "a = x =", a + print "b = y =", b + # Test for __getstate__ and __setstate__ on new style class + u = C3(42) + s = p.dumps(u, bin) + v = p.loads(s) + veris(u.__class__, v.__class__) + vereq(u.foo, v.foo) + # Test for picklability of hybrid class + u = C4() + u.foo = 42 + s = p.dumps(u, bin) + v = p.loads(s) + veris(u.__class__, v.__class__) + vereq(u.foo, v.foo) + + # Testing copy.deepcopy() + if verbose: + print "deepcopy" + import copy + for cls in C, C1, C2: + cls2 = copy.deepcopy(cls) + verify(cls2 is cls) + + a = C1(1, 2); a.append(42); a.append(24) + b = C2("hello", "world", 42) + x, y = copy.deepcopy((a, b)) + vereq(x.__class__, a.__class__) + vereq(sorteditems(x.__dict__), sorteditems(a.__dict__)) + vereq(y.__class__, b.__class__) + vereq(sorteditems(y.__dict__), sorteditems(b.__dict__)) + vereq(repr(x), repr(a)) + vereq(repr(y), repr(b)) + if verbose: + print "a = x =", a + print "b = y =", b + +def pickleslots(): + if verbose: print "Testing pickling of classes with __slots__ ..." 
+ import pickle, cPickle + # Pickling of classes with __slots__ but without __getstate__ should fail + global B, C, D, E + class B(object): + pass + for base in [object, B]: + class C(base): + __slots__ = ['a'] + class D(C): + pass + try: + pickle.dumps(C()) + except TypeError: + pass + else: + raise TestFailed, "should fail: pickle C instance - %s" % base + try: + cPickle.dumps(C()) + except TypeError: + pass + else: + raise TestFailed, "should fail: cPickle C instance - %s" % base + try: + pickle.dumps(C()) + except TypeError: + pass + else: + raise TestFailed, "should fail: pickle D instance - %s" % base + try: + cPickle.dumps(D()) + except TypeError: + pass + else: + raise TestFailed, "should fail: cPickle D instance - %s" % base + # Give C a nice generic __getstate__ and __setstate__ + class C(base): + __slots__ = ['a'] + def __getstate__(self): + try: + d = self.__dict__.copy() + except AttributeError: + d = {} + for cls in self.__class__.__mro__: + for sn in cls.__dict__.get('__slots__', ()): + try: + d[sn] = getattr(self, sn) + except AttributeError: + pass + return d + def __setstate__(self, d): + for k, v in d.items(): + setattr(self, k, v) + class D(C): + pass + # Now it should work + x = C() + y = pickle.loads(pickle.dumps(x)) + vereq(hasattr(y, 'a'), 0) + y = cPickle.loads(cPickle.dumps(x)) + vereq(hasattr(y, 'a'), 0) + x.a = 42 + y = pickle.loads(pickle.dumps(x)) + vereq(y.a, 42) + y = cPickle.loads(cPickle.dumps(x)) + vereq(y.a, 42) + x = D() + x.a = 42 + x.b = 100 + y = pickle.loads(pickle.dumps(x)) + vereq(y.a + y.b, 142) + y = cPickle.loads(cPickle.dumps(x)) + vereq(y.a + y.b, 142) + # A subclass that adds a slot should also work + class E(C): + __slots__ = ['b'] + x = E() + x.a = 42 + x.b = "foo" + y = pickle.loads(pickle.dumps(x)) + vereq(y.a, x.a) + vereq(y.b, x.b) + y = cPickle.loads(cPickle.dumps(x)) + vereq(y.a, x.a) + vereq(y.b, x.b) + +def copies(): + if verbose: print "Testing copy.copy() and copy.deepcopy()..." + import copy + class C(object): + pass + + a = C() + a.foo = 12 + b = copy.copy(a) + vereq(b.__dict__, a.__dict__) + + a.bar = [1,2,3] + c = copy.copy(a) + vereq(c.bar, a.bar) + verify(c.bar is a.bar) + + d = copy.deepcopy(a) + vereq(d.__dict__, a.__dict__) + a.bar.append(4) + vereq(d.bar, [1,2,3]) + +def binopoverride(): + if verbose: print "Testing overrides of binary operations..." + class I(int): + def __repr__(self): + return "I(%r)" % int(self) + def __add__(self, other): + return I(int(self) + int(other)) + __radd__ = __add__ + def __pow__(self, other, mod=None): + if mod is None: + return I(pow(int(self), int(other))) + else: + return I(pow(int(self), int(other), int(mod))) + def __rpow__(self, other, mod=None): + if mod is None: + return I(pow(int(other), int(self), mod)) + else: + return I(pow(int(other), int(self), int(mod))) + + vereq(repr(I(1) + I(2)), "I(3)") + vereq(repr(I(1) + 2), "I(3)") + vereq(repr(1 + I(2)), "I(3)") + vereq(repr(I(2) ** I(3)), "I(8)") + vereq(repr(2 ** I(3)), "I(8)") + vereq(repr(I(2) ** 3), "I(8)") + vereq(repr(pow(I(2), I(3), I(5))), "I(3)") + class S(str): + def __eq__(self, other): + return self.lower() == other.lower() + +def subclasspropagation(): + if verbose: print "Testing propagation of slot functions to subclasses..." 
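pickleslots() above first shows the failure and then installs a generic __getstate__/__setstate__ pair; the sketch below is the same idea boiled down to a two-slot class, with illustrative names and a tuple state instead of the dict used in the test.

    # Sketch: a slotted class has no __dict__, so it round-trips through
    # pickle only once __getstate__/__setstate__ describe its state.
    import pickle

    class Point(object):
        __slots__ = ['x', 'y']
        def __getstate__(self):
            return (self.x, self.y)
        def __setstate__(self, state):
            self.x, self.y = state

    p = Point()
    p.x, p.y = 3, 4
    q = pickle.loads(pickle.dumps(p))
    print q.x, q.y    # 3 4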
+ class A(object): + pass + class B(A): + pass + class C(A): + pass + class D(B, C): + pass + d = D() + orig_hash = hash(d) # related to id(d) in platform-dependent ways + A.__hash__ = lambda self: 42 + vereq(hash(d), 42) + C.__hash__ = lambda self: 314 + vereq(hash(d), 314) + B.__hash__ = lambda self: 144 + vereq(hash(d), 144) + D.__hash__ = lambda self: 100 + vereq(hash(d), 100) + del D.__hash__ + vereq(hash(d), 144) + del B.__hash__ + vereq(hash(d), 314) + del C.__hash__ + vereq(hash(d), 42) + del A.__hash__ + vereq(hash(d), orig_hash) + d.foo = 42 + d.bar = 42 + vereq(d.foo, 42) + vereq(d.bar, 42) + def __getattribute__(self, name): + if name == "foo": + return 24 + return object.__getattribute__(self, name) + A.__getattribute__ = __getattribute__ + vereq(d.foo, 24) + vereq(d.bar, 42) + def __getattr__(self, name): + if name in ("spam", "foo", "bar"): + return "hello" + raise AttributeError, name + B.__getattr__ = __getattr__ + vereq(d.spam, "hello") + vereq(d.foo, 24) + vereq(d.bar, 42) + del A.__getattribute__ + vereq(d.foo, 42) + del d.foo + vereq(d.foo, "hello") + vereq(d.bar, 42) + del B.__getattr__ + try: + d.foo + except AttributeError: + pass + else: + raise TestFailed, "d.foo should be undefined now" + + # Test a nasty bug in recurse_down_subclasses() + import gc + class A(object): + pass + class B(A): + pass + del B + gc.collect() + A.__setitem__ = lambda *a: None # crash + +def buffer_inherit(): + import binascii + # SF bug [#470040] ParseTuple t# vs subclasses. + if verbose: + print "Testing that buffer interface is inherited ..." + + class MyStr(str): + pass + base = 'abc' + m = MyStr(base) + # b2a_hex uses the buffer interface to get its argument's value, via + # PyArg_ParseTuple 't#' code. + vereq(binascii.b2a_hex(m), binascii.b2a_hex(base)) + + # It's not clear that unicode will continue to support the character + # buffer interface, and this test will fail if that's taken away. + class MyUni(unicode): + pass + base = u'abc' + m = MyUni(base) + vereq(binascii.b2a_hex(m), binascii.b2a_hex(base)) + + class MyInt(int): + pass + m = MyInt(42) + try: + binascii.b2a_hex(m) + raise TestFailed('subclass of int should not have a buffer interface') + except TypeError: + pass + +def str_of_str_subclass(): + import binascii + import cStringIO + + if verbose: + print "Testing __str__ defined in subclass of str ..." + + class octetstring(str): + def __str__(self): + return binascii.b2a_hex(self) + def __repr__(self): + return self + " repr" + + o = octetstring('A') + vereq(type(o), octetstring) + vereq(type(str(o)), str) + vereq(type(repr(o)), str) + vereq(ord(o), 0x41) + vereq(str(o), '41') + vereq(repr(o), 'A repr') + vereq(o.__str__(), '41') + vereq(o.__repr__(), 'A repr') + + capture = cStringIO.StringIO() + # Calling str() or not exercises different internal paths. + print >> capture, o + print >> capture, str(o) + vereq(capture.getvalue(), '41\n41\n') + capture.close() + +def kwdargs(): + if verbose: print "Testing keyword arguments to __init__, __call__..." + def f(a): return a + vereq(f.__call__(a=42), 42) + a = [] + list.__init__(a, sequence=[0, 1, 2]) + vereq(a, [0, 1, 2]) + +def recursive__call__(): + if verbose: print ("Testing recursive __call__() by setting to instance of " + "class ...") + class A(object): + pass + + A.__call__ = A() + try: + A()() + except RuntimeError: + pass + else: + raise TestFailed("Recursion limit should have been reached for " + "__call__()") + +def delhook(): + if verbose: print "Testing __del__ hook..." 
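+    # __del__ runs when the last reference to the instance goes away.  The
+    # check below assumes finalization happens as soon as `del c` drops that
+    # reference, which holds for a reference-counting GC; a deferred collector
+    # may run the hook later.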
+ log = [] + class C(object): + def __del__(self): + log.append(1) + c = C() + vereq(log, []) + del c + vereq(log, [1]) + + class D(object): pass + d = D() + try: del d[0] + except TypeError: pass + else: raise TestFailed, "invalid del() didn't raise TypeError" + +def hashinherit(): + if verbose: print "Testing hash of mutable subclasses..." + + class mydict(dict): + pass + d = mydict() + try: + hash(d) + except TypeError: + pass + else: + raise TestFailed, "hash() of dict subclass should fail" + + class mylist(list): + pass + d = mylist() + try: + hash(d) + except TypeError: + pass + else: + raise TestFailed, "hash() of list subclass should fail" + +def strops(): + try: 'a' + 5 + except TypeError: pass + else: raise TestFailed, "'' + 5 doesn't raise TypeError" + + try: ''.split('') + except ValueError: pass + else: raise TestFailed, "''.split('') doesn't raise ValueError" + + try: ''.join([0]) + except TypeError: pass + else: raise TestFailed, "''.join([0]) doesn't raise TypeError" + + try: ''.rindex('5') + except ValueError: pass + else: raise TestFailed, "''.rindex('5') doesn't raise ValueError" + + try: '%(n)s' % None + except TypeError: pass + else: raise TestFailed, "'%(n)s' % None doesn't raise TypeError" + + try: '%(n' % {} + except ValueError: pass + else: raise TestFailed, "'%(n' % {} '' doesn't raise ValueError" + + try: '%*s' % ('abc') + except TypeError: pass + else: raise TestFailed, "'%*s' % ('abc') doesn't raise TypeError" + + try: '%*.*s' % ('abc', 5) + except TypeError: pass + else: raise TestFailed, "'%*.*s' % ('abc', 5) doesn't raise TypeError" + + try: '%s' % (1, 2) + except TypeError: pass + else: raise TestFailed, "'%s' % (1, 2) doesn't raise TypeError" + + try: '%' % None + except ValueError: pass + else: raise TestFailed, "'%' % None doesn't raise ValueError" + + vereq('534253'.isdigit(), 1) + vereq('534253x'.isdigit(), 0) + vereq('%c' % 5, '\x05') + vereq('%c' % '5', '5') + +def deepcopyrecursive(): + if verbose: print "Testing deepcopy of recursive objects..." + class Node: + pass + a = Node() + b = Node() + a.b = b + b.a = a + z = deepcopy(a) # This blew up before + +def modules(): + if verbose: print "Testing uninitialized module objects..." + from types import ModuleType as M + m = M.__new__(M) + str(m) + vereq(hasattr(m, "__name__"), 0) + vereq(hasattr(m, "__file__"), 0) + vereq(hasattr(m, "foo"), 0) + vereq(bool(m.__dict__), False) + m.foo = 1 + vereq(m.__dict__, {"foo": 1}) + +def dictproxyiterkeys(): + class C(object): + def meth(self): + pass + if verbose: print "Testing dict-proxy iterkeys..." + keys = [ key for key in C.__dict__.iterkeys() ] + keys.sort() + vereq(keys, ['__dict__', '__doc__', '__module__', '__weakref__', 'meth']) + +def dictproxyitervalues(): + class C(object): + def meth(self): + pass + if verbose: print "Testing dict-proxy itervalues..." + values = [ values for values in C.__dict__.itervalues() ] + vereq(len(values), 5) + +def dictproxyiteritems(): + class C(object): + def meth(self): + pass + if verbose: print "Testing dict-proxy iteritems..." + keys = [ key for (key, value) in C.__dict__.iteritems() ] + keys.sort() + vereq(keys, ['__dict__', '__doc__', '__module__', '__weakref__', 'meth']) + +def funnynew(): + if verbose: print "Testing __new__ returning something unexpected..." 
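+    # type.__call__ only runs __init__ when the object returned by __new__ is
+    # an instance of the class being constructed: returning a plain list skips
+    # __init__ entirely, while returning an instance of the subclass D makes
+    # D.__init__ the initializer that runs.  The classes below exercise both
+    # cases.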
+ class C(object): + def __new__(cls, arg): + if isinstance(arg, str): return [1, 2, 3] + elif isinstance(arg, int): return object.__new__(D) + else: return object.__new__(cls) + class D(C): + def __init__(self, arg): + self.foo = arg + vereq(C("1"), [1, 2, 3]) + vereq(D("1"), [1, 2, 3]) + d = D(None) + veris(d.foo, None) + d = C(1) + vereq(isinstance(d, D), True) + vereq(d.foo, 1) + d = D(1) + vereq(isinstance(d, D), True) + vereq(d.foo, 1) + +def imulbug(): + # SF bug 544647 + if verbose: print "Testing for __imul__ problems..." + class C(object): + def __imul__(self, other): + return (self, other) + x = C() + y = x + y *= 1.0 + vereq(y, (x, 1.0)) + y = x + y *= 2 + vereq(y, (x, 2)) + y = x + y *= 3L + vereq(y, (x, 3L)) + y = x + y *= 1L<<100 + vereq(y, (x, 1L<<100)) + y = x + y *= None + vereq(y, (x, None)) + y = x + y *= "foo" + vereq(y, (x, "foo")) + +def docdescriptor(): + # SF bug 542984 + if verbose: print "Testing __doc__ descriptor..." + class DocDescr(object): + def __get__(self, object, otype): + if object: + object = object.__class__.__name__ + ' instance' + if otype: + otype = otype.__name__ + return 'object=%s; type=%s' % (object, otype) + class OldClass: + __doc__ = DocDescr() + class NewClass(object): + __doc__ = DocDescr() + vereq(OldClass.__doc__, 'object=None; type=OldClass') + vereq(OldClass().__doc__, 'object=OldClass instance; type=OldClass') + vereq(NewClass.__doc__, 'object=None; type=NewClass') + vereq(NewClass().__doc__, 'object=NewClass instance; type=NewClass') + +def copy_setstate(): + if verbose: + print "Testing that copy.*copy() correctly uses __setstate__..." + import copy + class C(object): + def __init__(self, foo=None): + self.foo = foo + self.__foo = foo + def setfoo(self, foo=None): + self.foo = foo + def getfoo(self): + return self.__foo + def __getstate__(self): + return [self.foo] + def __setstate__(self, lst): + assert len(lst) == 1 + self.__foo = self.foo = lst[0] + a = C(42) + a.setfoo(24) + vereq(a.foo, 24) + vereq(a.getfoo(), 42) + b = copy.copy(a) + vereq(b.foo, 24) + vereq(b.getfoo(), 24) + b = copy.deepcopy(a) + vereq(b.foo, 24) + vereq(b.getfoo(), 24) + +def slices(): + if verbose: + print "Testing cases with slices and overridden __getitem__ ..." 
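+    # x[i:j] and x[slice(i, j)] should behave the same way, both for the
+    # built-in sequence types and for subclasses that route everything through
+    # an overridden __getitem__/__setitem__.  Each group below compares the
+    # slicing syntax, an explicit slice object, and a direct call to the
+    # special method.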
+ # Strings + vereq("hello"[:4], "hell") + vereq("hello"[slice(4)], "hell") + vereq(str.__getitem__("hello", slice(4)), "hell") + class S(str): + def __getitem__(self, x): + return str.__getitem__(self, x) + vereq(S("hello")[:4], "hell") + vereq(S("hello")[slice(4)], "hell") + vereq(S("hello").__getitem__(slice(4)), "hell") + # Tuples + vereq((1,2,3)[:2], (1,2)) + vereq((1,2,3)[slice(2)], (1,2)) + vereq(tuple.__getitem__((1,2,3), slice(2)), (1,2)) + class T(tuple): + def __getitem__(self, x): + return tuple.__getitem__(self, x) + vereq(T((1,2,3))[:2], (1,2)) + vereq(T((1,2,3))[slice(2)], (1,2)) + vereq(T((1,2,3)).__getitem__(slice(2)), (1,2)) + # Lists + vereq([1,2,3][:2], [1,2]) + vereq([1,2,3][slice(2)], [1,2]) + vereq(list.__getitem__([1,2,3], slice(2)), [1,2]) + class L(list): + def __getitem__(self, x): + return list.__getitem__(self, x) + vereq(L([1,2,3])[:2], [1,2]) + vereq(L([1,2,3])[slice(2)], [1,2]) + vereq(L([1,2,3]).__getitem__(slice(2)), [1,2]) + # Now do lists and __setitem__ + a = L([1,2,3]) + a[slice(1, 3)] = [3,2] + vereq(a, [1,3,2]) + a[slice(0, 2, 1)] = [3,1] + vereq(a, [3,1,2]) + a.__setitem__(slice(1, 3), [2,1]) + vereq(a, [3,2,1]) + a.__setitem__(slice(0, 2, 1), [2,3]) + vereq(a, [2,3,1]) + +def subtype_resurrection(): + if verbose: + print "Testing resurrection of new-style instance..." + + class C(object): + container = [] + + def __del__(self): + # resurrect the instance + C.container.append(self) + + c = C() + c.attr = 42 + # The most interesting thing here is whether this blows up, due to flawed + # GC tracking logic in typeobject.c's call_finalizer() (a 2.2.1 bug). + del c + gc.collect() + gc.collect() + gc.collect() + # If that didn't blow up, it's also interesting to see whether clearing + # the last container slot works: that will attempt to delete c again, + # which will cause c to get appended back to the container again "during" + # the del. + del C.container[-1] + gc.collect() + gc.collect() + gc.collect() + vereq(len(C.container), 1) + vereq(C.container[-1].attr, 42) + + # Make c mortal again, so that the test framework with -l doesn't report + # it as a leak. + del C.__del__ + +def slottrash(): + # Deallocating deeply nested slotted trash caused stack overflows + if verbose: + print "Testing slot trash..." + class trash(object): + __slots__ = ['x'] + def __init__(self, x): + self.x = x + o = None + for i in xrange(50000): + o = trash(o) + del o + +def slotmultipleinheritance(): + # SF bug 575229, multiple inheritance w/ slots dumps core + class A(object): + __slots__=() + class B(object): + pass + class C(A,B) : + __slots__=() + # No __basicsize__ in PyPy + # vereq(C.__basicsize__, B.__basicsize__) + verify(hasattr(C, '__dict__')) + verify(hasattr(C, '__weakref__')) + C().x = 2 + +def testrmul(): + # SF patch 592646 + if verbose: + print "Testing correct invocation of __rmul__..." + class C(object): + def __mul__(self, other): + return "mul" + def __rmul__(self, other): + return "rmul" + a = C() + vereq(a*2, "mul") + vereq(a*2.2, "mul") + vereq(2*a, "rmul") + vereq(2.2*a, "rmul") + +def testipow(): + # [SF bug 620179] + if verbose: + print "Testing correct invocation of __ipow__..." + class C(object): + def __ipow__(self, other): + pass + a = C() + a **= 2 + +def do_this_first(): + if verbose: + print "Testing SF bug 551412 ..." + # This dumps core when SF bug 551412 isn't fixed -- + # but only when test_descr.py is run separately. + # (That can't be helped -- as soon as PyType_Ready() + # is called for PyLong_Type, the bug is gone.) 
+ class UserLong(object): + def __pow__(self, *args): + pass + try: + pow(0L, UserLong(), 0L) + except: + pass + + if verbose: + print "Testing SF bug 570483..." + # Another segfault only when run early + # (before PyType_Ready(tuple) is called) + type.mro(tuple) + +def test_mutable_bases(): + if verbose: + print "Testing mutable bases..." + # stuff that should work: + class C(object): + pass + class C2(object): + def __getattribute__(self, attr): + if attr == 'a': + return 2 + else: + return super(C2, self).__getattribute__(attr) + def meth(self): + return 1 + class D(C): + pass + class E(D): + pass + d = D() + e = E() + D.__bases__ = (C,) + D.__bases__ = (C2,) + vereq(d.meth(), 1) + vereq(e.meth(), 1) + vereq(d.a, 2) + vereq(e.a, 2) + vereq(C2.__subclasses__(), [D]) + + # stuff that shouldn't: + class L(list): + pass + + try: + L.__bases__ = (dict,) + except TypeError: + pass + else: + raise TestFailed, "shouldn't turn list subclass into dict subclass" + + try: + list.__bases__ = (dict,) + except TypeError: + pass + else: + raise TestFailed, "shouldn't be able to assign to list.__bases__" + + try: + del D.__bases__ + except (AttributeError, TypeError): + pass + else: + raise TestFailed, "shouldn't be able to delete .__bases__" + + try: + D.__bases__ = () + except TypeError, msg: + if str(msg) == "a new-style class can't have only classic bases": + raise TestFailed, "wrong error message for .__bases__ = ()" + else: + raise TestFailed, "shouldn't be able to set .__bases__ to ()" + + try: + D.__bases__ = (D,) + except TypeError: + pass + else: + # actually, we'll have crashed by here... + raise TestFailed, "shouldn't be able to create inheritance cycles" + + try: + D.__bases__ = (C, C) + except TypeError: + pass + else: + raise TestFailed, "didn't detect repeated base classes" + + try: + D.__bases__ = (E,) + except TypeError: + pass + else: + raise TestFailed, "shouldn't be able to create inheritance cycles" + + # let's throw a classic class into the mix: + class Classic: + def meth2(self): + return 3 + + D.__bases__ = (C, Classic) + + vereq(d.meth2(), 3) + vereq(e.meth2(), 3) + try: + d.a + except AttributeError: + pass + else: + raise TestFailed, "attribute should have vanished" + + try: + D.__bases__ = (Classic,) + except TypeError: + pass + else: + raise TestFailed, "new-style class must have a new-style base" + +def test_mutable_bases_with_failing_mro(): + if verbose: + print "Testing mutable bases with failing mro..." + class WorkOnce(type): + def __new__(self, name, bases, ns): + self.flag = 0 + return super(WorkOnce, self).__new__(WorkOnce, name, bases, ns) + def mro(self): + if self.flag > 0: + raise RuntimeError, "bozo" + else: + self.flag += 1 + return type.mro(self) + + class WorkAlways(type): + def mro(self): + # this is here to make sure that .mro()s aren't called + # with an exception set (which was possible at one point). + # An error message will be printed in a debug build. + # What's a good way to test for this? + return type.mro(self) + + class C(object): + pass + + class C2(object): + pass + + class D(C): + pass + + class E(D): + pass + + class F(D): + __metaclass__ = WorkOnce + + class G(D): + __metaclass__ = WorkAlways + + # Immediate subclasses have their mro's adjusted in alphabetical + # order, so E's will get adjusted before adjusting F's fails. We + # check here that E's gets restored. 
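+    # (WorkOnce allows exactly one successful mro() call, the one made when F
+    # is created, so the recomputation triggered by assigning D.__bases__
+    # raises after E's mro has already been updated; the assertions below
+    # check that E's mro is rolled back.)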
+ + E_mro_before = E.__mro__ + D_mro_before = D.__mro__ + + try: + D.__bases__ = (C2,) + except RuntimeError: + vereq(E.__mro__, E_mro_before) + vereq(D.__mro__, D_mro_before) + else: + raise TestFailed, "exception not propagated" + +def test_mutable_bases_catch_mro_conflict(): + if verbose: + print "Testing mutable bases catch mro conflict..." + class A(object): + pass + + class B(object): + pass + + class C(A, B): + pass + + class D(A, B): + pass + + class E(C, D): + pass + + try: + C.__bases__ = (B, A) + except TypeError: + pass + else: + raise TestFailed, "didn't catch MRO conflict" + +def mutable_names(): + if verbose: + print "Testing mutable names..." + class C(object): + pass + + # C.__module__ could be 'test_descr' or '__main__' + mod = C.__module__ + + C.__name__ = 'D' + vereq((C.__module__, C.__name__), (mod, 'D')) + + C.__name__ = 'D.E' + vereq((C.__module__, C.__name__), (mod, 'D.E')) + +def subclass_right_op(): + if verbose: + print "Testing correct dispatch of subclass overloading __r__..." + + # This code tests various cases where right-dispatch of a subclass + # should be preferred over left-dispatch of a base class. + + # Case 1: subclass of int; this tests code in abstract.c::binary_op1() + + class B(int): + def __floordiv__(self, other): + return "B.__floordiv__" + def __rfloordiv__(self, other): + return "B.__rfloordiv__" + + vereq(B(1) // 1, "B.__floordiv__") + vereq(1 // B(1), "B.__rfloordiv__") + + # Case 2: subclass of object; this is just the baseline for case 3 + + class C(object): + def __floordiv__(self, other): + return "C.__floordiv__" + def __rfloordiv__(self, other): + return "C.__rfloordiv__" + + vereq(C() // 1, "C.__floordiv__") + vereq(1 // C(), "C.__rfloordiv__") + + # Case 3: subclass of new-style class; here it gets interesting + + class D(C): + def __floordiv__(self, other): + return "D.__floordiv__" + def __rfloordiv__(self, other): + return "D.__rfloordiv__" + + vereq(D() // C(), "D.__floordiv__") + vereq(C() // D(), "D.__rfloordiv__") + + # Case 4: this didn't work right in 2.2.2 and 2.3a1 + + class E(C): + pass + + vereq(E.__rfloordiv__, C.__rfloordiv__) + + vereq(E() // 1, "C.__floordiv__") + vereq(1 // E(), "C.__rfloordiv__") + vereq(E() // C(), "C.__floordiv__") + vereq(C() // E(), "C.__floordiv__") # This one would fail + +def dict_type_with_metaclass(): + if verbose: + print "Testing type of __dict__ when __metaclass__ set..." + + class B(object): + pass + class M(type): + pass + class C: + # In 2.3a1, C.__dict__ was a real dict rather than a dict proxy + __metaclass__ = M + veris(type(C.__dict__), type(B.__dict__)) + +def meth_class_get(): + # Full coverage of descrobject.c::classmethod_get() + if verbose: + print "Testing __get__ method of METH_CLASS C methods..." 
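+    # dict.fromkeys is a classmethod written in C (METH_CLASS), so looking it
+    # up goes through the classmethod descriptor's __get__.  The error cases
+    # below call that __get__ directly with arguments that are not a dict
+    # (sub)class, or not a class at all, and expect TypeError.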
+ # Baseline + arg = [1, 2, 3] + res = {1: None, 2: None, 3: None} + vereq(dict.fromkeys(arg), res) + vereq({}.fromkeys(arg), res) + # Now get the descriptor + descr = dict.__dict__["fromkeys"] + # More baseline using the descriptor directly + vereq(descr.__get__(None, dict)(arg), res) + vereq(descr.__get__({})(arg), res) + # Now check various error cases + try: + descr.__get__(None, None) + except TypeError: + pass + else: + raise TestFailed, "shouldn't have allowed descr.__get__(None, None)" + try: + descr.__get__(42) + except TypeError: + pass + else: + raise TestFailed, "shouldn't have allowed descr.__get__(42)" + try: + descr.__get__(None, 42) + except TypeError: + pass + else: + raise TestFailed, "shouldn't have allowed descr.__get__(None, 42)" + try: + descr.__get__(None, int) + except TypeError: + pass + else: + raise TestFailed, "shouldn't have allowed descr.__get__(None, int)" + +def isinst_isclass(): + if verbose: + print "Testing proxy isinstance() and isclass()..." + class Proxy(object): + def __init__(self, obj): + self.__obj = obj + def __getattribute__(self, name): + if name.startswith("_Proxy__"): + return object.__getattribute__(self, name) + else: + return getattr(self.__obj, name) + # Test with a classic class + class C: + pass + a = C() + pa = Proxy(a) + verify(isinstance(a, C)) # Baseline + verify(isinstance(pa, C)) # Test + # Test with a classic subclass + class D(C): + pass + a = D() + pa = Proxy(a) + verify(isinstance(a, C)) # Baseline + verify(isinstance(pa, C)) # Test + # Test with a new-style class + class C(object): + pass + a = C() + pa = Proxy(a) + verify(isinstance(a, C)) # Baseline + verify(isinstance(pa, C)) # Test + # Test with a new-style subclass + class D(C): + pass + a = D() + pa = Proxy(a) + verify(isinstance(a, C)) # Baseline + verify(isinstance(pa, C)) # Test + +def proxysuper(): + if verbose: + print "Testing super() for a proxy object..." + class Proxy(object): + def __init__(self, obj): + self.__obj = obj + def __getattribute__(self, name): + if name.startswith("_Proxy__"): + return object.__getattribute__(self, name) + else: + return getattr(self.__obj, name) + + class B(object): + def f(self): + return "B.f" + + class C(B): + def f(self): + return super(C, self).f() + "->C.f" + + obj = C() + p = Proxy(obj) + vereq(C.__dict__["f"](p), "B.f->C.f") + +def carloverre(): + if verbose: + print "Testing prohibition of Carlo Verre's hack..." + try: + object.__setattr__(str, "foo", 42) + except TypeError: + pass + else: + raise TestFailed, "Carlo Verre __setattr__ suceeded!" + try: + object.__delattr__(str, "lower") + except TypeError: + pass + else: + raise TestFailed, "Carlo Verre __delattr__ succeeded!" + +def weakref_segfault(): + # SF 742911 + if verbose: + print "Testing weakref segfault..." + + import weakref + + class Provoker: + def __init__(self, referrent): + self.ref = weakref.ref(referrent) + + def __del__(self): + x = self.ref() + + class Oops(object): + pass + + o = Oops() + o.whatever = Provoker(o) + del o + +def wrapper_segfault(): + # SF 927248: deeply nested wrappers could cause stack overflow + f = lambda:None + for i in xrange(1000000): + f = f.__call__ + f = None + +# Fix SF #762455, segfault when sys.stdout is changed in getattr +def filefault(): + if verbose: + print "Testing sys.stdout is changed in getattr..." + import sys + class StdoutGuard: + def __getattr__(self, attr): + sys.stdout = sys.__stdout__ + raise RuntimeError("Premature access to sys.stdout.%s" % attr) + sys.stdout = StdoutGuard() + try: + print "Oops!" 
+ except RuntimeError: + pass + +def vicious_descriptor_nonsense(): + # A potential segfault spotted by Thomas Wouters in mail to + # python-dev 2003-04-17, turned into an example & fixed by Michael + # Hudson just less than four months later... + if verbose: + print "Testing vicious_descriptor_nonsense..." + + class Evil(object): + def __hash__(self): + return hash('attr') + def __eq__(self, other): + del C.attr + return 0 + + class Descr(object): + def __get__(self, ob, type=None): + return 1 + + class C(object): + attr = Descr() + + c = C() + c.__dict__[Evil()] = 0 + + vereq(c.attr, 1) + # this makes a crash more likely: + gc.collect() + vereq(hasattr(c, 'attr'), False) + +def test_init(): + # SF 1155938 + class Foo(object): + def __init__(self): + return 10 + try: + Foo() + except TypeError: + pass + else: + raise TestFailed, "did not test __init__() for None return" + +def methodwrapper(): + # did not support any reflection before 2.5 + if verbose: + print "Testing method-wrapper objects..." + + l = [] + vereq(l.__add__, l.__add__) + vereq(l.__add__, [].__add__) + verify(l.__add__ != [5].__add__) + verify(l.__add__ != l.__mul__) + verify(l.__add__.__name__ == '__add__') + verify(l.__add__.__self__ is l) + verify(l.__add__.__objclass__ is list) + vereq(l.__add__.__doc__, list.__add__.__doc__) + try: + hash(l.__add__) + except TypeError: + pass + else: + raise TestFailed("no TypeError from hash([].__add__)") + + t = () + t += (7,) + vereq(t.__add__, (7,).__add__) + vereq(hash(t.__add__), hash((7,).__add__)) + +def notimplemented(): + # all binary methods should be able to return a NotImplemented + if verbose: + print "Testing NotImplemented..." + + import sys + import types + import operator + + def specialmethod(self, other): + return NotImplemented + + def check(expr, x, y): + try: + exec expr in {'x': x, 'y': y, 'operator': operator} + except TypeError: + pass + else: + raise TestFailed("no TypeError from %r" % (expr,)) + + N1 = sys.maxint + 1L # might trigger OverflowErrors instead of TypeErrors + N2 = sys.maxint # if sizeof(int) < sizeof(long), might trigger + # ValueErrors instead of TypeErrors + for metaclass in [type, types.ClassType]: + for name, expr, iexpr in [ + ('__add__', 'x + y', 'x += y'), + ('__sub__', 'x - y', 'x -= y'), + ('__mul__', 'x * y', 'x *= y'), + ('__truediv__', 'operator.truediv(x, y)', None), + ('__floordiv__', 'operator.floordiv(x, y)', None), + ('__div__', 'x / y', 'x /= y'), + ('__mod__', 'x % y', 'x %= y'), + ('__divmod__', 'divmod(x, y)', None), + ('__pow__', 'x ** y', 'x **= y'), + ('__lshift__', 'x << y', 'x <<= y'), + ('__rshift__', 'x >> y', 'x >>= y'), + ('__and__', 'x & y', 'x &= y'), + ('__or__', 'x | y', 'x |= y'), + ('__xor__', 'x ^ y', 'x ^= y'), + ('__coerce__', 'coerce(x, y)', None)]: + if name == '__coerce__': + rname = name + else: + rname = '__r' + name[2:] + A = metaclass('A', (), {name: specialmethod}) + B = metaclass('B', (), {rname: specialmethod}) + a = A() + b = B() + check(expr, a, a) + check(expr, a, b) + check(expr, b, a) + check(expr, b, b) + check(expr, a, N1) + check(expr, a, N2) + check(expr, N1, b) + check(expr, N2, b) + if iexpr: + check(iexpr, a, a) + check(iexpr, a, b) + check(iexpr, b, a) + check(iexpr, b, b) + check(iexpr, a, N1) + check(iexpr, a, N2) + iname = '__i' + name[2:] + C = metaclass('C', (), {iname: specialmethod}) + c = C() + check(iexpr, c, a) + check(iexpr, c, b) + check(iexpr, c, N1) + check(iexpr, c, N2) + +def test_assign_slice(): + # ceval.c's assign_slice used to check for + # 
tp->tp_as_sequence->sq_slice instead of + # tp->tp_as_sequence->sq_ass_slice + + class C(object): + def __setslice__(self, start, stop, value): + self.value = value + + c = C() + c[1:2] = 3 + vereq(c.value, 3) + +def test_main(): + testfuncs = [ + weakref_segfault, # Must be first, somehow + wrapper_segfault, + do_this_first, + class_docstrings, + lists, + dicts, + dict_constructor, + test_dir, + ints, + longs, + floats, + complexes, + # spamlists, + # spamdicts, + pydicts, + pylists, + metaclass, + pymods, + multi, + mro_disagreement, + diamond, + ex5, + monotonicity, + consistency_with_epg, + objects, + slots, + slotspecials, + dynamics, + errors, + # classmethods, + # classmethods_in_c, + staticmethods, + staticmethods_in_c, + classic, + compattr, + newslot, + altmro, + overloading, + methods, + specials, + weakrefs, + properties, + supers, + inherits, + keywords, + restricted, + str_subclass_as_dict_key, + classic_comparisons, + rich_comparisons, + coercions, + descrdoc, + setclass, + setdict, + pickles, + copies, + binopoverride, + subclasspropagation, + buffer_inherit, + str_of_str_subclass, + kwdargs, + recursive__call__, + delhook, + hashinherit, + strops, + deepcopyrecursive, + modules, + dictproxyiterkeys, + dictproxyitervalues, + dictproxyiteritems, + pickleslots, + funnynew, + imulbug, + docdescriptor, + copy_setstate, + slices, + subtype_resurrection, + slottrash, + slotmultipleinheritance, + testrmul, + testipow, + test_mutable_bases, + test_mutable_bases_with_failing_mro, + test_mutable_bases_catch_mro_conflict, + mutable_names, + subclass_right_op, + dict_type_with_metaclass, + meth_class_get, + isinst_isclass, + proxysuper, + carloverre, + filefault, + vicious_descriptor_nonsense, + test_init, + methodwrapper, + notimplemented, + test_assign_slice, + ] + + n = len(testfuncs) + success = 0 + + for testfunc in testfuncs: + try: + print "*"*40 + testfunc() + except Exception, e: + if isinstance(e, KeyboardInterrupt): + raise + print "-->", testfunc.__name__, "FAILURE(%d/%d)" % (success, n), str(e) + else: + success += 1 + print "-->", testfunc.__name__, "OK(%d/%d)" % (success, n) + + if n != success: + raise TestFailed, "%d/%d" % (success, n) + else: + if verbose: print "All OK" + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,493 @@ +# This contains most of the executable examples from Guido's descr +# tutorial, once at +# +# http://www.python.org/2.2/descrintro.html +# +# A few examples left implicit in the writeup were fleshed out, a few were +# skipped due to lack of interest (e.g., faking super() by hand isn't +# of much interest anymore), and a few were fiddled to make the output +# deterministic. 
+ +from test.test_support import sortdict +import pprint + +class defaultdict(dict): + def __init__(self, default=None): + dict.__init__(self) + self.default = default + + def __getitem__(self, key): + try: + return dict.__getitem__(self, key) + except KeyError: + if key == '__builtins__': raise + return self.default + + def get(self, key, *args): + if not args and key != '__builtins__': + args = (self.default,) + return dict.get(self, key, *args) + + def merge(self, other): + for key in other: + if key not in self: + self[key] = other[key] + +test_1 = """ + +Here's the new type at work: + + >>> print defaultdict # show our type + + >>> print type(defaultdict) # its metatype + + >>> a = defaultdict(default=0.0) # create an instance + >>> print a # show the instance + {} + >>> print type(a) # show its type + + >>> print a.__class__ # show its class + + >>> print type(a) is a.__class__ # its type is its class + True + >>> a[1] = 3.25 # modify the instance + >>> print a # show the new value + {1: 3.25} + >>> print a[1] # show the new item + 3.25 + >>> print a[0] # a non-existant item + 0.0 + >>> a.merge({1:100, 2:200}) # use a dict method + >>> print sortdict(a) # show the result + {1: 3.25, 2: 200} + >>> + +We can also use the new type in contexts where classic only allows "real" +dictionaries, such as the locals/globals dictionaries for the exec +statement or the built-in function eval(): + + >>> def sorted(seq): + ... seq.sort() + ... return seq + >>> print sorted(a.keys()) + [1, 2] + >>> exec "x = 3; print x" in a + 3 + >>> print sorted([str(key) for key in a.keys()]) + ['1', '2', '__builtins__', 'x'] + >>> print a['x'] + 3 + >>> + +Now I'll show that defaultdict instances have dynamic instance variables, +just like classic classes: + + >>> a.default = -1 + >>> print a["noway"] + -1 + >>> a.default = -1000 + >>> print a["noway"] + -1000 + >>> 'default' in dir(a) + True + >>> a.x1 = 100 + >>> a.x2 = 200 + >>> print a.x1 + 100 + >>> d = dir(a) + >>> 'default' in d and 'x1' in d and 'x2' in d + True + >>> print sortdict(a.__dict__) + {'default': -1000, 'x1': 100, 'x2': 200} + >>> +""" + +class defaultdict2(dict): + __slots__ = ['default'] + + def __init__(self, default=None): + dict.__init__(self) + self.default = default + + def __getitem__(self, key): + try: + return dict.__getitem__(self, key) + except KeyError: + return self.default + + def get(self, key, *args): + if not args: + args = (self.default,) + return dict.get(self, key, *args) + + def merge(self, other): + for key in other: + if key not in self: + self[key] = other[key] + +test_2 = """ + +The __slots__ declaration takes a list of instance variables, and reserves +space for exactly these in the instance. When __slots__ is used, other +instance variables cannot be assigned to: + + >>> a = defaultdict2(default=0.0) + >>> a[1] + 0.0 + >>> a.default = -1 + >>> a[1] + -1 + >>> a.x1 = 1 + Traceback (most recent call last): + File "", line 1, in ? + AttributeError: 'defaultdict2' object has no attribute 'x1' + >>> + +""" + +test_3 = """ + +Introspecting instances of built-in types + +For instance of built-in types, x.__class__ is now the same as type(x): + + >>> type([]) + + >>> [].__class__ + + >>> list + + >>> isinstance([], list) + True + >>> isinstance([], dict) + False + >>> isinstance([], object) + True + >>> + +Under the new proposal, the __methods__ attribute no longer exists: + + >>> [].__methods__ + Traceback (most recent call last): + File "", line 1, in ? 
+ AttributeError: 'list' object has no attribute '__methods__' + >>> + +Instead, you can get the same information from the list type: + + >>> pprint.pprint(dir(list)) # like list.__dict__.keys(), but sorted + ['__add__', + '__class__', + '__contains__', + '__delattr__', + '__delitem__', + '__doc__', + '__eq__', + '__ge__', + '__getattribute__', + '__getitem__', + '__gt__', + '__hash__', + '__iadd__', + '__imul__', + '__init__', + '__iter__', + '__le__', + '__len__', + '__lt__', + '__mul__', + '__ne__', + '__new__', + '__radd__', + '__reduce__', + '__reduce_ex__', + '__repr__', + '__reversed__', + '__rmul__', + '__setattr__', + '__setitem__', + '__str__', + 'append', + 'count', + 'extend', + 'index', + 'insert', + 'pop', + 'remove', + 'reverse', + 'sort'] + +The new introspection API gives more information than the old one: in +addition to the regular methods, it also shows the methods that are +normally invoked through special notations, e.g. __iadd__ (+=), __len__ +(len), __ne__ (!=). You can invoke any method from this list directly: + + >>> a = ['tic', 'tac'] + >>> list.__len__(a) # same as len(a) + 2 + >>> a.__len__() # ditto + 2 + >>> list.append(a, 'toe') # same as a.append('toe') + >>> a + ['tic', 'tac', 'toe'] + >>> + +This is just like it is for user-defined classes. +""" + +test_4 = """ + +Static methods and class methods + +The new introspection API makes it possible to add static methods and class +methods. Static methods are easy to describe: they behave pretty much like +static methods in C++ or Java. Here's an example: + + >>> class C: + ... + ... @staticmethod + ... def foo(x, y): + ... print "staticmethod", x, y + + >>> C.foo(1, 2) + staticmethod 1 2 + >>> c = C() + >>> c.foo(1, 2) + staticmethod 1 2 + +Class methods use a similar pattern to declare methods that receive an +implicit first argument that is the *class* for which they are invoked. + + >>> class C: + ... @classmethod + ... def foo(cls, y): + ... print "classmethod", cls, y + + >>> C.foo(1) + classmethod test.test_descrtut.C 1 + >>> c = C() + >>> c.foo(1) + classmethod test.test_descrtut.C 1 + + >>> class D(C): + ... pass + + >>> D.foo(1) + classmethod test.test_descrtut.D 1 + >>> d = D() + >>> d.foo(1) + classmethod test.test_descrtut.D 1 + +This prints "classmethod __main__.D 1" both times; in other words, the +class passed as the first argument of foo() is the class involved in the +call, not the class involved in the definition of foo(). + +But notice this: + + >>> class E(C): + ... @classmethod + ... def foo(cls, y): # override C.foo + ... print "E.foo() called" + ... C.foo(y) + + >>> E.foo(1) + E.foo() called + classmethod test.test_descrtut.C 1 + >>> e = E() + >>> e.foo(1) + E.foo() called + classmethod test.test_descrtut.C 1 + +In this example, the call to C.foo() from E.foo() will see class C as its +first argument, not class E. This is to be expected, since the call +specifies the class C. But it stresses the difference between these class +methods and methods defined in metaclasses (where an upcall to a metamethod +would pass the target class as an explicit first argument). +""" + +test_5 = """ + +Attributes defined by get/set methods + + + >>> class property(object): + ... + ... def __init__(self, get, set=None): + ... self.__get = get + ... self.__set = set + ... + ... def __get__(self, inst, type=None): + ... return self.__get(inst) + ... + ... def __set__(self, inst, value): + ... if self.__set is None: + ... raise AttributeError, "this attribute is read-only" + ... 
return self.__set(inst, value) + +Now let's define a class with an attribute x defined by a pair of methods, +getx() and and setx(): + + >>> class C(object): + ... + ... def __init__(self): + ... self.__x = 0 + ... + ... def getx(self): + ... return self.__x + ... + ... def setx(self, x): + ... if x < 0: x = 0 + ... self.__x = x + ... + ... x = property(getx, setx) + +Here's a small demonstration: + + >>> a = C() + >>> a.x = 10 + >>> print a.x + 10 + >>> a.x = -10 + >>> print a.x + 0 + >>> + +Hmm -- property is builtin now, so let's try it that way too. + + >>> del property # unmask the builtin + >>> property + + + >>> class C(object): + ... def __init__(self): + ... self.__x = 0 + ... def getx(self): + ... return self.__x + ... def setx(self, x): + ... if x < 0: x = 0 + ... self.__x = x + ... x = property(getx, setx) + + + >>> a = C() + >>> a.x = 10 + >>> print a.x + 10 + >>> a.x = -10 + >>> print a.x + 0 + >>> +""" + +test_6 = """ + +Method resolution order + +This example is implicit in the writeup. + +>>> class A: # classic class +... def save(self): +... print "called A.save()" +>>> class B(A): +... pass +>>> class C(A): +... def save(self): +... print "called C.save()" +>>> class D(B, C): +... pass + +>>> D().save() +called A.save() + +>>> class A(object): # new class +... def save(self): +... print "called A.save()" +>>> class B(A): +... pass +>>> class C(A): +... def save(self): +... print "called C.save()" +>>> class D(B, C): +... pass + +>>> D().save() +called C.save() +""" + +class A(object): + def m(self): + return "A" + +class B(A): + def m(self): + return "B" + super(B, self).m() + +class C(A): + def m(self): + return "C" + super(C, self).m() + +class D(C, B): + def m(self): + return "D" + super(D, self).m() + + +test_7 = """ + +Cooperative methods and "super" + +>>> print D().m() # "DCBA" +DCBA +""" + +test_8 = """ + +Backwards incompatibilities + +>>> class A: +... def foo(self): +... print "called A.foo()" + +>>> class B(A): +... pass + +>>> class C(A): +... def foo(self): +... B.foo(self) + +>>> C().foo() +Traceback (most recent call last): + ... +TypeError: unbound method foo() must be called with B instance as first argument (got C instance instead) + +>>> class C(A): +... def foo(self): +... A.foo(self) +>>> C().foo() +called A.foo() +""" + +__test__ = {"tut1": test_1, + "tut2": test_2, + "tut3": test_3, + "tut4": test_4, + "tut5": test_5, + "tut6": test_6, + "tut7": test_7, + "tut8": test_8} + +# Magic test name that regrtest.py invokes *after* importing this module. +# This worms around a bootstrap problem. +# Note that doctest and regrtest both look in sys.argv for a "-v" argument, +# so this works as expected in both ways of running regrtest. +def test_main(verbose=None): + # Obscure: import this module as test.test_descrtut instead of as + # plain test_descrtut because the name of this module works its way + # into the doctest examples, and unless the full test.test_descrtut + # business is used the name can change depending on how the test is + # invoked. + from test import test_support, test_descrtut + test_support.run_doctest(test_descrtut, verbose) + +# This part isn't needed for regrtest, but for running the test directly. 
+if __name__ == "__main__": + test_main(1) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,477 @@ +import unittest +from test import test_support + +import sys, UserDict, cStringIO + + +class DictTest(unittest.TestCase): + def test_constructor(self): + # calling built-in types without argument must return empty + self.assertEqual(dict(), {}) + self.assert_(dict() is not {}) + + def test_bool(self): + self.assert_(not {}) + self.assert_({1: 2}) + self.assert_(bool({}) is False) + self.assert_(bool({1: 2}) is True) + + def test_keys(self): + d = {} + self.assertEqual(d.keys(), []) + d = {'a': 1, 'b': 2} + k = d.keys() + self.assert_(d.has_key('a')) + self.assert_(d.has_key('b')) + + self.assertRaises(TypeError, d.keys, None) + + def test_values(self): + d = {} + self.assertEqual(d.values(), []) + d = {1:2} + self.assertEqual(d.values(), [2]) + + self.assertRaises(TypeError, d.values, None) + + def test_items(self): + d = {} + self.assertEqual(d.items(), []) + + d = {1:2} + self.assertEqual(d.items(), [(1, 2)]) + + self.assertRaises(TypeError, d.items, None) + + def test_has_key(self): + d = {} + self.assert_(not d.has_key('a')) + d = {'a': 1, 'b': 2} + k = d.keys() + k.sort() + self.assertEqual(k, ['a', 'b']) + + self.assertRaises(TypeError, d.has_key) + + def test_contains(self): + d = {} + self.assert_(not ('a' in d)) + self.assert_('a' not in d) + d = {'a': 1, 'b': 2} + self.assert_('a' in d) + self.assert_('b' in d) + self.assert_('c' not in d) + + self.assertRaises(TypeError, d.__contains__) + + def test_len(self): + d = {} + self.assertEqual(len(d), 0) + d = {'a': 1, 'b': 2} + self.assertEqual(len(d), 2) + + def test_getitem(self): + d = {'a': 1, 'b': 2} + self.assertEqual(d['a'], 1) + self.assertEqual(d['b'], 2) + d['c'] = 3 + d['a'] = 4 + self.assertEqual(d['c'], 3) + self.assertEqual(d['a'], 4) + del d['b'] + self.assertEqual(d, {'a': 4, 'c': 3}) + + self.assertRaises(TypeError, d.__getitem__) + + class BadEq(object): + def __eq__(self, other): + raise Exc() + + d = {} + d[BadEq()] = 42 + self.assertRaises(KeyError, d.__getitem__, 23) + + class Exc(Exception): pass + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.__getitem__, x) + + def test_clear(self): + d = {1:1, 2:2, 3:3} + d.clear() + self.assertEqual(d, {}) + + self.assertRaises(TypeError, d.clear, None) + + def test_update(self): + d = {} + d.update({1:100}) + d.update({2:20}) + d.update({1:1, 2:2, 3:3}) + self.assertEqual(d, {1:1, 2:2, 3:3}) + + d.update() + self.assertEqual(d, {1:1, 2:2, 3:3}) + + self.assertRaises((TypeError, AttributeError), d.update, None) + + class SimpleUserDict: + def __init__(self): + self.d = {1:1, 2:2, 3:3} + def keys(self): + return self.d.keys() + def __getitem__(self, i): + return self.d[i] + d.clear() + d.update(SimpleUserDict()) + self.assertEqual(d, {1:1, 2:2, 3:3}) + + class Exc(Exception): pass + + d.clear() + class FailingUserDict: + def keys(self): + raise Exc + self.assertRaises(Exc, d.update, FailingUserDict()) + + class FailingUserDict: + def keys(self): + class BogonIter: + def __init__(self): + self.i = 1 + def __iter__(self): + return self + def next(self): + if self.i: + self.i = 0 + return 'a' + raise Exc + return 
BogonIter() + def __getitem__(self, key): + return key + self.assertRaises(Exc, d.update, FailingUserDict()) + + class FailingUserDict: + def keys(self): + class BogonIter: + def __init__(self): + self.i = ord('a') + def __iter__(self): + return self + def next(self): + if self.i <= ord('z'): + rtn = chr(self.i) + self.i += 1 + return rtn + raise StopIteration + return BogonIter() + def __getitem__(self, key): + raise Exc + self.assertRaises(Exc, d.update, FailingUserDict()) + + class badseq(object): + def __iter__(self): + return self + def next(self): + raise Exc() + + self.assertRaises(Exc, {}.update, badseq()) + + self.assertRaises(ValueError, {}.update, [(1, 2, 3)]) + + def test_fromkeys(self): + self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) + d = {} + self.assert_(not(d.fromkeys('abc') is d)) + self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) + self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0}) + self.assertEqual(d.fromkeys([]), {}) + def g(): + yield 1 + self.assertEqual(d.fromkeys(g()), {1:None}) + self.assertRaises(TypeError, {}.fromkeys, 3) + class dictlike(dict): pass + self.assertEqual(dictlike.fromkeys('a'), {'a':None}) + self.assertEqual(dictlike().fromkeys('a'), {'a':None}) + self.assert_(type(dictlike.fromkeys('a')) is dictlike) + self.assert_(type(dictlike().fromkeys('a')) is dictlike) + class mydict(dict): + def __new__(cls): + return UserDict.UserDict() + ud = mydict.fromkeys('ab') + self.assertEqual(ud, {'a':None, 'b':None}) + self.assert_(isinstance(ud, UserDict.UserDict)) + self.assertRaises(TypeError, dict.fromkeys) + + class Exc(Exception): pass + + class baddict1(dict): + def __init__(self): + raise Exc() + + self.assertRaises(Exc, baddict1.fromkeys, [1]) + + class BadSeq(object): + def __iter__(self): + return self + def next(self): + raise Exc() + + self.assertRaises(Exc, dict.fromkeys, BadSeq()) + + class baddict2(dict): + def __setitem__(self, key, value): + raise Exc() + + self.assertRaises(Exc, baddict2.fromkeys, [1]) + + def test_copy(self): + d = {1:1, 2:2, 3:3} + self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) + self.assertEqual({}.copy(), {}) + self.assertRaises(TypeError, d.copy, None) + + def test_get(self): + d = {} + self.assert_(d.get('c') is None) + self.assertEqual(d.get('c', 3), 3) + d = {'a' : 1, 'b' : 2} + self.assert_(d.get('c') is None) + self.assertEqual(d.get('c', 3), 3) + self.assertEqual(d.get('a'), 1) + self.assertEqual(d.get('a', 3), 1) + self.assertRaises(TypeError, d.get) + self.assertRaises(TypeError, d.get, None, None, None) + + def test_setdefault(self): + # dict.setdefault() + d = {} + self.assert_(d.setdefault('key0') is None) + d.setdefault('key0', []) + self.assert_(d.setdefault('key0') is None) + d.setdefault('key', []).append(3) + self.assertEqual(d['key'][0], 3) + d.setdefault('key', []).append(4) + self.assertEqual(len(d['key']), 2) + self.assertRaises(TypeError, d.setdefault) + + class Exc(Exception): pass + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.setdefault, x, []) + + def test_popitem(self): + # dict.popitem() + for copymode in -1, +1: + # -1: b has same structure as a + # +1: b is a.copy() + for log2size in range(4): # XXX 12 too large for PyPy + size = 2**log2size + a = {} + b = {} + for i in range(size): + a[repr(i)] = i + if copymode < 0: + b[repr(i)] = i + if copymode > 0: + b = a.copy() + for i in range(size): + ka, va = ta = 
a.popitem() + self.assertEqual(va, int(ka)) + kb, vb = tb = b.popitem() + self.assertEqual(vb, int(kb)) + self.assert_(not(copymode < 0 and ta != tb)) + self.assert_(not a) + self.assert_(not b) + + d = {} + self.assertRaises(KeyError, d.popitem) + + def test_pop(self): + # Tests for pop with specified key + d = {} + k, v = 'abc', 'def' + d[k] = v + self.assertRaises(KeyError, d.pop, 'ghi') + + self.assertEqual(d.pop(k), v) + self.assertEqual(len(d), 0) + + self.assertRaises(KeyError, d.pop, k) + + # verify longs/ints get same value when key > 32 bits (for 64-bit archs) + # see SF bug #689659 + x = 4503599627370496L + y = 4503599627370496 + h = {x: 'anything', y: 'something else'} + self.assertEqual(h[x], h[y]) + + self.assertEqual(d.pop(k, v), v) + d[k] = v + self.assertEqual(d.pop(k, 1), v) + + self.assertRaises(TypeError, d.pop) + + class Exc(Exception): pass + + class BadHash(object): + fail = False + def __hash__(self): + if self.fail: + raise Exc() + else: + return 42 + + x = BadHash() + d[x] = 42 + x.fail = True + self.assertRaises(Exc, d.pop, x) + + def test_mutatingiteration(self): + d = {} + d[1] = 1 + try: + for i in d: + d[i+1] = 1 + except RuntimeError: + pass + else: + self.fail("changing dict size during iteration doesn't raise Error") + + def test_repr(self): + d = {} + self.assertEqual(repr(d), '{}') + d[1] = 2 + self.assertEqual(repr(d), '{1: 2}') + d = {} + d[1] = d + self.assertEqual(repr(d), '{1: {...}}') + + class Exc(Exception): pass + + class BadRepr(object): + def __repr__(self): + raise Exc() + + d = {1: BadRepr()} + self.assertRaises(Exc, repr, d) + + def test_le(self): + self.assert_(not ({} < {})) + self.assert_(not ({1: 2} < {1L: 2L})) + + class Exc(Exception): pass + + class BadCmp(object): + def __cmp__(self, other): + raise Exc() + + d1 = {BadCmp(): 1} + d2 = {1: 1} + try: + d1 < d2 + except Exc: + pass + else: + self.fail("< didn't raise Exc") + + def test_missing(self): + # Make sure dict doesn't have a __missing__ method + self.assertEqual(hasattr(dict, "__missing__"), False) + self.assertEqual(hasattr({}, "__missing__"), False) + # Test several cases: + # (D) subclass defines __missing__ method returning a value + # (E) subclass defines __missing__ method raising RuntimeError + # (F) subclass sets __missing__ instance variable (no effect) + # (G) subclass doesn't define __missing__ at a all + class D(dict): + def __missing__(self, key): + return 42 + d = D({1: 2, 3: 4}) + self.assertEqual(d[1], 2) + self.assertEqual(d[3], 4) + self.assert_(2 not in d) + self.assert_(2 not in d.keys()) + self.assertEqual(d[2], 42) + class E(dict): + def __missing__(self, key): + raise RuntimeError(key) + e = E() + try: + e[42] + except RuntimeError, err: + self.assertEqual(err.args, (42,)) + else: + self.fail("e[42] didn't raise RuntimeError") + class F(dict): + def __init__(self): + # An instance variable __missing__ should have no effect + self.__missing__ = lambda key: None + f = F() + try: + f[42] + except KeyError, err: + self.assertEqual(err.args, (42,)) + else: + self.fail("f[42] didn't raise KeyError") + class G(dict): + pass + g = G() + try: + g[42] + except KeyError, err: + self.assertEqual(err.args, (42,)) + else: + self.fail("g[42] didn't raise KeyError") + + def test_tuple_keyerror(self): + # SF #1576657 + d = {} + try: + d[(1,)] + except KeyError, e: + self.assertEqual(e.args, ((1,),)) + else: + self.fail("missing KeyError") + + +from test import mapping_tests + +class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol): + type2test = dict + +class 
Dict(dict): + pass + +class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol): + type2test = Dict + +def test_main(): + test_support.run_unittest( + DictTest, + GeneralMappingTests, + SubclassMappingTests, + ) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,219 @@ +import unittest +import sys + +from test import test_support + +class G: + 'Sequence using __getitem__' + def __init__(self, seqn): + self.seqn = seqn + def __getitem__(self, i): + return self.seqn[i] + +class I: + 'Sequence using iterator protocol' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class Ig: + 'Sequence using iterator protocol defined with a generator' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + for val in self.seqn: + yield val + +class X: + 'Missing __getitem__ and __iter__' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class E: + 'Test propagation of exceptions' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + 3 // 0 + +class N: + 'Iterator missing next()' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + +class EnumerateTestCase(unittest.TestCase): + + enum = enumerate + seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')] + + def test_basicfunction(self): + self.assertEqual(type(self.enum(self.seq)), self.enum) + e = self.enum(self.seq) + self.assertEqual(iter(e), e) + self.assertEqual(list(self.enum(self.seq)), self.res) + self.enum.__doc__ + + def test_getitemseqn(self): + self.assertEqual(list(self.enum(G(self.seq))), self.res) + e = self.enum(G('')) + self.assertRaises(StopIteration, e.next) + + def test_iteratorseqn(self): + self.assertEqual(list(self.enum(I(self.seq))), self.res) + e = self.enum(I('')) + self.assertRaises(StopIteration, e.next) + + def test_iteratorgenerator(self): + self.assertEqual(list(self.enum(Ig(self.seq))), self.res) + e = self.enum(Ig('')) + self.assertRaises(StopIteration, e.next) + + def test_noniterable(self): + self.assertRaises(TypeError, self.enum, X(self.seq)) + + def test_illformediterable(self): + self.assertRaises(TypeError, list, self.enum(N(self.seq))) + + def test_exception_propagation(self): + self.assertRaises(ZeroDivisionError, list, self.enum(E(self.seq))) + + def test_argumentcheck(self): + self.assertRaises(TypeError, self.enum) # no arguments + self.assertRaises(TypeError, self.enum, 1) # wrong type (not iterable) + self.assertRaises(TypeError, self.enum, 'abc', 2) # too many arguments + + #Don't test this in PyPy, since the tuple can't be reused + def DONOT_test_tuple_reuse(self): + # Tests an implementation detail where tuple is reused + # whenever nothing else holds a reference to it + self.assertEqual(len(set(map(id, list(enumerate(self.seq))))), len(self.seq)) + self.assertEqual(len(set(map(id, enumerate(self.seq)))), min(1,len(self.seq))) + +class MyEnum(enumerate): + pass + +class SubclassTestCase(EnumerateTestCase): 
+ + enum = MyEnum + +class TestEmpty(EnumerateTestCase): + + seq, res = '', [] + +class TestBig(EnumerateTestCase): + ##original test (takes too long in PyPy): + #seq = range(10,20000, 2) + #res = zip(range(20000), seq) + + seq = range(10, 200, 2) + res = zip(range(200), seq) + +class TestReversed(unittest.TestCase): + + def test_simple(self): + class A: + def __getitem__(self, i): + if i < 5: + return str(i) + raise StopIteration + def __len__(self): + return 5 + for data in 'abc', range(5), tuple(enumerate('abc')), A(), xrange(1,17,5): + self.assertEqual(list(data)[::-1], list(reversed(data))) + self.assertRaises(TypeError, reversed, {}) + +# Implementation detail +# def test_xrange_optimization(self): +# x = xrange(1) +# self.assertEqual(type(reversed(x)), type(iter(x))) + + def test_len(self): + # This is an implementation detail, not an interface requirement + from test.test_iterlen import len + for s in ('hello', tuple('hello'), list('hello'), xrange(5)): + self.assertEqual(len(reversed(s)), len(s)) + r = reversed(s) + list(r) + self.assertEqual(len(r), 0) + class SeqWithWeirdLen: + called = False + def __len__(self): + if not self.called: + self.called = True + return 10 + raise ZeroDivisionError + def __getitem__(self, index): + return index + r = reversed(SeqWithWeirdLen()) + self.assertRaises(ZeroDivisionError, len, r) + + + def test_gc(self): + class Seq: + def __len__(self): + return 10 + def __getitem__(self, index): + return index + s = Seq() + r = reversed(s) + s.r = r + + def test_args(self): + self.assertRaises(TypeError, reversed) + self.assertRaises(TypeError, reversed, [], 'extra') + + def test_bug1229429(self): + # this bug was never in reversed, it was in + # PyObject_CallMethod, and reversed_new calls that sometimes. + if not hasattr(sys, "getrefcount"): + return + def f(): + pass + r = f.__reversed__ = object() + rc = sys.getrefcount(r) + for i in range(10): + try: + reversed(f) + except TypeError: + pass + else: + self.fail("non-callable __reversed__ didn't raise!") + self.assertEqual(rc, sys.getrefcount(r)) + + +def test_main(verbose=None): + testclasses = (EnumerateTestCase, SubclassTestCase, TestEmpty, TestBig, + TestReversed) + test_support.run_unittest(*testclasses) + + # verify reference counting + import sys + if verbose and hasattr(sys, "gettotalrefcount"): + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*testclasses) + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,349 @@ +# Python test set -- part 5, built-in exceptions + +import os +import sys +import unittest +import warnings +import pickle, cPickle + +from test.test_support import TESTFN, unlink, run_unittest + +# XXX This is not really enough, each *operation* should be tested! + +class ExceptionTests(unittest.TestCase): + + def testReload(self): + # Reloading the built-in exceptions module failed prior to Py2.2, while it + # should act the same as reloading built-in sys. 
+ try: + import exceptions + reload(exceptions) + except ImportError, e: + self.fail("reloading exceptions: %s" % e) + + def raise_catch(self, exc, excname): + try: + raise exc, "spam" + except exc, err: + buf1 = str(err) + try: + raise exc("spam") + except exc, err: + buf2 = str(err) + self.assertEquals(buf1, buf2) + self.assertEquals(exc.__name__, excname) + + def testRaising(self): + self.raise_catch(AttributeError, "AttributeError") + self.assertRaises(AttributeError, getattr, sys, "undefined_attribute") + + self.raise_catch(EOFError, "EOFError") + fp = open(TESTFN, 'w') + fp.close() + fp = open(TESTFN, 'r') + savestdin = sys.stdin + try: + try: + sys.stdin = fp + x = raw_input() + except EOFError: + pass + finally: + sys.stdin = savestdin + fp.close() + unlink(TESTFN) + + self.raise_catch(IOError, "IOError") + self.assertRaises(IOError, open, 'this file does not exist', 'r') + + self.raise_catch(ImportError, "ImportError") + self.assertRaises(ImportError, __import__, "undefined_module") + + self.raise_catch(IndexError, "IndexError") + x = [] + self.assertRaises(IndexError, x.__getitem__, 10) + + self.raise_catch(KeyError, "KeyError") + x = {} + self.assertRaises(KeyError, x.__getitem__, 'key') + + self.raise_catch(KeyboardInterrupt, "KeyboardInterrupt") + + self.raise_catch(MemoryError, "MemoryError") + + self.raise_catch(NameError, "NameError") + try: x = undefined_variable + except NameError: pass + + self.raise_catch(OverflowError, "OverflowError") + x = 1 + for dummy in range(128): + x += x # this simply shouldn't blow up + + self.raise_catch(RuntimeError, "RuntimeError") + + self.raise_catch(SyntaxError, "SyntaxError") + try: exec '/\n' + except SyntaxError: pass + + self.raise_catch(IndentationError, "IndentationError") + + self.raise_catch(TabError, "TabError") + # can only be tested under -tt, and is the only test for -tt + #try: compile("try:\n\t1/0\n \t1/0\nfinally:\n pass\n", '', 'exec') + #except TabError: pass + #else: self.fail("TabError not raised") + + self.raise_catch(SystemError, "SystemError") + + self.raise_catch(SystemExit, "SystemExit") + self.assertRaises(SystemExit, sys.exit, 0) + + self.raise_catch(TypeError, "TypeError") + try: [] + () + except TypeError: pass + + self.raise_catch(ValueError, "ValueError") + self.assertRaises(ValueError, chr, 10000) + + self.raise_catch(ZeroDivisionError, "ZeroDivisionError") + try: x = 1/0 + except ZeroDivisionError: pass + + self.raise_catch(Exception, "Exception") + try: x = 1/0 + except Exception, e: pass + + def testSyntaxErrorMessage(self): + # make sure the right exception message is raised for each of + # these code fragments + + def ckmsg(src, msg): + try: + compile(src, '', 'exec') + except SyntaxError, e: + if e.msg != msg: + self.fail("expected %s, got %s" % (msg, e.msg)) + else: + self.fail("failed to get expected SyntaxError") + + s = '''while 1: + try: + pass + finally: + continue''' + + if not sys.platform.startswith('java'): + ckmsg(s, "'continue' not supported inside 'finally' clause") + + s = '''if 1: + try: + continue + except: + pass''' + + ckmsg(s, "'continue' not properly in loop") + ckmsg("continue\n", "'continue' not properly in loop") + + def testSettingException(self): + # test that setting an exception at the C level works even if the + # exception object can't be constructed. 
+ + class BadException: + def __init__(self_): + raise RuntimeError, "can't instantiate BadException" + + def test_capi1(): + import _testcapi + try: + _testcapi.raise_exception(BadException, 1) + except TypeError, err: + exc, err, tb = sys.exc_info() + co = tb.tb_frame.f_code + self.assertEquals(co.co_name, "test_capi1") + self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py')) + else: + self.fail("Expected exception") + + def test_capi2(): + import _testcapi + try: + _testcapi.raise_exception(BadException, 0) + except RuntimeError, err: + exc, err, tb = sys.exc_info() + co = tb.tb_frame.f_code + self.assertEquals(co.co_name, "__init__") + self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py')) + co2 = tb.tb_frame.f_back.f_code + self.assertEquals(co2.co_name, "test_capi2") + else: + self.fail("Expected exception") + + try: + import _testcapi + except ImportError: + pass + else: + test_capi1() + test_capi2() + + def test_WindowsError(self): + try: + WindowsError + except NameError: + pass + else: + self.failUnlessEqual(str(WindowsError(1001)), + "1001") + self.failUnlessEqual(str(WindowsError(1001, "message")), + "[Error 1001] message") + self.failUnlessEqual(WindowsError(1001, "message").errno, 22) + self.failUnlessEqual(WindowsError(1001, "message").winerror, 1001) + + def testAttributes(self): + # test that exception attributes are happy + + exceptionList = [ + (BaseException, (), {'message' : '', 'args' : ()}), + (BaseException, (1, ), {'message' : 1, 'args' : (1,)}), + (BaseException, ('foo',), + {'message' : 'foo', 'args' : ('foo',)}), + (BaseException, ('foo', 1), + {'message' : '', 'args' : ('foo', 1)}), + (SystemExit, ('foo',), + {'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}), + (IOError, ('foo',), + {'message' : 'foo', 'args' : ('foo',), 'filename' : None, + 'errno' : None, 'strerror' : None}), + (IOError, ('foo', 'bar'), + {'message' : '', 'args' : ('foo', 'bar'), 'filename' : None, + 'errno' : 'foo', 'strerror' : 'bar'}), + (IOError, ('foo', 'bar', 'baz'), + {'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz', + 'errno' : 'foo', 'strerror' : 'bar'}), + (IOError, ('foo', 'bar', 'baz', 'quux'), + {'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}), + (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : ('errnoStr', 'strErrorStr'), + 'strerror' : 'strErrorStr', 'errno' : 'errnoStr', + 'filename' : 'filenameStr'}), + (EnvironmentError, (1, 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1, + 'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}), + (SyntaxError, ('msgStr',), + {'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None, + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None}), + (SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr', + 'textStr')), + {'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr', + 'args' : ('msgStr', ('filenameStr', 'linenoStr', + 'offsetStr', 'textStr')), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : 'filenameStr', 'lineno' : 'linenoStr'}), + (SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr', + 'textStr', 'print_file_and_lineStr'), + {'message' : '', 'text' : None, + 'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr', + 'textStr', 'print_file_and_lineStr'), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None}), + (UnicodeError, (), {'message' : 
'', 'args' : (),}), + (UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'), + {'message' : '', 'args' : ('ascii', u'a', 0, 1, + 'ordinal not in range'), + 'encoding' : 'ascii', 'object' : u'a', + 'start' : 0, 'reason' : 'ordinal not in range'}), + (UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'), + {'message' : '', 'args' : ('ascii', '\xff', 0, 1, + 'ordinal not in range'), + 'encoding' : 'ascii', 'object' : '\xff', + 'start' : 0, 'reason' : 'ordinal not in range'}), + (UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"), + {'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'), + 'object' : u'\u3042', 'reason' : 'ouch', + 'start' : 0, 'end' : 1}), + ] + try: + exceptionList.append( + (WindowsError, (1, 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : (1, 'strErrorStr'), + 'strerror' : 'strErrorStr', 'winerror' : 1, + 'errno' : 22, 'filename' : 'filenameStr'}) + ) + except NameError: + pass + + for exc, args, expected in exceptionList: + try: + raise exc(*args) + except BaseException, e: + if type(e) is not exc: + raise + # Verify module name + self.assertEquals(type(e).__module__, 'exceptions') + # Verify no ref leaks in Exc_str() + s = str(e) + for checkArgName in expected: + self.assertEquals(repr(getattr(e, checkArgName)), + repr(expected[checkArgName]), + 'exception "%s", attribute "%s"' % + (repr(e), checkArgName)) + + # test for pickling support + for p in pickle, cPickle: + for protocol in range(p.HIGHEST_PROTOCOL + 1): + new = p.loads(p.dumps(e, protocol)) + for checkArgName in expected: + got = repr(getattr(new, checkArgName)) + want = repr(expected[checkArgName]) + self.assertEquals(got, want, + 'pickled "%r", attribute "%s' % + (e, checkArgName)) + + def testSlicing(self): + # Test that you can slice an exception directly instead of requiring + # going through the 'args' attribute. + args = (1, 2, 3) + exc = BaseException(*args) + self.failUnlessEqual(exc[:], args) + + def testKeywordArgs(self): + # test that builtin exception don't take keyword args, + # but user-defined subclasses can if they want + self.assertRaises(TypeError, BaseException, a=1) + + class DerivedException(BaseException): + def __init__(self, fancy_arg): + BaseException.__init__(self) + self.fancy_arg = fancy_arg + + x = DerivedException(fancy_arg=42) + self.assertEquals(x.fancy_arg, 42) + + def testInfiniteRecursion(self): + def f(): + return f() + self.assertRaises(RuntimeError, f) + + def g(): + try: + return g() + except ValueError: + return -1 + self.assertRaises(RuntimeError, g) + + def testUnicodeStrUsage(self): + # Make sure both instances and classes have a str and unicode + # representation. 
+ self.failUnless(str(Exception)) + self.failUnless(unicode(Exception)) + self.failUnless(str(Exception('a'))) + self.failUnless(unicode(Exception(u'a'))) + + +def test_main(): + run_unittest(ExceptionTests) + +if __name__ == '__main__': + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,343 @@ +import sys +import os +import gc +import unittest +from array import array +from weakref import proxy + +from test.test_support import TESTFN, findfile, run_unittest +from UserList import UserList + +class AutoFileTests(unittest.TestCase): + # file tests for which a test file is automatically set up + + def setUp(self): + self.f = open(TESTFN, 'wb') + + def tearDown(self): + if self.f: + self.f.close() + os.remove(TESTFN) + + def testWeakRefs(self): + # verify weak references + p = proxy(self.f) + p.write('teststring') + self.assertEquals(self.f.name, p.name) + self.f.close() + self.f = None + self.assertRaises(ReferenceError, getattr, p, 'name') + + def testAttributes(self): + # verify expected attributes exist + f = self.f + softspace = f.softspace + f.name # merely shouldn't blow up + f.mode # ditto + f.closed # ditto + + # verify softspace is writable + f.softspace = softspace # merely shouldn't blow up + + # verify the others aren't + for attr in 'name', 'mode', 'closed': + self.assertRaises((AttributeError, TypeError), setattr, f, attr, 'oops') + + def testReadinto(self): + # verify readinto + self.f.write('12') + self.f.close() + a = array('c', 'x'*10) + self.f = open(TESTFN, 'rb') + n = self.f.readinto(a) + self.assertEquals('12', a.tostring()[:n]) + + def testWritelinesUserList(self): + # verify writelines with instance sequence + # XXX writeslines on UserList crashes interpreter + #l = UserList(['1', '2']) + l = ['1', '2'] + self.f.writelines(l) + self.f.close() + self.f = open(TESTFN, 'rb') + buf = self.f.read() + self.assertEquals(buf, '12') + + def testWritelinesIntegers(self): + # verify writelines with integers + self.assertRaises(TypeError, self.f.writelines, [1, 2, 3]) + + def testWritelinesIntegersUserList(self): + # verify writelines with integers in UserList + # l = UserList([1,2,3]) + l = [1,2,3] + + self.assertRaises(TypeError, self.f.writelines, l) + + def testWritelinesNonString(self): + # verify writelines with non-string object + class NonString: + pass + +# self.assertRaises(TypeError, self.f.writelines, +# [NonString(), NonString()]) + + def testRepr(self): + # verify repr works + self.assert_(repr(self.f).startswith(">sys.__stdout__, ( + ' Skipping sys.stdin.seek(-1), it may crash the interpreter.' 
+ ' Test manually.') + self.assertRaises(IOError, sys.stdin.truncate) + + def testUnicodeOpen(self): + # verify repr works for unicode too + f = open(unicode(TESTFN), "w") + self.assert_(repr(f).startswith(" + # "file.truncate fault on windows" + f = open(TESTFN, 'wb') + f.write('12345678901') # 11 bytes + f.close() + + f = open(TESTFN,'rb+') + data = f.read(5) + if data != '12345': + self.fail("Read on file opened for update failed %r" % data) + if f.tell() != 5: + self.fail("File pos after read wrong %d" % f.tell()) + + f.truncate() + if f.tell() != 5: + self.fail("File pos after ftruncate wrong %d" % f.tell()) + + f.close() + size = os.path.getsize(TESTFN) + if size != 5: + self.fail("File size after ftruncate wrong %d" % size) + + try: + bug801631() + finally: + os.unlink(TESTFN) + + def testIteration(self): + # Test the complex interaction when mixing file-iteration and the + # various read* methods. Ostensibly, the mixture could just be tested + # to work when it should work according to the Python language, + # instead of fail when it should fail according to the current CPython + # implementation. People don't always program Python the way they + # should, though, and the implemenation might change in subtle ways, + # so we explicitly test for errors, too; the test will just have to + # be updated when the implementation changes. + dataoffset = 16384 + filler = "ham\n" + assert not dataoffset % len(filler), \ + "dataoffset must be multiple of len(filler)" + nchunks = dataoffset // len(filler) + testlines = [ + "spam, spam and eggs\n", + "eggs, spam, ham and spam\n", + "saussages, spam, spam and eggs\n", + "spam, ham, spam and eggs\n", + "spam, spam, spam, spam, spam, ham, spam\n", + "wonderful spaaaaaam.\n" + ] + methods = [("readline", ()), ("read", ()), ("readlines", ()), + ("readinto", (array("c", " "*100),))] + + try: + # Prepare the testfile + bag = open(TESTFN, "w") + bag.write(filler * nchunks) + bag.writelines(testlines) + bag.close() + # Test for appropriate errors mixing read* and iteration + for methodname, args in methods: + f = open(TESTFN) + if f.next() != filler: + self.fail, "Broken testfile" + meth = getattr(f, methodname) + try: + meth(*args) + except ValueError: + pass + else: + self.fail("%s%r after next() didn't raise ValueError" % + (methodname, args)) + f.close() + + # Test to see if harmless (by accident) mixing of read* and + # iteration still works. This depends on the size of the internal + # iteration buffer (currently 8192,) but we can test it in a + # flexible manner. Each line in the bag o' ham is 4 bytes + # ("h", "a", "m", "\n"), so 4096 lines of that should get us + # exactly on the buffer boundary for any power-of-2 buffersize + # between 4 and 16384 (inclusive). + f = open(TESTFN) + for i in range(nchunks): + f.next() + testline = testlines.pop(0) + try: + line = f.readline() + except ValueError: + self.fail("readline() after next() with supposedly empty " + "iteration-buffer failed anyway") + if line != testline: + self.fail("readline() after next() with empty buffer " + "failed. Got %r, expected %r" % (line, testline)) + testline = testlines.pop(0) + buf = array("c", "\x00" * len(testline)) + try: + f.readinto(buf) + except ValueError: + self.fail("readinto() after next() with supposedly empty " + "iteration-buffer failed anyway") + line = buf.tostring() + if line != testline: + self.fail("readinto() after next() with empty buffer " + "failed. 
Got %r, expected %r" % (line, testline)) + + testline = testlines.pop(0) + try: + line = f.read(len(testline)) + except ValueError: + self.fail("read() after next() with supposedly empty " + "iteration-buffer failed anyway") + if line != testline: + self.fail("read() after next() with empty buffer " + "failed. Got %r, expected %r" % (line, testline)) + try: + lines = f.readlines() + except ValueError: + self.fail("readlines() after next() with supposedly empty " + "iteration-buffer failed anyway") + if lines != testlines: + self.fail("readlines() after next() with empty buffer " + "failed. Got %r, expected %r" % (line, testline)) + # Reading after iteration hit EOF shouldn't hurt either + f = open(TESTFN) + try: + for line in f: + pass + try: + f.readline() + f.readinto(buf) + f.read() + f.readlines() + except ValueError: + self.fail("read* failed after next() consumed file") + finally: + f.close() + finally: + os.unlink(TESTFN) + + +def test_main(): + # Historically, these tests have been sloppy about removing TESTFN. + # So get rid of it no matter what. + try: + run_unittest(AutoFileTests, OtherFileTests) + finally: + if os.path.exists(TESTFN): + os.unlink(TESTFN) + +if __name__ == '__main__': + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,250 @@ +from test.test_support import verbose, have_unicode, TestFailed +import sys + +# test string formatting operator (I am not sure if this is being tested +# elsewhere but, surely, some of the given cases are *not* tested because +# they crash python) +# test on unicode strings as well + +overflowok = 1 + +def testformat(formatstr, args, output=None): + if verbose: + if output: + print "%s %% %s =? %s ..." %\ + (repr(formatstr), repr(args), repr(output)), + else: + print "%s %% %s works? ..." % (repr(formatstr), repr(args)), + try: + result = formatstr % args + except (OverflowError, MemoryError): + if not overflowok: + raise + if verbose: + print 'overflow (this is fine)' + else: + if output and result != output: + if verbose: + print 'no' + print "%s %% %s == %s != %s" %\ + (repr(formatstr), repr(args), repr(result), repr(output)) + else: + if verbose: + print 'yes' + +def testboth(formatstr, *args): + testformat(formatstr, *args) + if have_unicode: + testformat(unicode(formatstr), *args) + + +testboth("%.1d", (1,), "1") +testboth("%.*d", (sys.maxint,1)) # expect overflow +testboth("%.100d", (1,), '0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001') +testboth("%#.117x", (1,), '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001') +testboth("%#.118x", (1,), '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001') + +testboth("%f", (1.0,), "1.000000") +# these are trying to test the limits of the internal magic-number-length +# formatting buffer, if that number changes then these tests are less +# effective +testboth("%#.*g", (109, -1.e+49/3.)) +testboth("%#.*g", (110, -1.e+49/3.)) +testboth("%#.*g", (110, -1.e+100/3.)) + +# test some ridiculously large precision, expect overflow +#Too slow on PyPy +#testboth('%12.*f', (123456, 1.0)) + +# Formatting of long integers.
Overflow is not ok +overflowok = 0 +testboth("%x", 10L, "a") +testboth("%x", 100000000000L, "174876e800") +testboth("%o", 10L, "12") +testboth("%o", 100000000000L, "1351035564000") +testboth("%d", 10L, "10") +testboth("%d", 100000000000L, "100000000000") + +big = 123456789012345678901234567890L +testboth("%d", big, "123456789012345678901234567890") +testboth("%d", -big, "-123456789012345678901234567890") +testboth("%5d", -big, "-123456789012345678901234567890") +testboth("%31d", -big, "-123456789012345678901234567890") +testboth("%32d", -big, " -123456789012345678901234567890") +testboth("%-32d", -big, "-123456789012345678901234567890 ") +testboth("%032d", -big, "-0123456789012345678901234567890") +testboth("%-032d", -big, "-123456789012345678901234567890 ") +testboth("%034d", -big, "-000123456789012345678901234567890") +testboth("%034d", big, "0000123456789012345678901234567890") +testboth("%0+34d", big, "+000123456789012345678901234567890") +testboth("%+34d", big, " +123456789012345678901234567890") +testboth("%34d", big, " 123456789012345678901234567890") +testboth("%.2d", big, "123456789012345678901234567890") +testboth("%.30d", big, "123456789012345678901234567890") +testboth("%.31d", big, "0123456789012345678901234567890") +testboth("%32.31d", big, " 0123456789012345678901234567890") + +big = 0x1234567890abcdef12345L # 21 hex digits +testboth("%x", big, "1234567890abcdef12345") +testboth("%x", -big, "-1234567890abcdef12345") +testboth("%5x", -big, "-1234567890abcdef12345") +testboth("%22x", -big, "-1234567890abcdef12345") +testboth("%23x", -big, " -1234567890abcdef12345") +testboth("%-23x", -big, "-1234567890abcdef12345 ") +testboth("%023x", -big, "-01234567890abcdef12345") +testboth("%-023x", -big, "-1234567890abcdef12345 ") +testboth("%025x", -big, "-0001234567890abcdef12345") +testboth("%025x", big, "00001234567890abcdef12345") +testboth("%0+25x", big, "+0001234567890abcdef12345") +testboth("%+25x", big, " +1234567890abcdef12345") +testboth("%25x", big, " 1234567890abcdef12345") +testboth("%.2x", big, "1234567890abcdef12345") +testboth("%.21x", big, "1234567890abcdef12345") +testboth("%.22x", big, "01234567890abcdef12345") +testboth("%23.22x", big, " 01234567890abcdef12345") +testboth("%-23.22x", big, "01234567890abcdef12345 ") +testboth("%X", big, "1234567890ABCDEF12345") +testboth("%#X", big, "0X1234567890ABCDEF12345") +testboth("%#x", big, "0x1234567890abcdef12345") +testboth("%#x", -big, "-0x1234567890abcdef12345") +testboth("%#.23x", -big, "-0x001234567890abcdef12345") +testboth("%#+.23x", big, "+0x001234567890abcdef12345") +testboth("%# .23x", big, " 0x001234567890abcdef12345") +testboth("%#+.23X", big, "+0X001234567890ABCDEF12345") +testboth("%#-+.23X", big, "+0X001234567890ABCDEF12345") +testboth("%#-+26.23X", big, "+0X001234567890ABCDEF12345") +testboth("%#-+27.23X", big, "+0X001234567890ABCDEF12345 ") +testboth("%#+27.23X", big, " +0X001234567890ABCDEF12345") +# next one gets two leading zeroes from precision, and another from the +# 0 flag and the width +testboth("%#+027.23X", big, "+0X0001234567890ABCDEF12345") +# same, except no 0 flag +testboth("%#+27.23X", big, " +0X001234567890ABCDEF12345") + +big = 012345670123456701234567012345670L # 32 octal digits +testboth("%o", big, "12345670123456701234567012345670") +testboth("%o", -big, "-12345670123456701234567012345670") +testboth("%5o", -big, "-12345670123456701234567012345670") +testboth("%33o", -big, "-12345670123456701234567012345670") +testboth("%34o", -big, " -12345670123456701234567012345670") +testboth("%-34o", 
-big, "-12345670123456701234567012345670 ") +testboth("%034o", -big, "-012345670123456701234567012345670") +testboth("%-034o", -big, "-12345670123456701234567012345670 ") +testboth("%036o", -big, "-00012345670123456701234567012345670") +testboth("%036o", big, "000012345670123456701234567012345670") +testboth("%0+36o", big, "+00012345670123456701234567012345670") +testboth("%+36o", big, " +12345670123456701234567012345670") +testboth("%36o", big, " 12345670123456701234567012345670") +testboth("%.2o", big, "12345670123456701234567012345670") +testboth("%.32o", big, "12345670123456701234567012345670") +testboth("%.33o", big, "012345670123456701234567012345670") +testboth("%34.33o", big, " 012345670123456701234567012345670") +testboth("%-34.33o", big, "012345670123456701234567012345670 ") +testboth("%o", big, "12345670123456701234567012345670") +testboth("%#o", big, "012345670123456701234567012345670") +testboth("%#o", -big, "-012345670123456701234567012345670") +testboth("%#.34o", -big, "-0012345670123456701234567012345670") +testboth("%#+.34o", big, "+0012345670123456701234567012345670") +testboth("%# .34o", big, " 0012345670123456701234567012345670") +testboth("%#+.34o", big, "+0012345670123456701234567012345670") +testboth("%#-+.34o", big, "+0012345670123456701234567012345670") +testboth("%#-+37.34o", big, "+0012345670123456701234567012345670 ") +testboth("%#+37.34o", big, " +0012345670123456701234567012345670") +# next one gets one leading zero from precision +testboth("%.33o", big, "012345670123456701234567012345670") +# base marker shouldn't change that, since "0" is redundant +testboth("%#.33o", big, "012345670123456701234567012345670") +# but reduce precision, and base marker should add a zero +testboth("%#.32o", big, "012345670123456701234567012345670") +# one leading zero from precision, and another from "0" flag & width +testboth("%034.33o", big, "0012345670123456701234567012345670") +# base marker shouldn't change that +testboth("%0#34.33o", big, "0012345670123456701234567012345670") + +# Some small ints, in both Python int and long flavors). 
+testboth("%d", 42, "42") +testboth("%d", -42, "-42") +testboth("%d", 42L, "42") +testboth("%d", -42L, "-42") +testboth("%#x", 1, "0x1") +testboth("%#x", 1L, "0x1") +testboth("%#X", 1, "0X1") +testboth("%#X", 1L, "0X1") +testboth("%#o", 1, "01") +testboth("%#o", 1L, "01") +testboth("%#o", 0, "0") +testboth("%#o", 0L, "0") +testboth("%o", 0, "0") +testboth("%o", 0L, "0") +testboth("%d", 0, "0") +testboth("%d", 0L, "0") +testboth("%#x", 0, "0x0") +testboth("%#x", 0L, "0x0") +testboth("%#X", 0, "0X0") +testboth("%#X", 0L, "0X0") + +testboth("%x", 0x42, "42") +testboth("%x", -0x42, "-42") +testboth("%x", 0x42L, "42") +testboth("%x", -0x42L, "-42") + +testboth("%o", 042, "42") +testboth("%o", -042, "-42") +testboth("%o", 042L, "42") +testboth("%o", -042L, "-42") + +# Test exception for unknown format characters +if verbose: + print 'Testing exceptions' + +def test_exc(formatstr, args, exception, excmsg): + try: + testformat(formatstr, args) + except exception, exc: + if str(exc) == excmsg: + if verbose: + print "yes" + else: + if verbose: print 'no' + print 'Unexpected ', exception, ':', repr(str(exc)) + except: + if verbose: print 'no' + print 'Unexpected exception' + raise + else: + raise TestFailed, 'did not get expected exception: %s' % excmsg + +test_exc('abc %a', 1, ValueError, + "unsupported format character 'a' (0x61) at index 5") +if have_unicode: + test_exc(unicode('abc %\u3000','raw-unicode-escape'), 1, ValueError, + "unsupported format character '?' (0x3000) at index 5") + +test_exc('%d', '1', TypeError, "int argument required") +test_exc('%g', '1', TypeError, "float argument required") +test_exc('no format', '1', TypeError, + "not all arguments converted during string formatting") +test_exc('no format', u'1', TypeError, + "not all arguments converted during string formatting") +test_exc(u'no format', '1', TypeError, + "not all arguments converted during string formatting") +test_exc(u'no format', u'1', TypeError, + "not all arguments converted during string formatting") + +class Foobar(long): + def __oct__(self): + # Returning a non-string should not blow up. 
+ return self + 1 + +test_exc('%o', Foobar(), TypeError, + "expected string or Unicode object, long found") + +if sys.maxint == 2**31-1: + # crashes 2.2.1 and earlier: + try: + "%*d"%(sys.maxint, -127) + except (MemoryError, OverflowError): + pass # CPython raises MemoryError, but both CPython and PyPy raise + # OverflowError for string concatenation + else: + raise TestFailed, '"%*d"%(sys.maxint, -127) should fail' Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,402 @@ +from test.test_support import verbose, TestFailed, verify +import types + +class F: + def a(self): + pass + +def b(): + 'my docstring' + pass + +# __module__ is a special attribute +verify(b.__module__ == __name__) +verify(verify.__module__ == "test.test_support") + +# setting attributes on functions +try: + b.publish +except AttributeError: pass +else: raise TestFailed, 'expected AttributeError' + +if b.__dict__ <> {}: + raise TestFailed, 'expected unassigned func.__dict__ to be {}' + +b.publish = 1 +if b.publish <> 1: + raise TestFailed, 'function attribute not set to expected value' + +docstring = 'its docstring' +b.__doc__ = docstring +if b.__doc__ <> docstring: + raise TestFailed, 'problem with setting __doc__ attribute' + +if 'publish' not in dir(b): + raise TestFailed, 'attribute not in dir()' + +try: + del b.__dict__ +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected AttributeError or TypeError' + +b.publish = 1 +try: + b.__dict__ = None +except TypeError: pass +else: raise TestFailed, 'func.__dict__ = None expected TypeError' + +d = {'hello': 'world'} +b.__dict__ = d +if b.func_dict is not d: + raise TestFailed, 'func.__dict__ assignment to dictionary failed' +if b.hello <> 'world': + raise TestFailed, 'attribute after func.__dict__ assignment failed' + +f1 = F() +f2 = F() + +try: + F.a.publish +except AttributeError: pass +else: raise TestFailed, 'expected AttributeError' + +try: + f1.a.publish +except AttributeError: pass +else: raise TestFailed, 'expected AttributeError' + +# In Python 2.1 beta 1, we disallowed setting attributes on unbound methods +# (it was already disallowed on bound methods). See the PEP for details. +try: + F.a.publish = 1 +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected AttributeError or TypeError' + +# But setting it explicitly on the underlying function object is okay. 
+F.a.im_func.publish = 1 + +if F.a.publish <> 1: + raise TestFailed, 'unbound method attribute not set to expected value' + +if f1.a.publish <> 1: + raise TestFailed, 'bound method attribute access did not work' + +if f2.a.publish <> 1: + raise TestFailed, 'bound method attribute access did not work' + +if 'publish' not in dir(F.a): + raise TestFailed, 'attribute not in dir()' + +try: + f1.a.publish = 0 +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected AttributeError or TypeError' + +# See the comment above about the change in semantics for Python 2.1b1 +try: + F.a.myclass = F +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected AttributeError or TypeError' + +F.a.im_func.myclass = F + +f1.a.myclass +f2.a.myclass +f1.a.myclass +F.a.myclass + +if f1.a.myclass is not f2.a.myclass or \ + f1.a.myclass is not F.a.myclass: + raise TestFailed, 'attributes were not the same' + +# try setting __dict__ +try: + F.a.__dict__ = (1, 2, 3) +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected TypeError or AttributeError' + +F.a.im_func.__dict__ = {'one': 11, 'two': 22, 'three': 33} + +if f1.a.two <> 22: + raise TestFailed, 'setting __dict__' + +from UserDict import UserDict +d = UserDict({'four': 44, 'five': 55}) + +try: + F.a.__dict__ = d +except (AttributeError, TypeError): pass +else: raise TestFailed + +if f2.a.one <> f1.a.one <> F.a.one <> 11: + raise TestFailed + +# im_func may not be a Python method! +import new +F.id = new.instancemethod(id, None, F) + +eff = F() +if eff.id() <> id(eff): + raise TestFailed + +try: + F.id.foo +except AttributeError: pass +else: raise TestFailed + +try: + F.id.foo = 12 +except (AttributeError, TypeError): pass +else: raise TestFailed + +try: + F.id.foo +except AttributeError: pass +else: raise TestFailed + +try: + eff.id.foo +except AttributeError: pass +else: raise TestFailed + +try: + eff.id.foo = 12 +except (AttributeError, TypeError): pass +else: raise TestFailed + +try: + eff.id.foo +except AttributeError: pass +else: raise TestFailed + +# Regression test for a crash in pre-2.1a1 +def another(): + pass + +try: + del another.__dict__ +except (TypeError, AttributeError): pass +else: raise TestFailed, 'del another.__dict__ did not fail' + +try: + del another.func_dict +except (TypeError, AttributeError): pass +else: raise TestFailed, 'del another.func_dict did not fail' + +try: + another.func_dict = None +except TypeError: pass +else: raise TestFailed + +try: + del another.bar +except AttributeError: pass +else: raise TestFailed + +# This isn't specifically related to function attributes, but it does test a +# core dump regression in funcobject.c +del another.func_defaults + +def foo(): + pass + +def bar(): + pass + +def temp(): + print 1 + +if foo==bar: + raise TestFailed + +d={} +d[foo] = 1 + +foo.func_code = temp.func_code + +d[foo] + +# Test all predefined function attributes systematically + +def cantset(obj, name, value, exception=(AttributeError, TypeError)): + verify(hasattr(obj, name)) # Otherwise it's probably a typo + try: + setattr(obj, name, value) + except exception: + pass + else: + raise TestFailed, "shouldn't be able to set %s to %r" % (name, value) + try: + delattr(obj, name) + except (AttributeError, TypeError): + pass + else: + raise TestFailed, "shouldn't be able to del %s" % name + +def test_func_closure(): + a = 12 + def f(): print a + c = f.func_closure + verify(isinstance(c, tuple)) + verify(len(c) == 1) + verify(c[0].__class__.__name__ == "cell") # don't have a 
type object handy + cantset(f, "func_closure", c) + +def test_func_doc(): + def f(): pass + verify(f.__doc__ is None) + verify(f.func_doc is None) + f.__doc__ = "hello" + verify(f.__doc__ == "hello") + verify(f.func_doc == "hello") + del f.__doc__ + verify(f.__doc__ is None) + verify(f.func_doc is None) + f.func_doc = "world" + verify(f.__doc__ == "world") + verify(f.func_doc == "world") + del f.func_doc + verify(f.func_doc is None) + verify(f.__doc__ is None) + +def test_func_globals(): + def f(): pass + verify(f.func_globals is globals()) + cantset(f, "func_globals", globals()) + +def test_func_name(): + def f(): pass + verify(f.__name__ == "f") + verify(f.func_name == "f") + f.__name__ = "g" + verify(f.__name__ == "g") + verify(f.func_name == "g") + f.func_name = "h" + verify(f.__name__ == "h") + verify(f.func_name == "h") + cantset(f, "func_globals", 1) + cantset(f, "__name__", 1) + # test that you can access func.__name__ in restricted mode + s = """def f(): pass\nf.__name__""" + exec s in {'__builtins__':{}} + + +def test_func_code(): + a = b = 24 + def f(): pass + def g(): print 12 + def f1(): print a + def g1(): print b + def f2(): print a, b + verify(type(f.func_code) is types.CodeType) + f.func_code = g.func_code + cantset(f, "func_code", None) + # can't change the number of free vars + cantset(f, "func_code", f1.func_code, exception=ValueError) + cantset(f1, "func_code", f.func_code, exception=ValueError) + cantset(f1, "func_code", f2.func_code, exception=ValueError) + f1.func_code = g1.func_code + +def test_func_defaults(): + def f(a, b): return (a, b) + verify(f.func_defaults is None) + f.func_defaults = (1, 2) + verify(f.func_defaults == (1, 2)) + verify(f(10) == (10, 2)) + def g(a=1, b=2): return (a, b) + verify(g.func_defaults == (1, 2)) + del g.func_defaults + verify(g.func_defaults is None) + try: + g() + except TypeError: + pass + else: + raise TestFailed, "shouldn't be allowed to call g() w/o defaults" + +def test_func_dict(): + def f(): pass + a = f.__dict__ + b = f.func_dict + verify(a == {}) + verify(a is b) + f.hello = 'world' + verify(a == {'hello': 'world'}) + verify(f.func_dict is a is f.__dict__) + f.func_dict = {} + verify(not hasattr(f, "hello")) + f.__dict__ = {'world': 'hello'} + verify(f.world == "hello") + verify(f.__dict__ is f.func_dict == {'world': 'hello'}) + cantset(f, "func_dict", None) + cantset(f, "__dict__", None) + +def test_im_class(): + class C: + def foo(self): pass + verify(C.foo.im_class is C) + verify(C().foo.im_class is C) + cantset(C.foo, "im_class", C) + cantset(C().foo, "im_class", C) + +def test_im_func(): + def foo(self): pass + class C: + pass + C.foo = foo + verify(C.foo.im_func is foo) + verify(C().foo.im_func is foo) + cantset(C.foo, "im_func", foo) + cantset(C().foo, "im_func", foo) + +def test_im_self(): + class C: + def foo(self): pass + verify(C.foo.im_self is None) + c = C() + verify(c.foo.im_self is c) + cantset(C.foo, "im_self", None) + cantset(c.foo, "im_self", c) + +def test_im_dict(): + class C: + def foo(self): pass + foo.bar = 42 + verify(C.foo.__dict__ == {'bar': 42}) + verify(C().foo.__dict__ == {'bar': 42}) + cantset(C.foo, "__dict__", C.foo.__dict__) + cantset(C().foo, "__dict__", C.foo.__dict__) + +def test_im_doc(): + class C: + def foo(self): "hello" + verify(C.foo.__doc__ == "hello") + verify(C().foo.__doc__ == "hello") + cantset(C.foo, "__doc__", "hello") + cantset(C().foo, "__doc__", "hello") + +def test_im_name(): + class C: + def foo(self): pass + verify(C.foo.__name__ == "foo") + verify(C().foo.__name__ == 
"foo") + cantset(C.foo, "__name__", "foo") + cantset(C().foo, "__name__", "foo") + +def testmore(): + test_func_closure() + test_func_doc() + test_func_globals() + test_func_name() + test_func_code() + test_func_defaults() + test_func_dict() + # Tests for instance method attributes + test_im_class() + test_im_func() + test_im_self() + test_im_dict() + test_im_doc() + test_im_name() + +testmore() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,1839 @@ +tutorial_tests = """ +Let's try a simple generator: + + >>> def f(): + ... yield 1 + ... yield 2 + + >>> for i in f(): + ... print i + 1 + 2 + >>> g = f() + >>> g.next() + 1 + >>> g.next() + 2 + +"Falling off the end" stops the generator: + + >>> g.next() + Traceback (most recent call last): + File "", line 1, in ? + File "", line 2, in g + StopIteration + +"return" also stops the generator: + + >>> def f(): + ... yield 1 + ... return + ... yield 2 # never reached + ... + >>> g = f() + >>> g.next() + 1 + >>> g.next() + Traceback (most recent call last): + File "", line 1, in ? + File "", line 3, in f + StopIteration + >>> g.next() # once stopped, can't be resumed + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + +"raise StopIteration" stops the generator too: + + >>> def f(): + ... yield 1 + ... raise StopIteration + ... yield 2 # never reached + ... + >>> g = f() + >>> g.next() + 1 + >>> g.next() + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + >>> g.next() + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + +However, they are not exactly equivalent: + + >>> def g1(): + ... try: + ... return + ... except: + ... yield 1 + ... + >>> list(g1()) + [] + + >>> def g2(): + ... try: + ... raise StopIteration + ... except: + ... yield 42 + >>> print list(g2()) + [42] + +This may be surprising at first: + + >>> def g3(): + ... try: + ... return + ... finally: + ... yield 1 + ... + >>> list(g3()) + [1] + +Let's create an alternate range() function implemented as a generator: + + >>> def yrange(n): + ... for i in range(n): + ... yield i + ... + >>> list(yrange(5)) + [0, 1, 2, 3, 4] + +Generators always return to the most recent caller: + + >>> def creator(): + ... r = yrange(5) + ... print "creator", r.next() + ... return r + ... + >>> def caller(): + ... r = creator() + ... for i in r: + ... print "caller", i + ... + >>> caller() + creator 0 + caller 1 + caller 2 + caller 3 + caller 4 + +Generators can call other generators: + + >>> def zrange(n): + ... for i in yrange(n): + ... yield i + ... + >>> list(zrange(5)) + [0, 1, 2, 3, 4] + +""" + +# The examples from PEP 255. + +pep_tests = """ + +Specification: Yield + + Restriction: A generator cannot be resumed while it is actively + running: + + >>> def g(): + ... i = me.next() + ... yield i + >>> me = g() + >>> me.next() + Traceback (most recent call last): + ... + File "", line 2, in g + ValueError: generator already executing + +Specification: Return + + Note that return isn't always equivalent to raising StopIteration: the + difference lies in how enclosing try/except constructs are treated. + For example, + + >>> def f1(): + ... try: + ... return + ... except: + ... 
yield 1 + >>> print list(f1()) + [] + + because, as in any function, return simply exits, but + + >>> def f2(): + ... try: + ... raise StopIteration + ... except: + ... yield 42 + >>> print list(f2()) + [42] + + because StopIteration is captured by a bare "except", as is any + exception. + +Specification: Generators and Exception Propagation + + >>> def f(): + ... return 1//0 + >>> def g(): + ... yield f() # the zero division exception propagates + ... yield 42 # and we'll never get here + >>> k = g() + >>> k.next() + Traceback (most recent call last): + File "", line 1, in ? + File "", line 2, in g + File "", line 2, in f + ZeroDivisionError: integer division by zero + >>> k.next() # and the generator cannot be resumed + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + >>> + +Specification: Try/Except/Finally + + >>> def f(): + ... try: + ... yield 1 + ... try: + ... yield 2 + ... 1//0 + ... yield 3 # never get here + ... except ZeroDivisionError: + ... yield 4 + ... yield 5 + ... raise + ... except: + ... yield 6 + ... yield 7 # the "raise" above stops this + ... except: + ... yield 8 + ... yield 9 + ... try: + ... x = 12 + ... finally: + ... yield 10 + ... yield 11 + >>> print list(f()) + [1, 2, 4, 5, 8, 9, 10, 11] + >>> + +Guido's binary tree example. + + >>> # A binary tree class. + >>> class Tree: + ... + ... def __init__(self, label, left=None, right=None): + ... self.label = label + ... self.left = left + ... self.right = right + ... + ... def __repr__(self, level=0, indent=" "): + ... s = level*indent + repr(self.label) + ... if self.left: + ... s = s + "\\n" + self.left.__repr__(level+1, indent) + ... if self.right: + ... s = s + "\\n" + self.right.__repr__(level+1, indent) + ... return s + ... + ... def __iter__(self): + ... return inorder(self) + + >>> # Create a Tree from a list. + >>> def tree(list): + ... n = len(list) + ... if n == 0: + ... return [] + ... i = n // 2 + ... return Tree(list[i], tree(list[:i]), tree(list[i+1:])) + + >>> # Show it off: create a tree. + >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + + >>> # A recursive generator that generates Tree labels in in-order. + >>> def inorder(t): + ... if t: + ... for x in inorder(t.left): + ... yield x + ... yield t.label + ... for x in inorder(t.right): + ... yield x + + >>> # Show it off: create a tree. + >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + >>> # Print the nodes of the tree in in-order. + >>> for x in t: + ... print x, + A B C D E F G H I J K L M N O P Q R S T U V W X Y Z + + >>> # A non-recursive generator. + >>> def inorder(node): + ... stack = [] + ... while node: + ... while node.left: + ... stack.append(node) + ... node = node.left + ... yield node.label + ... while not node.right: + ... try: + ... node = stack.pop() + ... except IndexError: + ... return + ... yield node.label + ... node = node.right + + >>> # Exercise the non-recursive generator. + >>> for x in t: + ... print x, + A B C D E F G H I J K L M N O P Q R S T U V W X Y Z + +""" + +# Examples from Iterator-List and Python-Dev and c.l.py. + +email_tests = """ + +The difference between yielding None and returning it. + +>>> def g(): +... for i in range(3): +... yield None +... yield None +... return +>>> list(g()) +[None, None, None, None] + +Ensure that explicitly raising StopIteration acts like any other exception +in try/except, not like a return. + +>>> def g(): +... yield 1 +... try: +... raise StopIteration +... except: +... yield 2 +... yield 3 +>>> list(g()) +[1, 2, 3] + +Next one was posted to c.l.py. 
+ +>>> def gcomb(x, k): +... "Generate all combinations of k elements from list x." +... +... if k > len(x): +... return +... if k == 0: +... yield [] +... else: +... first, rest = x[0], x[1:] +... # A combination does or doesn't contain first. +... # If it does, the remainder is a k-1 comb of rest. +... for c in gcomb(rest, k-1): +... c.insert(0, first) +... yield c +... # If it doesn't contain first, it's a k comb of rest. +... for c in gcomb(rest, k): +... yield c + +>>> seq = range(1, 5) +>>> for k in range(len(seq) + 2): +... print "%d-combs of %s:" % (k, seq) +... for c in gcomb(seq, k): +... print " ", c +0-combs of [1, 2, 3, 4]: + [] +1-combs of [1, 2, 3, 4]: + [1] + [2] + [3] + [4] +2-combs of [1, 2, 3, 4]: + [1, 2] + [1, 3] + [1, 4] + [2, 3] + [2, 4] + [3, 4] +3-combs of [1, 2, 3, 4]: + [1, 2, 3] + [1, 2, 4] + [1, 3, 4] + [2, 3, 4] +4-combs of [1, 2, 3, 4]: + [1, 2, 3, 4] +5-combs of [1, 2, 3, 4]: + +From the Iterators list, about the types of these things. + +>>> def g(): +... yield 1 +... +>>> type(g) + +>>> i = g() +>>> type(i) + +>>> [s for s in dir(i) if not s.startswith('_')] +['close', 'gi_frame', 'gi_running', 'next', 'send', 'throw'] +>>> print i.next.__doc__ +x.next() -> the next value, or raise StopIteration +>>> iter(i) is i +True +>>> import types +>>> isinstance(i, types.GeneratorType) +True + +And more, added later. + +>>> i.gi_running +0 +>>> type(i.gi_frame) + +>>> i.gi_running = 42 +Traceback (most recent call last): + ... +TypeError: readonly attribute +>>> def g(): +... yield me.gi_running +>>> me = g() +>>> me.gi_running +0 +>>> me.next() +1 +>>> me.gi_running +0 + +A clever union-find implementation from c.l.py, due to David Eppstein. +Sent: Friday, June 29, 2001 12:16 PM +To: python-list at python.org +Subject: Re: PEP 255: Simple Generators + +>>> class disjointSet: +... def __init__(self, name): +... self.name = name +... self.parent = None +... self.generator = self.generate() +... +... def generate(self): +... while not self.parent: +... yield self +... for x in self.parent.generator: +... yield x +... +... def find(self): +... return self.generator.next() +... +... def union(self, parent): +... if self.parent: +... raise ValueError("Sorry, I'm not a root!") +... self.parent = parent +... +... def __str__(self): +... return self.name + +>>> names = "ABCDEFGHIJKLM" +>>> sets = [disjointSet(name) for name in names] +>>> roots = sets[:] + +>>> import random +>>> gen = random.WichmannHill(42) +>>> while 1: +... for s in sets: +... print "%s->%s" % (s, s.find()), +... print +... if len(roots) > 1: +... s1 = gen.choice(roots) +... roots.remove(s1) +... s2 = gen.choice(roots) +... s1.union(s2) +... print "merged", s1, "into", s2 +... else: +... 
break +A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M +merged D into G +A->A B->B C->C D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M +merged C into F +A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M +merged L into A +A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->A M->M +merged H into E +A->A B->B C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M +merged B into E +A->A B->E C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M +merged J into G +A->A B->E C->F D->G E->E F->F G->G H->E I->I J->G K->K L->A M->M +merged E into G +A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->M +merged M into G +A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->G +merged I into K +A->A B->G C->F D->G E->G F->F G->G H->G I->K J->G K->K L->A M->G +merged K into A +A->A B->G C->F D->G E->G F->F G->G H->G I->A J->G K->A L->A M->G +merged F into A +A->A B->G C->A D->G E->G F->A G->G H->G I->A J->G K->A L->A M->G +merged A into G +A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G + +""" +# Emacs turd ' + +# Fun tests (for sufficiently warped notions of "fun"). + +fun_tests = """ + +Build up to a recursive Sieve of Eratosthenes generator. + +>>> def firstn(g, n): +... return [g.next() for i in range(n)] + +>>> def intsfrom(i): +... while 1: +... yield i +... i += 1 + +>>> firstn(intsfrom(5), 7) +[5, 6, 7, 8, 9, 10, 11] + +>>> def exclude_multiples(n, ints): +... for i in ints: +... if i % n: +... yield i + +>>> firstn(exclude_multiples(3, intsfrom(1)), 6) +[1, 2, 4, 5, 7, 8] + +>>> def sieve(ints): +... prime = ints.next() +... yield prime +... not_divisible_by_prime = exclude_multiples(prime, ints) +... for p in sieve(not_divisible_by_prime): +... yield p + +>>> primes = sieve(intsfrom(2)) +>>> firstn(primes, 20) +[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71] + + +Another famous problem: generate all integers of the form + 2**i * 3**j * 5**k +in increasing order, where i,j,k >= 0. Trickier than it may look at first! +Try writing it without generators, and correctly, and without generating +3 internal results for each result output. + +>>> def times(n, g): +... for i in g: +... yield n * i +>>> firstn(times(10, intsfrom(1)), 10) +[10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + +>>> def merge(g, h): +... ng = g.next() +... nh = h.next() +... while 1: +... if ng < nh: +... yield ng +... ng = g.next() +... elif ng > nh: +... yield nh +... nh = h.next() +... else: +... yield ng +... ng = g.next() +... nh = h.next() + +The following works, but is doing a whale of a lot of redundant work -- +it's not clear how to get the internal uses of m235 to share a single +generator. Note that me_times2 (etc) each need to see every element in the +result sequence. So this is an example where lazy lists are more natural +(you can look at the head of a lazy list any number of times). + +>>> def m235(): +... yield 1 +... me_times2 = times(2, m235()) +... me_times3 = times(3, m235()) +... me_times5 = times(5, m235()) +... for i in merge(merge(me_times2, +... me_times3), +... me_times5): +... yield i + +Don't print "too many" of these -- the implementation above is extremely +inefficient: each call of m235() leads to 3 recursive calls, and in +turn each of those 3 more, and so on, and so on, until we've descended +enough levels to satisfy the print stmts. Very odd: when I printed 5 +lines of results below, this managed to screw up Win98's malloc in "the +usual" way, i.e. 
the heap grew over 4Mb so Win98 started fragmenting +address space, and it *looked* like a very slow leak. + +>>> result = m235() +>>> for i in range(3): +... print firstn(result, 15) +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] + +Heh. Here's one way to get a shared list, complete with an excruciating +namespace renaming trick. The *pretty* part is that the times() and merge() +functions can be reused as-is, because they only assume their stream +arguments are iterable -- a LazyList is the same as a generator to times(). + +>>> class LazyList: +... def __init__(self, g): +... self.sofar = [] +... self.fetch = g.next +... +... def __getitem__(self, i): +... sofar, fetch = self.sofar, self.fetch +... while i >= len(sofar): +... sofar.append(fetch()) +... return sofar[i] + +>>> def m235(): +... yield 1 +... # Gack: m235 below actually refers to a LazyList. +... me_times2 = times(2, m235) +... me_times3 = times(3, m235) +... me_times5 = times(5, m235) +... for i in merge(merge(me_times2, +... me_times3), +... me_times5): +... yield i + +Print as many of these as you like -- *this* implementation is memory- +efficient. + +>>> m235 = LazyList(m235()) +>>> for i in range(5): +... print [m235[j] for j in range(15*i, 15*(i+1))] +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] +[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] +[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] + +Ye olde Fibonacci generator, LazyList style. + +>>> def fibgen(a, b): +... +... def sum(g, h): +... while 1: +... yield g.next() + h.next() +... +... def tail(g): +... g.next() # throw first away +... for x in g: +... yield x +... +... yield a +... yield b +... for s in sum(iter(fib), +... tail(iter(fib))): +... yield s + +>>> fib = LazyList(fibgen(1, 2)) +>>> firstn(iter(fib), 17) +[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] + + +Running after your tail with itertools.tee (new in version 2.4) + +The algorithms "m235" (Hamming) and Fibonacci presented above are both +examples of a whole family of FP (functional programming) algorithms +where a function produces and returns a list while the production algorithm +suppose the list as already produced by recursively calling itself. +For these algorithms to work, they must: + +- produce at least a first element without presupposing the existence of + the rest of the list +- produce their elements in a lazy manner + +To work efficiently, the beginning of the list must not be recomputed over +and over again. This is ensured in most FP languages as a built-in feature. +In python, we have to explicitly maintain a list of already computed results +and abandon genuine recursivity. + +This is what had been attempted above with the LazyList class. One problem +with that class is that it keeps a list of all of the generated results and +therefore continually grows. This partially defeats the goal of the generator +concept, viz. produce the results only as needed instead of producing them +all and thereby wasting memory. + +Thanks to itertools.tee, it is now clear "how to get the internal uses of +m235 to share a single generator". + +>>> from itertools import tee +>>> def m235(): +... def _m235(): +... yield 1 +... 
for n in merge(times(2, m2), +... merge(times(3, m3), +... times(5, m5))): +... yield n +... m1 = _m235() +... m2, m3, m5, mRes = tee(m1, 4) +... return mRes + +>>> it = m235() +>>> for i in range(5): +... print firstn(it, 15) +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] +[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] +[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] + +The "tee" function does just what we want. It internally keeps a generated +result for as long as it has not been "consumed" from all of the duplicated +iterators, whereupon it is deleted. You can therefore print the hamming +sequence during hours without increasing memory usage, or very little. + +The beauty of it is that recursive running-after-their-tail FP algorithms +are quite straightforwardly expressed with this Python idiom. + +Ye olde Fibonacci generator, tee style. + +>>> def fib(): +... +... def _isum(g, h): +... while 1: +... yield g.next() + h.next() +... +... def _fib(): +... yield 1 +... yield 2 +... fibTail.next() # throw first away +... for res in _isum(fibHead, fibTail): +... yield res +... +... realfib = _fib() +... fibHead, fibTail, fibRes = tee(realfib, 3) +... return fibRes + +>>> firstn(fib(), 17) +[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] + +""" + +# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0 +# hackery. + +syntax_tests = """ + +>>> def f(): +... return 22 +... yield 1 +Traceback (most recent call last): + .. +SyntaxError: 'return' with argument inside generator (, line 3) + +>>> def f(): +... yield 1 +... return 22 +Traceback (most recent call last): + .. +SyntaxError: 'return' with argument inside generator (, line 3) + +"return None" is not the same as "return" in a generator: + +>>> def f(): +... yield 1 +... return None +Traceback (most recent call last): + .. +SyntaxError: 'return' with argument inside generator (, line 3) + +These are fine: + +>>> def f(): +... yield 1 +... return + +>>> def f(): +... try: +... yield 1 +... finally: +... pass + +>>> def f(): +... try: +... try: +... 1//0 +... except ZeroDivisionError: +... yield 666 +... except: +... pass +... finally: +... pass + +>>> def f(): +... try: +... try: +... yield 12 +... 1//0 +... except ZeroDivisionError: +... yield 666 +... except: +... try: +... x = 12 +... finally: +... yield 12 +... except: +... return +>>> list(f()) +[12, 666] + +>>> def f(): +... yield +>>> type(f()) + + + +>>> def f(): +... if 0: +... yield +>>> type(f()) + + + +>>> def f(): +... if 0: +... yield 1 +>>> type(f()) + + +>>> def f(): +... if "": +... yield None +>>> type(f()) + + +>>> def f(): +... return +... try: +... if x==4: +... pass +... elif 0: +... try: +... 1//0 +... except SyntaxError: +... pass +... else: +... if 0: +... while 12: +... x += 1 +... yield 2 # don't blink +... f(a, b, c, d, e) +... else: +... pass +... except: +... x = 1 +... return +>>> type(f()) + + +>>> def f(): +... if 0: +... def g(): +... yield 1 +... +>>> type(f()) + + +>>> def f(): +... if 0: +... class C: +... def __init__(self): +... yield 1 +... def f(self): +... yield 2 +>>> type(f()) + + +>>> def f(): +... if 0: +... return +... if 0: +... yield 2 +>>> type(f()) + + + +>>> def f(): +... if 0: +... lambda x: x # shouldn't trigger here +... return # or here +... def f(i): +... return 2*i # or here +... if 0: +... 
return 3 # but *this* sucks (line 8) +... if 0: +... yield 2 # because it's a generator (line 10) +Traceback (most recent call last): +SyntaxError: 'return' with argument inside generator (, line 10) + +This one caused a crash (see SF bug 567538): + +>>> def f(): +... for i in range(3): +... try: +... continue +... finally: +... yield i +... +>>> g = f() +>>> print g.next() +0 +>>> print g.next() +1 +>>> print g.next() +2 +>>> print g.next() +Traceback (most recent call last): +StopIteration +""" + +# conjoin is a simple backtracking generator, named in honor of Icon's +# "conjunction" control structure. Pass a list of no-argument functions +# that return iterable objects. Easiest to explain by example: assume the +# function list [x, y, z] is passed. Then conjoin acts like: +# +# def g(): +# values = [None] * 3 +# for values[0] in x(): +# for values[1] in y(): +# for values[2] in z(): +# yield values +# +# So some 3-lists of values *may* be generated, each time we successfully +# get into the innermost loop. If an iterator fails (is exhausted) before +# then, it "backtracks" to get the next value from the nearest enclosing +# iterator (the one "to the left"), and starts all over again at the next +# slot (pumps a fresh iterator). Of course this is most useful when the +# iterators have side-effects, so that which values *can* be generated at +# each slot depend on the values iterated at previous slots. + +def conjoin(gs): + + values = [None] * len(gs) + + def gen(i, values=values): + if i >= len(gs): + yield values + else: + for values[i] in gs[i](): + for x in gen(i+1): + yield x + + for x in gen(0): + yield x + +# That works fine, but recursing a level and checking i against len(gs) for +# each item produced is inefficient. By doing manual loop unrolling across +# generator boundaries, it's possible to eliminate most of that overhead. +# This isn't worth the bother *in general* for generators, but conjoin() is +# a core building block for some CPU-intensive generator applications. + +def conjoin(gs): + + n = len(gs) + values = [None] * n + + # Do one loop nest at time recursively, until the # of loop nests + # remaining is divisible by 3. + + def gen(i, values=values): + if i >= n: + yield values + + elif (n-i) % 3: + ip1 = i+1 + for values[i] in gs[i](): + for x in gen(ip1): + yield x + + else: + for x in _gen3(i): + yield x + + # Do three loop nests at a time, recursing only if at least three more + # remain. Don't call directly: this is an internal optimization for + # gen's use. + + def _gen3(i, values=values): + assert i < n and (n-i) % 3 == 0 + ip1, ip2, ip3 = i+1, i+2, i+3 + g, g1, g2 = gs[i : ip3] + + if ip3 >= n: + # These are the last three, so we can yield values directly. + for values[i] in g(): + for values[ip1] in g1(): + for values[ip2] in g2(): + yield values + + else: + # At least 6 loop nests remain; peel off 3 and recurse for the + # rest. + for values[i] in g(): + for values[ip1] in g1(): + for values[ip2] in g2(): + for x in _gen3(ip3): + yield x + + for x in gen(0): + yield x + +# And one more approach: For backtracking apps like the Knight's Tour +# solver below, the number of backtracking levels can be enormous (one +# level per square, for the Knight's Tour, so that e.g. a 100x100 board +# needs 10,000 levels). In such cases Python is likely to run out of +# stack space due to recursion. So here's a recursion-free version of +# conjoin too. +# NOTE WELL: This allows large problems to be solved with only trivial +# demands on stack space. 
Without explicitly resumable generators, this is +# much harder to achieve. OTOH, this is much slower (up to a factor of 2) +# than the fancy unrolled recursive conjoin. + +def flat_conjoin(gs): # rename to conjoin to run tests with this instead + n = len(gs) + values = [None] * n + iters = [None] * n + _StopIteration = StopIteration # make local because caught a *lot* + i = 0 + while 1: + # Descend. + try: + while i < n: + it = iters[i] = gs[i]().next + values[i] = it() + i += 1 + except _StopIteration: + pass + else: + assert i == n + yield values + + # Backtrack until an older iterator can be resumed. + i -= 1 + while i >= 0: + try: + values[i] = iters[i]() + # Success! Start fresh at next level. + i += 1 + break + except _StopIteration: + # Continue backtracking. + i -= 1 + else: + assert i < 0 + break + +# A conjoin-based N-Queens solver. + +class Queens: + def __init__(self, n): + self.n = n + rangen = range(n) + + # Assign a unique int to each column and diagonal. + # columns: n of those, range(n). + # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along + # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0- + # based. + # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along + # each, smallest i+j is 0, largest is 2n-2. + + # For each square, compute a bit vector of the columns and + # diagonals it covers, and for each row compute a function that + # generates the possiblities for the columns in that row. + self.rowgenerators = [] + for i in rangen: + rowuses = [(1L << j) | # column ordinal + (1L << (n + i-j + n-1)) | # NW-SE ordinal + (1L << (n + 2*n-1 + i+j)) # NE-SW ordinal + for j in rangen] + + def rowgen(rowuses=rowuses): + for j in rangen: + uses = rowuses[j] + if uses & self.used == 0: + self.used |= uses + yield j + self.used &= ~uses + + self.rowgenerators.append(rowgen) + + # Generate solutions. + def solve(self): + self.used = 0 + for row2col in conjoin(self.rowgenerators): + yield row2col + + def printsolution(self, row2col): + n = self.n + assert n == len(row2col) + sep = "+" + "-+" * n + print sep + for i in range(n): + squares = [" " for j in range(n)] + squares[row2col[i]] = "Q" + print "|" + "|".join(squares) + "|" + print sep + +# A conjoin-based Knight's Tour solver. This is pretty sophisticated +# (e.g., when used with flat_conjoin above, and passing hard=1 to the +# constructor, a 200x200 Knight's Tour was found quickly -- note that we're +# creating 10s of thousands of generators then!), and is lengthy. + +class Knights: + def __init__(self, m, n, hard=0): + self.m, self.n = m, n + + # solve() will set up succs[i] to be a list of square #i's + # successors. + succs = self.succs = [] + + # Remove i0 from each of its successor's successor lists, i.e. + # successors can't go back to i0 again. Return 0 if we can + # detect this makes a solution impossible, else return 1. + + def remove_from_successors(i0, len=len): + # If we remove all exits from a free square, we're dead: + # even if we move to it next, we can't leave it again. + # If we create a square with one exit, we must visit it next; + # else somebody else will have to visit it, and since there's + # only one adjacent, there won't be a way to leave it again. + # Finelly, if we create more than one free square with a + # single exit, we can only move to one of them next, leaving + # the other one a dead end. 
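# Aside -- an illustrative, standalone sketch (not part of the committed file;
# the name _demo_prune is hypothetical) of the pruning rule described in the
# comment above, on a three-square line A-B-C whose successors are the
# immediate neighbours.
def _demo_prune(succs, i0):
    # Fail if some square would lose its last exit (ne0) or if more than one
    # square would be left with only a single exit (ne1).
    ne0 = ne1 = 0
    for i in succs[i0]:
        remaining = [s for s in succs[i] if s != i0]
        if len(remaining) == 0:
            ne0 += 1
        elif len(remaining) == 1:
            ne1 += 1
    return ne0 == 0 and ne1 < 2

assert _demo_prune({'A': ['B'], 'B': ['A', 'C'], 'C': ['B']}, 'A')      # B keeps exit C
assert not _demo_prune({'A': ['B', 'C'], 'B': ['A'], 'C': ['A']}, 'A')  # B and C both dead-end
# The committed remove_from_successors body follows.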
+ ne0 = ne1 = 0 + for i in succs[i0]: + s = succs[i] + s.remove(i0) + e = len(s) + if e == 0: + ne0 += 1 + elif e == 1: + ne1 += 1 + return ne0 == 0 and ne1 < 2 + + # Put i0 back in each of its successor's successor lists. + + def add_to_successors(i0): + for i in succs[i0]: + succs[i].append(i0) + + # Generate the first move. + def first(): + if m < 1 or n < 1: + return + + # Since we're looking for a cycle, it doesn't matter where we + # start. Starting in a corner makes the 2nd move easy. + corner = self.coords2index(0, 0) + remove_from_successors(corner) + self.lastij = corner + yield corner + add_to_successors(corner) + + # Generate the second moves. + def second(): + corner = self.coords2index(0, 0) + assert self.lastij == corner # i.e., we started in the corner + if m < 3 or n < 3: + return + assert len(succs[corner]) == 2 + assert self.coords2index(1, 2) in succs[corner] + assert self.coords2index(2, 1) in succs[corner] + # Only two choices. Whichever we pick, the other must be the + # square picked on move m*n, as it's the only way to get back + # to (0, 0). Save its index in self.final so that moves before + # the last know it must be kept free. + for i, j in (1, 2), (2, 1): + this = self.coords2index(i, j) + final = self.coords2index(3-i, 3-j) + self.final = final + + remove_from_successors(this) + succs[final].append(corner) + self.lastij = this + yield this + succs[final].remove(corner) + add_to_successors(this) + + # Generate moves 3 thru m*n-1. + def advance(len=len): + # If some successor has only one exit, must take it. + # Else favor successors with fewer exits. + candidates = [] + for i in succs[self.lastij]: + e = len(succs[i]) + assert e > 0, "else remove_from_successors() pruning flawed" + if e == 1: + candidates = [(e, i)] + break + candidates.append((e, i)) + else: + candidates.sort() + + for e, i in candidates: + if i != self.final: + if remove_from_successors(i): + self.lastij = i + yield i + add_to_successors(i) + + # Generate moves 3 thru m*n-1. Alternative version using a + # stronger (but more expensive) heuristic to order successors. + # Since the # of backtracking levels is m*n, a poor move early on + # can take eons to undo. Smallest square board for which this + # matters a lot is 52x52. + def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len): + # If some successor has only one exit, must take it. + # Else favor successors with fewer exits. + # Break ties via max distance from board centerpoint (favor + # corners and edges whenever possible). + candidates = [] + for i in succs[self.lastij]: + e = len(succs[i]) + assert e > 0, "else remove_from_successors() pruning flawed" + if e == 1: + candidates = [(e, 0, i)] + break + i1, j1 = self.index2coords(i) + d = (i1 - vmid)**2 + (j1 - hmid)**2 + candidates.append((e, -d, i)) + else: + candidates.sort() + + for e, d, i in candidates: + if i != self.final: + if remove_from_successors(i): + self.lastij = i + yield i + add_to_successors(i) + + # Generate the last move. 
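# Aside -- a standalone illustration (not part of the committed file; the name
# candidates_demo is hypothetical) of the ordering advance_hard relies on:
# sorting (exits, -distance, index) tuples puts squares with the fewest
# remaining exits first and, among ties, the square farthest from the board
# centre first, because the distance is negated.
candidates_demo = [(3, -2.5, 17), (2, -0.5, 4), (2, -8.0, 31)]
candidates_demo.sort()
assert [i for e, d, i in candidates_demo] == [31, 4, 17]
# The committed generator for the final move follows.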
+ def last(): + assert self.final in succs[self.lastij] + yield self.final + + if m*n < 4: + self.squaregenerators = [first] + else: + self.squaregenerators = [first, second] + \ + [hard and advance_hard or advance] * (m*n - 3) + \ + [last] + + def coords2index(self, i, j): + assert 0 <= i < self.m + assert 0 <= j < self.n + return i * self.n + j + + def index2coords(self, index): + assert 0 <= index < self.m * self.n + return divmod(index, self.n) + + def _init_board(self): + succs = self.succs + del succs[:] + m, n = self.m, self.n + c2i = self.coords2index + + offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2), + (-1, -2), (-2, -1), (-2, 1), (-1, 2)] + rangen = range(n) + for i in range(m): + for j in rangen: + s = [c2i(i+io, j+jo) for io, jo in offsets + if 0 <= i+io < m and + 0 <= j+jo < n] + succs.append(s) + + # Generate solutions. + def solve(self): + self._init_board() + for x in conjoin(self.squaregenerators): + yield x + + def printsolution(self, x): + m, n = self.m, self.n + assert len(x) == m*n + w = len(str(m*n)) + format = "%" + str(w) + "d" + + squares = [[None] * n for i in range(m)] + k = 1 + for i in x: + i1, j1 = self.index2coords(i) + squares[i1][j1] = format % k + k += 1 + + sep = "+" + ("-" * w + "+") * n + print sep + for i in range(m): + row = squares[i] + print "|" + "|".join(row) + "|" + print sep + +conjoin_tests = """ + +Generate the 3-bit binary numbers in order. This illustrates dumbest- +possible use of conjoin, just to generate the full cross-product. + +>>> for c in conjoin([lambda: iter((0, 1))] * 3): +... print c +[0, 0, 0] +[0, 0, 1] +[0, 1, 0] +[0, 1, 1] +[1, 0, 0] +[1, 0, 1] +[1, 1, 0] +[1, 1, 1] + +For efficiency in typical backtracking apps, conjoin() yields the same list +object each time. So if you want to save away a full account of its +generated sequence, you need to copy its results. + +>>> def gencopy(iterator): +... for x in iterator: +... yield x[:] + +>>> for n in range(10): +... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n))) +... print n, len(all), all[0] == [0] * n, all[-1] == [1] * n +0 1 True True +1 2 True True +2 4 True True +3 8 True True +4 16 True True +5 32 True True +6 64 True True +7 128 True True +8 256 True True +9 512 True True + +And run an 8-queens solver. + +>>> q = Queens(8) +>>> LIMIT = 2 +>>> count = 0 +>>> for row2col in q.solve(): +... count += 1 +... if count <= LIMIT: +... print "Solution", count +... q.printsolution(row2col) +Solution 1 ++-+-+-+-+-+-+-+-+ +|Q| | | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | |Q| | | | ++-+-+-+-+-+-+-+-+ +| | | | | | | |Q| ++-+-+-+-+-+-+-+-+ +| | | | | |Q| | | ++-+-+-+-+-+-+-+-+ +| | |Q| | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | | |Q| | ++-+-+-+-+-+-+-+-+ +| |Q| | | | | | | ++-+-+-+-+-+-+-+-+ +| | | |Q| | | | | ++-+-+-+-+-+-+-+-+ +Solution 2 ++-+-+-+-+-+-+-+-+ +|Q| | | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | |Q| | | ++-+-+-+-+-+-+-+-+ +| | | | | | | |Q| ++-+-+-+-+-+-+-+-+ +| | |Q| | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | | |Q| | ++-+-+-+-+-+-+-+-+ +| | | |Q| | | | | ++-+-+-+-+-+-+-+-+ +| |Q| | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | |Q| | | | ++-+-+-+-+-+-+-+-+ + +>>> print count, "solutions in all." +92 solutions in all. + +And run a Knight's Tour on a 10x10 board. Note that there are about +20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion. + +>>> k = Knights(10, 10) +>>> LIMIT = 2 +>>> count = 0 +>>> for x in k.solve(): +... count += 1 +... if count <= LIMIT: +... print "Solution", count +... k.printsolution(x) +... else: +... 
break +Solution 1 ++---+---+---+---+---+---+---+---+---+---+ +| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8| ++---+---+---+---+---+---+---+---+---+---+ +| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11| ++---+---+---+---+---+---+---+---+---+---+ +| 59|100| 73| 36| 41| 56| 39| 32| 9| 6| ++---+---+---+---+---+---+---+---+---+---+ +| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31| ++---+---+---+---+---+---+---+---+---+---+ +| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50| ++---+---+---+---+---+---+---+---+---+---+ +| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13| ++---+---+---+---+---+---+---+---+---+---+ +| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44| ++---+---+---+---+---+---+---+---+---+---+ +| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17| ++---+---+---+---+---+---+---+---+---+---+ +| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66| ++---+---+---+---+---+---+---+---+---+---+ +| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15| ++---+---+---+---+---+---+---+---+---+---+ +Solution 2 ++---+---+---+---+---+---+---+---+---+---+ +| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8| ++---+---+---+---+---+---+---+---+---+---+ +| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11| ++---+---+---+---+---+---+---+---+---+---+ +| 59|100| 73| 36| 41| 56| 39| 32| 9| 6| ++---+---+---+---+---+---+---+---+---+---+ +| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31| ++---+---+---+---+---+---+---+---+---+---+ +| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50| ++---+---+---+---+---+---+---+---+---+---+ +| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13| ++---+---+---+---+---+---+---+---+---+---+ +| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44| ++---+---+---+---+---+---+---+---+---+---+ +| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17| ++---+---+---+---+---+---+---+---+---+---+ +| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66| ++---+---+---+---+---+---+---+---+---+---+ +| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15| ++---+---+---+---+---+---+---+---+---+---+ +""" + +weakref_tests = """\ +Generators are weakly referencable: + +>>> import weakref +>>> def gen(): +... yield 'foo!' +... +>>> wr = weakref.ref(gen) +>>> wr() is gen +True +>>> p = weakref.proxy(gen) + +Generator-iterators are weakly referencable as well: + +>>> gi = gen() +>>> wr = weakref.ref(gi) +>>> wr() is gi +True +>>> p = weakref.proxy(gi) +>>> list(p) +['foo!'] + +""" + +coroutine_tests = """\ +Sending a value into a started generator: + +>>> def f(): +... print (yield 1) +... yield 2 +>>> g = f() +>>> g.next() +1 +>>> g.send(42) +42 +2 + +Sending a value into a new generator produces a TypeError: + +>>> f().send("foo") +Traceback (most recent call last): +... +TypeError: can't send non-None value to a just-started generator + + +Yield by itself yields None: + +>>> def f(): yield +>>> list(f()) +[None] + + + +An obscene abuse of a yield expression within a generator expression: + +>>> list((yield 21) for i in range(4)) +[21, None, 21, None, 21, None, 21, None] + +And a more sane, but still weird usage: + +>>> def f(): list(i for i in [(yield 26)]) +>>> type(f()) + + + +A yield expression with augmented assignment. + +>>> def coroutine(seq): +... count = 0 +... while count < 200: +... count += yield +... seq.append(count) +>>> seq = [] +>>> c = coroutine(seq) +>>> c.next() +>>> print seq +[] +>>> c.send(10) +>>> print seq +[10] +>>> c.send(10) +>>> print seq +[10, 20] +>>> c.send(10) +>>> print seq +[10, 20, 30] + + +Check some syntax errors for yield expressions: + +>>> f=lambda: (yield 1),(yield 2) +Traceback (most recent call last): + ... +SyntaxError: 'yield' outside function (, line 1) + +>>> def f(): return lambda x=(yield): 1 +Traceback (most recent call last): + ... 
+SyntaxError: 'return' with argument inside generator (, line 1) + +>>> def f(): x = yield = y +Traceback (most recent call last): + ... +SyntaxError: assignment to yield expression not possible (, line 1) + +>>> def f(): (yield bar) = y +Traceback (most recent call last): + ... +SyntaxError: can't assign to yield expression (, line 1) + +>>> def f(): (yield bar) += y +Traceback (most recent call last): + ... +SyntaxError: augmented assignment to yield expression not possible (, line 1) + + +Now check some throw() conditions: + +>>> def f(): +... while True: +... try: +... print (yield) +... except ValueError,v: +... print "caught ValueError (%s)" % (v), +>>> import sys +>>> g = f() +>>> g.next() + +>>> g.throw(ValueError) # type only +caught ValueError () + +>>> g.throw(ValueError("xyz")) # value only +caught ValueError (xyz) + +>>> g.throw(ValueError, ValueError(1)) # value+matching type +caught ValueError (1) + +>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped +caught ValueError (1) + +>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback +caught ValueError (1) + +>>> g.throw(ValueError(1), "foo") # bad args +Traceback (most recent call last): + ... +TypeError: instance exception may not have a separate value + +>>> g.throw(ValueError, "foo", 23) # bad args +Traceback (most recent call last): + ... +TypeError: throw() third argument must be a traceback object + +>>> def throw(g,exc): +... try: +... raise exc +... except: +... g.throw(*sys.exc_info()) +>>> throw(g,ValueError) # do it with traceback included +caught ValueError () + +>>> g.send(1) +1 + +>>> throw(g,TypeError) # terminate the generator +Traceback (most recent call last): + ... +TypeError + +>>> print g.gi_frame +None + +>>> g.send(2) +Traceback (most recent call last): + ... +StopIteration + +>>> g.throw(ValueError,6) # throw on closed generator +Traceback (most recent call last): + ... +ValueError: 6 + +>>> f().throw(ValueError,7) # throw on just-opened generator +Traceback (most recent call last): + ... +ValueError: 7 + +>>> f().throw("abc") # throw on just-opened generator +Traceback (most recent call last): + ... +abc + +Now let's try closing a generator: + +>>> def f(): +... try: yield +... except GeneratorExit: +... print "exiting" + +>>> g = f() +>>> g.next() +>>> g.close() +exiting +>>> g.close() # should be no-op now + +>>> f().close() # close on just-opened generator should be fine + +>>> def f(): yield # an even simpler generator +>>> f().close() # close before opening +>>> g = f() +>>> g.next() +>>> g.close() # close normally + +And finalization: + +>>> def f(): +... try: yield +... finally: +... print "exiting" + +>>> g = f() +>>> g.next() +>>> del g +exiting + + +Now let's try some ill-behaved generators: + +>>> def f(): +... try: yield +... except GeneratorExit: +... yield "foo!" +>>> g = f() +>>> g.next() +>>> g.close() +Traceback (most recent call last): + ... +RuntimeError: generator ignored GeneratorExit +>>> g.close() + + +Our ill-behaved code should be invoked during GC: + +>>> import sys, StringIO +>>> old, sys.stderr = sys.stderr, StringIO.StringIO() +>>> g = f() +>>> g.next() +>>> del g +>>> sys.stderr.getvalue().startswith( +... "Exception exceptions.RuntimeError: 'generator ignored GeneratorExit' in " +... ) +True +>>> sys.stderr = old + + +And errors thrown during closing should propagate: + +>>> def f(): +... try: yield +... except GeneratorExit: +... raise TypeError("fie!") +>>> g = f() +>>> g.next() +>>> g.close() +Traceback (most recent call last): + ... 
+TypeError: fie! + + +Ensure that various yield expression constructs make their +enclosing function a generator: + +>>> def f(): x += yield +>>> type(f()) + + +>>> def f(): x = yield +>>> type(f()) + + +>>> def f(): lambda x=(yield): 1 +>>> type(f()) + + +>>> def f(): x=(i for i in (yield) if (yield)) +>>> type(f()) + + +>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27 +>>> data = [1,2] +>>> g = f(data) +>>> type(g) + +>>> g.send(None) +'a' +>>> data +[1, 2] +>>> g.send(0) +'b' +>>> data +[27, 2] +>>> try: g.send(1) +... except StopIteration: pass +>>> data +[27, 27] + +""" + +refleaks_tests = """ +Prior to adding cycle-GC support to itertools.tee, this code would leak +references. We add it to the standard suite so the routine refleak-tests +would trigger if it starts being uncleanable again. + +>>> import itertools +>>> def leak(): +... class gen: +... def __iter__(self): +... return self +... def next(self): +... return self.item +... g = gen() +... head, tail = itertools.tee(g) +... g.item = head +... return head +>>> it = leak() + +Make sure to also test the involvement of the tee-internal teedataobject, +which stores returned items. + +>>> item = it.next() + + + +This test leaked at one point due to generator finalization/destruction. +It was copied from Lib/test/leakers/test_generator_cycle.py before the file +was removed. + +>>> def leak(): +... def gen(): +... while True: +... yield g +... g = gen() + +>>> leak() + + + +This test isn't really generator related, but rather exception-in-cleanup +related. The coroutine tests (above) just happen to cause an exception in +the generator's __del__ (tp_del) method. We can also test for this +explicitly, without generators. We do have to redirect stderr to avoid +printing warnings and to doublecheck that we actually tested what we wanted +to test. + +>>> import sys, StringIO +>>> old = sys.stderr +>>> try: +... sys.stderr = StringIO.StringIO() +... class Leaker: +... def __del__(self): +... raise RuntimeError +... +... l = Leaker() +... del l +... err = sys.stderr.getvalue().strip() +... err.startswith( +... "Exception exceptions.RuntimeError: RuntimeError() in <" +... ) +... err.endswith("> ignored") +... len(err.splitlines()) +... finally: +... sys.stderr = old +True +True +1 + + + +These refleak tests should perhaps be in a testfile of their own, +test_generators just happened to be the test that drew these out. + +""" + +__test__ = {"tut": tutorial_tests, + "pep": pep_tests, + "email": email_tests, + "fun": fun_tests, + "syntax": syntax_tests, + "conjoin": conjoin_tests, + "weakref": weakref_tests, + "coroutine": coroutine_tests, + "refleaks": refleaks_tests, + } + +# Magic test name that regrtest.py invokes *after* importing this module. +# This worms around a bootstrap problem. +# Note that doctest and regrtest both look in sys.argv for a "-v" argument, +# so this works as expected in both ways of running regrtest. +def test_main(verbose=None): + from test import test_support, test_generators + test_support.run_doctest(test_generators, verbose) + +# This part isn't needed for regrtest, but for running the test directly. 
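# Aside -- an illustrative sketch (not part of the committed file; _Probe and
# _make_tee_cycle are hypothetical names) of one way to double-check that a
# cycle like the tee/generator ones above really is collectable, assuming the
# cycle-GC support discussed above is in place: watch an object in the cycle
# through a weak reference and force a collection.
import gc, itertools, weakref

class _Probe(object):
    pass

def _make_tee_cycle():
    probe = _Probe()
    head, tail = itertools.tee(iter([probe]))
    probe.head = head           # close the cycle: probe -> tee -> source -> probe
    return weakref.ref(probe)

wr = _make_tee_cycle()
gc.collect()
assert wr() is None, "the tee cycle was not collected"
# The direct-run hook described in the comment above follows.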
+if __name__ == "__main__": + test_main(1) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,281 @@ +doctests = """ + +Test simple loop with conditional + + >>> sum(i*i for i in range(10) if i&1 == 1) + 165 + +Test simple nesting + + >>> list((i,j) for i in range(3) for j in range(4) ) + [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)] + +Test nesting with the inner expression dependent on the outer + + >>> list((i,j) for i in range(4) for j in range(i) ) + [(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)] + +Make sure the induction variable is not exposed + + >>> i = 20 + >>> sum(i*i for i in range(5)) + 30 + >>> i + 20 + +Test first class + + >>> g = (i*i for i in range(4)) + >>> type(g) + + >>> list(g) + [0, 1, 4, 9] + +Test direct calls to next() + + >>> g = (i*i for i in range(3)) + >>> g.next() + 0 + >>> g.next() + 1 + >>> g.next() + 4 + >>> g.next() + Traceback (most recent call last): + File "", line 1, in -toplevel- + g.next() + StopIteration + +Does it stay stopped? + + >>> g.next() + Traceback (most recent call last): + File "", line 1, in -toplevel- + g.next() + StopIteration + >>> list(g) + [] + +Test running gen when defining function is out of scope + + >>> def f(n): + ... return (i*i for i in xrange(n)) + >>> list(f(10)) + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + + >>> def f(n): + ... return ((i,j) for i in xrange(3) for j in xrange(n)) + >>> list(f(4)) + [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)] + >>> def f(n): + ... return ((i,j) for i in xrange(3) for j in xrange(4) if j in xrange(n)) + >>> list(f(4)) + [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)] + >>> list(f(2)) + [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)] + +Verify that parenthesis are required in a statement + + >>> def f(n): + ... return i*i for i in xrange(n) + Traceback (most recent call last): + ... + SyntaxError: invalid syntax + +Verify that parenthesis are required when used as a keyword argument value + + >>> dict(a = i for i in xrange(10)) + Traceback (most recent call last): + ... 
+ SyntaxError: invalid syntax + +Verify that parenthesis are required when used as a keyword argument value + + >>> dict(a = (i for i in xrange(10))) #doctest: +ELLIPSIS + {'a': } + +Verify early binding for the outermost for-expression + + >>> x=10 + >>> g = (i*i for i in range(x)) + >>> x = 5 + >>> list(g) + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + +Verify that the outermost for-expression makes an immediate check +for iterability + + >>> (i for i in 6) + Traceback (most recent call last): + File "", line 1, in -toplevel- + (i for i in 6) + TypeError: 'int' object is not iterable + +Verify late binding for the outermost if-expression + + >>> include = (2,4,6,8) + >>> g = (i*i for i in range(10) if i in include) + >>> include = (1,3,5,7,9) + >>> list(g) + [1, 9, 25, 49, 81] + +Verify late binding for the innermost for-expression + + >>> g = ((i,j) for i in range(3) for j in range(x)) + >>> x = 4 + >>> list(g) + [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)] + +Verify re-use of tuples (a side benefit of using genexps over listcomps) + +## >>> tupleids = map(id, ((i,i) for i in xrange(10))) +## >>> int(max(tupleids) - min(tupleids)) +## 0 + +Verify that syntax error's are raised for genexps used as lvalues + + >>> (y for y in (1,2)) = 10 + Traceback (most recent call last): + ... + SyntaxError: assign to generator expression not possible + + >>> (y for y in (1,2)) += 10 + Traceback (most recent call last): + ... + SyntaxError: augmented assign to tuple literal or generator expression not possible + + + +########### Tests borrowed from or inspired by test_generators.py ############ + +Make a generator that acts like range() + + >>> yrange = lambda n: (i for i in xrange(n)) + >>> list(yrange(10)) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + +Generators always return to the most recent caller: + + >>> def creator(): + ... r = yrange(5) + ... print "creator", r.next() + ... return r + >>> def caller(): + ... r = creator() + ... for i in r: + ... print "caller", i + >>> caller() + creator 0 + caller 1 + caller 2 + caller 3 + caller 4 + +Generators can call other generators: + + >>> def zrange(n): + ... for i in yrange(n): + ... 
yield i + >>> list(zrange(5)) + [0, 1, 2, 3, 4] + + +Verify that a gen exp cannot be resumed while it is actively running: + + >>> g = (me.next() for i in xrange(10)) + >>> me = g + >>> me.next() + Traceback (most recent call last): + File "", line 1, in -toplevel- + me.next() + File "", line 1, in + g = (me.next() for i in xrange(10)) + ValueError: generator already executing + +Verify exception propagation + + >>> g = (10 // i for i in (5, 0, 2)) + >>> g.next() + 2 + >>> g.next() + Traceback (most recent call last): + File "", line 1, in -toplevel- + g.next() + File "", line 1, in + g = (10 // i for i in (5, 0, 2)) + ZeroDivisionError: integer division by zero + >>> g.next() + Traceback (most recent call last): + File "", line 1, in -toplevel- + g.next() + StopIteration + +Make sure that None is a valid return value + + >>> list(None for i in xrange(10)) + [None, None, None, None, None, None, None, None, None, None] + +Check that generator attributes are present + + >>> g = (i*i for i in range(3)) + >>> expected = set(['gi_frame', 'gi_running', 'next']) + >>> set(attr for attr in dir(g) if not attr.startswith('__')) >= expected + True + + >>> print g.next.__doc__ + x.next() -> the next value, or raise StopIteration + >>> import types + >>> isinstance(g, types.GeneratorType) + True + +Check the __iter__ slot is defined to return self + + >>> iter(g) is g + True + +Verify that the running flag is set properly + + >>> g = (me.gi_running for i in (0,1)) + >>> me = g + >>> me.gi_running + 0 + >>> me.next() + 1 + >>> me.gi_running + 0 + +Verify that genexps are weakly referencable + + >>> import weakref + >>> g = (i*i for i in range(4)) + >>> wr = weakref.ref(g) + >>> wr() is g + True + >>> p = weakref.proxy(g) + >>> list(p) + [0, 1, 4, 9] + + +""" + + +__test__ = {'doctests' : doctests} + +def test_main(verbose=None): + import sys + from test import test_support + from test import test_genexps + test_support.run_doctest(test_genexps, verbose) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_doctest(test_genexps, verbose) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,889 @@ +# Test iterators. 
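# Aside -- an illustrative recap (not part of the committed file) of the
# binding rules verified by the genexp doctests above: only the outermost
# iterable of a generator expression is evaluated immediately; inner iterables
# and conditions are looked up lazily, when the generator is consumed.
x = 10
g = (i * i for i in range(x))    # range(10) is computed right here (early)
x = 5                            # too late to change the outer loop...
limit = 20
h = (i for i in g if i < limit)  # ...but this condition is read lazily
limit = 3
assert list(h) == [0, 1]         # the rebound limit (3) is what gets used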
+ +import gc +import unittest +from test.test_support import run_unittest, TESTFN, unlink, have_unicode + +# Test result of triple loop (too big to inline) +TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2), + (0, 1, 0), (0, 1, 1), (0, 1, 2), + (0, 2, 0), (0, 2, 1), (0, 2, 2), + + (1, 0, 0), (1, 0, 1), (1, 0, 2), + (1, 1, 0), (1, 1, 1), (1, 1, 2), + (1, 2, 0), (1, 2, 1), (1, 2, 2), + + (2, 0, 0), (2, 0, 1), (2, 0, 2), + (2, 1, 0), (2, 1, 1), (2, 1, 2), + (2, 2, 0), (2, 2, 1), (2, 2, 2)] + +# Helper classes + +class BasicIterClass: + def __init__(self, n): + self.n = n + self.i = 0 + def next(self): + res = self.i + if res >= self.n: + raise StopIteration + self.i = res + 1 + return res + +class IteratingSequenceClass: + def __init__(self, n): + self.n = n + def __iter__(self): + return BasicIterClass(self.n) + +class SequenceClass: + def __init__(self, n): + self.n = n + def __getitem__(self, i): + if 0 <= i < self.n: + return i + else: + raise IndexError + +# Main test suite + +class TestCase(unittest.TestCase): + + # Helper to check that an iterator returns a given sequence + def check_iterator(self, it, seq): + res = [] + while 1: + try: + val = it.next() + except StopIteration: + break + res.append(val) + self.assertEqual(res, seq) + + # Helper to check that a for loop generates a given sequence + def check_for_loop(self, expr, seq): + res = [] + for val in expr: + res.append(val) + self.assertEqual(res, seq) + + # Test basic use of iter() function + def test_iter_basic(self): + self.check_iterator(iter(range(10)), range(10)) + + # Test that iter(iter(x)) is the same as iter(x) + def test_iter_idempotency(self): + seq = range(10) + it = iter(seq) + it2 = iter(it) + self.assert_(it is it2) + + # Test that for loops over iterators work + def test_iter_for_loop(self): + self.check_for_loop(iter(range(10)), range(10)) + + # Test several independent iterators over the same list + def test_iter_independence(self): + seq = range(3) + res = [] + for i in iter(seq): + for j in iter(seq): + for k in iter(seq): + res.append((i, j, k)) + self.assertEqual(res, TRIPLETS) + + # Test triple list comprehension using iterators + def test_nested_comprehensions_iter(self): + seq = range(3) + res = [(i, j, k) + for i in iter(seq) for j in iter(seq) for k in iter(seq)] + self.assertEqual(res, TRIPLETS) + + # Test triple list comprehension without iterators + def test_nested_comprehensions_for(self): + seq = range(3) + res = [(i, j, k) for i in seq for j in seq for k in seq] + self.assertEqual(res, TRIPLETS) + + # Test a class with __iter__ in a for loop + def test_iter_class_for(self): + self.check_for_loop(IteratingSequenceClass(10), range(10)) + + # Test a class with __iter__ with explicit iter() + def test_iter_class_iter(self): + self.check_iterator(iter(IteratingSequenceClass(10)), range(10)) + + # Test for loop on a sequence class without __iter__ + def test_seq_class_for(self): + self.check_for_loop(SequenceClass(10), range(10)) + + # Test iter() on a sequence class without __iter__ + def test_seq_class_iter(self): + self.check_iterator(iter(SequenceClass(10)), range(10)) + + # Test two-argument iter() with callable instance + def test_iter_callable(self): + class C: + def __init__(self): + self.i = 0 + def __call__(self): + i = self.i + self.i = i + 1 + if i > 100: + raise IndexError # Emergency stop + return i + self.check_iterator(iter(C(), 10), range(10)) + + # Test two-argument iter() with function + def test_iter_function(self): + def spam(state=[0]): + i = state[0] + state[0] = i+1 + return i + 
self.check_iterator(iter(spam, 10), range(10)) + + # Test two-argument iter() with function that raises StopIteration + def test_iter_function_stop(self): + def spam(state=[0]): + i = state[0] + if i == 10: + raise StopIteration + state[0] = i+1 + return i + self.check_iterator(iter(spam, 20), range(10)) + + # Test exception propagation through function iterator + def test_exception_function(self): + def spam(state=[0]): + i = state[0] + state[0] = i+1 + if i == 10: + raise RuntimeError + return i + res = [] + try: + for x in iter(spam, 20): + res.append(x) + except RuntimeError: + self.assertEqual(res, range(10)) + else: + self.fail("should have raised RuntimeError") + + # Test exception propagation through sequence iterator + def test_exception_sequence(self): + class MySequenceClass(SequenceClass): + def __getitem__(self, i): + if i == 10: + raise RuntimeError + return SequenceClass.__getitem__(self, i) + res = [] + try: + for x in MySequenceClass(20): + res.append(x) + except RuntimeError: + self.assertEqual(res, range(10)) + else: + self.fail("should have raised RuntimeError") + + # Test for StopIteration from __getitem__ + def test_stop_sequence(self): + class MySequenceClass(SequenceClass): + def __getitem__(self, i): + if i == 10: + raise StopIteration + return SequenceClass.__getitem__(self, i) + self.check_for_loop(MySequenceClass(20), range(10)) + + # Test a big range + def test_iter_big_range(self): + self.check_for_loop(iter(range(10000)), range(10000)) + + # Test an empty list + def test_iter_empty(self): + self.check_for_loop(iter([]), []) + + # Test a tuple + def test_iter_tuple(self): + self.check_for_loop(iter((0,1,2,3,4,5,6,7,8,9)), range(10)) + + # Test an xrange + def test_iter_xrange(self): + self.check_for_loop(iter(xrange(10)), range(10)) + + # Test a string + def test_iter_string(self): + self.check_for_loop(iter("abcde"), ["a", "b", "c", "d", "e"]) + + # Test a Unicode string + if have_unicode: + def test_iter_unicode(self): + self.check_for_loop(iter(unicode("abcde")), + [unicode("a"), unicode("b"), unicode("c"), + unicode("d"), unicode("e")]) + + # Test a directory + def test_iter_dict(self): + dict = {} + for i in range(10): + dict[i] = None + self.check_for_loop(dict, dict.keys()) + + # Test a file + def test_iter_file(self): + f = open(TESTFN, "w") + try: + for i in range(5): + f.write("%d\n" % i) + finally: + f.close() + f = open(TESTFN, "r") + try: + self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"]) + self.check_for_loop(f, []) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test list()'s use of iterators. + def test_builtin_list(self): + self.assertEqual(list(SequenceClass(5)), range(5)) + self.assertEqual(list(SequenceClass(0)), []) + self.assertEqual(list(()), []) + self.assertEqual(list(range(10, -1, -1)), range(10, -1, -1)) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(list(d), d.keys()) + + self.assertRaises(TypeError, list, list) + self.assertRaises(TypeError, list, 42) + + f = open(TESTFN, "w") + try: + for i in range(5): + f.write("%d\n" % i) + finally: + f.close() + f = open(TESTFN, "r") + try: + self.assertEqual(list(f), ["0\n", "1\n", "2\n", "3\n", "4\n"]) + f.seek(0, 0) + self.assertEqual(list(f), + ["0\n", "1\n", "2\n", "3\n", "4\n"]) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test tuples()'s use of iterators. 
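# Aside -- an illustrative recap (not part of the committed file; _counter is a
# hypothetical name) of the two-argument iter(callable, sentinel) form
# exercised by the tests above: the callable is invoked repeatedly and
# iteration stops just before the first value equal to the sentinel (or on
# StopIteration).
def _counter(state=[0]):
    state[0] += 1
    return state[0]

assert list(iter(_counter, 4)) == [1, 2, 3]   # stops when _counter() returns 4
# The tuple() constructor tests announced above follow.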
+ def test_builtin_tuple(self): + self.assertEqual(tuple(SequenceClass(5)), (0, 1, 2, 3, 4)) + self.assertEqual(tuple(SequenceClass(0)), ()) + self.assertEqual(tuple([]), ()) + self.assertEqual(tuple(()), ()) + self.assertEqual(tuple("abc"), ("a", "b", "c")) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(tuple(d), tuple(d.keys())) + + self.assertRaises(TypeError, tuple, list) + self.assertRaises(TypeError, tuple, 42) + + f = open(TESTFN, "w") + try: + for i in range(5): + f.write("%d\n" % i) + finally: + f.close() + f = open(TESTFN, "r") + try: + self.assertEqual(tuple(f), ("0\n", "1\n", "2\n", "3\n", "4\n")) + f.seek(0, 0) + self.assertEqual(tuple(f), + ("0\n", "1\n", "2\n", "3\n", "4\n")) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test filter()'s use of iterators. + def test_builtin_filter(self): + self.assertEqual(filter(None, SequenceClass(5)), range(1, 5)) + self.assertEqual(filter(None, SequenceClass(0)), []) + self.assertEqual(filter(None, ()), ()) + self.assertEqual(filter(None, "abc"), "abc") + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(filter(None, d), d.keys()) + + self.assertRaises(TypeError, filter, None, list) + self.assertRaises(TypeError, filter, None, 42) + + class Boolean: + def __init__(self, truth): + self.truth = truth + def __nonzero__(self): + return self.truth + bTrue = Boolean(1) + bFalse = Boolean(0) + + class Seq: + def __init__(self, *args): + self.vals = args + def __iter__(self): + class SeqIter: + def __init__(self, vals): + self.vals = vals + self.i = 0 + def __iter__(self): + return self + def next(self): + i = self.i + self.i = i + 1 + if i < len(self.vals): + return self.vals[i] + else: + raise StopIteration + return SeqIter(self.vals) + + seq = Seq(*([bTrue, bFalse] * 25)) + self.assertEqual(filter(lambda x: not x, seq), [bFalse]*25) + self.assertEqual(filter(lambda x: not x, iter(seq)), [bFalse]*25) + + # Test max() and min()'s use of iterators. + def test_builtin_max_min(self): + self.assertEqual(max(SequenceClass(5)), 4) + self.assertEqual(min(SequenceClass(5)), 0) + self.assertEqual(max(8, -1), 8) + self.assertEqual(min(8, -1), -1) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(max(d), "two") + self.assertEqual(min(d), "one") + self.assertEqual(max(d.itervalues()), 3) + self.assertEqual(min(iter(d.itervalues())), 1) + + f = open(TESTFN, "w") + try: + f.write("medium line\n") + f.write("xtra large line\n") + f.write("itty-bitty line\n") + finally: + f.close() + f = open(TESTFN, "r") + try: + self.assertEqual(min(f), "itty-bitty line\n") + f.seek(0, 0) + self.assertEqual(max(f), "xtra large line\n") + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test map()'s use of iterators. 
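# Aside -- an illustrative note (not part of the committed file) on the
# map(None, ...) calls tested below: with None as the function, map acts as a
# transposer, and shorter iterables are padded with None to the longest length.
assert map(None, [1, 2, 3]) == [1, 2, 3]
assert map(None, [1, 2, 3], 'ab') == [(1, 'a'), (2, 'b'), (3, None)]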
+ def test_builtin_map(self): + self.assertEqual(map(None, SequenceClass(5)), range(5)) + self.assertEqual(map(lambda x: x+1, SequenceClass(5)), range(1, 6)) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(map(None, d), d.keys()) + self.assertEqual(map(lambda k, d=d: (k, d[k]), d), d.items()) + dkeys = d.keys() + expected = [(i < len(d) and dkeys[i] or None, + i, + i < len(d) and dkeys[i] or None) + for i in range(5)] + self.assertEqual(map(None, d, + SequenceClass(5), + iter(d.iterkeys())), + expected) + + f = open(TESTFN, "w") + try: + for i in range(10): + f.write("xy" * i + "\n") # line i has len 2*i+1 + finally: + f.close() + f = open(TESTFN, "r") + try: + self.assertEqual(map(len, f), range(1, 21, 2)) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test zip()'s use of iterators. + def test_builtin_zip(self): + self.assertEqual(zip(), []) + self.assertEqual(zip(*[]), []) + self.assertEqual(zip(*[(1, 2), 'ab']), [(1, 'a'), (2, 'b')]) + + self.assertRaises(TypeError, zip, None) + self.assertRaises(TypeError, zip, range(10), 42) + self.assertRaises(TypeError, zip, range(10), zip) + + self.assertEqual(zip(IteratingSequenceClass(3)), + [(0,), (1,), (2,)]) + self.assertEqual(zip(SequenceClass(3)), + [(0,), (1,), (2,)]) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(d.items(), zip(d, d.itervalues())) + + # Generate all ints starting at constructor arg. + class IntsFrom: + def __init__(self, start): + self.i = start + + def __iter__(self): + return self + + def next(self): + i = self.i + self.i = i+1 + return i + + f = open(TESTFN, "w") + try: + f.write("a\n" "bbb\n" "cc\n") + finally: + f.close() + f = open(TESTFN, "r") + try: + self.assertEqual(zip(IntsFrom(0), f, IntsFrom(-100)), + [(0, "a\n", -100), + (1, "bbb\n", -99), + (2, "cc\n", -98)]) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + self.assertEqual(zip(xrange(5)), [(i,) for i in range(5)]) + + # Classes that lie about their lengths. + class NoGuessLen5: + def __getitem__(self, i): + if i >= 5: + raise IndexError + return i + + class Guess3Len5(NoGuessLen5): + def __len__(self): + return 3 + + class Guess30Len5(NoGuessLen5): + def __len__(self): + return 30 + + self.assertEqual(len(Guess3Len5()), 3) + self.assertEqual(len(Guess30Len5()), 30) + self.assertEqual(zip(NoGuessLen5()), zip(range(5))) + self.assertEqual(zip(Guess3Len5()), zip(range(5))) + self.assertEqual(zip(Guess30Len5()), zip(range(5))) + + expected = [(i, i) for i in range(5)] + for x in NoGuessLen5(), Guess3Len5(), Guess30Len5(): + for y in NoGuessLen5(), Guess3Len5(), Guess30Len5(): + self.assertEqual(zip(x, y), expected) + + # Test reduces()'s use of iterators. + def test_builtin_reduce(self): + from operator import add + self.assertEqual(reduce(add, SequenceClass(5)), 10) + self.assertEqual(reduce(add, SequenceClass(5), 42), 52) + self.assertRaises(TypeError, reduce, add, SequenceClass(0)) + self.assertEqual(reduce(add, SequenceClass(0), 42), 42) + self.assertEqual(reduce(add, SequenceClass(1)), 0) + self.assertEqual(reduce(add, SequenceClass(1), 42), 42) + + d = {"one": 1, "two": 2, "three": 3} + self.assertEqual(reduce(add, d), "".join(d.keys())) + + # This test case will be removed if we don't have Unicode + def test_unicode_join_endcase(self): + + # This class inserts a Unicode object into its argument's natural + # iteration, in the 3rd position. 
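# Aside -- an illustrative sketch (not part of the committed file) of the
# subtlety being tested here: str.join() must fall back to unicode.join() as
# soon as any element turns out to be unicode, even if that is only discovered
# partway through consuming a non-restartable iterator.
joined = " - ".join(["a", unicode("b"), "c"])
assert joined == unicode("a - b - c")
assert isinstance(joined, unicode)
# The helper class described in the comment above follows.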
+ class OhPhooey: + def __init__(self, seq): + self.it = iter(seq) + self.i = 0 + + def __iter__(self): + return self + + def next(self): + i = self.i + self.i = i+1 + if i == 2: + return unicode("fooled you!") + return self.it.next() + + f = open(TESTFN, "w") + try: + f.write("a\n" + "b\n" + "c\n") + finally: + f.close() + + f = open(TESTFN, "r") + # Nasty: string.join(s) can't know whether unicode.join() is needed + # until it's seen all of s's elements. But in this case, f's + # iterator cannot be restarted. So what we're testing here is + # whether string.join() can manage to remember everything it's seen + # and pass that on to unicode.join(). + try: + got = " - ".join(OhPhooey(f)) + self.assertEqual(got, unicode("a\n - b\n - fooled you! - c\n")) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + if not have_unicode: + def test_unicode_join_endcase(self): pass + + # Test iterators with 'x in y' and 'x not in y'. + def test_in_and_not_in(self): + for sc5 in IteratingSequenceClass(5), SequenceClass(5): + for i in range(5): + self.assert_(i in sc5) + for i in "abc", -1, 5, 42.42, (3, 4), [], {1: 1}, 3-12j, sc5: + self.assert_(i not in sc5) + + self.assertRaises(TypeError, lambda: 3 in 12) + self.assertRaises(TypeError, lambda: 3 not in map) + + d = {"one": 1, "two": 2, "three": 3, 1j: 2j} + for k in d: + self.assert_(k in d) + self.assert_(k not in d.itervalues()) + for v in d.values(): + self.assert_(v in d.itervalues()) + self.assert_(v not in d) + for k, v in d.iteritems(): + self.assert_((k, v) in d.iteritems()) + self.assert_((v, k) not in d.iteritems()) + + f = open(TESTFN, "w") + try: + f.write("a\n" "b\n" "c\n") + finally: + f.close() + f = open(TESTFN, "r") + try: + for chunk in "abc": + f.seek(0, 0) + self.assert_(chunk not in f) + f.seek(0, 0) + self.assert_((chunk + "\n") in f) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test iterators with operator.countOf (PySequence_Count). + def test_countOf(self): + from operator import countOf + self.assertEqual(countOf([1,2,2,3,2,5], 2), 3) + self.assertEqual(countOf((1,2,2,3,2,5), 2), 3) + self.assertEqual(countOf("122325", "2"), 3) + self.assertEqual(countOf("122325", "6"), 0) + + self.assertRaises(TypeError, countOf, 42, 1) + self.assertRaises(TypeError, countOf, countOf, countOf) + + d = {"one": 3, "two": 3, "three": 3, 1j: 2j} + for k in d: + self.assertEqual(countOf(d, k), 1) + self.assertEqual(countOf(d.itervalues(), 3), 3) + self.assertEqual(countOf(d.itervalues(), 2j), 1) + self.assertEqual(countOf(d.itervalues(), 1j), 0) + + f = open(TESTFN, "w") + try: + f.write("a\n" "b\n" "c\n" "b\n") + finally: + f.close() + f = open(TESTFN, "r") + try: + for letter, count in ("a", 1), ("b", 2), ("c", 1), ("d", 0): + f.seek(0, 0) + self.assertEqual(countOf(f, letter + "\n"), count) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + # Test iterators with operator.indexOf (PySequence_Index). 
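# Aside -- an illustrative note (not part of the committed file; _it is a
# hypothetical name) for the indexOf test below: when indexOf is given an
# iterator, each call consumes it up to and including the match, so the
# positions returned are relative to wherever the previous search stopped.
from operator import indexOf
_it = iter("abcabc")
assert indexOf(_it, "b") == 1   # consumes 'a', 'b'
assert indexOf(_it, "b") == 2   # continues from 'c': consumes 'c', 'a', 'b'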
+ def test_indexOf(self): + from operator import indexOf + self.assertEqual(indexOf([1,2,2,3,2,5], 1), 0) + self.assertEqual(indexOf((1,2,2,3,2,5), 2), 1) + self.assertEqual(indexOf((1,2,2,3,2,5), 3), 3) + self.assertEqual(indexOf((1,2,2,3,2,5), 5), 5) + self.assertRaises(ValueError, indexOf, (1,2,2,3,2,5), 0) + self.assertRaises(ValueError, indexOf, (1,2,2,3,2,5), 6) + + self.assertEqual(indexOf("122325", "2"), 1) + self.assertEqual(indexOf("122325", "5"), 5) + self.assertRaises(ValueError, indexOf, "122325", "6") + + self.assertRaises(TypeError, indexOf, 42, 1) + self.assertRaises(TypeError, indexOf, indexOf, indexOf) + + f = open(TESTFN, "w") + try: + f.write("a\n" "b\n" "c\n" "d\n" "e\n") + finally: + f.close() + f = open(TESTFN, "r") + try: + fiter = iter(f) + self.assertEqual(indexOf(fiter, "b\n"), 1) + self.assertEqual(indexOf(fiter, "d\n"), 1) + self.assertEqual(indexOf(fiter, "e\n"), 0) + self.assertRaises(ValueError, indexOf, fiter, "a\n") + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + iclass = IteratingSequenceClass(3) + for i in range(3): + self.assertEqual(indexOf(iclass, i), i) + self.assertRaises(ValueError, indexOf, iclass, -1) + + # Test iterators with file.writelines(). + def test_writelines(self): + f = file(TESTFN, "w") + + try: + self.assertRaises(TypeError, f.writelines, None) + self.assertRaises(TypeError, f.writelines, 42) + + f.writelines(["1\n", "2\n"]) + f.writelines(("3\n", "4\n")) + f.writelines({'5\n': None}) + f.writelines({}) + + # Try a big chunk too. + class Iterator: + def __init__(self, start, finish): + self.start = start + self.finish = finish + self.i = self.start + + def next(self): + if self.i >= self.finish: + raise StopIteration + result = str(self.i) + '\n' + self.i += 1 + return result + + def __iter__(self): + return self + + class Whatever: + def __init__(self, start, finish): + self.start = start + self.finish = finish + + def __iter__(self): + return Iterator(self.start, self.finish) + + f.writelines(Whatever(6, 6+2000)) + f.close() + + f = file(TESTFN) + expected = [str(i) + "\n" for i in range(1, 2006)] + self.assertEqual(list(f), expected) + + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + + # Test iterators on RHS of unpacking assignments. 
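# Aside -- an illustrative note (not part of the committed file) for the
# unpacking test below: any iterable can appear on the right-hand side of an
# unpacking assignment, but it must produce exactly as many values as there
# are targets.
a, b, c = iter(xrange(3))
assert (a, b, c) == (0, 1, 2)
try:
    a, b = iter(xrange(3))          # one value too many
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")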
+ def test_unpack_iter(self): + a, b = 1, 2 + self.assertEqual((a, b), (1, 2)) + + a, b, c = IteratingSequenceClass(3) + self.assertEqual((a, b, c), (0, 1, 2)) + + try: # too many values + a, b = IteratingSequenceClass(3) + except ValueError: + pass + else: + self.fail("should have raised ValueError") + + try: # not enough values + a, b, c = IteratingSequenceClass(2) + except ValueError: + pass + else: + self.fail("should have raised ValueError") + + try: # not iterable + a, b, c = len + except TypeError: + pass + else: + self.fail("should have raised TypeError") + + a, b, c = {1: 42, 2: 42, 3: 42}.itervalues() + self.assertEqual((a, b, c), (42, 42, 42)) + + f = open(TESTFN, "w") + lines = ("a\n", "bb\n", "ccc\n") + try: + for line in lines: + f.write(line) + finally: + f.close() + f = open(TESTFN, "r") + try: + a, b, c = f + self.assertEqual((a, b, c), lines) + finally: + f.close() + try: + unlink(TESTFN) + except OSError: + pass + + (a, b), (c,) = IteratingSequenceClass(2), {42: 24} + self.assertEqual((a, b, c), (0, 1, 42)) + + # Test reference count behavior + + class C(object): + count = 0 + def __new__(cls): + cls.count += 1 + return object.__new__(cls) + def __del__(self): + cls = self.__class__ + assert cls.count > 0 + cls.count -= 1 + x = C() + self.assertEqual(C.count, 1) + del x + gc.collect() + self.assertEqual(C.count, 0) + l = [C(), C(), C()] + self.assertEqual(C.count, 3) + try: + a, b = iter(l) + except ValueError: + pass + del l + gc.collect() + self.assertEqual(C.count, 0) + + + # Make sure StopIteration is a "sink state". + # This tests various things that weren't sink states in Python 2.2.1, + # plus various things that always were fine. + + def test_sinkstate_list(self): + # This used to fail + a = range(5) + b = iter(a) + self.assertEqual(list(b), range(5)) + a.extend(range(5, 10)) + self.assertEqual(list(b), []) + + def test_sinkstate_tuple(self): + a = (0, 1, 2, 3, 4) + b = iter(a) + self.assertEqual(list(b), range(5)) + self.assertEqual(list(b), []) + + def test_sinkstate_string(self): + a = "abcde" + b = iter(a) + self.assertEqual(list(b), ['a', 'b', 'c', 'd', 'e']) + self.assertEqual(list(b), []) + + def test_sinkstate_sequence(self): + # This used to fail + a = SequenceClass(5) + b = iter(a) + self.assertEqual(list(b), range(5)) + a.n = 10 + self.assertEqual(list(b), []) + + def test_sinkstate_callable(self): + # This used to fail + def spam(state=[0]): + i = state[0] + state[0] = i+1 + if i == 10: + raise AssertionError, "shouldn't have gotten this far" + return i + b = iter(spam, 5) + self.assertEqual(list(b), range(5)) + self.assertEqual(list(b), []) + + def test_sinkstate_dict(self): + # XXX For a more thorough test, see towards the end of: + # http://mail.python.org/pipermail/python-dev/2002-July/026512.html + a = {1:1, 2:2, 0:0, 4:4, 3:3} + for b in iter(a), a.iterkeys(), a.iteritems(), a.itervalues(): + b = iter(a) + self.assertEqual(len(list(b)), 5) + self.assertEqual(list(b), []) + + def test_sinkstate_yield(self): + def gen(): + for i in range(5): + yield i + b = gen() + self.assertEqual(list(b), range(5)) + self.assertEqual(list(b), []) + + def test_sinkstate_range(self): + a = xrange(5) + b = iter(a) + self.assertEqual(list(b), range(5)) + self.assertEqual(list(b), []) + + def test_sinkstate_enumerate(self): + a = range(5) + e = enumerate(a) + b = iter(e) + self.assertEqual(list(b), zip(range(5), range(5))) + self.assertEqual(list(b), []) + + +def test_main(): + run_unittest(TestCase) + + +if __name__ == "__main__": + test_main() Added: 
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,975 @@ +import unittest +from test import test_support +from itertools import * +from weakref import proxy +import sys +import operator +import random + +def onearg(x): + 'Test function of one argument' + return 2*x + +def errfunc(*args): + 'Test function that raises an error' + raise ValueError + +def gen3(): + 'Non-restartable source sequence' + for i in (0, 1, 2): + yield i + +def isEven(x): + 'Test predicate' + return x%2==0 + +def isOdd(x): + 'Test predicate' + return x%2==1 + +class StopNow: + 'Class emulating an empty iterable.' + def __iter__(self): + return self + def next(self): + raise StopIteration + +def take(n, seq): + 'Convenience function for partially consuming a long of infinite iterable' + return list(islice(seq, n)) + +class TestBasicOps(unittest.TestCase): + def test_chain(self): + self.assertEqual(list(chain('abc', 'def')), list('abcdef')) + self.assertEqual(list(chain('abc')), list('abc')) + self.assertEqual(list(chain('')), []) + self.assertEqual(take(4, chain('abc', 'def')), list('abcd')) + self.assertRaises(TypeError, chain, 2, 3) + + def test_count(self): + self.assertEqual(zip('abc',count()), [('a', 0), ('b', 1), ('c', 2)]) + self.assertEqual(zip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)]) + self.assertEqual(take(2, zip('abc',count(3))), [('a', 3), ('b', 4)]) + self.assertRaises(TypeError, count, 2, 3) + self.assertRaises(TypeError, count, 'a') + self.assertRaises(OverflowError, list, islice(count(sys.maxint-5), 10)) + c = count(3) + self.assertEqual(repr(c), 'count(3)') + c.next() + self.assertEqual(repr(c), 'count(4)') + c = count(-9) + self.assertEqual(repr(c), 'count(-9)') + c.next() + self.assertEqual(c.next(), -8) + + def test_cycle(self): + self.assertEqual(take(10, cycle('abc')), list('abcabcabca')) + self.assertEqual(list(cycle('')), []) + self.assertRaises(TypeError, cycle) + self.assertRaises(TypeError, cycle, 5) + self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0]) + + def test_groupby(self): + # Check whether it accepts arguments correctly + self.assertEqual([], list(groupby([]))) + self.assertEqual([], list(groupby([], key=id))) + self.assertRaises(TypeError, list, groupby('abc', [])) + self.assertRaises(TypeError, groupby, None) + self.assertRaises(TypeError, groupby, 'abc', lambda x:x, 10) + + # Check normal input + s = [(0, 10, 20), (0, 11,21), (0,12,21), (1,13,21), (1,14,22), + (2,15,22), (3,16,23), (3,17,23)] + dup = [] + for k, g in groupby(s, lambda r:r[0]): + for elem in g: + self.assertEqual(k, elem[0]) + dup.append(elem) + self.assertEqual(s, dup) + + # Check nested case + dup = [] + for k, g in groupby(s, lambda r:r[0]): + for ik, ig in groupby(g, lambda r:r[2]): + for elem in ig: + self.assertEqual(k, elem[0]) + self.assertEqual(ik, elem[2]) + dup.append(elem) + self.assertEqual(s, dup) + + # Check case where inner iterator is not used + keys = [k for k, g in groupby(s, lambda r:r[0])] + expectedkeys = set([r[0] for r in s]) + self.assertEqual(set(keys), expectedkeys) + self.assertEqual(len(keys), len(expectedkeys)) + + # Exercise pipes and filters style + s = 'abracadabra' + # sort s | uniq + r = [k for k, g in groupby(sorted(s))] + self.assertEqual(r, ['a', 'b', 'c', 'd', 'r']) + # sort s | uniq -d + r = [k for k, g in 
groupby(sorted(s)) if list(islice(g,1,2))] + self.assertEqual(r, ['a', 'b', 'r']) + # sort s | uniq -c + r = [(len(list(g)), k) for k, g in groupby(sorted(s))] + self.assertEqual(r, [(5, 'a'), (2, 'b'), (1, 'c'), (1, 'd'), (2, 'r')]) + # sort s | uniq -c | sort -rn | head -3 + r = sorted([(len(list(g)) , k) for k, g in groupby(sorted(s))], reverse=True)[:3] + self.assertEqual(r, [(5, 'a'), (2, 'r'), (2, 'b')]) + + # iter.next failure + class ExpectedError(Exception): + pass + def delayed_raise(n=0): + for i in range(n): + yield 'yo' + raise ExpectedError + def gulp(iterable, keyp=None, func=list): + return [func(g) for k, g in groupby(iterable, keyp)] + + # iter.next failure on outer object + self.assertRaises(ExpectedError, gulp, delayed_raise(0)) + # iter.next failure on inner object + self.assertRaises(ExpectedError, gulp, delayed_raise(1)) + + # __cmp__ failure + class DummyCmp: + def __cmp__(self, dst): + raise ExpectedError + s = [DummyCmp(), DummyCmp(), None] + + # __cmp__ failure on outer object + self.assertRaises(ExpectedError, gulp, s, func=id) + # __cmp__ failure on inner object + self.assertRaises(ExpectedError, gulp, s) + + # keyfunc failure + def keyfunc(obj): + if keyfunc.skip > 0: + keyfunc.skip -= 1 + return obj + else: + raise ExpectedError + + # keyfunc failure on outer object + keyfunc.skip = 0 + self.assertRaises(ExpectedError, gulp, [None], keyfunc) + keyfunc.skip = 1 + self.assertRaises(ExpectedError, gulp, [None, None], keyfunc) + + def test_ifilter(self): + self.assertEqual(list(ifilter(isEven, range(6))), [0,2,4]) + self.assertEqual(list(ifilter(None, [0,1,0,2,0])), [1,2]) + self.assertEqual(take(4, ifilter(isEven, count())), [0,2,4,6]) + self.assertRaises(TypeError, ifilter) + self.assertRaises(TypeError, ifilter, lambda x:x) + self.assertRaises(TypeError, ifilter, lambda x:x, range(6), 7) + self.assertRaises(TypeError, ifilter, isEven, 3) + self.assertRaises(TypeError, ifilter(range(6), range(6)).next) + + def test_ifilterfalse(self): + self.assertEqual(list(ifilterfalse(isEven, range(6))), [1,3,5]) + self.assertEqual(list(ifilterfalse(None, [0,1,0,2,0])), [0,0,0]) + self.assertEqual(take(4, ifilterfalse(isEven, count())), [1,3,5,7]) + self.assertRaises(TypeError, ifilterfalse) + self.assertRaises(TypeError, ifilterfalse, lambda x:x) + self.assertRaises(TypeError, ifilterfalse, lambda x:x, range(6), 7) + self.assertRaises(TypeError, ifilterfalse, isEven, 3) + self.assertRaises(TypeError, ifilterfalse(range(6), range(6)).next) + + def test_izip(self): + ans = [(x,y) for x, y in izip('abc',count())] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + self.assertEqual(list(izip('abc', range(6))), zip('abc', range(6))) + self.assertEqual(list(izip('abcdef', range(3))), zip('abcdef', range(3))) + self.assertEqual(take(3,izip('abcdef', count())), zip('abcdef', range(3))) + self.assertEqual(list(izip('abcdef')), zip('abcdef')) + self.assertEqual(list(izip()), zip()) + self.assertRaises(TypeError, izip, 3) + self.assertRaises(TypeError, izip, range(3), 3) + # Check tuple re-use (implementation detail) + self.assertEqual([tuple(list(pair)) for pair in izip('abc', 'def')], + zip('abc', 'def')) + self.assertEqual([pair for pair in izip('abc', 'def')], + zip('abc', 'def')) + # the following test deals with a specific implementation detail, + # that izip "reuses" the SAME tuple object each time when it can; + # it does not apply correctly to pypy, so I'm commenting it -- AM + # ids = map(id, izip('abc', 'def')) + # self.assertEqual(min(ids), max(ids)) + ids = map(id, 
list(izip('abc', 'def'))) + self.assertEqual(len(dict.fromkeys(ids)), len(ids)) + + def test_repeat(self): + self.assertEqual(zip(xrange(3),repeat('a')), + [(0, 'a'), (1, 'a'), (2, 'a')]) + self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a']) + self.assertEqual(take(3, repeat('a')), ['a', 'a', 'a']) + self.assertEqual(list(repeat('a', 0)), []) + self.assertEqual(list(repeat('a', -3)), []) + self.assertRaises(TypeError, repeat) + self.assertRaises(TypeError, repeat, None, 3, 4) + self.assertRaises(TypeError, repeat, None, 'a') + r = repeat(1+0j) + self.assertEqual(repr(r), 'repeat((1+0j))') + r = repeat(1+0j, 5) + self.assertEqual(repr(r), 'repeat((1+0j), 5)') + list(r) + self.assertEqual(repr(r), 'repeat((1+0j), 0)') + + def test_imap(self): + self.assertEqual(list(imap(operator.pow, range(3), range(1,7))), + [0**1, 1**2, 2**3]) + self.assertEqual(list(imap(None, 'abc', range(5))), + [('a',0),('b',1),('c',2)]) + self.assertEqual(list(imap(None, 'abc', count())), + [('a',0),('b',1),('c',2)]) + self.assertEqual(take(2,imap(None, 'abc', count())), + [('a',0),('b',1)]) + self.assertEqual(list(imap(operator.pow, [])), []) + self.assertRaises(TypeError, imap) + self.assertRaises(TypeError, imap, operator.neg) + self.assertRaises(TypeError, imap(10, range(5)).next) + self.assertRaises(ValueError, imap(errfunc, [4], [5]).next) + self.assertRaises(TypeError, imap(onearg, [4], [5]).next) + + def test_starmap(self): + self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))), + [0**1, 1**2, 2**3]) + self.assertEqual(take(3, starmap(operator.pow, izip(count(), count(1)))), + [0**1, 1**2, 2**3]) + self.assertEqual(list(starmap(operator.pow, [])), []) + self.assertRaises(TypeError, list, starmap(operator.pow, [[4,5]])) + self.assertRaises(TypeError, starmap) + self.assertRaises(TypeError, starmap, operator.pow, [(4,5)], 'extra') + self.assertRaises(TypeError, starmap(10, [(4,5)]).next) + self.assertRaises(ValueError, starmap(errfunc, [(4,5)]).next) + self.assertRaises(TypeError, starmap(onearg, [(4,5)]).next) + + def test_islice(self): + for args in [ # islice(args) should agree with range(args) + (10, 20, 3), + (10, 3, 20), + (10, 20), + (10, 3), + (20,) + ]: + self.assertEqual(list(islice(xrange(100), *args)), range(*args)) + + for args, tgtargs in [ # Stop when seqn is exhausted + ((10, 110, 3), ((10, 100, 3))), + ((10, 110), ((10, 100))), + ((110,), (100,)) + ]: + self.assertEqual(list(islice(xrange(100), *args)), range(*tgtargs)) + + # Test stop=None + self.assertEqual(list(islice(xrange(10), None)), range(10)) + self.assertEqual(list(islice(xrange(10), None, None)), range(10)) + self.assertEqual(list(islice(xrange(10), None, None, None)), range(10)) + self.assertEqual(list(islice(xrange(10), 2, None)), range(2, 10)) + self.assertEqual(list(islice(xrange(10), 1, None, 2)), range(1, 10, 2)) + + # Test number of items consumed SF #1171417 + it = iter(range(10)) + self.assertEqual(list(islice(it, 3)), range(3)) + self.assertEqual(list(it), range(3, 10)) + + # Test invalid arguments + self.assertRaises(TypeError, islice, xrange(10)) + self.assertRaises(TypeError, islice, xrange(10), 1, 2, 3, 4) + self.assertRaises(ValueError, islice, xrange(10), -5, 10, 1) + self.assertRaises(ValueError, islice, xrange(10), 1, -5, -1) + self.assertRaises(ValueError, islice, xrange(10), 1, 10, -1) + self.assertRaises(ValueError, islice, xrange(10), 1, 10, 0) + self.assertRaises(ValueError, islice, xrange(10), 'a') + self.assertRaises(ValueError, islice, xrange(10), 'a', 1) + 
self.assertRaises(ValueError, islice, xrange(10), 1, 'a') + self.assertRaises(ValueError, islice, xrange(10), 'a', 1, 1) + self.assertRaises(ValueError, islice, xrange(10), 1, 'a', 1) + # too slow to test on pypy, weakened...: + # self.assertEqual(len(list(islice(count(), 1, 10, sys.maxint))), 1) + self.assertEqual(len(list(islice(count(), 1, 10, 99))), 1) + + def test_takewhile(self): + data = [1, 3, 5, 20, 2, 4, 6, 8] + underten = lambda x: x<10 + self.assertEqual(list(takewhile(underten, data)), [1, 3, 5]) + self.assertEqual(list(takewhile(underten, [])), []) + self.assertRaises(TypeError, takewhile) + self.assertRaises(TypeError, takewhile, operator.pow) + self.assertRaises(TypeError, takewhile, operator.pow, [(4,5)], 'extra') + self.assertRaises(TypeError, takewhile(10, [(4,5)]).next) + self.assertRaises(ValueError, takewhile(errfunc, [(4,5)]).next) + t = takewhile(bool, [1, 1, 1, 0, 0, 0]) + self.assertEqual(list(t), [1, 1, 1]) + self.assertRaises(StopIteration, t.next) + + def test_dropwhile(self): + data = [1, 3, 5, 20, 2, 4, 6, 8] + underten = lambda x: x<10 + self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8]) + self.assertEqual(list(dropwhile(underten, [])), []) + self.assertRaises(TypeError, dropwhile) + self.assertRaises(TypeError, dropwhile, operator.pow) + self.assertRaises(TypeError, dropwhile, operator.pow, [(4,5)], 'extra') + self.assertRaises(TypeError, dropwhile(10, [(4,5)]).next) + self.assertRaises(ValueError, dropwhile(errfunc, [(4,5)]).next) + + def test_tee(self): + n = 20 + def irange(n): + for i in xrange(n): + yield i + + a, b = tee([]) # test empty iterator + self.assertEqual(list(a), []) + self.assertEqual(list(b), []) + + a, b = tee(irange(n)) # test 100% interleaved + self.assertEqual(zip(a,b), zip(range(n),range(n))) + + a, b = tee(irange(n)) # test 0% interleaved + self.assertEqual(list(a), range(n)) + self.assertEqual(list(b), range(n)) + + a, b = tee(irange(n)) # test dealloc of leading iterator + for i in xrange(n // 2): + self.assertEqual(a.next(), i) + del a + self.assertEqual(list(b), range(n)) + + a, b = tee(irange(n)) # test dealloc of trailing iterator + for i in xrange(n // 2): + self.assertEqual(a.next(), i) + del b + self.assertEqual(list(a), range(n // 2, n)) + + for j in xrange(5): # test randomly interleaved + order = [0]*n + [1]*n + random.shuffle(order) + lists = ([], []) + its = tee(irange(n)) + for i in order: + value = its[i].next() + lists[i].append(value) + self.assertEqual(lists[0], range(n)) + self.assertEqual(lists[1], range(n)) + + # test argument format checking + self.assertRaises(TypeError, tee) + self.assertRaises(TypeError, tee, 3) + self.assertRaises(TypeError, tee, [1,2], 'x') + self.assertRaises(TypeError, tee, [1,2], 3, 'x') + + # tee object should be instantiable + a, b = tee('abc') + c = type(a)('def') + self.assertEqual(list(c), list('def')) + + # test long-lagged and multi-way split + a, b, c = tee(xrange(n), 3) + for i in xrange(n // 2): + self.assertEqual(a.next(), i) + self.assertEqual(list(b), range(n)) + self.assertEqual([c.next(), c.next()], range(2)) + self.assertEqual(list(a), range(n // 2, n)) + self.assertEqual(list(c), range(2, n)) + + # test values of n + self.assertRaises(TypeError, tee, 'abc', 'invalid') + self.assertRaises(ValueError, tee, [], -1) + for n in xrange(5): + result = tee('abc', n) + self.assertEqual(type(result), tuple) + self.assertEqual(len(result), n) + self.assertEqual(map(list, result), [list('abc')]*n) + + # tee pass-through to copyable iterator + a, b = tee('abc') + 
c, d = tee(a) + self.assert_(a is c) + + # test tee_new + t1, t2 = tee('abc') + tnew = type(t1) + self.assertRaises(TypeError, tnew) + self.assertRaises(TypeError, tnew, 10) + t3 = tnew(t1) + self.assert_(list(t1) == list(t2) == list(t3) == list('abc')) + + # Commented out until weakref support is implemented. +# # test that tee objects are weak referencable +# a, b = tee(xrange(10)) +# p = proxy(a) +# self.assertEqual(getattr(p, '__class__'), type(b)) +# del a +# self.assertRaises(ReferenceError, getattr, p, '__class__') + + def test_StopIteration(self): + self.assertRaises(StopIteration, izip().next) + + for f in (chain, cycle, izip, groupby): + self.assertRaises(StopIteration, f([]).next) + self.assertRaises(StopIteration, f(StopNow()).next) + + self.assertRaises(StopIteration, islice([], None).next) + self.assertRaises(StopIteration, islice(StopNow(), None).next) + + p, q = tee([]) + self.assertRaises(StopIteration, p.next) + self.assertRaises(StopIteration, q.next) + p, q = tee(StopNow()) + self.assertRaises(StopIteration, p.next) + self.assertRaises(StopIteration, q.next) + + self.assertRaises(StopIteration, repeat(None, 0).next) + + for f in (ifilter, ifilterfalse, imap, takewhile, dropwhile, starmap): + self.assertRaises(StopIteration, f(lambda x:x, []).next) + self.assertRaises(StopIteration, f(lambda x:x, StopNow()).next) + +class TestGC(unittest.TestCase): + + def makecycle(self, iterator, container): + container.append(iterator) + iterator.next() + del container, iterator + + def test_chain(self): + a = [] + self.makecycle(chain(a), a) + + def test_cycle(self): + a = [] + self.makecycle(cycle([a]*2), a) + + def test_dropwhile(self): + a = [] + self.makecycle(dropwhile(bool, [0, a, a]), a) + + def test_groupby(self): + a = [] + self.makecycle(groupby([a]*2, lambda x:x), a) + + def test_ifilter(self): + a = [] + self.makecycle(ifilter(lambda x:True, [a]*2), a) + + def test_ifilterfalse(self): + a = [] + self.makecycle(ifilterfalse(lambda x:False, a), a) + + def test_izip(self): + a = [] + self.makecycle(izip([a]*2, [a]*3), a) + + def test_imap(self): + a = [] + self.makecycle(imap(lambda x:x, [a]*2), a) + + def test_islice(self): + a = [] + self.makecycle(islice([a]*2, None), a) + + def test_repeat(self): + a = [] + self.makecycle(repeat(a), a) + + def test_starmap(self): + a = [] + self.makecycle(starmap(lambda *t: t, [(a,a)]*2), a) + + def test_takewhile(self): + a = [] + self.makecycle(takewhile(bool, [1, 0, a, a]), a) + +def R(seqn): + 'Regular generator' + for i in seqn: + yield i + +class G: + 'Sequence using __getitem__' + def __init__(self, seqn): + self.seqn = seqn + def __getitem__(self, i): + return self.seqn[i] + +class I: + 'Sequence using iterator protocol' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class Ig: + 'Sequence using iterator protocol defined with a generator' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + for val in self.seqn: + yield val + +class X: + 'Missing __getitem__ and __iter__' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class N: + 'Iterator missing next()' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + +class E: + 'Test propagation of exceptions' + def 
__init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + 3 // 0 + +class S: + 'Test immediate stop' + def __init__(self, seqn): + pass + def __iter__(self): + return self + def next(self): + raise StopIteration + +def L(seqn): + 'Test multiple tiers of iterators' + return chain(imap(lambda x:x, R(Ig(G(seqn))))) + + +class TestVariousIteratorArgs(unittest.TestCase): + + def test_chain(self): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(chain(g(s))), list(g(s))) + self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s))) + self.assertRaises(TypeError, chain, X(s)) + self.assertRaises(TypeError, list, chain(N(s))) + self.assertRaises(ZeroDivisionError, list, chain(E(s))) + + def test_cycle(self): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + tgtlen = len(s) * 3 + expected = list(g(s))*3 + actual = list(islice(cycle(g(s)), tgtlen)) + self.assertEqual(actual, expected) + self.assertRaises(TypeError, cycle, X(s)) + self.assertRaises(TypeError, list, cycle(N(s))) + self.assertRaises(ZeroDivisionError, list, cycle(E(s))) + + def test_groupby(self): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual([k for k, sb in groupby(g(s))], list(g(s))) + self.assertRaises(TypeError, groupby, X(s)) + self.assertRaises(TypeError, list, groupby(N(s))) + self.assertRaises(ZeroDivisionError, list, groupby(E(s))) + + def test_ifilter(self): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(ifilter(isEven, g(s))), filter(isEven, g(s))) + self.assertRaises(TypeError, ifilter, isEven, X(s)) + self.assertRaises(TypeError, list, ifilter(isEven, N(s))) + self.assertRaises(ZeroDivisionError, list, ifilter(isEven, E(s))) + + def test_ifilterfalse(self): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(ifilterfalse(isEven, g(s))), filter(isOdd, g(s))) + self.assertRaises(TypeError, ifilterfalse, isEven, X(s)) + self.assertRaises(TypeError, list, ifilterfalse(isEven, N(s))) + self.assertRaises(ZeroDivisionError, list, ifilterfalse(isEven, E(s))) + + def test_izip(self): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(izip(g(s))), zip(g(s))) + self.assertEqual(list(izip(g(s), g(s))), zip(g(s), g(s))) + self.assertRaises(TypeError, izip, X(s)) + self.assertRaises(TypeError, list, izip(N(s))) + self.assertRaises(ZeroDivisionError, list, izip(E(s))) + + def test_imap(self): + for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(imap(onearg, g(s))), map(onearg, g(s))) + self.assertEqual(list(imap(operator.pow, g(s), g(s))), map(operator.pow, g(s), g(s))) + self.assertRaises(TypeError, imap, onearg, X(s)) + self.assertRaises(TypeError, list, imap(onearg, N(s))) + self.assertRaises(ZeroDivisionError, list, imap(onearg, E(s))) + + def test_islice(self): + for s in ("12345", "", range(10), ('do', 1.2), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2]) + self.assertRaises(TypeError, islice, X(s), 10) + self.assertRaises(TypeError, list, islice(N(s), 10)) + self.assertRaises(ZeroDivisionError, 
list, islice(E(s), 10)) + + def test_starmap(self): + for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)): + for g in (G, I, Ig, S, L, R): + ss = zip(s, s) + self.assertEqual(list(starmap(operator.pow, g(ss))), map(operator.pow, g(s), g(s))) + self.assertRaises(TypeError, starmap, operator.pow, X(ss)) + self.assertRaises(TypeError, list, starmap(operator.pow, N(ss))) + self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss))) + + def test_takewhile(self): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + tgt = [] + for elem in g(s): + if not isEven(elem): break + tgt.append(elem) + self.assertEqual(list(takewhile(isEven, g(s))), tgt) + self.assertRaises(TypeError, takewhile, isEven, X(s)) + self.assertRaises(TypeError, list, takewhile(isEven, N(s))) + self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s))) + + def test_dropwhile(self): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + tgt = [] + for elem in g(s): + if not tgt and isOdd(elem): continue + tgt.append(elem) + self.assertEqual(list(dropwhile(isOdd, g(s))), tgt) + self.assertRaises(TypeError, dropwhile, isOdd, X(s)) + self.assertRaises(TypeError, list, dropwhile(isOdd, N(s))) + self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s))) + + def test_tee(self): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): + for g in (G, I, Ig, S, L, R): + it1, it2 = tee(g(s)) + self.assertEqual(list(it1), list(g(s))) + self.assertEqual(list(it2), list(g(s))) + self.assertRaises(TypeError, tee, X(s)) + self.assertRaises(TypeError, list, tee(N(s))[0]) + self.assertRaises(ZeroDivisionError, list, tee(E(s))[0]) + +class LengthTransparency(unittest.TestCase): + + def test_repeat(self): + from test.test_iterlen import len + self.assertEqual(len(repeat(None, 50)), 50) + self.assertRaises(TypeError, len, repeat(None)) + +class RegressionTests(unittest.TestCase): + + def test_sf_793826(self): + # Fix Armin Rigo's successful efforts to wreak havoc + + def mutatingtuple(tuple1, f, tuple2): + # this builds a tuple t which is a copy of tuple1, + # then calls f(t), then mutates t to be equal to tuple2 + # (needs len(tuple1) == len(tuple2)). + def g(value, first=[1]): + if first: + del first[:] + f(z.next()) + return value + items = list(tuple2) + items[1:1] = list(tuple1) + gen = imap(g, items) + z = izip(*[gen]*len(tuple1)) + z.next() + + def f(t): + global T + T = t + first[:] = list(T) + + first = [] + mutatingtuple((1,2,3), f, (4,5,6)) + second = list(T) + self.assertEqual(first, second) + + + def test_sf_950057(self): + # Make sure that chain() and cycle() catch exceptions immediately + # rather than when shifting between input sources + + def gen1(): + hist.append(0) + yield 1 + hist.append(1) + raise AssertionError + hist.append(2) + + def gen2(x): + hist.append(3) + yield 2 + hist.append(4) + if x: + raise StopIteration + + hist = [] + self.assertRaises(AssertionError, list, chain(gen1(), gen2(False))) + self.assertEqual(hist, [0,1]) + + hist = [] + self.assertRaises(AssertionError, list, chain(gen1(), gen2(True))) + self.assertEqual(hist, [0,1]) + + hist = [] + self.assertRaises(AssertionError, list, cycle(gen1())) + self.assertEqual(hist, [0,1]) + +class SubclassWithKwargsTest(unittest.TestCase): + def test_keywords_in_subclass(self): + # count is not subclassable... 
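# [Editor's note -- not part of the committed diff.  The terse comment above
# alludes to the fact that, on CPython 2.5, itertools.count cannot be used as
# a base class, which is presumably why it is missing from the loop below.
# A minimal doctest-style sketch of the behaviour being assumed here:
#
#     >>> from itertools import count
#     >>> class MyCount(count):        # hypothetical subclass, for illustration
#     ...     pass
#     Traceback (most recent call last):
#       ...
#     TypeError: Error when calling the metaclass bases
#         type 'itertools.count' is not an acceptable base type
#
# The exact error text may vary between interpreter versions, and PyPy may
# well allow the subclass; the point is only why count is excluded here.]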
+ for cls in (repeat, izip, ifilter, ifilterfalse, chain, imap, + starmap, islice, takewhile, dropwhile, cycle): + class Subclass(cls): + def __init__(self, newarg=None, *args): + cls.__init__(self, *args) + try: + Subclass(newarg=1) + except TypeError, err: + # we expect type errors because of wrong argument count + self.failIf("does not take keyword arguments" in err.args[0]) + + +libreftest = """ Doctest for examples in the library reference: libitertools.tex + + +>>> amounts = [120.15, 764.05, 823.14] +>>> for checknum, amount in izip(count(1200), amounts): +... print 'Check %d is for $%.2f' % (checknum, amount) +... +Check 1200 is for $120.15 +Check 1201 is for $764.05 +Check 1202 is for $823.14 + +>>> import operator +>>> for cube in imap(operator.pow, xrange(1,4), repeat(3)): +... print cube +... +1 +8 +27 + +>>> reportlines = ['EuroPython', 'Roster', '', 'alex', '', 'laura', '', 'martin', '', 'walter', '', 'samuele'] +>>> for name in islice(reportlines, 3, None, 2): +... print name.title() +... +Alex +Laura +Martin +Walter +Samuele + +>>> from operator import itemgetter +>>> d = dict(a=1, b=2, c=1, d=2, e=1, f=2, g=3) +>>> di = sorted(sorted(d.iteritems()), key=itemgetter(1)) +>>> for k, g in groupby(di, itemgetter(1)): +... print k, map(itemgetter(0), g) +... +1 ['a', 'c', 'e'] +2 ['b', 'd', 'f'] +3 ['g'] + +# Find runs of consecutive numbers using groupby. The key to the solution +# is differencing with a range so that consecutive numbers all appear in +# same group. +>>> data = [ 1, 4,5,6, 10, 15,16,17,18, 22, 25,26,27,28] +>>> for k, g in groupby(enumerate(data), lambda (i,x):i-x): +... print map(operator.itemgetter(1), g) +... +[1] +[4, 5, 6] +[10] +[15, 16, 17, 18] +[22] +[25, 26, 27, 28] + +>>> def take(n, seq): +... return list(islice(seq, n)) + +>>> def enumerate(iterable): +... return izip(count(), iterable) + +>>> def tabulate(function): +... "Return function(0), function(1), ..." +... return imap(function, count()) + +>>> def iteritems(mapping): +... return izip(mapping.iterkeys(), mapping.itervalues()) + +>>> def nth(iterable, n): +... "Returns the nth item" +... return list(islice(iterable, n, n+1)) + +>>> def all(seq, pred=None): +... "Returns True if pred(x) is true for every element in the iterable" +... for elem in ifilterfalse(pred, seq): +... return False +... return True + +>>> def any(seq, pred=None): +... "Returns True if pred(x) is true for at least one element in the iterable" +... for elem in ifilter(pred, seq): +... return True +... return False + +>>> def no(seq, pred=None): +... "Returns True if pred(x) is false for every element in the iterable" +... for elem in ifilter(pred, seq): +... return False +... return True + +>>> def quantify(seq, pred=None): +... "Count how many times the predicate is true in the sequence" +... return sum(imap(pred, seq)) + +>>> def padnone(seq): +... "Returns the sequence elements and then returns None indefinitely" +... return chain(seq, repeat(None)) + +>>> def ncycles(seq, n): +... "Returns the sequence elements n times" +... return chain(*repeat(seq, n)) + +>>> def dotproduct(vec1, vec2): +... return sum(imap(operator.mul, vec1, vec2)) + +>>> def flatten(listOfLists): +... return list(chain(*listOfLists)) + +>>> def repeatfunc(func, times=None, *args): +... "Repeat calls to func with specified arguments." +... " Example: repeatfunc(random.random)" +... if times is None: +... return starmap(func, repeat(args)) +... else: +... return starmap(func, repeat(args, times)) + +>>> def pairwise(iterable): +... 
"s -> (s0,s1), (s1,s2), (s2, s3), ..." +... a, b = tee(iterable) +... try: +... b.next() +... except StopIteration: +... pass +... return izip(a, b) + +This is not part of the examples but it tests to make sure the definitions +perform as purported. + +>>> take(10, count()) +[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + +>>> list(enumerate('abc')) +[(0, 'a'), (1, 'b'), (2, 'c')] + +>>> list(islice(tabulate(lambda x: 2*x), 4)) +[0, 2, 4, 6] + +>>> nth('abcde', 3) +['d'] + +>>> all([2, 4, 6, 8], lambda x: x%2==0) +True + +>>> all([2, 3, 6, 8], lambda x: x%2==0) +False + +>>> any([2, 4, 6, 8], lambda x: x%2==0) +True + +>>> any([1, 3, 5, 9], lambda x: x%2==0,) +False + +>>> no([1, 3, 5, 9], lambda x: x%2==0) +True + +>>> no([1, 2, 5, 9], lambda x: x%2==0) +False + +>>> quantify(xrange(99), lambda x: x%2==0) +50 + +>>> a = [[1, 2, 3], [4, 5, 6]] +>>> flatten(a) +[1, 2, 3, 4, 5, 6] + +>>> list(repeatfunc(pow, 5, 2, 3)) +[8, 8, 8, 8, 8] + +>>> import random +>>> take(5, imap(int, repeatfunc(random.random))) +[0, 0, 0, 0, 0] + +>>> list(pairwise('abcd')) +[('a', 'b'), ('b', 'c'), ('c', 'd')] + +>>> list(pairwise([])) +[] + +>>> list(pairwise('a')) +[] + +>>> list(islice(padnone('abc'), 0, 6)) +['a', 'b', 'c', None, None, None] + +>>> list(ncycles('abc', 3)) +['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b', 'c'] + +>>> dotproduct([1,2,3], [4,5,6]) +32 + +""" + +__test__ = {'libreftest' : libreftest} + +def test_main(verbose=None): + test_classes = (TestBasicOps, TestVariousIteratorArgs, TestGC, + RegressionTests, LengthTransparency, + SubclassWithKwargsTest) + test_support.run_unittest(*test_classes) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*test_classes) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + + # doctest the examples in the library reference + test_support.run_doctest(sys.modules[__name__], verbose) + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,221 @@ +#!/usr/bin/env python +# -*- coding: iso-8859-1 -*- + +from test import test_support +import marshal +import sys +import unittest +import os + +def dump_and_load(obj): + f = file(test_support.TESTFN, "wb") + marshal.dump(obj, f) + f.close() + f = file(test_support.TESTFN, "rb") + got = marshal.load(f) + f.close() + return got + +class IntTestCase(unittest.TestCase): + def test_ints(self): + # Test the full range of Python ints. + n = sys.maxint + while n: + for expected in (-n, n): + s = marshal.dumps(expected) + got = marshal.loads(s) + self.assertEqual(expected, got) + got = dump_and_load(expected) + self.assertEqual(expected, got) + n = n >> 1 + os.unlink(test_support.TESTFN) + + def test_int64(self): + # Simulate int marshaling on a 64-bit box. This is most interesting if + # we're running the test on a 32-bit box, of course. 
+ + def to_little_endian_string(value, nbytes): + bytes = [] + for i in range(nbytes): + bytes.append(chr(value & 0xff)) + value >>= 8 + return ''.join(bytes) + + maxint64 = (1L << 63) - 1 + minint64 = -maxint64-1 + + for base in maxint64, minint64, -maxint64, -(minint64 >> 1): + while base: + s = 'I' + to_little_endian_string(base, 8) + got = marshal.loads(s) + self.assertEqual(base, got) + if base == -1: # a fixed-point for shifting right 1 + base = 0 + else: + base >>= 1 + + def test_bool(self): + for b in (True, False): + new = marshal.loads(marshal.dumps(b)) + self.assertEqual(b, new) + self.assertEqual(type(b), type(new)) + new = dump_and_load(b) + self.assertEqual(b, new) + self.assertEqual(type(b), type(new)) + +class FloatTestCase(unittest.TestCase): + def test_floats(self): + # Test a few floats + small = 1e-25 + n = sys.maxint * 3.7e250 + while n > small: + for expected in (-n, n): + f = float(expected) + s = marshal.dumps(f) + got = marshal.loads(s) + self.assertEqual(f, got) + got = dump_and_load(f) + self.assertEqual(f, got) + n /= 123.4567 + + f = 0.0 + s = marshal.dumps(f) + got = marshal.loads(s) + self.assertEqual(f, got) + # and with version <= 1 (floats marshalled differently then) + s = marshal.dumps(f, 1) + got = marshal.loads(s) + self.assertEqual(f, got) + + n = sys.maxint * 3.7e-250 + while n < small: + for expected in (-n, n): + f = float(expected) + s = marshal.dumps(f) + got = marshal.loads(s) + self.assertEqual(f, got) + got = dump_and_load(f) + self.assertEqual(f, got) + n *= 123.4567 + os.unlink(test_support.TESTFN) + +class StringTestCase(unittest.TestCase): + def test_unicode(self): + for s in [u"", u"Andr? Previn", u"abc", u" "*10000]: + new = marshal.loads(marshal.dumps(s)) + self.assertEqual(s, new) + self.assertEqual(type(s), type(new)) + new = dump_and_load(s) + self.assertEqual(s, new) + self.assertEqual(type(s), type(new)) + os.unlink(test_support.TESTFN) + + def test_string(self): + for s in ["", "Andr? Previn", "abc", " "*10000]: + new = marshal.loads(marshal.dumps(s)) + self.assertEqual(s, new) + self.assertEqual(type(s), type(new)) + new = dump_and_load(s) + self.assertEqual(s, new) + self.assertEqual(type(s), type(new)) + os.unlink(test_support.TESTFN) + + def test_buffer(self): + for s in ["", "Andr? Previn", "abc", " "*10000]: + b = buffer(s) + new = marshal.loads(marshal.dumps(b)) + self.assertEqual(s, new) + new = dump_and_load(b) + self.assertEqual(s, new) + os.unlink(test_support.TESTFN) + +class ExceptionTestCase(unittest.TestCase): + def test_exceptions(self): + new = marshal.loads(marshal.dumps(StopIteration)) + self.assertEqual(StopIteration, new) + +class CodeTestCase(unittest.TestCase): + def test_code(self): + co = ExceptionTestCase.test_exceptions.func_code + new = marshal.loads(marshal.dumps(co)) + self.assertEqual(co, new) + +class ContainerTestCase(unittest.TestCase): + d = {'astring': 'foo at bar.baz.spam', + 'afloat': 7283.43, + 'anint': 2**20, + 'ashortlong': 2L, + 'alist': ['.zyx.41'], + 'atuple': ('.zyx.41',)*10, + 'aboolean': False, + 'aunicode': u"Andr? 
Previn" + } + def test_dict(self): + new = marshal.loads(marshal.dumps(self.d)) + self.assertEqual(self.d, new) + new = dump_and_load(self.d) + self.assertEqual(self.d, new) + os.unlink(test_support.TESTFN) + + def test_list(self): + lst = self.d.items() + new = marshal.loads(marshal.dumps(lst)) + self.assertEqual(lst, new) + new = dump_and_load(lst) + self.assertEqual(lst, new) + os.unlink(test_support.TESTFN) + + def test_tuple(self): + t = tuple(self.d.keys()) + new = marshal.loads(marshal.dumps(t)) + self.assertEqual(t, new) + new = dump_and_load(t) + self.assertEqual(t, new) + os.unlink(test_support.TESTFN) + + def test_sets(self): + for constructor in (set, frozenset): + t = constructor(self.d.keys()) + new = marshal.loads(marshal.dumps(t)) + self.assertEqual(t, new) + self.assert_(isinstance(new, constructor)) + self.assertNotEqual(id(t), id(new)) + new = dump_and_load(t) + self.assertEqual(t, new) + os.unlink(test_support.TESTFN) + +class BugsTestCase(unittest.TestCase): + def test_bug_5888452(self): + # Simple-minded check for SF 588452: Debug build crashes + marshal.dumps([128] * 1000) + + def test_patch_873224(self): + self.assertRaises(Exception, marshal.loads, '0') + self.assertRaises(Exception, marshal.loads, 'f') + self.assertRaises(Exception, marshal.loads, marshal.dumps(5L)[:-1]) + + def test_version_argument(self): + # Python 2.4.0 crashes for any call to marshal.dumps(x, y) + self.assertEquals(marshal.loads(marshal.dumps(5, 0)), 5) + self.assertEquals(marshal.loads(marshal.dumps(5, 1)), 5) + + def test_fuzz(self): + # simple test that it's at least not *totally* trivial to + # crash from bad marshal data + for c in [chr(i) for i in range(256)]: + try: + marshal.loads(c) + except Exception: + pass + +def test_main(): + test_support.run_unittest(IntTestCase, + FloatTestCase, + StringTestCase, + CodeTestCase, + ContainerTestCase, + ExceptionTestCase, + BugsTestCase) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,397 @@ +from test.test_support import verify, vereq, TESTFN +import mmap +import os, re + +PAGESIZE = mmap.PAGESIZE + +def test_both(): + "Test mmap module on Unix systems and Windows" + + # Create a file to be mmap'ed. + if os.path.exists(TESTFN): + os.unlink(TESTFN) + f = open(TESTFN, 'w+') + + try: # unlink TESTFN no matter what + # Write 2 pages worth of data to the file + f.write('\0'* PAGESIZE) + f.write('foo') + f.write('\0'* (PAGESIZE-3) ) + f.flush() + m = mmap.mmap(f.fileno(), 2 * PAGESIZE) + f.close() + + # Simple sanity checks + + print type(m) # SF bug 128713: segfaulted on Linux + print ' Position of foo:', m.find('foo') / float(PAGESIZE), 'pages' + vereq(m.find('foo'), PAGESIZE) + + print ' Length of file:', len(m) / float(PAGESIZE), 'pages' + vereq(len(m), 2*PAGESIZE) + + print ' Contents of byte 0:', repr(m[0]) + vereq(m[0], '\0') + print ' Contents of first 3 bytes:', repr(m[0:3]) + vereq(m[0:3], '\0\0\0') + + # Modify the file's content + print "\n Modifying file's content..." 
+ m[0] = '3' + m[PAGESIZE +3: PAGESIZE +3+3] = 'bar' + + # Check that the modification worked + print ' Contents of byte 0:', repr(m[0]) + vereq(m[0], '3') + print ' Contents of first 3 bytes:', repr(m[0:3]) + vereq(m[0:3], '3\0\0') + print ' Contents of second page:', repr(m[PAGESIZE-1 : PAGESIZE + 7]) + vereq(m[PAGESIZE-1 : PAGESIZE + 7], '\0foobar\0') + + m.flush() + + # Test doing a regular expression match in an mmap'ed file + match = re.search('[A-Za-z]+', m) + if match is None: + print ' ERROR: regex match on mmap failed!' + else: + start, end = match.span(0) + length = end - start + + print ' Regex match on mmap (page start, length of match):', + print start / float(PAGESIZE), length + + vereq(start, PAGESIZE) + vereq(end, PAGESIZE + 6) + + # test seeking around (try to overflow the seek implementation) + m.seek(0,0) + print ' Seek to zeroth byte' + vereq(m.tell(), 0) + m.seek(42,1) + print ' Seek to 42nd byte' + vereq(m.tell(), 42) + m.seek(0,2) + print ' Seek to last byte' + vereq(m.tell(), len(m)) + + print ' Try to seek to negative position...' + try: + m.seek(-1) + except ValueError: + pass + else: + verify(0, 'expected a ValueError but did not get it') + + print ' Try to seek beyond end of mmap...' + try: + m.seek(1,2) + except ValueError: + pass + else: + verify(0, 'expected a ValueError but did not get it') + + print ' Try to seek to negative position...' + try: + m.seek(-len(m)-1,2) + except ValueError: + pass + else: + verify(0, 'expected a ValueError but did not get it') + + # Try resizing map + print ' Attempting resize()' + try: + m.resize(512) + except SystemError: + # resize() not supported + # No messages are printed, since the output of this test suite + # would then be different across platforms. + pass + else: + # resize() is supported + verify(len(m) == 512, + "len(m) is %d, but expecting 512" % (len(m),) ) + # Check that we can no longer seek beyond the new size. + try: + m.seek(513,0) + except ValueError: + pass + else: + verify(0, 'Could seek beyond the new size') + + # Check that the underlying file is truncated too + # (bug #728515) + f = open(TESTFN) + f.seek(0, 2) + verify(f.tell() == 512, 'Underlying file not truncated') + f.close() + verify(m.size() == 512, 'New size not reflected in file') + + m.close() + + finally: + try: + f.close() + except OSError: + pass + try: + os.unlink(TESTFN) + except OSError: + pass + + # Test for "access" keyword parameter + try: + mapsize = 10 + print " Creating", mapsize, "byte test data file." + f = open(TESTFN, "wb") + f.write("a"*mapsize) + f.close() + print " Opening mmap with access=ACCESS_READ" + f = open(TESTFN, "rb") + m = mmap.mmap(f.fileno(), mapsize, access=mmap.ACCESS_READ) + verify(m[:] == 'a'*mapsize, "Readonly memory map data incorrect.") + + print " Ensuring that readonly mmap can't be slice assigned." + try: + m[:] = 'b'*mapsize + except TypeError: + pass + else: + verify(0, "Able to write to readonly memory map") + + print " Ensuring that readonly mmap can't be item assigned." + try: + m[0] = 'b' + except TypeError: + pass + else: + verify(0, "Able to write to readonly memory map") + + print " Ensuring that readonly mmap can't be write() to." + try: + m.seek(0,0) + m.write('abc') + except TypeError: + pass + else: + verify(0, "Able to write to readonly memory map") + + print " Ensuring that readonly mmap can't be write_byte() to." 
+ try: + m.seek(0,0) + m.write_byte('d') + except TypeError: + pass + else: + verify(0, "Able to write to readonly memory map") + + print " Ensuring that readonly mmap can't be resized." + try: + m.resize(2*mapsize) + except SystemError: # resize is not universally supported + pass + except TypeError: + pass + else: + verify(0, "Able to resize readonly memory map") + del m, f + verify(open(TESTFN, "rb").read() == 'a'*mapsize, + "Readonly memory map data file was modified") + + print " Opening mmap with size too big" + import sys + f = open(TESTFN, "r+b") + try: + m = mmap.mmap(f.fileno(), mapsize+1) + except ValueError: + # we do not expect a ValueError on Windows + # CAUTION: This also changes the size of the file on disk, and + # later tests assume that the length hasn't changed. We need to + # repair that. + if sys.platform.startswith('win'): + verify(0, "Opening mmap with size+1 should work on Windows.") + else: + # we expect a ValueError on Unix, but not on Windows + if not sys.platform.startswith('win'): + verify(0, "Opening mmap with size+1 should raise ValueError.") + m.close() + f.close() + if sys.platform.startswith('win'): + # Repair damage from the resizing test. + f = open(TESTFN, 'r+b') + f.truncate(mapsize) + f.close() + + print " Opening mmap with access=ACCESS_WRITE" + f = open(TESTFN, "r+b") + m = mmap.mmap(f.fileno(), mapsize, access=mmap.ACCESS_WRITE) + print " Modifying write-through memory map." + m[:] = 'c'*mapsize + verify(m[:] == 'c'*mapsize, + "Write-through memory map memory not updated properly.") + m.flush() + m.close() + f.close() + f = open(TESTFN, 'rb') + stuff = f.read() + f.close() + verify(stuff == 'c'*mapsize, + "Write-through memory map data file not updated properly.") + + print " Opening mmap with access=ACCESS_COPY" + f = open(TESTFN, "r+b") + m = mmap.mmap(f.fileno(), mapsize, access=mmap.ACCESS_COPY) + print " Modifying copy-on-write memory map." + m[:] = 'd'*mapsize + verify(m[:] == 'd' * mapsize, + "Copy-on-write memory map data not written correctly.") + m.flush() + verify(open(TESTFN, "rb").read() == 'c'*mapsize, + "Copy-on-write test data file should not be modified.") + try: + print " Ensuring copy-on-write maps cannot be resized." + m.resize(2*mapsize) + except TypeError: + pass + else: + verify(0, "Copy-on-write mmap resize did not raise exception.") + del m, f + try: + print " Ensuring invalid access parameter raises exception." + f = open(TESTFN, "r+b") + m = mmap.mmap(f.fileno(), mapsize, access=4) + except ValueError: + pass + else: + verify(0, "Invalid access code should have raised exception.") + + if os.name == "posix": + # Try incompatible flags, prot and access parameters. + f = open(TESTFN, "r+b") + try: + m = mmap.mmap(f.fileno(), mapsize, flags=mmap.MAP_PRIVATE, + prot=mmap.PROT_READ, access=mmap.ACCESS_WRITE) + except ValueError: + pass + else: + verify(0, "Incompatible parameters should raise ValueError.") + f.close() + finally: + try: + os.unlink(TESTFN) + except OSError: + pass + + print ' Try opening a bad file descriptor...' + try: + mmap.mmap(-2, 4096) + except mmap.error: + pass + else: + verify(0, 'expected a mmap.error but did not get it') + + # Do a tougher .find() test. SF bug 515943 pointed out that, in 2.2, + # searching for data with embedded \0 bytes didn't work. 
+ f = open(TESTFN, 'w+') + + try: # unlink TESTFN no matter what + data = 'aabaac\x00deef\x00\x00aa\x00' + n = len(data) + f.write(data) + f.flush() + m = mmap.mmap(f.fileno(), n) + f.close() + + for start in range(n+1): + for finish in range(start, n+1): + slice = data[start : finish] + vereq(m.find(slice), data.find(slice)) + vereq(m.find(slice + 'x'), -1) + m.close() + + finally: + os.unlink(TESTFN) + + # make sure a double close doesn't crash on Solaris (Bug# 665913) + f = open(TESTFN, 'w+') + + try: # unlink TESTFN no matter what + f.write(2**16 * 'a') # Arbitrary character + f.close() + + f = open(TESTFN) + mf = mmap.mmap(f.fileno(), 2**16, access=mmap.ACCESS_READ) + mf.close() + mf.close() + f.close() + + finally: + os.unlink(TESTFN) + + # test mapping of entire file by passing 0 for map length + if hasattr(os, "stat"): + print " Ensuring that passing 0 as map length sets map size to current file size." + f = open(TESTFN, "w+") + + try: + f.write(2**16 * 'm') # Arbitrary character + f.close() + + f = open(TESTFN, "rb+") + mf = mmap.mmap(f.fileno(), 0) + verify(len(mf) == 2**16, "Map size should equal file size.") + vereq(mf.read(2**16), 2**16 * "m") + mf.close() + f.close() + + finally: + os.unlink(TESTFN) + + # test mapping of entire file by passing 0 for map length + if hasattr(os, "stat"): + print " Ensuring that passing 0 as map length sets map size to current file size." + f = open(TESTFN, "w+") + try: + f.write(2**16 * 'm') # Arbitrary character + f.close() + + f = open(TESTFN, "rb+") + mf = mmap.mmap(f.fileno(), 0) + verify(len(mf) == 2**16, "Map size should equal file size.") + vereq(mf.read(2**16), 2**16 * "m") + mf.close() + f.close() + + finally: + os.unlink(TESTFN) + + # make move works everywhere (64-bit format problem earlier) + f = open(TESTFN, 'w+') + + try: # unlink TESTFN no matter what + f.write("ABCDEabcde") # Arbitrary character + f.flush() + + mf = mmap.mmap(f.fileno(), 10) + mf.move(5, 0, 5) + verify(mf[:] == "ABCDEABCDE", "Map move should have duplicated front 5") + mf.close() + f.close() + + finally: + os.unlink(TESTFN) + +def test_anon(): + print " anonymous mmap.mmap(-1, PAGESIZE)..." 
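# [Editor's note -- illustrative sketch, not part of the committed diff.
# Passing -1 as the file descriptor asks mmap for an anonymous mapping: a
# block of zero-filled memory not backed by any file, e.g.
#
#     import mmap
#     buf = mmap.mmap(-1, mmap.PAGESIZE)   # anonymous, initially all '\0'
#     buf[0] = 'x'                         # writable like a file-backed map
#     buf.close()
#
# which is exactly the behaviour the loop below verifies byte by byte.]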
+ m = mmap.mmap(-1, PAGESIZE) + for x in xrange(PAGESIZE): + verify(m[x] == '\0', "anonymously mmap'ed contents should be zero") + + for x in xrange(PAGESIZE): + m[x] = ch = chr(x & 255) + vereq(m[x], ch) + +test_both() +test_anon() +print ' Test passed' Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,48 @@ +# Test the module type + +from test.test_support import verify, vereq, verbose, TestFailed + +import sys +module = type(sys) + +# An uninitialized module has no __dict__ or __name__, and __doc__ is None +foo = module.__new__(module) +verify(not foo.__dict__) +try: + s = foo.__name__ +except AttributeError: + pass +else: + raise TestFailed, "__name__ = %s" % repr(s) +vereq(foo.__doc__, module.__doc__) + +# Regularly initialized module, no docstring +foo = module("foo") +vereq(foo.__name__, "foo") +vereq(foo.__doc__, None) +vereq(foo.__dict__, {"__name__": "foo", "__doc__": None}) + +# ASCII docstring +foo = module("foo", "foodoc") +vereq(foo.__name__, "foo") +vereq(foo.__doc__, "foodoc") +vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc"}) + +# Unicode docstring +foo = module("foo", u"foodoc\u1234") +vereq(foo.__name__, "foo") +vereq(foo.__doc__, u"foodoc\u1234") +vereq(foo.__dict__, {"__name__": "foo", "__doc__": u"foodoc\u1234"}) + +# Reinitialization should not replace the __dict__ +foo.bar = 42 +d = foo.__dict__ +foo.__init__("foo", "foodoc") +vereq(foo.__name__, "foo") +vereq(foo.__doc__, "foodoc") +vereq(foo.bar, 42) +vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc", "bar": 42}) +verify(foo.__dict__ is d) + +if verbose: + print "All OK" Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py Mon Aug 18 01:37:16 2008 @@ -0,0 +1,292 @@ +from test.test_support import verbose, TESTFN +import random +import os + +# From SF bug #422121: Insecurities in dict comparison. + +# Safety of code doing comparisons has been an historical Python weak spot. +# The problem is that comparison of structures written in C *naturally* +# wants to hold on to things like the size of the container, or "the +# biggest" containee so far, across a traversal of the container; but +# code to do containee comparisons can call back into Python and mutate +# the container in arbitrary ways while the C loop is in midstream. If the +# C code isn't extremely paranoid about digging things out of memory on +# each trip, and artificially boosting refcounts for the duration, anything +# from infinite loops to OS crashes can result (yes, I use Windows ). +# +# The other problem is that code designed to provoke a weakness is usually +# white-box code, and so catches only the particular vulnerabilities the +# author knew to protect against. For example, Python's list.sort() code +# went thru many iterations as one "new" vulnerability after another was +# discovered. +# +# So the dict comparison test here uses a black-box approach instead, +# generating dicts of various sizes at random, and performing random +# mutations on them at random times. This proved very effective, +# triggering at least six distinct failure modes the first 20 times I +# ran it. 
Indeed, at the start, the driver never got beyond 6 iterations +# before the test died. + +# The dicts are global to make it easy to mutate tham from within functions. +dict1 = {} +dict2 = {} + +# The current set of keys in dict1 and dict2. These are materialized as +# lists to make it easy to pick a dict key at random. +dict1keys = [] +dict2keys = [] + +# Global flag telling maybe_mutate() whether to *consider* mutating. +mutate = 0 + +# If global mutate is true, consider mutating a dict. May or may not +# mutate a dict even if mutate is true. If it does decide to mutate a +# dict, it picks one of {dict1, dict2} at random, and deletes a random +# entry from it; or, more rarely, adds a random element. + +def maybe_mutate(): + global mutate + if not mutate: + return + if random.random() < 0.5: + return + + if random.random() < 0.5: + target, keys = dict1, dict1keys + else: + target, keys = dict2, dict2keys + + if random.random() < 0.2: + # Insert a new key. + mutate = 0 # disable mutation until key inserted + while 1: + newkey = Horrid(random.randrange(100)) + if newkey not in target: + break + target[newkey] = Horrid(random.randrange(100)) + keys.append(newkey) + mutate = 1 + + elif keys: + # Delete a key at random. + mutate = 0 # disable mutation until key deleted + i = random.randrange(len(keys)) + key = keys[i] + del target[key] + del keys[i] + mutate = 1 + +# A horrid class that triggers random mutations of dict1 and dict2 when +# instances are compared. + +class Horrid: + def __init__(self, i): + # Comparison outcomes are determined by the value of i. + self.i = i + + # An artificial hashcode is selected at random so that we don't + # have any systematic relationship between comparison outcomes + # (based on self.i and other.i) and relative position within the + # hash vector (based on hashcode). + self.hashcode = random.randrange(1000000000) + + def __hash__(self): + return 42 + return self.hashcode + + def __cmp__(self, other): + maybe_mutate() # The point of the test. + return cmp(self.i, other.i) + + def __eq__(self, other): + maybe_mutate() # The point of the test. + return self.i == other.i + + def __repr__(self): + return "Horrid(%d)" % self.i + +# Fill dict d with numentries (Horrid(i), Horrid(j)) key-value pairs, +# where i and j are selected at random from the candidates list. +# Return d.keys() after filling. + +def fill_dict(d, candidates, numentries): + d.clear() + for i in xrange(numentries): + d[Horrid(random.choice(candidates))] = \ + Horrid(random.choice(candidates)) + return d.keys() + +# Test one pair of randomly generated dicts, each with n entries. +# Note that dict comparison is trivial if they don't have the same number +# of entires (then the "shorter" dict is instantly considered to be the +# smaller one, without even looking at the entries). + +def test_one(n): + global mutate, dict1, dict2, dict1keys, dict2keys + + # Fill the dicts without mutating them. + mutate = 0 + dict1keys = fill_dict(dict1, range(n), n) + dict2keys = fill_dict(dict2, range(n), n) + + # Enable mutation, then compare the dicts so long as they have the + # same size. + mutate = 1 + if verbose: + print "trying w/ lengths", len(dict1), len(dict2), + while dict1 and len(dict1) == len(dict2): + if verbose: + print ".", + if random.random() < 0.5: + c = cmp(dict1, dict2) + else: + c = dict1 == dict2 + if verbose: + print + +# Run test_one n times. At the start (before the bugs were fixed), 20 +# consecutive runs of this test each blew up on or before the sixth time +# test_one was run. 
So n doesn't have to be large to get an interesting +# test. +# OTOH, calling with large n is also interesting, to ensure that the fixed +# code doesn't hold on to refcounts *too* long (in which case memory would +# leak). + +def test(n): + for i in xrange(n): + test_one(random.randrange(1, 100)) + +# See last comment block for clues about good values for n. +test(20) + +########################################################################## +# Another segfault bug, distilled by Michael Hudson from a c.l.py post. + +class Child: + def __init__(self, parent): + self.__dict__['parent'] = parent + def __getattr__(self, attr): + self.parent.a = 1 + self.parent.b = 1 + self.parent.c = 1 + self.parent.d = 1 + self.parent.e = 1 + self.parent.f = 1 + self.parent.g = 1 + self.parent.h = 1 + self.parent.i = 1 + return getattr(self.parent, attr) + +class Parent: + def __init__(self): + self.a = Child(self) + +# Hard to say what this will print! May vary from time to time. But +# we're specifically trying to test the tp_print slot here, and this is +# the clearest way to do it. We print the result to a temp file so that +# the expected-output file doesn't need to change. + +f = open(TESTFN, "w") +print >> f, Parent().__dict__ +f.close() +os.unlink(TESTFN) + +########################################################################## +# And another core-dumper from Michael Hudson. + +dict = {} + +# Force dict to malloc its table. +for i in range(1, 10): + dict[i] = i + +f = open(TESTFN, "w") + +class Machiavelli: + def __repr__(self): + dict.clear() + + # Michael sez: "doesn't crash without this. don't know why." + # Tim sez: "luck of the draw; crashes with or without for me." + print >> f + + return `"machiavelli"` + + def __hash__(self): + return 0 + +dict[Machiavelli()] = Machiavelli() + +print >> f, str(dict) +f.close() +os.unlink(TESTFN) +del f, dict + + +########################################################################## +# And another core-dumper from Michael Hudson. + +dict = {} + +# let's force dict to malloc its table +for i in range(1, 10): + dict[i] = i + +class Machiavelli2: + def __eq__(self, other): + dict.clear() + return 1 + + def __hash__(self): + return 0 + +dict[Machiavelli2()] = Machiavelli2() + +try: + dict[Machiavelli2()] +except KeyError: + pass + +del dict + +########################################################################## +# And another core-dumper from Michael Hudson. 
+ +dict = {} + +# let's force dict to malloc its table +for i in range(1, 10): + dict[i] = i + +class Machiavelli3: + def __init__(self, id): + self.id = id + + def __eq__(self, other): + if self.id == other.id: + dict.clear() + return 1 + else: + return 0 + + def __repr__(self): + return "%s(%s)"%(self.__class__.__name__, self.id) + + def __hash__(self): + return 0 + +dict[Machiavelli3(1)] = Machiavelli3(0) +dict[Machiavelli3(2)] = Machiavelli3(0) + +f = open(TESTFN, "w") +try: + try: + print >> f, dict[Machiavelli3(2)] + except KeyError: + pass +finally: + f.close() + os.unlink(TESTFN) + +del dict +del dict1, dict2, dict1keys, dict2keys From bgola at codespeak.net Mon Aug 18 06:29:15 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 06:29:15 +0200 (CEST) Subject: [pypy-svn] r57383 - pypy/branch/2.5-features/lib-python/modified-2.5.1/test Message-ID: <20080818042915.1D0D4169E04@codespeak.net> Author: bgola Date: Mon Aug 18 06:29:12 2008 New Revision: 57383 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_operations.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sort.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py (contents, props changed) pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py (contents, props changed) 
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/tf_inherit_check.py (contents, props changed) Log: porting the stdlib tests Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_operations.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_operations.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,78 @@ +# Python test set -- part 3, built-in operations. + + +print '3. Operations' +print 'XXX Mostly not yet implemented' + + +print '3.1 Dictionary lookups fail if __cmp__() raises an exception' + +class BadDictKey: + + def __hash__(self): + return hash(self.__class__) + + def __cmp__(self, other): + if isinstance(other, self.__class__): + print "raising error" + raise RuntimeError, "gotcha" + return other + +d = {} +x1 = BadDictKey() +x2 = BadDictKey() +d[x1] = 1 +for stmt in ['d[x2] = 2', + 'z = d[x2]', + 'x2 in d', + 'd.has_key(x2)', + 'd.get(x2)', + 'd.setdefault(x2, 42)', + 'd.pop(x2)', + 'd.update({x2: 2})']: + try: + exec stmt + except RuntimeError: + print "%s: caught the RuntimeError outside" % (stmt,) + else: + print "%s: No exception passed through!" # old CPython behavior + + +# Dict resizing bug, found by Jack Jansen in 2.2 CVS development. +# This version got an assert failure in debug build, infinite loop in +# release build. Unfortunately, provoking this kind of stuff requires +# a mix of inserts and deletes hitting exactly the right hash codes in +# exactly the right order, and I can't think of a randomized approach +# that would be *likely* to hit a failing case in reasonable time. + +d = {} +for i in range(5): + d[i] = i +for i in range(5): + del d[i] +for i in range(5, 9): # i==8 was the problem + d[i] = i + + +# Another dict resizing bug (SF bug #1456209). +# This caused Segmentation faults or Illegal instructions. + +class X(object): + def __hash__(self): + return 5 + def __eq__(self, other): + if resizing: + d.clear() + return False +d = {} +resizing = False +d[X()] = 1 +d[X()] = 2 +d[X()] = 3 +d[X()] = 4 +d[X()] = 5 +# now trigger a resize +resizing = True +d[9] = 6 + +print 'resize bugs not triggered.' Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,1642 @@ +#!/usr/bin/python + +# +# Test suite for Optik. Supplied by Johannes Gijsbers +# (taradino at softhome.net) -- translated from the original Optik +# test suite to this PyUnit-based version. +# +# $Id: test_optparse.py 50791 2006-07-23 16:05:51Z greg.ward $ +# + +import sys +import os +import re +import copy +import types +import unittest + +from StringIO import StringIO +from pprint import pprint +from test import test_support + + +from optparse import make_option, Option, IndentedHelpFormatter, \ + TitledHelpFormatter, OptionParser, OptionContainer, OptionGroup, \ + SUPPRESS_HELP, SUPPRESS_USAGE, OptionError, OptionConflictError, \ + BadOptionError, OptionValueError, Values +from optparse import _match_abbrev +from optparse import _parse_num + +# Do the right thing with boolean values for all known Python versions. 
+try: + True, False +except NameError: + (True, False) = (1, 0) + +retype = type(re.compile('')) + +class InterceptedError(Exception): + def __init__(self, + error_message=None, + exit_status=None, + exit_message=None): + self.error_message = error_message + self.exit_status = exit_status + self.exit_message = exit_message + + def __str__(self): + return self.error_message or self.exit_message or "intercepted error" + +class InterceptingOptionParser(OptionParser): + def exit(self, status=0, msg=None): + raise InterceptedError(exit_status=status, exit_message=msg) + + def error(self, msg): + raise InterceptedError(error_message=msg) + + +class BaseTest(unittest.TestCase): + def assertParseOK(self, args, expected_opts, expected_positional_args): + """Assert the options are what we expected when parsing arguments. + + Otherwise, fail with a nicely formatted message. + + Keyword arguments: + args -- A list of arguments to parse with OptionParser. + expected_opts -- The options expected. + expected_positional_args -- The positional arguments expected. + + Returns the options and positional args for further testing. + """ + + (options, positional_args) = self.parser.parse_args(args) + optdict = vars(options) + + self.assertEqual(optdict, expected_opts, + """ +Options are %(optdict)s. +Should be %(expected_opts)s. +Args were %(args)s.""" % locals()) + + self.assertEqual(positional_args, expected_positional_args, + """ +Positional arguments are %(positional_args)s. +Should be %(expected_positional_args)s. +Args were %(args)s.""" % locals ()) + + return (options, positional_args) + + def assertRaises(self, + func, + args, + kwargs, + expected_exception, + expected_message): + """ + Assert that the expected exception is raised when calling a + function, and that the right error message is included with + that exception. + + Arguments: + func -- the function to call + args -- positional arguments to `func` + kwargs -- keyword arguments to `func` + expected_exception -- exception that should be raised + expected_message -- expected exception message (or pattern + if a compiled regex object) + + Returns the exception raised for further testing. + """ + if args is None: + args = () + if kwargs is None: + kwargs = {} + + try: + func(*args, **kwargs) + except expected_exception, err: + actual_message = str(err) + if isinstance(expected_message, retype): + self.assert_(expected_message.search(actual_message), + """\ +expected exception message pattern: +/%s/ +actual exception message: +'''%s''' +""" % (expected_message.pattern, actual_message)) + else: + self.assertEqual(actual_message, + expected_message, + """\ +expected exception message: +'''%s''' +actual exception message: +'''%s''' +""" % (expected_message, actual_message)) + + return err + else: + self.fail("""expected exception %(expected_exception)s not raised +called %(func)r +with args %(args)r +and kwargs %(kwargs)r +""" % locals ()) + + + # -- Assertions used in more than one class -------------------- + + def assertParseFail(self, cmdline_args, expected_output): + """ + Assert the parser fails with the expected message. Caller + must ensure that self.parser is an InterceptingOptionParser. 
+ """ + try: + self.parser.parse_args(cmdline_args) + except InterceptedError, err: + self.assertEqual(err.error_message, expected_output) + else: + self.assertFalse("expected parse failure") + + def assertOutput(self, + cmdline_args, + expected_output, + expected_status=0, + expected_error=None): + """Assert the parser prints the expected output on stdout.""" + save_stdout = sys.stdout + encoding = getattr(save_stdout, 'encoding', None) + try: + try: + sys.stdout = StringIO() + if encoding: + sys.stdout.encoding = encoding + self.parser.parse_args(cmdline_args) + finally: + output = sys.stdout.getvalue() + sys.stdout = save_stdout + + except InterceptedError, err: + self.assert_( + type(output) is types.StringType, + "expected output to be an ordinary string, not %r" + % type(output)) + + if output != expected_output: + self.fail("expected: \n'''\n" + expected_output + + "'''\nbut got \n'''\n" + output + "'''") + self.assertEqual(err.exit_status, expected_status) + self.assertEqual(err.exit_message, expected_error) + else: + self.assertFalse("expected parser.exit()") + + def assertTypeError(self, func, expected_message, *args): + """Assert that TypeError is raised when executing func.""" + self.assertRaises(func, args, None, TypeError, expected_message) + + def assertHelp(self, parser, expected_help): + actual_help = parser.format_help() + if actual_help != expected_help: + raise self.failureException( + 'help text failure; expected:\n"' + + expected_help + '"; got:\n"' + + actual_help + '"\n') + +# -- Test make_option() aka Option ------------------------------------- + +# It's not necessary to test correct options here. All the tests in the +# parser.parse_args() section deal with those, because they're needed +# there. + +class TestOptionChecks(BaseTest): + def setUp(self): + self.parser = OptionParser(usage=SUPPRESS_USAGE) + + def assertOptionError(self, expected_message, args=[], kwargs={}): + self.assertRaises(make_option, args, kwargs, + OptionError, expected_message) + + def test_opt_string_empty(self): + self.assertTypeError(make_option, + "at least one option string must be supplied") + + def test_opt_string_too_short(self): + self.assertOptionError( + "invalid option string 'b': must be at least two characters long", + ["b"]) + + def test_opt_string_short_invalid(self): + self.assertOptionError( + "invalid short option string '--': must be " + "of the form -x, (x any non-dash char)", + ["--"]) + + def test_opt_string_long_invalid(self): + self.assertOptionError( + "invalid long option string '---': " + "must start with --, followed by non-dash", + ["---"]) + + def test_attr_invalid(self): + d = {'foo': None, 'bar': None} + msg = ', '.join(d.keys()) + self.assertOptionError( + "option -b: invalid keyword arguments: %s" % msg, + ["-b"], d) + + def test_action_invalid(self): + self.assertOptionError( + "option -b: invalid action: 'foo'", + ["-b"], {'action': 'foo'}) + + def test_type_invalid(self): + self.assertOptionError( + "option -b: invalid option type: 'foo'", + ["-b"], {'type': 'foo'}) + self.assertOptionError( + "option -b: invalid option type: 'tuple'", + ["-b"], {'type': tuple}) + + def test_no_type_for_action(self): + self.assertOptionError( + "option -b: must not supply a type for action 'count'", + ["-b"], {'action': 'count', 'type': 'int'}) + + def test_no_choices_list(self): + self.assertOptionError( + "option -b/--bad: must supply a list of " + "choices for type 'choice'", + ["-b", "--bad"], {'type': "choice"}) + + def test_bad_choices_list(self): + typename = 
type('').__name__ + self.assertOptionError( + "option -b/--bad: choices must be a list of " + "strings ('%s' supplied)" % typename, + ["-b", "--bad"], + {'type': "choice", 'choices':"bad choices"}) + + def test_no_choices_for_type(self): + self.assertOptionError( + "option -b: must not supply choices for type 'int'", + ["-b"], {'type': 'int', 'choices':"bad"}) + + def test_no_const_for_action(self): + self.assertOptionError( + "option -b: 'const' must not be supplied for action 'store'", + ["-b"], {'action': 'store', 'const': 1}) + + def test_no_nargs_for_action(self): + self.assertOptionError( + "option -b: 'nargs' must not be supplied for action 'count'", + ["-b"], {'action': 'count', 'nargs': 2}) + + def test_callback_not_callable(self): + self.assertOptionError( + "option -b: callback not callable: 'foo'", + ["-b"], {'action': 'callback', + 'callback': 'foo'}) + + def dummy(self): + pass + + def test_callback_args_no_tuple(self): + self.assertOptionError( + "option -b: callback_args, if supplied, " + "must be a tuple: not 'foo'", + ["-b"], {'action': 'callback', + 'callback': self.dummy, + 'callback_args': 'foo'}) + + def test_callback_kwargs_no_dict(self): + self.assertOptionError( + "option -b: callback_kwargs, if supplied, " + "must be a dict: not 'foo'", + ["-b"], {'action': 'callback', + 'callback': self.dummy, + 'callback_kwargs': 'foo'}) + + def test_no_callback_for_action(self): + self.assertOptionError( + "option -b: callback supplied ('foo') for non-callback option", + ["-b"], {'action': 'store', + 'callback': 'foo'}) + + def test_no_callback_args_for_action(self): + self.assertOptionError( + "option -b: callback_args supplied for non-callback option", + ["-b"], {'action': 'store', + 'callback_args': 'foo'}) + + def test_no_callback_kwargs_for_action(self): + self.assertOptionError( + "option -b: callback_kwargs supplied for non-callback option", + ["-b"], {'action': 'store', + 'callback_kwargs': 'foo'}) + +class TestOptionParser(BaseTest): + def setUp(self): + self.parser = OptionParser() + self.parser.add_option("-v", "--verbose", "-n", "--noisy", + action="store_true", dest="verbose") + self.parser.add_option("-q", "--quiet", "--silent", + action="store_false", dest="verbose") + + def test_add_option_no_Option(self): + self.assertTypeError(self.parser.add_option, + "not an Option instance: None", None) + + def test_add_option_invalid_arguments(self): + self.assertTypeError(self.parser.add_option, + "invalid arguments", None, None) + + def test_get_option(self): + opt1 = self.parser.get_option("-v") + self.assert_(isinstance(opt1, Option)) + self.assertEqual(opt1._short_opts, ["-v", "-n"]) + self.assertEqual(opt1._long_opts, ["--verbose", "--noisy"]) + self.assertEqual(opt1.action, "store_true") + self.assertEqual(opt1.dest, "verbose") + + def test_get_option_equals(self): + opt1 = self.parser.get_option("-v") + opt2 = self.parser.get_option("--verbose") + opt3 = self.parser.get_option("-n") + opt4 = self.parser.get_option("--noisy") + self.assert_(opt1 is opt2 is opt3 is opt4) + + def test_has_option(self): + self.assert_(self.parser.has_option("-v")) + self.assert_(self.parser.has_option("--verbose")) + + def assert_removed(self): + self.assert_(self.parser.get_option("-v") is None) + self.assert_(self.parser.get_option("--verbose") is None) + self.assert_(self.parser.get_option("-n") is None) + self.assert_(self.parser.get_option("--noisy") is None) + + self.failIf(self.parser.has_option("-v")) + self.failIf(self.parser.has_option("--verbose")) + 
self.failIf(self.parser.has_option("-n")) + self.failIf(self.parser.has_option("--noisy")) + + self.assert_(self.parser.has_option("-q")) + self.assert_(self.parser.has_option("--silent")) + + def test_remove_short_opt(self): + self.parser.remove_option("-n") + self.assert_removed() + + def test_remove_long_opt(self): + self.parser.remove_option("--verbose") + self.assert_removed() + + def test_remove_nonexistent(self): + self.assertRaises(self.parser.remove_option, ('foo',), None, + ValueError, "no such option 'foo'") + + def test_refleak(self): + # If an OptionParser is carrying around a reference to a large + # object, various cycles can prevent it from being GC'd in + # a timely fashion. destroy() breaks the cycles to ensure stuff + # can be cleaned up. + big_thing = [42] + refcount = sys.getrefcount(big_thing) + parser = OptionParser() + parser.add_option("-a", "--aaarggh") + parser.big_thing = big_thing + + parser.destroy() + #self.assertEqual(refcount, sys.getrefcount(big_thing)) + del parser + self.assertEqual(refcount, sys.getrefcount(big_thing)) + + +class TestOptionValues(BaseTest): + def setUp(self): + pass + + def test_basics(self): + values = Values() + self.assertEqual(vars(values), {}) + self.assertEqual(values, {}) + self.assertNotEqual(values, {"foo": "bar"}) + self.assertNotEqual(values, "") + + dict = {"foo": "bar", "baz": 42} + values = Values(defaults=dict) + self.assertEqual(vars(values), dict) + self.assertEqual(values, dict) + self.assertNotEqual(values, {"foo": "bar"}) + self.assertNotEqual(values, {}) + self.assertNotEqual(values, "") + self.assertNotEqual(values, []) + + +class TestTypeAliases(BaseTest): + def setUp(self): + self.parser = OptionParser() + + def test_str_aliases_string(self): + self.parser.add_option("-s", type="str") + self.assertEquals(self.parser.get_option("-s").type, "string") + + def test_new_type_object(self): + self.parser.add_option("-s", type=str) + self.assertEquals(self.parser.get_option("-s").type, "string") + self.parser.add_option("-x", type=int) + self.assertEquals(self.parser.get_option("-x").type, "int") + + def test_old_type_object(self): + self.parser.add_option("-s", type=types.StringType) + self.assertEquals(self.parser.get_option("-s").type, "string") + self.parser.add_option("-x", type=types.IntType) + self.assertEquals(self.parser.get_option("-x").type, "int") + + +# Custom type for testing processing of default values. 
+_time_units = { 's' : 1, 'm' : 60, 'h' : 60*60, 'd' : 60*60*24 } + +def _check_duration(option, opt, value): + try: + if value[-1].isdigit(): + return int(value) + else: + return int(value[:-1]) * _time_units[value[-1]] + except (ValueError, IndexError): + raise OptionValueError( + 'option %s: invalid duration: %r' % (opt, value)) + +class DurationOption(Option): + TYPES = Option.TYPES + ('duration',) + TYPE_CHECKER = copy.copy(Option.TYPE_CHECKER) + TYPE_CHECKER['duration'] = _check_duration + +class TestDefaultValues(BaseTest): + def setUp(self): + self.parser = OptionParser() + self.parser.add_option("-v", "--verbose", default=True) + self.parser.add_option("-q", "--quiet", dest='verbose') + self.parser.add_option("-n", type="int", default=37) + self.parser.add_option("-m", type="int") + self.parser.add_option("-s", default="foo") + self.parser.add_option("-t") + self.parser.add_option("-u", default=None) + self.expected = { 'verbose': True, + 'n': 37, + 'm': None, + 's': "foo", + 't': None, + 'u': None } + + def test_basic_defaults(self): + self.assertEqual(self.parser.get_default_values(), self.expected) + + def test_mixed_defaults_post(self): + self.parser.set_defaults(n=42, m=-100) + self.expected.update({'n': 42, 'm': -100}) + self.assertEqual(self.parser.get_default_values(), self.expected) + + def test_mixed_defaults_pre(self): + self.parser.set_defaults(x="barf", y="blah") + self.parser.add_option("-x", default="frob") + self.parser.add_option("-y") + + self.expected.update({'x': "frob", 'y': "blah"}) + self.assertEqual(self.parser.get_default_values(), self.expected) + + self.parser.remove_option("-y") + self.parser.add_option("-y", default=None) + self.expected.update({'y': None}) + self.assertEqual(self.parser.get_default_values(), self.expected) + + def test_process_default(self): + self.parser.option_class = DurationOption + self.parser.add_option("-d", type="duration", default=300) + self.parser.add_option("-e", type="duration", default="6m") + self.parser.set_defaults(n="42") + self.expected.update({'d': 300, 'e': 360, 'n': 42}) + self.assertEqual(self.parser.get_default_values(), self.expected) + + self.parser.set_process_default_values(False) + self.expected.update({'d': 300, 'e': "6m", 'n': "42"}) + self.assertEqual(self.parser.get_default_values(), self.expected) + + +class TestProgName(BaseTest): + """ + Test that %prog expands to the right thing in usage, version, + and help strings. + """ + + def assertUsage(self, parser, expected_usage): + self.assertEqual(parser.get_usage(), expected_usage) + + def assertVersion(self, parser, expected_version): + self.assertEqual(parser.get_version(), expected_version) + + + def test_default_progname(self): + # Make sure that program name is taken from sys.argv[0] by default.
+ save_argv = sys.argv[:] + try: + sys.argv[0] = os.path.join("foo", "bar", "baz.py") + parser = OptionParser("%prog ...", version="%prog 1.2") + expected_usage = "Usage: baz.py ...\n" + self.assertUsage(parser, expected_usage) + self.assertVersion(parser, "baz.py 1.2") + self.assertHelp(parser, + expected_usage + "\n" + + "Options:\n" + " --version show program's version number and exit\n" + " -h, --help show this help message and exit\n") + finally: + sys.argv[:] = save_argv + + def test_custom_progname(self): + parser = OptionParser(prog="thingy", + version="%prog 0.1", + usage="%prog arg arg") + parser.remove_option("-h") + parser.remove_option("--version") + expected_usage = "Usage: thingy arg arg\n" + self.assertUsage(parser, expected_usage) + self.assertVersion(parser, "thingy 0.1") + self.assertHelp(parser, expected_usage + "\n") + + +class TestExpandDefaults(BaseTest): + def setUp(self): + self.parser = OptionParser(prog="test") + self.help_prefix = """\ +Usage: test [options] + +Options: + -h, --help show this help message and exit +""" + self.file_help = "read from FILE [default: %default]" + self.expected_help_file = self.help_prefix + \ + " -f FILE, --file=FILE read from FILE [default: foo.txt]\n" + self.expected_help_none = self.help_prefix + \ + " -f FILE, --file=FILE read from FILE [default: none]\n" + + def test_option_default(self): + self.parser.add_option("-f", "--file", + default="foo.txt", + help=self.file_help) + self.assertHelp(self.parser, self.expected_help_file) + + def test_parser_default_1(self): + self.parser.add_option("-f", "--file", + help=self.file_help) + self.parser.set_default('file', "foo.txt") + self.assertHelp(self.parser, self.expected_help_file) + + def test_parser_default_2(self): + self.parser.add_option("-f", "--file", + help=self.file_help) + self.parser.set_defaults(file="foo.txt") + self.assertHelp(self.parser, self.expected_help_file) + + def test_no_default(self): + self.parser.add_option("-f", "--file", + help=self.file_help) + self.assertHelp(self.parser, self.expected_help_none) + + def test_default_none_1(self): + self.parser.add_option("-f", "--file", + default=None, + help=self.file_help) + self.assertHelp(self.parser, self.expected_help_none) + + def test_default_none_2(self): + self.parser.add_option("-f", "--file", + help=self.file_help) + self.parser.set_defaults(file=None) + self.assertHelp(self.parser, self.expected_help_none) + + def test_float_default(self): + self.parser.add_option( + "-p", "--prob", + help="blow up with probability PROB [default: %default]") + self.parser.set_defaults(prob=0.43) + expected_help = self.help_prefix + \ + " -p PROB, --prob=PROB blow up with probability PROB [default: 0.43]\n" + self.assertHelp(self.parser, expected_help) + + def test_alt_expand(self): + self.parser.add_option("-f", "--file", + default="foo.txt", + help="read from FILE [default: *DEFAULT*]") + self.parser.formatter.default_tag = "*DEFAULT*" + self.assertHelp(self.parser, self.expected_help_file) + + def test_no_expand(self): + self.parser.add_option("-f", "--file", + default="foo.txt", + help="read from %default file") + self.parser.formatter.default_tag = None + expected_help = self.help_prefix + \ + " -f FILE, --file=FILE read from %default file\n" + self.assertHelp(self.parser, expected_help) + + +# -- Test parser.parse_args() ------------------------------------------ + +class TestStandard(BaseTest): + def setUp(self): + options = [make_option("-a", type="string"), + make_option("-b", "--boo", type="int", dest='boo'), + 
make_option("--foo", action="append")] + + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + option_list=options) + + def test_required_value(self): + self.assertParseFail(["-a"], "-a option requires an argument") + + def test_invalid_integer(self): + self.assertParseFail(["-b", "5x"], + "option -b: invalid integer value: '5x'") + + def test_no_such_option(self): + self.assertParseFail(["--boo13"], "no such option: --boo13") + + def test_long_invalid_integer(self): + self.assertParseFail(["--boo=x5"], + "option --boo: invalid integer value: 'x5'") + + def test_empty(self): + self.assertParseOK([], {'a': None, 'boo': None, 'foo': None}, []) + + def test_shortopt_empty_longopt_append(self): + self.assertParseOK(["-a", "", "--foo=blah", "--foo="], + {'a': "", 'boo': None, 'foo': ["blah", ""]}, + []) + + def test_long_option_append(self): + self.assertParseOK(["--foo", "bar", "--foo", "", "--foo=x"], + {'a': None, + 'boo': None, + 'foo': ["bar", "", "x"]}, + []) + + def test_option_argument_joined(self): + self.assertParseOK(["-abc"], + {'a': "bc", 'boo': None, 'foo': None}, + []) + + def test_option_argument_split(self): + self.assertParseOK(["-a", "34"], + {'a': "34", 'boo': None, 'foo': None}, + []) + + def test_option_argument_joined_integer(self): + self.assertParseOK(["-b34"], + {'a': None, 'boo': 34, 'foo': None}, + []) + + def test_option_argument_split_negative_integer(self): + self.assertParseOK(["-b", "-5"], + {'a': None, 'boo': -5, 'foo': None}, + []) + + def test_long_option_argument_joined(self): + self.assertParseOK(["--boo=13"], + {'a': None, 'boo': 13, 'foo': None}, + []) + + def test_long_option_argument_split(self): + self.assertParseOK(["--boo", "111"], + {'a': None, 'boo': 111, 'foo': None}, + []) + + def test_long_option_short_option(self): + self.assertParseOK(["--foo=bar", "-axyz"], + {'a': 'xyz', 'boo': None, 'foo': ["bar"]}, + []) + + def test_abbrev_long_option(self): + self.assertParseOK(["--f=bar", "-axyz"], + {'a': 'xyz', 'boo': None, 'foo': ["bar"]}, + []) + + def test_defaults(self): + (options, args) = self.parser.parse_args([]) + defaults = self.parser.get_default_values() + self.assertEqual(vars(defaults), vars(options)) + + def test_ambiguous_option(self): + self.parser.add_option("--foz", action="store", + type="string", dest="foo") + self.assertParseFail(["--f=bar"], + "ambiguous option: --f (--foo, --foz?)") + + + def test_short_and_long_option_split(self): + self.assertParseOK(["-a", "xyz", "--foo", "bar"], + {'a': 'xyz', 'boo': None, 'foo': ["bar"]}, + []), + + def test_short_option_split_long_option_append(self): + self.assertParseOK(["--foo=bar", "-b", "123", "--foo", "baz"], + {'a': None, 'boo': 123, 'foo': ["bar", "baz"]}, + []) + + def test_short_option_split_one_positional_arg(self): + self.assertParseOK(["-a", "foo", "bar"], + {'a': "foo", 'boo': None, 'foo': None}, + ["bar"]), + + def test_short_option_consumes_separator(self): + self.assertParseOK(["-a", "--", "foo", "bar"], + {'a': "--", 'boo': None, 'foo': None}, + ["foo", "bar"]), + self.assertParseOK(["-a", "--", "--foo", "bar"], + {'a': "--", 'boo': None, 'foo': ["bar"]}, + []), + + def test_short_option_joined_and_separator(self): + self.assertParseOK(["-ab", "--", "--foo", "bar"], + {'a': "b", 'boo': None, 'foo': None}, + ["--foo", "bar"]), + + def test_hyphen_becomes_positional_arg(self): + self.assertParseOK(["-ab", "-", "--foo", "bar"], + {'a': "b", 'boo': None, 'foo': ["bar"]}, + ["-"]) + + def test_no_append_versus_append(self): + self.assertParseOK(["-b3", "-b", "5", 
"--foo=bar", "--foo", "baz"], + {'a': None, 'boo': 5, 'foo': ["bar", "baz"]}, + []) + + def test_option_consumes_optionlike_string(self): + self.assertParseOK(["-a", "-b3"], + {'a': "-b3", 'boo': None, 'foo': None}, + []) + +class TestBool(BaseTest): + def setUp(self): + options = [make_option("-v", + "--verbose", + action="store_true", + dest="verbose", + default=''), + make_option("-q", + "--quiet", + action="store_false", + dest="verbose")] + self.parser = OptionParser(option_list = options) + + def test_bool_default(self): + self.assertParseOK([], + {'verbose': ''}, + []) + + def test_bool_false(self): + (options, args) = self.assertParseOK(["-q"], + {'verbose': 0}, + []) + if hasattr(__builtins__, 'False'): + self.failUnless(options.verbose is False) + + def test_bool_true(self): + (options, args) = self.assertParseOK(["-v"], + {'verbose': 1}, + []) + if hasattr(__builtins__, 'True'): + self.failUnless(options.verbose is True) + + def test_bool_flicker_on_and_off(self): + self.assertParseOK(["-qvq", "-q", "-v"], + {'verbose': 1}, + []) + +class TestChoice(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.parser.add_option("-c", action="store", type="choice", + dest="choice", choices=["one", "two", "three"]) + + def test_valid_choice(self): + self.assertParseOK(["-c", "one", "xyz"], + {'choice': 'one'}, + ["xyz"]) + + def test_invalid_choice(self): + self.assertParseFail(["-c", "four", "abc"], + "option -c: invalid choice: 'four' " + "(choose from 'one', 'two', 'three')") + + def test_add_choice_option(self): + self.parser.add_option("-d", "--default", + choices=["four", "five", "six"]) + opt = self.parser.get_option("-d") + self.assertEqual(opt.type, "choice") + self.assertEqual(opt.action, "store") + +class TestCount(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.v_opt = make_option("-v", action="count", dest="verbose") + self.parser.add_option(self.v_opt) + self.parser.add_option("--verbose", type="int", dest="verbose") + self.parser.add_option("-q", "--quiet", + action="store_const", dest="verbose", const=0) + + def test_empty(self): + self.assertParseOK([], {'verbose': None}, []) + + def test_count_one(self): + self.assertParseOK(["-v"], {'verbose': 1}, []) + + def test_count_three(self): + self.assertParseOK(["-vvv"], {'verbose': 3}, []) + + def test_count_three_apart(self): + self.assertParseOK(["-v", "-v", "-v"], {'verbose': 3}, []) + + def test_count_override_amount(self): + self.assertParseOK(["-vvv", "--verbose=2"], {'verbose': 2}, []) + + def test_count_override_quiet(self): + self.assertParseOK(["-vvv", "--verbose=2", "-q"], {'verbose': 0}, []) + + def test_count_overriding(self): + self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"], + {'verbose': 1}, []) + + def test_count_interspersed_args(self): + self.assertParseOK(["--quiet", "3", "-v"], + {'verbose': 1}, + ["3"]) + + def test_count_no_interspersed_args(self): + self.parser.disable_interspersed_args() + self.assertParseOK(["--quiet", "3", "-v"], + {'verbose': 0}, + ["3", "-v"]) + + def test_count_no_such_option(self): + self.assertParseFail(["-q3", "-v"], "no such option: -3") + + def test_count_option_no_value(self): + self.assertParseFail(["--quiet=3", "-v"], + "--quiet option does not take a value") + + def test_count_with_default(self): + self.parser.set_default('verbose', 0) + self.assertParseOK([], {'verbose':0}, []) + + def test_count_overriding_default(self): + self.parser.set_default('verbose', 0) + 
self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"], + {'verbose': 1}, []) + +class TestMultipleArgs(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.parser.add_option("-p", "--point", + action="store", nargs=3, type="float", dest="point") + + def test_nargs_with_positional_args(self): + self.assertParseOK(["foo", "-p", "1", "2.5", "-4.3", "xyz"], + {'point': (1.0, 2.5, -4.3)}, + ["foo", "xyz"]) + + def test_nargs_long_opt(self): + self.assertParseOK(["--point", "-1", "2.5", "-0", "xyz"], + {'point': (-1.0, 2.5, -0.0)}, + ["xyz"]) + + def test_nargs_invalid_float_value(self): + self.assertParseFail(["-p", "1.0", "2x", "3.5"], + "option -p: " + "invalid floating-point value: '2x'") + + def test_nargs_required_values(self): + self.assertParseFail(["--point", "1.0", "3.5"], + "--point option requires 3 arguments") + +class TestMultipleArgsAppend(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.parser.add_option("-p", "--point", action="store", nargs=3, + type="float", dest="point") + self.parser.add_option("-f", "--foo", action="append", nargs=2, + type="int", dest="foo") + self.parser.add_option("-z", "--zero", action="append_const", + dest="foo", const=(0, 0)) + + def test_nargs_append(self): + self.assertParseOK(["-f", "4", "-3", "blah", "--foo", "1", "666"], + {'point': None, 'foo': [(4, -3), (1, 666)]}, + ["blah"]) + + def test_nargs_append_required_values(self): + self.assertParseFail(["-f4,3"], + "-f option requires 2 arguments") + + def test_nargs_append_simple(self): + self.assertParseOK(["--foo=3", "4"], + {'point': None, 'foo':[(3, 4)]}, + []) + + def test_nargs_append_const(self): + self.assertParseOK(["--zero", "--foo", "3", "4", "-z"], + {'point': None, 'foo':[(0, 0), (3, 4), (0, 0)]}, + []) + +class TestVersion(BaseTest): + def test_version(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + version="%prog 0.1") + save_argv = sys.argv[:] + try: + sys.argv[0] = os.path.join(os.curdir, "foo", "bar") + self.assertOutput(["--version"], "bar 0.1\n") + finally: + sys.argv[:] = save_argv + + def test_no_version(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.assertParseFail(["--version"], + "no such option: --version") + +# -- Test conflicting default values and parser.parse_args() ----------- + +class TestConflictingDefaults(BaseTest): + """Conflicting default values: the last one should win.""" + def setUp(self): + self.parser = OptionParser(option_list=[ + make_option("-v", action="store_true", dest="verbose", default=1)]) + + def test_conflict_default(self): + self.parser.add_option("-q", action="store_false", dest="verbose", + default=0) + self.assertParseOK([], {'verbose': 0}, []) + + def test_conflict_default_none(self): + self.parser.add_option("-q", action="store_false", dest="verbose", + default=None) + self.assertParseOK([], {'verbose': None}, []) + +class TestOptionGroup(BaseTest): + def setUp(self): + self.parser = OptionParser(usage=SUPPRESS_USAGE) + + def test_option_group_create_instance(self): + group = OptionGroup(self.parser, "Spam") + self.parser.add_option_group(group) + group.add_option("--spam", action="store_true", + help="spam spam spam spam") + self.assertParseOK(["--spam"], {'spam': 1}, []) + + def test_add_group_no_group(self): + self.assertTypeError(self.parser.add_option_group, + "not an OptionGroup instance: None", None) + + def test_add_group_invalid_arguments(self): + 
self.assertTypeError(self.parser.add_option_group, + "invalid arguments", None, None) + + def test_add_group_wrong_parser(self): + group = OptionGroup(self.parser, "Spam") + group.parser = OptionParser() + self.assertRaises(self.parser.add_option_group, (group,), None, + ValueError, "invalid OptionGroup (wrong parser)") + + def test_group_manipulate(self): + group = self.parser.add_option_group("Group 2", + description="Some more options") + group.set_title("Bacon") + group.add_option("--bacon", type="int") + self.assert_(self.parser.get_option_group("--bacon"), group) + +# -- Test extending and parser.parse_args() ---------------------------- + +class TestExtendAddTypes(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + option_class=self.MyOption) + self.parser.add_option("-a", None, type="string", dest="a") + self.parser.add_option("-f", "--file", type="file", dest="file") + + def tearDown(self): + if os.path.isdir(test_support.TESTFN): + os.rmdir(test_support.TESTFN) + elif os.path.isfile(test_support.TESTFN): + os.unlink(test_support.TESTFN) + + class MyOption (Option): + def check_file(option, opt, value): + if not os.path.exists(value): + raise OptionValueError("%s: file does not exist" % value) + elif not os.path.isfile(value): + raise OptionValueError("%s: not a regular file" % value) + return value + + TYPES = Option.TYPES + ("file",) + TYPE_CHECKER = copy.copy(Option.TYPE_CHECKER) + TYPE_CHECKER["file"] = check_file + + def test_filetype_ok(self): + open(test_support.TESTFN, "w").close() + self.assertParseOK(["--file", test_support.TESTFN, "-afoo"], + {'file': test_support.TESTFN, 'a': 'foo'}, + []) + + def test_filetype_noexist(self): + self.assertParseFail(["--file", test_support.TESTFN, "-afoo"], + "%s: file does not exist" % + test_support.TESTFN) + + def test_filetype_notfile(self): + os.mkdir(test_support.TESTFN) + self.assertParseFail(["--file", test_support.TESTFN, "-afoo"], + "%s: not a regular file" % + test_support.TESTFN) + + +class TestExtendAddActions(BaseTest): + def setUp(self): + options = [self.MyOption("-a", "--apple", action="extend", + type="string", dest="apple")] + self.parser = OptionParser(option_list=options) + + class MyOption (Option): + ACTIONS = Option.ACTIONS + ("extend",) + STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",) + TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",) + + def take_action(self, action, dest, opt, value, values, parser): + if action == "extend": + lvalue = value.split(",") + values.ensure_value(dest, []).extend(lvalue) + else: + Option.take_action(self, action, dest, opt, parser, value, + values) + + def test_extend_add_action(self): + self.assertParseOK(["-afoo,bar", "--apple=blah"], + {'apple': ["foo", "bar", "blah"]}, + []) + + def test_extend_add_action_normal(self): + self.assertParseOK(["-a", "foo", "-abar", "--apple=x,y"], + {'apple': ["foo", "bar", "x", "y"]}, + []) + +# -- Test callbacks and parser.parse_args() ---------------------------- + +class TestCallback(BaseTest): + def setUp(self): + options = [make_option("-x", + None, + action="callback", + callback=self.process_opt), + make_option("-f", + "--file", + action="callback", + callback=self.process_opt, + type="string", + dest="filename")] + self.parser = OptionParser(option_list=options) + + def process_opt(self, option, opt, value, parser_): + if opt == "-x": + self.assertEqual(option._short_opts, ["-x"]) + self.assertEqual(option._long_opts, []) + self.assert_(parser_ is self.parser) + self.assert_(value is None) + 
self.assertEqual(vars(parser_.values), {'filename': None}) + + parser_.values.x = 42 + elif opt == "--file": + self.assertEqual(option._short_opts, ["-f"]) + self.assertEqual(option._long_opts, ["--file"]) + self.assert_(parser_ is self.parser) + self.assertEqual(value, "foo") + self.assertEqual(vars(parser_.values), {'filename': None, 'x': 42}) + + setattr(parser_.values, option.dest, value) + else: + self.fail("Unknown option %r in process_opt." % opt) + + def test_callback(self): + self.assertParseOK(["-x", "--file=foo"], + {'filename': "foo", 'x': 42}, + []) + + def test_callback_help(self): + # This test was prompted by SF bug #960515 -- the point is + # not to inspect the help text, just to make sure that + # format_help() doesn't crash. + parser = OptionParser(usage=SUPPRESS_USAGE) + parser.remove_option("-h") + parser.add_option("-t", "--test", action="callback", + callback=lambda: None, type="string", + help="foo") + + expected_help = ("Options:\n" + " -t TEST, --test=TEST foo\n") + self.assertHelp(parser, expected_help) + + +class TestCallbackExtraArgs(BaseTest): + def setUp(self): + options = [make_option("-p", "--point", action="callback", + callback=self.process_tuple, + callback_args=(3, int), type="string", + dest="points", default=[])] + self.parser = OptionParser(option_list=options) + + def process_tuple(self, option, opt, value, parser_, len, type): + self.assertEqual(len, 3) + self.assert_(type is int) + + if opt == "-p": + self.assertEqual(value, "1,2,3") + elif opt == "--point": + self.assertEqual(value, "4,5,6") + + value = tuple(map(type, value.split(","))) + getattr(parser_.values, option.dest).append(value) + + def test_callback_extra_args(self): + self.assertParseOK(["-p1,2,3", "--point", "4,5,6"], + {'points': [(1,2,3), (4,5,6)]}, + []) + +class TestCallbackMeddleArgs(BaseTest): + def setUp(self): + options = [make_option(str(x), action="callback", + callback=self.process_n, dest='things') + for x in range(-1, -6, -1)] + self.parser = OptionParser(option_list=options) + + # Callback that meddles in rargs, largs + def process_n(self, option, opt, value, parser_): + # option is -3, -5, etc. + nargs = int(opt[1:]) + rargs = parser_.rargs + if len(rargs) < nargs: + self.fail("Expected %d arguments for %s option." 
% (nargs, opt)) + dest = parser_.values.ensure_value(option.dest, []) + dest.append(tuple(rargs[0:nargs])) + parser_.largs.append(nargs) + del rargs[0:nargs] + + def test_callback_meddle_args(self): + self.assertParseOK(["-1", "foo", "-3", "bar", "baz", "qux"], + {'things': [("foo",), ("bar", "baz", "qux")]}, + [1, 3]) + + def test_callback_meddle_args_separator(self): + self.assertParseOK(["-2", "foo", "--"], + {'things': [('foo', '--')]}, + [2]) + +class TestCallbackManyArgs(BaseTest): + def setUp(self): + options = [make_option("-a", "--apple", action="callback", nargs=2, + callback=self.process_many, type="string"), + make_option("-b", "--bob", action="callback", nargs=3, + callback=self.process_many, type="int")] + self.parser = OptionParser(option_list=options) + + def process_many(self, option, opt, value, parser_): + if opt == "-a": + self.assertEqual(value, ("foo", "bar")) + elif opt == "--apple": + self.assertEqual(value, ("ding", "dong")) + elif opt == "-b": + self.assertEqual(value, (1, 2, 3)) + elif opt == "--bob": + self.assertEqual(value, (-666, 42, 0)) + + def test_many_args(self): + self.assertParseOK(["-a", "foo", "bar", "--apple", "ding", "dong", + "-b", "1", "2", "3", "--bob", "-666", "42", + "0"], + {"apple": None, "bob": None}, + []) + +class TestCallbackCheckAbbrev(BaseTest): + def setUp(self): + self.parser = OptionParser() + self.parser.add_option("--foo-bar", action="callback", + callback=self.check_abbrev) + + def check_abbrev(self, option, opt, value, parser): + self.assertEqual(opt, "--foo-bar") + + def test_abbrev_callback_expansion(self): + self.assertParseOK(["--foo"], {}, []) + +class TestCallbackVarArgs(BaseTest): + def setUp(self): + options = [make_option("-a", type="int", nargs=2, dest="a"), + make_option("-b", action="store_true", dest="b"), + make_option("-c", "--callback", action="callback", + callback=self.variable_args, dest="c")] + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + option_list=options) + + def variable_args(self, option, opt, value, parser): + self.assert_(value is None) + done = 0 + value = [] + rargs = parser.rargs + while rargs: + arg = rargs[0] + if ((arg[:2] == "--" and len(arg) > 2) or + (arg[:1] == "-" and len(arg) > 1 and arg[1] != "-")): + break + else: + value.append(arg) + del rargs[0] + setattr(parser.values, option.dest, value) + + def test_variable_args(self): + self.assertParseOK(["-a3", "-5", "--callback", "foo", "bar"], + {'a': (3, -5), 'b': None, 'c': ["foo", "bar"]}, + []) + + def test_consume_separator_stop_at_option(self): + self.assertParseOK(["-c", "37", "--", "xxx", "-b", "hello"], + {'a': None, + 'b': True, + 'c': ["37", "--", "xxx"]}, + ["hello"]) + + def test_positional_arg_and_variable_args(self): + self.assertParseOK(["hello", "-c", "foo", "-", "bar"], + {'a': None, + 'b': None, + 'c':["foo", "-", "bar"]}, + ["hello"]) + + def test_stop_at_option(self): + self.assertParseOK(["-c", "foo", "-b"], + {'a': None, 'b': True, 'c': ["foo"]}, + []) + + def test_stop_at_invalid_option(self): + self.assertParseFail(["-c", "3", "-5", "-a"], "no such option: -5") + + +# -- Test conflict handling and parser.parse_args() -------------------- + +class ConflictBase(BaseTest): + def setUp(self): + options = [make_option("-v", "--verbose", action="count", + dest="verbose", help="increment verbosity")] + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + option_list=options) + + def show_version(self, option, opt, value, parser): + parser.values.show_version = 1 + +class TestConflict(ConflictBase): 
+ """Use the default conflict resolution for Optik 1.2: error.""" + def assert_conflict_error(self, func): + err = self.assertRaises( + func, ("-v", "--version"), {'action' : "callback", + 'callback' : self.show_version, + 'help' : "show version"}, + OptionConflictError, + "option -v/--version: conflicting option string(s): -v") + + self.assertEqual(err.msg, "conflicting option string(s): -v") + self.assertEqual(err.option_id, "-v/--version") + + def test_conflict_error(self): + self.assert_conflict_error(self.parser.add_option) + + def test_conflict_error_group(self): + group = OptionGroup(self.parser, "Group 1") + self.assert_conflict_error(group.add_option) + + def test_no_such_conflict_handler(self): + self.assertRaises( + self.parser.set_conflict_handler, ('foo',), None, + ValueError, "invalid conflict_resolution value 'foo'") + + +class TestConflictResolve(ConflictBase): + def setUp(self): + ConflictBase.setUp(self) + self.parser.set_conflict_handler("resolve") + self.parser.add_option("-v", "--version", action="callback", + callback=self.show_version, help="show version") + + def test_conflict_resolve(self): + v_opt = self.parser.get_option("-v") + verbose_opt = self.parser.get_option("--verbose") + version_opt = self.parser.get_option("--version") + + self.assert_(v_opt is version_opt) + self.assert_(v_opt is not verbose_opt) + self.assertEqual(v_opt._long_opts, ["--version"]) + self.assertEqual(version_opt._short_opts, ["-v"]) + self.assertEqual(version_opt._long_opts, ["--version"]) + self.assertEqual(verbose_opt._short_opts, []) + self.assertEqual(verbose_opt._long_opts, ["--verbose"]) + + def test_conflict_resolve_help(self): + self.assertOutput(["-h"], """\ +Options: + --verbose increment verbosity + -h, --help show this help message and exit + -v, --version show version +""") + + def test_conflict_resolve_short_opt(self): + self.assertParseOK(["-v"], + {'verbose': None, 'show_version': 1}, + []) + + def test_conflict_resolve_long_opt(self): + self.assertParseOK(["--verbose"], + {'verbose': 1}, + []) + + def test_conflict_resolve_long_opts(self): + self.assertParseOK(["--verbose", "--version"], + {'verbose': 1, 'show_version': 1}, + []) + +class TestConflictOverride(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.parser.set_conflict_handler("resolve") + self.parser.add_option("-n", "--dry-run", + action="store_true", dest="dry_run", + help="don't do anything") + self.parser.add_option("--dry-run", "-n", + action="store_const", const=42, dest="dry_run", + help="dry run mode") + + def test_conflict_override_opts(self): + opt = self.parser.get_option("--dry-run") + self.assertEqual(opt._short_opts, ["-n"]) + self.assertEqual(opt._long_opts, ["--dry-run"]) + + def test_conflict_override_help(self): + self.assertOutput(["-h"], """\ +Options: + -h, --help show this help message and exit + -n, --dry-run dry run mode +""") + + def test_conflict_override_args(self): + self.assertParseOK(["-n"], + {'dry_run': 42}, + []) + +# -- Other testing. ---------------------------------------------------- + +_expected_help_basic = """\ +Usage: bar.py [options] + +Options: + -a APPLE throw APPLEs at basket + -b NUM, --boo=NUM shout "boo!" 
NUM times (in order to frighten away all the + evil spirits that cause trouble and mayhem) + --foo=FOO store FOO in the foo list for later fooing + -h, --help show this help message and exit +""" + +_expected_help_long_opts_first = """\ +Usage: bar.py [options] + +Options: + -a APPLE throw APPLEs at basket + --boo=NUM, -b NUM shout "boo!" NUM times (in order to frighten away all the + evil spirits that cause trouble and mayhem) + --foo=FOO store FOO in the foo list for later fooing + --help, -h show this help message and exit +""" + +_expected_help_title_formatter = """\ +Usage +===== + bar.py [options] + +Options +======= +-a APPLE throw APPLEs at basket +--boo=NUM, -b NUM shout "boo!" NUM times (in order to frighten away all the + evil spirits that cause trouble and mayhem) +--foo=FOO store FOO in the foo list for later fooing +--help, -h show this help message and exit +""" + +_expected_help_short_lines = """\ +Usage: bar.py [options] + +Options: + -a APPLE throw APPLEs at basket + -b NUM, --boo=NUM shout "boo!" NUM times (in order to + frighten away all the evil spirits + that cause trouble and mayhem) + --foo=FOO store FOO in the foo list for later + fooing + -h, --help show this help message and exit +""" + +class TestHelp(BaseTest): + def setUp(self): + self.parser = self.make_parser(80) + + def make_parser(self, columns): + options = [ + make_option("-a", type="string", dest='a', + metavar="APPLE", help="throw APPLEs at basket"), + make_option("-b", "--boo", type="int", dest='boo', + metavar="NUM", + help= + "shout \"boo!\" NUM times (in order to frighten away " + "all the evil spirits that cause trouble and mayhem)"), + make_option("--foo", action="append", type="string", dest='foo', + help="store FOO in the foo list for later fooing"), + ] + + # We need to set COLUMNS for the OptionParser constructor, but + # we must restore its original value -- otherwise, this test + # screws things up for other tests when it's part of the Python + # test suite. + orig_columns = os.environ.get('COLUMNS') + os.environ['COLUMNS'] = str(columns) + try: + return InterceptingOptionParser(option_list=options) + finally: + if orig_columns is None: + del os.environ['COLUMNS'] + else: + os.environ['COLUMNS'] = orig_columns + + def assertHelpEquals(self, expected_output): + if type(expected_output) is types.UnicodeType: + encoding = self.parser._get_encoding(sys.stdout) + expected_output = expected_output.encode(encoding, "replace") + + save_argv = sys.argv[:] + try: + # Make optparse believe bar.py is being executed. + sys.argv[0] = os.path.join("foo", "bar.py") + self.assertOutput(["-h"], expected_output) + finally: + sys.argv[:] = save_argv + + def test_help(self): + self.assertHelpEquals(_expected_help_basic) + + def test_help_old_usage(self): + self.parser.set_usage("Usage: %prog [options]") + self.assertHelpEquals(_expected_help_basic) + + def test_help_long_opts_first(self): + self.parser.formatter.short_first = 0 + self.assertHelpEquals(_expected_help_long_opts_first) + + def test_help_title_formatter(self): + self.parser.formatter = TitledHelpFormatter() + self.assertHelpEquals(_expected_help_title_formatter) + + def test_wrap_columns(self): + # Ensure that wrapping respects $COLUMNS environment variable. + # Need to reconstruct the parser, since that's the only time + # we look at $COLUMNS. 
+ self.parser = self.make_parser(60) + self.assertHelpEquals(_expected_help_short_lines) + + def test_help_unicode(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE) + self.parser.add_option("-a", action="store_true", help=u"ol\u00E9!") + expect = u"""\ +Options: + -h, --help show this help message and exit + -a ol\u00E9! +""" + self.assertHelpEquals(expect) + + def test_help_unicode_description(self): + self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, + description=u"ol\u00E9!") + expect = u"""\ +ol\u00E9! + +Options: + -h, --help show this help message and exit +""" + self.assertHelpEquals(expect) + + def test_help_description_groups(self): + self.parser.set_description( + "This is the program description for %prog. %prog has " + "an option group as well as single options.") + + group = OptionGroup( + self.parser, "Dangerous Options", + "Caution: use of these options is at your own risk. " + "It is believed that some of them bite.") + group.add_option("-g", action="store_true", help="Group option.") + self.parser.add_option_group(group) + + expect = """\ +Usage: bar.py [options] + +This is the program description for bar.py. bar.py has an option group as +well as single options. + +Options: + -a APPLE throw APPLEs at basket + -b NUM, --boo=NUM shout "boo!" NUM times (in order to frighten away all the + evil spirits that cause trouble and mayhem) + --foo=FOO store FOO in the foo list for later fooing + -h, --help show this help message and exit + + Dangerous Options: + Caution: use of these options is at your own risk. It is believed + that some of them bite. + + -g Group option. +""" + + self.assertHelpEquals(expect) + + self.parser.epilog = "Please report bugs to /dev/null." + self.assertHelpEquals(expect + "\nPlease report bugs to /dev/null.\n") + + +class TestMatchAbbrev(BaseTest): + def test_match_abbrev(self): + self.assertEqual(_match_abbrev("--f", + {"--foz": None, + "--foo": None, + "--fie": None, + "--f": None}), + "--f") + + def test_match_abbrev_error(self): + s = "--f" + wordmap = {"--foz": None, "--foo": None, "--fie": None} + self.assertRaises( + _match_abbrev, (s, wordmap), None, + BadOptionError, "ambiguous option: --f (--fie, --foo, --foz?)") + + +class TestParseNumber(BaseTest): + def setUp(self): + self.parser = InterceptingOptionParser() + self.parser.add_option("-n", type=int) + self.parser.add_option("-l", type=long) + + def test_parse_num_fail(self): + self.assertRaises( + _parse_num, ("", int), {}, + ValueError, + re.compile(r"invalid literal for int().*: '?'?")) + self.assertRaises( + _parse_num, ("0xOoops", long), {}, + ValueError, + re.compile(r"invalid literal for long().*: '?0xOoops'?")) + + def test_parse_num_ok(self): + self.assertEqual(_parse_num("0", int), 0) + self.assertEqual(_parse_num("0x10", int), 16) + self.assertEqual(_parse_num("0XA", long), 10L) + self.assertEqual(_parse_num("010", long), 8L) + self.assertEqual(_parse_num("0b11", int), 3) + self.assertEqual(_parse_num("0b", long), 0L) + + def test_numeric_options(self): + self.assertParseOK(["-n", "42", "-l", "0x20"], + { "n": 42, "l": 0x20 }, []) + self.assertParseOK(["-n", "0b0101", "-l010"], + { "n": 5, "l": 8 }, []) + self.assertParseFail(["-n008"], + "option -n: invalid integer value: '008'") + self.assertParseFail(["-l0b0123"], + "option -l: invalid long integer value: '0b0123'") + self.assertParseFail(["-l", "0x12x"], + "option -l: invalid long integer value: '0x12x'") + + +def _testclasses(): + mod = sys.modules[__name__] + return [getattr(mod, name) for 
name in dir(mod) if name.startswith('Test')] + +def suite(): + suite = unittest.TestSuite() + for testclass in _testclasses(): + suite.addTest(unittest.makeSuite(testclass)) + return suite + +def test_main(): + test_support.run_suite(suite()) + +if __name__ == '__main__': + unittest.main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,438 @@ +# As a test suite for the os module, this is woefully inadequate, but this +# does add tests for a few functions which have been determined to be more +# portable than they had been thought to be. + +import os +import unittest +import warnings +import sys +from test import test_support + +warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, __name__) +warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, __name__) + +# Tests creating TESTFN +class FileTests(unittest.TestCase): + def setUp(self): + if os.path.exists(test_support.TESTFN): + os.unlink(test_support.TESTFN) + tearDown = setUp + + def test_access(self): + f = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR) + os.close(f) + self.assert_(os.access(test_support.TESTFN, os.W_OK)) + + +class TemporaryFileTests(unittest.TestCase): + def setUp(self): + self.files = [] + os.mkdir(test_support.TESTFN) + + def tearDown(self): + for name in self.files: + os.unlink(name) + os.rmdir(test_support.TESTFN) + + def check_tempfile(self, name): + # make sure it doesn't already exist: + self.failIf(os.path.exists(name), + "file already exists for temporary file") + # make sure we can create the file + open(name, "w") + self.files.append(name) + + def test_tempnam(self): + if not hasattr(os, "tempnam"): + return + warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, + r"test_os$") + self.check_tempfile(os.tempnam()) + + name = os.tempnam(test_support.TESTFN) + self.check_tempfile(name) + + name = os.tempnam(test_support.TESTFN, "pfx") + self.assert_(os.path.basename(name)[:3] == "pfx") + self.check_tempfile(name) + + def test_tmpfile(self): + if not hasattr(os, "tmpfile"): + return + fp = os.tmpfile() + fp.write("foobar") + fp.seek(0,0) + s = fp.read() + fp.close() + self.assert_(s == "foobar") + + def test_tmpnam(self): + import sys + if not hasattr(os, "tmpnam"): + return + warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, + r"test_os$") + name = os.tmpnam() + if sys.platform in ("win32",): + # The Windows tmpnam() seems useless. From the MS docs: + # + # The character string that tmpnam creates consists of + # the path prefix, defined by the entry P_tmpdir in the + # file STDIO.H, followed by a sequence consisting of the + # digit characters '0' through '9'; the numerical value + # of this string is in the range 1 - 65,535. Changing the + # definitions of L_tmpnam or P_tmpdir in STDIO.H does not + # change the operation of tmpnam. + # + # The really bizarre part is that, at least under MSVC6, + # P_tmpdir is "\\". That is, the path returned refers to + # the root of the current drive. That's a terrible place to + # put temp files, and, depending on privileges, the user + # may not even be able to open a file in the root directory. + self.failIf(os.path.exists(name), + "file already exists for temporary file") + else: + self.check_tempfile(name) + +# Test attributes on return values from os.*stat* family. 
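An editorial aside, not part of the archived commit: the StatAttributeTests class below exercises the dual interface of os.stat() results, which can be read both as a plain tuple and through named st_* attributes. A minimal sketch of that behaviour, assuming any readable path:

    import os, stat

    st = os.stat(os.getcwd())              # stat the current directory
    # the same fields are reachable positionally and by name
    assert st[stat.ST_MODE] == st.st_mode
    assert st[stat.ST_SIZE] == st.st_size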
+class StatAttributeTests(unittest.TestCase): + def setUp(self): + os.mkdir(test_support.TESTFN) + self.fname = os.path.join(test_support.TESTFN, "f1") + f = open(self.fname, 'wb') + f.write("ABC") + f.close() + + def tearDown(self): + os.unlink(self.fname) + os.rmdir(test_support.TESTFN) + + def test_stat_attributes(self): + if not hasattr(os, "stat"): + return + + import stat + result = os.stat(self.fname) + + # Make sure direct access works + self.assertEquals(result[stat.ST_SIZE], 3) + self.assertEquals(result.st_size, 3) + + import sys + + # Make sure all the attributes are there + members = dir(result) + for name in dir(stat): + if name[:3] == 'ST_': + attr = name.lower() + if name.endswith("TIME"): + def trunc(x): return int(x) + else: + def trunc(x): return x + self.assertEquals(trunc(getattr(result, attr)), + result[getattr(stat, name)]) + self.assert_(attr in members) + + try: + result[200] + self.fail("No exception thrown") + except IndexError: + pass + + # Make sure that assignment fails + try: + result.st_mode = 1 + self.fail("No exception thrown") + except (AttributeError, TypeError): + pass + + try: + result.st_rdev = 1 + self.fail("No exception thrown") + except (AttributeError, TypeError): + pass + + try: + result.parrot = 1 + self.fail("No exception thrown") + except (AttributeError, TypeError): + pass + + # Use the stat_result constructor with a too-short tuple. + try: + result2 = os.stat_result((10,)) + #self.fail("No exception thrown") - XXX very much a detail IMHO + except TypeError: + pass + + # Use the constructr with a too-long tuple. + try: + result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14)) + except TypeError: + pass + + + def test_statvfs_attributes(self): + if not hasattr(os, "statvfs"): + return + + import statvfs + try: + result = os.statvfs(self.fname) + except OSError, e: + # On AtheOS, glibc always returns ENOSYS + import errno + if e.errno == errno.ENOSYS: + return + + # Make sure direct access works + self.assertEquals(result.f_bfree, result[statvfs.F_BFREE]) + + # Make sure all the attributes are there + members = dir(result) + for name in dir(statvfs): + if name[:2] == 'F_': + attr = name.lower() + self.assertEquals(getattr(result, attr), + result[getattr(statvfs, name)]) + self.assert_(attr in members) + + # Make sure that assignment really fails + try: + result.f_bfree = 1 + self.fail("No exception thrown") + except (AttributeError, TypeError): + pass + + try: + result.parrot = 1 + self.fail("No exception thrown") + except (AttributeError, TypeError): + pass + + # Use the constructor with a too-short tuple. + try: + result2 = os.statvfs_result((10,)) + self.fail("No exception thrown") + except TypeError: + pass + + # Use the constructr with a too-long tuple. 
+ try: + result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14)) + except TypeError: + pass + + # Restrict test to Win32, since there is no guarantee other + # systems support centiseconds + if sys.platform == 'win32': + def test_1565150(self): + t1 = 1159195039.25 + os.utime(self.fname, (t1, t1)) + self.assertEquals(os.stat(self.fname).st_mtime, t1) + + def test_1686475(self): + # Verify that an open file can be stat'ed + try: + os.stat(r"c:\pagefile.sys") + except WindowsError, e: + if e == 2: # file does not exist; cannot run test + return + self.fail("Could not stat pagefile.sys") + +from test import mapping_tests + +class EnvironTests(mapping_tests.BasicTestMappingProtocol): + """check that os.environ object conform to mapping protocol""" + type2test = None + def _reference(self): + return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"} + def _empty_mapping(self): + os.environ.clear() + return os.environ + def setUp(self): + self.__save = dict(os.environ) + os.environ.clear() + def tearDown(self): + os.environ.clear() + os.environ.update(self.__save) + + # Bug 1110478 + def test_update2(self): + if os.path.exists("/bin/sh"): + os.environ.update(HELLO="World") + value = os.popen("/bin/sh -c 'echo $HELLO'").read().strip() + self.assertEquals(value, "World") + +class WalkTests(unittest.TestCase): + """Tests for os.walk().""" + + def test_traversal(self): + import os + from os.path import join + + # Build: + # TESTFN/ a file kid and two directory kids + # tmp1 + # SUB1/ a file kid and a directory kid + # tmp2 + # SUB11/ no kids + # SUB2/ just a file kid + # tmp3 + sub1_path = join(test_support.TESTFN, "SUB1") + sub11_path = join(sub1_path, "SUB11") + sub2_path = join(test_support.TESTFN, "SUB2") + tmp1_path = join(test_support.TESTFN, "tmp1") + tmp2_path = join(sub1_path, "tmp2") + tmp3_path = join(sub2_path, "tmp3") + + # Create stuff. + os.makedirs(sub11_path) + os.makedirs(sub2_path) + for path in tmp1_path, tmp2_path, tmp3_path: + f = file(path, "w") + f.write("I'm " + path + " and proud of it. Blame test_os.\n") + f.close() + + # Walk top-down. + all = list(os.walk(test_support.TESTFN)) + self.assertEqual(len(all), 4) + # We can't know which order SUB1 and SUB2 will appear in. + # Not flipped: TESTFN, SUB1, SUB11, SUB2 + # flipped: TESTFN, SUB2, SUB1, SUB11 + flipped = all[0][1][0] != "SUB1" + all[0][1].sort() + self.assertEqual(all[0], (test_support.TESTFN, ["SUB1", "SUB2"], ["tmp1"])) + self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"])) + self.assertEqual(all[2 + flipped], (sub11_path, [], [])) + self.assertEqual(all[3 - 2 * flipped], (sub2_path, [], ["tmp3"])) + + # Prune the search. + all = [] + for root, dirs, files in os.walk(test_support.TESTFN): + all.append((root, dirs, files)) + # Don't descend into SUB1. + if 'SUB1' in dirs: + # Note that this also mutates the dirs we appended to all! + dirs.remove('SUB1') + self.assertEqual(len(all), 2) + self.assertEqual(all[0], (test_support.TESTFN, ["SUB2"], ["tmp1"])) + self.assertEqual(all[1], (sub2_path, [], ["tmp3"])) + + # Walk bottom-up. + all = list(os.walk(test_support.TESTFN, topdown=False)) + self.assertEqual(len(all), 4) + # We can't know which order SUB1 and SUB2 will appear in. 
+ # Not flipped: SUB11, SUB1, SUB2, TESTFN + # flipped: SUB2, SUB11, SUB1, TESTFN + flipped = all[3][1][0] != "SUB1" + all[3][1].sort() + self.assertEqual(all[3], (test_support.TESTFN, ["SUB1", "SUB2"], ["tmp1"])) + self.assertEqual(all[flipped], (sub11_path, [], [])) + self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"])) + self.assertEqual(all[2 - 2 * flipped], (sub2_path, [], ["tmp3"])) + + # Tear everything down. This is a decent use for bottom-up on + # Windows, which doesn't have a recursive delete command. The + # (not so) subtlety is that rmdir will fail unless the dir's + # kids are removed first, so bottom up is essential. + for root, dirs, files in os.walk(test_support.TESTFN, topdown=False): + for name in files: + os.remove(join(root, name)) + for name in dirs: + os.rmdir(join(root, name)) + os.rmdir(test_support.TESTFN) + +class MakedirTests (unittest.TestCase): + def setUp(self): + os.mkdir(test_support.TESTFN) + + def test_makedir(self): + base = test_support.TESTFN + path = os.path.join(base, 'dir1', 'dir2', 'dir3') + os.makedirs(path) # Should work + path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4') + os.makedirs(path) + + # Try paths with a '.' in them + self.failUnlessRaises(OSError, os.makedirs, os.curdir) + path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir) + os.makedirs(path) + path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4', + 'dir5', 'dir6') + os.makedirs(path) + + + + + def tearDown(self): + path = os.path.join(test_support.TESTFN, 'dir1', 'dir2', 'dir3', + 'dir4', 'dir5', 'dir6') + # If the tests failed, the bottom-most directory ('../dir6') + # may not have been created, so we look for the outermost directory + # that exists. + while not os.path.exists(path) and path != test_support.TESTFN: + path = os.path.dirname(path) + + os.removedirs(path) + +class DevNullTests (unittest.TestCase): + def test_devnull(self): + f = file(os.devnull, 'w') + f.write('hello') + f.close() + f = file(os.devnull, 'r') + self.assertEqual(f.read(), '') + f.close() + +class URandomTests (unittest.TestCase): + def test_urandom(self): + try: + self.assertEqual(len(os.urandom(1)), 1) + self.assertEqual(len(os.urandom(10)), 10) + self.assertEqual(len(os.urandom(100)), 100) + self.assertEqual(len(os.urandom(1000)), 1000) + except NotImplementedError: + pass + +class Win32ErrorTests(unittest.TestCase): + def test_rename(self): + self.assertRaises(WindowsError, os.rename, test_support.TESTFN, test_support.TESTFN+".bak") + + def test_remove(self): + self.assertRaises(WindowsError, os.remove, test_support.TESTFN) + + def test_chdir(self): + self.assertRaises(WindowsError, os.chdir, test_support.TESTFN) + + def test_mkdir(self): + self.assertRaises(WindowsError, os.chdir, test_support.TESTFN) + + def test_utime(self): + self.assertRaises(WindowsError, os.utime, test_support.TESTFN, None) + + def test_access(self): + self.assertRaises(WindowsError, os.utime, test_support.TESTFN, 0) + + def test_chmod(self): + self.assertRaises(WindowsError, os.utime, test_support.TESTFN, 0) + +if sys.platform != 'win32': + class Win32ErrorTests(unittest.TestCase): + pass + +def test_main(): + test_support.run_unittest( + FileTests, + TemporaryFileTests, + StatAttributeTests, + EnvironTests, + WalkTests, + MakedirTests, + DevNullTests, + URandomTests, + Win32ErrorTests + ) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py 
============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,447 @@ +import parser +import unittest +from test import test_support + +# +# First, we test that we can generate trees from valid source fragments, +# and that these valid trees are indeed allowed by the tree-loading side +# of the parser module. +# + +class RoundtripLegalSyntaxTestCase(unittest.TestCase): + + def roundtrip(self, f, s): + st1 = f(s) + t = st1.totuple() + try: + st2 = parser.sequence2st(t) + except parser.ParserError, why: + self.fail("could not roundtrip %r: %s" % (s, why)) + + self.assertEquals(t, st2.totuple(), + "could not re-generate syntax tree") + + def check_expr(self, s): + self.roundtrip(parser.expr, s) + + def check_suite(self, s): + self.roundtrip(parser.suite, s) + + def test_yield_statement(self): + self.check_suite("def f(): yield 1") + self.check_suite("def f(): yield") + self.check_suite("def f(): x += yield") + self.check_suite("def f(): x = yield 1") + self.check_suite("def f(): x = y = yield 1") + self.check_suite("def f(): x = yield") + self.check_suite("def f(): x = y = yield") + self.check_suite("def f(): 1 + (yield)*2") + self.check_suite("def f(): (yield 1)*2") + self.check_suite("def f(): return; yield 1") + self.check_suite("def f(): yield 1; return") + self.check_suite("def f():\n" + " for x in range(30):\n" + " yield x\n") + self.check_suite("def f():\n" + " if (yield):\n" + " yield x\n") + + def test_expressions(self): + self.check_expr("foo(1)") + self.check_expr("[1, 2, 3]") + self.check_expr("[x**3 for x in range(20)]") + self.check_expr("[x**3 for x in range(20) if x % 3]") + self.check_expr("[x**3 for x in range(20) if x % 2 if x % 3]") + self.check_expr("list(x**3 for x in range(20))") + self.check_expr("list(x**3 for x in range(20) if x % 3)") + self.check_expr("list(x**3 for x in range(20) if x % 2 if x % 3)") + self.check_expr("foo(*args)") + self.check_expr("foo(*args, **kw)") + self.check_expr("foo(**kw)") + self.check_expr("foo(key=value)") + self.check_expr("foo(key=value, *args)") + self.check_expr("foo(key=value, *args, **kw)") + self.check_expr("foo(key=value, **kw)") + self.check_expr("foo(a, b, c, *args)") + self.check_expr("foo(a, b, c, *args, **kw)") + self.check_expr("foo(a, b, c, **kw)") + self.check_expr("foo + bar") + self.check_expr("foo - bar") + self.check_expr("foo * bar") + self.check_expr("foo / bar") + self.check_expr("foo // bar") + self.check_expr("lambda: 0") + self.check_expr("lambda x: 0") + self.check_expr("lambda *y: 0") + self.check_expr("lambda *y, **z: 0") + self.check_expr("lambda **z: 0") + self.check_expr("lambda x, y: 0") + self.check_expr("lambda foo=bar: 0") + self.check_expr("lambda foo=bar, spaz=nifty+spit: 0") + self.check_expr("lambda foo=bar, **z: 0") + self.check_expr("lambda foo=bar, blaz=blat+2, **z: 0") + self.check_expr("lambda foo=bar, blaz=blat+2, *y, **z: 0") + self.check_expr("lambda x, *y, **z: 0") + self.check_expr("(x for x in range(10))") + self.check_expr("foo(x for x in range(10))") + + def test_print(self): + self.check_suite("print") + self.check_suite("print 1") + self.check_suite("print 1,") + self.check_suite("print >>fp") + self.check_suite("print >>fp, 1") + self.check_suite("print >>fp, 1,") + + def test_simple_expression(self): + # expr_stmt + self.check_suite("a") + + def test_simple_assignments(self): + self.check_suite("a = b") + self.check_suite("a = b = c = d = 
e") + + def test_simple_augmented_assignments(self): + self.check_suite("a += b") + self.check_suite("a -= b") + self.check_suite("a *= b") + self.check_suite("a /= b") + self.check_suite("a //= b") + self.check_suite("a %= b") + self.check_suite("a &= b") + self.check_suite("a |= b") + self.check_suite("a ^= b") + self.check_suite("a <<= b") + self.check_suite("a >>= b") + self.check_suite("a **= b") + + def test_function_defs(self): + self.check_suite("def f(): pass") + self.check_suite("def f(*args): pass") + self.check_suite("def f(*args, **kw): pass") + self.check_suite("def f(**kw): pass") + self.check_suite("def f(foo=bar): pass") + self.check_suite("def f(foo=bar, *args): pass") + self.check_suite("def f(foo=bar, *args, **kw): pass") + self.check_suite("def f(foo=bar, **kw): pass") + + self.check_suite("def f(a, b): pass") + self.check_suite("def f(a, b, *args): pass") + self.check_suite("def f(a, b, *args, **kw): pass") + self.check_suite("def f(a, b, **kw): pass") + self.check_suite("def f(a, b, foo=bar): pass") + self.check_suite("def f(a, b, foo=bar, *args): pass") + self.check_suite("def f(a, b, foo=bar, *args, **kw): pass") + self.check_suite("def f(a, b, foo=bar, **kw): pass") + + self.check_suite("@staticmethod\n" + "def f(): pass") + self.check_suite("@staticmethod\n" + "@funcattrs(x, y)\n" + "def f(): pass") + self.check_suite("@funcattrs()\n" + "def f(): pass") + + def test_class_defs(self): + self.check_suite("class foo():pass") + + def test_import_from_statement(self): + self.check_suite("from sys.path import *") + self.check_suite("from sys.path import dirname") + self.check_suite("from sys.path import (dirname)") + self.check_suite("from sys.path import (dirname,)") + self.check_suite("from sys.path import dirname as my_dirname") + self.check_suite("from sys.path import (dirname as my_dirname)") + self.check_suite("from sys.path import (dirname as my_dirname,)") + self.check_suite("from sys.path import dirname, basename") + self.check_suite("from sys.path import (dirname, basename)") + self.check_suite("from sys.path import (dirname, basename,)") + self.check_suite( + "from sys.path import dirname as my_dirname, basename") + self.check_suite( + "from sys.path import (dirname as my_dirname, basename)") + self.check_suite( + "from sys.path import (dirname as my_dirname, basename,)") + self.check_suite( + "from sys.path import dirname, basename as my_basename") + self.check_suite( + "from sys.path import (dirname, basename as my_basename)") + self.check_suite( + "from sys.path import (dirname, basename as my_basename,)") + + def test_basic_import_statement(self): + self.check_suite("import sys") + self.check_suite("import sys as system") + self.check_suite("import sys, math") + self.check_suite("import sys as system, math") + self.check_suite("import sys, math as my_math") + + def test_pep263(self): + self.check_suite("# -*- coding: iso-8859-1 -*-\n" + "pass\n") + + def test_assert(self): + self.check_suite("assert alo < ahi and blo < bhi\n") + +# +# Second, we take *invalid* trees and make sure we get ParserError +# rejections for them. 
+# + +class IllegalSyntaxTestCase(unittest.TestCase): + + def check_bad_tree(self, tree, label): + try: + parser.sequence2st(tree) + except parser.ParserError: + pass + else: + self.fail("did not detect invalid tree for %r" % label) + + def test_junk(self): + # not even remotely valid: + self.check_bad_tree((1, 2, 3), "") + + def test_illegal_yield_1(self): + # Illegal yield statement: def f(): return 1; yield 1 + tree = \ + (257, + (264, + (285, + (259, + (1, 'def'), + (1, 'f'), + (260, (7, '('), (8, ')')), + (11, ':'), + (291, + (4, ''), + (5, ''), + (264, + (265, + (266, + (272, + (275, + (1, 'return'), + (313, + (292, + (293, + (294, + (295, + (297, + (298, + (299, + (300, + (301, + (302, (303, (304, (305, (2, '1')))))))))))))))))), + (264, + (265, + (266, + (272, + (276, + (1, 'yield'), + (313, + (292, + (293, + (294, + (295, + (297, + (298, + (299, + (300, + (301, + (302, + (303, (304, (305, (2, '1')))))))))))))))))), + (4, ''))), + (6, ''))))), + (4, ''), + (0, '')))) + self.check_bad_tree(tree, "def f():\n return 1\n yield 1") + + def test_illegal_yield_2(self): + # Illegal return in generator: def f(): return 1; yield 1 + tree = \ + (257, + (264, + (265, + (266, + (278, + (1, 'from'), + (281, (1, '__future__')), + (1, 'import'), + (279, (1, 'generators')))), + (4, ''))), + (264, + (285, + (259, + (1, 'def'), + (1, 'f'), + (260, (7, '('), (8, ')')), + (11, ':'), + (291, + (4, ''), + (5, ''), + (264, + (265, + (266, + (272, + (275, + (1, 'return'), + (313, + (292, + (293, + (294, + (295, + (297, + (298, + (299, + (300, + (301, + (302, (303, (304, (305, (2, '1')))))))))))))))))), + (264, + (265, + (266, + (272, + (276, + (1, 'yield'), + (313, + (292, + (293, + (294, + (295, + (297, + (298, + (299, + (300, + (301, + (302, + (303, (304, (305, (2, '1')))))))))))))))))), + (4, ''))), + (6, ''))))), + (4, ''), + (0, '')))) + self.check_bad_tree(tree, "def f():\n return 1\n yield 1") + + def test_print_chevron_comma(self): + # Illegal input: print >>fp, + tree = \ + (257, + (264, + (265, + (266, + (268, + (1, 'print'), + (35, '>>'), + (290, + (291, + (292, + (293, + (295, + (296, + (297, + (298, (299, (300, (301, (302, (303, (1, 'fp')))))))))))))), + (12, ','))), + (4, ''))), + (0, '')) + self.check_bad_tree(tree, "print >>fp,") + + def test_a_comma_comma_c(self): + # Illegal input: a,,c + tree = \ + (258, + (311, + (290, + (291, + (292, + (293, + (295, + (296, + (297, + (298, (299, (300, (301, (302, (303, (1, 'a')))))))))))))), + (12, ','), + (12, ','), + (290, + (291, + (292, + (293, + (295, + (296, + (297, + (298, (299, (300, (301, (302, (303, (1, 'c'))))))))))))))), + (4, ''), + (0, '')) + self.check_bad_tree(tree, "a,,c") + + def test_illegal_operator(self): + # Illegal input: a $= b + tree = \ + (257, + (264, + (265, + (266, + (267, + (312, + (291, + (292, + (293, + (294, + (296, + (297, + (298, + (299, + (300, (301, (302, (303, (304, (1, 'a'))))))))))))))), + (268, (37, '$=')), + (312, + (291, + (292, + (293, + (294, + (296, + (297, + (298, + (299, + (300, (301, (302, (303, (304, (1, 'b'))))))))))))))))), + (4, ''))), + (0, '')) + self.check_bad_tree(tree, "a $= b") + + def test_malformed_global(self): + #doesn't have global keyword in ast + tree = (257, + (264, + (265, + (266, + (282, (1, 'foo'))), (4, ''))), + (4, ''), + (0, '')) + self.check_bad_tree(tree, "malformed global ast") + + +class CompileTestCase(unittest.TestCase): + + # These tests are very minimal. 
:-( + + def test_compile_expr(self): + st = parser.expr('2 + 3') + code = parser.compilest(st) + self.assertEquals(eval(code), 5) + + def test_compile_suite(self): + st = parser.suite('x = 2; y = x + 3') + code = parser.compilest(st) + globs = {} + exec code in globs + self.assertEquals(globs['y'], 5) + + def test_compile_error(self): + st = parser.suite('1 = 3 + 4') + self.assertRaises(SyntaxError, parser.compilest, st) + +def test_main(): + test_support.run_unittest( + RoundtripLegalSyntaxTestCase, + CompileTestCase, + ) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,201 @@ +from test import test_support +import unittest + +import sys, os, cStringIO, subprocess +import quopri + + + +ENCSAMPLE = """\ +Here's a bunch of special=20 + +=A1=A2=A3=A4=A5=A6=A7=A8=A9 +=AA=AB=AC=AD=AE=AF=B0=B1=B2=B3 +=B4=B5=B6=B7=B8=B9=BA=BB=BC=BD=BE +=BF=C0=C1=C2=C3=C4=C5=C6 +=C7=C8=C9=CA=CB=CC=CD=CE=CF +=D0=D1=D2=D3=D4=D5=D6=D7 +=D8=D9=DA=DB=DC=DD=DE=DF +=E0=E1=E2=E3=E4=E5=E6=E7 +=E8=E9=EA=EB=EC=ED=EE=EF +=F0=F1=F2=F3=F4=F5=F6=F7 +=F8=F9=FA=FB=FC=FD=FE=FF + +characters... have fun! +""" + +# First line ends with a space +DECSAMPLE = "Here's a bunch of special \n" + \ +"""\ + +\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9 +\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3 +\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe +\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6 +\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf +\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7 +\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf +\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7 +\xe8\xe9\xea\xeb\xec\xed\xee\xef +\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7 +\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff + +characters... have fun! +""" + + +def withpythonimplementation(testfunc): + def newtest(self): + # Test default implementation + testfunc(self) + # Test Python implementation + if quopri.b2a_qp is not None or quopri.a2b_qp is not None: + oldencode = quopri.b2a_qp + olddecode = quopri.a2b_qp + try: + quopri.b2a_qp = None + quopri.a2b_qp = None + testfunc(self) + finally: + quopri.b2a_qp = oldencode + quopri.a2b_qp = olddecode + newtest.__name__ = testfunc.__name__ + return newtest + +class QuopriTestCase(unittest.TestCase): + # Each entry is a tuple of (plaintext, encoded string). These strings are + # used in the "quotetabs=0" tests. + STRINGS = ( + # Some normal strings + ('hello', 'hello'), + ('''hello + there + world''', '''hello + there + world'''), + ('''hello + there + world +''', '''hello + there + world +'''), + ('\201\202\203', '=81=82=83'), + # Add some trailing MUST QUOTE strings + ('hello ', 'hello=20'), + ('hello\t', 'hello=09'), + # Some long lines. 
First, a single line of 108 characters + ('xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\xd8\xd9\xda\xdb\xdc\xdd\xde\xdfxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', + '''xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx=D8=D9=DA=DB=DC=DD=DE=DFx= +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'''), + # A line of exactly 76 characters, no soft line break should be needed + #('yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', + #'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'), + # A line of 77 characters, forcing a soft line break at position 75, + # and a second line of exactly 2 characters (because the soft line + # break `=' sign counts against the line length limit). + ('zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz', + '''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz= +zz'''), + # A line of 151 characters, forcing a soft line break at position 75, + # with a second line of exactly 76 characters and no trailing = + #('zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz', + #'''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz= +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), + # A string containing a hard line break, but which the first line is + # 151 characters and the second line is exactly 76 characters. This + # should leave us with three lines, the first which has a soft line + # break, and which the second and third do not. + #('''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''', + #'''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy= +#yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), + # Now some really complex stuff ;) + (DECSAMPLE, ENCSAMPLE), + ) + + # These are used in the "quotetabs=1" tests. + ESTRINGS = ( + ('hello world', 'hello=20world'), + ('hello\tworld', 'hello=09world'), + ) + + # These are used in the "header=1" tests. 
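# (In header mode quopri maps spaces to underscores and escapes literal
# underscores as "=5F", per the RFC 1522 encoded-word rules, which is
# exactly what the pairs below exercise.)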
+ HSTRINGS = ( + ('hello world', 'hello_world'), + ('hello_world', 'hello=5Fworld'), + ) + + @withpythonimplementation + def test_encodestring(self): + for p, e in self.STRINGS: + if encodestring(p) != e: + print '\n"%s" is different from \n"%s"' % (encodestring(p), e) + self.assert_(encodestring(p) == e) + + @withpythonimplementation + def test_decodestring(self): + for p, e in self.STRINGS: + self.assert_(quopri.decodestring(e) == p) + + @withpythonimplementation + def test_idempotent_string(self): + for p, e in self.STRINGS: + self.assert_(quopri.decodestring(quopri.encodestring(e)) == e) + + @withpythonimplementation + def test_encode(self): + for p, e in self.STRINGS: + infp = cStringIO.StringIO(p) + outfp = cStringIO.StringIO() + quopri.encode(infp, outfp, quotetabs=False) + self.assert_(outfp.getvalue() == e) + + @withpythonimplementation + def test_decode(self): + for p, e in self.STRINGS: + infp = cStringIO.StringIO(e) + outfp = cStringIO.StringIO() + quopri.decode(infp, outfp) + self.assert_(outfp.getvalue() == p) + + @withpythonimplementation + def test_embedded_ws(self): + for p, e in self.ESTRINGS: + self.assert_(quopri.encodestring(p, quotetabs=True) == e) + self.assert_(quopri.decodestring(e) == p) + + @withpythonimplementation + def test_encode_header(self): + for p, e in self.HSTRINGS: + self.assert_(quopri.encodestring(p, header=True) == e) + + @withpythonimplementation + def test_decode_header(self): + for p, e in self.HSTRINGS: + self.assert_(quopri.decodestring(e, header=True) == p) + + def test_scriptencode(self): + (p, e) = self.STRINGS[-1] + process = subprocess.Popen([sys.executable, "-mquopri"], + stdin=subprocess.PIPE, stdout=subprocess.PIPE) + cout, cerr = process.communicate(p) + # On Windows, Python will output the result to stdout using + # CRLF, as the mode of stdout is text mode. To compare this + # with the expected result, we need to do a line-by-line comparison. + self.assert_(cout.splitlines() == e.splitlines()) + + def test_scriptdecode(self): + (p, e) = self.STRINGS[-1] + process = subprocess.Popen([sys.executable, "-mquopri", "-d"], + stdin=subprocess.PIPE, stdout=subprocess.PIPE) + cout, cerr = process.communicate(e) + self.assert_(cout.splitlines() == p.splitlines()) + +def test_main(): + test_support.run_unittest(QuopriTestCase) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,553 @@ +#!/usr/bin/env python + +import unittest +import random +import time +import pickle +import warnings +from math import log, exp, sqrt, pi +from test import test_support + +class TestBasicOps(unittest.TestCase): + # Superclass with tests common to all generators. + # Subclasses must arrange for self.gen to retrieve the Random instance + # to be tested. 
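# (The concrete subclasses further down do this with a plain class
# attribute, e.g. gen = random.WichmannHill() or gen = random.Random().)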
+ + def randomlist(self, n): + """Helper function to make a list of random numbers""" + return [self.gen.random() for i in xrange(n)] + + def test_autoseed(self): + self.gen.seed() + state1 = self.gen.getstate() + time.sleep(0.1) + self.gen.seed() # diffent seeds at different times + state2 = self.gen.getstate() + self.assertNotEqual(state1, state2) + + def test_saverestore(self): + N = 1000 + self.gen.seed() + state = self.gen.getstate() + randseq = self.randomlist(N) + self.gen.setstate(state) # should regenerate the same sequence + self.assertEqual(randseq, self.randomlist(N)) + + def test_seedargs(self): + for arg in [None, 0, 0L, 1, 1L, -1, -1L, 10**20, -(10**20), + 3.14, 1+2j, 'a', tuple('abc')]: + self.gen.seed(arg) + for arg in [range(3), dict(one=1)]: + self.assertRaises(TypeError, self.gen.seed, arg) + self.assertRaises(TypeError, self.gen.seed, 1, 2) + self.assertRaises(TypeError, type(self.gen), []) + + def test_jumpahead(self): + self.gen.seed() + state1 = self.gen.getstate() + self.gen.jumpahead(100) + state2 = self.gen.getstate() # s/b distinct from state1 + self.assertNotEqual(state1, state2) + self.gen.jumpahead(100) + state3 = self.gen.getstate() # s/b distinct from state2 + self.assertNotEqual(state2, state3) + + self.assertRaises(TypeError, self.gen.jumpahead) # needs an arg + # wrong type - can get ValueError if by any chance "ick" compares < 0 + self.assertRaises((TypeError, ValueError), self.gen.jumpahead, "ick") + self.assertRaises(TypeError, self.gen.jumpahead, 2.3) # wrong type + self.assertRaises(TypeError, self.gen.jumpahead, 2, 3) # too many + + def test_sample(self): + # For the entire allowable range of 0 <= k <= N, validate that + # the sample is of the correct length and contains only unique items + N = 100 + population = xrange(N) + for k in xrange(N+1): + s = self.gen.sample(population, k) + self.assertEqual(len(s), k) + uniq = set(s) + self.assertEqual(len(uniq), k) + self.failUnless(uniq <= set(population)) + self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0 + + def test_sample_distribution(self): + # For the entire allowable range of 0 <= k <= N, validate that + # sample generates all possible permutations + n = 5 + pop = range(n) + trials = 10000 # large num prevents false negatives without slowing normal case + def factorial(n): + return reduce(int.__mul__, xrange(1, n), 1) + for k in xrange(n): + expected = factorial(n) // factorial(n-k) + perms = {} + for i in xrange(trials): + perms[tuple(self.gen.sample(pop, k))] = None + if len(perms) == expected: + break + else: + self.fail() + + def test_sample_inputs(self): + # SF bug #801342 -- population can be any iterable defining __len__() + self.gen.sample(set(range(20)), 2) + self.gen.sample(range(20), 2) + self.gen.sample(xrange(20), 2) + self.gen.sample(str('abcdefghijklmnopqrst'), 2) + self.gen.sample(tuple('abcdefghijklmnopqrst'), 2) + + def test_sample_on_dicts(self): + self.gen.sample(dict.fromkeys('abcdefghijklmnopqrst'), 2) + + # SF bug #1460340 -- random.sample can raise KeyError + a = dict.fromkeys(range(10)+range(10,100,2)+range(100,110)) + self.gen.sample(a, 3) + + # A followup to bug #1460340: sampling from a dict could return + # a subset of its keys or of its values, depending on the size of + # the subset requested. + N = 30 + d = dict((i, complex(i, i)) for i in xrange(N)) + for k in xrange(N+1): + samp = self.gen.sample(d, k) + # Verify that we got ints back (keys); the values are complex. 
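# (If values ever leaked through, type(x) below would be complex rather
# than int and the assertion would fail.)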
+ for x in samp: + self.assert_(type(x) is int) + samp.sort() + self.assertEqual(samp, range(N)) + + def test_gauss(self): + # Ensure that the seed() method initializes all the hidden state. In + # particular, through 2.2.1 it failed to reset a piece of state used + # by (and only by) the .gauss() method. + + for seed in 1, 12, 123, 1234, 12345, 123456, 654321: + self.gen.seed(seed) + x1 = self.gen.random() + y1 = self.gen.gauss(0, 1) + + self.gen.seed(seed) + x2 = self.gen.random() + y2 = self.gen.gauss(0, 1) + + self.assertEqual(x1, x2) + self.assertEqual(y1, y2) + + def test_pickling(self): + state = pickle.dumps(self.gen) + origseq = [self.gen.random() for i in xrange(10)] + newgen = pickle.loads(state) + restoredseq = [newgen.random() for i in xrange(10)] + self.assertEqual(origseq, restoredseq) + +class WichmannHill_TestBasicOps(TestBasicOps): + gen = random.WichmannHill() + + def test_setstate_first_arg(self): + self.assertRaises(ValueError, self.gen.setstate, (2, None, None)) + + def test_strong_jumpahead(self): + # tests that jumpahead(n) semantics correspond to n calls to random() + N = 1000 + s = self.gen.getstate() + self.gen.jumpahead(N) + r1 = self.gen.random() + # now do it the slow way + self.gen.setstate(s) + for i in xrange(N): + self.gen.random() + r2 = self.gen.random() + self.assertEqual(r1, r2) + + def test_gauss_with_whseed(self): + # Ensure that the seed() method initializes all the hidden state. In + # particular, through 2.2.1 it failed to reset a piece of state used + # by (and only by) the .gauss() method. + + for seed in 1, 12, 123, 1234, 12345, 123456, 654321: + self.gen.whseed(seed) + x1 = self.gen.random() + y1 = self.gen.gauss(0, 1) + + self.gen.whseed(seed) + x2 = self.gen.random() + y2 = self.gen.gauss(0, 1) + + self.assertEqual(x1, x2) + self.assertEqual(y1, y2) + + def test_bigrand(self): + # Verify warnings are raised when randrange is too large for random() + oldfilters = warnings.filters[:] + warnings.filterwarnings("error", "Underlying random") + self.assertRaises(UserWarning, self.gen.randrange, 2**60) + warnings.filters[:] = oldfilters + +class SystemRandom_TestBasicOps(TestBasicOps): + gen = random.SystemRandom() + + def test_autoseed(self): + # Doesn't need to do anything except not fail + self.gen.seed() + + def test_saverestore(self): + self.assertRaises(NotImplementedError, self.gen.getstate) + self.assertRaises(NotImplementedError, self.gen.setstate, None) + + def test_seedargs(self): + # Doesn't need to do anything except not fail + self.gen.seed(100) + + def test_jumpahead(self): + # Doesn't need to do anything except not fail + self.gen.jumpahead(100) + + def test_gauss(self): + self.gen.gauss_next = None + self.gen.seed(100) + self.assertEqual(self.gen.gauss_next, None) + + def test_pickling(self): + self.assertRaises(NotImplementedError, pickle.dumps, self.gen) + + def test_53_bits_per_float(self): + # This should pass whenever a C double has 53 bit precision. + span = 2 ** 53 + cum = 0 + for i in xrange(100): + cum |= int(self.gen.random() * span) + self.assertEqual(cum, span-1) + + def test_bigrand(self): + # The randrange routine should build-up the required number of bits + # in stages so that all bit positions are active. 
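# (OR-ing 100 draws together and comparing against span - 1 is a cheap
# way to check that every one of the 500 bit positions can come up set.)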
+ span = 2 ** 500 + cum = 0 + for i in xrange(100): + r = self.gen.randrange(span) + self.assert_(0 <= r < span) + cum |= r + self.assertEqual(cum, span-1) + + def test_bigrand_ranges(self): + for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: + start = self.gen.randrange(2 ** i) + stop = self.gen.randrange(2 ** (i-2)) + if stop <= start: + return + self.assert_(start <= self.gen.randrange(start, stop) < stop) + + def test_rangelimits(self): + for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: + self.assertEqual(set(range(start,stop)), + set([self.gen.randrange(start,stop) for i in xrange(100)])) + + def test_genrandbits(self): + # Verify ranges + for k in xrange(1, 1000): + self.assert_(0 <= self.gen.getrandbits(k) < 2**k) + + # Verify all bits active + getbits = self.gen.getrandbits + for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: + cum = 0 + for i in xrange(100): + cum |= getbits(span) + self.assertEqual(cum, 2**span-1) + + # Verify argument checking + self.assertRaises(TypeError, self.gen.getrandbits) + self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) + self.assertRaises(ValueError, self.gen.getrandbits, 0) + self.assertRaises(ValueError, self.gen.getrandbits, -1) + self.assertRaises(TypeError, self.gen.getrandbits, 10.1) + + def test_randbelow_logic(self, _log=log, int=int): + # check bitcount transition points: 2**i and 2**(i+1)-1 + # show that: k = int(1.001 + _log(n, 2)) + # is equal to or one greater than the number of bits in n + for i in xrange(1, 1000): + n = 1L << i # check an exact power of two + numbits = i+1 + k = int(1.00001 + _log(n, 2)) + self.assertEqual(k, numbits) + self.assert_(n == 2**(k-1)) + + n += n - 1 # check 1 below the next power of two + k = int(1.00001 + _log(n, 2)) + self.assert_(k in [numbits, numbits+1]) + self.assert_(2**k > n > 2**(k-2)) + + n -= n >> 15 # check a little farther below the next power of two + k = int(1.00001 + _log(n, 2)) + self.assertEqual(k, numbits) # note the stronger assertion + self.assert_(2**k > n > 2**(k-1)) # note the stronger assertion + + +class MersenneTwister_TestBasicOps(TestBasicOps): + gen = random.Random() + + def test_setstate_first_arg(self): + self.assertRaises(ValueError, self.gen.setstate, (1, None, None)) + + def test_setstate_middle_arg(self): + # Wrong type, s/b tuple + self.assertRaises(TypeError, self.gen.setstate, (2, None, None)) + # Wrong length, s/b 625 + self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None)) + # Wrong type, s/b tuple of 625 ints + self.assertRaises(TypeError, self.gen.setstate, (2, ('a',)*625, None)) + # Last element s/b an int also + self.assertRaises(TypeError, self.gen.setstate, (2, (0,)*624+('a',), None)) + + def test_referenceImplementation(self): + # Compare the python implementation with results from the original + # code. Create 2000 53-bit precision random floats. Compare only + # the last ten entries to show that the independent implementations + # are tracking. 
Here is the main() function needed to create the + # list of expected random numbers: + # void main(void){ + # int i; + # unsigned long init[4]={61731, 24903, 614, 42143}, length=4; + # init_by_array(init, length); + # for (i=0; i<2000; i++) { + # printf("%.15f ", genrand_res53()); + # if (i%5==4) printf("\n"); + # } + # } + expected = [0.45839803073713259, + 0.86057815201978782, + 0.92848331726782152, + 0.35932681119782461, + 0.081823493762449573, + 0.14332226470169329, + 0.084297823823520024, + 0.53814864671831453, + 0.089215024911993401, + 0.78486196105372907] + + self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96)) + actual = self.randomlist(2000)[-10:] + for a, e in zip(actual, expected): + self.assertAlmostEqual(a,e,places=14) + + def test_strong_reference_implementation(self): + # Like test_referenceImplementation, but checks for exact bit-level + # equality. This should pass on any box where C double contains + # at least 53 bits of precision (the underlying algorithm suffers + # no rounding errors -- all results are exact). + from math import ldexp + + expected = [0x0eab3258d2231fL, + 0x1b89db315277a5L, + 0x1db622a5518016L, + 0x0b7f9af0d575bfL, + 0x029e4c4db82240L, + 0x04961892f5d673L, + 0x02b291598e4589L, + 0x11388382c15694L, + 0x02dad977c9e1feL, + 0x191d96d4d334c6L] + self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96)) + actual = self.randomlist(2000)[-10:] + for a, e in zip(actual, expected): + self.assertEqual(long(ldexp(a, 53)), e) + + def test_long_seed(self): + # This is most interesting to run in debug mode, just to make sure + # nothing blows up. Under the covers, a dynamically resized array + # is allocated, consuming space proportional to the number of bits + # in the seed. Unfortunately, that's a quadratic-time algorithm, + # so don't make this horribly big. + seed = (1L << (10000 * 8)) - 1 # about 10K bytes + self.gen.seed(seed) + + def test_53_bits_per_float(self): + # This should pass whenever a C double has 53 bit precision. + span = 2 ** 53 + cum = 0 + for i in xrange(100): + cum |= int(self.gen.random() * span) + self.assertEqual(cum, span-1) + + def test_bigrand(self): + # The randrange routine should build-up the required number of bits + # in stages so that all bit positions are active. 
+ span = 2 ** 500 + cum = 0 + for i in xrange(100): + r = self.gen.randrange(span) + self.assert_(0 <= r < span) + cum |= r + self.assertEqual(cum, span-1) + + def test_bigrand_ranges(self): + for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: + start = self.gen.randrange(2 ** i) + stop = self.gen.randrange(2 ** (i-2)) + if stop <= start: + return + self.assert_(start <= self.gen.randrange(start, stop) < stop) + + def test_rangelimits(self): + for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: + self.assertEqual(set(range(start,stop)), + set([self.gen.randrange(start,stop) for i in xrange(100)])) + + def test_genrandbits(self): + # Verify cross-platform repeatability + self.gen.seed(1234567) + self.assertEqual(self.gen.getrandbits(100), + 97904845777343510404718956115L) + # Verify ranges + for k in xrange(1, 1000): + self.assert_(0 <= self.gen.getrandbits(k) < 2**k) + + # Verify all bits active + getbits = self.gen.getrandbits + for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: + cum = 0 + for i in xrange(100): + cum |= getbits(span) + self.assertEqual(cum, 2**span-1) + + # Verify argument checking + self.assertRaises(TypeError, self.gen.getrandbits) + self.assertRaises(TypeError, self.gen.getrandbits, 'a') + self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) + self.assertRaises(ValueError, self.gen.getrandbits, 0) + self.assertRaises(ValueError, self.gen.getrandbits, -1) + + def test_randbelow_logic(self, _log=log, int=int): + # check bitcount transition points: 2**i and 2**(i+1)-1 + # show that: k = int(1.001 + _log(n, 2)) + # is equal to or one greater than the number of bits in n + for i in xrange(1, 1000): + n = 1L << i # check an exact power of two + numbits = i+1 + k = int(1.00001 + _log(n, 2)) + self.assertEqual(k, numbits) + self.assert_(n == 2**(k-1)) + + n += n - 1 # check 1 below the next power of two + k = int(1.00001 + _log(n, 2)) + self.assert_(k in [numbits, numbits+1]) + self.assert_(2**k > n > 2**(k-2)) + + n -= n >> 15 # check a little farther below the next power of two + k = int(1.00001 + _log(n, 2)) + self.assertEqual(k, numbits) # note the stronger assertion + self.assert_(2**k > n > 2**(k-1)) # note the stronger assertion + + def test_randrange_bug_1590891(self): + start = 1000000000000 + stop = -100000000000000000000 + step = -200 + x = self.gen.randrange(start, stop, step) + self.assert_(stop < x <= start) + self.assertEqual((x+stop)%step, 0) + +_gammacoeff = (0.9999999999995183, 676.5203681218835, -1259.139216722289, + 771.3234287757674, -176.6150291498386, 12.50734324009056, + -0.1385710331296526, 0.9934937113930748e-05, 0.1659470187408462e-06) + +def gamma(z, cof=_gammacoeff, g=7): + z -= 1.0 + sum = cof[0] + for i in xrange(1,len(cof)): + sum += cof[i] / (z+i) + z += 0.5 + return (z+g)**z / exp(z+g) * sqrt(2*pi) * sum + +class TestDistributions(unittest.TestCase): + def test_zeroinputs(self): + # Verify that distributions can handle a series of zero inputs' + g = random.Random() + x = [g.random() for i in xrange(50)] + [0.0]*5 + g.random = x[:].pop; g.uniform(1,10) + g.random = x[:].pop; g.paretovariate(1.0) + g.random = x[:].pop; g.expovariate(1.0) + g.random = x[:].pop; g.weibullvariate(1.0, 1.0) + g.random = x[:].pop; g.normalvariate(0.0, 1.0) + g.random = x[:].pop; g.gauss(0.0, 1.0) + g.random = x[:].pop; g.lognormvariate(0.0, 1.0) + g.random = x[:].pop; g.vonmisesvariate(0.0, 1.0) + g.random = x[:].pop; g.gammavariate(0.01, 1.0) + g.random = x[:].pop; g.gammavariate(1.0, 1.0) + g.random = x[:].pop; 
g.gammavariate(200.0, 1.0) + g.random = x[:].pop; g.betavariate(3.0, 3.0) + + def test_avg_std(self): + # Use integration to test distribution average and standard deviation. + # Only works for distributions which do not consume variates in pairs + g = random.Random() + N = 5000 + x = [i/float(N) for i in xrange(1,N)] + for variate, args, mu, sigmasqrd in [ + (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12), + (g.expovariate, (1.5,), 1/1.5, 1/1.5**2), + (g.paretovariate, (5.0,), 5.0/(5.0-1), + 5.0/((5.0-1)**2*(5.0-2))), + (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0), + gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]: + g.random = x[:].pop + y = [] + for i in xrange(len(x)): + try: + y.append(variate(*args)) + except IndexError: + pass + s1 = s2 = 0 + for e in y: + s1 += e + s2 += (e - mu) ** 2 + N = len(y) + self.assertAlmostEqual(s1/N, mu, 2) + self.assertAlmostEqual(s2/(N-1), sigmasqrd, 2) + +class TestModule(unittest.TestCase): + def testMagicConstants(self): + self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141) + self.assertAlmostEqual(random.TWOPI, 6.28318530718) + self.assertAlmostEqual(random.LOG4, 1.38629436111989) + self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627) + + def test__all__(self): + # tests validity but not completeness of the __all__ list + self.failUnless(set(random.__all__) <= set(dir(random))) + + def test_random_subclass_with_kwargs(self): + # SF bug #1486663 -- this used to erroneously raise a TypeError + class Subclass(random.Random): + def __init__(self, newarg=None): + random.Random.__init__(self) + Subclass(newarg=1) + + +def test_main(verbose=None): + testclasses = [WichmannHill_TestBasicOps, + MersenneTwister_TestBasicOps, + TestDistributions, + TestModule] + + try: + random.SystemRandom().random() + except NotImplementedError: + pass + else: + testclasses.append(SystemRandom_TestBasicOps) + + test_support.run_unittest(*testclasses) + + # verify reference counting + import sys + if verbose and hasattr(sys, "gettotalrefcount"): + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*testclasses) + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,755 @@ +import sys +sys.path = ['.'] + sys.path + +from test.test_support import verbose, run_unittest +import re +from re import Scanner +import sys, os #, traceback +#from weakref import proxy + +# Misc tests from Tim Peters' re.doc + +# WARNING: Don't change details in these tests if you don't know +# what you're doing. Some of these tests were carefuly modeled to +# cover most of the code. 
+ +import unittest + +class ReTests(unittest.TestCase): + + def DONOTtest_weakref(self): + s = 'QabbbcR' + x = re.compile('ab+c') + y = proxy(x) + self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR')) + + def test_search_star_plus(self): + self.assertEqual(re.search('x*', 'axx').span(0), (0, 0)) + self.assertEqual(re.search('x*', 'axx').span(), (0, 0)) + self.assertEqual(re.search('x+', 'axx').span(0), (1, 3)) + self.assertEqual(re.search('x+', 'axx').span(), (1, 3)) + self.assertEqual(re.search('x', 'aaa'), None) + self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0)) + self.assertEqual(re.match('a*', 'xxx').span(), (0, 0)) + self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3)) + self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3)) + self.assertEqual(re.match('a+', 'xxx'), None) + + def bump_num(self, matchobj): + int_value = int(matchobj.group(0)) + return str(int_value + 1) + + def test_basic_re_sub(self): + self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x') + self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'), + '9.3 -3 24x100y') + self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3), + '9.3 -3 23x99y') + + self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n') + self.assertEqual(re.sub('.', r"\n", 'x'), '\n') + + s = r"\1\1" + self.assertEqual(re.sub('(.)', s, 'x'), 'xx') + self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s) + self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s) + + self.assertEqual(re.sub('(?Px)', '\g\g', 'xx'), 'xxxx') + self.assertEqual(re.sub('(?Px)', '\g\g<1>', 'xx'), 'xxxx') + self.assertEqual(re.sub('(?Px)', '\g\g', 'xx'), 'xxxx') + self.assertEqual(re.sub('(?Px)', '\g<1>\g<1>', 'xx'), 'xxxx') + + self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'), + '\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D') + self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a') + self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), + (chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7))) + + self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest') + + def test_bug_449964(self): + # fails for group followed by other escape + self.assertEqual(re.sub(r'(?Px)', '\g<1>\g<1>\\b', 'xx'), + 'xx\bxx\b') + + def test_bug_449000(self): + # Test for sub() on escaped characters + self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'), + 'abc\ndef\n') + self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'), + 'abc\ndef\n') + self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'), + 'abc\ndef\n') + self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'), + 'abc\ndef\n') + + def test_sub_template_numeric_escape(self): + # bug 776311 and friends + self.assertEqual(re.sub('x', r'\0', 'x'), '\0') + self.assertEqual(re.sub('x', r'\000', 'x'), '\000') + self.assertEqual(re.sub('x', r'\001', 'x'), '\001') + self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8') + self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9') + self.assertEqual(re.sub('x', r'\111', 'x'), '\111') + self.assertEqual(re.sub('x', r'\117', 'x'), '\117') + + self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111') + self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1') + + self.assertEqual(re.sub('x', r'\00', 'x'), '\x00') + self.assertEqual(re.sub('x', r'\07', 'x'), '\x07') + self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8') + self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9') + self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a') + + self.assertEqual(re.sub('x', r'\400', 'x'), '\0') + self.assertEqual(re.sub('x', r'\777', 'x'), 
'\377') + + self.assertRaises(re.error, re.sub, 'x', r'\1', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\8', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\9', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\11', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\18', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\90', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\99', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8' + self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x') + self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1' + self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0' + + # in python2.3 (etc), these loop endlessly in sre_parser.py + self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x') + self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'), + 'xz8') + self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'), + 'xza') + + def test_qualified_re_sub(self): + self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb') + self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa') + + def test_bug_114660(self): + self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'), + 'hello there') + + def test_bug_462270(self): + # Test for empty sub() behaviour, see SF bug #462270 + self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-') + self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d') + + def test_symbolic_refs(self): + self.assertRaises(re.error, re.sub, '(?Px)', '\gx)', '\g<', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)', '\g', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)', '\g', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)', '\g<1a1>', 'xx') + self.assertRaises(IndexError, re.sub, '(?Px)', '\g', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)|(?Py)', '\g', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)|(?Py)', '\\2', 'xx') + self.assertRaises(re.error, re.sub, '(?Px)', '\g<-1>', 'xx') + + def test_re_subn(self): + self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2)) + self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1)) + self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0)) + self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4)) + self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2)) + + def test_re_split(self): + self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c']) + self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c']) + self.assertEqual(re.split("(:*)", ":a:b::c"), + ['', ':', 'a', ':', 'b', '::', 'c']) + self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c']) + self.assertEqual(re.split("(:)*", ":a:b::c"), + ['', ':', 'a', ':', 'b', ':', 'c']) + self.assertEqual(re.split("([b:]+)", ":a:b::c"), + ['', ':', 'a', ':b::', 'c']) + self.assertEqual(re.split("(b)|(:+)", ":a:b::c"), + ['', None, ':', 'a', None, ':', '', 'b', None, '', + None, '::', 'c']) + self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"), + ['', 'a', '', '', 'c']) + + def test_qualified_re_split(self): + self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c']) + self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d']) + self.assertEqual(re.split("(:)", ":a:b::c", 2), + ['', ':', 'a', ':', 'b::c']) + self.assertEqual(re.split("(:*)", ":a:b::c", 2), + ['', ':', 'a', ':', 'b::c']) + + def test_re_findall(self): + self.assertEqual(re.findall(":+", "abc"), []) + self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", 
":::"]) + self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"]) + self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""), + (":", ":"), + (":", "::")]) + + def test_bug_117612(self): + self.assertEqual(re.findall(r"(a|(b))", "aba"), + [("a", ""),("b", "b"),("a", "")]) + + def test_re_match(self): + self.assertEqual(re.match('a', 'a').groups(), ()) + self.assertEqual(re.match('(a)', 'a').groups(), ('a',)) + self.assertEqual(re.match(r'(a)', 'a').group(0), 'a') + self.assertEqual(re.match(r'(a)', 'a').group(1), 'a') + self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a')) + + pat = re.compile('((a)|(b))(c)?') + self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None)) + self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None)) + self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c')) + self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c')) + self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c')) + + # A single group + m = re.match('(a)', 'a') + self.assertEqual(m.group(0), 'a') + self.assertEqual(m.group(0), 'a') + self.assertEqual(m.group(1), 'a') + self.assertEqual(m.group(1, 1), ('a', 'a')) + + pat = re.compile('(?:(?Pa)|(?Pb))(?Pc)?') + self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None)) + self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'), + (None, 'b', None)) + self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c')) + + def test_re_groupref_exists(self): + self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(), + ('(', 'a')) + self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(), + (None, 'a')) + self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None) + self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None) + self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(), + ('a', 'b')) + self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(), + (None, 'd')) + self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(), + (None, 'd')) + self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(), + ('a', '')) + + # Tests for bug #1177831: exercise groups other than the first group + p = re.compile('(?Pa)(?Pb)?((?(g2)c|d))') + self.assertEqual(p.match('abc').groups(), + ('a', 'b', 'c')) + self.assertEqual(p.match('ad').groups(), + ('a', None, 'd')) + self.assertEqual(p.match('abd'), None) + self.assertEqual(p.match('ac'), None) + + + def test_re_groupref(self): + self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(), + ('|', 'a')) + self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(), + (None, 'a')) + self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None) + self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None) + self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(), + ('a', 'a')) + self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(), + (None, None)) + + def test_groupdict(self): + self.assertEqual(re.match('(?Pfirst) (?Psecond)', + 'first second').groupdict(), + {'first':'first', 'second':'second'}) + + def test_expand(self): + self.assertEqual(re.match("(?Pfirst) (?Psecond)", + "first second") + .expand(r"\2 \1 \g \g"), + "second first second first") + + def test_repeat_minmax(self): + self.assertEqual(re.match("^(\w){1}$", "abc"), None) + self.assertEqual(re.match("^(\w){1}?$", "abc"), None) + self.assertEqual(re.match("^(\w){1,2}$", "abc"), None) + self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None) + + self.assertEqual(re.match("^(\w){3}$", 
"abc").group(1), "c") + self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c") + self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c") + + self.assertEqual(re.match("^x{1}$", "xxx"), None) + self.assertEqual(re.match("^x{1}?$", "xxx"), None) + self.assertEqual(re.match("^x{1,2}$", "xxx"), None) + self.assertEqual(re.match("^x{1,2}?$", "xxx"), None) + + self.assertNotEqual(re.match("^x{3}$", "xxx"), None) + self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None) + self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None) + self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None) + self.assertNotEqual(re.match("^x{3}?$", "xxx"), None) + self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None) + self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None) + self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None) + + self.assertEqual(re.match("^x{}$", "xxx"), None) + self.assertNotEqual(re.match("^x{}$", "x{}"), None) + + def test_getattr(self): + self.assertEqual(re.match("(a)", "a").pos, 0) + self.assertEqual(re.match("(a)", "a").endpos, 1) + self.assertEqual(re.match("(a)", "a").string, "a") + self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1))) + self.assertNotEqual(re.match("(a)", "a").re, None) + + def test_special_escapes(self): + self.assertEqual(re.search(r"\b(b.)\b", + "abcd abc bcd bx").group(1), "bx") + self.assertEqual(re.search(r"\B(b.)\B", + "abc bcd bc abxd").group(1), "bx") + self.assertEqual(re.search(r"\b(b.)\b", + "abcd abc bcd bx", re.LOCALE).group(1), "bx") + self.assertEqual(re.search(r"\B(b.)\B", + "abc bcd bc abxd", re.LOCALE).group(1), "bx") + self.assertEqual(re.search(r"\b(b.)\b", + "abcd abc bcd bx", re.UNICODE).group(1), "bx") + self.assertEqual(re.search(r"\B(b.)\B", + "abc bcd bc abxd", re.UNICODE).group(1), "bx") + self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc") + self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc") + self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None) + self.assertEqual(re.search(r"\b(b.)\b", + u"abcd abc bcd bx").group(1), "bx") + self.assertEqual(re.search(r"\B(b.)\B", + u"abc bcd bc abxd").group(1), "bx") + self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc") + self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc") + self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None) + self.assertEqual(re.search(r"\d\D\w\W\s\S", + "1aa! a").group(0), "1aa! a") + self.assertEqual(re.search(r"\d\D\w\W\s\S", + "1aa! a", re.LOCALE).group(0), "1aa! a") + self.assertEqual(re.search(r"\d\D\w\W\s\S", + "1aa! a", re.UNICODE).group(0), "1aa! 
a") + + def test_ignore_case(self): + self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC") + self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC") + + def test_bigcharset(self): + self.assertEqual(re.match(u"([\u2222\u2223])", + u"\u2222").group(1), u"\u2222") + self.assertEqual(re.match(u"([\u2222\u2223])", + u"\u2222", re.UNICODE).group(1), u"\u2222") + + def test_anyall(self): + self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0), + "a\nb") + self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0), + "a\n\nb") + + def test_non_consuming(self): + self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a") + self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a") + self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a") + self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a") + self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a") + self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a") + self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a") + + self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a") + self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a") + self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a") + self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a") + + def test_ignore_case(self): + self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b") + self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb") + self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b") + self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb") + self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a") + self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa") + self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a") + self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa") + + def test_category(self): + self.assertEqual(re.match(r"(\s)", " ").group(1), " ") + + def test_getlower(self): + import _sre + self.assertEqual(_sre.getlower(ord('A'), 0), ord('a')) + self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a')) + self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a')) + + self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC") + self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC") + + def test_not_literal(self): + self.assertEqual(re.search("\s([^a])", " b").group(1), "b") + self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb") + + def test_search_coverage(self): + self.assertEqual(re.search("\s(b)", " b").group(1), "b") + self.assertEqual(re.search("a\s", "a ").group(0), "a ") + + def test_re_escape(self): + p="" + for i in range(0, 256): + p = p + chr(i) + self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None, + True) + self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1)) + + pat=re.compile(re.escape(p)) + self.assertEqual(pat.match(p) is not None, True) + self.assertEqual(pat.match(p).span(), (0,256)) + + def test_pickling(self): + import pickle + self.pickle_test(pickle) + import cPickle + self.pickle_test(cPickle) + # old pickles expect the _compile() reconstructor in sre module + import warnings + original_filters = warnings.filters[:] + try: + warnings.filterwarnings("ignore", "The sre module is deprecated", + DeprecationWarning) + from sre import _compile + finally: + warnings.filters = original_filters + + def pickle_test(self, pickle): + 
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)') + s = pickle.dumps(oldpat) + newpat = pickle.loads(s) + # Not using object identity for _sre.py, since some Python builds do + # not seem to preserve that in all cases (observed on an UCS-4 build + # of 2.4.1). + #self.assertEqual(oldpat, newpat) + self.assertEqual(oldpat.__dict__, newpat.__dict__) + + def test_constants(self): + self.assertEqual(re.I, re.IGNORECASE) + self.assertEqual(re.L, re.LOCALE) + self.assertEqual(re.M, re.MULTILINE) + self.assertEqual(re.S, re.DOTALL) + self.assertEqual(re.X, re.VERBOSE) + + def test_flags(self): + for flag in [re.I, re.M, re.X, re.S, re.L]: + self.assertNotEqual(re.compile('^pattern$', flag), None) + + def test_sre_character_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255]: + self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None) + self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None) + self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None) + self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None) + self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None) + self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None) + self.assertRaises(re.error, re.match, "\911", "") + + def test_sre_character_class_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255]: + self.assertNotEqual(re.match(r"[\%03o]" % i, chr(i)), None) + self.assertNotEqual(re.match(r"[\%03o0]" % i, chr(i)), None) + self.assertNotEqual(re.match(r"[\%03o8]" % i, chr(i)), None) + self.assertNotEqual(re.match(r"[\x%02x]" % i, chr(i)), None) + self.assertNotEqual(re.match(r"[\x%02x0]" % i, chr(i)), None) + self.assertNotEqual(re.match(r"[\x%02xz]" % i, chr(i)), None) + self.assertRaises(re.error, re.match, "[\911]", "") + + def test_bug_113254(self): + self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1) + self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1) + self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1)) + + def test_bug_527371(self): + # bug described in patches 527371/672491 + self.assertEqual(re.match(r'(a)?a','a').lastindex, None) + self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1) + self.assertEqual(re.match(r'(?Pa)(?Pb)?b','ab').lastgroup, 'a') + self.assertEqual(re.match("(?Pa(b))", "ab").lastgroup, 'a') + self.assertEqual(re.match("((a))", "a").lastindex, 1) + + def test_bug_545855(self): + # bug 545855 -- This pattern failed to cause a compile error as it + # should, instead provoking a TypeError. + self.assertRaises(re.error, re.compile, 'foo[a-') + + def DONOTtest_bug_418626(self): + # XXX disabled for PyPy, too time-consuming. But our implementation is + # in fact non-recursive as well. + # bugs 418626 at al. -- Testing Greg Chapman's addition of op code + # SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of + # pattern '*?' on a long string. + self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001) + self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0), + 20003) + self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001) + # non-simple '*?' still used to hit the recursion limit, before the + # non-recursive scheme was implemented. + self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001) + + def test_bug_612074(self): + pat=u"["+re.escape(u"\u2039")+u"]" + self.assertEqual(re.compile(pat) and 1, 1) + + def DONOTtest_stack_overflow(self): + # XXX disabled for PyPy, too time-consuming. But our implementation is + # in fact non-recursive as well. 
+ # nasty cases that used to overflow the straightforward recursive + # implementation of repeated groups. + self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x') + self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x') + self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x') + + def test_scanner(self): + def s_ident(scanner, token): return token + def s_operator(scanner, token): return "op%s" % token + def s_float(scanner, token): return float(token) + def s_int(scanner, token): return int(token) + + scanner = Scanner([ + (r"[a-zA-Z_]\w*", s_ident), + (r"\d+\.\d*", s_float), + (r"\d+", s_int), + (r"=|\+|-|\*|/", s_operator), + (r"\s+", None), + ]) + + self.assertNotEqual(scanner.scanner.scanner("").pattern, None) + + self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"), + (['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5, + 'op+', 'bar'], '')) + + def test_bug_448951(self): + # bug 448951 (similar to 429357, but with single char match) + # (Also test greedy matches.) + for op in '','?','*': + self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(), + (None, None)) + self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(), + ('a:', 'a')) + + def test_bug_725106(self): + # capturing groups in alternatives in repeats + self.assertEqual(re.match('^((a)|b)*', 'abc').groups(), + ('b', 'a')) + self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(), + ('c', 'b')) + self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(), + ('b', None)) + self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(), + ('b', None)) + self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(), + ('b', 'a')) + self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(), + ('c', 'b')) + self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(), + ('b', None)) + self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(), + ('b', None)) + + def test_bug_725149(self): + # mark_stack_base restoring before restoring marks + self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(), + ('a', None)) + self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(), + ('a', None, None)) + + def test_bug_764548(self): + # bug 764548, re.compile() barfs on str/unicode subclasses + try: + unicode + except NameError: + return # no problem if we have no unicode + class my_unicode(unicode): pass + pat = re.compile(my_unicode("abc")) + self.assertEqual(pat.match("xyz"), None) + + def test_finditer(self): + iter = re.finditer(r":+", "a:b::c:::d") + self.assertEqual([item.group(0) for item in iter], + [":", "::", ":::"]) + + def test_bug_926075(self): + try: + unicode + except NameError: + return # no problem if we have no unicode + self.assert_(re.compile('bug_926075') is not + re.compile(eval("u'bug_926075'"))) + + def test_bug_931848(self): + try: + unicode + except NameError: + pass + pattern = eval('u"[\u002E\u3002\uFF0E\uFF61]"') + self.assertEqual(re.compile(pattern).split("a.b.c"), + ['a','b','c']) + + def test_bug_581080(self): + iter = re.finditer(r"\s", "a b") + self.assertEqual(iter.next().span(), (1,2)) + self.assertRaises(StopIteration, iter.next) + + scanner = re.compile(r"\s").scanner("a b") + self.assertEqual(scanner.search().span(), (1, 2)) + self.assertEqual(scanner.search(), None) + + def test_bug_817234(self): + iter = re.finditer(r".*", "asdf") + self.assertEqual(iter.next().span(), (0, 4)) + self.assertEqual(iter.next().span(), (4, 4)) + self.assertRaises(StopIteration, iter.next) + + def test_empty_array(self): + # SF buf 1647541 + import array + for typecode in 
'cbBuhHiIlLfd': + a = array.array(typecode) + self.assertEqual(re.compile("bla").match(a), None) + self.assertEqual(re.compile("").match(a).groups(), ()) + +def run_re_tests(): + from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR + if verbose: + print 'Running re_tests test suite' + else: + # To save time, only run the first and last 10 tests + #tests = tests[:10] + tests[-10:] + pass + + for t in tests: + sys.stdout.flush() + pattern = s = outcome = repl = expected = None + if len(t) == 5: + pattern, s, outcome, repl, expected = t + elif len(t) == 3: + pattern, s, outcome = t + else: + raise ValueError, ('Test tuples should have 3 or 5 fields', t) + + try: + obj = re.compile(pattern) + except re.error: + if outcome == SYNTAX_ERROR: pass # Expected a syntax error + else: + print '=== Syntax error:', t + except KeyboardInterrupt: raise KeyboardInterrupt + except: + print '*** Unexpected error ***', t + # Traceback disabled in PyPy for speed reasons + #if verbose: + # traceback.print_exc(file=sys.stdout) + else: + try: + result = obj.search(s) + except re.error, msg: + print '=== Unexpected exception', t, repr(msg) + if outcome == SYNTAX_ERROR: + # This should have been a syntax error; forget it. + pass + elif outcome == FAIL: + if result is None: pass # No match, as expected + else: print '=== Succeeded incorrectly', t + elif outcome == SUCCEED: + if result is not None: + # Matched, as expected, so now we compute the + # result string and compare it to our expected result. + start, end = result.span(0) + vardict={'found': result.group(0), + 'groups': result.group(), + 'flags': result.re.flags} + for i in range(1, 100): + try: + gi = result.group(i) + # Special hack because else the string concat fails: + if gi is None: + gi = "None" + except IndexError: + gi = "Error" + vardict['g%d' % i] = gi + for i in result.re.groupindex.keys(): + try: + gi = result.group(i) + if gi is None: + gi = "None" + except IndexError: + gi = "Error" + vardict[i] = gi + repl = eval(repl, vardict) + if repl != expected: + print '=== grouping error', t, + print repr(repl) + ' should be ' + repr(expected) + else: + print '=== Failed incorrectly', t + + # Try the match on a unicode string, and check that it + # still succeeds. + try: + result = obj.search(unicode(s, "latin-1")) + if result is None: + print '=== Fails on unicode match', t + except NameError: + continue # 1.5.2 + except TypeError: + continue # unicode test case + + # Try the match on a unicode pattern, and check that it + # still succeeds. + obj=re.compile(unicode(pattern, "latin-1")) + result = obj.search(s) + if result is None: + print '=== Fails on unicode pattern match', t + + # Try the match with the search area limited to the extent + # of the match and see if it still succeeds. \B will + # break (because it won't match at the end or start of a + # string), so we'll ignore patterns that feature it. + + if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \ + and result is not None: + obj = re.compile(pattern) + result = obj.search(s, result.start(0), result.end(0) + 1) + if result is None: + print '=== Failed on range-limited match', t + + # Try the match with IGNORECASE enabled, and check that it + # still succeeds. + obj = re.compile(pattern, re.IGNORECASE) + result = obj.search(s) + if result is None: + print '=== Fails on case-insensitive match', t + + # Try the match with LOCALE enabled, and check that it + # still succeeds. 
+ obj = re.compile(pattern, re.LOCALE) + result = obj.search(s) + if result is None: + print '=== Fails on locale-sensitive match', t + + # Try the match with UNICODE locale enabled, and check + # that it still succeeds. + obj = re.compile(pattern, re.UNICODE) + result = obj.search(s) + if result is None: + print '=== Fails on unicode-sensitive match', t + +def test_main(): + run_unittest(ReTests) + # XXX Disabled re_tests for PyPy because they take approximately forever + # to run ... + #run_re_tests() + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,311 @@ +""" + Test cases for the repr module + Nick Mathewson +""" + +import sys +import os +import shutil +import unittest + +from test.test_support import run_unittest +from repr import repr as r # Don't shadow builtin repr + + +def nestedTuple(nesting): + t = () + for i in range(nesting): + t = (t,) + return t + +class ReprTests(unittest.TestCase): + + def test_string(self): + eq = self.assertEquals + eq(r("abc"), "'abc'") + eq(r("abcdefghijklmnop"),"'abcdefghijklmnop'") + + s = "a"*30+"b"*30 + expected = repr(s)[:13] + "..." + repr(s)[-14:] + eq(r(s), expected) + + eq(r("\"'"), repr("\"'")) + s = "\""*30+"'"*100 + expected = repr(s)[:13] + "..." + repr(s)[-14:] + eq(r(s), expected) + + def test_container(self): + from array import array + from collections import deque + + eq = self.assertEquals + # Tuples give up after 6 elements + eq(r(()), "()") + eq(r((1,)), "(1,)") + eq(r((1, 2, 3)), "(1, 2, 3)") + eq(r((1, 2, 3, 4, 5, 6)), "(1, 2, 3, 4, 5, 6)") + eq(r((1, 2, 3, 4, 5, 6, 7)), "(1, 2, 3, 4, 5, 6, ...)") + + # Lists give up after 6 as well + eq(r([]), "[]") + eq(r([1]), "[1]") + eq(r([1, 2, 3]), "[1, 2, 3]") + eq(r([1, 2, 3, 4, 5, 6]), "[1, 2, 3, 4, 5, 6]") + eq(r([1, 2, 3, 4, 5, 6, 7]), "[1, 2, 3, 4, 5, 6, ...]") + + # Sets give up after 6 as well + eq(r(set([])), "set([])") + eq(r(set([1])), "set([1])") + eq(r(set([1, 2, 3])), "set([1, 2, 3])") + eq(r(set([1, 2, 3, 4, 5, 6])), "set([1, 2, 3, 4, 5, 6])") + eq(r(set([1, 2, 3, 4, 5, 6, 7])), "set([1, 2, 3, 4, 5, 6, ...])") + + # Frozensets give up after 6 as well + eq(r(frozenset([])), "frozenset([])") + eq(r(frozenset([1])), "frozenset([1])") + eq(r(frozenset([1, 2, 3])), "frozenset([1, 2, 3])") + eq(r(frozenset([1, 2, 3, 4, 5, 6])), "frozenset([1, 2, 3, 4, 5, 6])") + eq(r(frozenset([1, 2, 3, 4, 5, 6, 7])), "frozenset([1, 2, 3, 4, 5, 6, ...])") + + # collections.deque after 6 + eq(r(deque([1, 2, 3, 4, 5, 6, 7])), "deque([1, 2, 3, 4, 5, 6, ...])") + + # Dictionaries give up after 4. + eq(r({}), "{}") + d = {'alice': 1, 'bob': 2, 'charles': 3, 'dave': 4} + eq(r(d), "{'alice': 1, 'bob': 2, 'charles': 3, 'dave': 4}") + d['arthur'] = 1 + eq(r(d), "{'alice': 1, 'arthur': 1, 'bob': 2, 'charles': 3, ...}") + + # array.array after 5. 
+ eq(r(array('i')), "array('i', [])") + eq(r(array('i', [1])), "array('i', [1])") + eq(r(array('i', [1, 2])), "array('i', [1, 2])") + eq(r(array('i', [1, 2, 3])), "array('i', [1, 2, 3])") + eq(r(array('i', [1, 2, 3, 4])), "array('i', [1, 2, 3, 4])") + eq(r(array('i', [1, 2, 3, 4, 5])), "array('i', [1, 2, 3, 4, 5])") + eq(r(array('i', [1, 2, 3, 4, 5, 6])), + "array('i', [1, 2, 3, 4, 5, ...])") + + def test_numbers(self): + eq = self.assertEquals + eq(r(123), repr(123)) + eq(r(123L), repr(123L)) + eq(r(1.0/3), repr(1.0/3)) + + n = 10L**100 + expected = repr(n)[:18] + "..." + repr(n)[-19:] + eq(r(n), expected) + + def test_instance(self): + # Disabled for PyPy because it relies on oldstyle class behaviour. + # Running the test under oldstyle results in many more other problems + # though. + eq = self.assertEquals + i1 = ClassWithRepr("a") + eq(r(i1), repr(i1)) + + i2 = ClassWithRepr("x"*1000) + expected = repr(i2)[:13] + "..." + repr(i2)[-14:] + eq(r(i2), expected) + + i3 = ClassWithFailingRepr() + eq(r(i3), (""%id(i3))) + + s = r(ClassWithFailingRepr) + self.failUnless(s.startswith("")) + self.failUnless(s.find("...") == 8) + + def test_file(self): + fp = open(unittest.__file__) + self.failUnless(repr(fp).startswith( + "') + # Methods + self.failUnless(repr(''.split).find( + "bound method str.split of '' at 0x") > -1) + + def test_xrange(self): + import warnings + eq = self.assertEquals + eq(repr(xrange(1)), 'xrange(1)') + eq(repr(xrange(1, 2)), 'xrange(1, 2)') + eq(repr(xrange(1, 2, 3)), 'xrange(1, 4, 3)') + + def test_nesting(self): + eq = self.assertEquals + # everything is meant to give up after 6 levels. + eq(r([[[[[[[]]]]]]]), "[[[[[[[]]]]]]]") + eq(r([[[[[[[[]]]]]]]]), "[[[[[[[...]]]]]]]") + + eq(r(nestedTuple(6)), "(((((((),),),),),),)") + eq(r(nestedTuple(7)), "(((((((...),),),),),),)") + + eq(r({ nestedTuple(5) : nestedTuple(5) }), + "{((((((),),),),),): ((((((),),),),),)}") + eq(r({ nestedTuple(6) : nestedTuple(6) }), + "{((((((...),),),),),): ((((((...),),),),),)}") + + eq(r([[[[[[{}]]]]]]), "[[[[[[{}]]]]]]") + eq(r([[[[[[[{}]]]]]]]), "[[[[[[[...]]]]]]]") + + def test_buffer(self): + # XXX doesn't test buffers with no b_base or read-write buffers (see + # bufferobject.c). The test is fairly incomplete too. Sigh. + x = buffer('foo') + self.failUnless(repr(x).startswith('") + # XXX member descriptors + # XXX attribute descriptors + # XXX slot descriptors + # static and class methods + class C: + def foo(cls): pass + x = staticmethod(C.foo) + self.failUnless(repr(x).startswith('" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__)) + eq(repr(sys), "") + + def test_type(self): + eq = self.assertEquals + touch(os.path.join(self.subpkgname, 'foo'+os.extsep+'py'), '''\ +class foo(object): + pass +''') + from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import foo + eq(repr(foo.foo), + "" % foo.__name__) + + def test_object(self): + # XXX Test the repr of a type with a really long tp_name but with no + # tp_repr. WIBNI we had ::Inline? :) + pass + + def test_class(self): + touch(os.path.join(self.subpkgname, 'bar'+os.extsep+'py'), '''\ +class bar: + pass +''') + from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import bar + # Module name may be prefixed with "test.", depending on how run. 
+ self.failUnless(repr(bar.bar).startswith( + " -1) + # Bound method next + iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa() + self.failUnless(repr(iqux.amethod).find( + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') > -1) + + def test_builtin_function(self): + # XXX test built-in functions and methods with really long names + pass + +class ClassWithRepr: + def __init__(self, s): + self.s = s + def __repr__(self): + return "ClassWithLongRepr(%r)" % self.s + + +class ClassWithFailingRepr: + def __repr__(self): + raise Exception("This should be caught by Repr.repr_instance") + + +def test_main(): + run_unittest(ReprTests) + if os.name != 'mac': + run_unittest(LongReprTest) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,562 @@ +from test.test_support import verify, TestFailed, check_syntax, vereq + +import warnings +warnings.filterwarnings("ignore", r"import \*", SyntaxWarning, "") + +print "1. simple nesting" + +def make_adder(x): + def adder(y): + return x + y + return adder + +inc = make_adder(1) +plus10 = make_adder(10) + +vereq(inc(1), 2) +vereq(plus10(-2), 8) + +print "2. extra nesting" + +def make_adder2(x): + def extra(): # check freevars passing through non-use scopes + def adder(y): + return x + y + return adder + return extra() + +inc = make_adder2(1) +plus10 = make_adder2(10) + +vereq(inc(1), 2) +vereq(plus10(-2), 8) + +print "3. simple nesting + rebinding" + +def make_adder3(x): + def adder(y): + return x + y + x = x + 1 # check tracking of assignment to x in defining scope + return adder + +inc = make_adder3(0) +plus10 = make_adder3(9) + +vereq(inc(1), 2) +vereq(plus10(-2), 8) + +print "4. nesting with global but no free" + +def make_adder4(): # XXX add exta level of indirection + def nest(): + def nest(): + def adder(y): + return global_x + y # check that plain old globals work + return adder + return nest() + return nest() + +global_x = 1 +adder = make_adder4() +vereq(adder(1), 2) + +global_x = 10 +vereq(adder(-2), 8) + +print "5. nesting through class" + +def make_adder5(x): + class Adder: + def __call__(self, y): + return x + y + return Adder() + +inc = make_adder5(1) +plus10 = make_adder5(10) + +vereq(inc(1), 2) +vereq(plus10(-2), 8) + +print "6. nesting plus free ref to global" + +def make_adder6(x): + global global_nest_x + def adder(y): + return global_nest_x + y + global_nest_x = x + return adder + +inc = make_adder6(1) +plus10 = make_adder6(10) + +vereq(inc(1), 11) # there's only one global +vereq(plus10(-2), 8) + +print "7. nearest enclosing scope" + +def f(x): + def g(y): + x = 42 # check that this masks binding in f() + def h(z): + return x + z + return h + return g(2) + +test_func = f(10) +vereq(test_func(5), 47) + +print "8. 
mixed freevars and cellvars" + +def identity(x): + return x + +def f(x, y, z): + def g(a, b, c): + a = a + x # 3 + def h(): + # z * (4 + 9) + # 3 * 13 + return identity(z * (b + y)) + y = c + z # 9 + return h + return g + +g = f(1, 2, 3) +h = g(2, 4, 6) +vereq(h(), 39) + +print "9. free variable in method" + +def test(): + method_and_var = "var" + class Test: + def method_and_var(self): + return "method" + def test(self): + return method_and_var + def actual_global(self): + return str("global") + def str(self): + return str(self) + return Test() + +t = test() +vereq(t.test(), "var") +vereq(t.method_and_var(), "method") +vereq(t.actual_global(), "global") + +method_and_var = "var" +class Test: + # this class is not nested, so the rules are different + def method_and_var(self): + return "method" + def test(self): + return method_and_var + def actual_global(self): + return str("global") + def str(self): + return str(self) + +t = Test() +vereq(t.test(), "var") +vereq(t.method_and_var(), "method") +vereq(t.actual_global(), "global") + +print "10. recursion" + +def f(x): + def fact(n): + if n == 0: + return 1 + else: + return n * fact(n - 1) + if x >= 0: + return fact(x) + else: + raise ValueError, "x must be >= 0" + +vereq(f(6), 720) + + +print "11. unoptimized namespaces" + +check_syntax("""\ +def unoptimized_clash1(strip): + def f(s): + from string import * + return strip(s) # ambiguity: free or local + return f +""") + +check_syntax("""\ +def unoptimized_clash2(): + from string import * + def f(s): + return strip(s) # ambiguity: global or local + return f +""") + +check_syntax("""\ +def unoptimized_clash2(): + from string import * + def g(): + def f(s): + return strip(s) # ambiguity: global or local + return f +""") + +# XXX could allow this for exec with const argument, but what's the point +check_syntax("""\ +def error(y): + exec "a = 1" + def f(x): + return x + y + return f +""") + +check_syntax("""\ +def f(x): + def g(): + return x + del x # can't del name +""") + +check_syntax("""\ +def f(): + def g(): + from string import * + return strip # global or local? +""") + +# and verify a few cases that should work + +exec """ +def noproblem1(): + from string import * + f = lambda x:x + +def noproblem2(): + from string import * + def f(x): + return x + 1 + +def noproblem3(): + from string import * + def f(x): + global y + y = x +""" + +print "12. lambdas" + +f1 = lambda x: lambda y: x + y +inc = f1(1) +plus10 = f1(10) +vereq(inc(1), 2) +vereq(plus10(5), 15) + +f2 = lambda x: (lambda : lambda y: x + y)() +inc = f2(1) +plus10 = f2(10) +vereq(inc(1), 2) +vereq(plus10(5), 15) + +f3 = lambda x: lambda y: global_x + y +global_x = 1 +inc = f3(None) +vereq(inc(2), 3) + +f8 = lambda x, y, z: lambda a, b, c: lambda : z * (b + y) +g = f8(1, 2, 3) +h = g(2, 4, 6) +vereq(h(), 18) + +print "13. UnboundLocal" + +def errorInOuter(): + print y + def inner(): + return y + y = 1 + +def errorInInner(): + def inner(): + return y + inner() + y = 1 + +try: + errorInOuter() +except UnboundLocalError: + pass +else: + raise TestFailed + +try: + errorInInner() +except NameError: + pass +else: + raise TestFailed + +# test for bug #1501934: incorrect LOAD/STORE_GLOBAL generation +global_x = 1 +def f(): + global_x += 1 +try: + f() +except UnboundLocalError: + pass +else: + raise TestFailed, 'scope of global_x not correctly determined' + +print "14. 
complex definitions" + +def makeReturner(*lst): + def returner(): + return lst + return returner + +vereq(makeReturner(1,2,3)(), (1,2,3)) + +def makeReturner2(**kwargs): + def returner(): + return kwargs + return returner + +vereq(makeReturner2(a=11)()['a'], 11) + +def makeAddPair((a, b)): + def addPair((c, d)): + return (a + c, b + d) + return addPair + +vereq(makeAddPair((1, 2))((100, 200)), (101,202)) + +print "15. scope of global statements" +# Examples posted by Samuele Pedroni to python-dev on 3/1/2001 + +# I +x = 7 +def f(): + x = 1 + def g(): + global x + def i(): + def h(): + return x + return h() + return i() + return g() +vereq(f(), 7) +vereq(x, 7) + +# II +x = 7 +def f(): + x = 1 + def g(): + x = 2 + def i(): + def h(): + return x + return h() + return i() + return g() +vereq(f(), 2) +vereq(x, 7) + +# III +x = 7 +def f(): + x = 1 + def g(): + global x + x = 2 + def i(): + def h(): + return x + return h() + return i() + return g() +vereq(f(), 2) +vereq(x, 2) + +# IV +x = 7 +def f(): + x = 3 + def g(): + global x + x = 2 + def i(): + def h(): + return x + return h() + return i() + return g() +vereq(f(), 2) +vereq(x, 2) + +# XXX what about global statements in class blocks? +# do they affect methods? + +x = 12 +class Global: + global x + x = 13 + def set(self, val): + x = val + def get(self): + return x + +g = Global() +vereq(g.get(), 13) +g.set(15) +vereq(g.get(), 13) + +print "16. check leaks" + +class Foo: + count = 0 + + def __init__(self): + Foo.count += 1 + + def __del__(self): + Foo.count -= 1 + +def f1(): + x = Foo() + def f2(): + return x + f2() + +for i in range(100): + f1() + +import gc; gc.collect(); gc.collect(); gc.collect() +vereq(Foo.count, 0) + +print "17. class and global" + +def test(x): + class Foo: + global x + def __call__(self, y): + return x + y + return Foo() + +x = 0 +vereq(test(6)(2), 8) +x = -1 +vereq(test(3)(2), 5) + +looked_up_by_load_name = False +class X: + # Implicit globals inside classes are be looked up by LOAD_NAME, not + # LOAD_GLOBAL. + locals()['looked_up_by_load_name'] = True + passed = looked_up_by_load_name + +verify(X.passed) + +print "18. verify that locals() works" + +def f(x): + def g(y): + def h(z): + return y + z + w = x + y + y += 3 + return locals() + return g + +d = f(2)(4) +verify(d.has_key('h')) +del d['h'] +vereq(d, {'x': 2, 'y': 7, 'w': 6}) + +print "19. var is bound and free in class" + +def f(x): + class C: + def m(self): + return x + a = x + return C + +inst = f(3)() +vereq(inst.a, inst.m()) + +print "20. interaction with trace function" + +import sys +def tracer(a,b,c): + return tracer + +def adaptgetter(name, klass, getter): + kind, des = getter + if kind == 1: # AV happens when stepping from this line to next + if des == "": + des = "_%s__%s" % (klass.__name__, name) + return lambda obj: getattr(obj, des) + +class TestClass: + pass + +sys.settrace(tracer) +adaptgetter("foo", TestClass, (1, "")) +sys.settrace(None) + +try: sys.settrace() +except TypeError: pass +else: raise TestFailed, 'sys.settrace() did not raise TypeError' + +print "20. eval and exec with free variables" + +def f(x): + return lambda: x + 1 + +g = f(3) +try: + eval(g.func_code) +except TypeError: + pass +else: + print "eval() should have failed, because code contained free vars" + +try: + exec g.func_code +except TypeError: + pass +else: + print "exec should have failed, because code contained free vars" + +print "21. 
list comprehension with local variables" + +try: + print bad +except NameError: + pass +else: + print "bad should not be defined" + +def x(): + [bad for s in 'a b' for bad in s.split()] + +x() +try: + print bad +except NameError: + pass + +print "22. eval with free variables" + +def f(x): + def g(): + x + eval("x + 1") + return g + +f(4)() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,1537 @@ +import unittest +from test import test_support +#from weakref import proxy +import operator +import copy +import pickle +import os +from random import randrange, shuffle +import sys + +class PassThru(Exception): + pass + +def check_pass_thru(): + raise PassThru + yield 1 + +class BadCmp: + def __hash__(self): + return 1 + def __cmp__(self, other): + raise RuntimeError + +class ReprWrapper: + 'Used to test self-referential repr() calls' + def __repr__(self): + return repr(self.value) + +class HashCountingInt(int): + 'int-like object that counts the number of times __hash__ is called' + def __init__(self, *args): + self.hash_count = 0 + def __hash__(self): + self.hash_count += 1 + return int.__hash__(self) + +class TestJointOps(unittest.TestCase): + # Tests common to both set and frozenset + + def setUp(self): + self.word = word = 'simsalabim' + self.otherword = 'madagascar' + self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + self.s = self.thetype(word) + self.d = dict.fromkeys(word) + + def test_new_or_init(self): + self.assertRaises(TypeError, self.thetype, [], 2) + + def test_uniquification(self): + actual = sorted(self.s) + expected = sorted(self.d) + self.assertEqual(actual, expected) + self.assertRaises(PassThru, self.thetype, check_pass_thru()) + self.assertRaises(TypeError, self.thetype, [[]]) + + def test_len(self): + self.assertEqual(len(self.s), len(self.d)) + + def test_contains(self): + for c in self.letters: + self.assertEqual(c in self.s, c in self.d) + self.assertRaises(TypeError, self.s.__contains__, [[]]) + s = self.thetype([frozenset(self.letters)]) + self.assert_(self.thetype(self.letters) in s) + + def test_union(self): + u = self.s.union(self.otherword) + for c in self.letters: + self.assertEqual(c in u, c in self.d or c in self.otherword) + self.assertEqual(self.s, self.thetype(self.word)) + self.assertEqual(type(u), self.thetype) + self.assertRaises(PassThru, self.s.union, check_pass_thru()) + self.assertRaises(TypeError, self.s.union, [[]]) + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd')) + self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg')) + self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc')) + self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef')) + + def test_or(self): + i = self.s.union(self.otherword) + self.assertEqual(self.s | set(self.otherword), i) + self.assertEqual(self.s | frozenset(self.otherword), i) + try: + self.s | self.otherword + except TypeError: + pass + else: + self.fail("s|t did not screen-out general iterables") + + def test_intersection(self): + i = self.s.intersection(self.otherword) + for c in self.letters: + self.assertEqual(c in i, c in self.d and c in self.otherword) + self.assertEqual(self.s, self.thetype(self.word)) + self.assertEqual(type(i), 
self.thetype) + self.assertRaises(PassThru, self.s.intersection, check_pass_thru()) + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc')) + self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set('')) + self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc')) + self.assertEqual(self.thetype('abcba').intersection(C('ef')), set('')) + + def test_and(self): + i = self.s.intersection(self.otherword) + self.assertEqual(self.s & set(self.otherword), i) + self.assertEqual(self.s & frozenset(self.otherword), i) + try: + self.s & self.otherword + except TypeError: + pass + else: + self.fail("s&t did not screen-out general iterables") + + def test_difference(self): + i = self.s.difference(self.otherword) + for c in self.letters: + self.assertEqual(c in i, c in self.d and c not in self.otherword) + self.assertEqual(self.s, self.thetype(self.word)) + self.assertEqual(type(i), self.thetype) + self.assertRaises(PassThru, self.s.difference, check_pass_thru()) + self.assertRaises(TypeError, self.s.difference, [[]]) + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab')) + self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc')) + self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a')) + self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc')) + + def test_sub(self): + i = self.s.difference(self.otherword) + self.assertEqual(self.s - set(self.otherword), i) + self.assertEqual(self.s - frozenset(self.otherword), i) + try: + self.s - self.otherword + except TypeError: + pass + else: + self.fail("s-t did not screen-out general iterables") + + def test_symmetric_difference(self): + i = self.s.symmetric_difference(self.otherword) + for c in self.letters: + self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword)) + self.assertEqual(self.s, self.thetype(self.word)) + self.assertEqual(type(i), self.thetype) + self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru()) + self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd')) + self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg')) + self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a')) + self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef')) + + def test_xor(self): + i = self.s.symmetric_difference(self.otherword) + self.assertEqual(self.s ^ set(self.otherword), i) + self.assertEqual(self.s ^ frozenset(self.otherword), i) + try: + self.s ^ self.otherword + except TypeError: + pass + else: + self.fail("s^t did not screen-out general iterables") + + def test_equality(self): + self.assertEqual(self.s, set(self.word)) + self.assertEqual(self.s, frozenset(self.word)) + self.assertEqual(self.s == self.word, False) + self.assertNotEqual(self.s, set(self.otherword)) + self.assertNotEqual(self.s, frozenset(self.otherword)) + self.assertEqual(self.s != self.word, True) + + def test_setOfFrozensets(self): + t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba']) + s = self.thetype(t) + self.assertEqual(len(s), 3) + + def test_compare(self): + self.assertRaises(TypeError, self.s.__cmp__, self.s) + + def test_sub_and_super(self): + p, q, r = 
map(self.thetype, ['ab', 'abcde', 'def']) + self.assert_(p < q) + self.assert_(p <= q) + self.assert_(q <= q) + self.assert_(q > p) + self.assert_(q >= p) + self.failIf(q < r) + self.failIf(q <= r) + self.failIf(q > r) + self.failIf(q >= r) + self.assert_(set('a').issubset('abc')) + self.assert_(set('abc').issuperset('a')) + self.failIf(set('a').issubset('cbs')) + self.failIf(set('cbs').issuperset('a')) + + def test_pickling(self): + for i in (0, 1, 2): + p = pickle.dumps(self.s, i) + dup = pickle.loads(p) + self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup)) + if type(self.s) not in (set, frozenset): + self.s.x = 10 + p = pickle.dumps(self.s) + dup = pickle.loads(p) + self.assertEqual(self.s.x, dup.x) + + def test_deepcopy(self): + class Tracer: + def __init__(self, value): + self.value = value + def __hash__(self): + return self.value + def __deepcopy__(self, memo=None): + return Tracer(self.value + 1) + t = Tracer(10) + s = self.thetype([t]) + dup = copy.deepcopy(s) + self.assertNotEqual(id(s), id(dup)) + for elem in dup: + newt = elem + self.assertNotEqual(id(t), id(newt)) + self.assertEqual(t.value + 1, newt.value) + + def test_gc(self): + # Create a nest of cycles to exercise overall ref count check + class A: + pass + s = set(A() for i in xrange(1000)) + for elem in s: + elem.cycle = s + elem.sub = elem + elem.set = set([elem]) + + def test_subclass_with_custom_hash(self): + # Bug #1257731 + class H(self.thetype): + def __hash__(self): + return int(id(self) & 0x7fffffff) + s=H() + f=set() + f.add(s) + self.assert_(s in f) + f.remove(s) + f.add(s) + f.discard(s) + + def test_badcmp(self): + s = self.thetype([BadCmp()]) + # Detect comparison errors during insertion and lookup + self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()]) + self.assertRaises(RuntimeError, s.__contains__, BadCmp()) + # Detect errors during mutating operations + if hasattr(s, 'add'): + self.assertRaises(RuntimeError, s.add, BadCmp()) + self.assertRaises(RuntimeError, s.discard, BadCmp()) + self.assertRaises(RuntimeError, s.remove, BadCmp()) + + def test_cyclical_repr(self): + w = ReprWrapper() + s = self.thetype([w]) + w.value = s + name = repr(s).partition('(')[0] # strip class name from repr string + self.assertEqual(repr(s), '%s([%s(...)])' % (name, name)) + + def test_cyclical_print(self): + w = ReprWrapper() + s = self.thetype([w]) + w.value = s + try: + fo = open(test_support.TESTFN, "wb") + print >> fo, s, + fo.close() + fo = open(test_support.TESTFN, "rb") + self.assertEqual(fo.read(), repr(s)) + finally: + fo.close() + os.remove(test_support.TESTFN) + + def test_do_not_rehash_dict_keys(self): + n = 10 + d = dict.fromkeys(map(HashCountingInt, xrange(n))) + self.assertEqual(sum(elem.hash_count for elem in d), n) + s = self.thetype(d) + self.assertEqual(sum(elem.hash_count for elem in d), n) + s.difference(d) + self.assertEqual(sum(elem.hash_count for elem in d), n) + if hasattr(s, 'symmetric_difference_update'): + s.symmetric_difference_update(d) + self.assertEqual(sum(elem.hash_count for elem in d), n) + d2 = dict.fromkeys(set(d)) + self.assertEqual(sum(elem.hash_count for elem in d), n) + d3 = dict.fromkeys(frozenset(d)) + self.assertEqual(sum(elem.hash_count for elem in d), n) + d3 = dict.fromkeys(frozenset(d), 123) + self.assertEqual(sum(elem.hash_count for elem in d), n) + self.assertEqual(d3, dict.fromkeys(d, 123)) + +class TestSet(TestJointOps): + thetype = set + + def test_init(self): + s = self.thetype() + s.__init__(self.word) + self.assertEqual(s, set(self.word)) + 
s.__init__(self.otherword) + self.assertEqual(s, set(self.otherword)) + self.assertRaises(TypeError, s.__init__, s, 2); + self.assertRaises(TypeError, s.__init__, 1); + + def test_constructor_identity(self): + s = self.thetype(range(3)) + t = self.thetype(s) + self.assertNotEqual(id(s), id(t)) + + def test_hash(self): + self.assertRaises(TypeError, hash, self.s) + + def test_clear(self): + self.s.clear() + self.assertEqual(self.s, set()) + self.assertEqual(len(self.s), 0) + + def test_copy(self): + dup = self.s.copy() + self.assertEqual(self.s, dup) + self.assertNotEqual(id(self.s), id(dup)) + + def test_add(self): + self.s.add('Q') + self.assert_('Q' in self.s) + dup = self.s.copy() + self.s.add('Q') + self.assertEqual(self.s, dup) + self.assertRaises(TypeError, self.s.add, []) + + def test_remove(self): + self.s.remove('a') + self.assert_('a' not in self.s) + self.assertRaises(KeyError, self.s.remove, 'Q') + self.assertRaises(TypeError, self.s.remove, []) + s = self.thetype([frozenset(self.word)]) + self.assert_(self.thetype(self.word) in s) + s.remove(self.thetype(self.word)) + self.assert_(self.thetype(self.word) not in s) + self.assertRaises(KeyError, self.s.remove, self.thetype(self.word)) + + def test_remove_keyerror_unpacking(self): + # bug: www.python.org/sf/1576657 + for v1 in ['Q', (1,)]: + try: + self.s.remove(v1) + except KeyError, e: + v2 = e.args[0] + self.assertEqual(v1, v2) + else: + self.fail() + + def test_discard(self): + self.s.discard('a') + self.assert_('a' not in self.s) + self.s.discard('Q') + self.assertRaises(TypeError, self.s.discard, []) + s = self.thetype([frozenset(self.word)]) + self.assert_(self.thetype(self.word) in s) + s.discard(self.thetype(self.word)) + self.assert_(self.thetype(self.word) not in s) + s.discard(self.thetype(self.word)) + + def test_pop(self): + for i in xrange(len(self.s)): + elem = self.s.pop() + self.assert_(elem not in self.s) + self.assertRaises(KeyError, self.s.pop) + + def test_update(self): + retval = self.s.update(self.otherword) + self.assertEqual(retval, None) + for c in (self.word + self.otherword): + self.assert_(c in self.s) + self.assertRaises(PassThru, self.s.update, check_pass_thru()) + self.assertRaises(TypeError, self.s.update, [[]]) + for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')): + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + s = self.thetype('abcba') + self.assertEqual(s.update(C(p)), None) + self.assertEqual(s, set(q)) + + def test_ior(self): + self.s |= set(self.otherword) + for c in (self.word + self.otherword): + self.assert_(c in self.s) + + def test_intersection_update(self): + retval = self.s.intersection_update(self.otherword) + self.assertEqual(retval, None) + for c in (self.word + self.otherword): + if c in self.otherword and c in self.word: + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru()) + self.assertRaises(TypeError, self.s.intersection_update, [[]]) + for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')): + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + s = self.thetype('abcba') + self.assertEqual(s.intersection_update(C(p)), None) + self.assertEqual(s, set(q)) + + def test_iand(self): + self.s &= set(self.otherword) + for c in (self.word + self.otherword): + if c in self.otherword and c in self.word: + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + + def test_difference_update(self): + 
retval = self.s.difference_update(self.otherword) + self.assertEqual(retval, None) + for c in (self.word + self.otherword): + if c in self.word and c not in self.otherword: + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + self.assertRaises(PassThru, self.s.difference_update, check_pass_thru()) + self.assertRaises(TypeError, self.s.difference_update, [[]]) + self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) + for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')): + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + s = self.thetype('abcba') + self.assertEqual(s.difference_update(C(p)), None) + self.assertEqual(s, set(q)) + + def test_isub(self): + self.s -= set(self.otherword) + for c in (self.word + self.otherword): + if c in self.word and c not in self.otherword: + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + + def test_symmetric_difference_update(self): + retval = self.s.symmetric_difference_update(self.otherword) + self.assertEqual(retval, None) + for c in (self.word + self.otherword): + if (c in self.word) ^ (c in self.otherword): + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru()) + self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) + for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')): + for C in set, frozenset, dict.fromkeys, str, unicode, list, tuple: + s = self.thetype('abcba') + self.assertEqual(s.symmetric_difference_update(C(p)), None) + self.assertEqual(s, set(q)) + + def test_ixor(self): + self.s ^= set(self.otherword) + for c in (self.word + self.otherword): + if (c in self.word) ^ (c in self.otherword): + self.assert_(c in self.s) + else: + self.assert_(c not in self.s) + + def test_inplace_on_self(self): + t = self.s.copy() + t |= t + self.assertEqual(t, self.s) + t &= t + self.assertEqual(t, self.s) + t -= t + self.assertEqual(t, self.thetype()) + t = self.s.copy() + t ^= t + self.assertEqual(t, self.thetype()) + + # XXX disabled until weakref works + def XXXtest_weakref(self): + s = self.thetype('gallahad') + p = proxy(s) + self.assertEqual(str(p), str(s)) + s = None + self.assertRaises(ReferenceError, str, p) + + # C API test only available in a debug build + if hasattr(set, "test_c_api"): + def test_c_api(self): + self.assertEqual(set('abc').test_c_api(), True) + +class SetSubclass(set): + pass + +class TestSetSubclass(TestSet): + thetype = SetSubclass + +class SetSubclassWithKeywordArgs(set): + def __init__(self, iterable=[], newarg=None): + set.__init__(self, iterable) + +class TestSetSubclassWithKeywordArgs(TestSet): + + def test_keywords_in_subclass(self): + 'SF bug #1486663 -- this used to erroneously raise a TypeError' + SetSubclassWithKeywordArgs(newarg=1) + +class TestFrozenSet(TestJointOps): + thetype = frozenset + + def test_init(self): + s = self.thetype(self.word) + s.__init__(self.otherword) + self.assertEqual(s, set(self.word)) + + def test_singleton_empty_frozenset(self): + f = frozenset() + efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''), + frozenset(), frozenset([]), frozenset(()), frozenset(''), + frozenset(xrange(0)), frozenset(frozenset()), + frozenset(f), f] + # All of the empty frozensets should have just one id() + self.assertEqual(len(set(map(id, efs))), 1) + + def test_constructor_identity(self): + s = self.thetype(range(3)) + t = self.thetype(s) + self.assertEqual(id(s), id(t)) 
+ + def test_hash(self): + self.assertEqual(hash(self.thetype('abcdeb')), + hash(self.thetype('ebecda'))) + + # make sure that all permutations give the same hash value + n = 100 + seq = [randrange(n) for i in xrange(n)] + results = set() + for i in xrange(200): + shuffle(seq) + results.add(hash(self.thetype(seq))) + self.assertEqual(len(results), 1) + + def test_copy(self): + dup = self.s.copy() + self.assertEqual(id(self.s), id(dup)) + + def test_frozen_as_dictkey(self): + seq = range(10) + list('abcdefg') + ['apple'] + key1 = self.thetype(seq) + key2 = self.thetype(reversed(seq)) + self.assertEqual(key1, key2) + self.assertNotEqual(id(key1), id(key2)) + d = {} + d[key1] = 42 + self.assertEqual(d[key2], 42) + + def test_hash_caching(self): + f = self.thetype('abcdcda') + self.assertEqual(hash(f), hash(f)) + + # disabled since it depends on CPython specific hash algorithm + def _test_hash_effectiveness(self): + n = 13 + hashvalues = set() + addhashvalue = hashvalues.add + elemmasks = [(i+1, 1<> fo, self.set, + fo.close() + fo = open(test_support.TESTFN, "rb") + self.assertEqual(fo.read(), repr(self.set)) + finally: + fo.close() + os.remove(test_support.TESTFN) + + def test_length(self): + self.assertEqual(len(self.set), self.length) + + def test_self_equality(self): + self.assertEqual(self.set, self.set) + + def test_equivalent_equality(self): + self.assertEqual(self.set, self.dup) + + def test_copy(self): + self.assertEqual(self.set.copy(), self.dup) + + def test_self_union(self): + result = self.set | self.set + self.assertEqual(result, self.dup) + + def test_empty_union(self): + result = self.set | empty_set + self.assertEqual(result, self.dup) + + def test_union_empty(self): + result = empty_set | self.set + self.assertEqual(result, self.dup) + + def test_self_intersection(self): + result = self.set & self.set + self.assertEqual(result, self.dup) + + def test_empty_intersection(self): + result = self.set & empty_set + self.assertEqual(result, empty_set) + + def test_intersection_empty(self): + result = empty_set & self.set + self.assertEqual(result, empty_set) + + def test_self_symmetric_difference(self): + result = self.set ^ self.set + self.assertEqual(result, empty_set) + + def checkempty_symmetric_difference(self): + result = self.set ^ empty_set + self.assertEqual(result, self.set) + + def test_self_difference(self): + result = self.set - self.set + self.assertEqual(result, empty_set) + + def test_empty_difference(self): + result = self.set - empty_set + self.assertEqual(result, self.dup) + + def test_empty_difference_rev(self): + result = empty_set - self.set + self.assertEqual(result, empty_set) + + def test_iteration(self): + for v in self.set: + self.assert_(v in self.values) + setiter = iter(self.set) + # note: __length_hint__ is an internal undocumented API, + # don't rely on it in your own programs + self.assertEqual(setiter.__length_hint__(), len(self.set)) + + def test_pickling(self): + p = pickle.dumps(self.set) + copy = pickle.loads(p) + self.assertEqual(self.set, copy, + "%s != %s" % (self.set, copy)) + +#------------------------------------------------------------------------------ + +class TestBasicOpsEmpty(TestBasicOps): + def setUp(self): + self.case = "empty set" + self.values = [] + self.set = set(self.values) + self.dup = set(self.values) + self.length = 0 + self.repr = "set([])" + +#------------------------------------------------------------------------------ + +class TestBasicOpsSingleton(TestBasicOps): + def setUp(self): + self.case = "unit set (number)" + 
self.values = [3] + self.set = set(self.values) + self.dup = set(self.values) + self.length = 1 + self.repr = "set([3])" + + def test_in(self): + self.failUnless(3 in self.set) + + def test_not_in(self): + self.failUnless(2 not in self.set) + +#------------------------------------------------------------------------------ + +class TestBasicOpsTuple(TestBasicOps): + def setUp(self): + self.case = "unit set (tuple)" + self.values = [(0, "zero")] + self.set = set(self.values) + self.dup = set(self.values) + self.length = 1 + self.repr = "set([(0, 'zero')])" + + def test_in(self): + self.failUnless((0, "zero") in self.set) + + def test_not_in(self): + self.failUnless(9 not in self.set) + +#------------------------------------------------------------------------------ + +class TestBasicOpsTriple(TestBasicOps): + def setUp(self): + self.case = "triple set" + self.values = [0, "zero", operator.add] + self.set = set(self.values) + self.dup = set(self.values) + self.length = 3 + self.repr = None + +#============================================================================== + +def baditer(): + raise TypeError + yield True + +def gooditer(): + yield True + +class TestExceptionPropagation(unittest.TestCase): + """SF 628246: Set constructor should not trap iterator TypeErrors""" + + def test_instanceWithException(self): + self.assertRaises(TypeError, set, baditer()) + + def test_instancesWithoutException(self): + # All of these iterables should load without exception. + set([1,2,3]) + set((1,2,3)) + set({'one':1, 'two':2, 'three':3}) + set(xrange(3)) + set('abc') + set(gooditer()) + + def test_changingSizeWhileIterating(self): + s = set([1,2,3]) + try: + for i in s: + s.update([4]) + except RuntimeError: + pass + else: + self.fail("no exception when changing size during iteration") + +#============================================================================== + +class TestSetOfSets(unittest.TestCase): + def test_constructor(self): + inner = frozenset([1]) + outer = set([inner]) + element = outer.pop() + self.assertEqual(type(element), frozenset) + outer.add(inner) # Rebuild set of sets with .add method + outer.remove(inner) + self.assertEqual(outer, set()) # Verify that remove worked + outer.discard(inner) # Absence of KeyError indicates working fine + +#============================================================================== + +class TestBinaryOps(unittest.TestCase): + def setUp(self): + self.set = set((2, 4, 6)) + + def test_eq(self): # SF bug 643115 + self.assertEqual(self.set, set({2:1,4:3,6:5})) + + def test_union_subset(self): + result = self.set | set([2]) + self.assertEqual(result, set((2, 4, 6))) + + def test_union_superset(self): + result = self.set | set([2, 4, 6, 8]) + self.assertEqual(result, set([2, 4, 6, 8])) + + def test_union_overlap(self): + result = self.set | set([3, 4, 5]) + self.assertEqual(result, set([2, 3, 4, 5, 6])) + + def test_union_non_overlap(self): + result = self.set | set([8]) + self.assertEqual(result, set([2, 4, 6, 8])) + + def test_intersection_subset(self): + result = self.set & set((2, 4)) + self.assertEqual(result, set((2, 4))) + + def test_intersection_superset(self): + result = self.set & set([2, 4, 6, 8]) + self.assertEqual(result, set([2, 4, 6])) + + def test_intersection_overlap(self): + result = self.set & set([3, 4, 5]) + self.assertEqual(result, set([4])) + + def test_intersection_non_overlap(self): + result = self.set & set([8]) + self.assertEqual(result, empty_set) + + def test_sym_difference_subset(self): + result = self.set ^ set((2, 4)) 
+ self.assertEqual(result, set([6])) + + def test_sym_difference_superset(self): + result = self.set ^ set((2, 4, 6, 8)) + self.assertEqual(result, set([8])) + + def test_sym_difference_overlap(self): + result = self.set ^ set((3, 4, 5)) + self.assertEqual(result, set([2, 3, 5, 6])) + + def test_sym_difference_non_overlap(self): + result = self.set ^ set([8]) + self.assertEqual(result, set([2, 4, 6, 8])) + + def test_cmp(self): + a, b = set('a'), set('b') + self.assertRaises(TypeError, cmp, a, b) + + # You can view this as a buglet: cmp(a, a) does not raise TypeError, + # because __eq__ is tried before __cmp__, and a.__eq__(a) returns True, + # which Python thinks is good enough to synthesize a cmp() result + # without calling __cmp__. + self.assertEqual(cmp(a, a), 0) + + self.assertRaises(TypeError, cmp, a, 12) + self.assertRaises(TypeError, cmp, "abc", a) + +#============================================================================== + +class TestUpdateOps(unittest.TestCase): + def setUp(self): + self.set = set((2, 4, 6)) + + def test_union_subset(self): + self.set |= set([2]) + self.assertEqual(self.set, set((2, 4, 6))) + + def test_union_superset(self): + self.set |= set([2, 4, 6, 8]) + self.assertEqual(self.set, set([2, 4, 6, 8])) + + def test_union_overlap(self): + self.set |= set([3, 4, 5]) + self.assertEqual(self.set, set([2, 3, 4, 5, 6])) + + def test_union_non_overlap(self): + self.set |= set([8]) + self.assertEqual(self.set, set([2, 4, 6, 8])) + + def test_union_method_call(self): + self.set.update(set([3, 4, 5])) + self.assertEqual(self.set, set([2, 3, 4, 5, 6])) + + def test_intersection_subset(self): + self.set &= set((2, 4)) + self.assertEqual(self.set, set((2, 4))) + + def test_intersection_superset(self): + self.set &= set([2, 4, 6, 8]) + self.assertEqual(self.set, set([2, 4, 6])) + + def test_intersection_overlap(self): + self.set &= set([3, 4, 5]) + self.assertEqual(self.set, set([4])) + + def test_intersection_non_overlap(self): + self.set &= set([8]) + self.assertEqual(self.set, empty_set) + + def test_intersection_method_call(self): + self.set.intersection_update(set([3, 4, 5])) + self.assertEqual(self.set, set([4])) + + def test_sym_difference_subset(self): + self.set ^= set((2, 4)) + self.assertEqual(self.set, set([6])) + + def test_sym_difference_superset(self): + self.set ^= set((2, 4, 6, 8)) + self.assertEqual(self.set, set([8])) + + def test_sym_difference_overlap(self): + self.set ^= set((3, 4, 5)) + self.assertEqual(self.set, set([2, 3, 5, 6])) + + def test_sym_difference_non_overlap(self): + self.set ^= set([8]) + self.assertEqual(self.set, set([2, 4, 6, 8])) + + def test_sym_difference_method_call(self): + self.set.symmetric_difference_update(set([3, 4, 5])) + self.assertEqual(self.set, set([2, 3, 5, 6])) + + def test_difference_subset(self): + self.set -= set((2, 4)) + self.assertEqual(self.set, set([6])) + + def test_difference_superset(self): + self.set -= set((2, 4, 6, 8)) + self.assertEqual(self.set, set([])) + + def test_difference_overlap(self): + self.set -= set((3, 4, 5)) + self.assertEqual(self.set, set([2, 6])) + + def test_difference_non_overlap(self): + self.set -= set([8]) + self.assertEqual(self.set, set([2, 4, 6])) + + def test_difference_method_call(self): + self.set.difference_update(set([3, 4, 5])) + self.assertEqual(self.set, set([2, 6])) + +#============================================================================== + +class TestMutate(unittest.TestCase): + def setUp(self): + self.values = ["a", "b", "c"] + self.set = 
set(self.values) + + def test_add_present(self): + self.set.add("c") + self.assertEqual(self.set, set("abc")) + + def test_add_absent(self): + self.set.add("d") + self.assertEqual(self.set, set("abcd")) + + def test_add_until_full(self): + tmp = set() + expected_len = 0 + for v in self.values: + tmp.add(v) + expected_len += 1 + self.assertEqual(len(tmp), expected_len) + self.assertEqual(tmp, self.set) + + def test_remove_present(self): + self.set.remove("b") + self.assertEqual(self.set, set("ac")) + + def test_remove_absent(self): + try: + self.set.remove("d") + self.fail("Removing missing element should have raised LookupError") + except LookupError: + pass + + def test_remove_until_empty(self): + expected_len = len(self.set) + for v in self.values: + self.set.remove(v) + expected_len -= 1 + self.assertEqual(len(self.set), expected_len) + + def test_discard_present(self): + self.set.discard("c") + self.assertEqual(self.set, set("ab")) + + def test_discard_absent(self): + self.set.discard("d") + self.assertEqual(self.set, set("abc")) + + def test_clear(self): + self.set.clear() + self.assertEqual(len(self.set), 0) + + def test_pop(self): + popped = {} + while self.set: + popped[self.set.pop()] = None + self.assertEqual(len(popped), len(self.values)) + for v in self.values: + self.failUnless(v in popped) + + def test_update_empty_tuple(self): + self.set.update(()) + self.assertEqual(self.set, set(self.values)) + + def test_update_unit_tuple_overlap(self): + self.set.update(("a",)) + self.assertEqual(self.set, set(self.values)) + + def test_update_unit_tuple_non_overlap(self): + self.set.update(("a", "z")) + self.assertEqual(self.set, set(self.values + ["z"])) + +#============================================================================== + +class TestSubsets(unittest.TestCase): + + case2method = {"<=": "issubset", + ">=": "issuperset", + } + + reverse = {"==": "==", + "!=": "!=", + "<": ">", + ">": "<", + "<=": ">=", + ">=": "<=", + } + + def test_issubset(self): + x = self.left + y = self.right + for case in "!=", "==", "<", "<=", ">", ">=": + expected = case in self.cases + # Test the binary infix spelling. + result = eval("x" + case + "y", locals()) + self.assertEqual(result, expected) + # Test the "friendly" method-name spelling, if one exists. + if case in TestSubsets.case2method: + method = getattr(x, TestSubsets.case2method[case]) + result = method(y) + self.assertEqual(result, expected) + + # Now do the same for the operands reversed. 
+ rcase = TestSubsets.reverse[case] + result = eval("y" + rcase + "x", locals()) + self.assertEqual(result, expected) + if rcase in TestSubsets.case2method: + method = getattr(y, TestSubsets.case2method[rcase]) + result = method(x) + self.assertEqual(result, expected) +#------------------------------------------------------------------------------ + +class TestSubsetEqualEmpty(TestSubsets): + left = set() + right = set() + name = "both empty" + cases = "==", "<=", ">=" + +#------------------------------------------------------------------------------ + +class TestSubsetEqualNonEmpty(TestSubsets): + left = set([1, 2]) + right = set([1, 2]) + name = "equal pair" + cases = "==", "<=", ">=" + +#------------------------------------------------------------------------------ + +class TestSubsetEmptyNonEmpty(TestSubsets): + left = set() + right = set([1, 2]) + name = "one empty, one non-empty" + cases = "!=", "<", "<=" + +#------------------------------------------------------------------------------ + +class TestSubsetPartial(TestSubsets): + left = set([1]) + right = set([1, 2]) + name = "one a non-empty proper subset of other" + cases = "!=", "<", "<=" + +#------------------------------------------------------------------------------ + +class TestSubsetNonOverlap(TestSubsets): + left = set([1]) + right = set([2]) + name = "neither empty, neither contains" + cases = "!=" + +#============================================================================== + +class TestOnlySetsInBinaryOps(unittest.TestCase): + + def test_eq_ne(self): + # Unlike the others, this is testing that == and != *are* allowed. + self.assertEqual(self.other == self.set, False) + self.assertEqual(self.set == self.other, False) + self.assertEqual(self.other != self.set, True) + self.assertEqual(self.set != self.other, True) + + def test_ge_gt_le_lt(self): + self.assertRaises(TypeError, lambda: self.set < self.other) + self.assertRaises(TypeError, lambda: self.set <= self.other) + self.assertRaises(TypeError, lambda: self.set > self.other) + self.assertRaises(TypeError, lambda: self.set >= self.other) + + self.assertRaises(TypeError, lambda: self.other < self.set) + self.assertRaises(TypeError, lambda: self.other <= self.set) + self.assertRaises(TypeError, lambda: self.other > self.set) + self.assertRaises(TypeError, lambda: self.other >= self.set) + + def test_update_operator(self): + try: + self.set |= self.other + except TypeError: + pass + else: + self.fail("expected TypeError") + + def test_update(self): + if self.otherIsIterable: + self.set.update(self.other) + else: + self.assertRaises(TypeError, self.set.update, self.other) + + def test_union(self): + self.assertRaises(TypeError, lambda: self.set | self.other) + self.assertRaises(TypeError, lambda: self.other | self.set) + if self.otherIsIterable: + self.set.union(self.other) + else: + self.assertRaises(TypeError, self.set.union, self.other) + + def test_intersection_update_operator(self): + try: + self.set &= self.other + except TypeError: + pass + else: + self.fail("expected TypeError") + + def test_intersection_update(self): + if self.otherIsIterable: + self.set.intersection_update(self.other) + else: + self.assertRaises(TypeError, + self.set.intersection_update, + self.other) + + def test_intersection(self): + self.assertRaises(TypeError, lambda: self.set & self.other) + self.assertRaises(TypeError, lambda: self.other & self.set) + if self.otherIsIterable: + self.set.intersection(self.other) + else: + self.assertRaises(TypeError, self.set.intersection, self.other) + + 
def test_sym_difference_update_operator(self): + try: + self.set ^= self.other + except TypeError: + pass + else: + self.fail("expected TypeError") + + def test_sym_difference_update(self): + if self.otherIsIterable: + self.set.symmetric_difference_update(self.other) + else: + self.assertRaises(TypeError, + self.set.symmetric_difference_update, + self.other) + + def test_sym_difference(self): + self.assertRaises(TypeError, lambda: self.set ^ self.other) + self.assertRaises(TypeError, lambda: self.other ^ self.set) + if self.otherIsIterable: + self.set.symmetric_difference(self.other) + else: + self.assertRaises(TypeError, self.set.symmetric_difference, self.other) + + def test_difference_update_operator(self): + try: + self.set -= self.other + except TypeError: + pass + else: + self.fail("expected TypeError") + + def test_difference_update(self): + if self.otherIsIterable: + self.set.difference_update(self.other) + else: + self.assertRaises(TypeError, + self.set.difference_update, + self.other) + + def test_difference(self): + self.assertRaises(TypeError, lambda: self.set - self.other) + self.assertRaises(TypeError, lambda: self.other - self.set) + if self.otherIsIterable: + self.set.difference(self.other) + else: + self.assertRaises(TypeError, self.set.difference, self.other) + +#------------------------------------------------------------------------------ + +class TestOnlySetsNumeric(TestOnlySetsInBinaryOps): + def setUp(self): + self.set = set((1, 2, 3)) + self.other = 19 + self.otherIsIterable = False + +#------------------------------------------------------------------------------ + +class TestOnlySetsDict(TestOnlySetsInBinaryOps): + def setUp(self): + self.set = set((1, 2, 3)) + self.other = {1:2, 3:4} + self.otherIsIterable = True + +#------------------------------------------------------------------------------ + +class TestOnlySetsOperator(TestOnlySetsInBinaryOps): + def setUp(self): + self.set = set((1, 2, 3)) + self.other = operator.add + self.otherIsIterable = False + +#------------------------------------------------------------------------------ + +class TestOnlySetsTuple(TestOnlySetsInBinaryOps): + def setUp(self): + self.set = set((1, 2, 3)) + self.other = (2, 4, 6) + self.otherIsIterable = True + +#------------------------------------------------------------------------------ + +class TestOnlySetsString(TestOnlySetsInBinaryOps): + def setUp(self): + self.set = set((1, 2, 3)) + self.other = 'abc' + self.otherIsIterable = True + +#------------------------------------------------------------------------------ + +class TestOnlySetsGenerator(TestOnlySetsInBinaryOps): + def setUp(self): + def gen(): + for i in xrange(0, 10, 2): + yield i + self.set = set((1, 2, 3)) + self.other = gen() + self.otherIsIterable = True + +#============================================================================== + +class TestCopying(unittest.TestCase): + + def test_copy(self): + dup = self.set.copy() + dup_list = list(dup); dup_list.sort() + set_list = list(self.set); set_list.sort() + self.assertEqual(len(dup_list), len(set_list)) + for i in range(len(dup_list)): + self.failUnless(dup_list[i] is set_list[i]) + + def test_deep_copy(self): + dup = copy.deepcopy(self.set) + ##print type(dup), repr(dup) + dup_list = list(dup); dup_list.sort() + set_list = list(self.set); set_list.sort() + self.assertEqual(len(dup_list), len(set_list)) + for i in range(len(dup_list)): + self.assertEqual(dup_list[i], set_list[i]) + +#------------------------------------------------------------------------------ + 
+class TestCopyingEmpty(TestCopying): + def setUp(self): + self.set = set() + +#------------------------------------------------------------------------------ + +class TestCopyingSingleton(TestCopying): + def setUp(self): + self.set = set(["hello"]) + +#------------------------------------------------------------------------------ + +class TestCopyingTriple(TestCopying): + def setUp(self): + self.set = set(["zero", 0, None]) + +#------------------------------------------------------------------------------ + +class TestCopyingTuple(TestCopying): + def setUp(self): + self.set = set([(1, 2)]) + +#------------------------------------------------------------------------------ + +class TestCopyingNested(TestCopying): + def setUp(self): + self.set = set([((1, 2), (3, 4))]) + +#============================================================================== + +class TestIdentities(unittest.TestCase): + def setUp(self): + self.a = set('abracadabra') + self.b = set('alacazam') + + def test_binopsVsSubsets(self): + a, b = self.a, self.b + self.assert_(a - b < a) + self.assert_(b - a < b) + self.assert_(a & b < a) + self.assert_(a & b < b) + self.assert_(a | b > a) + self.assert_(a | b > b) + self.assert_(a ^ b < a | b) + + def test_commutativity(self): + a, b = self.a, self.b + self.assertEqual(a&b, b&a) + self.assertEqual(a|b, b|a) + self.assertEqual(a^b, b^a) + if a != b: + self.assertNotEqual(a-b, b-a) + + def test_summations(self): + # check that sums of parts equal the whole + a, b = self.a, self.b + self.assertEqual((a-b)|(a&b)|(b-a), a|b) + self.assertEqual((a&b)|(a^b), a|b) + self.assertEqual(a|(b-a), a|b) + self.assertEqual((a-b)|b, a|b) + self.assertEqual((a-b)|(a&b), a) + self.assertEqual((b-a)|(a&b), b) + self.assertEqual((a-b)|(b-a), a^b) + + def test_exclusion(self): + # check that inverse operations show non-overlap + a, b, zero = self.a, self.b, set() + self.assertEqual((a-b)&b, zero) + self.assertEqual((b-a)&a, zero) + self.assertEqual((a&b)&(a^b), zero) + +# Tests derived from test_itertools.py ======================================= + +def R(seqn): + 'Regular generator' + for i in seqn: + yield i + +class G: + 'Sequence using __getitem__' + def __init__(self, seqn): + self.seqn = seqn + def __getitem__(self, i): + return self.seqn[i] + +class I: + 'Sequence using iterator protocol' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class Ig: + 'Sequence using iterator protocol defined with a generator' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + for val in self.seqn: + yield val + +class X: + 'Missing __getitem__ and __iter__' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def next(self): + if self.i >= len(self.seqn): raise StopIteration + v = self.seqn[self.i] + self.i += 1 + return v + +class N: + 'Iterator missing next()' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + +class E: + 'Test propagation of exceptions' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def next(self): + 3 // 0 + +class S: + 'Test immediate stop' + def __init__(self, seqn): + pass + def __iter__(self): + return self + def next(self): + raise StopIteration + +from itertools import chain, imap +def L(seqn): + 'Test multiple tiers of iterators' + return chain(imap(lambda x:x, R(Ig(G(seqn))))) 
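The helper classes R, G, I, Ig, X, N, E, S and L defined above are reused below to check that set() and frozenset() accept anything iterable, whether iteration comes from __iter__ or from __getitem__, and that exceptions raised while iterating propagate out of the constructor. A rough standalone illustration of the same idea, with throwaway classes rather than the ones from the test (Python 2 syntax, matching the suite):

    class ViaGetitem:
        'Iterable only through the old __getitem__ protocol.'
        def __init__(self, seq):
            self.seq = seq
        def __getitem__(self, i):
            return self.seq[i]

    class Exploding:
        'Iterator whose next() raises partway through.'
        def __iter__(self):
            return self
        def next(self):
            raise ValueError('boom')

    assert set(ViaGetitem('abc')) == set('abc')
    try:
        set(Exploding())
    except ValueError:
        pass        # the constructor lets the iterator's exception through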
+ +class TestVariousIteratorArgs(unittest.TestCase): + + def test_constructor(self): + for cons in (set, frozenset): + for s in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5)): + for g in (G, I, Ig, S, L, R): + self.assertEqual(sorted(cons(g(s))), sorted(g(s))) + self.assertRaises(TypeError, cons , X(s)) + self.assertRaises(TypeError, cons , N(s)) + self.assertRaises(ZeroDivisionError, cons , E(s)) + + def test_inline_methods(self): + s = set('november') + for data in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5), 'december'): + for meth in (s.union, s.intersection, s.difference, s.symmetric_difference): + for g in (G, I, Ig, L, R): + expected = meth(data) + actual = meth(G(data)) + self.assertEqual(sorted(actual), sorted(expected)) + self.assertRaises(TypeError, meth, X(s)) + self.assertRaises(TypeError, meth, N(s)) + self.assertRaises(ZeroDivisionError, meth, E(s)) + + def test_inplace_methods(self): + for data in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5), 'december'): + for methname in ('update', 'intersection_update', + 'difference_update', 'symmetric_difference_update'): + for g in (G, I, Ig, S, L, R): + s = set('january') + t = s.copy() + getattr(s, methname)(list(g(data))) + getattr(t, methname)(g(data)) + self.assertEqual(sorted(s), sorted(t)) + + self.assertRaises(TypeError, getattr(set('january'), methname), X(data)) + self.assertRaises(TypeError, getattr(set('january'), methname), N(data)) + self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data)) + +#============================================================================== + +def test_main(verbose=None): + from test import test_sets + test_classes = ( + TestSet, + TestSetSubclass, + TestSetSubclassWithKeywordArgs, + TestFrozenSet, + TestFrozenSetSubclass, + TestSetOfSets, + TestExceptionPropagation, + TestBasicOpsEmpty, + TestBasicOpsSingleton, + TestBasicOpsTuple, + TestBasicOpsTriple, + TestBinaryOps, + TestUpdateOps, + TestMutate, + TestSubsetEqualEmpty, + TestSubsetEqualNonEmpty, + TestSubsetEmptyNonEmpty, + TestSubsetPartial, + TestSubsetNonOverlap, + TestOnlySetsNumeric, + TestOnlySetsDict, + TestOnlySetsOperator, + TestOnlySetsTuple, + TestOnlySetsString, + TestOnlySetsGenerator, + TestCopyingEmpty, + TestCopyingSingleton, + TestCopyingTriple, + TestCopyingTuple, + TestCopyingNested, + TestIdentities, + TestVariousIteratorArgs, + ) + + test_support.run_unittest(*test_classes) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*test_classes) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,53 @@ +# Testing sha module (NIST's Secure Hash Algorithm) + +# use the three examples from Federal Information Processing Standards +# Publication 180-1, Secure Hash Standard, 1995 April 17 +# http://www.itl.nist.gov/div897/pubs/fip180-1.htm + +import sha +import unittest +from test import test_support + + +class SHATestCase(unittest.TestCase): + def check(self, data, digest): + # Check digest matches the expected value + obj = sha.new(data) + computed = obj.hexdigest() + self.assert_(computed == 
digest) + + # Verify that the value doesn't change between two consecutive + # digest operations. + computed_again = obj.hexdigest() + self.assert_(computed == computed_again) + + # Check hexdigest() output matches digest()'s output + digest = obj.digest() + hexd = "" + for c in digest: + hexd += '%02x' % ord(c) + self.assert_(computed == hexd) + + def test_case_1(self): + self.check("abc", + "a9993e364706816aba3e25717850c26c9cd0d89d") + + def test_case_2(self): + self.check("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + "84983e441c3bd26ebaae4aa1f95129e5e54670f1") + + # Disabled for PyPy + def too_slow_test_case_3(self): + self.check("a" * 1000000, + "34aa973cd4c4daa4f61eeb2bdbad27316534016f") + + def test_case_4(self): + self.check(chr(0xAA) * 80, + '4ca0ef38f1794b28a8f8ee110ee79d48ce13be25') + +def test_main(): + test_support.run_unittest(SHATestCase) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,111 @@ +# tests for slice objects; in particular the indices method. + +import unittest +from test import test_support + +import sys + +class SliceTest(unittest.TestCase): + + def test_constructor(self): + self.assertRaises(TypeError, slice) + self.assertRaises(TypeError, slice, 1, 2, 3, 4) + + def test_repr(self): + self.assertEqual(repr(slice(1, 2, 3)), "slice(1, 2, 3)") + + def test_hash(self): + # Verify clearing of SF bug #800796 + self.assertRaises(TypeError, hash, slice(5)) + self.assertRaises(TypeError, slice(5).__hash__) + + def test_cmp(self): + s1 = slice(1, 2, 3) + s2 = slice(1, 2, 3) + s3 = slice(1, 2, 4) + self.assertEqual(s1, s2) + self.assertNotEqual(s1, s3) + + class Exc(Exception): + pass + + class BadCmp(object): + def __eq__(self, other): + raise Exc + + s1 = slice(BadCmp()) + s2 = slice(BadCmp()) + self.assertRaises(Exc, cmp, s1, s2) + self.assertEqual(s1, s1) + + s1 = slice(1, BadCmp()) + s2 = slice(1, BadCmp()) + self.assertEqual(s1, s1) + self.assertRaises(Exc, cmp, s1, s2) + + s1 = slice(1, 2, BadCmp()) + s2 = slice(1, 2, BadCmp()) + self.assertEqual(s1, s1) + self.assertRaises(Exc, cmp, s1, s2) + + def test_members(self): + s = slice(1) + self.assertEqual(s.start, None) + self.assertEqual(s.stop, 1) + self.assertEqual(s.step, None) + + s = slice(1, 2) + self.assertEqual(s.start, 1) + self.assertEqual(s.stop, 2) + self.assertEqual(s.step, None) + + s = slice(1, 2, 3) + self.assertEqual(s.start, 1) + self.assertEqual(s.stop, 2) + self.assertEqual(s.step, 3) + + class AnyClass: + pass + + obj = AnyClass() + s = slice(obj) + self.assert_(s.stop is obj) + + def test_indices(self): + self.assertEqual(slice(None ).indices(10), (0, 10, 1)) + self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2)) + self.assertEqual(slice(1, None, 2).indices(10), (1, 10, 2)) + self.assertEqual(slice(None, None, -1).indices(10), (9, -1, -1)) + self.assertEqual(slice(None, None, -2).indices(10), (9, -1, -2)) + self.assertEqual(slice(3, None, -2).indices(10), (3, -1, -2)) + self.assertEqual( + slice(-100, 100 ).indices(10), + slice(None).indices(10) + ) + self.assertEqual( + slice(100, -100, -1).indices(10), + slice(None, None, -1).indices(10) + ) + self.assertEqual(slice(-100L, 100L, 2L).indices(10), (0, 10, 2)) + + self.assertEqual(range(10)[::sys.maxint - 1], [0]) + + # Disabled for PyPy since we 
don't really have these constraints for now + #self.assertRaises(OverflowError, slice(None).indices, 1L<<100) + + def test_setslice_without_getslice(self): + tmp = [] + class X(object): + def __setslice__(self, i, j, k): + tmp.append((i, j, k)) + + x = X() + x[1:2] = 42 + self.assertEquals(tmp, [(1, 2, 42)]) + + +def test_main(): + test_support.run_unittest(SliceTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,997 @@ +#!/usr/bin/env python + +import unittest +from test import test_support + +import socket +import select +import time +import thread, threading +import Queue +import sys, gc +import array +from weakref import proxy +import signal + +PORT = 50007 +HOST = 'localhost' +MSG = 'Michael Gilfix was here\n' + +class SocketTCPTest(unittest.TestCase): + + def setUp(self): + self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + global PORT + PORT = test_support.bind_port(self.serv, HOST, PORT) + self.serv.listen(1) + + def tearDown(self): + self.serv.close() + self.serv = None + +class SocketUDPTest(unittest.TestCase): + + def setUp(self): + self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + global PORT + PORT = test_support.bind_port(self.serv, HOST, PORT) + + def tearDown(self): + self.serv.close() + self.serv = None + +class ThreadableTest: + """Threadable Test class + + The ThreadableTest class makes it easy to create a threaded + client/server pair from an existing unit test. To create a + new threaded class from an existing unit test, use multiple + inheritance: + + class NewClass (OldClass, ThreadableTest): + pass + + This class defines two new fixture functions with obvious + purposes for overriding: + + clientSetUp () + clientTearDown () + + Any new test functions within the class must then define + tests in pairs, where the test name is preceeded with a + '_' to indicate the client portion of the test. Ex: + + def testFoo(self): + # Server portion + + def _testFoo(self): + # Client portion + + Any exceptions raised by the clients during their tests + are caught and transferred to the main thread to alert + the testing framework. + + Note, the server setup function cannot call any blocking + functions that rely on the client thread during setup, + unless serverExplicityReady() is called just before + the blocking call (such as in setting up a client/server + connection and performing the accept() in setUp(). + """ + + def __init__(self): + # Swap the true setup function + self.__setUp = self.setUp + self.__tearDown = self.tearDown + self.setUp = self._setUp + self.tearDown = self._tearDown + + def serverExplicitReady(self): + """This method allows the server to explicitly indicate that + it wants the client thread to proceed. This is useful if the + server is about to execute a blocking routine that is + dependent upon the client thread during its setup routine.""" + self.server_ready.set() + + def _setUp(self): + self.server_ready = threading.Event() + self.client_ready = threading.Event() + self.done = threading.Event() + self.queue = Queue.Queue(1) + + # Do some munging to start the client test. 
+ methodname = self.id() + i = methodname.rfind('.') + methodname = methodname[i+1:] + test_method = getattr(self, '_' + methodname) + self.client_thread = thread.start_new_thread( + self.clientRun, (test_method,)) + + self.__setUp() + if not self.server_ready.isSet(): + self.server_ready.set() + self.client_ready.wait() + + def _tearDown(self): + self.__tearDown() + self.done.wait() + + if not self.queue.empty(): + msg = self.queue.get() + self.fail(msg) + + def clientRun(self, test_func): + self.server_ready.wait() + self.client_ready.set() + self.clientSetUp() + if not callable(test_func): + raise TypeError, "test_func must be a callable function" + try: + test_func() + except Exception, strerror: + self.queue.put(strerror) + self.clientTearDown() + + def clientSetUp(self): + raise NotImplementedError, "clientSetUp must be implemented." + + def clientTearDown(self): + self.done.set() + thread.exit() + +class ThreadedTCPSocketTest(SocketTCPTest, ThreadableTest): + + def __init__(self, methodName='runTest'): + SocketTCPTest.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def clientSetUp(self): + self.cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + def clientTearDown(self): + self.cli.close() + self.cli = None + ThreadableTest.clientTearDown(self) + +class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest): + + def __init__(self, methodName='runTest'): + SocketUDPTest.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def clientSetUp(self): + self.cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + +class SocketConnectedTest(ThreadedTCPSocketTest): + + def __init__(self, methodName='runTest'): + ThreadedTCPSocketTest.__init__(self, methodName=methodName) + + def setUp(self): + ThreadedTCPSocketTest.setUp(self) + # Indicate explicitly we're ready for the client thread to + # proceed and then perform the blocking call to accept + self.serverExplicitReady() + conn, addr = self.serv.accept() + self.cli_conn = conn + + def tearDown(self): + self.cli_conn.close() + self.cli_conn = None + ThreadedTCPSocketTest.tearDown(self) + + def clientSetUp(self): + ThreadedTCPSocketTest.clientSetUp(self) + self.cli.connect((HOST, PORT)) + self.serv_conn = self.cli + + def clientTearDown(self): + self.serv_conn.close() + self.serv_conn = None + ThreadedTCPSocketTest.clientTearDown(self) + +class SocketPairTest(unittest.TestCase, ThreadableTest): + + def __init__(self, methodName='runTest'): + unittest.TestCase.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def setUp(self): + self.serv, self.cli = socket.socketpair() + + def tearDown(self): + self.serv.close() + self.serv = None + + def clientSetUp(self): + pass + + def clientTearDown(self): + self.cli.close() + self.cli = None + ThreadableTest.clientTearDown(self) + + +####################################################################### +## Begin Tests + +class GeneralModuleTests(unittest.TestCase): + + def test_weakref(self): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + p = proxy(s) + self.assertEqual(p.fileno(), s.fileno()) + s.close() + s = None + gc.collect() + gc.collect() + try: + p.fileno() + except ReferenceError: + pass + else: + self.fail('Socket proxy still exists') + + def testSocketError(self): + # Testing socket module exceptions + def raise_error(*args, **kwargs): + raise socket.error + def raise_herror(*args, **kwargs): + raise socket.herror + def raise_gaierror(*args, **kwargs): + raise socket.gaierror + 
self.failUnlessRaises(socket.error, raise_error, + "Error raising socket exception.") + self.failUnlessRaises(socket.error, raise_herror, + "Error raising socket exception.") + self.failUnlessRaises(socket.error, raise_gaierror, + "Error raising socket exception.") + + def testCrucialConstants(self): + # Testing for mission critical constants + socket.AF_INET + socket.SOCK_STREAM + socket.SOCK_DGRAM + socket.SOCK_RAW + socket.SOCK_RDM + socket.SOCK_SEQPACKET + socket.SOL_SOCKET + socket.SO_REUSEADDR + + def testHostnameRes(self): + # Testing hostname resolution mechanisms + hostname = socket.gethostname() + try: + ip = socket.gethostbyname(hostname) + except socket.error: + # Probably name lookup wasn't set up right; skip this test + return + self.assert_(ip.find('.') >= 0, "Error resolving host to ip.") + try: + hname, aliases, ipaddrs = socket.gethostbyaddr(ip) + except socket.error: + # Probably a similar problem as above; skip this test + return + all_host_names = [hostname, hname] + aliases + fqhn = socket.getfqdn(ip) + if not fqhn in all_host_names: + self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names))) + + def testRefCountGetNameInfo(self): + # Testing reference count for getnameinfo + import sys + if hasattr(sys, "getrefcount"): + try: + # On some versions, this loses a reference + orig = sys.getrefcount(__name__) + socket.getnameinfo(__name__,0) + except SystemError: + if sys.getrefcount(__name__) <> orig: + self.fail("socket.getnameinfo loses a reference") + + def testInterpreterCrash(self): + # Making sure getnameinfo doesn't crash the interpreter + try: + # On some versions, this crashes the interpreter. + socket.getnameinfo(('x', 0, 0, 0), 0) + except socket.error: + pass + + def testNtoH(self): + # This just checks that htons etc. are their own inverse, + # when looking at the lower 16 or 32 bits. + sizes = {socket.htonl: 32, socket.ntohl: 32, + socket.htons: 16, socket.ntohs: 16} + for func, size in sizes.items(): + mask = (1L< 2: + raise RuntimeError + def __del__(self): + del data[:] + data[:] = range(20) + self.assertRaises(RuntimeError, data.sort, key=SortKiller) + ## major honking subtlety: we *can't* do: + ## + ## self.assertEqual(data, dup) + ## + ## because there is a reference to a SortKiller in the + ## traceback and by the time it dies we're outside the call to + ## .sort() and so the list protection gimmicks are out of + ## date (this cost some brain cells to figure out...). 
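The reverse-stability test that follows leans on the fact that sort(reverse=True) reverses the comparison while keeping the sort stable, so items with equal keys stay in their original relative order; it is not the same as sorting ascending and then reversing the list. A small self-contained check of that property, on made-up data:

    data = [(1, 'a'), (2, 'b'), (1, 'c')]
    data.sort(key=lambda pair: pair[0], reverse=True)
    # Equal keys keep their original order even with reverse=True,
    # so (1, 'a') still comes before (1, 'c').
    assert data == [(2, 'b'), (1, 'a'), (1, 'c')]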
+ + def test_reverse(self): + data = range(100) + random.shuffle(data) + data.sort(reverse=True) + self.assertEqual(data, range(99,-1,-1)) + self.assertRaises(TypeError, data.sort, "wrong type") + + def test_reverse_stability(self): + data = [(random.randrange(100), i) for i in xrange(200)] + copy1 = data[:] + copy2 = data[:] + data.sort(cmp=lambda x,y: cmp(x[0],y[0]), reverse=True) + copy1.sort(cmp=lambda x,y: cmp(y[0],x[0])) + self.assertEqual(data, copy1) + copy2.sort(key=lambda x: x[0], reverse=True) + self.assertEqual(data, copy2) + +#============================================================================== + +def test_main(verbose=None): + test_classes = ( + TestBase, + TestDecorateSortUndecorate, + TestBugs, + ) + + test_support.run_unittest(*test_classes) + + # verify reference counting + if verbose and hasattr(sys, "gettotalrefcount"): + import gc + counts = [None] * 5 + for i in xrange(len(counts)): + test_support.run_unittest(*test_classes) + gc.collect() + counts[i] = sys.gettotalrefcount() + print counts + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,634 @@ +from test.test_support import TestFailed, verbose, verify, vereq +import test.test_support +import struct +import array +import warnings + +import sys +ISBIGENDIAN = sys.byteorder == "big" +del sys +verify((struct.pack('=i', 1)[0] == chr(0)) == ISBIGENDIAN, + "bigendian determination appears wrong") + +try: + import _struct +except ImportError: + PY_STRUCT_RANGE_CHECKING = 0 + PY_STRUCT_OVERFLOW_MASKING = 1 + PY_STRUCT_FLOAT_COERCE = 2 +else: + PY_STRUCT_RANGE_CHECKING = getattr(_struct, '_PY_STRUCT_RANGE_CHECKING', 0) + PY_STRUCT_OVERFLOW_MASKING = getattr(_struct, '_PY_STRUCT_OVERFLOW_MASKING', 0) + PY_STRUCT_FLOAT_COERCE = getattr(_struct, '_PY_STRUCT_FLOAT_COERCE', 0) + +def string_reverse(s): + return "".join(reversed(s)) + +def bigendian_to_native(value): + if ISBIGENDIAN: + return value + else: + return string_reverse(value) + +def simple_err(func, *args): + try: + func(*args) + except struct.error: + pass + else: + raise TestFailed, "%s%s did not raise struct.error" % ( + func.__name__, args) + +def any_err(func, *args): + try: + func(*args) + except (struct.error, OverflowError, TypeError, ValueError): + pass + else: + raise TestFailed, "%s%s did not raise error" % ( + func.__name__, args) + +def with_warning_restore(func): + def _with_warning_restore(*args, **kw): + # The `warnings` module doesn't have an advertised way to restore + # its filter list. Cheat. + save_warnings_filters = warnings.filters[:] + # Grrr, we need this function to warn every time. Without removing + # the warningregistry, running test_tarfile then test_struct would fail + # on 64-bit platforms. 
+ globals = func.func_globals + if '__warningregistry__' in globals: + del globals['__warningregistry__'] + warnings.filterwarnings("error", r"""^struct.*""", DeprecationWarning) + warnings.filterwarnings("error", r""".*format requires.*""", + DeprecationWarning) + try: + return func(*args, **kw) + finally: + warnings.filters[:] = save_warnings_filters[:] + return _with_warning_restore + +def deprecated_err(func, *args): + try: + func(*args) + except (struct.error, TypeError): + pass + except DeprecationWarning: + if not PY_STRUCT_OVERFLOW_MASKING: + raise TestFailed, "%s%s expected to raise struct.error" % ( + func.__name__, args) + else: + raise TestFailed, "%s%s did not raise error" % ( + func.__name__, args) +deprecated_err = with_warning_restore(deprecated_err) + + +simple_err(struct.calcsize, 'Z') + +sz = struct.calcsize('i') +if sz * 3 != struct.calcsize('iii'): + raise TestFailed, 'inconsistent sizes' + +fmt = 'cbxxxxxxhhhhiillffd' +fmt3 = '3c3b18x12h6i6l6f3d' +sz = struct.calcsize(fmt) +sz3 = struct.calcsize(fmt3) +if sz * 3 != sz3: + raise TestFailed, 'inconsistent sizes (3*%r -> 3*%d = %d, %r -> %d)' % ( + fmt, sz, 3*sz, fmt3, sz3) + +simple_err(struct.pack, 'iii', 3) +simple_err(struct.pack, 'i', 3, 3, 3) +any_err(struct.pack, 'i', 'foo') +any_err(struct.pack, 'P', 'foo') +any_err(struct.unpack, 'd', 'flap') +s = struct.pack('ii', 1, 2) +simple_err(struct.unpack, 'iii', s) +simple_err(struct.unpack, 'i', s) + +c = 'a' +b = 1 +h = 255 +i = 65535 +l = 65536 +f = 3.1415 +d = 3.1415 + +for prefix in ('', '@', '<', '>', '=', '!'): + for format in ('xcbhilfd', 'xcBHILfd'): + format = prefix + format + if verbose: + print "trying:", format + s = struct.pack(format, c, b, h, i, l, f, d) + cp, bp, hp, ip, lp, fp, dp = struct.unpack(format, s) + if (cp != c or bp != b or hp != h or ip != i or lp != l or + int(100 * fp) != int(100 * f) or int(100 * dp) != int(100 * d)): + # ^^^ calculate only to two decimal places + raise TestFailed, "unpack/pack not transitive (%s, %s)" % ( + str(format), str((cp, bp, hp, ip, lp, fp, dp))) + +# Test some of the new features in detail + +# (format, argument, big-endian result, little-endian result, asymmetric) +tests = [ + ('c', 'a', 'a', 'a', 0), + ('xc', 'a', '\0a', '\0a', 0), + ('cx', 'a', 'a\0', 'a\0', 0), + ('s', 'a', 'a', 'a', 0), + ('0s', 'helloworld', '', '', 1), + ('1s', 'helloworld', 'h', 'h', 1), + ('9s', 'helloworld', 'helloworl', 'helloworl', 1), + ('10s', 'helloworld', 'helloworld', 'helloworld', 0), + ('11s', 'helloworld', 'helloworld\0', 'helloworld\0', 1), + ('20s', 'helloworld', 'helloworld'+10*'\0', 'helloworld'+10*'\0', 1), + ('b', 7, '\7', '\7', 0), + ('b', -7, '\371', '\371', 0), + ('B', 7, '\7', '\7', 0), + ('B', 249, '\371', '\371', 0), + ('h', 700, '\002\274', '\274\002', 0), + ('h', -700, '\375D', 'D\375', 0), + ('H', 700, '\002\274', '\274\002', 0), + ('H', 0x10000-700, '\375D', 'D\375', 0), + ('i', 70000000, '\004,\035\200', '\200\035,\004', 0), + ('i', -70000000, '\373\323\342\200', '\200\342\323\373', 0), + ('I', 70000000L, '\004,\035\200', '\200\035,\004', 0), + ('I', 0x100000000L-70000000, '\373\323\342\200', '\200\342\323\373', 0), + ('l', 70000000, '\004,\035\200', '\200\035,\004', 0), + ('l', -70000000, '\373\323\342\200', '\200\342\323\373', 0), + ('L', 70000000L, '\004,\035\200', '\200\035,\004', 0), + ('L', 0x100000000L-70000000, '\373\323\342\200', '\200\342\323\373', 0), + ('f', 2.0, '@\000\000\000', '\000\000\000@', 0), + ('d', 2.0, '@\000\000\000\000\000\000\000', + '\000\000\000\000\000\000\000@', 0), + ('f', 
-2.0, '\300\000\000\000', '\000\000\000\300', 0), + ('d', -2.0, '\300\000\000\000\000\000\000\000', + '\000\000\000\000\000\000\000\300', 0), +] + +for fmt, arg, big, lil, asy in tests: + if verbose: + print "%r %r %r %r" % (fmt, arg, big, lil) + for (xfmt, exp) in [('>'+fmt, big), ('!'+fmt, big), ('<'+fmt, lil), + ('='+fmt, ISBIGENDIAN and big or lil)]: + res = struct.pack(xfmt, arg) + if res != exp: + raise TestFailed, "pack(%r, %r) -> %r # expected %r" % ( + fmt, arg, res, exp) + n = struct.calcsize(xfmt) + if n != len(res): + raise TestFailed, "calcsize(%r) -> %d # expected %d" % ( + xfmt, n, len(res)) + rev = struct.unpack(xfmt, res)[0] + if rev != arg and not asy: + raise TestFailed, "unpack(%r, %r) -> (%r,) # expected (%r,)" % ( + fmt, res, rev, arg) + +########################################################################### +# Simple native q/Q tests. + +has_native_qQ = 1 +try: + struct.pack("q", 5) +except struct.error: + has_native_qQ = 0 + +if verbose: + print "Platform has native q/Q?", has_native_qQ and "Yes." or "No." + +any_err(struct.pack, "Q", -1) # can't pack -1 as unsigned regardless +any_err(struct.pack, "q", "a") # can't pack string as 'q' regardless +any_err(struct.pack, "Q", "a") # ditto, but 'Q' + +def test_native_qQ(): + bytes = struct.calcsize('q') + # The expected values here are in big-endian format, primarily because + # I'm on a little-endian machine and so this is the clearest way (for + # me) to force the code to get exercised. + for format, input, expected in ( + ('q', -1, '\xff' * bytes), + ('q', 0, '\x00' * bytes), + ('Q', 0, '\x00' * bytes), + ('q', 1L, '\x00' * (bytes-1) + '\x01'), + ('Q', (1L << (8*bytes))-1, '\xff' * bytes), + ('q', (1L << (8*bytes-1))-1, '\x7f' + '\xff' * (bytes - 1))): + got = struct.pack(format, input) + native_expected = bigendian_to_native(expected) + verify(got == native_expected, + "%r-pack of %r gave %r, not %r" % + (format, input, got, native_expected)) + retrieved = struct.unpack(format, got)[0] + verify(retrieved == input, + "%r-unpack of %r gave %r, not %r" % + (format, got, retrieved, input)) + +if has_native_qQ: + test_native_qQ() + +########################################################################### +# Standard integer tests (bBhHiIlLqQ). + +import binascii + +class IntTester: + + # XXX Most std integer modes fail to test for out-of-range. + # The "i" and "l" codes appear to range-check OK on 32-bit boxes, but + # fail to check correctly on some 64-bit ones (Tru64 Unix + Compaq C + # reported by Mark Favas). + BUGGY_RANGE_CHECK = "bBhHiIlL" + + def __init__(self, formatpair, bytesize): + assert len(formatpair) == 2 + self.formatpair = formatpair + for direction in "<>!=": + for code in formatpair: + format = direction + code + verify(struct.calcsize(format) == bytesize) + self.bytesize = bytesize + self.bitsize = bytesize * 8 + self.signed_code, self.unsigned_code = formatpair + self.unsigned_min = 0 + self.unsigned_max = 2L**self.bitsize - 1 + self.signed_min = -(2L**(self.bitsize-1)) + self.signed_max = 2L**(self.bitsize-1) - 1 + + def test_one(self, x, pack=struct.pack, + unpack=struct.unpack, + unhexlify=binascii.unhexlify): + if verbose: + print "trying std", self.formatpair, "on", x, "==", hex(x) + + # Try signed. + code = self.signed_code + if self.signed_min <= x <= self.signed_max: + # Try big-endian. 
+ expected = long(x) + if x < 0: + expected += 1L << self.bitsize + assert expected > 0 + expected = hex(expected)[2:-1] # chop "0x" and trailing 'L' + if len(expected) & 1: + expected = "0" + expected + expected = unhexlify(expected) + expected = "\x00" * (self.bytesize - len(expected)) + expected + + # Pack work? + format = ">" + code + got = pack(format, x) + verify(got == expected, + "'%s'-pack of %r gave %r, not %r" % + (format, x, got, expected)) + + # Unpack work? + retrieved = unpack(format, got)[0] + verify(x == retrieved, + "'%s'-unpack of %r gave %r, not %r" % + (format, got, retrieved, x)) + + # Adding any byte should cause a "too big" error. + any_err(unpack, format, '\x01' + got) + + # Try little-endian. + format = "<" + code + expected = string_reverse(expected) + + # Pack work? + got = pack(format, x) + verify(got == expected, + "'%s'-pack of %r gave %r, not %r" % + (format, x, got, expected)) + + # Unpack work? + retrieved = unpack(format, got)[0] + verify(x == retrieved, + "'%s'-unpack of %r gave %r, not %r" % + (format, got, retrieved, x)) + + # Adding any byte should cause a "too big" error. + any_err(unpack, format, '\x01' + got) + + else: + # x is out of range -- verify pack realizes that. + if not PY_STRUCT_RANGE_CHECKING and code in self.BUGGY_RANGE_CHECK: + if verbose: + print "Skipping buggy range check for code", code + else: + deprecated_err(pack, ">" + code, x) + deprecated_err(pack, "<" + code, x) + + # Much the same for unsigned. + code = self.unsigned_code + if self.unsigned_min <= x <= self.unsigned_max: + # Try big-endian. + format = ">" + code + expected = long(x) + expected = hex(expected)[2:-1] # chop "0x" and trailing 'L' + if len(expected) & 1: + expected = "0" + expected + expected = unhexlify(expected) + expected = "\x00" * (self.bytesize - len(expected)) + expected + + # Pack work? + got = pack(format, x) + verify(got == expected, + "'%s'-pack of %r gave %r, not %r" % + (format, x, got, expected)) + + # Unpack work? + retrieved = unpack(format, got)[0] + verify(x == retrieved, + "'%s'-unpack of %r gave %r, not %r" % + (format, got, retrieved, x)) + + # Adding any byte should cause a "too big" error. + any_err(unpack, format, '\x01' + got) + + # Try little-endian. + format = "<" + code + expected = string_reverse(expected) + + # Pack work? + got = pack(format, x) + verify(got == expected, + "'%s'-pack of %r gave %r, not %r" % + (format, x, got, expected)) + + # Unpack work? + retrieved = unpack(format, got)[0] + verify(x == retrieved, + "'%s'-unpack of %r gave %r, not %r" % + (format, got, retrieved, x)) + + # Adding any byte should cause a "too big" error. + any_err(unpack, format, '\x01' + got) + + else: + # x is out of range -- verify pack realizes that. + if not PY_STRUCT_RANGE_CHECKING and code in self.BUGGY_RANGE_CHECK: + if verbose: + print "Skipping buggy range check for code", code + else: + deprecated_err(pack, ">" + code, x) + deprecated_err(pack, "<" + code, x) + + def run(self): + from random import randrange + + # Create all interesting powers of 2. + allvalues = [] + for exp in range(self.bitsize + 3): + allvalues.append(1L << exp) + + # reduce the number of values again + values = [] + i = 1 + while i <= len(allvalues): + values.append(allvalues[i-1]) + i *= 2 + + # + # XXX doesn't seem like good practice to run with random values + # + #for i in range(self.bitsize): + # val = 0L + # for j in range(self.bytesize): + # val = (val << 8) | randrange(256) + # values.append(val) + + # Try all those, and their negations, and +-1 from them. 
Note + # that this tests all power-of-2 boundaries in range, and a few out + # of range, plus +-(2**n +- 1). + for base in values: + for val in -base, base: + for incr in -1, 0, 1: + x = val + incr + try: + x = int(x) + except OverflowError: + pass + self.test_one(x) + + # Some error cases. + for direction in "<>": + for code in self.formatpair: + for badobject in "a string", 3+42j, randrange: + any_err(struct.pack, direction + code, badobject) + +for args in [("bB", 1), + ("hH", 2), + ("iI", 4), + ("lL", 4), + ("qQ", 8)]: + t = IntTester(*args) + t.run() + + +########################################################################### +# The p ("Pascal string") code. + +def test_p_code(): + for code, input, expected, expectedback in [ + ('p','abc', '\x00', ''), + ('1p', 'abc', '\x00', ''), + ('2p', 'abc', '\x01a', 'a'), + ('3p', 'abc', '\x02ab', 'ab'), + ('4p', 'abc', '\x03abc', 'abc'), + ('5p', 'abc', '\x03abc\x00', 'abc'), + ('6p', 'abc', '\x03abc\x00\x00', 'abc'), + ('1000p', 'x'*1000, '\xff' + 'x'*999, 'x'*255)]: + got = struct.pack(code, input) + if got != expected: + raise TestFailed("pack(%r, %r) == %r but expected %r" % + (code, input, got, expected)) + (got,) = struct.unpack(code, got) + if got != expectedback: + raise TestFailed("unpack(%r, %r) == %r but expected %r" % + (code, input, got, expectedback)) + +test_p_code() + + +########################################################################### +# SF bug 705836. "f" had a severe rounding bug, where a carry +# from the low-order discarded bits could propagate into the exponent +# field, causing the result to be wrong by a factor of 2. + +def test_705836(): + import math + + for base in range(1, 33): + # smaller <- largest representable float less than base. + delta = 0.5 + while base - delta / 2.0 != base: + delta /= 2.0 + smaller = base - delta + # Packing this rounds away a solid string of trailing 1 bits. + packed = struct.pack("<f", smaller) + unpacked = struct.unpack("<f", packed)[0] + verify(base == unpacked) + bigpacked = struct.pack(">f", smaller) + verify(bigpacked == string_reverse(packed), + ">f pack should be byte-reversal of <f pack") + unpacked = struct.unpack(">f", bigpacked)[0] + verify(base == unpacked) + + # Largest finite IEEE single. + big = (1 << 24) - 1 + big = math.ldexp(big, 127 - 23) + packed = struct.pack(">f", big) + unpacked = struct.unpack(">f", packed)[0] + verify(big == unpacked) + + # The same, but tack on a 1 bit so it rounds up to infinity. + big = (1 << 25) - 1 + big = math.ldexp(big, 127 - 24) + try: + packed = struct.pack(">f", big) + except OverflowError: + pass + else: + TestFailed("expected OverflowError") + +test_705836() + +########################################################################### +# SF bug 1229380. No struct.pack exception for some out of range integers + +def test_1229380(): + import sys + for endian in ('', '>', '<'): + for cls in (int, long): + for fmt in ('B', 'H', 'I', 'L'): + deprecated_err(struct.pack, endian + fmt, cls(-1)) + + deprecated_err(struct.pack, endian + 'B', cls(300)) + deprecated_err(struct.pack, endian + 'H', cls(70000)) + + deprecated_err(struct.pack, endian + 'I', sys.maxint * 4L) + deprecated_err(struct.pack, endian + 'L', sys.maxint * 4L) + +if PY_STRUCT_RANGE_CHECKING: + test_1229380() + +########################################################################### +# SF bug 1530559. struct.pack raises TypeError where it used to convert.
+ +def check_float_coerce(format, number): + if PY_STRUCT_FLOAT_COERCE == 2: + # Test for pre-2.5 struct module + packed = struct.pack(format, number) + floored = struct.unpack(format, packed)[0] + if floored != int(number): + raise TestFailed("did not correcly coerce float to int") + return + try: + func(*args) + except (struct.error, TypeError): + if PY_STRUCT_FLOAT_COERCE: + raise TestFailed("expected DeprecationWarning for float coerce") + except DeprecationWarning: + if not PY_STRUCT_FLOAT_COERCE: + raise TestFailed("expected to raise struct.error for float coerce") + else: + raise TestFailed("did not raise error for float coerce") + +check_float_coerce = with_warning_restore(deprecated_err) + +def test_1530559(): + for endian in ('', '>', '<'): + for fmt in ('B', 'H', 'I', 'L', 'b', 'h', 'i', 'l'): + check_float_coerce(endian + fmt, 1.0) + check_float_coerce(endian + fmt, 1.5) + +test_1530559() + +########################################################################### +# Packing and unpacking to/from buffers. + +# Copied and modified from unittest. +def assertRaises(excClass, callableObj, *args, **kwargs): + try: + callableObj(*args, **kwargs) + except excClass: + return + else: + raise TestFailed("%s not raised." % excClass) + +def test_unpack_from(): + test_string = 'abcd01234' + fmt = '4s' + s = struct.Struct(fmt) + for cls in (str, buffer): + data = cls(test_string) + vereq(s.unpack_from(data), ('abcd',)) + vereq(s.unpack_from(data, 2), ('cd01',)) + vereq(s.unpack_from(data, 4), ('0123',)) + for i in xrange(6): + vereq(s.unpack_from(data, i), (data[i:i+4],)) + for i in xrange(6, len(test_string) + 1): + simple_err(s.unpack_from, data, i) + for cls in (str, buffer): + data = cls(test_string) + vereq(struct.unpack_from(fmt, data), ('abcd',)) + vereq(struct.unpack_from(fmt, data, 2), ('cd01',)) + vereq(struct.unpack_from(fmt, data, 4), ('0123',)) + for i in xrange(6): + vereq(struct.unpack_from(fmt, data, i), (data[i:i+4],)) + for i in xrange(6, len(test_string) + 1): + simple_err(struct.unpack_from, fmt, data, i) + +def test_pack_into(): + test_string = 'Reykjavik rocks, eow!' + writable_buf = array.array('c', ' '*100) + fmt = '21s' + s = struct.Struct(fmt) + + # Test without offset + s.pack_into(writable_buf, 0, test_string) + from_buf = writable_buf.tostring()[:len(test_string)] + vereq(from_buf, test_string) + + # Test with offset. + s.pack_into(writable_buf, 10, test_string) + from_buf = writable_buf.tostring()[:len(test_string)+10] + vereq(from_buf, test_string[:10] + test_string) + + # Go beyond boundaries. + small_buf = array.array('c', ' '*10) + assertRaises(struct.error, s.pack_into, small_buf, 0, test_string) + assertRaises(struct.error, s.pack_into, small_buf, 2, test_string) + +def test_pack_into_fn(): + test_string = 'Reykjavik rocks, eow!' + writable_buf = array.array('c', ' '*100) + fmt = '21s' + pack_into = lambda *args: struct.pack_into(fmt, *args) + + # Test without offset. + pack_into(writable_buf, 0, test_string) + from_buf = writable_buf.tostring()[:len(test_string)] + vereq(from_buf, test_string) + + # Test with offset. + pack_into(writable_buf, 10, test_string) + from_buf = writable_buf.tostring()[:len(test_string)+10] + vereq(from_buf, test_string[:10] + test_string) + + # Go beyond boundaries. 
+ small_buf = array.array('c', ' '*10) + assertRaises(struct.error, pack_into, small_buf, 0, test_string) + assertRaises(struct.error, pack_into, small_buf, 2, test_string) + +def test_unpack_with_buffer(): + # SF bug 1563759: struct.unpack doens't support buffer protocol objects + data = array.array('B', '\x12\x34\x56\x78') + value, = struct.unpack('>I', data) + vereq(value, 0x12345678) + +# Test methods to pack and unpack from buffers rather than strings. +test_unpack_from() +test_pack_into() +test_pack_into_fn() +test_unpack_with_buffer() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,357 @@ +# -*- coding: iso-8859-1 -*- +import unittest, test.test_support +import sys, cStringIO + +class SysModuleTest(unittest.TestCase): + + def test_original_displayhook(self): + import __builtin__ + savestdout = sys.stdout + out = cStringIO.StringIO() + sys.stdout = out + + dh = sys.__displayhook__ + + self.assertRaises(TypeError, dh) + if hasattr(__builtin__, "_"): + del __builtin__._ + + dh(None) + self.assertEqual(out.getvalue(), "") + self.assert_(not hasattr(__builtin__, "_")) + dh(42) + self.assertEqual(out.getvalue(), "42\n") + self.assertEqual(__builtin__._, 42) + + del sys.stdout + self.assertRaises(RuntimeError, dh, 42) + + sys.stdout = savestdout + + def test_lost_displayhook(self): + olddisplayhook = sys.displayhook + del sys.displayhook + code = compile("42", "", "single") + self.assertRaises(RuntimeError, eval, code) + sys.displayhook = olddisplayhook + + def test_custom_displayhook(self): + olddisplayhook = sys.displayhook + def baddisplayhook(obj): + raise ValueError + sys.displayhook = baddisplayhook + code = compile("42", "", "single") + self.assertRaises(ValueError, eval, code) + sys.displayhook = olddisplayhook + + def test_original_excepthook(self): + savestderr = sys.stderr + err = cStringIO.StringIO() + sys.stderr = err + + eh = sys.__excepthook__ + + self.assertRaises(TypeError, eh) + try: + raise ValueError(42) + except ValueError, exc: + eh(*sys.exc_info()) + + sys.stderr = savestderr + self.assert_(err.getvalue().endswith("ValueError: 42\n")) + + # FIXME: testing the code for a lost or replaced excepthook in + # Python/pythonrun.c::PyErr_PrintEx() is tricky. + + def test_exc_clear(self): + self.assertRaises(TypeError, sys.exc_clear, 42) + + # Verify that exc_info is present and matches exc, then clear it, and + # check that it worked. + def clear_check(exc): + typ, value, traceback = sys.exc_info() + self.assert_(typ is not None) + self.assert_(value is exc) + self.assert_(traceback is not None) + + sys.exc_clear() + + typ, value, traceback = sys.exc_info() + self.assert_(typ is None) + self.assert_(value is None) + self.assert_(traceback is None) + + def clear(): + try: + raise ValueError, 42 + except ValueError, exc: + clear_check(exc) + + # Raise an exception and check that it can be cleared + clear() + + # Verify that a frame currently handling an exception is + # unaffected by calling exc_clear in a nested frame. 
+ try: + raise ValueError, 13 + except ValueError, exc: + typ1, value1, traceback1 = sys.exc_info() + clear() + typ2, value2, traceback2 = sys.exc_info() + + self.assert_(typ1 is typ2) + self.assert_(value1 is exc) + self.assert_(value1 is value2) + self.assert_(traceback1 is traceback2) + + # Check that an exception can be cleared outside of an except block + clear_check(exc) + + def test_exit(self): + self.assertRaises(TypeError, sys.exit, 42, 42) + + # call without argument + try: + sys.exit(0) + except SystemExit, exc: + self.assertEquals(exc.code, 0) + except: + self.fail("wrong exception") + else: + self.fail("no exception") + + # call with tuple argument with one entry + # entry will be unpacked + try: + sys.exit(42) + except SystemExit, exc: + self.assertEquals(exc.code, 42) + except: + self.fail("wrong exception") + else: + self.fail("no exception") + + # call with integer argument + try: + sys.exit((42,)) + except SystemExit, exc: + self.assertEquals(exc.code, 42) + except: + self.fail("wrong exception") + else: + self.fail("no exception") + + # call with string argument + try: + sys.exit("exit") + except SystemExit, exc: + self.assertEquals(exc.code, "exit") + except: + self.fail("wrong exception") + else: + self.fail("no exception") + + # call with tuple argument with two entries + try: + sys.exit((17, 23)) + except SystemExit, exc: + self.assertEquals(exc.code, (17, 23)) + except: + self.fail("wrong exception") + else: + self.fail("no exception") + + # test that the exit machinery handles SystemExits properly + import subprocess + # both unnormalized... + rc = subprocess.call([sys.executable, "-c", + "raise SystemExit, 46"]) + self.assertEqual(rc, 46) + # ... and normalized + rc = subprocess.call([sys.executable, "-c", + "raise SystemExit(47)"]) + self.assertEqual(rc, 47) + + + def test_getdefaultencoding(self): + if test.test_support.have_unicode: + self.assertRaises(TypeError, sys.getdefaultencoding, 42) + # can't check more than the type, as the user might have changed it + self.assert_(isinstance(sys.getdefaultencoding(), str)) + + # testing sys.settrace() is done in test_trace.py + # testing sys.setprofile() is done in test_profile.py + + def test_setcheckinterval(self): + self.assertRaises(TypeError, sys.setcheckinterval) + orig = sys.getcheckinterval() + for n in 0, 100, 120, orig: # orig last to restore starting state + sys.setcheckinterval(n) + self.assertEquals(sys.getcheckinterval(), n) + + def test_recursionlimit(self): + self.assertRaises(TypeError, sys.getrecursionlimit, 42) + oldlimit = sys.getrecursionlimit() + self.assertRaises(TypeError, sys.setrecursionlimit) + self.assertRaises(ValueError, sys.setrecursionlimit, -42) + sys.setrecursionlimit(10000) + self.assertEqual(sys.getrecursionlimit(), 10000) + sys.setrecursionlimit(oldlimit) + + def test_getwindowsversion(self): + if hasattr(sys, "getwindowsversion"): + v = sys.getwindowsversion() + self.assert_(isinstance(v, tuple)) + self.assertEqual(len(v), 5) + self.assert_(isinstance(v[0], int)) + self.assert_(isinstance(v[1], int)) + self.assert_(isinstance(v[2], int)) + self.assert_(isinstance(v[3], int)) + self.assert_(isinstance(v[4], str)) + + def test_dlopenflags(self): + if hasattr(sys, "setdlopenflags"): + self.assert_(hasattr(sys, "getdlopenflags")) + self.assertRaises(TypeError, sys.getdlopenflags, 42) + oldflags = sys.getdlopenflags() + self.assertRaises(TypeError, sys.setdlopenflags) + sys.setdlopenflags(oldflags+1) + self.assertEqual(sys.getdlopenflags(), oldflags+1) + sys.setdlopenflags(oldflags) + 
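For the frame tests coming up: sys._getframe() with no argument returns the frame object of the caller itself, sys._getframe(1) returns the caller's caller, and a frame's f_code is the code object of the function being executed, which is exactly what the identity check in test_getframe relies on. A tiny sketch of that behaviour (CPython-specific API, run at module level so the caller is the module frame):

    import sys

    def where_am_i():
        here = sys._getframe()       # frame executing where_am_i
        caller = sys._getframe(1)    # one level up the call stack
        return here.f_code.co_name, caller.f_code.co_name

    assert where_am_i() == ('where_am_i', '<module>')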
+ def DONT_test_refcount(self): + self.assertRaises(TypeError, sys.getrefcount) + c = sys.getrefcount(None) + n = None + self.assertEqual(sys.getrefcount(None), c+1) + del n + self.assertEqual(sys.getrefcount(None), c) + if hasattr(sys, "gettotalrefcount"): + self.assert_(isinstance(sys.gettotalrefcount(), int)) + + def test_getframe(self): + self.assertRaises(TypeError, sys._getframe, 42, 42) + self.assertRaises(ValueError, sys._getframe, 2000000000) + self.assert_( + SysModuleTest.test_getframe.im_func.func_code \ + is sys._getframe().f_code + ) + + # sys._current_frames() is a CPython-only gimmick. + def test_current_frames(self): + have_threads = True + try: + import thread + except ImportError: + have_threads = False + + if have_threads: + self.current_frames_with_threads() + else: + self.current_frames_without_threads() + + # Test sys._current_frames() in a WITH_THREADS build. + def current_frames_with_threads(self): + import threading, thread + import traceback + + # Spawn a thread that blocks at a known place. Then the main + # thread does sys._current_frames(), and verifies that the frames + # returned make sense. + entered_g = threading.Event() + leave_g = threading.Event() + thread_info = [] # the thread's id + + def f123(): + g456() + + def g456(): + thread_info.append(thread.get_ident()) + entered_g.set() + leave_g.wait() + + t = threading.Thread(target=f123) + t.start() + entered_g.wait() + + # At this point, t has finished its entered_g.set(), although it's + # impossible to guess whether it's still on that line or has moved on + # to its leave_g.wait(). + self.assertEqual(len(thread_info), 1) + thread_id = thread_info[0] + + d = sys._current_frames() + + main_id = thread.get_ident() + self.assert_(main_id in d) + self.assert_(thread_id in d) + + # Verify that the captured main-thread frame is _this_ frame. + frame = d.pop(main_id) + self.assert_(frame is sys._getframe()) + + # Verify that the captured thread frame is blocked in g456, called + # from f123. This is a litte tricky, since various bits of + # threading.py are also in the thread's call stack. + frame = d.pop(thread_id) + stack = traceback.extract_stack(frame) + for i, (filename, lineno, funcname, sourceline) in enumerate(stack): + if funcname == "f123": + break + else: + self.fail("didn't find f123() on thread's call stack") + + self.assertEqual(sourceline, "g456()") + + # And the next record must be for g456(). + filename, lineno, funcname, sourceline = stack[i+1] + self.assertEqual(funcname, "g456") + self.assert_(sourceline in ["leave_g.wait()", "entered_g.set()"]) + + # Reap the spawned thread. + leave_g.set() + t.join() + + # Test sys._current_frames() when thread support doesn't exist. + def current_frames_without_threads(self): + # Not much happens here: there is only one thread, with artificial + # "thread id" 0. 
+ d = sys._current_frames() + self.assertEqual(len(d), 1) + self.assert_(0 in d) + self.assert_(d[0] is sys._getframe()) + + def test_attributes(self): + self.assert_(isinstance(sys.api_version, int)) + self.assert_(isinstance(sys.argv, list)) + self.assert_(sys.byteorder in ("little", "big")) + self.assert_(isinstance(sys.builtin_module_names, tuple)) + self.assert_(isinstance(sys.copyright, basestring)) + self.assert_(isinstance(sys.exec_prefix, basestring)) + self.assert_(isinstance(sys.executable, basestring)) + self.assert_(isinstance(sys.hexversion, int)) + self.assert_(isinstance(sys.maxint, int)) + if test.test_support.have_unicode: + self.assert_(isinstance(sys.maxunicode, int)) + self.assert_(isinstance(sys.platform, basestring)) + self.assert_(isinstance(sys.prefix, basestring)) + self.assert_(isinstance(sys.version, basestring)) + vi = sys.version_info + self.assert_(isinstance(vi, tuple)) + self.assertEqual(len(vi), 5) + self.assert_(isinstance(vi[0], int)) + self.assert_(isinstance(vi[1], int)) + self.assert_(isinstance(vi[2], int)) + self.assert_(vi[3] in ("alpha", "beta", "candidate", "final")) + self.assert_(isinstance(vi[4], int)) + + def test_43581(self): + # Can't use sys.stdout, as this is a cStringIO object when + # the test runs under regrtest. + self.assert_(sys.__stdout__.encoding == sys.__stderr__.encoding) + +def test_main(): + test.test_support.run_unittest(SysModuleTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,686 @@ +# tempfile.py unit tests. + +import tempfile +import os +import sys +import re +import errno +import warnings + +import unittest +from test import test_support + +warnings.filterwarnings("ignore", + category=RuntimeWarning, + message="mktemp", module=__name__) + +if hasattr(os, 'stat'): + import stat + has_stat = 1 +else: + has_stat = 0 + +has_textmode = (tempfile._text_openflags != tempfile._bin_openflags) +has_spawnl = hasattr(os, 'spawnl') + +# TEST_FILES may need to be tweaked for systems depending on the maximum +# number of files that can be opened at one time (see ulimit -n) +if sys.platform == 'mac': + TEST_FILES = 32 +elif sys.platform in ('openbsd3', 'openbsd4'): + TEST_FILES = 48 +else: + TEST_FILES = 100 + +# Reduced for PyPy +TEST_FILES = 10 + +# This is organized as one test for each chunk of code in tempfile.py, +# in order of their appearance in the file. Testing which requires +# threads is not done here. + +# Common functionality. +class TC(unittest.TestCase): + + str_check = re.compile(r"[a-zA-Z0-9_-]{6}$") + + def failOnException(self, what, ei=None): + if ei is None: + ei = sys.exc_info() + self.fail("%s raised %s: %s" % (what, ei[0], ei[1])) + + def nameCheck(self, name, dir, pre, suf): + (ndir, nbase) = os.path.split(name) + npre = nbase[:len(pre)] + nsuf = nbase[len(nbase)-len(suf):] + + # check for equality of the absolute paths! 
+ self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir), + "file '%s' not in directory '%s'" % (name, dir)) + self.assertEqual(npre, pre, + "file '%s' does not begin with '%s'" % (nbase, pre)) + self.assertEqual(nsuf, suf, + "file '%s' does not end with '%s'" % (nbase, suf)) + + nbase = nbase[len(pre):len(nbase)-len(suf)] + self.assert_(self.str_check.match(nbase), + "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/" + % nbase) + +test_classes = [] + +class test_exports(TC): + def test_exports(self): + # There are no surprising symbols in the tempfile module + dict = tempfile.__dict__ + + expected = { + "NamedTemporaryFile" : 1, + "TemporaryFile" : 1, + "mkstemp" : 1, + "mkdtemp" : 1, + "mktemp" : 1, + "TMP_MAX" : 1, + "gettempprefix" : 1, + "gettempdir" : 1, + "tempdir" : 1, + "template" : 1 + } + + unexp = [] + for key in dict: + if key[0] != '_' and key not in expected: + unexp.append(key) + self.failUnless(len(unexp) == 0, + "unexpected keys: %s" % unexp) + +test_classes.append(test_exports) + + +class test__RandomNameSequence(TC): + """Test the internal iterator object _RandomNameSequence.""" + + def setUp(self): + self.r = tempfile._RandomNameSequence() + + def test_get_six_char_str(self): + # _RandomNameSequence returns a six-character string + s = self.r.next() + self.nameCheck(s, '', '', '') + + def test_many(self): + # _RandomNameSequence returns no duplicate strings (stochastic) + + dict = {} + r = self.r + for i in xrange(TEST_FILES): + s = r.next() + self.nameCheck(s, '', '', '') + self.failIf(s in dict) + dict[s] = 1 + + def test_supports_iter(self): + # _RandomNameSequence supports the iterator protocol + + i = 0 + r = self.r + try: + for s in r: + i += 1 + if i == 20: + break + except: + failOnException("iteration") + +test_classes.append(test__RandomNameSequence) + + +class test__candidate_tempdir_list(TC): + """Test the internal function _candidate_tempdir_list.""" + + def test_nonempty_list(self): + # _candidate_tempdir_list returns a nonempty list of strings + + cand = tempfile._candidate_tempdir_list() + + self.failIf(len(cand) == 0) + for c in cand: + self.assert_(isinstance(c, basestring), + "%s is not a string" % c) + + def test_wanted_dirs(self): + # _candidate_tempdir_list contains the expected directories + + # Make sure the interesting environment variables are all set. + added = [] + try: + for envname in 'TMPDIR', 'TEMP', 'TMP': + dirname = os.getenv(envname) + if not dirname: + os.environ[envname] = os.path.abspath(envname) + added.append(envname) + + cand = tempfile._candidate_tempdir_list() + + for envname in 'TMPDIR', 'TEMP', 'TMP': + dirname = os.getenv(envname) + if not dirname: raise ValueError + self.assert_(dirname in cand) + + try: + dirname = os.getcwd() + except (AttributeError, os.error): + dirname = os.curdir + + self.assert_(dirname in cand) + + # Not practical to try to verify the presence of OS-specific + # paths in this list. + finally: + for p in added: + del os.environ[p] + +test_classes.append(test__candidate_tempdir_list) + + +# We test _get_default_tempdir by testing gettempdir. 
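One detail worth keeping in mind while reading the mkstemp-related cases below: tempfile.mkstemp() hands back a raw OS-level file descriptor together with the path, and it never deletes the file on its own, which is why every fixture here pairs creation with an explicit close and unlink. A minimal sketch of that usage, with hypothetical prefix and content:

    import os
    import tempfile

    fd, path = tempfile.mkstemp(prefix='aa', suffix='.txt')
    try:
        os.write(fd, 'blat')        # a descriptor, not a file object
    finally:
        os.close(fd)
        os.unlink(path)             # mkstemp leaves cleanup to the caller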
+ + +class test__get_candidate_names(TC): + """Test the internal function _get_candidate_names.""" + + def test_retval(self): + # _get_candidate_names returns a _RandomNameSequence object + obj = tempfile._get_candidate_names() + self.assert_(isinstance(obj, tempfile._RandomNameSequence)) + + def test_same_thing(self): + # _get_candidate_names always returns the same object + a = tempfile._get_candidate_names() + b = tempfile._get_candidate_names() + + self.assert_(a is b) + +test_classes.append(test__get_candidate_names) + + +class test__mkstemp_inner(TC): + """Test the internal function _mkstemp_inner.""" + + class mkstemped: + _bflags = tempfile._bin_openflags + _tflags = tempfile._text_openflags + _close = os.close + _unlink = os.unlink + + def __init__(self, dir, pre, suf, bin): + if bin: flags = self._bflags + else: flags = self._tflags + + (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags) + + def write(self, str): + os.write(self.fd, str) + + def close(self): + self._close(self.fd) + self._unlink(self.name) + + def do_create(self, dir=None, pre="", suf="", bin=1): + if dir is None: + dir = tempfile.gettempdir() + try: + file = self.mkstemped(dir, pre, suf, bin) + except: + self.failOnException("_mkstemp_inner") + + self.nameCheck(file.name, dir, pre, suf) + return file + + def create_blat(self, *args, **kwds): + f = self.do_create(*args, **kwds) + f.write("blat") + f.close() + + def test_basic(self): + # _mkstemp_inner can create files + self.create_blat() + self.create_blat(pre="a") + self.create_blat(suf="b") + self.create_blat(pre="a", suf="b") + self.create_blat(pre="aa", suf=".txt") + + def test_basic_many(self): + # _mkstemp_inner can create many files (stochastic) + extant = range(TEST_FILES) + for i in extant: + extant[i] = self.do_create(pre="aa") + for f in extant: + f.close() + + def test_choose_directory(self): + # _mkstemp_inner can create files in a user-selected directory + dir = tempfile.mkdtemp() + try: + self.create_blat(dir=dir) + finally: + os.rmdir(dir) + + def test_file_mode(self): + # _mkstemp_inner creates files with the proper mode + if not has_stat: + return # ugh, can't use TestSkipped. + + file = self.do_create() + mode = stat.S_IMODE(os.stat(file.name).st_mode) + file.close() + expected = 0600 + if sys.platform in ('win32', 'os2emx', 'mac'): + # There's no distinction among 'user', 'group' and 'world'; + # replicate the 'user' bits. + user = expected >> 6 + expected = user * (1 + 8 + 64) + self.assertEqual(mode, expected) + + def test_noinherit(self): + # _mkstemp_inner file handles are not inherited by child processes + if not has_spawnl: + return # ugh, can't use TestSkipped. + + if test_support.verbose: + v="v" + else: + v="q" + + file = self.do_create() + fd = "%d" % file.fd + + try: + me = __file__ + except NameError: + me = sys.argv[0] + + # We have to exec something, so that FD_CLOEXEC will take + # effect. The core of this test is therefore in + # tf_inherit_check.py, which see. 
+ tester = os.path.join(os.path.dirname(os.path.abspath(me)), + "tf_inherit_check.py") + + # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted, + # but an arg with embedded spaces should be decorated with double + # quotes on each end + if sys.platform in ('win32'): + decorated = '"%s"' % sys.executable + tester = '"%s"' % tester + else: + decorated = sys.executable + try: + import fcntl + except ImportError: + return # for interpreters without fcntl, on Unix platforms, + # we can't set the FD_CLOEXEC flag + + retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd) + file.close() + self.failIf(retval < 0, + "child process caught fatal signal %d" % -retval) + self.failIf(retval > 0, "child process reports failure") + + def test_textmode(self): + # _mkstemp_inner can create files in text mode + if not has_textmode: + return # ugh, can't use TestSkipped. + + self.create_blat(bin=0) + # XXX should test that the file really is a text file + +test_classes.append(test__mkstemp_inner) + + +class test_gettempprefix(TC): + """Test gettempprefix().""" + + def test_sane_template(self): + # gettempprefix returns a nonempty prefix string + p = tempfile.gettempprefix() + + self.assert_(isinstance(p, basestring)) + self.assert_(len(p) > 0) + + def test_usable_template(self): + # gettempprefix returns a usable prefix string + + # Create a temp directory, avoiding use of the prefix. + # Then attempt to create a file whose name is + # prefix + 'xxxxxx.xxx' in that directory. + p = tempfile.gettempprefix() + "xxxxxx.xxx" + d = tempfile.mkdtemp(prefix="") + try: + p = os.path.join(d, p) + try: + fd = os.open(p, os.O_RDWR | os.O_CREAT) + except: + self.failOnException("os.open") + os.close(fd) + os.unlink(p) + finally: + os.rmdir(d) + +test_classes.append(test_gettempprefix) + + +class test_gettempdir(TC): + """Test gettempdir().""" + + def test_directory_exists(self): + # gettempdir returns a directory which exists + + dir = tempfile.gettempdir() + self.assert_(os.path.isabs(dir) or dir == os.curdir, + "%s is not an absolute path" % dir) + self.assert_(os.path.isdir(dir), + "%s is not a directory" % dir) + + def test_directory_writable(self): + # gettempdir returns a directory writable by the user + + # sneaky: just instantiate a NamedTemporaryFile, which + # defaults to writing into the directory returned by + # gettempdir. 
+ try: + file = tempfile.NamedTemporaryFile() + file.write("blat") + file.close() + except: + self.failOnException("create file in %s" % tempfile.gettempdir()) + + def test_same_thing(self): + # gettempdir always returns the same object + a = tempfile.gettempdir() + b = tempfile.gettempdir() + + self.assert_(a is b) + +test_classes.append(test_gettempdir) + + +class test_mkstemp(TC): + """Test mkstemp().""" + + def do_create(self, dir=None, pre="", suf="", ): + if dir is None: + dir = tempfile.gettempdir() + try: + (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf) + (ndir, nbase) = os.path.split(name) + adir = os.path.abspath(dir) + self.assertEqual(adir, ndir, + "Directory '%s' incorrectly returned as '%s'" % (adir, ndir)) + except: + self.failOnException("mkstemp") + + try: + self.nameCheck(name, dir, pre, suf) + finally: + os.close(fd) + os.unlink(name) + + def test_basic(self): + # mkstemp can create files + self.do_create() + self.do_create(pre="a") + self.do_create(suf="b") + self.do_create(pre="a", suf="b") + self.do_create(pre="aa", suf=".txt") + self.do_create(dir=".") + + def test_choose_directory(self): + # mkstemp can create directories in a user-selected directory + dir = tempfile.mkdtemp() + try: + self.do_create(dir=dir) + finally: + os.rmdir(dir) + +test_classes.append(test_mkstemp) + + +class test_mkdtemp(TC): + """Test mkdtemp().""" + + def do_create(self, dir=None, pre="", suf=""): + if dir is None: + dir = tempfile.gettempdir() + try: + name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf) + except: + self.failOnException("mkdtemp") + + try: + self.nameCheck(name, dir, pre, suf) + return name + except: + os.rmdir(name) + raise + + def test_basic(self): + # mkdtemp can create directories + os.rmdir(self.do_create()) + os.rmdir(self.do_create(pre="a")) + os.rmdir(self.do_create(suf="b")) + os.rmdir(self.do_create(pre="a", suf="b")) + os.rmdir(self.do_create(pre="aa", suf=".txt")) + + def test_basic_many(self): + # mkdtemp can create many directories (stochastic) + extant = range(TEST_FILES) + try: + for i in extant: + extant[i] = self.do_create(pre="aa") + finally: + for i in extant: + if(isinstance(i, basestring)): + os.rmdir(i) + + def test_choose_directory(self): + # mkdtemp can create directories in a user-selected directory + dir = tempfile.mkdtemp() + try: + os.rmdir(self.do_create(dir=dir)) + finally: + os.rmdir(dir) + + def test_mode(self): + # mkdtemp creates directories with the proper mode + if not has_stat: + return # ugh, can't use TestSkipped. + + dir = self.do_create() + try: + mode = stat.S_IMODE(os.stat(dir).st_mode) + mode &= 0777 # Mask off sticky bits inherited from /tmp + expected = 0700 + if sys.platform in ('win32', 'os2emx', 'mac'): + # There's no distinction among 'user', 'group' and 'world'; + # replicate the 'user' bits. + user = expected >> 6 + expected = user * (1 + 8 + 64) + self.assertEqual(mode, expected) + finally: + os.rmdir(dir) + +test_classes.append(test_mkdtemp) + + +class test_mktemp(TC): + """Test mktemp().""" + + # For safety, all use of mktemp must occur in a private directory. + # We must also suppress the RuntimeWarning it generates. + def setUp(self): + self.dir = tempfile.mkdtemp() + + def tearDown(self): + import shutil + if self.dir: + shutil.rmtree(self.dir, ignore_errors=True) + self.dir = None + + class mktemped: + _unlink = os.unlink + _bflags = tempfile._bin_openflags + + def __init__(self, dir, pre, suf): + self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf) + # Create the file. 
This will raise an exception if it's + # mysteriously appeared in the meanwhile. + os.close(os.open(self.name, self._bflags, 0600)) + + def __del__(self): + self._unlink(self.name) + + def do_create(self, pre="", suf=""): + try: + file = self.mktemped(self.dir, pre, suf) + except: + self.failOnException("mktemp") + + self.nameCheck(file.name, self.dir, pre, suf) + return file + + def test_basic(self): + # mktemp can choose usable file names + self.do_create() + self.do_create(pre="a") + self.do_create(suf="b") + self.do_create(pre="a", suf="b") + self.do_create(pre="aa", suf=".txt") + + def test_many(self): + # mktemp can choose many usable file names (stochastic) + extant = range(TEST_FILES) + for i in extant: + extant[i] = self.do_create(pre="aa") + +## def test_warning(self): +## # mktemp issues a warning when used +## warnings.filterwarnings("error", +## category=RuntimeWarning, +## message="mktemp") +## self.assertRaises(RuntimeWarning, +## tempfile.mktemp, dir=self.dir) + +test_classes.append(test_mktemp) + + +# We test _TemporaryFileWrapper by testing NamedTemporaryFile. + + +class test_NamedTemporaryFile(TC): + """Test NamedTemporaryFile().""" + + def do_create(self, dir=None, pre="", suf=""): + if dir is None: + dir = tempfile.gettempdir() + try: + file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf) + except: + self.failOnException("NamedTemporaryFile") + + self.nameCheck(file.name, dir, pre, suf) + return file + + + def test_basic(self): + # NamedTemporaryFile can create files + self.do_create() + self.do_create(pre="a") + self.do_create(suf="b") + self.do_create(pre="a", suf="b") + self.do_create(pre="aa", suf=".txt") + + def test_creates_named(self): + # NamedTemporaryFile creates files with names + f = tempfile.NamedTemporaryFile() + self.failUnless(os.path.exists(f.name), + "NamedTemporaryFile %s does not exist" % f.name) + + def test_del_on_close(self): + # A NamedTemporaryFile is deleted when closed + dir = tempfile.mkdtemp() + try: + f = tempfile.NamedTemporaryFile(dir=dir) + f.write('blat') + f.close() + self.failIf(os.path.exists(f.name), + "NamedTemporaryFile %s exists after close" % f.name) + finally: + os.rmdir(dir) + + def test_multiple_close(self): + # A NamedTemporaryFile can be closed many times without error + + f = tempfile.NamedTemporaryFile() + f.write('abc\n') + f.close() + try: + f.close() + f.close() + except: + self.failOnException("close") + + # How to test the mode and bufsize parameters? + +test_classes.append(test_NamedTemporaryFile) + + +class test_TemporaryFile(TC): + """Test TemporaryFile().""" + + def test_basic(self): + # TemporaryFile can create files + # No point in testing the name params - the file has no name. + try: + tempfile.TemporaryFile() + except: + self.failOnException("TemporaryFile") + + def test_has_no_name(self): + # TemporaryFile creates files with no names (on this system) + dir = tempfile.mkdtemp() + f = tempfile.TemporaryFile(dir=dir) + f.write('blat') + + # Sneaky: because this file has no name, it should not prevent + # us from removing the directory it was created in. + try: + os.rmdir(dir) + except: + ei = sys.exc_info() + # cleanup + f.close() + os.rmdir(dir) + self.failOnException("rmdir", ei) + + def test_multiple_close(self): + # A TemporaryFile can be closed many times without error + f = tempfile.TemporaryFile() + f.write('abc\n') + f.close() + try: + f.close() + f.close() + except: + self.failOnException("close") + + # How to test the mode and bufsize parameters? 
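One way that question could be answered, sketched under the assumption that the object returned by TemporaryFile/NamedTemporaryFile exposes the underlying file's attributes: the mode can at least be read back, whereas bufsize has no portable way to be inspected after the fact.

import tempfile

f = tempfile.TemporaryFile(mode='w+b', bufsize=-1)
print f.mode    # expected to be 'w+b' if the file (or wrapper) exposes .mode
f.close()       # bufsize itself cannot be read back portably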
+ + +if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile: + test_classes.append(test_TemporaryFile) + +def test_main(): + test_support.run_unittest(*test_classes) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,626 @@ +# Testing the line trace facility. + +from test import test_support +import unittest +import sys +import difflib + +# A very basic example. If this fails, we're in deep trouble. +def basic(): + return 1 + +basic.events = [(0, 'call'), + (1, 'line'), + (1, 'return')] + +# Many of the tests below are tricky because they involve pass statements. +# If there is implicit control flow around a pass statement (in an except +# clause or else caluse) under what conditions do you set a line number +# following that clause? + + +# The entire "while 0:" statement is optimized away. No code +# exists for it, so the line numbers skip directly from "del x" +# to "x = 1". +def arigo_example(): + x = 1 + del x + while 0: + pass + x = 1 + +arigo_example.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (5, 'line'), + (5, 'return')] + +# check that lines consisting of just one instruction get traced: +def one_instr_line(): + x = 1 + del x + x = 1 + +one_instr_line.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (3, 'line'), + (3, 'return')] + +def no_pop_tops(): # 0 + x = 1 # 1 + for a in range(2): # 2 + if a: # 3 + x = 1 # 4 + else: # 5 + x = 1 # 6 + +no_pop_tops.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (3, 'line'), + (6, 'line'), + (2, 'line'), + (3, 'line'), + (4, 'line'), + (2, 'line'), + (2, 'return')] + +def no_pop_blocks(): + y = 1 + while not y: + bla + x = 1 + +no_pop_blocks.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (4, 'line'), + (4, 'return')] + +def called(): # line -3 + x = 1 + +def call(): # line 0 + called() + +call.events = [(0, 'call'), + (1, 'line'), + (-3, 'call'), + (-2, 'line'), + (-2, 'return'), + (1, 'return')] + +def raises(): + raise Exception + +def test_raise(): + try: + raises() + except Exception, exc: + x = 1 + +test_raise.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (-3, 'call'), + (-2, 'line'), + (-2, 'exception'), + (-2, 'return'), + (2, 'exception'), + (3, 'line'), + (4, 'line'), + (4, 'return')] + +def _settrace_and_return(tracefunc): + sys.settrace(tracefunc) + sys._getframe().f_back.f_trace = tracefunc +def settrace_and_return(tracefunc): + _settrace_and_return(tracefunc) + +settrace_and_return.events = [(1, 'return')] + +def _settrace_and_raise(tracefunc): + sys.settrace(tracefunc) + sys._getframe().f_back.f_trace = tracefunc + raise RuntimeError +def settrace_and_raise(tracefunc): + try: + _settrace_and_raise(tracefunc) + except RuntimeError, exc: + pass + +settrace_and_raise.events = [(2, 'exception'), + (3, 'line'), + (4, 'line'), + (4, 'return')] + +# implicit return example +# This test is interesting because of the else: pass +# part of the code. The code generate for the true +# part of the if contains a jump past the else branch. +# The compiler then generates an implicit "return None" +# Internally, the compiler visits the pass statement +# and stores its line number for use on the next instruction. +# The next instruction is the implicit return None. 
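The effect described in the comment above can be seen directly with the stdlib dis module; this is only a sketch, assuming CPython 2.x bytecode, and ireturn_demo is a local copy of the ireturn_example function defined just below.

import dis

def ireturn_demo():
    a = 5
    b = 5
    if a == b:
        b = a + 1
    else:
        pass

dis.dis(ireturn_demo)   # the trailing LOAD_CONST None / RETURN_VALUE should be
                        # listed under the line of the 'pass', which is why the
                        # expected events end with (6, 'line'), (6, 'return')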
+def ireturn_example(): + a = 5 + b = 5 + if a == b: + b = a+1 + else: + pass + +ireturn_example.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (3, 'line'), + (4, 'line'), + (6, 'line'), + (6, 'return')] + +# Tight loop with while(1) example (SF #765624) +def tightloop_example(): + items = range(0, 3) + try: + i = 0 + while 1: + b = items[i]; i+=1 + except IndexError: + pass + +tightloop_example.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (3, 'line'), + (4, 'line'), + (5, 'line'), + (5, 'line'), + (5, 'line'), + (5, 'line'), + (5, 'exception'), + (6, 'line'), + (7, 'line'), + (7, 'return')] + +def tighterloop_example(): + items = range(1, 4) + try: + i = 0 + while 1: i = items[i] + except IndexError: + pass + +tighterloop_example.events = [(0, 'call'), + (1, 'line'), + (2, 'line'), + (3, 'line'), + (4, 'line'), + (4, 'line'), + (4, 'line'), + (4, 'line'), + (4, 'exception'), + (5, 'line'), + (6, 'line'), + (6, 'return')] + +class Tracer: + def __init__(self): + self.events = [] + def trace(self, frame, event, arg): + self.events.append((frame.f_lineno, event)) + return self.trace + +class TraceTestCase(unittest.TestCase): + def compare_events(self, line_offset, events, expected_events): + events = [(l - line_offset, e) for (l, e) in events] + if events != expected_events: + self.fail( + "events did not match expectation:\n" + + "\n".join(difflib.ndiff(map(str, expected_events), + map(str, events)))) + + + def run_test(self, func): + tracer = Tracer() + sys.settrace(tracer.trace) + func() + sys.settrace(None) + self.compare_events(func.func_code.co_firstlineno, + tracer.events, func.events) + + def run_test2(self, func): + tracer = Tracer() + func(tracer.trace) + sys.settrace(None) + self.compare_events(func.func_code.co_firstlineno, + tracer.events, func.events) + + def test_01_basic(self): + self.run_test(basic) + def test_02_arigo(self): + self.run_test(arigo_example) + def test_03_one_instr(self): + self.run_test(one_instr_line) + def test_04_no_pop_blocks(self): + self.run_test(no_pop_blocks) + def test_05_no_pop_tops(self): + self.run_test(no_pop_tops) + def test_06_call(self): + self.run_test(call) + def test_07_raise(self): + self.run_test(test_raise) + + def test_08_settrace_and_return(self): + self.run_test2(settrace_and_return) + def test_09_settrace_and_raise(self): + self.run_test2(settrace_and_raise) + def test_10_ireturn(self): + self.run_test(ireturn_example) + def test_11_tightloop(self): + self.run_test(tightloop_example) + def test_12_tighterloop(self): + self.run_test(tighterloop_example) + +class RaisingTraceFuncTestCase(unittest.TestCase): + def trace(self, frame, event, arg): + """A trace function that raises an exception in response to a + specific trace event.""" + if event == self.raiseOnEvent: + raise ValueError # just something that isn't RuntimeError + else: + return self.trace + + def f(self): + """The function to trace; raises an exception if that's the case + we're testing, so that the 'exception' trace event fires.""" + if self.raiseOnEvent == 'exception': + x = 0 + y = 1/x + else: + return 1 + + def run_test_for_event(self, event): + """Tests that an exception raised in response to the given event is + handled OK.""" + self.raiseOnEvent = event + try: + for i in xrange(sys.getrecursionlimit() + 1): + sys.settrace(self.trace) + try: + self.f() + except ValueError: + pass + else: + self.fail("exception not thrown!") + except RuntimeError: + self.fail("recursion counter not reset") + + # Test the handling of exceptions raised by each kind of 
trace event. + def test_call(self): + self.run_test_for_event('call') + def test_line(self): + self.run_test_for_event('line') + def test_return(self): + self.run_test_for_event('return') + def test_exception(self): + self.run_test_for_event('exception') + + def test_trash_stack(self): + def f(): + for i in range(5): + print i # line tracing will raise an exception at this line + + def g(frame, why, extra): + if (why == 'line' and + frame.f_lineno == f.func_code.co_firstlineno + 2): + raise RuntimeError, "i am crashing" + return g + + sys.settrace(g) + try: + f() + except RuntimeError: + # the test is really that this doesn't segfault: + import gc + gc.collect() + else: + self.fail("exception not propagated") + + +# 'Jump' tests: assigning to frame.f_lineno within a trace function +# moves the execution position - it's how debuggers implement a Jump +# command (aka. "Set next statement"). + +class JumpTracer: + """Defines a trace function that jumps from one place to another, + with the source and destination lines of the jump being defined by + the 'jump' property of the function under test.""" + + def __init__(self, function): + self.function = function + self.jumpFrom = function.jump[0] + self.jumpTo = function.jump[1] + self.done = False + + def trace(self, frame, event, arg): + if not self.done and frame.f_code == self.function.func_code: + firstLine = frame.f_code.co_firstlineno + if frame.f_lineno == firstLine + self.jumpFrom: + # Cope with non-integer self.jumpTo (because of + # no_jump_to_non_integers below). + try: + frame.f_lineno = firstLine + self.jumpTo + except TypeError: + frame.f_lineno = self.jumpTo + self.done = True + return self.trace + +# The first set of 'jump' tests are for things that are allowed: + +def jump_simple_forwards(output): + output.append(1) + output.append(2) + output.append(3) + +jump_simple_forwards.jump = (1, 3) +jump_simple_forwards.output = [3] + +def jump_simple_backwards(output): + output.append(1) + output.append(2) + +jump_simple_backwards.jump = (2, 1) +jump_simple_backwards.output = [1, 1, 2] + +def jump_out_of_block_forwards(output): + for i in 1, 2: + output.append(2) + for j in [3]: # Also tests jumping over a block + output.append(4) + output.append(5) + +jump_out_of_block_forwards.jump = (3, 5) +jump_out_of_block_forwards.output = [2, 5] + +def jump_out_of_block_backwards(output): + output.append(1) + for i in [1]: + output.append(3) + for j in [2]: # Also tests jumping over a block + output.append(5) + output.append(6) + output.append(7) + +jump_out_of_block_backwards.jump = (6, 1) +jump_out_of_block_backwards.output = [1, 3, 5, 1, 3, 5, 6, 7] + +def jump_to_codeless_line(output): + output.append(1) + # Jumping to this line should skip to the next one. + output.append(3) + +jump_to_codeless_line.jump = (1, 2) +jump_to_codeless_line.output = [3] + +def jump_to_same_line(output): + output.append(1) + output.append(2) + output.append(3) + +jump_to_same_line.jump = (2, 2) +jump_to_same_line.output = [1, 2, 3] + +# Tests jumping within a finally block, and over one. 
+def jump_in_nested_finally(output): + try: + output.append(2) + finally: + output.append(4) + try: + output.append(6) + finally: + output.append(8) + output.append(9) + +jump_in_nested_finally.jump = (4, 9) +jump_in_nested_finally.output = [2, 9] + +# The second set of 'jump' tests are for things that are not allowed: + +def no_jump_too_far_forwards(output): + try: + output.append(2) + output.append(3) + except ValueError, e: + output.append('after' in str(e)) + +no_jump_too_far_forwards.jump = (3, 6) +no_jump_too_far_forwards.output = [2, True] + +def no_jump_too_far_backwards(output): + try: + output.append(2) + output.append(3) + except ValueError, e: + output.append('before' in str(e)) + +no_jump_too_far_backwards.jump = (3, -1) +no_jump_too_far_backwards.output = [2, True] + +# Test each kind of 'except' line. +def no_jump_to_except_1(output): + try: + output.append(2) + except Exception: + e = sys.exc_info()[1] + output.append('except' in str(e)) + +no_jump_to_except_1.jump = (2, 3) +no_jump_to_except_1.output = [True] + +def no_jump_to_except_2(output): + try: + output.append(2) + except ValueError: + e = sys.exc_info()[1] + output.append('except' in str(e)) + +no_jump_to_except_2.jump = (2, 3) +no_jump_to_except_2.output = [True] + +def no_jump_to_except_3(output): + try: + output.append(2) + except ValueError, e: + output.append('except' in str(e)) + +no_jump_to_except_3.jump = (2, 3) +no_jump_to_except_3.output = [True] + +def no_jump_to_except_4(output): + try: + output.append(2) + except (ValueError, RuntimeError), e: + output.append('except' in str(e)) + +no_jump_to_except_4.jump = (2, 3) +no_jump_to_except_4.output = [True] + +def no_jump_forwards_into_block(output): + try: + output.append(2) + for i in 1, 2: + output.append(4) + except ValueError, e: + output.append('into' in str(e)) + +no_jump_forwards_into_block.jump = (2, 4) +no_jump_forwards_into_block.output = [True] + +def no_jump_backwards_into_block(output): + try: + for i in 1, 2: + output.append(3) + output.append(4) + except ValueError, e: + output.append('into' in str(e)) + +no_jump_backwards_into_block.jump = (4, 3) +no_jump_backwards_into_block.output = [3, 3, True] + +def no_jump_into_finally_block(output): + try: + try: + output.append(3) + x = 1 + finally: + output.append(6) + except ValueError, e: + output.append('finally' in str(e)) + +no_jump_into_finally_block.jump = (4, 6) +no_jump_into_finally_block.output = [3, 6, True] # The 'finally' still runs + +def no_jump_out_of_finally_block(output): + try: + try: + output.append(3) + finally: + output.append(5) + output.append(6) + except ValueError, e: + output.append('finally' in str(e)) + +no_jump_out_of_finally_block.jump = (5, 1) +no_jump_out_of_finally_block.output = [3, True] + +# This verifies the line-numbers-must-be-integers rule. +def no_jump_to_non_integers(output): + try: + output.append(2) + except ValueError, e: + output.append('integer' in str(e)) + +no_jump_to_non_integers.jump = (2, "Spam") +no_jump_to_non_integers.output = [True] + +# This verifies that you can't set f_lineno via _getframe or similar +# trickery. +def no_jump_without_trace_function(): + try: + previous_frame = sys._getframe().f_back + previous_frame.f_lineno = previous_frame.f_lineno + except ValueError, e: + # This is the exception we wanted; make sure the error message + # talks about trace functions. + if 'trace' not in str(e): + raise + else: + # Something's wrong - the expected exception wasn't raised. 
+ raise RuntimeError, "Trace-function-less jump failed to fail" + + +class JumpTestCase(unittest.TestCase): + def compare_jump_output(self, expected, received): + if received != expected: + self.fail( "Outputs don't match:\n" + + "Expected: " + repr(expected) + "\n" + + "Received: " + repr(received)) + + def run_test(self, func): + tracer = JumpTracer(func) + sys.settrace(tracer.trace) + output = [] + func(output) + sys.settrace(None) + self.compare_jump_output(func.output, output) + + def test_01_jump_simple_forwards(self): + self.run_test(jump_simple_forwards) + def test_02_jump_simple_backwards(self): + self.run_test(jump_simple_backwards) + def test_03_jump_out_of_block_forwards(self): + self.run_test(jump_out_of_block_forwards) + def test_04_jump_out_of_block_backwards(self): + self.run_test(jump_out_of_block_backwards) + def test_05_jump_to_codeless_line(self): + self.run_test(jump_to_codeless_line) + def test_06_jump_to_same_line(self): + self.run_test(jump_to_same_line) + def test_07_jump_in_nested_finally(self): + self.run_test(jump_in_nested_finally) + def test_08_no_jump_too_far_forwards(self): + self.run_test(no_jump_too_far_forwards) + def test_09_no_jump_too_far_backwards(self): + self.run_test(no_jump_too_far_backwards) + def test_10_no_jump_to_except_1(self): + self.run_test(no_jump_to_except_1) + def test_11_no_jump_to_except_2(self): + self.run_test(no_jump_to_except_2) + def test_12_no_jump_to_except_3(self): + self.run_test(no_jump_to_except_3) + def test_13_no_jump_to_except_4(self): + self.run_test(no_jump_to_except_4) + def test_14_no_jump_forwards_into_block(self): + self.run_test(no_jump_forwards_into_block) + def test_15_no_jump_backwards_into_block(self): + self.run_test(no_jump_backwards_into_block) + def test_16_no_jump_into_finally_block(self): + self.run_test(no_jump_into_finally_block) + def test_17_no_jump_out_of_finally_block(self): + self.run_test(no_jump_out_of_finally_block) + def test_18_no_jump_to_non_integers(self): + self.run_test(no_jump_to_non_integers) + def test_19_no_jump_without_trace_function(self): + no_jump_without_trace_function() + +def test_main(): + test_support.run_unittest( + TraceTestCase, + RaisingTraceFuncTestCase, + JumpTestCase + ) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,156 @@ +"""Test cases for traceback module""" + +import unittest +from test.test_support import run_unittest, is_jython + +import traceback + +class TracebackCases(unittest.TestCase): + # For now, a very minimal set of tests. I want to be sure that + # formatting of SyntaxErrors works based on changes for 2.1. + + def get_exception_format(self, func, exc): + try: + func() + except exc, value: + return traceback.format_exception_only(exc, value) + else: + raise ValueError, "call did not raise exception" + + def syntax_error_with_caret(self): + compile("def fact(x):\n\treturn x!\n", "?", "exec") + + def syntax_error_without_caret(self): + # XXX why doesn't compile raise the same traceback? 
+ import test.badsyntax_nocaret + + def syntax_error_bad_indentation(self): + compile("def spam():\n print 1\n print 2", "?", "exec") + + def test_caret(self): + err = self.get_exception_format(self.syntax_error_with_caret, + SyntaxError) + self.assert_(len(err) == 4) + self.assert_(err[1].strip() == "return x!") + self.assert_("^" in err[2]) # third line has caret + self.assert_(err[1].find("!") == err[2].find("^")) # in the right place + + def test_bad_indentation(self): + err = self.get_exception_format(self.syntax_error_bad_indentation, + IndentationError) + self.assert_(len(err) == 4) + self.assert_(err[1].strip() == "print 2") + self.assert_("^" in err[2]) + self.assert_(err[1].find("2") == err[2].find("^")) + + def test_bug737473(self): + import sys, os, tempfile, time + + savedpath = sys.path[:] + testdir = tempfile.mkdtemp() + try: + sys.path.insert(0, testdir) + testfile = os.path.join(testdir, 'test_bug737473.py') + f = open(testfile, 'w') + print >> f, """ +def test(): + raise ValueError""" + + if 'test_bug737473' in sys.modules: + del sys.modules['test_bug737473'] + import test_bug737473 + + try: + test_bug737473.test() + except ValueError: + # this loads source code to linecache + traceback.extract_tb(sys.exc_traceback) + + # If this test runs too quickly, test_bug737473.py's mtime + # attribute will remain unchanged even if the file is rewritten. + # Consequently, the file would not reload. So, added a sleep() + # delay to assure that a new, distinct timestamp is written. + # Since WinME with FAT32 has multisecond resolution, more than + # three seconds are needed for this test to pass reliably :-( + time.sleep(4) + + f = open(testfile, 'w') + print >> f, """ +def test(): + raise NotImplementedError""" + f.close() + reload(test_bug737473) + try: + test_bug737473.test() + except NotImplementedError: + src = traceback.extract_tb(sys.exc_traceback)[-1][-1] + self.failUnlessEqual(src, 'raise NotImplementedError') + finally: + sys.path[:] = savedpath + for f in os.listdir(testdir): + os.unlink(os.path.join(testdir, f)) + os.rmdir(testdir) + + def test_members(self): + # Covers Python/structmember.c::listmembers() + try: + 1/0 + except: + import sys + sys.exc_traceback.__members__ + + def test_base_exception(self): + # Test that exceptions derived from BaseException are formatted right + e = KeyboardInterrupt() + lst = traceback.format_exception_only(e.__class__, e) + self.assertEqual(lst, ['KeyboardInterrupt\n']) + + # String exceptions are deprecated, but legal. The quirky form with + # separate "type" and "value" tends to break things, because + # not isinstance(value, type) + # and a string cannot be the first argument to issubclass. + # + # Note that sys.last_type and sys.last_value do not get set if an + # exception is caught, so we sort of cheat and just emulate them. 
+ # + # test_string_exception1 is equivalent to + # + # >>> raise "String Exception" + # + # test_string_exception2 is equivalent to + # + # >>> raise "String Exception", "String Value" + # + def test_string_exception1(self): + str_type = "String Exception" + err = traceback.format_exception_only(str_type, None) + self.assertEqual(len(err), 1) + self.assertEqual(err[0], str_type + '\n') + + def test_string_exception2(self): + str_type = "String Exception" + str_value = "String Value" + err = traceback.format_exception_only(str_type, str_value) + self.assertEqual(len(err), 1) + self.assertEqual(err[0], str_type + ': ' + str_value + '\n') + + def test_format_exception_only_bad__str__(self): + class X(Exception): + def __str__(self): + 1/0 + err = traceback.format_exception_only(X, X()) + self.assertEqual(len(err), 1) + str_value = '' % X.__name__ + self.assertEqual(err[0], X.__name__ + ': ' + str_value + '\n') + + def test_without_exception(self): + err = traceback.format_exception_only(None, None) + self.assertEqual(err, ['None\n']) + + +def test_main(): + run_unittest(TracebackCases) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,225 @@ +""" Test script for the unicodedata module. + + Written by Marc-Andre Lemburg (mal at lemburg.com). + + (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +"""#" +import unittest, test.test_support +import hashlib + +encoding = 'utf-8' + + +### Run tests + +class UnicodeMethodsTest(unittest.TestCase): + + # update this, if the database changes + expectedchecksum = '9f6a3e76196a8327ccf95d2d6404880be2ab5c2f' + + def test_method_checksum(self): + h = hashlib.sha1() + for i in range(65536): + char = unichr(i) + data = [ + # Predicates (single char) + u"01"[char.isalnum()], + u"01"[char.isalpha()], + u"01"[char.isdecimal()], + u"01"[char.isdigit()], + u"01"[char.islower()], + u"01"[char.isnumeric()], + u"01"[char.isspace()], + u"01"[char.istitle()], + u"01"[char.isupper()], + + # Predicates (multiple chars) + u"01"[(char + u'abc').isalnum()], + u"01"[(char + u'abc').isalpha()], + u"01"[(char + u'123').isdecimal()], + u"01"[(char + u'123').isdigit()], + u"01"[(char + u'abc').islower()], + u"01"[(char + u'123').isnumeric()], + u"01"[(char + u' \t').isspace()], + u"01"[(char + u'abc').istitle()], + u"01"[(char + u'ABC').isupper()], + + # Mappings (single char) + char.lower(), + char.upper(), + char.title(), + + # Mappings (multiple chars) + (char + u'abc').lower(), + (char + u'ABC').upper(), + (char + u'abc').title(), + (char + u'ABC').title(), + + ] + h.update(u''.join(data).encode(encoding)) + result = h.hexdigest() + self.assertEqual(result, self.expectedchecksum) + +class UnicodeDatabaseTest(unittest.TestCase): + + def setUp(self): + # In case unicodedata is not available, this will raise an ImportError, + # but the other test cases will still be run + import unicodedata + self.db = unicodedata + + def tearDown(self): + del self.db + +class UnicodeFunctionsTest(UnicodeDatabaseTest): + + # update this, if the database changes + expectedchecksum = 'c05cbc8b0d87b2f102fba8d832e21aca3ad6df2f' + + def test_function_checksum(self): + data = [] + h = hashlib.sha1() + + for i in range(0x10000): + char = unichr(i) + data = [ + # Properties + str(self.db.digit(char, -1)), + 
str(self.db.numeric(char, -1)), + str(self.db.decimal(char, -1)), + self.db.category(char), + self.db.bidirectional(char), + self.db.decomposition(char), + str(self.db.mirrored(char)), + str(self.db.combining(char)), + ] + h.update(''.join(data)) + result = h.hexdigest() + self.assertEqual(result, self.expectedchecksum) + + def test_digit(self): + self.assertEqual(self.db.digit(u'A', None), None) + self.assertEqual(self.db.digit(u'9'), 9) + self.assertEqual(self.db.digit(u'\u215b', None), None) + self.assertEqual(self.db.digit(u'\u2468'), 9) + + self.assertRaises(TypeError, self.db.digit) + self.assertRaises(TypeError, self.db.digit, u'xx') + self.assertRaises(ValueError, self.db.digit, u'x') + + def test_numeric(self): + self.assertEqual(self.db.numeric(u'A',None), None) + self.assertEqual(self.db.numeric(u'9'), 9) + self.assertEqual(self.db.numeric(u'\u215b'), 0.125) + self.assertEqual(self.db.numeric(u'\u2468'), 9.0) + + self.assertRaises(TypeError, self.db.numeric) + self.assertRaises(TypeError, self.db.numeric, u'xx') + self.assertRaises(ValueError, self.db.numeric, u'x') + + def test_decimal(self): + self.assertEqual(self.db.decimal(u'A',None), None) + self.assertEqual(self.db.decimal(u'9'), 9) + self.assertEqual(self.db.decimal(u'\u215b', None), None) + self.assertEqual(self.db.decimal(u'\u2468', None), None) + + self.assertRaises(TypeError, self.db.decimal) + self.assertRaises(TypeError, self.db.decimal, u'xx') + self.assertRaises(ValueError, self.db.decimal, u'x') + + def test_category(self): + self.assertEqual(self.db.category(u'\uFFFE'), 'Cn') + self.assertEqual(self.db.category(u'a'), 'Ll') + self.assertEqual(self.db.category(u'A'), 'Lu') + + self.assertRaises(TypeError, self.db.category) + self.assertRaises(TypeError, self.db.category, u'xx') + + def test_bidirectional(self): + self.assertEqual(self.db.bidirectional(u'\uFFFE'), '') + self.assertEqual(self.db.bidirectional(u' '), 'WS') + self.assertEqual(self.db.bidirectional(u'A'), 'L') + + self.assertRaises(TypeError, self.db.bidirectional) + self.assertRaises(TypeError, self.db.bidirectional, u'xx') + + def test_decomposition(self): + self.assertEqual(self.db.decomposition(u'\uFFFE'),'') + self.assertEqual(self.db.decomposition(u'\u00bc'), ' 0031 2044 0034') + + self.assertRaises(TypeError, self.db.decomposition) + self.assertRaises(TypeError, self.db.decomposition, u'xx') + + def test_mirrored(self): + self.assertEqual(self.db.mirrored(u'\uFFFE'), 0) + self.assertEqual(self.db.mirrored(u'a'), 0) + self.assertEqual(self.db.mirrored(u'\u2201'), 1) + + self.assertRaises(TypeError, self.db.mirrored) + self.assertRaises(TypeError, self.db.mirrored, u'xx') + + def test_combining(self): + self.assertEqual(self.db.combining(u'\uFFFE'), 0) + self.assertEqual(self.db.combining(u'a'), 0) + self.assertEqual(self.db.combining(u'\u20e1'), 230) + + self.assertRaises(TypeError, self.db.combining) + self.assertRaises(TypeError, self.db.combining, u'xx') + + def test_normalize(self): + self.assertRaises(TypeError, self.db.normalize) + self.assertRaises(ValueError, self.db.normalize, 'unknown', u'xx') + self.assertEqual(self.db.normalize('NFKC', u''), u'') + # The rest can be found in test_normalization.py + # which requires an external file. 
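For a concrete single-character illustration, using the same character whose decomposition test_decomposition checks above (and assuming the usual Unicode data): NFC leaves U+00BC alone, while NFKC expands its compatibility decomposition.

import unicodedata

print repr(unicodedata.normalize('NFC',  u'\u00bc'))   # u'\xbc', unchanged
print repr(unicodedata.normalize('NFKC', u'\u00bc'))   # u'1\u20444': '1', FRACTION SLASH, '4'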
+ + def test_east_asian_width(self): + eaw = self.db.east_asian_width + self.assertRaises(TypeError, eaw, 'a') + self.assertRaises(TypeError, eaw, u'') + self.assertRaises(TypeError, eaw, u'ra') + self.assertEqual(eaw(u'\x1e'), 'N') + self.assertEqual(eaw(u'\x20'), 'Na') + self.assertEqual(eaw(u'\uC894'), 'W') + self.assertEqual(eaw(u'\uFF66'), 'H') + self.assertEqual(eaw(u'\uFF1F'), 'F') + self.assertEqual(eaw(u'\u2010'), 'A') + +class UnicodeMiscTest(UnicodeDatabaseTest): + + def test_decimal_numeric_consistent(self): + # Test that decimal and numeric are consistent, + # i.e. if a character has a decimal value, + # its numeric value should be the same. + count = 0 + for i in xrange(0x10000): + c = unichr(i) + dec = self.db.decimal(c, -1) + if dec != -1: + self.assertEqual(dec, self.db.numeric(c)) + count += 1 + self.assert_(count >= 10) # should have tested at least the ASCII digits + + def test_digit_numeric_consistent(self): + # Test that digit and numeric are consistent, + # i.e. if a character has a digit value, + # its numeric value should be the same. + count = 0 + for i in xrange(0x10000): + c = unichr(i) + dec = self.db.digit(c, -1) + if dec != -1: + self.assertEqual(dec, self.db.numeric(c)) + count += 1 + self.assert_(count >= 10) # should have tested at least the ASCII digits + +def test_main(): + test.test_support.run_unittest( + UnicodeMiscTest, + UnicodeMethodsTest, + UnicodeFunctionsTest + ) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,131 @@ +doctests = """ + +Unpack tuple + + >>> t = (1, 2, 3) + >>> a, b, c = t + >>> a == 1 and b == 2 and c == 3 + True + +Unpack list + + >>> l = [4, 5, 6] + >>> a, b, c = l + >>> a == 4 and b == 5 and c == 6 + True + +Unpack implied tuple + + >>> a, b, c = 7, 8, 9 + >>> a == 7 and b == 8 and c == 9 + True + +Unpack string... fun! + + >>> a, b, c = 'one' + >>> a == 'o' and b == 'n' and c == 'e' + True + +Unpack generic sequence + + >>> class Seq: + ... def __getitem__(self, i): + ... if i >= 0 and i < 3: return i + ... raise IndexError + ... + >>> a, b, c = Seq() + >>> a == 0 and b == 1 and c == 2 + True + +Single element unpacking, with extra syntax + + >>> st = (99,) + >>> sl = [100] + >>> a, = st + >>> a + 99 + >>> b, = sl + >>> b + 100 + +Now for some failures + +Unpacking non-sequence + + >>> a, b, c = 7 + Traceback (most recent call last): + ... + TypeError: iteration over non-sequence + +Unpacking tuple of wrong size + + >>> a, b = t + Traceback (most recent call last): + ... + ValueError: too many values to unpack + +Unpacking tuple of wrong size + + >>> a, b = l + Traceback (most recent call last): + ... + ValueError: too many values to unpack + +Unpacking sequence too short + + >>> a, b, c, d = Seq() + Traceback (most recent call last): + ... + ValueError: need more than 3 values to unpack + +Unpacking sequence too long + + >>> a, b = Seq() + Traceback (most recent call last): + ... + ValueError: too many values to unpack + +Unpacking a sequence where the test for too long raises a different kind of +error + + >>> class BozoError(Exception): + ... pass + ... + >>> class BadSeq: + ... def __getitem__(self, i): + ... if i >= 0 and i < 3: + ... return i + ... elif i == 3: + ... raise BozoError + ... else: + ... raise IndexError + ... 
+ +Trigger code while not expecting an IndexError (unpack sequence too long, wrong +error) + + >>> a, b, c, d, e = BadSeq() + Traceback (most recent call last): + ... + BozoError + +Trigger code while expecting an IndexError (unpack sequence too short, wrong +error) + + >>> a, b, c = BadSeq() + Traceback (most recent call last): + ... + BozoError + +""" + +__test__ = {'doctests' : doctests} + +def test_main(verbose=False): + import sys + from test import test_support + from test import test_unpack + test_support.run_doctest(test_unpack, verbose) + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,1057 @@ +import unittest +from test import test_support + +import os, socket +import StringIO + +import urllib2 +from urllib2 import Request, OpenerDirector + +# XXX +# Request +# CacheFTPHandler (hard to write) +# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler + +class TrivialTests(unittest.TestCase): + def test_trivial(self): + # A couple trivial tests + + self.assertRaises(ValueError, urllib2.urlopen, 'bogus url') + + # XXX Name hacking to get this to work on Windows. + fname = os.path.abspath(urllib2.__file__).replace('\\', '/') + if fname[1:2] == ":": + fname = fname[2:] + # And more hacking to get it to work on MacOS. This assumes + # urllib.pathname2url works, unfortunately... + if os.name == 'mac': + fname = '/' + fname.replace(':', '/') + elif os.name == 'riscos': + import string + fname = os.expand(fname) + fname = fname.translate(string.maketrans("/.", "./")) + + file_url = "file://%s" % fname + f = urllib2.urlopen(file_url) + + buf = f.read() + f.close() + + def test_parse_http_list(self): + tests = [('a,b,c', ['a', 'b', 'c']), + ('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']), + ('a, b, "c", "d", "e,f", g, h', ['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']), + ('a="b\\"c", d="e\\,f", g="h\\\\i"', ['a="b"c"', 'd="e,f"', 'g="h\\i"'])] + for string, list in tests: + self.assertEquals(urllib2.parse_http_list(string), list) + + +def test_request_headers_dict(): + """ + The Request.headers dictionary is not a documented interface. It should + stay that way, because the complete set of headers are only accessible + through the .get_header(), .has_header(), .header_items() interface. + However, .headers pre-dates those methods, and so real code will be using + the dictionary. + + The introduction in 2.4 of those methods was a mistake for the same reason: + code that previously saw all (urllib2 user)-provided headers in .headers + now sees only a subset (and the function interface is ugly and incomplete). + A better change would have been to replace .headers dict with a dict + subclass (or UserDict.DictMixin instance?) that preserved the .headers + interface and also provided access to the "unredirected" headers. It's + probably too late to fix that, though. + + + Check .capitalize() case normalization: + + >>> url = "http://example.com" + >>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"] + 'blah' + >>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"] + 'blah' + + Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError, + but that could be changed in future. 
+ + """ + +def test_request_headers_methods(): + """ + Note the case normalization of header names here, to .capitalize()-case. + This should be preserved for backwards-compatibility. (In the HTTP case, + normalization to .title()-case is done by urllib2 before sending headers to + httplib). + + >>> url = "http://example.com" + >>> r = Request(url, headers={"Spam-eggs": "blah"}) + >>> r.has_header("Spam-eggs") + True + >>> r.header_items() + [('Spam-eggs', 'blah')] + >>> r.add_header("Foo-Bar", "baz") + >>> items = r.header_items() + >>> items.sort() + >>> items + [('Foo-bar', 'baz'), ('Spam-eggs', 'blah')] + + Note that e.g. r.has_header("spam-EggS") is currently False, and + r.get_header("spam-EggS") returns None, but that could be changed in + future. + + >>> r.has_header("Not-there") + False + >>> print r.get_header("Not-there") + None + >>> r.get_header("Not-there", "default") + 'default' + + """ + + +def test_password_manager(self): + """ + >>> mgr = urllib2.HTTPPasswordMgr() + >>> add = mgr.add_password + >>> add("Some Realm", "http://example.com/", "joe", "password") + >>> add("Some Realm", "http://example.com/ni", "ni", "ni") + >>> add("c", "http://example.com/foo", "foo", "ni") + >>> add("c", "http://example.com/bar", "bar", "nini") + >>> add("b", "http://example.com/", "first", "blah") + >>> add("b", "http://example.com/", "second", "spam") + >>> add("a", "http://example.com", "1", "a") + >>> add("Some Realm", "http://c.example.com:3128", "3", "c") + >>> add("Some Realm", "d.example.com", "4", "d") + >>> add("Some Realm", "e.example.com:3128", "5", "e") + + >>> mgr.find_user_password("Some Realm", "example.com") + ('joe', 'password') + >>> mgr.find_user_password("Some Realm", "http://example.com") + ('joe', 'password') + >>> mgr.find_user_password("Some Realm", "http://example.com/") + ('joe', 'password') + >>> mgr.find_user_password("Some Realm", "http://example.com/spam") + ('joe', 'password') + >>> mgr.find_user_password("Some Realm", "http://example.com/spam/spam") + ('joe', 'password') + >>> mgr.find_user_password("c", "http://example.com/foo") + ('foo', 'ni') + >>> mgr.find_user_password("c", "http://example.com/bar") + ('bar', 'nini') + + Actually, this is really undefined ATM +## Currently, we use the highest-level path where more than one match: + +## >>> mgr.find_user_password("Some Realm", "http://example.com/ni") +## ('joe', 'password') + + Use latest add_password() in case of conflict: + + >>> mgr.find_user_password("b", "http://example.com/") + ('second', 'spam') + + No special relationship between a.example.com and example.com: + + >>> mgr.find_user_password("a", "http://example.com/") + ('1', 'a') + >>> mgr.find_user_password("a", "http://a.example.com/") + (None, None) + + Ports: + + >>> mgr.find_user_password("Some Realm", "c.example.com") + (None, None) + >>> mgr.find_user_password("Some Realm", "c.example.com:3128") + ('3', 'c') + >>> mgr.find_user_password("Some Realm", "http://c.example.com:3128") + ('3', 'c') + >>> mgr.find_user_password("Some Realm", "d.example.com") + ('4', 'd') + >>> mgr.find_user_password("Some Realm", "e.example.com:3128") + ('5', 'e') + + """ + pass + + +def test_password_manager_default_port(self): + """ + >>> mgr = urllib2.HTTPPasswordMgr() + >>> add = mgr.add_password + + The point to note here is that we can't guess the default port if there's + no scheme. This applies to both add_password and find_user_password. 
+ + >>> add("f", "http://g.example.com:80", "10", "j") + >>> add("g", "http://h.example.com", "11", "k") + >>> add("h", "i.example.com:80", "12", "l") + >>> add("i", "j.example.com", "13", "m") + >>> mgr.find_user_password("f", "g.example.com:100") + (None, None) + >>> mgr.find_user_password("f", "g.example.com:80") + ('10', 'j') + >>> mgr.find_user_password("f", "g.example.com") + (None, None) + >>> mgr.find_user_password("f", "http://g.example.com:100") + (None, None) + >>> mgr.find_user_password("f", "http://g.example.com:80") + ('10', 'j') + >>> mgr.find_user_password("f", "http://g.example.com") + ('10', 'j') + >>> mgr.find_user_password("g", "h.example.com") + ('11', 'k') + >>> mgr.find_user_password("g", "h.example.com:80") + ('11', 'k') + >>> mgr.find_user_password("g", "http://h.example.com:80") + ('11', 'k') + >>> mgr.find_user_password("h", "i.example.com") + (None, None) + >>> mgr.find_user_password("h", "i.example.com:80") + ('12', 'l') + >>> mgr.find_user_password("h", "http://i.example.com:80") + ('12', 'l') + >>> mgr.find_user_password("i", "j.example.com") + ('13', 'm') + >>> mgr.find_user_password("i", "j.example.com:80") + (None, None) + >>> mgr.find_user_password("i", "http://j.example.com") + ('13', 'm') + >>> mgr.find_user_password("i", "http://j.example.com:80") + (None, None) + + """ + +class MockOpener: + addheaders = [] + def open(self, req, data=None): + self.req, self.data = req, data + def error(self, proto, *args): + self.proto, self.args = proto, args + +class MockFile: + def read(self, count=None): pass + def readline(self, count=None): pass + def close(self): pass + +class MockHeaders(dict): + def getheaders(self, name): + return self.values() + +class MockResponse(StringIO.StringIO): + def __init__(self, code, msg, headers, data, url=None): + StringIO.StringIO.__init__(self, data) + self.code, self.msg, self.headers, self.url = code, msg, headers, url + def info(self): + return self.headers + def geturl(self): + return self.url + +class MockCookieJar: + def add_cookie_header(self, request): + self.ach_req = request + def extract_cookies(self, response, request): + self.ec_req, self.ec_r = request, response + +class FakeMethod: + def __init__(self, meth_name, action, handle): + self.meth_name = meth_name + self.handle = handle + self.action = action + def __call__(self, *args): + return self.handle(self.meth_name, self.action, *args) + +class MockHandler: + # useful for testing handler machinery + # see add_ordered_mock_handlers() docstring + handler_order = 500 + def __init__(self, methods): + self._define_methods(methods) + def _define_methods(self, methods): + for spec in methods: + if len(spec) == 2: name, action = spec + else: name, action = spec, None + meth = FakeMethod(name, action, self.handle) + setattr(self.__class__, name, meth) + def handle(self, fn_name, action, *args, **kwds): + self.parent.calls.append((self, fn_name, args, kwds)) + if action is None: + return None + elif action == "return self": + return self + elif action == "return response": + res = MockResponse(200, "OK", {}, "") + return res + elif action == "return request": + return Request("http://blah/") + elif action.startswith("error"): + code = action[action.rfind(" ")+1:] + try: + code = int(code) + except ValueError: + pass + res = MockResponse(200, "OK", {}, "") + return self.parent.error("http", args[0], res, code, "", {}) + elif action == "raise": + raise urllib2.URLError("blah") + assert False + def close(self): pass + def add_parent(self, parent): + self.parent = parent + 
self.parent.calls = [] + def __lt__(self, other): + if not hasattr(other, "handler_order"): + # No handler_order, leave in original order. Yuck. + return True + return self.handler_order < other.handler_order + +def add_ordered_mock_handlers(opener, meth_spec): + """Create MockHandlers and add them to an OpenerDirector. + + meth_spec: list of lists of tuples and strings defining methods to define + on handlers. eg: + + [["http_error", "ftp_open"], ["http_open"]] + + defines methods .http_error() and .ftp_open() on one handler, and + .http_open() on another. These methods just record their arguments and + return None. Using a tuple instead of a string causes the method to + perform some action (see MockHandler.handle()), eg: + + [["http_error"], [("http_open", "return request")]] + + defines .http_error() on one handler (which simply returns None), and + .http_open() on another handler, which returns a Request object. + + """ + handlers = [] + count = 0 + for meths in meth_spec: + class MockHandlerSubclass(MockHandler): pass + h = MockHandlerSubclass(meths) + h.handler_order += count + h.add_parent(opener) + count = count + 1 + handlers.append(h) + opener.add_handler(h) + return handlers + +def build_test_opener(*handler_instances): + opener = OpenerDirector() + for h in handler_instances: + opener.add_handler(h) + return opener + +class MockHTTPHandler(urllib2.BaseHandler): + # useful for testing redirections and auth + # sends supplied headers and code as first response + # sends 200 OK as second response + def __init__(self, code, headers): + self.code = code + self.headers = headers + self.reset() + def reset(self): + self._count = 0 + self.requests = [] + def http_open(self, req): + import mimetools, httplib, copy + from StringIO import StringIO + self.requests.append(copy.deepcopy(req)) + if self._count == 0: + self._count = self._count + 1 + name = httplib.responses[self.code] + msg = mimetools.Message(StringIO(self.headers)) + return self.parent.error( + "http", req, MockFile(), self.code, name, msg) + else: + self.req = req + msg = mimetools.Message(StringIO("\r\n\r\n")) + return MockResponse(200, "OK", msg, "", req.get_full_url()) + +class MockPasswordManager: + def add_password(self, realm, uri, user, password): + self.realm = realm + self.url = uri + self.user = user + self.password = password + def find_user_password(self, realm, authuri): + self.target_realm = realm + self.target_url = authuri + return self.user, self.password + + +class OpenerDirectorTests(unittest.TestCase): + + def test_badly_named_methods(self): + # test work-around for three methods that accidentally follow the + # naming conventions for handler methods + # (*_open() / *_request() / *_response()) + + # These used to call the accidentally-named methods, causing a + # TypeError in real code; here, returning self from these mock + # methods would either cause no exception, or AttributeError. 
+ + from urllib2 import URLError + + o = OpenerDirector() + meth_spec = [ + [("do_open", "return self"), ("proxy_open", "return self")], + [("redirect_request", "return self")], + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + o.add_handler(urllib2.UnknownHandler()) + for scheme in "do", "proxy", "redirect": + self.assertRaises(URLError, o.open, scheme+"://example.com/") + + def test_handled(self): + # handler returning non-None means no more handlers will be called + o = OpenerDirector() + meth_spec = [ + ["http_open", "ftp_open", "http_error_302"], + ["ftp_open"], + [("http_open", "return self")], + [("http_open", "return self")], + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + + req = Request("http://example.com/") + r = o.open(req) + # Second .http_open() gets called, third doesn't, since second returned + # non-None. Handlers without .http_open() never get any methods called + # on them. + # In fact, second mock handler defining .http_open() returns self + # (instead of response), which becomes the OpenerDirector's return + # value. + self.assertEqual(r, handlers[2]) + calls = [(handlers[0], "http_open"), (handlers[2], "http_open")] + for expected, got in zip(calls, o.calls): + handler, name, args, kwds = got + self.assertEqual((handler, name), expected) + self.assertEqual(args, (req,)) + + def test_handler_order(self): + o = OpenerDirector() + handlers = [] + for meths, handler_order in [ + ([("http_open", "return self")], 500), + (["http_open"], 0), + ]: + class MockHandlerSubclass(MockHandler): pass + h = MockHandlerSubclass(meths) + h.handler_order = handler_order + handlers.append(h) + o.add_handler(h) + + r = o.open("http://example.com/") + # handlers called in reverse order, thanks to their sort order + self.assertEqual(o.calls[0][0], handlers[1]) + self.assertEqual(o.calls[1][0], handlers[0]) + + def test_raise(self): + # raising URLError stops processing of request + o = OpenerDirector() + meth_spec = [ + [("http_open", "raise")], + [("http_open", "return self")], + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + + req = Request("http://example.com/") + self.assertRaises(urllib2.URLError, o.open, req) + self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})]) + +## def test_error(self): +## # XXX this doesn't actually seem to be used in standard library, +## # but should really be tested anyway... 
+ + def test_http_error(self): + # XXX http_error_default + # http errors are a special case + o = OpenerDirector() + meth_spec = [ + [("http_open", "error 302")], + [("http_error_400", "raise"), "http_open"], + [("http_error_302", "return response"), "http_error_303", + "http_error"], + [("http_error_302")], + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + + class Unknown: + def __eq__(self, other): return True + + req = Request("http://example.com/") + r = o.open(req) + assert len(o.calls) == 2 + calls = [(handlers[0], "http_open", (req,)), + (handlers[2], "http_error_302", + (req, Unknown(), 302, "", {}))] + for expected, got in zip(calls, o.calls): + handler, method_name, args = expected + self.assertEqual((handler, method_name), got[:2]) + self.assertEqual(args, got[2]) + + def test_processors(self): + # *_request / *_response methods get called appropriately + o = OpenerDirector() + meth_spec = [ + [("http_request", "return request"), + ("http_response", "return response")], + [("http_request", "return request"), + ("http_response", "return response")], + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + + req = Request("http://example.com/") + r = o.open(req) + # processor methods are called on *all* handlers that define them, + # not just the first handler that handles the request + calls = [ + (handlers[0], "http_request"), (handlers[1], "http_request"), + (handlers[0], "http_response"), (handlers[1], "http_response")] + + for i, (handler, name, args, kwds) in enumerate(o.calls): + if i < 2: + # *_request + self.assertEqual((handler, name), calls[i]) + self.assertEqual(len(args), 1) + self.assert_(isinstance(args[0], Request)) + else: + # *_response + self.assertEqual((handler, name), calls[i]) + self.assertEqual(len(args), 2) + self.assert_(isinstance(args[0], Request)) + # response from opener.open is None, because there's no + # handler that defines http_open to handle it + self.assert_(args[1] is None or + isinstance(args[1], MockResponse)) + + +def sanepathname2url(path): + import urllib + urlpath = urllib.pathname2url(path) + if os.name == "nt" and urlpath.startswith("///"): + urlpath = urlpath[2:] + # XXX don't ask me about the mac... 
+ return urlpath + +class HandlerTests(unittest.TestCase): + + def test_ftp(self): + class MockFTPWrapper: + def __init__(self, data): self.data = data + def retrfile(self, filename, filetype): + self.filename, self.filetype = filename, filetype + return StringIO.StringIO(self.data), len(self.data) + + class NullFTPHandler(urllib2.FTPHandler): + def __init__(self, data): self.data = data + def connect_ftp(self, user, passwd, host, port, dirs): + self.user, self.passwd = user, passwd + self.host, self.port = host, port + self.dirs = dirs + self.ftpwrapper = MockFTPWrapper(self.data) + return self.ftpwrapper + + import ftplib, socket + data = "rheum rhaponicum" + h = NullFTPHandler(data) + o = h.parent = MockOpener() + + for url, host, port, type_, dirs, filename, mimetype in [ + ("ftp://localhost/foo/bar/baz.html", + "localhost", ftplib.FTP_PORT, "I", + ["foo", "bar"], "baz.html", "text/html"), + ("ftp://localhost:80/foo/bar/", + "localhost", 80, "D", + ["foo", "bar"], "", None), + ("ftp://localhost/baz.gif;type=a", + "localhost", ftplib.FTP_PORT, "A", + [], "baz.gif", None), # XXX really this should guess image/gif + ]: + r = h.ftp_open(Request(url)) + # ftp authentication not yet implemented by FTPHandler + self.assert_(h.user == h.passwd == "") + self.assertEqual(h.host, socket.gethostbyname(host)) + self.assertEqual(h.port, port) + self.assertEqual(h.dirs, dirs) + self.assertEqual(h.ftpwrapper.filename, filename) + self.assertEqual(h.ftpwrapper.filetype, type_) + headers = r.info() + self.assertEqual(headers.get("Content-type"), mimetype) + self.assertEqual(int(headers["Content-length"]), len(data)) + + def test_file(self): + import time, rfc822, socket + h = urllib2.FileHandler() + o = h.parent = MockOpener() + + TESTFN = test_support.TESTFN + urlpath = sanepathname2url(os.path.abspath(TESTFN)) + towrite = "hello, world\n" + urls = [ + "file://localhost%s" % urlpath, + "file://%s" % urlpath, + "file://%s%s" % (socket.gethostbyname('localhost'), urlpath), + ] + try: + localaddr = socket.gethostbyname(socket.gethostname()) + except socket.gaierror: + localaddr = '' + if localaddr: + urls.append("file://%s%s" % (localaddr, urlpath)) + + for url in urls: + f = open(TESTFN, "wb") + try: + try: + f.write(towrite) + finally: + f.close() + + r = h.file_open(Request(url)) + try: + data = r.read() + headers = r.info() + newurl = r.geturl() + finally: + r.close() + stats = os.stat(TESTFN) + modified = rfc822.formatdate(stats.st_mtime) + finally: + os.remove(TESTFN) + self.assertEqual(data, towrite) + self.assertEqual(headers["Content-type"], "text/plain") + self.assertEqual(headers["Content-length"], "13") + self.assertEqual(headers["Last-modified"], modified) + + for url in [ + "file://localhost:80%s" % urlpath, +# XXXX bug: these fail with socket.gaierror, should be URLError +## "file://%s:80%s/%s" % (socket.gethostbyname('localhost'), +## os.getcwd(), TESTFN), +## "file://somerandomhost.ontheinternet.com%s/%s" % +## (os.getcwd(), TESTFN), + ]: + try: + f = open(TESTFN, "wb") + try: + f.write(towrite) + finally: + f.close() + + self.assertRaises(urllib2.URLError, + h.file_open, Request(url)) + finally: + os.remove(TESTFN) + + h = urllib2.FileHandler() + o = h.parent = MockOpener() + # XXXX why does // mean ftp (and /// mean not ftp!), and where + # is file: scheme specified? 
I think this is really a bug, and + # what was intended was to distinguish between URLs like: + # file:/blah.txt (a file) + # file://localhost/blah.txt (a file) + # file:///blah.txt (a file) + # file://ftp.example.com/blah.txt (an ftp URL) + for url, ftp in [ + ("file://ftp.example.com//foo.txt", True), + ("file://ftp.example.com///foo.txt", False), +# XXXX bug: fails with OSError, should be URLError + ("file://ftp.example.com/foo.txt", False), + ]: + req = Request(url) + try: + h.file_open(req) + # XXXX remove OSError when bug fixed + except (urllib2.URLError, OSError): + self.assert_(not ftp) + else: + self.assert_(o.req is req) + self.assertEqual(req.type, "ftp") + + def test_http(self): + class MockHTTPResponse: + def __init__(self, fp, msg, status, reason): + self.fp = fp + self.msg = msg + self.status = status + self.reason = reason + def read(self): + return '' + class MockHTTPClass: + def __init__(self): + self.req_headers = [] + self.data = None + self.raise_on_endheaders = False + def __call__(self, host): + self.host = host + return self + def set_debuglevel(self, level): + self.level = level + def request(self, method, url, body=None, headers={}): + self.method = method + self.selector = url + self.req_headers += headers.items() + self.req_headers.sort() + if body: + self.data = body + if self.raise_on_endheaders: + import socket + raise socket.error() + def getresponse(self): + return MockHTTPResponse(MockFile(), {}, 200, "OK") + + h = urllib2.AbstractHTTPHandler() + o = h.parent = MockOpener() + + url = "http://example.com/" + for method, data in [("GET", None), ("POST", "blah")]: + req = Request(url, data, {"Foo": "bar"}) + req.add_unredirected_header("Spam", "eggs") + http = MockHTTPClass() + r = h.do_open(http, req) + + # result attributes + r.read; r.readline # wrapped MockFile methods + r.info; r.geturl # addinfourl methods + r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply() + hdrs = r.info() + hdrs.get; hdrs.has_key # r.info() gives dict from .getreply() + self.assertEqual(r.geturl(), url) + + self.assertEqual(http.host, "example.com") + self.assertEqual(http.level, 0) + self.assertEqual(http.method, method) + self.assertEqual(http.selector, "/") + self.assertEqual(http.req_headers, + [("Connection", "close"), + ("Foo", "bar"), ("Spam", "eggs")]) + self.assertEqual(http.data, data) + + # check socket.error converted to URLError + http.raise_on_endheaders = True + + # The raising of socket.error is not recognized as an + # exception by pypy, so this test fails + # self.assertRaises(urllib2.URLError, h.do_open, http, req) + + # check adding of standard headers + o.addheaders = [("Spam", "eggs")] + for data in "", None: # POST, GET + req = Request("http://example.com/", data) + r = MockResponse(200, "OK", {}, "") + newreq = h.do_request_(req) + if data is None: # GET + self.assert_("Content-length" not in req.unredirected_hdrs) + self.assert_("Content-type" not in req.unredirected_hdrs) + else: # POST + self.assertEqual(req.unredirected_hdrs["Content-length"], "0") + self.assertEqual(req.unredirected_hdrs["Content-type"], + "application/x-www-form-urlencoded") + # XXX the details of Host could be better tested + self.assertEqual(req.unredirected_hdrs["Host"], "example.com") + self.assertEqual(req.unredirected_hdrs["Spam"], "eggs") + + # don't clobber existing headers + req.add_unredirected_header("Content-length", "foo") + req.add_unredirected_header("Content-type", "bar") + req.add_unredirected_header("Host", "baz") + req.add_unredirected_header("Spam", 
"foo") + newreq = h.do_request_(req) + self.assertEqual(req.unredirected_hdrs["Content-length"], "foo") + self.assertEqual(req.unredirected_hdrs["Content-type"], "bar") + self.assertEqual(req.unredirected_hdrs["Host"], "baz") + self.assertEqual(req.unredirected_hdrs["Spam"], "foo") + + def test_errors(self): + h = urllib2.HTTPErrorProcessor() + o = h.parent = MockOpener() + + url = "http://example.com/" + req = Request(url) + # 200 OK is passed through + r = MockResponse(200, "OK", {}, "", url) + newr = h.http_response(req, r) + self.assert_(r is newr) + self.assert_(not hasattr(o, "proto")) # o.error not called + # anything else calls o.error (and MockOpener returns None, here) + r = MockResponse(201, "Created", {}, "", url) + self.assert_(h.http_response(req, r) is None) + self.assertEqual(o.proto, "http") # o.error called + self.assertEqual(o.args, (req, r, 201, "Created", {})) + + def test_cookies(self): + cj = MockCookieJar() + h = urllib2.HTTPCookieProcessor(cj) + o = h.parent = MockOpener() + + req = Request("http://example.com/") + r = MockResponse(200, "OK", {}, "") + newreq = h.http_request(req) + self.assert_(cj.ach_req is req is newreq) + self.assertEquals(req.get_origin_req_host(), "example.com") + self.assert_(not req.is_unverifiable()) + newr = h.http_response(req, r) + self.assert_(cj.ec_req is req) + self.assert_(cj.ec_r is r is newr) + + def test_redirect(self): + from_url = "http://example.com/a.html" + to_url = "http://example.com/b.html" + h = urllib2.HTTPRedirectHandler() + o = h.parent = MockOpener() + + # ordinary redirect behaviour + for code in 301, 302, 303, 307: + for data in None, "blah\nblah\n": + method = getattr(h, "http_error_%s" % code) + req = Request(from_url, data) + req.add_header("Nonsense", "viking=withhold") + req.add_unredirected_header("Spam", "spam") + try: + method(req, MockFile(), code, "Blah", + MockHeaders({"location": to_url})) + except urllib2.HTTPError: + # 307 in response to POST requires user OK + self.assert_(code == 307 and data is not None) + self.assertEqual(o.req.get_full_url(), to_url) + try: + self.assertEqual(o.req.get_method(), "GET") + except AttributeError: + self.assert_(not o.req.has_data()) + self.assertEqual(o.req.headers["Nonsense"], + "viking=withhold") + self.assert_("Spam" not in o.req.headers) + self.assert_("Spam" not in o.req.unredirected_hdrs) + + # loop detection + req = Request(from_url) + def redirect(h, req, url=to_url): + h.http_error_302(req, MockFile(), 302, "Blah", + MockHeaders({"location": url})) + # Note that the *original* request shares the same record of + # redirections with the sub-requests caused by the redirections. 
+ + # detect infinite loop redirect of a URL to itself + req = Request(from_url, origin_req_host="example.com") + count = 0 + try: + while 1: + redirect(h, req, "http://example.com/") + count = count + 1 + except urllib2.HTTPError: + # don't stop until max_repeats, because cookies may introduce state + self.assertEqual(count, urllib2.HTTPRedirectHandler.max_repeats) + + # detect endless non-repeating chain of redirects + req = Request(from_url, origin_req_host="example.com") + count = 0 + try: + while 1: + redirect(h, req, "http://example.com/%d" % count) + count = count + 1 + except urllib2.HTTPError: + self.assertEqual(count, + urllib2.HTTPRedirectHandler.max_redirections) + + def test_cookie_redirect(self): + # cookies shouldn't leak into redirected requests + from cookielib import CookieJar + + from test.test_cookielib import interact_netscape + + cj = CookieJar() + interact_netscape(cj, "http://www.example.com/", "spam=eggs") + hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n") + hdeh = urllib2.HTTPDefaultErrorHandler() + hrh = urllib2.HTTPRedirectHandler() + cp = urllib2.HTTPCookieProcessor(cj) + o = build_test_opener(hh, hdeh, hrh, cp) + o.open("http://www.example.com/") + self.assert_(not hh.req.has_header("Cookie")) + + def test_proxy(self): + o = OpenerDirector() + ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128")) + o.add_handler(ph) + meth_spec = [ + [("http_open", "return response")] + ] + handlers = add_ordered_mock_handlers(o, meth_spec) + + req = Request("http://acme.example.com/") + self.assertEqual(req.get_host(), "acme.example.com") + r = o.open(req) + self.assertEqual(req.get_host(), "proxy.example.com:3128") + + self.assertEqual([(handlers[0], "http_open")], + [tup[0:2] for tup in o.calls]) + + def test_basic_auth(self): + opener = OpenerDirector() + password_manager = MockPasswordManager() + auth_handler = urllib2.HTTPBasicAuthHandler(password_manager) + realm = "ACME Widget Store" + http_handler = MockHTTPHandler( + 401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm) + opener.add_handler(auth_handler) + opener.add_handler(http_handler) + self._test_basic_auth(opener, auth_handler, "Authorization", + realm, http_handler, password_manager, + "http://acme.example.com/protected", + "http://acme.example.com/protected", + ) + + def test_proxy_basic_auth(self): + opener = OpenerDirector() + ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128")) + opener.add_handler(ph) + password_manager = MockPasswordManager() + auth_handler = urllib2.ProxyBasicAuthHandler(password_manager) + realm = "ACME Networks" + http_handler = MockHTTPHandler( + 407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm) + opener.add_handler(auth_handler) + opener.add_handler(http_handler) + self._test_basic_auth(opener, auth_handler, "Proxy-authorization", + realm, http_handler, password_manager, + "http://acme.example.com:3128/protected", + "proxy.example.com:3128", + ) + + def test_basic_and_digest_auth_handlers(self): + # HTTPDigestAuthHandler threw an exception if it couldn't handle a 40* + # response (http://python.org/sf/1479302), where it should instead + # return None to allow another handler (especially + # HTTPBasicAuthHandler) to handle the response. 
+ + # Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must + # try digest first (since it's the strongest auth scheme), so we record + # order of calls here to check digest comes first: + class RecordingOpenerDirector(OpenerDirector): + def __init__(self): + OpenerDirector.__init__(self) + self.recorded = [] + def record(self, info): + self.recorded.append(info) + class TestDigestAuthHandler(urllib2.HTTPDigestAuthHandler): + def http_error_401(self, *args, **kwds): + self.parent.record("digest") + urllib2.HTTPDigestAuthHandler.http_error_401(self, + *args, **kwds) + class TestBasicAuthHandler(urllib2.HTTPBasicAuthHandler): + def http_error_401(self, *args, **kwds): + self.parent.record("basic") + urllib2.HTTPBasicAuthHandler.http_error_401(self, + *args, **kwds) + + opener = RecordingOpenerDirector() + password_manager = MockPasswordManager() + digest_handler = TestDigestAuthHandler(password_manager) + basic_handler = TestBasicAuthHandler(password_manager) + realm = "ACME Networks" + http_handler = MockHTTPHandler( + 401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm) + opener.add_handler(basic_handler) + opener.add_handler(digest_handler) + opener.add_handler(http_handler) + + # check basic auth isn't blocked by digest handler failing + self._test_basic_auth(opener, basic_handler, "Authorization", + realm, http_handler, password_manager, + "http://acme.example.com/protected", + "http://acme.example.com/protected", + ) + # check digest was tried before basic (twice, because + # _test_basic_auth called .open() twice) + self.assertEqual(opener.recorded, ["digest", "basic"]*2) + + def _test_basic_auth(self, opener, auth_handler, auth_header, + realm, http_handler, password_manager, + request_url, protected_url): + import base64, httplib + user, password = "wile", "coyote" + + # .add_password() fed through to password manager + auth_handler.add_password(realm, request_url, user, password) + self.assertEqual(realm, password_manager.realm) + self.assertEqual(request_url, password_manager.url) + self.assertEqual(user, password_manager.user) + self.assertEqual(password, password_manager.password) + + r = opener.open(request_url) + + # should have asked the password manager for the username/password + self.assertEqual(password_manager.target_realm, realm) + self.assertEqual(password_manager.target_url, protected_url) + + # expect one request without authorization, then one with + self.assertEqual(len(http_handler.requests), 2) + self.assertFalse(http_handler.requests[0].has_header(auth_header)) + userpass = '%s:%s' % (user, password) + auth_hdr_value = 'Basic '+base64.encodestring(userpass).strip() + self.assertEqual(http_handler.requests[1].get_header(auth_header), + auth_hdr_value) + + # if the password manager can't find a password, the handler won't + # handle the HTTP auth error + password_manager.user = password_manager.password = None + http_handler.reset() + r = opener.open(request_url) + self.assertEqual(len(http_handler.requests), 1) + self.assertFalse(http_handler.requests[0].has_header(auth_header)) + + +class MiscTests(unittest.TestCase): + + def test_build_opener(self): + class MyHTTPHandler(urllib2.HTTPHandler): pass + class FooHandler(urllib2.BaseHandler): + def foo_open(self): pass + class BarHandler(urllib2.BaseHandler): + def bar_open(self): pass + + build_opener = urllib2.build_opener + + o = build_opener(FooHandler, BarHandler) + self.opener_has_handler(o, FooHandler) + self.opener_has_handler(o, BarHandler) + + # can take a mix of classes and instances + 
o = build_opener(FooHandler, BarHandler()) + self.opener_has_handler(o, FooHandler) + self.opener_has_handler(o, BarHandler) + + # subclasses of default handlers override default handlers + o = build_opener(MyHTTPHandler) + self.opener_has_handler(o, MyHTTPHandler) + + # a particular case of overriding: default handlers can be passed + # in explicitly + o = build_opener() + self.opener_has_handler(o, urllib2.HTTPHandler) + o = build_opener(urllib2.HTTPHandler) + self.opener_has_handler(o, urllib2.HTTPHandler) + o = build_opener(urllib2.HTTPHandler()) + self.opener_has_handler(o, urllib2.HTTPHandler) + + def opener_has_handler(self, opener, handler_class): + for h in opener.handlers: + if h.__class__ == handler_class: + break + else: + self.assert_(False) + + +def test_main(verbose=None): + from test import test_urllib2 + test_support.run_doctest(test_urllib2, verbose) + test_support.run_doctest(urllib2, verbose) + tests = (TrivialTests, + OpenerDirectorTests, + HandlerTests, + MiscTests) + test_support.run_unittest(*tests) + +if __name__ == "__main__": + test_main(verbose=True) Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,95 @@ +#!/usr/bin/env python +# UserString is a wrapper around the native builtin string type. +# UserString instances should behave similar to builtin string objects. + +import unittest +from test import test_support, string_tests + +from UserString import UserString, MutableString + +class UserStringTest( + string_tests.CommonTest, + string_tests.MixinStrUnicodeUserStringTest, + string_tests.MixinStrStringUserStringTest, + string_tests.MixinStrUserStringTest + ): + + type2test = UserString + + fixargs = lambda self, args: args + subclasscheck = False + +class MutableStringTest(UserStringTest): + type2test = MutableString + + # MutableStrings can be hashed => deactivate test + def test_hash(self): + pass + + def test_setitem(self): + s = self.type2test("foo") + self.assertRaises(IndexError, s.__setitem__, -4, "bar") + self.assertRaises(IndexError, s.__setitem__, 3, "bar") + s[-1] = "bar" + self.assertEqual(s, "fobar") + s[0] = "bar" + self.assertEqual(s, "barobar") + + def test_delitem(self): + s = self.type2test("foo") + self.assertRaises(IndexError, s.__delitem__, -4) + self.assertRaises(IndexError, s.__delitem__, 3) + del s[-1] + self.assertEqual(s, "fo") + del s[0] + self.assertEqual(s, "o") + del s[0] + self.assertEqual(s, "") + + def test_setslice(self): + s = self.type2test("foo") + s[:] = "bar" + self.assertEqual(s, "bar") + s[1:2] = "foo" + self.assertEqual(s, "bfoor") + s[1:-1] = UserString("a") + self.assertEqual(s, "bar") + s[0:10] = 42 + self.assertEqual(s, "42") + + def test_delslice(self): + s = self.type2test("foobar") + del s[3:10] + self.assertEqual(s, "foo") + del s[-1:10] + self.assertEqual(s, "fo") + + def test_immutable(self): + s = self.type2test("foobar") + s2 = s.immutable() + self.assertEqual(s, s2) + self.assert_(isinstance(s2, UserString)) + + def test_iadd(self): + s = self.type2test("foo") + s += "bar" + self.assertEqual(s, "foobar") + s += UserString("baz") + self.assertEqual(s, "foobarbaz") + s += 42 + self.assertEqual(s, "foobarbaz42") + + def test_imul(self): + s = self.type2test("foo") + s *= 1 + self.assertEqual(s, "foo") + s *= 2 + self.assertEqual(s, "foofoo") + s *= -1 + 
self.assertEqual(s, "") + +def test_main(): + test_support.run_unittest(UserStringTest, MutableStringTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,1206 @@ +import gc +import sys +import unittest +import UserList +import weakref + +from test import test_support + +# Used in ReferencesTestCase.test_ref_created_during_del() . +ref_from_del = None + +class C: + def method(self): + pass + + +class Callable: + bar = None + + def __call__(self, x): + self.bar = x + + +def create_function(): + def f(): pass + return f + +def create_bound_method(): + return C().method + +def create_unbound_method(): + return C.method + + +class TestBase(unittest.TestCase): + + def setUp(self): + self.cbcalled = 0 + + def callback(self, ref): + self.cbcalled += 1 + + +class ReferencesTestCase(TestBase): + + def test_basic_ref(self): + self.check_basic_ref(C) + self.check_basic_ref(create_function) + self.check_basic_ref(create_bound_method) + self.check_basic_ref(create_unbound_method) + + # Just make sure the tp_repr handler doesn't raise an exception. + # Live reference: + o = C() + wr = weakref.ref(o) + `wr` + # Dead reference: + del o + `wr` + + def test_basic_callback(self): + self.check_basic_callback(C) + self.check_basic_callback(create_function) + self.check_basic_callback(create_bound_method) + self.check_basic_callback(create_unbound_method) + + def test_multiple_callbacks(self): + o = C() + ref1 = weakref.ref(o, self.callback) + ref2 = weakref.ref(o, self.callback) + del o + gc.collect() + gc.collect() + gc.collect() + self.assert_(ref1() is None, + "expected reference to be invalidated") + self.assert_(ref2() is None, + "expected reference to be invalidated") + self.assert_(self.cbcalled == 2, + "callback not called the right number of times") + + def test_multiple_selfref_callbacks(self): + # Make sure all references are invalidated before callbacks are called + # + # What's important here is that we're using the first + # reference in the callback invoked on the second reference + # (the most recently created ref is cleaned up first). This + # tests that all references to the object are invalidated + # before any of the callbacks are invoked, so that we only + # have one invocation of _weakref.c:cleanup_helper() active + # for a particular object at a time. 
+ # + def callback(object, self=self): + self.ref() + c = C() + self.ref = weakref.ref(c, callback) + ref1 = weakref.ref(c, callback) + del c + + def test_proxy_ref(self): + o = C() + o.bar = 1 + ref1 = weakref.proxy(o, self.callback) + ref2 = weakref.proxy(o, self.callback) + del o + gc.collect() + gc.collect() + gc.collect() + + def check(proxy): + proxy.bar + + self.assertRaises(weakref.ReferenceError, check, ref1) + self.assertRaises(weakref.ReferenceError, check, ref2) + # Works only with refcounting + # self.assertRaises(weakref.ReferenceError, bool, weakref.proxy(C())) + self.assert_(self.cbcalled == 2) + + def check_basic_ref(self, factory): + o = factory() + ref = weakref.ref(o) + self.assert_(ref() is not None, + "weak reference to live object should be live") + o2 = ref() + self.assert_(o is o2, + "() should return original object if live") + + def check_basic_callback(self, factory): + self.cbcalled = 0 + o = factory() + ref = weakref.ref(o, self.callback) + del o + gc.collect() + gc.collect() + gc.collect() + + self.assert_(self.cbcalled == 1, + "callback did not properly set 'cbcalled'") + self.assert_(ref() is None, + "ref2 should be dead after deleting object reference") + + def test_ref_reuse(self): + o = C() + ref1 = weakref.ref(o) + # create a proxy to make sure that there's an intervening creation + # between these two; it should make no difference + proxy = weakref.proxy(o) + ref2 = weakref.ref(o) + self.assert_(ref1 is ref2, + "reference object w/out callback should be re-used") + + o = C() + proxy = weakref.proxy(o) + ref1 = weakref.ref(o) + ref2 = weakref.ref(o) + self.assert_(ref1 is ref2, + "reference object w/out callback should be re-used") + self.assert_(weakref.getweakrefcount(o) == 2, + "wrong weak ref count for object") + del proxy + gc.collect() + self.assert_(weakref.getweakrefcount(o) == 1, + "wrong weak ref count for object after deleting proxy") + + def test_proxy_reuse(self): + o = C() + proxy1 = weakref.proxy(o) + ref = weakref.ref(o) + proxy2 = weakref.proxy(o) + self.assert_(proxy1 is proxy2, + "proxy object w/out callback should have been re-used") + + def test_basic_proxy(self): + o = C() + self.check_proxy(o, weakref.proxy(o)) + + L = UserList.UserList() + p = weakref.proxy(L) + self.failIf(p, "proxy for empty UserList should be false") + p.append(12) + self.assertEqual(len(L), 1) + self.failUnless(p, "proxy for non-empty UserList should be true") + p[:] = [2, 3] + self.assertEqual(len(L), 2) + self.assertEqual(len(p), 2) + self.failUnless(3 in p, + "proxy didn't support __contains__() properly") + p[1] = 5 + self.assertEqual(L[1], 5) + self.assertEqual(p[1], 5) + L2 = UserList.UserList(L) + p2 = weakref.proxy(L2) + self.assertEqual(p, p2) + ## self.assertEqual(repr(L2), repr(p2)) + L3 = UserList.UserList(range(10)) + p3 = weakref.proxy(L3) + self.assertEqual(L3[:], p3[:]) + self.assertEqual(L3[5:], p3[5:]) + self.assertEqual(L3[:5], p3[:5]) + self.assertEqual(L3[2:5], p3[2:5]) + + # The PyWeakref_* C API is documented as allowing either NULL or + # None as the value for the callback, where either means "no + # callback". The "no callback" ref and proxy objects are supposed + # to be shared so long as they exist by all callers so long as + # they are active. In Python 2.3.3 and earlier, this guaranttee + # was not honored, and was broken in different ways for + # PyWeakref_NewRef() and PyWeakref_NewProxy(). (Two tests.) 
+ + def test_shared_ref_without_callback(self): + self.check_shared_without_callback(weakref.ref) + + def test_shared_proxy_without_callback(self): + self.check_shared_without_callback(weakref.proxy) + + def check_shared_without_callback(self, makeref): + o = Object(1) + p1 = makeref(o, None) + p2 = makeref(o, None) + self.assert_(p1 is p2, "both callbacks were None in the C API") + del p1, p2 + p1 = makeref(o) + p2 = makeref(o, None) + self.assert_(p1 is p2, "callbacks were NULL, None in the C API") + del p1, p2 + p1 = makeref(o) + p2 = makeref(o) + self.assert_(p1 is p2, "both callbacks were NULL in the C API") + del p1, p2 + p1 = makeref(o, None) + p2 = makeref(o) + self.assert_(p1 is p2, "callbacks were None, NULL in the C API") + + def test_callable_proxy(self): + o = Callable() + ref1 = weakref.proxy(o) + + self.check_proxy(o, ref1) + + self.assert_(type(ref1) is weakref.CallableProxyType, + "proxy is not of callable type") + ref1('twinkies!') + self.assert_(o.bar == 'twinkies!', + "call through proxy not passed through to original") + ref1(x='Splat.') + self.assert_(o.bar == 'Splat.', + "call through proxy not passed through to original") + + # expect due to too few args + self.assertRaises(TypeError, ref1) + + # expect due to too many args + self.assertRaises(TypeError, ref1, 1, 2, 3) + + def check_proxy(self, o, proxy): + o.foo = 1 + self.assert_(proxy.foo == 1, + "proxy does not reflect attribute addition") + o.foo = 2 + self.assert_(proxy.foo == 2, + "proxy does not reflect attribute modification") + del o.foo + self.assert_(not hasattr(proxy, 'foo'), + "proxy does not reflect attribute removal") + + proxy.foo = 1 + self.assert_(o.foo == 1, + "object does not reflect attribute addition via proxy") + proxy.foo = 2 + self.assert_( + o.foo == 2, + "object does not reflect attribute modification via proxy") + del proxy.foo + self.assert_(not hasattr(o, 'foo'), + "object does not reflect attribute removal via proxy") + + def test_proxy_deletion(self): + # Test clearing of SF bug #762891 + class Foo: + result = None + def __delitem__(self, accessor): + self.result = accessor + g = Foo() + f = weakref.proxy(g) + del f[0] + self.assertEqual(f.result, 0) + + def test_proxy_bool(self): + # Test clearing of SF bug #1170766 + class List(list): pass + lyst = List() + self.assertEqual(bool(weakref.proxy(lyst)), bool(lyst)) + + def test_getweakrefcount(self): + o = C() + ref1 = weakref.ref(o) + ref2 = weakref.ref(o, self.callback) + self.assert_(weakref.getweakrefcount(o) == 2, + "got wrong number of weak reference objects") + + proxy1 = weakref.proxy(o) + proxy2 = weakref.proxy(o, self.callback) + self.assert_(weakref.getweakrefcount(o) == 4, + "got wrong number of weak reference objects") + + del ref1, ref2, proxy1, proxy2 + gc.collect() + self.assert_(weakref.getweakrefcount(o) == 0, + "weak reference objects not unlinked from" + " referent when discarded.") + + # assumes ints do not support weakrefs + self.assert_(weakref.getweakrefcount(1) == 0, + "got wrong number of weak reference objects for int") + + def test_getweakrefs(self): + o = C() + ref1 = weakref.ref(o, self.callback) + ref2 = weakref.ref(o, self.callback) + del ref1 + gc.collect() + self.assert_(weakref.getweakrefs(o) == [ref2], + "list of refs does not match") + + o = C() + ref1 = weakref.ref(o, self.callback) + ref2 = weakref.ref(o, self.callback) + del ref2 + gc.collect() + gc.collect() + gc.collect() + self.assert_(weakref.getweakrefs(o) == [ref1], + "list of refs does not match") + + del ref1 + gc.collect() + 
self.assert_(weakref.getweakrefs(o) == [], + "list of refs not cleared") + + # assumes ints do not support weakrefs + self.assert_(weakref.getweakrefs(1) == [], + "list of refs does not match for int") + + def test_newstyle_number_ops(self): + class F(float): + pass + f = F(2.0) + p = weakref.proxy(f) + self.assert_(p + 1.0 == 3.0) + self.assert_(1.0 + p == 3.0) # this used to SEGV + + def test_callbacks_protected(self): + # Callbacks protected from already-set exceptions? + # Regression test for SF bug #478534. + class BogusError(Exception): + pass + data = {} + def remove(k): + del data[k] + def encapsulate(): + f = lambda : () + data[weakref.ref(f, remove)] = None + raise BogusError + try: + encapsulate() + except BogusError: + pass + else: + self.fail("exception not properly restored") + try: + encapsulate() + except BogusError: + pass + else: + self.fail("exception not properly restored") + + def test_sf_bug_840829(self): + # "weakref callbacks and gc corrupt memory" + # subtype_dealloc erroneously exposed a new-style instance + # already in the process of getting deallocated to gc, + # causing double-deallocation if the instance had a weakref + # callback that triggered gc. + # If the bug exists, there probably won't be an obvious symptom + # in a release build. In a debug build, a segfault will occur + # when the second attempt to remove the instance from the "list + # of all objects" occurs. + + import gc + + class C(object): + pass + + c = C() + wr = weakref.ref(c, lambda ignore: gc.collect()) + del c + + # There endeth the first part. It gets worse. + del wr + + c1 = C() + c1.i = C() + wr = weakref.ref(c1.i, lambda ignore: gc.collect()) + + c2 = C() + c2.c1 = c1 + del c1 # still alive because c2 points to it + + # Now when subtype_dealloc gets called on c2, it's not enough just + # that c2 is immune from gc while the weakref callbacks associated + # with c2 execute (there are none in this 2nd half of the test, btw). + # subtype_dealloc goes on to call the base classes' deallocs too, + # so any gc triggered by weakref callbacks associated with anything + # torn down by a base class dealloc can also trigger double + # deallocation of c2. + del c2 + + def test_callback_in_cycle_1(self): + import gc + + class J(object): + pass + + class II(object): + def acallback(self, ignore): + self.J + + I = II() + I.J = J + I.wr = weakref.ref(J, I.acallback) + + # Now J and II are each in a self-cycle (as all new-style class + # objects are, since their __mro__ points back to them). I holds + # both a weak reference (I.wr) and a strong reference (I.J) to class + # J. I is also in a cycle (I.wr points to a weakref that references + # I.acallback). When we del these three, they all become trash, but + # the cycles prevent any of them from getting cleaned up immediately. + # Instead they have to wait for cyclic gc to deduce that they're + # trash. + # + # gc used to call tp_clear on all of them, and the order in which + # it does that is pretty accidental. The exact order in which we + # built up these things manages to provoke gc into running tp_clear + # in just the right order (I last). Calling tp_clear on II leaves + # behind an insane class object (its __mro__ becomes NULL). Calling + # tp_clear on J breaks its self-cycle, but J doesn't get deleted + # just then because of the strong reference from I.J. Calling + # tp_clear on I starts to clear I's __dict__, and just happens to + # clear I.J first -- I.wr is still intact. 
That removes the last + # reference to J, which triggers the weakref callback. The callback + # tries to do "self.J", and instances of new-style classes look up + # attributes ("J") in the class dict first. The class (II) wants to + # search II.__mro__, but that's NULL. The result was a segfault in + # a release build, and an assert failure in a debug build. + del I, J, II + gc.collect() + + def test_callback_in_cycle_2(self): + import gc + + # This is just like test_callback_in_cycle_1, except that II is an + # old-style class. The symptom is different then: an instance of an + # old-style class looks in its own __dict__ first. 'J' happens to + # get cleared from I.__dict__ before 'wr', and 'J' was never in II's + # __dict__, so the attribute isn't found. The difference is that + # the old-style II doesn't have a NULL __mro__ (it doesn't have any + # __mro__), so no segfault occurs. Instead it got: + # test_callback_in_cycle_2 (__main__.ReferencesTestCase) ... + # Exception exceptions.AttributeError: + # "II instance has no attribute 'J'" in > ignored + + class J(object): + pass + + class II: + def acallback(self, ignore): + self.J + + I = II() + I.J = J + I.wr = weakref.ref(J, I.acallback) + + del I, J, II + gc.collect() + + def test_callback_in_cycle_3(self): + import gc + + # This one broke the first patch that fixed the last two. In this + # case, the objects reachable from the callback aren't also reachable + # from the object (c1) *triggering* the callback: you can get to + # c1 from c2, but not vice-versa. The result was that c2's __dict__ + # got tp_clear'ed by the time the c2.cb callback got invoked. + + class C: + def cb(self, ignore): + self.me + self.c1 + self.wr + + c1, c2 = C(), C() + + c2.me = c2 + c2.c1 = c1 + c2.wr = weakref.ref(c1, c2.cb) + + del c1, c2 + gc.collect() + + def test_callback_in_cycle_4(self): + import gc + + # Like test_callback_in_cycle_3, except c2 and c1 have different + # classes. c2's class (C) isn't reachable from c1 then, so protecting + # objects reachable from the dying object (c1) isn't enough to stop + # c2's class (C) from getting tp_clear'ed before c2.cb is invoked. + # The result was a segfault (C.__mro__ was NULL when the callback + # tried to look up self.me). + + class C(object): + def cb(self, ignore): + self.me + self.c1 + self.wr + + class D: + pass + + c1, c2 = D(), C() + + c2.me = c2 + c2.c1 = c1 + c2.wr = weakref.ref(c1, c2.cb) + + del c1, c2, C, D + gc.collect() + + def XXX_test_callback_in_cycle_resurrection(self): + # We can't guarrantee the behaviour tested with our + # current weakref implementations. + # If an object and a weakref to it gets collected at the + # same time it is unclear whether the callback is called. + import gc + + # Do something nasty in a weakref callback: resurrect objects + # from dead cycles. For this to be attempted, the weakref and + # its callback must also be part of the cyclic trash (else the + # objects reachable via the callback couldn't be in cyclic trash + # to begin with -- the callback would act like an external root). + # But gc clears trash weakrefs with callbacks early now, which + # disables the callbacks, so the callbacks shouldn't get called + # at all (and so nothing actually gets resurrected). 
+ + alist = [] + class C(object): + def __init__(self, value): + self.attribute = value + + def acallback(self, ignore): + alist.append(self.c) + + c1, c2 = C(1), C(2) + c1.c = c2 + c2.c = c1 + c1.wr = weakref.ref(c2, c1.acallback) + c2.wr = weakref.ref(c1, c2.acallback) + + def C_went_away(ignore): + alist.append("C went away") + wr = weakref.ref(C, C_went_away) + + del c1, c2, C # make them all trash + self.assertEqual(alist, []) # del isn't enough to reclaim anything + + gc.collect() + # c1.wr and c2.wr were part of the cyclic trash, so should have + # been cleared without their callbacks executing. OTOH, the weakref + # to C is bound to a function local (wr), and wasn't trash, so that + # callback should have been invoked when C went away. + self.assertEqual(alist, ["C went away"]) + # The remaining weakref should be dead now (its callback ran). + self.assertEqual(wr(), None) + + del alist[:] + gc.collect() + self.assertEqual(alist, []) + + def XXX_test_callbacks_on_callback(self): + # See XXX_test_callback_in_cycle_resurrection above + import gc + + # Set up weakref callbacks *on* weakref callbacks. + alist = [] + def safe_callback(ignore): + alist.append("safe_callback called") + + class C(object): + def cb(self, ignore): + alist.append("cb called") + + c, d = C(), C() + c.other = d + d.other = c + callback = c.cb + c.wr = weakref.ref(d, callback) # this won't trigger + d.wr = weakref.ref(callback, d.cb) # ditto + external_wr = weakref.ref(callback, safe_callback) # but this will + self.assert_(external_wr() is callback) + + # The weakrefs attached to c and d should get cleared, so that + # C.cb is never called. But external_wr isn't part of the cyclic + # trash, and no cyclic trash is reachable from it, so safe_callback + # should get invoked when the bound method object callback (c.cb) + # -- which is itself a callback, and also part of the cyclic trash -- + # gets reclaimed at the end of gc. + + del callback, c, d, C + self.assertEqual(alist, []) # del isn't enough to clean up cycles + gc.collect() + self.assertEqual(alist, ["safe_callback called"]) + self.assertEqual(external_wr(), None) + + del alist[:] + gc.collect() + self.assertEqual(alist, []) + + def test_gc_during_ref_creation(self): + self.check_gc_during_creation(weakref.ref) + + def test_gc_during_proxy_creation(self): + self.check_gc_during_creation(weakref.proxy) + + def check_gc_during_creation(self, makeref): + # gc.get/set_threshold does not exist in pypy + # The tests calling this function probaly don't test anything + # usefull anymore + + #thresholds = gc.get_threshold() + #gc.set_threshold(1, 1, 1) + gc.collect() + class A: + pass + + def callback(*args): + pass + + referenced = A() + + a = A() + a.a = a + a.wr = makeref(referenced) + + try: + # now make sure the object and the ref get labeled as + # cyclic trash: + a = A() + weakref.ref(referenced, callback) + + finally: + pass #gc.set_threshold(*thresholds) + + def test_ref_created_during_del(self): + # Bug #1377858 + # A weakref created in an object's __del__() would crash the + # interpreter when the weakref was cleaned up since it would refer to + # non-existent memory. This test should not segfault the interpreter. 
+ class Target(object): + def __del__(self): + global ref_from_del + ref_from_del = weakref.ref(self) + + w = Target() + + +class SubclassableWeakrefTestCase(unittest.TestCase): + + def test_subclass_refs(self): + class MyRef(weakref.ref): + def __init__(self, ob, callback=None, value=42): + self.value = value + super(MyRef, self).__init__(ob, callback) + def __call__(self): + self.called = True + return super(MyRef, self).__call__() + o = Object("foo") + mr = MyRef(o, value=24) + self.assert_(mr() is o) + self.assert_(mr.called) + self.assertEqual(mr.value, 24) + del o + self.assert_(mr() is None) + self.assert_(mr.called) + + def test_subclass_refs_dont_replace_standard_refs(self): + class MyRef(weakref.ref): + pass + o = Object(42) + r1 = MyRef(o) + r2 = weakref.ref(o) + self.assert_(r1 is not r2) + self.assertEqual(weakref.getweakrefs(o), [r2, r1]) + self.assertEqual(weakref.getweakrefcount(o), 2) + r3 = MyRef(o) + self.assertEqual(weakref.getweakrefcount(o), 3) + refs = weakref.getweakrefs(o) + self.assertEqual(len(refs), 3) + self.assert_(r2 is refs[0]) + self.assert_(r1 in refs[1:]) + self.assert_(r3 in refs[1:]) + + def test_subclass_refs_dont_conflate_callbacks(self): + class MyRef(weakref.ref): + pass + o = Object(42) + r1 = MyRef(o, id) + r2 = MyRef(o, str) + self.assert_(r1 is not r2) + refs = weakref.getweakrefs(o) + self.assert_(r1 in refs) + self.assert_(r2 in refs) + + def test_subclass_refs_with_slots(self): + class MyRef(weakref.ref): + __slots__ = "slot1", "slot2" + def __new__(type, ob, callback, slot1, slot2): + return weakref.ref.__new__(type, ob, callback) + def __init__(self, ob, callback, slot1, slot2): + self.slot1 = slot1 + self.slot2 = slot2 + def meth(self): + return self.slot1 + self.slot2 + o = Object(42) + r = MyRef(o, None, "abc", "def") + self.assertEqual(r.slot1, "abc") + self.assertEqual(r.slot2, "def") + self.assertEqual(r.meth(), "abcdef") + self.failIf(hasattr(r, "__dict__")) + + +class Object: + def __init__(self, arg): + self.arg = arg + def __repr__(self): + return "" % self.arg + + +class MappingTestCase(TestBase): + + COUNT = 10 + + def test_weak_values(self): + # + # This exercises d.copy(), d.items(), d[], del d[], len(d). + # + import gc + dict, objects = self.make_weak_valued_dict() + for o in objects: + self.assert_(weakref.getweakrefcount(o) == 1, + "wrong number of weak references to %r!" % o) + self.assert_(o is dict[o.arg], + "wrong object returned by weak dict!") + items1 = dict.items() + items2 = dict.copy().items() + items1.sort() + items2.sort() + self.assert_(items1 == items2, + "cloning of weak-valued dictionary did not work!") + del items1, items2 + gc.collect() + self.assert_(len(dict) == self.COUNT) + del objects[0] + gc.collect() + gc.collect() + gc.collect() + self.assert_(len(dict) == (self.COUNT - 1), + "deleting object did not cause dictionary update") + del objects, o + gc.collect() + gc.collect() + gc.collect() + self.assert_(len(dict) == 0, + "deleting the values did not clear the dictionary") + # regression on SF bug #447152: + dict = weakref.WeakValueDictionary() + self.assertRaises(KeyError, dict.__getitem__, 1) + dict[2] = C() + gc.collect() + gc.collect() + gc.collect() + self.assertRaises(KeyError, dict.__getitem__, 2) + + def test_weak_keys(self): + # + # This exercises d.copy(), d.items(), d[] = v, d[], del d[], + # len(d), d.has_key(). + # + import gc + dict, objects = self.make_weak_keyed_dict() + for o in objects: + self.assert_(weakref.getweakrefcount(o) == 1, + "wrong number of weak references to %r!" 
% o) + self.assert_(o.arg is dict[o], + "wrong object returned by weak dict!") + items1 = dict.items() + items2 = dict.copy().items() + self.assert_(set(items1) == set(items2), + "cloning of weak-keyed dictionary did not work!") + del items1, items2 + gc.collect() + gc.collect() + gc.collect() + self.assert_(len(dict) == self.COUNT) + del objects[0] + gc.collect() + gc.collect() + gc.collect() + self.assert_(len(dict) == (self.COUNT - 1), + "deleting object did not cause dictionary update") + del objects, o + gc.collect() + self.assert_(len(dict) == 0, + "deleting the keys did not clear the dictionary") + o = Object(42) + dict[o] = "What is the meaning of the universe?" + self.assert_(dict.has_key(o)) + self.assert_(not dict.has_key(34)) + + def test_weak_keyed_iters(self): + dict, objects = self.make_weak_keyed_dict() + self.check_iters(dict) + + # Test keyrefs() + refs = dict.keyrefs() + self.assertEqual(len(refs), len(objects)) + objects2 = list(objects) + for wr in refs: + ob = wr() + self.assert_(dict.has_key(ob)) + self.assert_(ob in dict) + self.assertEqual(ob.arg, dict[ob]) + objects2.remove(ob) + self.assertEqual(len(objects2), 0) + + # Test iterkeyrefs() + objects2 = list(objects) + self.assertEqual(len(list(dict.iterkeyrefs())), len(objects)) + for wr in dict.iterkeyrefs(): + ob = wr() + self.assert_(dict.has_key(ob)) + self.assert_(ob in dict) + self.assertEqual(ob.arg, dict[ob]) + objects2.remove(ob) + self.assertEqual(len(objects2), 0) + + def test_weak_valued_iters(self): + dict, objects = self.make_weak_valued_dict() + self.check_iters(dict) + + # Test valuerefs() + refs = dict.valuerefs() + self.assertEqual(len(refs), len(objects)) + objects2 = list(objects) + for wr in refs: + ob = wr() + self.assertEqual(ob, dict[ob.arg]) + self.assertEqual(ob.arg, dict[ob.arg].arg) + objects2.remove(ob) + self.assertEqual(len(objects2), 0) + + # Test itervaluerefs() + objects2 = list(objects) + self.assertEqual(len(list(dict.itervaluerefs())), len(objects)) + for wr in dict.itervaluerefs(): + ob = wr() + self.assertEqual(ob, dict[ob.arg]) + self.assertEqual(ob.arg, dict[ob.arg].arg) + objects2.remove(ob) + self.assertEqual(len(objects2), 0) + + def check_iters(self, dict): + # item iterator: + items = dict.items() + for item in dict.iteritems(): + items.remove(item) + self.assert_(len(items) == 0, "iteritems() did not touch all items") + + # key iterator, via __iter__(): + keys = dict.keys() + for k in dict: + keys.remove(k) + self.assert_(len(keys) == 0, "__iter__() did not touch all keys") + + # key iterator, via iterkeys(): + keys = dict.keys() + for k in dict.iterkeys(): + keys.remove(k) + self.assert_(len(keys) == 0, "iterkeys() did not touch all keys") + + # value iterator: + values = dict.values() + for v in dict.itervalues(): + values.remove(v) + self.assert_(len(values) == 0, + "itervalues() did not touch all values") + + def test_make_weak_keyed_dict_from_dict(self): + o = Object(3) + dict = weakref.WeakKeyDictionary({o:364}) + self.assert_(dict[o] == 364) + + def test_make_weak_keyed_dict_from_weak_keyed_dict(self): + o = Object(3) + dict = weakref.WeakKeyDictionary({o:364}) + dict2 = weakref.WeakKeyDictionary(dict) + self.assert_(dict[o] == 364) + + def make_weak_keyed_dict(self): + dict = weakref.WeakKeyDictionary() + objects = map(Object, range(self.COUNT)) + for o in objects: + dict[o] = o.arg + return dict, objects + + def make_weak_valued_dict(self): + dict = weakref.WeakValueDictionary() + objects = map(Object, range(self.COUNT)) + for o in objects: + dict[o.arg] = o + 
return dict, objects + + def check_popitem(self, klass, key1, value1, key2, value2): + weakdict = klass() + weakdict[key1] = value1 + weakdict[key2] = value2 + self.assert_(len(weakdict) == 2) + k, v = weakdict.popitem() + self.assert_(len(weakdict) == 1) + if k is key1: + self.assert_(v is value1) + else: + self.assert_(v is value2) + k, v = weakdict.popitem() + self.assert_(len(weakdict) == 0) + if k is key1: + self.assert_(v is value1) + else: + self.assert_(v is value2) + + def test_weak_valued_dict_popitem(self): + self.check_popitem(weakref.WeakValueDictionary, + "key1", C(), "key2", C()) + + def test_weak_keyed_dict_popitem(self): + self.check_popitem(weakref.WeakKeyDictionary, + C(), "value 1", C(), "value 2") + + def check_setdefault(self, klass, key, value1, value2): + self.assert_(value1 is not value2, + "invalid test" + " -- value parameters must be distinct objects") + weakdict = klass() + o = weakdict.setdefault(key, value1) + self.assert_(o is value1) + self.assert_(weakdict.has_key(key)) + self.assert_(weakdict.get(key) is value1) + self.assert_(weakdict[key] is value1) + + o = weakdict.setdefault(key, value2) + self.assert_(o is value1) + self.assert_(weakdict.has_key(key)) + self.assert_(weakdict.get(key) is value1) + self.assert_(weakdict[key] is value1) + + def test_weak_valued_dict_setdefault(self): + self.check_setdefault(weakref.WeakValueDictionary, + "key", C(), C()) + + def test_weak_keyed_dict_setdefault(self): + self.check_setdefault(weakref.WeakKeyDictionary, + C(), "value 1", "value 2") + + def check_update(self, klass, dict): + # + # This exercises d.update(), len(d), d.keys(), d.has_key(), + # d.get(), d[]. + # + weakdict = klass() + weakdict.update(dict) + self.assert_(len(weakdict) == len(dict)) + for k in weakdict.keys(): + self.assert_(dict.has_key(k), + "mysterious new key appeared in weak dict") + v = dict.get(k) + self.assert_(v is weakdict[k]) + self.assert_(v is weakdict.get(k)) + for k in dict.keys(): + self.assert_(weakdict.has_key(k), + "original key disappeared in weak dict") + v = dict[k] + self.assert_(v is weakdict[k]) + self.assert_(v is weakdict.get(k)) + + def test_weak_valued_dict_update(self): + self.check_update(weakref.WeakValueDictionary, + {1: C(), 'a': C(), C(): C()}) + + def test_weak_keyed_dict_update(self): + self.check_update(weakref.WeakKeyDictionary, + {C(): 1, C(): 2, C(): 3}) + + def test_weak_keyed_delitem(self): + d = weakref.WeakKeyDictionary() + o1 = Object('1') + o2 = Object('2') + d[o1] = 'something' + d[o2] = 'something' + self.assert_(len(d) == 2) + del d[o1] + self.assert_(len(d) == 1) + self.assert_(d.keys() == [o2]) + + def test_weak_valued_delitem(self): + d = weakref.WeakValueDictionary() + o1 = Object('1') + o2 = Object('2') + d['something'] = o1 + d['something else'] = o2 + self.assert_(len(d) == 2) + del d['something'] + self.assert_(len(d) == 1) + self.assert_(d.items() == [('something else', o2)]) + + def test_weak_keyed_bad_delitem(self): + d = weakref.WeakKeyDictionary() + o = Object('1') + # An attempt to delete an object that isn't there should raise + # KeyError. It didn't before 2.3. + self.assertRaises(KeyError, d.__delitem__, o) + self.assertRaises(KeyError, d.__getitem__, o) + + # If a key isn't of a weakly referencable type, __getitem__ and + # __setitem__ raise TypeError. __delitem__ should too. 
+ self.assertRaises(TypeError, d.__delitem__, 13) + self.assertRaises(TypeError, d.__getitem__, 13) + self.assertRaises(TypeError, d.__setitem__, 13, 13) + + def test_weak_keyed_cascading_deletes(self): + # SF bug 742860. For some reason, before 2.3 __delitem__ iterated + # over the keys via self.data.iterkeys(). If things vanished from + # the dict during this (or got added), that caused a RuntimeError. + + d = weakref.WeakKeyDictionary() + mutate = False + + class C(object): + def __init__(self, i): + self.value = i + def __hash__(self): + return hash(self.value) + def __eq__(self, other): + if mutate: + # Side effect that mutates the dict, by removing the + # last strong reference to a key. + del objs[-1] + return self.value == other.value + + objs = [C(i) for i in range(4)] + for o in objs: + d[o] = o.value + del o # now the only strong references to keys are in objs + gc.collect() + # Find the order in which iterkeys sees the keys. + objs = d.keys() + # Reverse it, so that the iteration implementation of __delitem__ + # has to keep looping to find the first object we delete. + objs.reverse() + + # Turn on mutation in C.__eq__. The first time thru the loop, + # under the iterkeys() business the first comparison will delete + # the last item iterkeys() would see, and that causes a + # RuntimeError: dictionary changed size during iteration + # when the iterkeys() loop goes around to try comparing the next + # key. After this was fixed, it just deletes the last object *our* + # "for o in obj" loop would have gotten to. + mutate = True + count = 0 + for o in objs: + count += 1 + del d[o] + gc.collect() + self.assertEqual(len(d), 0) + self.assertEqual(count, 2) + +from test import mapping_tests + +class WeakValueDictionaryTestCase(mapping_tests.BasicTestMappingProtocol): + """Check that WeakValueDictionary conforms to the mapping protocol""" + __ref = {"key1":Object(1), "key2":Object(2), "key3":Object(3)} + type2test = weakref.WeakValueDictionary + def _reference(self): + return self.__ref.copy() + +class WeakKeyDictionaryTestCase(mapping_tests.BasicTestMappingProtocol): + """Check that WeakKeyDictionary conforms to the mapping protocol""" + __ref = {Object("key1"):1, Object("key2"):2, Object("key3"):3} + type2test = weakref.WeakKeyDictionary + def _reference(self): + return self.__ref.copy() + +libreftest = """ Doctest for examples in the library reference: libweakref.tex + +>>> import weakref +>>> class Dict(dict): +... pass +... +>>> obj = Dict(red=1, green=2, blue=3) # this object is weak referencable +>>> r = weakref.ref(obj) +>>> print r() is obj +True + +>>> import weakref +>>> class Object: +... pass +... +>>> o = Object() +>>> r = weakref.ref(o) +>>> o2 = r() +>>> o is o2 +True +>>> del o, o2 +>>> print r() +None + +>>> import weakref +>>> class ExtendedRef(weakref.ref): +... def __init__(self, ob, callback=None, **annotations): +... super(ExtendedRef, self).__init__(ob, callback) +... self.__counter = 0 +... for k, v in annotations.iteritems(): +... setattr(self, k, v) +... def __call__(self): +... '''Return a pair containing the referent and the number of +... times the reference has been called. +... ''' +... ob = super(ExtendedRef, self).__call__() +... if ob is not None: +... self.__counter += 1 +... ob = (ob, self.__counter) +... return ob +... +>>> class A: # not in docs from here, just testing the ExtendedRef +... pass +... 
+>>> a = A() +>>> r = ExtendedRef(a, foo=1, bar="baz") +>>> r.foo +1 +>>> r.bar +'baz' +>>> r()[1] +1 +>>> r()[1] +2 +>>> r()[0] is a +True + + +>>> import weakref +>>> _id2obj_dict = weakref.WeakValueDictionary() +>>> def remember(obj): +... oid = id(obj) +... _id2obj_dict[oid] = obj +... return oid +... +>>> def id2obj(oid): +... return _id2obj_dict[oid] +... +>>> a = A() # from here, just testing +>>> a_id = remember(a) +>>> id2obj(a_id) is a +True +>>> del a +>>> try: +... id2obj(a_id) +... except KeyError: +... print 'OK' +... else: +... print 'WeakValueDictionary error' +OK + +""" + +__test__ = {'libreftest' : libreftest} + +def test_main(): + test_support.run_unittest( + ReferencesTestCase, + MappingTestCase, + WeakValueDictionaryTestCase, + WeakKeyDictionaryTestCase, + ) + test_support.run_doctest(sys.modules[__name__]) + + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,66 @@ +# Python test set -- built-in functions + +import test.test_support, unittest +import sys + +import warnings +warnings.filterwarnings("ignore", "integer argument expected", + DeprecationWarning, "unittest") + +class XrangeTest(unittest.TestCase): + def test_xrange(self): + self.assertEqual(list(xrange(3)), [0, 1, 2]) + self.assertEqual(list(xrange(1, 5)), [1, 2, 3, 4]) + self.assertEqual(list(xrange(0)), []) + self.assertEqual(list(xrange(-3)), []) + self.assertEqual(list(xrange(1, 10, 3)), [1, 4, 7]) + self.assertEqual(list(xrange(5, -5, -3)), [5, 2, -1, -4]) + + a = 10 + b = 100 + c = 50 + + self.assertEqual(list(xrange(a, a+2)), [a, a+1]) + self.assertEqual(list(xrange(a+2, a, -1L)), [a+2, a+1]) + self.assertEqual(list(xrange(a+4, a, -2)), [a+4, a+2]) + + seq = list(xrange(a, b, c)) + self.assert_(a in seq) + self.assert_(b not in seq) + self.assertEqual(len(seq), 2) + + seq = list(xrange(b, a, -c)) + self.assert_(b in seq) + self.assert_(a not in seq) + self.assertEqual(len(seq), 2) + + seq = list(xrange(-a, -b, -c)) + self.assert_(-a in seq) + self.assert_(-b not in seq) + self.assertEqual(len(seq), 2) + + self.assertRaises(TypeError, xrange) + self.assertRaises(TypeError, xrange, 1, 2, 3, 4) + self.assertRaises(ValueError, xrange, 1, 2, 0) + + # Overflow tests disabled for PyPy since it handles long arguments + # as well. 
+ #self.assertRaises(OverflowError, xrange, 1e100, 1e101, 1e101) + + self.assertRaises(TypeError, xrange, 0, "spam") + self.assertRaises(TypeError, xrange, 0, 42, "spam") + + self.assertEqual(len(xrange(0, sys.maxint, sys.maxint-1)), 2) + + #self.assertRaises(OverflowError, xrange, -sys.maxint, sys.maxint) + #self.assertRaises(OverflowError, xrange, 0, 2*sys.maxint) + + r = xrange(-sys.maxint, sys.maxint, 2) + self.assertEqual(len(r), sys.maxint) + #self.assertRaises(OverflowError, xrange, -sys.maxint-1, sys.maxint, 2) + +def test_main(): + test.test_support.run_unittest(XrangeTest) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py ============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,429 @@ +import sys +import os +import marshal +import imp +import struct +import time +import unittest + +import zlib # implied prerequisite +from zipfile import ZipFile, ZipInfo, ZIP_STORED, ZIP_DEFLATED +from test import test_support +from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co + +import zipimport +import linecache +import doctest +import inspect +import StringIO +from traceback import extract_tb, extract_stack, print_tb +raise_src = 'def do_raise(): raise TypeError\n' + +# so we only run testAFakeZlib once if this test is run repeatedly +# which happens when we look for ref leaks +test_imported = False + + +def make_pyc(co, mtime): + data = marshal.dumps(co) + if type(mtime) is type(0.0): + # Mac mtimes need a bit of special casing + if mtime < 0x7fffffff: + mtime = int(mtime) + else: + mtime = int(-0x100000000L + long(mtime)) + pyc = imp.get_magic() + struct.pack("", "exec"), NOW) + files = {TESTMOD + pyc_ext: (NOW, pyc), + "some.data": (NOW, "some data")} + self.doTest(pyc_ext, files, TESTMOD) + + def testImport_WithStuff(self): + # try importing from a zipfile which contains additional + # stuff at the beginning of the file + files = {TESTMOD + ".py": (NOW, test_src)} + self.doTest(".py", files, TESTMOD, + stuff="Some Stuff"*31) + + def assertModuleSource(self, module): + self.assertEqual(inspect.getsource(module), test_src) + + def testGetSource(self): + files = {TESTMOD + ".py": (NOW, test_src)} + self.doTest(".py", files, TESTMOD, call=self.assertModuleSource) + + def testGetCompiledSource(self): + pyc = make_pyc(compile(test_src, "", "exec"), NOW) + files = {TESTMOD + ".py": (NOW, test_src), + TESTMOD + pyc_ext: (NOW, pyc)} + self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource) + + def runDoctest(self, callback): + files = {TESTMOD + ".py": (NOW, test_src), + "xyz.txt": (NOW, ">>> log.append(True)\n")} + self.doTest(".py", files, TESTMOD, call=callback) + + def doDoctestFile(self, module): + log = [] + old_master, doctest.master = doctest.master, None + try: + doctest.testfile( + 'xyz.txt', package=module, module_relative=True, + globs=locals() + ) + finally: + doctest.master = old_master + self.assertEqual(log,[True]) + + def testDoctestFile(self): + self.runDoctest(self.doDoctestFile) + + def doDoctestSuite(self, module): + log = [] + doctest.DocFileTest( + 'xyz.txt', package=module, module_relative=True, + globs=locals() + ).run() + self.assertEqual(log,[True]) + + def testDoctestSuite(self): + self.runDoctest(self.doDoctestSuite) + + + def doTraceback(self, module): + try: + module.do_raise() + except: + tb = 
sys.exc_info()[2].tb_next + + f,lno,n,line = extract_tb(tb, 1)[0] + self.assertEqual(line, raise_src.strip()) + + f,lno,n,line = extract_stack(tb.tb_frame, 1)[0] + self.assertEqual(line, raise_src.strip()) + + s = StringIO.StringIO() + print_tb(tb, 1, s) + self.failUnless(s.getvalue().endswith(raise_src)) + else: + raise AssertionError("This ought to be impossible") + + def testTraceback(self): + files = {TESTMOD + ".py": (NOW, raise_src)} + self.doTest(None, files, TESTMOD, call=self.doTraceback) + + +class CompressedZipImportTestCase(UncompressedZipImportTestCase): + compression = ZIP_DEFLATED + + +class BadFileZipImportTestCase(unittest.TestCase): + def assertZipFailure(self, filename): + self.assertRaises(zipimport.ZipImportError, + zipimport.zipimporter, filename) + + def testNoFile(self): + self.assertZipFailure('AdfjdkFJKDFJjdklfjs') + + def testEmptyFilename(self): + self.assertZipFailure('') + + def testBadArgs(self): + self.assertRaises(TypeError, zipimport.zipimporter, None) + self.assertRaises(TypeError, zipimport.zipimporter, TESTMOD, kwd=None) + + def testFilenameTooLong(self): + self.assertZipFailure('A' * 33000) + + def testEmptyFile(self): + test_support.unlink(TESTMOD) + open(TESTMOD, 'w+').close() + self.assertZipFailure(TESTMOD) + + def testFileUnreadable(self): + test_support.unlink(TESTMOD) + fd = os.open(TESTMOD, os.O_CREAT, 000) + try: + os.close(fd) + self.assertZipFailure(TESTMOD) + finally: + # If we leave "the read-only bit" set on Windows, nothing can + # delete TESTMOD, and later tests suffer bogus failures. + os.chmod(TESTMOD, 0666) + test_support.unlink(TESTMOD) + + def testNotZipFile(self): + test_support.unlink(TESTMOD) + fp = open(TESTMOD, 'w+') + fp.write('a' * 22) + fp.close() + self.assertZipFailure(TESTMOD) + + # XXX: disabled until this works on Big-endian machines + def _testBogusZipFile(self): + test_support.unlink(TESTMOD) + fp = open(TESTMOD, 'w+') + fp.write(struct.pack('=I', 0x06054B50)) + fp.write('a' * 18) + fp.close() + z = zipimport.zipimporter(TESTMOD) + + try: + self.assertRaises(TypeError, z.find_module, None) + self.assertRaises(TypeError, z.load_module, None) + self.assertRaises(TypeError, z.is_package, None) + self.assertRaises(TypeError, z.get_code, None) + self.assertRaises(TypeError, z.get_data, None) + self.assertRaises(TypeError, z.get_source, None) + + error = zipimport.ZipImportError + self.assertEqual(z.find_module('abc'), None) + + self.assertRaises(error, z.load_module, 'abc') + self.assertRaises(error, z.get_code, 'abc') + self.assertRaises(IOError, z.get_data, 'abc') + self.assertRaises(error, z.get_source, 'abc') + self.assertRaises(error, z.is_package, 'abc') + finally: + zipimport._zip_directory_cache.clear() + + +def cleanup(): + # this is necessary if test is run repeated (like when finding leaks) + global test_imported + if test_imported: + zipimport._zip_directory_cache.clear() + if hasattr(UncompressedZipImportTestCase, 'testAFakeZlib'): + delattr(UncompressedZipImportTestCase, 'testAFakeZlib') + if hasattr(CompressedZipImportTestCase, 'testAFakeZlib'): + delattr(CompressedZipImportTestCase, 'testAFakeZlib') + test_imported = True + +def test_main(): + cleanup() + try: + test_support.run_unittest( + UncompressedZipImportTestCase, + CompressedZipImportTestCase, + BadFileZipImportTestCase, + ) + finally: + test_support.unlink(TESTMOD) + +if __name__ == "__main__": + test_main() Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/tf_inherit_check.py 
============================================================================== --- (empty file) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/tf_inherit_check.py Mon Aug 18 06:29:12 2008 @@ -0,0 +1,25 @@ +# Helper script for test_tempfile.py. argv[2] is the number of a file +# descriptor which should _not_ be open. Check this by attempting to +# write to it -- if we succeed, something is wrong. + +import sys +import os + +verbose = (sys.argv[1] == 'v') +try: + fd = int(sys.argv[2]) + + try: + os.write(fd, "blat") + except os.error: + # Success -- could not write to fd. + sys.exit(0) + else: + if verbose: + sys.stderr.write("fd %d is open in child" % fd) + sys.exit(1) + +except StandardError: + if verbose: + raise + sys.exit(1) From arigo at codespeak.net Mon Aug 18 09:42:25 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 09:42:25 +0200 (CEST) Subject: [pypy-svn] r57384 - pypy/dist/pypy/translator/goal Message-ID: <20080818074225.6E48C169E2D@codespeak.net> Author: arigo Date: Mon Aug 18 09:42:24 2008 New Revision: 57384 Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py Log: Fix to pass the --opt option correctly. Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/dist/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/dist/pypy/translator/goal/bench-cronjob.py Mon Aug 18 09:42:24 2008 @@ -181,15 +181,15 @@ if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ c - c--stackless--_O3 - c--_O3--_allworkingmodules - c--thread--gc=hybrid--_O3 - c--gc=semispace--_O3 - c--gc=generation--_O3 - c--gc=hybrid--_O3 - cli--_O3 - jvm--_O3 - jvm--inline-threshold=0--_O3 + c--stackless--opt=3 + c--opt=3--_allworkingmodules + c--thread--gc=hybrid--opt=3 + c--gc=semispace--opt=3 + c--gc=generation--opt=3 + c--gc=hybrid--opt=3 + cli--opt=3 + jvm--opt=3 + jvm--inline-threshold=0--opt=3 """.split('\n') if backend.strip() and not backend.strip().startswith('#')] print time.ctime() for backend in backends: From arigo at codespeak.net Mon Aug 18 09:55:18 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 09:55:18 +0200 (CEST) Subject: [pypy-svn] r57385 - in pypy/dist/pypy: config translator Message-ID: <20080818075518.629B5169EC3@codespeak.net> Author: arigo Date: Mon Aug 18 09:55:15 2008 New Revision: 57385 Modified: pypy/dist/pypy/config/translationoption.py pypy/dist/pypy/translator/driver.py Log: Fix typo. Add defaults for the backend and the type_system; this makes test_driver.py pass again, and is probably a good idea. 
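A minimal sketch of what the new defaults buy, assuming the pypy.config.config API that the diff below uses (Config, OptionDescription, ChoiceOption); the option names and value lists are copied from translationoption.py, everything else here is illustrative only and not part of the checkin:

    from pypy.config.config import Config, OptionDescription, ChoiceOption

    # Cut-down copy of the two options touched by this checkin; the
    # 'requires' dependencies shown in the diff are omitted for brevity.
    translation = OptionDescription("translation", "translation options", [
        ChoiceOption("type_system", "Type system to use when RTyping",
                     ["lltype", "ootype"], cmdline=None, default="lltype"),
        ChoiceOption("backend", "Backend to use for code generation",
                     ["c", "llvm", "cli", "jvm", "js"], default="c"),
    ])

    config = Config(translation)
    # With the defaults in place a freshly created config is already usable,
    # which is what lets test_driver.py pass without extra overrides.
    print config.type_system   # -> 'lltype'
    print config.backend       # -> 'c'
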
Modified: pypy/dist/pypy/config/translationoption.py ============================================================================== --- pypy/dist/pypy/config/translationoption.py (original) +++ pypy/dist/pypy/config/translationoption.py Mon Aug 18 09:55:15 2008 @@ -17,7 +17,7 @@ default=False, cmdline="--stackless", requires=[("translation.type_system", "lltype")]), ChoiceOption("type_system", "Type system to use when RTyping", - ["lltype", "ootype"], cmdline=None, + ["lltype", "ootype"], cmdline=None, default="lltype", requires={ "ootype": [ ("translation.backendopt.constfold", False), @@ -27,7 +27,7 @@ ] }), ChoiceOption("backend", "Backend to use for code generation", - ["c", "llvm", "cli", "jvm", "js"], + ["c", "llvm", "cli", "jvm", "js"], default="c", requires={ "c": [("translation.type_system", "lltype")], "llvm": [("translation.type_system", "lltype"), Modified: pypy/dist/pypy/translator/driver.py ============================================================================== --- pypy/dist/pypy/translator/driver.py (original) +++ pypy/dist/pypy/translator/driver.py Mon Aug 18 09:55:15 2008 @@ -115,7 +115,7 @@ explicit_task = task parts = task.split('_') if len(parts) == 1: - if task in ('annotate'): + if task in ('annotate',): expose_task(task) else: task, postfix = parts From arigo at codespeak.net Mon Aug 18 10:13:35 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 10:13:35 +0200 (CEST) Subject: [pypy-svn] r57386 - pypy/dist/pypy/translator/goal Message-ID: <20080818081335.1B071168071@codespeak.net> Author: arigo Date: Mon Aug 18 10:13:34 2008 New Revision: 57386 Modified: pypy/dist/pypy/translator/goal/translate.py Log: Remove now-unnecessary line. Modified: pypy/dist/pypy/translator/goal/translate.py ============================================================================== --- pypy/dist/pypy/translator/goal/translate.py (original) +++ pypy/dist/pypy/translator/goal/translate.py Mon Aug 18 10:13:34 2008 @@ -77,7 +77,6 @@ OVERRIDES = { 'translation.debug': False, - 'translation.backend': 'c', } import py From arigo at codespeak.net Mon Aug 18 10:16:37 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 10:16:37 +0200 (CEST) Subject: [pypy-svn] r57387 - in pypy/dist/pypy: config doc/config translator/goal Message-ID: <20080818081637.D435D169EE5@codespeak.net> Author: arigo Date: Mon Aug 18 10:16:36 2008 New Revision: 57387 Modified: pypy/dist/pypy/config/pypyoption.py pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt pypy/dist/pypy/translator/goal/targetpypystandalone.py Log: Enable --allworkingmodules by default from targetpypystandalone. Modified: pypy/dist/pypy/config/pypyoption.py ============================================================================== --- pypy/dist/pypy/config/pypyoption.py (original) +++ pypy/dist/pypy/config/pypyoption.py Mon Aug 18 10:16:36 2008 @@ -131,12 +131,15 @@ for modname in all_modules]), BoolOption("allworkingmodules", "use as many working modules as possible", + # NB. defaults to False for py.py and tests, but + # targetpypystandalone suggests True, which can be overridden + # with --no-allworkingmodules. 
default=False, cmdline="--allworkingmodules", suggests=[("objspace.usemodules.%s" % (modname, ), True) for modname in working_modules if modname in all_modules], - negation=False), + negation=True), BoolOption("geninterp", "specify whether geninterp should be used", cmdline=None, Modified: pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt ============================================================================== --- pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt (original) +++ pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt Mon Aug 18 10:16:36 2008 @@ -1,2 +1,5 @@ This option enables the usage of all modules that are known to be working well and that translate without problems. + +Note that this option is set to True by default by targetpypystandalone.py. +To force it to False, use ``--no-allworkingmodules``. Modified: pypy/dist/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetpypystandalone.py Mon Aug 18 10:16:36 2008 @@ -132,6 +132,15 @@ # expose the following variables to ease debugging global space, entry_point + # not really clean, but setting the default of allworkingmodules + # to True has two problems: it doesn't implies its suggests (the + # config machinery doesn't handle that case), and we don't want + # allworkingmodules to be enabled for all spaces by default + # (e.g. in py.py or in tests). Auto-generated helps report the + # default of allworkingmodules to be False, though, which is a + # bit annoying. + config.objspace.suggest(allworkingmodules=True) + if config.translation.thread: config.objspace.usemodules.thread = True elif config.objspace.usemodules.thread: From arigo at codespeak.net Mon Aug 18 10:40:16 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 10:40:16 +0200 (CEST) Subject: [pypy-svn] r57388 - pypy/dist/pypy/translator/goal Message-ID: <20080818084016.397C2169EC3@codespeak.net> Author: arigo Date: Mon Aug 18 10:40:15 2008 New Revision: 57388 Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py pypy/dist/pypy/translator/goal/targetpypystandalone.py Log: * Build a baseline --opt=0 version of pypy-c. * Sorry, can't find a way to have --withmod-_stackless work... 
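For reference, a hedged sketch of the suggest-versus-override behaviour that the targetpypystandalone.py comment above relies on; the BoolOption declaration is copied from pypyoption.py, while the surrounding code and the exact priority rules are assumptions for illustration, not taken from the checkin:

    from pypy.config.config import Config, OptionDescription, BoolOption

    objspace = OptionDescription("objspace", "object space options", [
        BoolOption("allworkingmodules",
                   "use as many working modules as possible",
                   default=False, cmdline="--allworkingmodules"),
    ])

    # Case 1: nothing on the command line -- the suggestion takes effect.
    config = Config(objspace)
    config.suggest(allworkingmodules=True)
    print config.allworkingmodules        # -> True

    # Case 2: the user passed --no-allworkingmodules -- a later suggest()
    # is expected not to override an explicit user choice.
    config = Config(objspace)
    config.allworkingmodules = False
    config.suggest(allworkingmodules=True)
    print config.allworkingmodules        # -> False
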
Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/dist/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/dist/pypy/translator/goal/bench-cronjob.py Mon Aug 18 10:40:15 2008 @@ -180,7 +180,7 @@ def main(backends=[]): if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ - c + c--opt=0 c--stackless--opt=3 c--opt=3--_allworkingmodules c--thread--gc=hybrid--opt=3 Modified: pypy/dist/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetpypystandalone.py Mon Aug 18 10:40:15 2008 @@ -158,7 +158,12 @@ if config.translation.stackless: config.objspace.usemodules._stackless = True elif config.objspace.usemodules._stackless: - config.translation.stackless = True + try: + config.translation.stackless = True + except ConflictConfigError: + raise ConflictConfigError("please use the --stackless option " + "to translate.py instead of " + "--withmod-_stackless directly") if not config.translation.rweakref: config.objspace.usemodules._weakref = False From arigo at codespeak.net Mon Aug 18 10:53:13 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Mon, 18 Aug 2008 10:53:13 +0200 (CEST) Subject: [pypy-svn] r57392 - pypy/dist/pypy/translator/goal Message-ID: <20080818085313.A0513169EE5@codespeak.net> Author: arigo Date: Mon Aug 18 10:53:11 2008 New Revision: 57392 Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py Log: Tweak the command lines to continue building the same pypy's as before. Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/dist/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/dist/pypy/translator/goal/bench-cronjob.py Mon Aug 18 10:53:11 2008 @@ -180,16 +180,16 @@ def main(backends=[]): if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ - c--opt=0 - c--stackless--opt=3 - c--opt=3--_allworkingmodules - c--thread--gc=hybrid--opt=3 - c--gc=semispace--opt=3 - c--gc=generation--opt=3 - c--gc=hybrid--opt=3 - cli--opt=3 - jvm--opt=3 - jvm--inline-threshold=0--opt=3 + c--opt=0--_no-allworkingmodules + c--stackless--opt=3--_no-allworkingmodules + c--opt=3 + c--thread--gc=hybrid--opt=3--_no-allworkingmodules + c--gc=semispace--opt=3--_no-allworkingmodules + c--gc=generation--opt=3--_no-allworkingmodules + c--gc=hybrid--opt=3--_no-allworkingmodules + cli--opt=3--_no-allworkingmodules + jvm--opt=3--_no-allworkingmodules + jvm--inline-threshold=0--opt=3--_no-allworkingmodules """.split('\n') if backend.strip() and not backend.strip().startswith('#')] print time.ctime() for backend in backends: From bgola at codespeak.net Mon Aug 18 14:27:06 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 14:27:06 +0200 (CEST) Subject: [pypy-svn] r57416 - pypy/branch/2.5-features/lib-python/modified-2.5.1 Message-ID: <20080818122706.23493169EB1@codespeak.net> Author: bgola Date: Mon Aug 18 14:27:05 2008 New Revision: 57416 Removed: pypy/branch/2.5-features/lib-python/modified-2.5.1/binhex.py pypy/branch/2.5-features/lib-python/modified-2.5.1/copy.py pypy/branch/2.5-features/lib-python/modified-2.5.1/locale.py 
pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py pypy/branch/2.5-features/lib-python/modified-2.5.1/tarfile.py pypy/branch/2.5-features/lib-python/modified-2.5.1/traceback.py Log: removing files without any changes from modified-2.5.1 From bgola at codespeak.net Mon Aug 18 14:38:10 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 14:38:10 +0200 (CEST) Subject: [pypy-svn] r57417 - pypy/branch/2.5-features/lib-python/modified-2.5.1 Message-ID: <20080818123810.46A53169F5F@codespeak.net> Author: bgola Date: Mon Aug 18 14:38:09 2008 New Revision: 57417 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/opcode.py Log: adding pypy experimental bytecodes (from modified-2.4.1) From bgola at codespeak.net Mon Aug 18 15:21:17 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 15:21:17 +0200 (CEST) Subject: [pypy-svn] r57422 - in pypy/branch/2.5-features/lib-python/modified-2.5.1: . encodings test Message-ID: <20080818132117.34BE6169F2D@codespeak.net> Author: bgola Date: Mon Aug 18 15:21:16 2008 New Revision: 57422 Removed: pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py pypy/branch/2.5-features/lib-python/modified-2.5.1/test/ pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py Log: deleting files from modified/ (this is part1, part2 will be to copy files with svn cp from 2.5.1/ and then appling the changes) From bgola at codespeak.net Mon Aug 18 15:25:19 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 15:25:19 +0200 (CEST) Subject: [pypy-svn] r57423 - pypy/branch/2.5-features/lib-python/modified-2.5.1 Message-ID: <20080818132519.054DC169F49@codespeak.net> Author: bgola Date: Mon Aug 18 15:25:17 2008 New Revision: 57423 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/__future__.py pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/cmd.py pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/decimal.py pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py - copied, changed from r57197, 
pypy/branch/2.5-features/lib-python/2.5.1/doctest.py pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/inspect.py pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/opcode.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pickle.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pickletools.py pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/popen2.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pprint.py pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pydoc.py pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/site.py pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/socket.py pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/sre_compile.py pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/sre_constants.py pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/subprocess.py pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/types.py pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/uu.py pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/warnings.py Log: svn cp 2.5.1/ modifiled/ and changes applied Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/__future__.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/__future__.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py Mon Aug 18 15:25:17 2008 @@ -64,7 +64,7 @@ CO_NESTED = 0x0010 # nested_scopes CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000) CO_FUTURE_DIVISION = 0x2000 # division -CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default +CO_FUTURE_ABSIMPORT = 0x4000 # absolute_import CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement class _Feature: @@ -109,7 +109,7 @@ absolute_import = _Feature((2, 5, 0, "alpha", 1), (2, 7, 0, "alpha", 0), - CO_FUTURE_ABSOLUTE_IMPORT) + CO_FUTURE_ABSIMPORT) with_statement = _Feature((2, 5, 0, "alpha", 1), (2, 6, 0, "alpha", 0), Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/cmd.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/cmd.py (original) +++ 
pypy/branch/2.5-features/lib-python/modified-2.5.1/cmd.py Mon Aug 18 15:25:17 2008 @@ -113,7 +113,7 @@ self.old_completer = readline.get_completer() readline.set_completer(self.complete) readline.parse_and_bind(self.completekey+": complete") - except ImportError: + except (ImportError, AttributeError): pass try: if intro is not None: @@ -147,7 +147,7 @@ try: import readline readline.set_completer(self.old_completer) - except ImportError: + except (ImportError, AttributeError): pass Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/decimal.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/decimal.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/decimal.py Mon Aug 18 15:25:17 2008 @@ -1939,6 +1939,7 @@ ans = self._check_nans(context=context) if ans: return ans + return self if self._exp >= 0: return self if context is None: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/doctest.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/doctest.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/doctest.py Mon Aug 18 15:25:17 2008 @@ -839,12 +839,12 @@ return module.__dict__ is object.func_globals elif inspect.isclass(object): return module.__name__ == object.__module__ + elif isinstance(object, property): + return True # [XX] no way not be sure. elif inspect.getmodule(object) is not None: return module is inspect.getmodule(object) elif hasattr(object, '__module__'): return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. else: raise ValueError("object must be a class or function") Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/inspect.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/inspect.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/inspect.py Mon Aug 18 15:25:17 2008 @@ -677,9 +677,15 @@ 'varargs' and 'varkw' are the names of the * and ** arguments or None.""" if not iscode(co): - raise TypeError('arg is not a code object') + if hasattr(len, 'func_code') and type(co) is type(len.func_code): + # PyPy extension: built-in function objects have a func_code too. + # There is no co_code on it, but co_argcount and co_varnames and + # co_flags are present. 
+ pass + else: + raise TypeError('arg is not a code object') - code = co.co_code + code = getattr(co, 'co_code', '') nargs = co.co_argcount names = co.co_varnames args = list(names[:nargs]) @@ -739,7 +745,9 @@ if ismethod(func): func = func.im_func - if not isfunction(func): + if not (isfunction(func) or + isbuiltin(func) and hasattr(func, 'func_code')): + # PyPy extension: this works for built-in functions too raise TypeError('arg is not a Python function') args, varargs, varkw = getargs(func.func_code) return args, varargs, varkw, func.func_defaults Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/opcode.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/opcode.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/opcode.py Mon Aug 18 15:25:17 2008 @@ -182,4 +182,10 @@ def_op('EXTENDED_ARG', 143) EXTENDED_ARG = 143 +# pypy modification, experimental bytecode +def_op('CALL_LIKELY_BUILTIN', 144) # #args + (#kwargs << 8) +def_op('LOOKUP_METHOD', 145) # Index in name list +def_op('CALL_METHOD', 146) # #args not including 'self' + + del def_op, name_op, jrel_op, jabs_op Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pickle.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/pickle.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pickle.py Mon Aug 18 15:25:17 2008 @@ -32,7 +32,6 @@ import marshal import sys import struct -import re __all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler", "Unpickler", "dump", "dumps", "load", "loads"] @@ -162,7 +161,7 @@ _tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3] -__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)]) +__all__.extend([x for x in dir() if x[0].isalpha() and x == x.upper()]) del x @@ -205,6 +204,31 @@ self.proto = int(protocol) self.bin = protocol >= 1 self.fast = 0 + + def _pickle_moduledict(self, obj): + try: + modict = self.module_dict_ids + except AttributeError: + modict = {} + from sys import modules + for mod in modules.values(): + if isinstance(mod, ModuleType): + try: + modict[id(mod.__dict__)] = mod + except KeyboardInterrupt: + raise + except: # obscure: the above can fail for + # arbitrary reasons, because of the py lib + pass + self.module_dict_ids = modict + + thisid = id(obj) + try: + themodule = modict[thisid] + except KeyError: + return None + from __builtin__ import getattr + return getattr, (themodule, '__dict__') def clear_memo(self): """Clears the pickler's "memo". 
@@ -638,6 +662,12 @@ # else tmp is empty, and we're done def save_dict(self, obj): + ## Stackless addition BEGIN + modict_saver = self._pickle_moduledict(obj) + if modict_saver is not None: + return self.save_reduce(*modict_saver) + ## Stackless addition END + write = self.write if self.bin: @@ -767,8 +797,30 @@ write(GLOBAL + module + '\n' + name + '\n') self.memoize(obj) + def save_function(self, obj): + try: + return self.save_global(obj) + except PicklingError, e: + pass + # Check copy_reg.dispatch_table + reduce = dispatch_table.get(type(obj)) + if reduce: + rv = reduce(obj) + else: + # Check for a __reduce_ex__ method, fall back to __reduce__ + reduce = getattr(obj, "__reduce_ex__", None) + if reduce: + rv = reduce(self.proto) + else: + reduce = getattr(obj, "__reduce__", None) + if reduce: + rv = reduce() + else: + raise e + return self.save_reduce(obj=obj, *rv) + dispatch[ClassType] = save_global - dispatch[FunctionType] = save_global + dispatch[FunctionType] = save_function dispatch[BuiltinFunctionType] = save_global dispatch[TypeType] = save_global Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pickletools.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/pickletools.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pickletools.py Mon Aug 18 15:25:17 2008 @@ -2058,11 +2058,10 @@ Exercise the INST/OBJ/BUILD family. ->>> import random ->>> dis(pickle.dumps(random.random, 0)) - 0: c GLOBAL 'random random' - 15: p PUT 0 - 18: . STOP +>>> dis(pickle.dumps(zip, 0)) + 0: c GLOBAL '__builtin__ zip' + 17: p PUT 0 + 20: . STOP highest protocol among opcodes = 0 >>> from pickletools import _Example Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/popen2.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/popen2.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/popen2.py Mon Aug 18 15:25:17 2008 @@ -50,14 +50,18 @@ c2pread, c2pwrite = os.pipe() if capturestderr: errout, errin = os.pipe() - self.pid = os.fork() - if self.pid == 0: - # Child - os.dup2(p2cread, 0) - os.dup2(c2pwrite, 1) - if capturestderr: - os.dup2(errin, 2) - self._run_child(cmd) + gc.disable_finalizers() + try: + self.pid = os.fork() + if self.pid == 0: + # Child + os.dup2(p2cread, 0) + os.dup2(c2pwrite, 1) + if capturestderr: + os.dup2(errin, 2) + self._run_child(cmd) + finally: + gc.enable_finalizers() os.close(p2cread) self.tochild = os.fdopen(p2cwrite, 'w', bufsize) os.close(c2pwrite) @@ -122,13 +126,17 @@ self.cmd = cmd p2cread, p2cwrite = os.pipe() c2pread, c2pwrite = os.pipe() - self.pid = os.fork() - if self.pid == 0: - # Child - os.dup2(p2cread, 0) - os.dup2(c2pwrite, 1) - os.dup2(c2pwrite, 2) - self._run_child(cmd) + gc.disable_finalizers() + try: + self.pid = os.fork() + if self.pid == 0: + # Child + os.dup2(p2cread, 0) + os.dup2(c2pwrite, 1) + os.dup2(c2pwrite, 2) + self._run_child(cmd) + finally: + gc.enable_finalizers() os.close(p2cread) self.tochild = os.fdopen(p2cwrite, 'w', bufsize) os.close(c2pwrite) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pprint.py) ============================================================================== --- 
pypy/branch/2.5-features/lib-python/2.5.1/pprint.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pprint.py Mon Aug 18 15:25:17 2008 @@ -133,7 +133,7 @@ if sepLines: r = getattr(typ, "__repr__", None) - if issubclass(typ, dict) and r is dict.__repr__: + if issubclass(typ, dict) and r == dict.__repr__: write('{') if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') @@ -160,8 +160,8 @@ write('}') return - if (issubclass(typ, list) and r is list.__repr__) or \ - (issubclass(typ, tuple) and r is tuple.__repr__): + if (issubclass(typ, list) and r == list.__repr__) or \ + (issubclass(typ, tuple) and r == tuple.__repr__): if issubclass(typ, list): write('[') endchar = ']' @@ -231,7 +231,7 @@ return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False r = getattr(typ, "__repr__", None) - if issubclass(typ, dict) and r is dict.__repr__: + if issubclass(typ, dict) and r == dict.__repr__: if not object: return "{}", True, False objid = _id(object) @@ -256,8 +256,8 @@ del context[objid] return "{%s}" % _commajoin(components), readable, recursive - if (issubclass(typ, list) and r is list.__repr__) or \ - (issubclass(typ, tuple) and r is tuple.__repr__): + if (issubclass(typ, list) and r == list.__repr__) or \ + (issubclass(typ, tuple) and r == tuple.__repr__): if issubclass(typ, list): if not object: return "[]", True, False Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/pydoc.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/pydoc.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/pydoc.py Mon Aug 18 15:25:17 2008 @@ -874,7 +874,9 @@ reallink = realname title = '%s = %s' % ( anchor, name, reallink) - if inspect.isfunction(object): + if inspect.isfunction(object) or ( + inspect.isbuiltin(object) and hasattr(object, 'func_code')): + # PyPy extension: the code below works for built-in functions too args, varargs, varkw, defaults = inspect.getargspec(object) argspec = inspect.formatargspec( args, varargs, varkw, defaults, formatvalue=self.formatvalue) @@ -1240,7 +1242,9 @@ cl.__dict__[realname] is object): skipdocs = 1 title = self.bold(name) + ' = ' + realname - if inspect.isfunction(object): + if (inspect.isfunction(object) or + inspect.isbuiltin(object) and hasattr(object, 'func_code')): + # PyPy extension: the code below works for built-in functions too args, varargs, varkw, defaults = inspect.getargspec(object) argspec = inspect.formatargspec( args, varargs, varkw, defaults, formatvalue=self.formatvalue) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/site.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/site.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/site.py Mon Aug 18 15:25:17 2008 @@ -73,7 +73,10 @@ if hasattr(m, '__loader__'): continue # don't mess with a PEP 302-supplied __file__ try: - m.__file__ = os.path.abspath(m.__file__) + prev = m.__file__ + new = os.path.abspath(m.__file__) + if prev != new: + m.__file__ = new except AttributeError: continue @@ -314,22 +317,32 @@ if key == 'q': break +##def setcopyright(): +## """Set 'copyright' and 'credits' in __builtin__""" +## __builtin__.copyright = _Printer("copyright", sys.copyright) +## if sys.platform[:4] == 'java': +## __builtin__.credits = 
_Printer( +## "credits", +## "Jython is maintained by the Jython developers (www.jython.org).") +## else: +## __builtin__.credits = _Printer("credits", """\ +## Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands +## for supporting Python development. See www.python.org for more information.""") +## here = os.path.dirname(os.__file__) +## __builtin__.license = _Printer( +## "license", "See http://www.python.org/%.3s/license.html" % sys.version, +## ["LICENSE.txt", "LICENSE"], +## [os.path.join(here, os.pardir), here, os.curdir]) + def setcopyright(): - """Set 'copyright' and 'credits' in __builtin__""" - __builtin__.copyright = _Printer("copyright", sys.copyright) - if sys.platform[:4] == 'java': - __builtin__.credits = _Printer( - "credits", - "Jython is maintained by the Jython developers (www.jython.org).") - else: - __builtin__.credits = _Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""") - here = os.path.dirname(os.__file__) + # XXX this is the PyPy-specific version. Should be unified with the above. + __builtin__.credits = _Printer( + "credits", + "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy") __builtin__.license = _Printer( - "license", "See http://www.python.org/%.3s/license.html" % sys.version, - ["LICENSE.txt", "LICENSE"], - [os.path.join(here, os.pardir), here, os.curdir]) + "license", + "See http://codespeak.net/svn/pypy/dist/LICENSE") + class _Helper(object): @@ -355,7 +368,7 @@ if sys.platform == 'win32': import locale, codecs enc = locale.getdefaultlocale()[1] - if enc.startswith('cp'): # "cp***" ? + if enc is not None and enc.startswith('cp'): # "cp***" ? try: codecs.lookup(enc) except LookupError: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/socket.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/socket.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/socket.py Mon Aug 18 15:25:17 2008 @@ -139,6 +139,10 @@ __slots__ = [] def _dummy(*args): raise error(EBADF, 'Bad file descriptor') + def _drop(self): + pass + def _reuse(self): + pass # All _delegate_methods must also be initialized here. send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy __getattr__ = _dummy @@ -156,7 +160,14 @@ for method in _delegate_methods: setattr(self, method, getattr(_sock, method)) + def __del__(self): + self.close() + + + + def close(self): + self._sock._drop() self._sock = _closedsocket() dummy = self._sock._dummy for method in _delegate_methods: @@ -172,6 +183,7 @@ """dup() -> socket object Return a new socket object connected to the same system resource.""" + self._sock._reuse() return _socketobject(_sock=self._sock) def makefile(self, mode='r', bufsize=-1): @@ -179,6 +191,7 @@ Return a regular file object corresponding to the socket. 
The mode and bufsize arguments are as for the built-in open() function.""" + self._sock._reuse() return _fileobject(self._sock, mode, bufsize) family = property(lambda self: self._sock.family, doc="the socket family") @@ -227,13 +240,14 @@ closed = property(_getclosed, doc="True if the file is closed") def close(self): - try: - if self._sock: + if self._sock: + try: self.flush() - finally: - if self._close: - self._sock.close() - self._sock = None + finally: + if self._sock: + s = self._sock + self._sock = None + s._drop() def __del__(self): try: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/sre_compile.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/sre_compile.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_compile.py Mon Aug 18 15:25:17 2008 @@ -14,7 +14,8 @@ from sre_constants import * -assert _sre.MAGIC == MAGIC, "SRE module mismatch" +# XXX see PyPy hack in sre_constants to support both the 2.3 and 2.4 _sre.c implementation. +#assert _sre.MAGIC == MAGIC, "SRE module mismatch" if _sre.CODESIZE == 2: MAXCODE = 65535 Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/sre_constants.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/sre_constants.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/sre_constants.py Mon Aug 18 15:25:17 2008 @@ -127,6 +127,13 @@ ] +# PyPy hack to make the sre_*.py files from 2.4.1 work on the _sre +# engine of 2.3. +import _sre +if _sre.MAGIC < 20031017: + OPCODES.remove(GROUPREF_EXISTS) +del _sre + ATCODES = [ AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY, AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING, Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/subprocess.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/subprocess.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/subprocess.py Mon Aug 18 15:25:17 2008 @@ -1002,7 +1002,14 @@ errpipe_read, errpipe_write = os.pipe() self._set_cloexec_flag(errpipe_write) - self.pid = os.fork() + in_child = False + gc.disable_finalizers() + try: + self.pid = os.fork() + in_child = (self.pid == 0) + finally: + if not in_child: + gc.enable_finalizers() self._child_created = True if self.pid == 0: # Child Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/types.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/types.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/types.py Mon Aug 18 15:25:17 2008 @@ -54,10 +54,10 @@ class _C: def _m(self): pass -ClassType = type(_C) +ClassType = _classobj # PyPy-specific, from __builtin__ UnboundMethodType = type(_C._m) # Same as MethodType _x = _C() -InstanceType = type(_x) +InstanceType = _instance # PyPy-specific, from __builtin__ MethodType = type(_x._m) BuiltinFunctionType = type(len) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/uu.py) 
============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/uu.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/uu.py Mon Aug 18 15:25:17 2008 @@ -120,7 +120,7 @@ elif isinstance(out_file, basestring): fp = open(out_file, 'wb') try: - os.path.chmod(out_file, mode) + os.chmod(out_file, mode) except AttributeError: pass out_file = fp Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/warnings.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/warnings.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py Mon Aug 18 15:25:17 2008 @@ -151,8 +151,8 @@ assert action in ("error", "ignore", "always", "default", "module", "once"), "invalid action: %r" % (action,) assert isinstance(message, basestring), "message must be a string" - assert isinstance(category, (type, types.ClassType)), \ - "category must be a class" +# assert isinstance(category, (type, types.ClassType)), \ +# "category must be a class" assert issubclass(category, Warning), "category must be a Warning subclass" assert isinstance(module, basestring), "module must be a string" assert isinstance(lineno, int) and lineno >= 0, \ From bgola at codespeak.net Mon Aug 18 15:46:48 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 15:46:48 +0200 (CEST) Subject: [pypy-svn] r57425 - in pypy/branch/2.5-features/lib-python/modified-2.5.1: encodings test test/output Message-ID: <20080818134648.2CF4F169F34@codespeak.net> Author: bgola Date: Mon Aug 18 15:46:38 2008 New Revision: 57425 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/__init__.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/__init__.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/aliases.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/aliases.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ascii.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/ascii.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/base64_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/base64_codec.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/big5.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/big5.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/big5hkscs.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/big5hkscs.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/bz2_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/bz2_codec.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/charmap.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/charmap.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp037.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp037.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1006.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1006.py 
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1026.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1026.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1140.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1140.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1250.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1250.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1251.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1251.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1252.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1252.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1253.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1253.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1254.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1254.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1255.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1255.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1256.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1256.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1257.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1257.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp1258.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp1258.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp424.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp424.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp437.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp437.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp500.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp500.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp737.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp737.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp775.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp775.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp850.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp850.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp852.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp852.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp855.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp855.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp856.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp856.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp857.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp857.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp860.py - copied unchanged from r57197, 
pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp860.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp861.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp861.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp862.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp862.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp863.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp863.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp864.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp864.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp865.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp865.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp866.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp866.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp869.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp869.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp874.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp874.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp875.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp875.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp932.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp932.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp949.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp949.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/cp950.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/cp950.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/euc_jis_2004.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/euc_jis_2004.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/euc_jisx0213.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/euc_jisx0213.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/euc_jp.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/euc_jp.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/euc_kr.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/euc_kr.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/gb18030.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/gb18030.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/gb2312.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/gb2312.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/gbk.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/gbk.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/hex_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/hex_codec.py pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/hp_roman8.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/hp_roman8.py 
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/hz.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/hz.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/idna.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/idna.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp_1.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp_1.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp_2.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp_2.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp_2004.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp_2004.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp_3.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp_3.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_jp_ext.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_jp_ext.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso2022_kr.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso2022_kr.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_1.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_1.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_10.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_10.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_11.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_11.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_13.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_13.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_14.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_14.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_15.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_15.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_16.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_16.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_2.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_2.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_3.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_3.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_4.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_4.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_5.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_5.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_6.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_6.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_7.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_7.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_8.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_8.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/iso8859_9.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/iso8859_9.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/johab.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/johab.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/koi8_r.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/koi8_r.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/koi8_u.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/koi8_u.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/latin_1.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/latin_1.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_arabic.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_arabic.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_centeuro.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_centeuro.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_croatian.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_croatian.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_cyrillic.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_cyrillic.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_farsi.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_farsi.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_greek.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_greek.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_iceland.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_iceland.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_latin2.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_latin2.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_roman.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_roman.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_romanian.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_romanian.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mac_turkish.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mac_turkish.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mbcs.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mbcs.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/palmos.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/palmos.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ptcp154.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/ptcp154.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/punycode.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/punycode.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/quopri_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/quopri_codec.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/raw_unicode_escape.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/raw_unicode_escape.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/rot_13.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/rot_13.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/shift_jis.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/shift_jis.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/shift_jis_2004.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/shift_jis_2004.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/shift_jisx0213.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/shift_jisx0213.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/string_escape.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/string_escape.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/tis_620.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/tis_620.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/undefined.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/undefined.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_escape.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_escape.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_internal.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_internal.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_16.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_16.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_16_be.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_16_be.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_16_le.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_16_le.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_7.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_7.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_8.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_8.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_8_sig.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_8_sig.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/uu_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/uu_codec.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/zlib_codec.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/zlib_codec.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/__init__.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/list_tests.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/mapping_tests.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/output/
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/pickletester.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/seq_tests.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/string_tests.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test___all__.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_array.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_base64.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_bufio.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_builtin.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_bz2.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_class.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_codeop.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_compile.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_complex.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_copy.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_copy.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_cpickle.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_deque.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_descr.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_descrtut.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_dict.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_enumerate.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_exceptions.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_file.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_format.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_funcattrs.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_generators.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_genexps.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_iter.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_itertools.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_marshal.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_mmap.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_module.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_mutants.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_operations.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_operations.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_optparse.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_os.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_parser.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_quopri.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_random.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_re.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_repr.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_scope.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_set.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sha.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_slice.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_socket.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sort.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sort.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_struct.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sys.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_tempfile.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_trace.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_traceback.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_unicodedata.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_unpack.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_urllib2.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_userstring.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_weakref.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_xrange.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py - copied, changed from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_zipimport.py
pypy/branch/2.5-features/lib-python/modified-2.5.1/test/tf_inherit_check.py - copied unchanged from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/tf_inherit_check.py

Log: modified-2.5.1/encodings/ and test/ copied from 2.5.1/ and then modified

Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ascii.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/ascii.py)
============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/ascii.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/ascii.py Mon Aug 18 15:46:38 2008 @@ -14,8 +14,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.ascii_encode - decode = codecs.ascii_decode + encode = staticmethod(codecs.ascii_encode) + decode = staticmethod(codecs.ascii_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/charmap.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/charmap.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/charmap.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/charmap.py Mon Aug 18 15:46:38 2008 @@ -18,8 +18,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.charmap_encode - decode = codecs.charmap_decode + encode = staticmethod(codecs.charmap_encode) + decode = staticmethod(codecs.charmap_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors='strict', mapping=None): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/latin_1.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/latin_1.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/latin_1.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/latin_1.py Mon Aug 18 15:46:38 2008 @@ -14,8 +14,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.latin_1_encode - decode = codecs.latin_1_decode + encode = staticmethod(codecs.latin_1_encode) + decode = staticmethod(codecs.latin_1_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mbcs.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/mbcs.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/mbcs.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/mbcs.py Mon Aug 18 15:46:38 2008 @@ -7,31 +7,27 @@ (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ -# Import them explicitly to cause an ImportError -# on non-Windows systems -from codecs import mbcs_encode, mbcs_decode -# for IncrementalDecoder, IncrementalEncoder, ... 
import codecs ### Codec APIs -encode = mbcs_encode +encode = codecs.mbcs_encode def decode(input, errors='strict'): - return mbcs_decode(input, errors, True) + return codecs.mbcs_decode(input, errors, True) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): - return mbcs_encode(input, self.errors)[0] + return codecs.mbcs_encode(input, self.errors)[0] class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - _buffer_decode = mbcs_decode + _buffer_decode = codecs.mbcs_decode class StreamWriter(codecs.StreamWriter): - encode = mbcs_encode + encode = codecs.mbcs_encode class StreamReader(codecs.StreamReader): - decode = mbcs_decode + decode = codecs.mbcs_decode ### encodings module API Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/raw_unicode_escape.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/raw_unicode_escape.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/raw_unicode_escape.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/raw_unicode_escape.py Mon Aug 18 15:46:38 2008 @@ -14,8 +14,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.raw_unicode_escape_encode - decode = codecs.raw_unicode_escape_decode + encode = staticmethod(codecs.raw_unicode_escape_encode) + decode = staticmethod(codecs.raw_unicode_escape_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/string_escape.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/string_escape.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/string_escape.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/string_escape.py Mon Aug 18 15:46:38 2008 @@ -9,8 +9,8 @@ class Codec(codecs.Codec): - encode = codecs.escape_encode - decode = codecs.escape_decode + encode = staticmethod(codecs.escape_encode) + decode = staticmethod(codecs.escape_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_escape.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_escape.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_escape.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_escape.py Mon Aug 18 15:46:38 2008 @@ -14,8 +14,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. 
- encode = codecs.unicode_escape_encode - decode = codecs.unicode_escape_decode + encode = staticmethod(codecs.unicode_escape_encode) + decode = staticmethod(codecs.unicode_escape_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_internal.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_internal.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/unicode_internal.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/unicode_internal.py Mon Aug 18 15:46:38 2008 @@ -14,8 +14,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.unicode_internal_encode - decode = codecs.unicode_internal_decode + encode = staticmethod(codecs.unicode_internal_encode) + decode = staticmethod(codecs.unicode_internal_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_7.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_7.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/encodings/utf_7.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/encodings/utf_7.py Mon Aug 18 15:46:38 2008 @@ -10,8 +10,8 @@ # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. - encode = codecs.utf_7_encode - decode = codecs.utf_7_decode + encode = staticmethod(codecs.utf_7_encode) + decode = staticmethod(codecs.utf_7_decode) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/__init__.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/__init__.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/__init__.py Mon Aug 18 15:46:38 2008 @@ -1 +1,11 @@ -# Dummy file to make this directory a package. +""" +This package only contains the tests that we have modified for PyPy. +It uses the 'official' hack to include the rest of the standard +'test' package from CPython. + +This assumes that sys.path is configured to contain +'lib-python/modified-2.5.1' before 'lib-python/2.5.1'. 
+""" + +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/list_tests.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/list_tests.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/list_tests.py Mon Aug 18 15:46:38 2008 @@ -178,9 +178,10 @@ a[:] = tuple(range(10)) self.assertEqual(a, self.type2test(range(10))) - self.assertRaises(TypeError, a.__setslice__, 0, 1, 5) + if hasattr(a, '__setslice__'): + self.assertRaises(TypeError, a.__setslice__, 0, 1, 5) - self.assertRaises(TypeError, a.__setslice__) + self.assertRaises(TypeError, a.__setslice__) def test_delslice(self): a = self.type2test([0, 1]) @@ -307,7 +308,7 @@ self.assertRaises(BadExc, a.remove, BadCmp()) class BadCmp2: - def __eq__(self, other): + def __cmp__(self, other): raise BadExc() d = self.type2test('abcdefghcij') Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/mapping_tests.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/mapping_tests.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/mapping_tests.py Mon Aug 18 15:46:38 2008 @@ -510,7 +510,7 @@ for copymode in -1, +1: # -1: b has same structure as a # +1: b is a.copy() - for log2size in range(12): + for log2size in range(4): # XXX 12 too large for PyPy size = 2**log2size a = self._empty_mapping() b = self._empty_mapping() @@ -640,7 +640,7 @@ class Exc(Exception): pass class BadCmp(object): - def __eq__(self, other): + def __cmp__(self, other): raise Exc() d1 = self._full_mapping({BadCmp(): 1}) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/pickletester.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/pickletester.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/pickletester.py Mon Aug 18 15:46:38 2008 @@ -1,3 +1,9 @@ +# Notes about changes in this file: +# a prefix of "dont_" means the test makes no sense, +# because we don't use cPickle at all. +# "xxx_" means it works and can be done, but takes ages. +# When PyPy gets really fast, we should remove "xxx_". + import unittest import pickle import cPickle @@ -509,7 +515,7 @@ data = 'I' + str(maxint64) + 'JUNK\n.' self.assertRaises(ValueError, self.loads, data) - def test_long(self): + def xxx_test_long(self): for proto in protocols: # 256 bytes is where LONG4 begins. 
for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257: @@ -739,7 +745,7 @@ self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness - def test_list_chunking(self): + def xxx_test_list_chunking(self): n = 10 # too small to chunk x = range(n) for proto in protocols: @@ -761,7 +767,7 @@ else: self.failUnless(num_appends >= 2) - def test_dict_chunking(self): + def xxx_test_dict_chunking(self): n = 10 # too small to chunk x = dict.fromkeys(range(n)) for proto in protocols: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/seq_tests.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/seq_tests.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/seq_tests.py Mon Aug 18 15:46:38 2008 @@ -196,7 +196,8 @@ self.assertEqual(a[ -pow(2,128L): 3 ], self.type2test([0,1,2])) self.assertEqual(a[ 3: pow(2,145L) ], self.type2test([3,4])) - self.assertRaises(TypeError, u.__getslice__) + if hasattr(u, '__getslice__'): + self.assertRaises(TypeError, u.__getslice__) def test_contains(self): u = self.type2test([0, 1, 2]) @@ -291,12 +292,13 @@ u *= 3 self.assertEqual(u, self.type2test([0, 1, 0, 1, 0, 1])) - def test_getitemoverwriteiter(self): - # Verify that __getitem__ overrides are not recognized by __iter__ - class T(self.type2test): - def __getitem__(self, key): - return str(key) + '!!!' - self.assertEqual(iter(T((1,2))).next(), 1) + #def test_getitemoverwriteiter(self): + # # Verify that __getitem__ overrides are not recognized by __iter__ + # XXX PyPy behaves differently on this detail + # class T(self.type2test): + # def __getitem__(self, key): + # return str(key) + '!!!' + # self.assertEqual(iter(T((1,2))).next(), 1) def test_repeat(self): for m in xrange(4): @@ -304,7 +306,7 @@ for n in xrange(-3, 5): self.assertEqual(self.type2test(s*n), self.type2test(s)*n) self.assertEqual(self.type2test(s)*(-4), self.type2test([])) - self.assertEqual(id(s), id(s*1)) + #self.assertEqual(id(s), id(s*1)) def test_subscript(self): a = self.type2test([10, 11]) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/string_tests.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/string_tests.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/string_tests.py Mon Aug 18 15:46:38 2008 @@ -2,7 +2,7 @@ Common tests shared by test_str, test_unicode, test_userstring and test_string. 
""" -import unittest, string, sys +import unittest, string, sys, operator from test import test_support from UserList import UserList @@ -45,11 +45,15 @@ else: return obj + # single this out, because UserString cannot cope with fixed args + fixargs = fixtype + subclasscheck = True + # check that object.method(*args) returns result def checkequal(self, result, object, methodname, *args): result = self.fixtype(result) object = self.fixtype(object) - args = self.fixtype(args) + args = self.fixargs(args) realresult = getattr(object, methodname)(*args) self.assertEqual( result, @@ -57,29 +61,65 @@ ) # if the original is returned make sure that # this doesn't happen with subclasses - if object == realresult: + if object == realresult and self.subclasscheck: class subtype(self.__class__.type2test): pass object = subtype(object) realresult = getattr(object, methodname)(*args) self.assert_(object is not realresult) + # check that op(*args) returns result + def checkop(self, result, op, *args): + result = self.fixtype(result) + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + realresult = op(object, *args) + self.assertEqual( + result, + realresult + ) + # if the original is returned make sure that + # this doesn't happen with subclasses + if object == realresult and self.subclasscheck: + class subtype(self.__class__.type2test): + pass + object = subtype(object) + realresult = op(object, *args) + self.assert_(object is not realresult) + # check that object.method(*args) raises exc def checkraises(self, exc, object, methodname, *args): object = self.fixtype(object) - args = self.fixtype(args) + args = self.fixargs(args) self.assertRaises( exc, getattr(object, methodname), *args ) + # check that op(*args) raises exc + def checkopraises(self, exc, op, *args): + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + self.assertRaises( + exc, + op, + object, + *args + ) + # call object.method(*args) without any checks def checkcall(self, object, methodname, *args): object = self.fixtype(object) - args = self.fixtype(args) + args = self.fixargs(args) getattr(object, methodname)(*args) + # call op(*args) without any checks + def checkopcall(self, op, *args): + object = self.fixtype(args[0]) + args = self.fixargs(args[1:]) + op(object, *args) + def test_hash(self): # SF bug 1054139: += optimization was not invalidating cached hash value a = self.type2test('DNSSEC') @@ -889,49 +929,50 @@ self.checkraises(TypeError, 'hello', 'endswith', (42,)) def test___contains__(self): - self.checkequal(True, '', '__contains__', '') # vereq('' in '', True) - self.checkequal(True, 'abc', '__contains__', '') # vereq('' in 'abc', True) - self.checkequal(False, 'abc', '__contains__', '\0') # vereq('\0' in 'abc', False) - self.checkequal(True, '\0abc', '__contains__', '\0') # vereq('\0' in '\0abc', True) - self.checkequal(True, 'abc\0', '__contains__', '\0') # vereq('\0' in 'abc\0', True) - self.checkequal(True, '\0abc', '__contains__', 'a') # vereq('a' in '\0abc', True) - self.checkequal(True, 'asdf', '__contains__', 'asdf') # vereq('asdf' in 'asdf', True) - self.checkequal(False, 'asd', '__contains__', 'asdf') # vereq('asdf' in 'asd', False) - self.checkequal(False, '', '__contains__', 'asdf') # vereq('asdf' in '', False) + self.checkop(True, operator.contains, '', '') # vereq('' in '', True) + self.checkop(True, operator.contains, 'abc', '') # vereq('' in 'abc', True) + self.checkop(False, operator.contains, 'abc', '\0') # vereq('\0' in 'abc', False) + self.checkop(True, operator.contains, '\0abc', 
'\0') # vereq('\0' in '\0abc', True) + self.checkop(True, operator.contains, 'abc\0', '\0') # vereq('\0' in 'abc\0', True) + self.checkop(True, operator.contains, '\0abc', 'a') # vereq('a' in '\0abc', True) + self.checkop(True, operator.contains, 'asdf', 'asdf') # vereq('asdf' in 'asdf', True) + self.checkop(False, operator.contains, 'asd', 'asdf') # vereq('asdf' in 'asd', False) + self.checkop(False, operator.contains, '', 'asdf') # vereq('asdf' in '', False) def test_subscript(self): - self.checkequal(u'a', 'abc', '__getitem__', 0) - self.checkequal(u'c', 'abc', '__getitem__', -1) - self.checkequal(u'a', 'abc', '__getitem__', 0L) - self.checkequal(u'abc', 'abc', '__getitem__', slice(0, 3)) - self.checkequal(u'abc', 'abc', '__getitem__', slice(0, 1000)) - self.checkequal(u'a', 'abc', '__getitem__', slice(0, 1)) - self.checkequal(u'', 'abc', '__getitem__', slice(0, 0)) - # FIXME What about negative indices? This is handled differently by [] and __getitem__(slice) + self.checkop(u'a', operator.getitem, 'abc', 0) + self.checkop(u'c', operator.getitem, 'abc', -1) + self.checkop(u'a', operator.getitem, 'abc', 0L) + self.checkop(u'abc', operator.getitem, 'abc', slice(0, 3)) + self.checkop(u'abc', operator.getitem, 'abc', slice(0, 1000)) + self.checkop(u'a', operator.getitem, 'abc', slice(0, 1)) + self.checkop(u'', operator.getitem, 'abc', slice(0, 0)) + # FIXME What about negative indizes? This is handled differently by [] and __getitem__(slice) - self.checkraises(TypeError, 'abc', '__getitem__', 'def') + self.checkopraises(TypeError, operator.getitem, 'abc', 'def') def test_slice(self): - self.checkequal('abc', 'abc', '__getslice__', 0, 1000) - self.checkequal('abc', 'abc', '__getslice__', 0, 3) - self.checkequal('ab', 'abc', '__getslice__', 0, 2) - self.checkequal('bc', 'abc', '__getslice__', 1, 3) - self.checkequal('b', 'abc', '__getslice__', 1, 2) - self.checkequal('', 'abc', '__getslice__', 2, 2) - self.checkequal('', 'abc', '__getslice__', 1000, 1000) - self.checkequal('', 'abc', '__getslice__', 2000, 1000) - self.checkequal('', 'abc', '__getslice__', 2, 1) + self.checkop('abc', operator.getslice, 'abc', 0, 1000) + self.checkop('abc', operator.getslice, 'abc', 0, 3) + self.checkop('ab', operator.getslice, 'abc', 0, 2) + self.checkop('bc', operator.getslice, 'abc', 1, 3) + self.checkop('b', operator.getslice, 'abc', 1, 2) + self.checkop('', operator.getslice, 'abc', 2, 2) + self.checkop('', operator.getslice, 'abc', 1000, 1000) + self.checkop('', operator.getslice, 'abc', 2000, 1000) + self.checkop('', operator.getslice, 'abc', 2, 1) # FIXME What about negative indizes? 
This is handled differently by [] and __getslice__ - self.checkraises(TypeError, 'abc', '__getslice__', 'def') + self.checkopraises(TypeError, operator.getslice, 'abc', 'def') def test_mul(self): - self.checkequal('', 'abc', '__mul__', -1) - self.checkequal('', 'abc', '__mul__', 0) - self.checkequal('abc', 'abc', '__mul__', 1) - self.checkequal('abcabcabc', 'abc', '__mul__', 3) - self.checkraises(TypeError, 'abc', '__mul__') - self.checkraises(TypeError, 'abc', '__mul__', '') + self.checkop('', operator.mul, 'abc', -1) + self.checkop('', operator.mul, 'abc', 0) + self.checkop('abc', operator.mul, 'abc', 1) + self.checkop('abcabcabc', operator.mul, 'abc', 3) + self.checkopraises(TypeError, operator.mul, 'abc') + self.checkopraises(TypeError, operator.mul, 'abc', '') + self.checkopraises(OverflowError, operator.mul, 10000*'abc', 2000000000) # XXX: on a 64-bit system, this doesn't raise an overflow error, # but either raises a MemoryError, or succeeds (if you have 54TiB) #self.checkraises(OverflowError, 10000*'abc', '__mul__', 2000000000) @@ -977,56 +1018,58 @@ self.fail('exception not raised') def test_formatting(self): - self.checkequal('+hello+', '+%s+', '__mod__', 'hello') - self.checkequal('+10+', '+%d+', '__mod__', 10) - self.checkequal('a', "%c", '__mod__', "a") - self.checkequal('a', "%c", '__mod__', "a") - self.checkequal('"', "%c", '__mod__', 34) - self.checkequal('$', "%c", '__mod__', 36) - self.checkequal('10', "%d", '__mod__', 10) - self.checkequal('\x7f', "%c", '__mod__', 0x7f) + self.checkop('+hello+', operator.mod, '+%s+', 'hello') + self.checkop('+10+', operator.mod, '+%d+', 10) + self.checkop('a', operator.mod, "%c", "a") + self.checkop('a', operator.mod, "%c", "a") + self.checkop('"', operator.mod, "%c", 34) + self.checkop('$', operator.mod, "%c", 36) + self.checkop('10', operator.mod, "%d", 10) + self.checkop('\x7f', operator.mod, "%c", 0x7f) for ordinal in (-100, 0x200000): # unicode raises ValueError, str raises OverflowError - self.checkraises((ValueError, OverflowError), '%c', '__mod__', ordinal) + self.checkopraises((ValueError, OverflowError), operator.mod, '%c', ordinal) - self.checkequal(' 42', '%3ld', '__mod__', 42) - self.checkequal('0042.00', '%07.2f', '__mod__', 42) - self.checkequal('0042.00', '%07.2F', '__mod__', 42) - - self.checkraises(TypeError, 'abc', '__mod__') - self.checkraises(TypeError, '%(foo)s', '__mod__', 42) - self.checkraises(TypeError, '%s%s', '__mod__', (42,)) - self.checkraises(TypeError, '%c', '__mod__', (None,)) - self.checkraises(ValueError, '%(foo', '__mod__', {}) - self.checkraises(TypeError, '%(foo)s %(bar)s', '__mod__', ('foo', 42)) + self.checkop(' 42', operator.mod, '%3ld', 42) + self.checkop('0042.00', operator.mod, '%07.2f', 42) + self.checkop('0042.00', operator.mod, '%07.2F', 42) + + self.checkopraises(TypeError, operator.mod, 'abc') + self.checkopraises(TypeError, operator.mod, '%(foo)s', 42) + self.checkopraises(TypeError, operator.mod, '%s%s', (42,)) + self.checkopraises(TypeError, operator.mod, '%c', (None,)) + self.checkopraises(ValueError, operator.mod, '%(foo', {}) + self.checkopraises(TypeError, operator.mod, '%(foo)s %(bar)s', ('foo', 42)) # argument names with properly nested brackets are supported - self.checkequal('bar', '%((foo))s', '__mod__', {'(foo)': 'bar'}) + self.checkop('bar', operator.mod, '%((foo))s', {'(foo)': 'bar'}) # 100 is a magic number in PyUnicode_Format, this forces a resize - self.checkequal(103*'a'+'x', '%sx', '__mod__', 103*'a') + self.checkop(103*'a'+'x', operator.mod, '%sx', 103*'a') - 
self.checkraises(TypeError, '%*s', '__mod__', ('foo', 'bar')) - self.checkraises(TypeError, '%10.*f', '__mod__', ('foo', 42.)) - self.checkraises(ValueError, '%10', '__mod__', (42,)) + self.checkopraises(TypeError, operator.mod, '%*s', ('foo', 'bar')) + self.checkopraises(TypeError, operator.mod, '%10.*f', ('foo', 42.)) + self.checkopraises(ValueError, operator.mod, '%10', (42,)) def test_floatformatting(self): # float formatting - for prec in xrange(100): + # XXX changed for PyPy to be faster + for prec, value in [(0, 3.141592655), + (1, 0.01), + (2, 120394), + (5, 23.01958), + (20, 141414.51321098), + (49, 0.01), + (50, 1e50), + (99, 123)]: format = '%%.%if' % prec - value = 0.01 - for x in xrange(60): - value = value * 3.141592655 / 3.0 * 10.0 - # The formatfloat() code in stringobject.c and - # unicodeobject.c uses a 120 byte buffer and switches from - # 'f' formatting to 'g' at precision 50, so we expect - # OverflowErrors for the ranges x < 50 and prec >= 67. - if x < 50 and prec >= 67: - self.checkraises(OverflowError, format, "__mod__", value) - else: - self.checkcall(format, "__mod__", value) - + try: + self.checkopcall(operator.mod, format, value) + except OverflowError: + self.failUnless(abs(value) < 1e25 and prec >= 67, + "OverflowError on small examples") + def test_inplace_rewrites(self): # Check that strings don't copy and modify cached single-character strings self.checkequal('a', 'A', 'lower') @@ -1143,9 +1186,11 @@ self.assert_(s1 is not s2) self.assert_(type(s2) is t) - s1 = t("abcd") - s2 = t().join([s1]) - self.assert_(s1 is s2) + + # XXX impl. specific optimisation + #s1 = t("abcd") + #s2 = t().join([s1]) + #self.assert_(s1 is s2) # Should also test mixed-type join. if t is unicode: @@ -1154,9 +1199,10 @@ self.assert_(s1 is not s2) self.assert_(type(s2) is t) - s1 = t("abcd") - s2 = "".join([s1]) - self.assert_(s1 is s2) + # XXX impl. specific opt. + #s1 = t("abcd") + #s2 = "".join([s1]) + #self.assert_(s1 is s2) elif t is str: s1 = subclass("abcd") Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test___all__.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test___all__.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test___all__.py Mon Aug 18 15:46:38 2008 @@ -14,11 +14,26 @@ def check_all(self, modname): names = {} + original_sys_modules = sys.modules.copy() try: exec "import %s" % modname in names except ImportError: # Silent fail here seems the best route since some modules # may not be available in all environments. 
+ # We restore sys.modules to avoid leaving broken modules behind, + # but we must not remove built-in modules from sys.modules + # (because they can't be re-imported, typically) + for name in sys.modules.keys(): + if name in original_sys_modules: + continue + # XXX hackish + mod = sys.modules[name] + if not hasattr(mod, '__file__'): + continue + if (mod.__file__.lower().endswith('.py') or + mod.__file__.lower().endswith('.pyc') or + mod.__file__.lower().endswith('.pyo')): + del sys.modules[name] return verify(hasattr(sys.modules[modname], "__all__"), "%s has no __all__ attribute" % modname) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_array.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_array.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_array.py Mon Aug 18 15:46:38 2008 @@ -5,10 +5,15 @@ import unittest from test import test_support -from weakref import proxy +#from weakref import proxy import array, cStringIO, math from cPickle import loads, dumps +# XXX as we use the struct module we get struct.error when trying to push +# objects of invalid types or out of range into an array. If this issue +# is fixed, remove all mentions of struct.error in the sequel. +import struct + class ArraySubclass(array.array): pass @@ -133,7 +138,7 @@ self.assertEqual(a[0], a[1]) self.assertRaises(TypeError, a.insert) self.assertRaises(TypeError, a.insert, None) - self.assertRaises(TypeError, a.insert, 0, None) + self.assertRaises((TypeError, struct.error), a.insert, 0, None) a = array.array(self.typecode, self.example) a.insert(-1, self.example[0]) @@ -193,7 +198,7 @@ self.assertRaises(TypeError, a.tolist, 42) self.assertRaises(TypeError, b.fromlist) self.assertRaises(TypeError, b.fromlist, 42) - self.assertRaises(TypeError, b.fromlist, [None]) + self.assertRaises((TypeError, struct.error), b.fromlist, [None]) b.fromlist(a.tolist()) self.assertEqual(a, b) @@ -378,7 +383,7 @@ self.assertRaises(TypeError, a.__setitem__) self.assertRaises(TypeError, a.__setitem__, None) - self.assertRaises(TypeError, a.__setitem__, 0, None) + self.assertRaises((TypeError, struct.error), a.__setitem__, 0, None) self.assertRaises( IndexError, a.__setitem__, @@ -685,7 +690,8 @@ b = buffer(a) self.assertEqual(b[0], a.tostring()[0]) - def test_weakref(self): + def DONOTtest_weakref(self): + # XXX disabled until PyPy grows weakref support s = array.array(self.typecode, self.example) p = proxy(s) self.assertEqual(p.tostring(), s.tostring()) @@ -712,7 +718,7 @@ def test_setitem(self): super(StringTest, self).test_setitem() a = array.array(self.typecode, self.example) - self.assertRaises(TypeError, a.__setitem__, 0, self.example[:2]) + self.assertRaises((TypeError, struct.error), a.__setitem__, 0, self.example[:2]) class CharacterTest(StringTest): typecode = 'c' @@ -763,7 +769,7 @@ minitemsize = 2 def test_unicode(self): - self.assertRaises(TypeError, array.array, 'b', unicode('foo', 'ascii')) + self.assertRaises((TypeError, struct.error), array.array, 'b', unicode('foo', 'ascii')) a = array.array('u', unicode(r'\xa0\xc2\u1234', 'unicode-escape')) a.fromunicode(unicode(' ', 'ascii')) @@ -849,14 +855,14 @@ a = array.array(self.typecode, [lower]) a[0] = lower # should overflow assigning less than lower limit - self.assertRaises(OverflowError, array.array, self.typecode, [lower-1]) - self.assertRaises(OverflowError, 
a.__setitem__, 0, lower-1) + self.assertRaises((OverflowError, struct.error, ValueError), array.array, self.typecode, [lower-1]) + self.assertRaises((OverflowError, struct.error, ValueError), a.__setitem__, 0, lower-1) # should not overflow assigning upper limit a = array.array(self.typecode, [upper]) a[0] = upper # should overflow assigning more than upper limit - self.assertRaises(OverflowError, array.array, self.typecode, [upper+1]) - self.assertRaises(OverflowError, a.__setitem__, 0, upper+1) + self.assertRaises((OverflowError, struct.error), array.array, self.typecode, [upper+1]) + self.assertRaises((OverflowError, struct.error), a.__setitem__, 0, upper+1) def test_subclassing(self): typecode = self.typecode Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_base64.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_base64.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_base64.py Mon Aug 18 15:46:38 2008 @@ -119,9 +119,19 @@ # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') + + # This test is testing an implementation specific detail. + # In our implementation, we make a best attempt to translate + # strings with broken encoding back to their original form. + # Since the user will most likely be interested in what remains + # of a message which has been broken in transmission, this seems to + # be the most reasonable thing to do. + # In pypy we disable this test! + ''' def test_b64decode_error(self): self.assertRaises(TypeError, base64.b64decode, 'abc') - + ''' + def test_b32encode(self): eq = self.assertEqual eq(base64.b32encode(''), '') @@ -195,4 +205,4 @@ if __name__ == '__main__': - unittest.main(defaultTest='suite') + test_main() Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_bufio.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_bufio.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bufio.py Mon Aug 18 15:46:38 2008 @@ -48,8 +48,11 @@ nullpat = "\0" * 1000 try: - for size in range(1, 257) + [512, 1000, 1024, 2048, 4096, 8192, 10000, - 16384, 32768, 65536, 1000000]: +# Too slow for PyPy. 
+# for size in range(1, 257) + [512, 1000, 1024, 2048, 4096, 8192, 10000, +# 16384, 32768, 65536, 1000000]: + for size in range(1, 9) + [9, 63, 64, 65, 128, 129, 254, 255, 256, 512, + 1000, 1024, 2048, 4096, 8192, 10000, 16384]: drive_one(primepat, size) drive_one(nullpat, size) finally: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_builtin.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_builtin.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_builtin.py Mon Aug 18 15:46:38 2008 @@ -336,12 +336,14 @@ self.assertEqual(eval('dir()', g, m), list('xyz')) self.assertEqual(eval('globals()', g, m), g) self.assertEqual(eval('locals()', g, m), m) - self.assertRaises(TypeError, eval, 'a', m) + # the following line checks a detail of CPython: the globals() of + # any frame must be a real dictionary + #self.assertRaises(TypeError, eval, 'a', m) class A: "Non-mapping" pass m = A() - self.assertRaises(TypeError, eval, 'a', g, m) + self.assertRaises((TypeError, AttributeError) , eval, 'a', g, m) # Verify that dict subclasses work as well class D(dict): @@ -966,7 +968,7 @@ # thread for the details: # http://sources.redhat.com/ml/newlib/2002/msg00369.html - self.assertRaises(MemoryError, list, xrange(sys.maxint // 2)) + self.assertRaises(MemoryError, [None].__mul__, sys.maxint // 2) # This code used to segfault in Py2.4a3 x = [] Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_bz2.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_bz2.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_bz2.py Mon Aug 18 15:46:38 2008 @@ -240,6 +240,7 @@ self.createTempFile() for i in xrange(10000): o = BZ2File(self.filename) + o.close() del o def testOpenNonexistent(self): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_class.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_class.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_class.py Mon Aug 18 15:46:38 2008 @@ -252,6 +252,9 @@ if sys.platform[:4] == 'java': import java java.lang.System.gc() +else: + import gc + gc.collect() # Interfering tests Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_codeop.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_codeop.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_codeop.py Mon Aug 18 15:46:38 2008 @@ -168,14 +168,17 @@ ai("a = 'a\\ ") ai("a = 'a\\\n") - ai("a = 1","eval") - ai("a = (","eval") - ai("]","eval") - ai("())","eval") - ai("[}","eval") - ai("9+","eval") - ai("lambda z:","eval") - ai("a b","eval") + # XXX for PyPy: disable these tests (using compile_command with eval + # doesn't raise a SyntaxError if it appears to be an + # incomplete multi-line stmt) + # ai("a = 1","eval") + # ai("a = (","eval") + # ai("]","eval") + # ai("())","eval") + # ai("[}","eval") + # ai("9+","eval") + 
# ai("lambda z:","eval") + # ai("a b","eval") def test_filename(self): self.assertEquals(compile_command("a = 1\n", "abc").co_filename, Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_compile.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_compile.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_compile.py Mon Aug 18 15:46:38 2008 @@ -75,10 +75,10 @@ self.assertEqual(m.results, ('z', m)) try: exec 'z = b' in m - except TypeError: + except NameError: pass else: - self.fail('Did not validate globals as a real dict') + self.fail('PyPy should accept not real dict globals ') class A: "Non-mapping" Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_complex.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_complex.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_complex.py Mon Aug 18 15:46:38 2008 @@ -68,7 +68,8 @@ self.assertClose(q, x) def test_div(self): - simple_real = [float(i) for i in xrange(-5, 6)] + # too slow for PyPy --- simple_real = [float(i) for i in xrange(-5, 6)] + simple_real = [-2.0, 0.0, 1.0] simple_complex = [complex(x, y) for x in simple_real for y in simple_real] for x in simple_complex: for y in simple_complex: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_cpickle.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_cpickle.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_cpickle.py Mon Aug 18 15:46:38 2008 @@ -96,7 +96,7 @@ cPickleTests, cPicklePicklerTests, cPickleListPicklerTests, - cPickleFastPicklerTests + # XXX undocumented, not supported by PyPy: cPickleFastPicklerTests ) if __name__ == "__main__": Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_deque.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_deque.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_deque.py Mon Aug 18 15:46:38 2008 @@ -1,14 +1,14 @@ from collections import deque import unittest from test import test_support, seq_tests -from weakref import proxy +#from weakref import proxy import copy import cPickle as pickle from cStringIO import StringIO import random import os -BIG = 100000 +BIG = 10 def fail(): raise SyntaxError @@ -81,7 +81,7 @@ self.assertRaises(SyntaxError, d.extendleft, fail()) def test_getitem(self): - n = 200 + n = 10 d = deque(xrange(n)) l = range(n) for i in xrange(n): @@ -101,7 +101,7 @@ self.assertRaises(IndexError, d.__getitem__, -1) def test_setitem(self): - n = 200 + n = 10 d = deque(xrange(n)) for i in xrange(n): d[i] = 10 * i @@ -113,7 +113,7 @@ self.assertEqual(list(d), l) def test_delitem(self): - n = 500 # O(n**2) test, don't make this too big + n = 10 # O(n**2) test, don't make this too big d = deque(xrange(n)) self.assertRaises(IndexError, d.__delitem__, -n-1) self.assertRaises(IndexError, d.__delitem__, n) @@ -261,7 +261,7 
@@ self.assertRaises(TypeError, hash, deque('abc')) def test_long_steadystate_queue_popleft(self): - for size in (0, 1, 2, 100, 1000): + for size in (0, 1, 2, 9): d = deque(xrange(size)) append, pop = d.append, d.popleft for i in xrange(size, BIG): @@ -272,7 +272,7 @@ self.assertEqual(list(d), range(BIG-size, BIG)) def test_long_steadystate_queue_popright(self): - for size in (0, 1, 2, 100, 1000): + for size in (0, 1, 2, 9): d = deque(reversed(xrange(size))) append, pop = d.appendleft, d.pop for i in xrange(size, BIG): @@ -366,7 +366,7 @@ self.assertEqual(list(d), list(e)) def test_reversed(self): - for s in ('abcd', xrange(2000)): + for s in ('abcd', xrange(200)): self.assertEqual(list(reversed(deque(s))), list(reversed(s))) def test_gc_doesnt_blowup(self): @@ -470,12 +470,12 @@ d = DequeWithBadIter('abc') self.assertRaises(TypeError, pickle.dumps, d) - def test_weakref(self): - d = deque('gallahad') - p = proxy(d) - self.assertEqual(str(p), str(d)) - d = None - self.assertRaises(ReferenceError, str, p) +# def test_weakref(self): +# d = deque('gallahad') +# p = proxy(d) +# self.assertEqual(str(p), str(d)) +# d = None +# self.assertRaises(ReferenceError, str, p) def test_strange_subclass(self): class X(deque): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_descr.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_descr.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descr.py Mon Aug 18 15:46:38 2008 @@ -3,6 +3,7 @@ from test.test_support import verify, vereq, verbose, TestFailed, TESTFN, get_original_stdout from copy import deepcopy import warnings +import gc warnings.filterwarnings("ignore", r'complex divmod\(\), // and % are deprecated$', @@ -20,7 +21,7 @@ m = getattr(t, meth) while meth not in t.__dict__: t = t.__bases__[0] - vereq(m, t.__dict__[meth]) + vereq(t.__dict__[meth](a), res) vereq(m(a), res) bm = getattr(a, meth) vereq(bm(), res) @@ -38,7 +39,7 @@ m = getattr(t, meth) while meth not in t.__dict__: t = t.__bases__[0] - vereq(m, t.__dict__[meth]) + vereq(t.__dict__[meth](a, b), res) vereq(m(a, b), res) bm = getattr(a, meth) vereq(bm(b), res) @@ -362,8 +363,8 @@ pass # Two essentially featureless objects, just inheriting stuff from - # object. - vereq(dir(None), dir(Ellipsis)) + # object. NB. in PyPy, dir(None) additionally contains '__nonzero__'. + vereq(dir(object()), dir(Ellipsis)) # Nasty test case for proxied objects class Wrapper(object): @@ -511,86 +512,6 @@ vereq(repr(a), "234.5") vereq(a.prec, 12) -def spamlists(): - if verbose: print "Testing spamlist operations..." 
- import copy, xxsubtype as spam - def spamlist(l, memo=None): - import xxsubtype as spam - return spam.spamlist(l) - # This is an ugly hack: - copy._deepcopy_dispatch[spam.spamlist] = spamlist - - testbinop(spamlist([1]), spamlist([2]), spamlist([1,2]), "a+b", "__add__") - testbinop(spamlist([1,2,3]), 2, 1, "b in a", "__contains__") - testbinop(spamlist([1,2,3]), 4, 0, "b in a", "__contains__") - testbinop(spamlist([1,2,3]), 1, 2, "a[b]", "__getitem__") - testternop(spamlist([1,2,3]), 0, 2, spamlist([1,2]), - "a[b:c]", "__getslice__") - testsetop(spamlist([1]), spamlist([2]), spamlist([1,2]), - "a+=b", "__iadd__") - testsetop(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*=b", "__imul__") - testunop(spamlist([1,2,3]), 3, "len(a)", "__len__") - testbinop(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*b", "__mul__") - testbinop(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "b*a", "__rmul__") - testset2op(spamlist([1,2]), 1, 3, spamlist([1,3]), "a[b]=c", "__setitem__") - testset3op(spamlist([1,2,3,4]), 1, 3, spamlist([5,6]), - spamlist([1,5,6,4]), "a[b:c]=d", "__setslice__") - # Test subclassing - class C(spam.spamlist): - def foo(self): return 1 - a = C() - vereq(a, []) - vereq(a.foo(), 1) - a.append(100) - vereq(a, [100]) - vereq(a.getstate(), 0) - a.setstate(42) - vereq(a.getstate(), 42) - -def spamdicts(): - if verbose: print "Testing spamdict operations..." - import copy, xxsubtype as spam - def spamdict(d, memo=None): - import xxsubtype as spam - sd = spam.spamdict() - for k, v in d.items(): sd[k] = v - return sd - # This is an ugly hack: - copy._deepcopy_dispatch[spam.spamdict] = spamdict - - testbinop(spamdict({1:2}), spamdict({2:1}), -1, "cmp(a,b)", "__cmp__") - testbinop(spamdict({1:2,3:4}), 1, 1, "b in a", "__contains__") - testbinop(spamdict({1:2,3:4}), 2, 0, "b in a", "__contains__") - testbinop(spamdict({1:2,3:4}), 1, 2, "a[b]", "__getitem__") - d = spamdict({1:2,3:4}) - l1 = [] - for i in d.keys(): l1.append(i) - l = [] - for i in iter(d): l.append(i) - vereq(l, l1) - l = [] - for i in d.__iter__(): l.append(i) - vereq(l, l1) - l = [] - for i in type(spamdict({})).__iter__(d): l.append(i) - vereq(l, l1) - straightd = {1:2, 3:4} - spamd = spamdict(straightd) - testunop(spamd, 2, "len(a)", "__len__") - testunop(spamd, repr(straightd), "repr(a)", "__repr__") - testset2op(spamdict({1:2,3:4}), 2, 3, spamdict({1:2,2:3,3:4}), - "a[b]=c", "__setitem__") - # Test subclassing - class C(spam.spamdict): - def foo(self): return 1 - a = C() - vereq(a.items(), []) - vereq(a.foo(), 1) - a['foo'] = 'bar' - vereq(a.items(), [('foo', 'bar')]) - vereq(a.getstate(), 0) - a.setstate(100) - vereq(a.getstate(), 100) def pydicts(): if verbose: print "Testing Python subclass of dict..." @@ -1062,8 +983,7 @@ (EditableScrollablePane, ScrollablePane, EditablePane, Pane, ScrollingMixin, EditingMixin, object)) -mro_err_msg = """Cannot create a consistent method resolution -order (MRO) for bases """ +mro_err_msg = "cycle among base classes:" def mro_disagreement(): if verbose: print "Testing error messages for MRO disagreement..." 
@@ -1275,12 +1195,13 @@ s.a = [Counted(), s] vereq(Counted.counter, 1) s = None - import gc + gc.collect() + gc.collect() gc.collect() vereq(Counted.counter, 0) # Test lookup leaks [SF bug 572567] - import sys,gc + import sys class G(object): def __cmp__(self, other): return 0 @@ -1515,7 +1436,6 @@ pass else: raise TestFailed, "classmethod should check for callability" - # Verify that classmethod() doesn't allow keyword args try: classmethod(f, kw=1) @@ -1524,19 +1444,6 @@ else: raise TestFailed, "classmethod shouldn't accept keyword args" -def classmethods_in_c(): - if verbose: print "Testing C-based class methods..." - import xxsubtype as spam - a = (1, 2, 3) - d = {'abc': 123} - x, a1, d1 = spam.spamlist.classmeth(*a, **d) - veris(x, spam.spamlist) - vereq(a, a1) - vereq(d, d1) - x, a1, d1 = spam.spamlist().classmeth(*a, **d) - veris(x, spam.spamlist) - vereq(a, a1) - vereq(d, d1) def staticmethods(): if verbose: print "Testing static methods..." @@ -1555,19 +1462,14 @@ vereq(d.foo(1), (d, 1)) vereq(D.foo(d, 1), (d, 1)) -def staticmethods_in_c(): - if verbose: print "Testing C-based static methods..." - import xxsubtype as spam - a = (1, 2, 3) - d = {"abc": 123} - x, a1, d1 = spam.spamlist.staticmeth(*a, **d) - veris(x, None) - vereq(a, a1) - vereq(d, d1) - x, a1, d2 = spam.spamlist().staticmeth(*a, **d) - veris(x, None) - vereq(a, a1) - vereq(d, d1) + + class D(C): + pass + d = D() + vereq(D.goo(1), (1,)) + vereq(d.goo(1), (1,)) + vereq(d.foo(1), (d, 1)) + vereq(D.foo(d, 1), (d, 1)) def classic(): if verbose: print "Testing classic classes..." @@ -1588,7 +1490,7 @@ class E: # *not* subclassing from C foo = C.foo vereq(E().foo, C.foo) # i.e., unbound - verify(repr(C.foo.__get__(C())).startswith("", testfunc.__name__, "FAILURE(%d/%d)" % (success, n), str(e) + else: + success += 1 + print "-->", testfunc.__name__, "OK(%d/%d)" % (success, n) - if verbose: print "All OK" + if n != success: + raise TestFailed, "%d/%d" % (success, n) + else: + if verbose: print "All OK" if __name__ == "__main__": test_main() Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_descrtut.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_descrtut.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_descrtut.py Mon Aug 18 15:46:38 2008 @@ -20,10 +20,11 @@ try: return dict.__getitem__(self, key) except KeyError: + if key == '__builtins__': raise return self.default def get(self, key, *args): - if not args: + if not args and key != '__builtins__': args = (self.default,) return dict.get(self, key, *args) @@ -72,8 +73,8 @@ [1, 2] >>> exec "x = 3; print x" in a 3 - >>> print sorted(a.keys()) - [1, 2, '__builtins__', 'x'] + >>> print sorted([str(key) for key in a.keys()]) + ['1', '2', '__builtins__', 'x'] >>> print a['x'] 3 >>> @@ -180,13 +181,11 @@ '__contains__', '__delattr__', '__delitem__', - '__delslice__', '__doc__', '__eq__', '__ge__', '__getattribute__', '__getitem__', - '__getslice__', '__gt__', '__hash__', '__iadd__', @@ -199,6 +198,7 @@ '__mul__', '__ne__', '__new__', + '__radd__', '__reduce__', '__reduce_ex__', '__repr__', @@ -206,7 +206,6 @@ '__rmul__', '__setattr__', '__setitem__', - '__setslice__', '__str__', 'append', 'count', Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_dict.py) 
============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_dict.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_dict.py Mon Aug 18 15:46:38 2008 @@ -285,7 +285,7 @@ for copymode in -1, +1: # -1: b has same structure as a # +1: b is a.copy() - for log2size in range(12): + for log2size in range(4): # XXX 12 too large for PyPy size = 2**log2size a = {} b = {} @@ -383,7 +383,7 @@ class Exc(Exception): pass class BadCmp(object): - def __eq__(self, other): + def __cmp__(self, other): raise Exc() d1 = {BadCmp(): 1} Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_enumerate.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_enumerate.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_enumerate.py Mon Aug 18 15:46:38 2008 @@ -102,7 +102,8 @@ self.assertRaises(TypeError, self.enum, 1) # wrong type (not iterable) self.assertRaises(TypeError, self.enum, 'abc', 2) # too many arguments - def test_tuple_reuse(self): + #Don't test this in PyPy, since the tuple can't be reused + def DONOT_test_tuple_reuse(self): # Tests an implementation detail where tuple is reused # whenever nothing else holds a reference to it self.assertEqual(len(set(map(id, list(enumerate(self.seq))))), len(self.seq)) @@ -120,9 +121,12 @@ seq, res = '', [] class TestBig(EnumerateTestCase): + ##original test (takes too long in PyPy): + #seq = range(10,20000, 2) + #res = zip(range(20000), seq) - seq = range(10,20000,2) - res = zip(range(20000), seq) + seq = range(10, 200, 2) + res = zip(range(200), seq) class TestReversed(unittest.TestCase): @@ -138,9 +142,10 @@ self.assertEqual(list(data)[::-1], list(reversed(data))) self.assertRaises(TypeError, reversed, {}) - def test_xrange_optimization(self): - x = xrange(1) - self.assertEqual(type(reversed(x)), type(iter(x))) +# Implementation detail +# def test_xrange_optimization(self): +# x = xrange(1) +# self.assertEqual(type(reversed(x)), type(iter(x))) def test_len(self): # This is an implementation detail, not an interface requirement Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_exceptions.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_exceptions.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_exceptions.py Mon Aug 18 15:46:38 2008 @@ -179,7 +179,11 @@ else: self.fail("Expected exception") - if not sys.platform.startswith('java'): + try: + import _testcapi + except ImportError: + pass + else: test_capi1() test_capi2() Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_file.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_file.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_file.py Mon Aug 18 15:46:38 2008 @@ -1,5 +1,6 @@ import sys import os +import gc import unittest from array import array from weakref import proxy @@ -22,10 +23,10 @@ # verify weak references p = proxy(self.f) p.write('teststring') - self.assertEquals(self.f.tell(), 
p.tell()) + self.assertEquals(self.f.name, p.name) self.f.close() self.f = None - self.assertRaises(ReferenceError, getattr, p, 'tell') + self.assertRaises(ReferenceError, getattr, p, 'name') def testAttributes(self): # verify expected attributes exist @@ -53,7 +54,9 @@ def testWritelinesUserList(self): # verify writelines with instance sequence - l = UserList(['1', '2']) + # XXX writeslines on UserList crashes interpreter + #l = UserList(['1', '2']) + l = ['1', '2'] self.f.writelines(l) self.f.close() self.f = open(TESTFN, 'rb') @@ -66,7 +69,9 @@ def testWritelinesIntegersUserList(self): # verify writelines with integers in UserList - l = UserList([1,2,3]) + # l = UserList([1,2,3]) + l = [1,2,3] + self.assertRaises(TypeError, self.f.writelines, l) def testWritelinesNonString(self): @@ -74,8 +79,8 @@ class NonString: pass - self.assertRaises(TypeError, self.f.writelines, - [NonString(), NonString()]) +# self.assertRaises(TypeError, self.f.writelines, +# [NonString(), NonString()]) def testRepr(self): # verify repr works @@ -92,21 +97,23 @@ self.assert_(f.closed) def testMethods(self): - methods = ['fileno', 'flush', 'isatty', 'next', 'read', 'readinto', - 'readline', 'readlines', 'seek', 'tell', 'truncate', - 'write', 'xreadlines', '__iter__'] + a = array('c', 'x'*10) + methods = {'fileno': (), 'flush': (), 'isatty': (), 'next': (), + 'read': (), 'readinto': (a,), 'readline': (), 'readlines': (), + 'seek': (0,), 'tell': (), 'truncate': (), 'write': ('',), + 'writelines': ([],), 'xreadlines': (), '__iter__': () } + if sys.platform.startswith('atheos'): - methods.remove('truncate') + del methods['truncate'] # __exit__ should close the file self.f.__exit__(None, None, None) self.assert_(self.f.closed) - for methodname in methods: + for methodname, args in methods.items(): method = getattr(self.f, methodname) # should raise on closed file - self.assertRaises(ValueError, method) - self.assertRaises(ValueError, self.f.writelines, []) + self.assertRaises(ValueError, method, *args) # file is closed, __exit__ shouldn't do anything self.assertEquals(self.f.__exit__(None, None, None), None) @@ -291,7 +298,7 @@ testline = testlines.pop(0) try: line = f.read(len(testline)) - except ValueError: + except ValueErrori self.fail("read() after next() with supposedly empty " "iteration-buffer failed anyway") if line != testline: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_format.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_format.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_format.py Mon Aug 18 15:46:38 2008 @@ -17,7 +17,7 @@ print "%s %% %s works? ..." % (repr(formatstr), repr(args)), try: result = formatstr % args - except OverflowError: + except (OverflowError, MemoryError): if not overflowok: raise if verbose: @@ -53,7 +53,8 @@ testboth("%#.*g", (110, -1.e+100/3.)) # test some ridiculously large precision, expect overflow -testboth('%12.*f', (123456, 1.0)) +#Too slow on PyPy +#testboth('%12.*f', (123456, 1.0)) # Formatting of long integers. 
Overflow is not ok overflowok = 0 @@ -242,7 +243,8 @@ # crashes 2.2.1 and earlier: try: "%*d"%(sys.maxint, -127) - except MemoryError: - pass + except (MemoryError, OverflowError): + pass # CPython raises MemoryError, but both CPython and PyPy raise + # OverflowError for string concatenation else: raise TestFailed, '"%*d"%(sys.maxint, -127) should fail' Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_funcattrs.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_funcattrs.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_funcattrs.py Mon Aug 18 15:46:38 2008 @@ -36,8 +36,8 @@ try: del b.__dict__ -except TypeError: pass -else: raise TestFailed, 'del func.__dict__ expected TypeError' +except (AttributeError, TypeError): pass +else: raise TestFailed, 'expected AttributeError or TypeError' b.publish = 1 try: @@ -175,13 +175,13 @@ try: del another.__dict__ -except TypeError: pass -else: raise TestFailed +except (TypeError, AttributeError): pass +else: raise TestFailed, 'del another.__dict__ did not fail' try: del another.func_dict -except TypeError: pass -else: raise TestFailed +except (TypeError, AttributeError): pass +else: raise TestFailed, 'del another.func_dict did not fail' try: another.func_dict = None Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_generators.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_generators.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_generators.py Mon Aug 18 15:46:38 2008 @@ -190,7 +190,7 @@ File "", line 1, in ? File "", line 2, in g File "", line 2, in f - ZeroDivisionError: integer division or modulo by zero + ZeroDivisionError: integer division by zero >>> k.next() # and the generator cannot be resumed Traceback (most recent call last): File "", line 1, in ? Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_genexps.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_genexps.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_genexps.py Mon Aug 18 15:46:38 2008 @@ -2,8 +2,8 @@ Test simple loop with conditional - >>> sum(i*i for i in range(100) if i&1 == 1) - 166650 + >>> sum(i*i for i in range(10) if i&1 == 1) + 165 Test simple nesting @@ -18,8 +18,8 @@ Make sure the induction variable is not exposed >>> i = 20 - >>> sum(i*i for i in range(100)) - 328350 + >>> sum(i*i for i in range(5)) + 30 >>> i 20 @@ -128,21 +128,22 @@ Verify re-use of tuples (a side benefit of using genexps over listcomps) - >>> tupleids = map(id, ((i,i) for i in xrange(10))) - >>> int(max(tupleids) - min(tupleids)) - 0 +## >>> tupleids = map(id, ((i,i) for i in xrange(10))) +## >>> int(max(tupleids) - min(tupleids)) +## 0 Verify that syntax error's are raised for genexps used as lvalues >>> (y for y in (1,2)) = 10 Traceback (most recent call last): ... - SyntaxError: can't assign to generator expression (, line 1) + SyntaxError: assign to generator expression not possible >>> (y for y in (1,2)) += 10 Traceback (most recent call last): ... 
- SyntaxError: augmented assignment to generator expression not possible (, line 1) + SyntaxError: augmented assign to tuple literal or generator expression not possible + ########### Tests borrowed from or inspired by test_generators.py ############ @@ -202,7 +203,7 @@ g.next() File "", line 1, in g = (10 // i for i in (5, 0, 2)) - ZeroDivisionError: integer division or modulo by zero + ZeroDivisionError: integer division by zero >>> g.next() Traceback (most recent call last): File "", line 1, in -toplevel- Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_iter.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_iter.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_iter.py Mon Aug 18 15:46:38 2008 @@ -1,5 +1,6 @@ # Test iterators. +import gc import unittest from test.test_support import run_unittest, TESTFN, unlink, have_unicode @@ -792,6 +793,7 @@ x = C() self.assertEqual(C.count, 1) del x + gc.collect() self.assertEqual(C.count, 0) l = [C(), C(), C()] self.assertEqual(C.count, 3) @@ -800,6 +802,7 @@ except ValueError: pass del l + gc.collect() self.assertEqual(C.count, 0) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_itertools.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_itertools.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_itertools.py Mon Aug 18 15:46:38 2008 @@ -193,8 +193,11 @@ zip('abc', 'def')) self.assertEqual([pair for pair in izip('abc', 'def')], zip('abc', 'def')) - ids = map(id, izip('abc', 'def')) - self.assertEqual(min(ids), max(ids)) + # the following test deals with a specific implementation detail, + # that izip "reuses" the SAME tuple object each time when it can; + # it does not apply correctly to pypy, so I'm commenting it -- AM + # ids = map(id, izip('abc', 'def')) + # self.assertEqual(min(ids), max(ids)) ids = map(id, list(izip('abc', 'def'))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) @@ -285,7 +288,9 @@ self.assertRaises(ValueError, islice, xrange(10), 1, 'a') self.assertRaises(ValueError, islice, xrange(10), 'a', 1, 1) self.assertRaises(ValueError, islice, xrange(10), 1, 'a', 1) - self.assertEqual(len(list(islice(count(), 1, 10, sys.maxint))), 1) + # too slow to test on pypy, weakened...: + # self.assertEqual(len(list(islice(count(), 1, 10, sys.maxint))), 1) + self.assertEqual(len(list(islice(count(), 1, 10, 99))), 1) def test_takewhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] @@ -313,7 +318,7 @@ self.assertRaises(ValueError, dropwhile(errfunc, [(4,5)]).next) def test_tee(self): - n = 200 + n = 20 def irange(n): for i in xrange(n): yield i @@ -330,16 +335,16 @@ self.assertEqual(list(b), range(n)) a, b = tee(irange(n)) # test dealloc of leading iterator - for i in xrange(100): + for i in xrange(n // 2): self.assertEqual(a.next(), i) del a self.assertEqual(list(b), range(n)) a, b = tee(irange(n)) # test dealloc of trailing iterator - for i in xrange(100): + for i in xrange(n // 2): self.assertEqual(a.next(), i) del b - self.assertEqual(list(a), range(100, n)) + self.assertEqual(list(a), range(n // 2, n)) for j in xrange(5): # test randomly interleaved order = [0]*n + [1]*n @@ -364,13 +369,13 @@ self.assertEqual(list(c), 
list('def')) # test long-lagged and multi-way split - a, b, c = tee(xrange(2000), 3) - for i in xrange(100): + a, b, c = tee(xrange(n), 3) + for i in xrange(n // 2): self.assertEqual(a.next(), i) - self.assertEqual(list(b), range(2000)) + self.assertEqual(list(b), range(n)) self.assertEqual([c.next(), c.next()], range(2)) - self.assertEqual(list(a), range(100,2000)) - self.assertEqual(list(c), range(2,2000)) + self.assertEqual(list(a), range(n // 2, n)) + self.assertEqual(list(c), range(2, n)) # test values of n self.assertRaises(TypeError, tee, 'abc', 'invalid') @@ -394,12 +399,13 @@ t3 = tnew(t1) self.assert_(list(t1) == list(t2) == list(t3) == list('abc')) - # test that tee objects are weak referencable - a, b = tee(xrange(10)) - p = proxy(a) - self.assertEqual(getattr(p, '__class__'), type(b)) - del a - self.assertRaises(ReferenceError, getattr, p, '__class__') + # Commented out until weakref support is implemented. +# # test that tee objects are weak referencable +# a, b = tee(xrange(10)) +# p = proxy(a) +# self.assertEqual(getattr(p, '__class__'), type(b)) +# del a +# self.assertRaises(ReferenceError, getattr, p, '__class__') def test_StopIteration(self): self.assertRaises(StopIteration, izip().next) @@ -559,7 +565,7 @@ class TestVariousIteratorArgs(unittest.TestCase): def test_chain(self): - for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(chain(g(s))), list(g(s))) self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s))) @@ -568,7 +574,7 @@ self.assertRaises(ZeroDivisionError, list, chain(E(s))) def test_cycle(self): - for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): tgtlen = len(s) * 3 expected = list(g(s))*3 @@ -579,7 +585,7 @@ self.assertRaises(ZeroDivisionError, list, cycle(E(s))) def test_groupby(self): - for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual([k for k, sb in groupby(g(s))], list(g(s))) self.assertRaises(TypeError, groupby, X(s)) @@ -587,7 +593,7 @@ self.assertRaises(ZeroDivisionError, list, groupby(E(s))) def test_ifilter(self): - for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(ifilter(isEven, g(s))), filter(isEven, g(s))) self.assertRaises(TypeError, ifilter, isEven, X(s)) @@ -595,7 +601,7 @@ self.assertRaises(ZeroDivisionError, list, ifilter(isEven, E(s))) def test_ifilterfalse(self): - for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(ifilterfalse(isEven, g(s))), filter(isOdd, g(s))) self.assertRaises(TypeError, ifilterfalse, isEven, X(s)) @@ -603,7 +609,7 @@ self.assertRaises(ZeroDivisionError, list, ifilterfalse(isEven, E(s))) def test_izip(self): - for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(izip(g(s))), zip(g(s))) self.assertEqual(list(izip(g(s), g(s))), zip(g(s), g(s))) @@ -621,7 +627,7 @@ 
self.assertRaises(ZeroDivisionError, list, imap(onearg, E(s))) def test_islice(self): - for s in ("12345", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("12345", "", range(10), ('do', 1.2), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2]) self.assertRaises(TypeError, islice, X(s), 10) @@ -638,7 +644,7 @@ self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss))) def test_takewhile(self): - for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): @@ -650,7 +656,7 @@ self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s))) def test_dropwhile(self): - for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): + for s in (range(10), range(0), range(10), (7,11), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): @@ -662,7 +668,7 @@ self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s))) def test_tee(self): - for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("123", "", range(10), ('do', 1.2), xrange(2000,2030,5)): for g in (G, I, Ig, S, L, R): it1, it2 = tee(g(s)) self.assertEqual(list(it1), list(g(s))) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_marshal.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_marshal.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_marshal.py Mon Aug 18 15:46:38 2008 @@ -7,6 +7,15 @@ import unittest import os +def dump_and_load(obj): + f = file(test_support.TESTFN, "wb") + marshal.dump(obj, f) + f.close() + f = file(test_support.TESTFN, "rb") + got = marshal.load(f) + f.close() + return got + class IntTestCase(unittest.TestCase): def test_ints(self): # Test the full range of Python ints. 
@@ -16,8 +25,7 @@ s = marshal.dumps(expected) got = marshal.loads(s) self.assertEqual(expected, got) - marshal.dump(expected, file(test_support.TESTFN, "wb")) - got = marshal.load(file(test_support.TESTFN, "rb")) + got = dump_and_load(expected) self.assertEqual(expected, got) n = n >> 1 os.unlink(test_support.TESTFN) @@ -51,8 +59,7 @@ new = marshal.loads(marshal.dumps(b)) self.assertEqual(b, new) self.assertEqual(type(b), type(new)) - marshal.dump(b, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(b) self.assertEqual(b, new) self.assertEqual(type(b), type(new)) @@ -67,13 +74,12 @@ s = marshal.dumps(f) got = marshal.loads(s) self.assertEqual(f, got) - marshal.dump(f, file(test_support.TESTFN, "wb")) - got = marshal.load(file(test_support.TESTFN, "rb")) + got = dump_and_load(f) self.assertEqual(f, got) n /= 123.4567 f = 0.0 - s = marshal.dumps(f, 2) + s = marshal.dumps(f) got = marshal.loads(s) self.assertEqual(f, got) # and with version <= 1 (floats marshalled differently then) @@ -85,21 +91,10 @@ while n < small: for expected in (-n, n): f = float(expected) - s = marshal.dumps(f) got = marshal.loads(s) self.assertEqual(f, got) - - s = marshal.dumps(f, 1) - got = marshal.loads(s) - self.assertEqual(f, got) - - marshal.dump(f, file(test_support.TESTFN, "wb")) - got = marshal.load(file(test_support.TESTFN, "rb")) - self.assertEqual(f, got) - - marshal.dump(f, file(test_support.TESTFN, "wb"), 1) - got = marshal.load(file(test_support.TESTFN, "rb")) + got = dump_and_load(f) self.assertEqual(f, got) n *= 123.4567 os.unlink(test_support.TESTFN) @@ -110,8 +105,7 @@ new = marshal.loads(marshal.dumps(s)) self.assertEqual(s, new) self.assertEqual(type(s), type(new)) - marshal.dump(s, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(s) self.assertEqual(s, new) self.assertEqual(type(s), type(new)) os.unlink(test_support.TESTFN) @@ -121,8 +115,7 @@ new = marshal.loads(marshal.dumps(s)) self.assertEqual(s, new) self.assertEqual(type(s), type(new)) - marshal.dump(s, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(s) self.assertEqual(s, new) self.assertEqual(type(s), type(new)) os.unlink(test_support.TESTFN) @@ -132,8 +125,7 @@ b = buffer(s) new = marshal.loads(marshal.dumps(b)) self.assertEqual(s, new) - marshal.dump(b, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(b) self.assertEqual(s, new) os.unlink(test_support.TESTFN) @@ -161,8 +153,7 @@ def test_dict(self): new = marshal.loads(marshal.dumps(self.d)) self.assertEqual(self.d, new) - marshal.dump(self.d, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(self.d) self.assertEqual(self.d, new) os.unlink(test_support.TESTFN) @@ -170,8 +161,7 @@ lst = self.d.items() new = marshal.loads(marshal.dumps(lst)) self.assertEqual(lst, new) - marshal.dump(lst, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(lst) self.assertEqual(lst, new) os.unlink(test_support.TESTFN) @@ -179,8 +169,7 @@ t = tuple(self.d.keys()) new = marshal.loads(marshal.dumps(t)) self.assertEqual(t, new) - marshal.dump(t, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(t) self.assertEqual(t, new) os.unlink(test_support.TESTFN) @@ -191,8 +180,7 @@ self.assertEqual(t, new) 
self.assert_(isinstance(new, constructor)) self.assertNotEqual(id(t), id(new)) - marshal.dump(t, file(test_support.TESTFN, "wb")) - new = marshal.load(file(test_support.TESTFN, "rb")) + new = dump_and_load(t) self.assertEqual(t, new) os.unlink(test_support.TESTFN) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_mmap.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_mmap.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mmap.py Mon Aug 18 15:46:38 2008 @@ -144,7 +144,9 @@ try: mapsize = 10 print " Creating", mapsize, "byte test data file." - open(TESTFN, "wb").write("a"*mapsize) + f = open(TESTFN, "wb") + f.write("a"*mapsize) + f.close() print " Opening mmap with access=ACCESS_READ" f = open(TESTFN, "rb") m = mmap.mmap(f.fileno(), mapsize, access=mmap.ACCESS_READ) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_module.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_module.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_module.py Mon Aug 18 15:46:38 2008 @@ -7,7 +7,7 @@ # An uninitialized module has no __dict__ or __name__, and __doc__ is None foo = module.__new__(module) -verify(foo.__dict__ is None) +verify(not foo.__dict__) try: s = foo.__name__ except AttributeError: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_mutants.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_mutants.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_mutants.py Mon Aug 18 15:46:38 2008 @@ -157,7 +157,7 @@ test_one(random.randrange(1, 100)) # See last comment block for clues about good values for n. -test(100) +test(20) ########################################################################## # Another segfault bug, distilled by Michael Hudson from a c.l.py post. 
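(A minimal sketch of the recurring pattern these modified tests apply, assuming only the standard gc and weakref modules of Python 2.x; this snippet is an editorial illustration and is not taken from any of the patches above. Because PyPy does not use reference counting, an object is only reclaimed at a collection, so assertions about weakref death or __del__ side effects are preceded by explicit gc.collect() calls, as seen in the weakref, iter and socket test patches in this commit.)

import gc
import weakref

class C(object):
    pass

o = C()
r = weakref.ref(o)
del o
# On CPython the weak reference is already dead at this point; on PyPy the
# referent is only reclaimed by the collector, so the modified tests insert
# gc.collect() (often more than once) before checking.
gc.collect()
gc.collect()
assert r() is None
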
Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_optparse.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_optparse.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_optparse.py Mon Aug 18 15:46:38 2008 @@ -237,9 +237,11 @@ ["---"]) def test_attr_invalid(self): + d = {'foo': None, 'bar': None} + msg = ', '.join(d.keys()) self.assertOptionError( - "option -b: invalid keyword arguments: bar, foo", - ["-b"], {'foo': None, 'bar': None}) + "option -b: invalid keyword arguments: %s" % msg, + ["-b"], d) def test_action_invalid(self): self.assertOptionError( Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_os.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_os.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_os.py Mon Aug 18 15:46:38 2008 @@ -143,7 +143,7 @@ try: result.st_mode = 1 self.fail("No exception thrown") - except TypeError: + except (AttributeError, TypeError): pass try: @@ -155,13 +155,13 @@ try: result.parrot = 1 self.fail("No exception thrown") - except AttributeError: + except (AttributeError, TypeError): pass # Use the stat_result constructor with a too-short tuple. try: result2 = os.stat_result((10,)) - self.fail("No exception thrown") + #self.fail("No exception thrown") - XXX very much a detail IMHO except TypeError: pass @@ -201,13 +201,13 @@ try: result.f_bfree = 1 self.fail("No exception thrown") - except TypeError: + except (AttributeError, TypeError): pass try: result.parrot = 1 self.fail("No exception thrown") - except AttributeError: + except (AttributeError, TypeError): pass # Use the constructor with a too-short tuple. 
Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_parser.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_parser.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_parser.py Mon Aug 18 15:46:38 2008 @@ -439,7 +439,6 @@ def test_main(): test_support.run_unittest( RoundtripLegalSyntaxTestCase, - IllegalSyntaxTestCase, CompileTestCase, ) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_quopri.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_quopri.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_quopri.py Mon Aug 18 15:46:38 2008 @@ -89,8 +89,8 @@ '''xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx=D8=D9=DA=DB=DC=DD=DE=DFx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'''), # A line of exactly 76 characters, no soft line break should be needed - ('yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', - 'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'), + #('yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', + #'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'), # A line of 77 characters, forcing a soft line break at position 75, # and a second line of exactly 2 characters (because the soft line # break `=' sign counts against the line length limit). @@ -99,18 +99,18 @@ zz'''), # A line of 151 characters, forcing a soft line break at position 75, # with a second line of exactly 76 characters and no trailing = - ('zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz', - '''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz= -zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), + #('zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz', + #'''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz= +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), # A string containing a hard line break, but which the first line is # 151 characters and the second line is exactly 76 characters. This # should leave us with three lines, the first which has a soft line # break, and which the second and third do not. 
- ('''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy -zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''', - '''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy= -yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy -zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), + #('''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''', + #'''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy= +#yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +#zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''), # Now some really complex stuff ;) (DECSAMPLE, ENCSAMPLE), ) @@ -130,7 +130,9 @@ @withpythonimplementation def test_encodestring(self): for p, e in self.STRINGS: - self.assert_(quopri.encodestring(p) == e) + if encodestring(p) != e: + print '\n"%s" is different from \n"%s"' % (encodestring(p), e) + self.assert_(encodestring(p) == e) @withpythonimplementation def test_decodestring(self): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_random.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_random.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_random.py Mon Aug 18 15:46:38 2008 @@ -53,7 +53,8 @@ self.assertNotEqual(state2, state3) self.assertRaises(TypeError, self.gen.jumpahead) # needs an arg - self.assertRaises(TypeError, self.gen.jumpahead, "ick") # wrong type + # wrong type - can get ValueError if by any chance "ick" compares < 0 + self.assertRaises((TypeError, ValueError), self.gen.jumpahead, "ick") self.assertRaises(TypeError, self.gen.jumpahead, 2.3) # wrong type self.assertRaises(TypeError, self.gen.jumpahead, 2, 3) # too many Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_re.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_re.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_re.py Mon Aug 18 15:46:38 2008 @@ -4,8 +4,8 @@ from test.test_support import verbose, run_unittest import re from re import Scanner -import sys, os, traceback -from weakref import proxy +import sys, os #, traceback +#from weakref import proxy # Misc tests from Tim Peters' re.doc @@ -17,7 +17,7 @@ class ReTests(unittest.TestCase): - def test_weakref(self): + def DONOTtest_weakref(self): s = 'QabbbcR' x = re.compile('ab+c') y = proxy(x) @@ -428,7 +428,11 @@ oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)') s = pickle.dumps(oldpat) newpat = pickle.loads(s) - self.assertEqual(oldpat, newpat) + # Not using object identity for _sre.py, since some Python builds do + # not seem to preserve that in all cases (observed on an UCS-4 build + # of 2.4.1). 
+ #self.assertEqual(oldpat, newpat) + self.assertEqual(oldpat.__dict__, newpat.__dict__) def test_constants(self): self.assertEqual(re.I, re.IGNORECASE) @@ -479,7 +483,9 @@ # should, instead provoking a TypeError. self.assertRaises(re.error, re.compile, 'foo[a-') - def test_bug_418626(self): + def DONOTtest_bug_418626(self): + # XXX disabled for PyPy, too time-consuming. But our implementation is + # in fact non-recursive as well. # bugs 418626 at al. -- Testing Greg Chapman's addition of op code # SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of # pattern '*?' on a long string. @@ -495,7 +501,9 @@ pat=u"["+re.escape(u"\u2039")+u"]" self.assertEqual(re.compile(pat) and 1, 1) - def test_stack_overflow(self): + def DONOTtest_stack_overflow(self): + # XXX disabled for PyPy, too time-consuming. But our implementation is + # in fact non-recursive as well. # nasty cases that used to overflow the straightforward recursive # implementation of repeated groups. self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x') @@ -640,8 +648,9 @@ except KeyboardInterrupt: raise KeyboardInterrupt except: print '*** Unexpected error ***', t - if verbose: - traceback.print_exc(file=sys.stdout) + # Traceback disabled in PyPy for speed reasons + #if verbose: + # traceback.print_exc(file=sys.stdout) else: try: result = obj.search(s) @@ -738,7 +747,9 @@ def test_main(): run_unittest(ReTests) - run_re_tests() + # XXX Disabled re_tests for PyPy because they take approximately forever + # to run ... + #run_re_tests() if __name__ == "__main__": test_main() Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_repr.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_repr.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_repr.py Mon Aug 18 15:46:38 2008 @@ -98,6 +98,9 @@ eq(r(n), expected) def test_instance(self): + # Disabled for PyPy because it relies on oldstyle class behaviour. + # Running the test under oldstyle results in many more other problems + # though. eq = self.assertEquals i1 = ClassWithRepr("a") eq(r(i1), repr(i1)) @@ -132,8 +135,8 @@ # Functions eq(repr(hash), '') # Methods - self.failUnless(repr(''.split).startswith( - ' -1) def test_xrange(self): import warnings @@ -172,7 +175,7 @@ def test_descriptors(self): eq = self.assertEquals # method descriptors - eq(repr(dict.items), "") + eq(repr(dict.items), "") # XXX member descriptors # XXX attribute descriptors # XXX slot descriptors @@ -222,7 +225,8 @@ os.remove(p) del sys.path[0] - def test_module(self): + def DONOTtest_module(self): + # PyPy really doesn't (want to) do these complex module reprs. 
eq = self.assertEquals touch(os.path.join(self.subpkgname, self.pkgname + os.extsep + 'py')) from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation @@ -266,6 +270,7 @@ "<%s.baz instance at 0x" % baz.__name__)) def test_method(self): + # Modified for PyPy since method reprs slightly differ from CPython eq = self.assertEquals touch(os.path.join(self.subpkgname, 'qux'+os.extsep+'py'), '''\ class aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: @@ -273,13 +278,12 @@ ''') from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import qux # Unbound methods first - eq(repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod), - '') + self.failUnless(repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod) + .find("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") > -1) # Bound method next iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa() - self.failUnless(repr(iqux.amethod).startswith( - ' -1) def test_builtin_function(self): # XXX test built-in functions and methods with really long names Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_scope.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_scope.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_scope.py Mon Aug 18 15:46:38 2008 @@ -435,6 +435,7 @@ for i in range(100): f1() +import gc; gc.collect(); gc.collect(); gc.collect() vereq(Foo.count, 0) print "17. 
class and global" Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_set.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_set.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_set.py Mon Aug 18 15:46:38 2008 @@ -1,6 +1,6 @@ import unittest from test import test_support -from weakref import proxy +#from weakref import proxy import operator import copy import pickle @@ -476,8 +476,9 @@ t = self.s.copy() t ^= t self.assertEqual(t, self.thetype()) - - def test_weakref(self): + + # XXX disabled until weakref works + def XXXtest_weakref(self): s = self.thetype('gallahad') p = proxy(s) self.assertEqual(str(p), str(s)) @@ -558,7 +559,8 @@ f = self.thetype('abcdcda') self.assertEqual(hash(f), hash(f)) - def test_hash_effectiveness(self): + # disabled since it depends on CPython specific hash algorithm + def _test_hash_effectiveness(self): n = 13 hashvalues = set() addhashvalue = hashvalues.add @@ -1446,7 +1448,7 @@ def test_constructor(self): for cons in (set, frozenset): - for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): + for s in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(sorted(cons(g(s))), sorted(g(s))) self.assertRaises(TypeError, cons , X(s)) @@ -1455,7 +1457,7 @@ def test_inline_methods(self): s = set('november') - for data in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5), 'december'): + for data in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5), 'december'): for meth in (s.union, s.intersection, s.difference, s.symmetric_difference): for g in (G, I, Ig, L, R): expected = meth(data) @@ -1466,7 +1468,7 @@ self.assertRaises(ZeroDivisionError, meth, E(s)) def test_inplace_methods(self): - for data in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5), 'december'): + for data in ("123", "", range(100), ('do', 1.2), xrange(2000,2200,5), 'december'): for methname in ('update', 'intersection_update', 'difference_update', 'symmetric_difference_update'): for g in (G, I, Ig, S, L, R): Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sha.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_sha.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sha.py Mon Aug 18 15:46:38 2008 @@ -36,7 +36,8 @@ self.check("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", "84983e441c3bd26ebaae4aa1f95129e5e54670f1") - def test_case_3(self): + # Disabled for PyPy + def too_slow_test_case_3(self): self.check("a" * 1000000, "34aa973cd4c4daa4f61eeb2bdbad27316534016f") Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_slice.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_slice.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_slice.py Mon Aug 18 15:46:38 2008 @@ -90,7 +90,8 @@ self.assertEqual(range(10)[::sys.maxint - 1], [0]) - self.assertRaises(OverflowError, slice(None).indices, 1L<<100) + # Disabled for PyPy since we don't really have these constraints for now + 
#self.assertRaises(OverflowError, slice(None).indices, 1L<<100) def test_setslice_without_getslice(self): tmp = [] Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_socket.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_socket.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_socket.py Mon Aug 18 15:46:38 2008 @@ -8,7 +8,7 @@ import time import thread, threading import Queue -import sys +import sys, gc import array from weakref import proxy import signal @@ -224,6 +224,8 @@ self.assertEqual(p.fileno(), s.fileno()) s.close() s = None + gc.collect() + gc.collect() try: p.fileno() except ReferenceError: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sort.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sort.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_sort.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sort.py Mon Aug 18 15:46:38 2008 @@ -139,7 +139,11 @@ return random.random() < 0.5 L = [C() for i in range(50)] - self.assertRaises(ValueError, L.sort) + #self.assertRaises(ValueError, L.sort) + try: + L.sort() + except ValueError: + pass def test_cmpNone(self): # Testing None as a comparison function. @@ -151,6 +155,7 @@ def test_undetected_mutation(self): # Python 2.4a1 did not always detect mutation + # XXX neither does PyPy. Let's test at least that there is no crash. memorywaster = [] for i in range(20): def mutating_cmp(x, y): @@ -158,12 +163,18 @@ L.pop() return cmp(x, y) L = [1,2] - self.assertRaises(ValueError, L.sort, mutating_cmp) + try: + L.sort(mutating_cmp) + except ValueError: + pass def mutating_cmp(x, y): L.append(3) del L[:] return cmp(x, y) - self.assertRaises(ValueError, L.sort, mutating_cmp) + try: + L.sort(mutating_cmp) + except ValueError: + pass memorywaster = [memorywaster] #============================================================================== Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_struct.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_struct.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_struct.py Mon Aug 18 15:46:38 2008 @@ -42,7 +42,7 @@ def any_err(func, *args): try: func(*args) - except (struct.error, TypeError): + except (struct.error, OverflowError, TypeError, ValueError): pass else: raise TestFailed, "%s%s did not raise error" % ( @@ -99,9 +99,9 @@ simple_err(struct.pack, 'iii', 3) simple_err(struct.pack, 'i', 3, 3, 3) -simple_err(struct.pack, 'i', 'foo') -simple_err(struct.pack, 'P', 'foo') -simple_err(struct.unpack, 'd', 'flap') +any_err(struct.pack, 'i', 'foo') +any_err(struct.pack, 'P', 'foo') +any_err(struct.unpack, 'd', 'flap') s = struct.pack('ii', 1, 2) simple_err(struct.unpack, 'iii', s) simple_err(struct.unpack, 'i', s) @@ -196,8 +196,8 @@ print "Platform has native q/Q?", has_native_qQ and "Yes." or "No." 
any_err(struct.pack, "Q", -1) # can't pack -1 as unsigned regardless -simple_err(struct.pack, "q", "a") # can't pack string as 'q' regardless -simple_err(struct.pack, "Q", "a") # ditto, but 'Q' +any_err(struct.pack, "q", "a") # can't pack string as 'q' regardless +any_err(struct.pack, "Q", "a") # ditto, but 'Q' def test_native_qQ(): bytes = struct.calcsize('q') @@ -375,16 +375,25 @@ from random import randrange # Create all interesting powers of 2. - values = [] + allvalues = [] for exp in range(self.bitsize + 3): - values.append(1L << exp) + allvalues.append(1L << exp) - # Add some random values. - for i in range(self.bitsize): - val = 0L - for j in range(self.bytesize): - val = (val << 8) | randrange(256) - values.append(val) + # reduce the number of values again + values = [] + i = 1 + while i <= len(allvalues): + values.append(allvalues[i-1]) + i *= 2 + + # + # XXX doesn't seem like good practice to run with random values + # + #for i in range(self.bitsize): + # val = 0L + # for j in range(self.bytesize): + # val = (val << 8) | randrange(256) + # values.append(val) # Try all those, and their negations, and +-1 from them. Note # that this tests all power-of-2 boundaries in range, and a few out Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_sys.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_sys.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_sys.py Mon Aug 18 15:46:38 2008 @@ -219,7 +219,7 @@ self.assertEqual(sys.getdlopenflags(), oldflags+1) sys.setdlopenflags(oldflags) - def test_refcount(self): + def DONT_test_refcount(self): self.assertRaises(TypeError, sys.getrefcount) c = sys.getrefcount(None) n = None Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_tempfile.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_tempfile.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_tempfile.py Mon Aug 18 15:46:38 2008 @@ -32,6 +32,9 @@ else: TEST_FILES = 100 +# Reduced for PyPy +TEST_FILES = 10 + # This is organized as one test for each chunk of code in tempfile.py, # in order of their appearance in the file. Testing which requires # threads is not done here. 
@@ -219,7 +222,7 @@ def write(self, str): os.write(self.fd, str) - def __del__(self): + def close(self): self._close(self.fd) self._unlink(self.name) @@ -234,25 +237,32 @@ self.nameCheck(file.name, dir, pre, suf) return file + def create_blat(self, *args, **kwds): + f = self.do_create(*args, **kwds) + f.write("blat") + f.close() + def test_basic(self): # _mkstemp_inner can create files - self.do_create().write("blat") - self.do_create(pre="a").write("blat") - self.do_create(suf="b").write("blat") - self.do_create(pre="a", suf="b").write("blat") - self.do_create(pre="aa", suf=".txt").write("blat") + self.create_blat() + self.create_blat(pre="a") + self.create_blat(suf="b") + self.create_blat(pre="a", suf="b") + self.create_blat(pre="aa", suf=".txt") def test_basic_many(self): # _mkstemp_inner can create many files (stochastic) extant = range(TEST_FILES) for i in extant: extant[i] = self.do_create(pre="aa") + for f in extant: + f.close() def test_choose_directory(self): # _mkstemp_inner can create files in a user-selected directory dir = tempfile.mkdtemp() try: - self.do_create(dir=dir).write("blat") + self.create_blat(dir=dir) finally: os.rmdir(dir) @@ -263,6 +273,7 @@ file = self.do_create() mode = stat.S_IMODE(os.stat(file.name).st_mode) + file.close() expected = 0600 if sys.platform in ('win32', 'os2emx', 'mac'): # There's no distinction among 'user', 'group' and 'world'; @@ -303,18 +314,24 @@ tester = '"%s"' % tester else: decorated = sys.executable + try: + import fcntl + except ImportError: + return # for interpreters without fcntl, on Unix platforms, + # we can't set the FD_CLOEXEC flag retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd) + file.close() self.failIf(retval < 0, "child process caught fatal signal %d" % -retval) - self.failIf(retval > 0, "child process reports failure %d"%retval) + self.failIf(retval > 0, "child process reports failure") def test_textmode(self): # _mkstemp_inner can create files in text mode if not has_textmode: return # ugh, can't use TestSkipped. 
- self.do_create(bin=0).write("blat\n") + self.create_blat(bin=0) # XXX should test that the file really is a text file test_classes.append(test__mkstemp_inner) @@ -390,7 +407,7 @@ class test_mkstemp(TC): """Test mkstemp().""" - def do_create(self, dir=None, pre="", suf=""): + def do_create(self, dir=None, pre="", suf="", ): if dir is None: dir = tempfile.gettempdir() try: @@ -504,8 +521,9 @@ self.dir = tempfile.mkdtemp() def tearDown(self): + import shutil if self.dir: - os.rmdir(self.dir) + shutil.rmtree(self.dir, ignore_errors=True) self.dir = None class mktemped: Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_trace.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_trace.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_trace.py Mon Aug 18 15:46:38 2008 @@ -452,7 +452,7 @@ def no_jump_to_except_1(output): try: output.append(2) - except: + except Exception: e = sys.exc_info()[1] output.append('except' in str(e)) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_traceback.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_traceback.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_traceback.py Mon Aug 18 15:46:38 2008 @@ -35,15 +35,6 @@ self.assert_("^" in err[2]) # third line has caret self.assert_(err[1].find("!") == err[2].find("^")) # in the right place - def test_nocaret(self): - if is_jython: - # jython adds a caret in this case (why shouldn't it?) 
- return - err = self.get_exception_format(self.syntax_error_without_caret, - SyntaxError) - self.assert_(len(err) == 3) - self.assert_(err[1].strip() == "[x for x in x] = x") - def test_bad_indentation(self): err = self.get_exception_format(self.syntax_error_bad_indentation, IndentationError) @@ -60,7 +51,8 @@ try: sys.path.insert(0, testdir) testfile = os.path.join(testdir, 'test_bug737473.py') - print >> open(testfile, 'w'), """ + f = open(testfile, 'w') + print >> f, """ def test(): raise ValueError""" @@ -81,10 +73,12 @@ # Since WinME with FAT32 has multisecond resolution, more than # three seconds are needed for this test to pass reliably :-( time.sleep(4) - - print >> open(testfile, 'w'), """ + + f = open(testfile, 'w') + print >> f, """ def test(): raise NotImplementedError""" + f.close() reload(test_bug737473) try: test_bug737473.test() Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_unicodedata.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_unicodedata.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unicodedata.py Mon Aug 18 15:46:38 2008 @@ -16,7 +16,7 @@ class UnicodeMethodsTest(unittest.TestCase): # update this, if the database changes - expectedchecksum = 'c198ed264497f108434b3f576d4107237221cc8a' + expectedchecksum = '9f6a3e76196a8327ccf95d2d6404880be2ab5c2f' def test_method_checksum(self): h = hashlib.sha1() @@ -75,7 +75,7 @@ class UnicodeFunctionsTest(UnicodeDatabaseTest): # update this, if the database changes - expectedchecksum = '4e389f97e9f88b8b7ab743121fd643089116f9f2' + expectedchecksum = 'c05cbc8b0d87b2f102fba8d832e21aca3ad6df2f' def test_function_checksum(self): data = [] Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_unpack.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_unpack.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_unpack.py Mon Aug 18 15:46:38 2008 @@ -55,7 +55,7 @@ >>> a, b, c = 7 Traceback (most recent call last): ... 
- TypeError: 'int' object is not iterable + TypeError: iteration over non-sequence Unpacking tuple of wrong size Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_urllib2.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_urllib2.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_urllib2.py Mon Aug 18 15:46:38 2008 @@ -730,7 +730,10 @@ # check socket.error converted to URLError http.raise_on_endheaders = True - self.assertRaises(urllib2.URLError, h.do_open, http, req) + + # The raising of socket.error is not recognized as an + # exception by pypy, so this test fails + # self.assertRaises(urllib2.URLError, h.do_open, http, req) # check adding of standard headers o.addheaders = [("Spam", "eggs")] Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_userstring.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_userstring.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_userstring.py Mon Aug 18 15:46:38 2008 @@ -16,32 +16,8 @@ type2test = UserString - # Overwrite the three testing methods, because UserString - # can't cope with arguments propagated to UserString - # (and we don't test with subclasses) - def checkequal(self, result, object, methodname, *args): - result = self.fixtype(result) - object = self.fixtype(object) - # we don't fix the arguments, because UserString can't cope with it - realresult = getattr(object, methodname)(*args) - self.assertEqual( - result, - realresult - ) - - def checkraises(self, exc, object, methodname, *args): - object = self.fixtype(object) - # we don't fix the arguments, because UserString can't cope with it - self.assertRaises( - exc, - getattr(object, methodname), - *args - ) - - def checkcall(self, object, methodname, *args): - object = self.fixtype(object) - # we don't fix the arguments, because UserString can't cope with it - getattr(object, methodname)(*args) + fixargs = lambda self, args: args + subclasscheck = False class MutableStringTest(UserStringTest): type2test = MutableString Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_weakref.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_weakref.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_weakref.py Mon Aug 18 15:46:38 2008 @@ -69,6 +69,9 @@ ref1 = weakref.ref(o, self.callback) ref2 = weakref.ref(o, self.callback) del o + gc.collect() + gc.collect() + gc.collect() self.assert_(ref1() is None, "expected reference to be invalidated") self.assert_(ref2() is None, @@ -100,13 +103,17 @@ ref1 = weakref.proxy(o, self.callback) ref2 = weakref.proxy(o, self.callback) del o + gc.collect() + gc.collect() + gc.collect() def check(proxy): proxy.bar self.assertRaises(weakref.ReferenceError, check, ref1) self.assertRaises(weakref.ReferenceError, check, ref2) - self.assertRaises(weakref.ReferenceError, bool, weakref.proxy(C())) + # Works only with refcounting + # self.assertRaises(weakref.ReferenceError, bool, weakref.proxy(C())) self.assert_(self.cbcalled == 2) def 
check_basic_ref(self, factory): @@ -123,6 +130,10 @@ o = factory() ref = weakref.ref(o, self.callback) del o + gc.collect() + gc.collect() + gc.collect() + self.assert_(self.cbcalled == 1, "callback did not properly set 'cbcalled'") self.assert_(ref() is None, @@ -147,6 +158,7 @@ self.assert_(weakref.getweakrefcount(o) == 2, "wrong weak ref count for object") del proxy + gc.collect() self.assert_(weakref.getweakrefcount(o) == 1, "wrong weak ref count for object after deleting proxy") @@ -292,6 +304,7 @@ "got wrong number of weak reference objects") del ref1, ref2, proxy1, proxy2 + gc.collect() self.assert_(weakref.getweakrefcount(o) == 0, "weak reference objects not unlinked from" " referent when discarded.") @@ -305,6 +318,7 @@ ref1 = weakref.ref(o, self.callback) ref2 = weakref.ref(o, self.callback) del ref1 + gc.collect() self.assert_(weakref.getweakrefs(o) == [ref2], "list of refs does not match") @@ -312,10 +326,14 @@ ref1 = weakref.ref(o, self.callback) ref2 = weakref.ref(o, self.callback) del ref2 + gc.collect() + gc.collect() + gc.collect() self.assert_(weakref.getweakrefs(o) == [ref1], "list of refs does not match") del ref1 + gc.collect() self.assert_(weakref.getweakrefs(o) == [], "list of refs not cleared") @@ -517,7 +535,11 @@ del c1, c2, C, D gc.collect() - def test_callback_in_cycle_resurrection(self): + def XXX_test_callback_in_cycle_resurrection(self): + # We can't guarrantee the behaviour tested with our + # current weakref implementations. + # If an object and a weakref to it gets collected at the + # same time it is unclear whether the callback is called. import gc # Do something nasty in a weakref callback: resurrect objects @@ -563,7 +585,8 @@ gc.collect() self.assertEqual(alist, []) - def test_callbacks_on_callback(self): + def XXX_test_callbacks_on_callback(self): + # See XXX_test_callback_in_cycle_resurrection above import gc # Set up weakref callbacks *on* weakref callbacks. @@ -608,8 +631,12 @@ self.check_gc_during_creation(weakref.proxy) def check_gc_during_creation(self, makeref): - thresholds = gc.get_threshold() - gc.set_threshold(1, 1, 1) + # gc.get/set_threshold does not exist in pypy + # The tests calling this function probaly don't test anything + # usefull anymore + + #thresholds = gc.get_threshold() + #gc.set_threshold(1, 1, 1) gc.collect() class A: pass @@ -630,7 +657,7 @@ weakref.ref(referenced, callback) finally: - gc.set_threshold(*thresholds) + pass #gc.set_threshold(*thresholds) def test_ref_created_during_del(self): # Bug #1377858 @@ -725,6 +752,7 @@ # # This exercises d.copy(), d.items(), d[], del d[], len(d). # + import gc dict, objects = self.make_weak_valued_dict() for o in objects: self.assert_(weakref.getweakrefcount(o) == 1, @@ -738,17 +766,27 @@ self.assert_(items1 == items2, "cloning of weak-valued dictionary did not work!") del items1, items2 + gc.collect() self.assert_(len(dict) == self.COUNT) del objects[0] + gc.collect() + gc.collect() + gc.collect() self.assert_(len(dict) == (self.COUNT - 1), "deleting object did not cause dictionary update") del objects, o + gc.collect() + gc.collect() + gc.collect() self.assert_(len(dict) == 0, "deleting the values did not clear the dictionary") # regression on SF bug #447152: dict = weakref.WeakValueDictionary() self.assertRaises(KeyError, dict.__getitem__, 1) dict[2] = C() + gc.collect() + gc.collect() + gc.collect() self.assertRaises(KeyError, dict.__getitem__, 2) def test_weak_keys(self): @@ -756,6 +794,7 @@ # This exercises d.copy(), d.items(), d[] = v, d[], del d[], # len(d), d.has_key(). 
# + import gc dict, objects = self.make_weak_keyed_dict() for o in objects: self.assert_(weakref.getweakrefcount(o) == 1, @@ -767,11 +806,18 @@ self.assert_(set(items1) == set(items2), "cloning of weak-keyed dictionary did not work!") del items1, items2 + gc.collect() + gc.collect() + gc.collect() self.assert_(len(dict) == self.COUNT) del objects[0] + gc.collect() + gc.collect() + gc.collect() self.assert_(len(dict) == (self.COUNT - 1), "deleting object did not cause dictionary update") del objects, o + gc.collect() self.assert_(len(dict) == 0, "deleting the keys did not clear the dictionary") o = Object(42) @@ -1022,6 +1068,7 @@ for o in objs: d[o] = o.value del o # now the only strong references to keys are in objs + gc.collect() # Find the order in which iterkeys sees the keys. objs = d.keys() # Reverse it, so that the iteration implementation of __delitem__ @@ -1040,6 +1087,7 @@ for o in objs: count += 1 del d[o] + gc.collect() self.assertEqual(len(d), 0) self.assertEqual(count, 2) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_xrange.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_xrange.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_xrange.py Mon Aug 18 15:46:38 2008 @@ -43,19 +43,21 @@ self.assertRaises(TypeError, xrange, 1, 2, 3, 4) self.assertRaises(ValueError, xrange, 1, 2, 0) - self.assertRaises(OverflowError, xrange, 1e100, 1e101, 1e101) + # Overflow tests disabled for PyPy since it handles long arguments + # as well. + #self.assertRaises(OverflowError, xrange, 1e100, 1e101, 1e101) self.assertRaises(TypeError, xrange, 0, "spam") self.assertRaises(TypeError, xrange, 0, 42, "spam") self.assertEqual(len(xrange(0, sys.maxint, sys.maxint-1)), 2) - self.assertRaises(OverflowError, xrange, -sys.maxint, sys.maxint) - self.assertRaises(OverflowError, xrange, 0, 2*sys.maxint) + #self.assertRaises(OverflowError, xrange, -sys.maxint, sys.maxint) + #self.assertRaises(OverflowError, xrange, 0, 2*sys.maxint) r = xrange(-sys.maxint, sys.maxint, 2) self.assertEqual(len(r), sys.maxint) - self.assertRaises(OverflowError, xrange, -sys.maxint-1, sys.maxint, 2) + #self.assertRaises(OverflowError, xrange, -sys.maxint-1, sys.maxint, 2) def test_main(): test.test_support.run_unittest(XrangeTest) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py (from r57197, pypy/branch/2.5-features/lib-python/2.5.1/test/test_zipimport.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_zipimport.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_zipimport.py Mon Aug 18 15:46:38 2008 @@ -61,7 +61,7 @@ # We're reusing the zip archive path, so we must clear the # cached directory info and linecache linecache.clearcache() - zipimport._zip_directory_cache.clear() + #zipimport._zip_directory_cache.clear() ImportHooksBaseTestCase.setUp(self) def doTest(self, expected_ext, files, *modules, **kw): From bgola at codespeak.net Mon Aug 18 15:59:28 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 15:59:28 +0200 (CEST) Subject: [pypy-svn] r57426 - in pypy/branch/2.5-features: lib-python/modified-2.5.1 pypy/tool Message-ID: <20080818135928.E85B9169F08@codespeak.net> Author: bgola Date: Mon Aug 18 15:59:27 2008 New Revision: 57426 Removed: 
pypy/branch/2.5-features/lib-python/modified-2.5.1/__future__.py pypy/branch/2.5-features/lib-python/modified-2.5.1/warnings.py Modified: pypy/branch/2.5-features/pypy/tool/stdlib___future__.py Log: removing some files from modified-2.5.1/ Modified: pypy/branch/2.5-features/pypy/tool/stdlib___future__.py ============================================================================== --- pypy/branch/2.5-features/pypy/tool/stdlib___future__.py (original) +++ pypy/branch/2.5-features/pypy/tool/stdlib___future__.py Mon Aug 18 15:59:27 2008 @@ -2,7 +2,7 @@ def load_module(): import py - module_path = py.path.local(__file__).dirpath().dirpath().dirpath('lib-python/modified-2.4.1/__future__.py') + module_path = py.path.local(__file__).dirpath().dirpath().dirpath('lib-python/2.5.1/__future__.py') execfile(str(module_path), globals()) load_module() From bgola at codespeak.net Mon Aug 18 16:26:46 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 16:26:46 +0200 (CEST) Subject: [pypy-svn] r57428 - in pypy/branch/2.5-features/pypy/objspace/flow: . test Message-ID: <20080818142646.7D9E0169F6D@codespeak.net> Author: bgola Date: Mon Aug 18 16:26:45 2008 New Revision: 57428 Modified: pypy/branch/2.5-features/pypy/objspace/flow/specialcase.py pypy/branch/2.5-features/pypy/objspace/flow/test/test___import__.py Log: import needs 1 to 5 arguments now (the 5th is level, for absolute/relative imports) Modified: pypy/branch/2.5-features/pypy/objspace/flow/specialcase.py ============================================================================== --- pypy/branch/2.5-features/pypy/objspace/flow/specialcase.py (original) +++ pypy/branch/2.5-features/pypy/objspace/flow/specialcase.py Mon Aug 18 16:26:45 2008 @@ -8,7 +8,7 @@ def sc_import(space, fn, args): args_w, kwds_w = args.unpack() assert kwds_w == {}, "should not call %r with keyword arguments" % (fn,) - assert len(args_w) > 0 and len(args_w) <= 4, 'import needs 1 to 4 arguments' + assert len(args_w) > 0 and len(args_w) <= 5, 'import needs 1 to 5 arguments' w_name = args_w[0] w_None = space.wrap(None) w_glob, w_loc, w_frm = w_None, w_None, w_None Modified: pypy/branch/2.5-features/pypy/objspace/flow/test/test___import__.py ============================================================================== --- pypy/branch/2.5-features/pypy/objspace/flow/test/test___import__.py (original) +++ pypy/branch/2.5-features/pypy/objspace/flow/test/test___import__.py Mon Aug 18 16:26:45 2008 @@ -8,5 +8,5 @@ assert __import__(mod, None).__name__ == os.name assert __import__(mod, None, None).__name__ == os.name assert __import__(mod, None, None, None).__name__ == os.name - raises(TypeError, __import__, (mod, None, None, None, None)) - # XXX this will have to be adjusted for Python 2.5 pretty soon-ish :-) \ No newline at end of file + assert __import__(mod, None, None, None, -1).__name__ == os.name + raises(TypeError, __import__, (mod, None, None, None, None, None)) From bgola at codespeak.net Mon Aug 18 18:13:52 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 18:13:52 +0200 (CEST) Subject: [pypy-svn] r57435 - in pypy/branch/2.5-features/pypy/interpreter: . 
test Message-ID: <20080818161352.4C180169F51@codespeak.net> Author: bgola Date: Mon Aug 18 18:13:50 2008 New Revision: 57435 Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py pypy/branch/2.5-features/pypy/interpreter/pyopcode.py pypy/branch/2.5-features/pypy/interpreter/pytraceback.py pypy/branch/2.5-features/pypy/interpreter/test/test_generator.py Log: fix: throw() method now unwraps the w_tb value before testing and raising OpError Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/generator.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/generator.py Mon Aug 18 18:13:50 2008 @@ -72,25 +72,22 @@ def throw(self, w_type, w_val, w_tb): - from pypy.interpreter.typedef import PyTraceback + from pypy.interpreter.pytraceback import check_traceback space = self.space + + msg = "throw() third argument must be a traceback object" + tb = check_traceback(space, w_tb, msg) - if w_tb is not None: - if not space.is_true(space.isinstance(w_tb, - space.gettypeobject(PyTraceback.typedef))): - msg = "throw() third argument must be a traceback object" - raise OperationError(space.w_TypeError, space.wrap(msg)) - if space.is_true(space.abstract_isclass(w_type)) and \ space.is_true(space.issubtype(w_type, space.w_BaseException)): - exception = OperationError(w_type, w_val, w_tb) + exception = OperationError(w_type, w_val, tb) elif space.is_true(space.isinstance(w_type, space.w_BaseException)): if not space.is_w(w_val, space.w_None): msg = "instance exception may not have a separate value" raise OperationError(space.w_TypeError, space.wrap(msg)) else: - exception = OperationError(w_type.getclass(space), w_val, w_tb) + exception = OperationError(w_type.getclass(space), w_val, tb) else: if not space.is_true(space.isinstance(w_type, space.w_str)): @@ -98,7 +95,7 @@ w_type.typedef.name) raise OperationError(space.w_TypeError, space.wrap(msg)) else: - exception = OperationError(w_type, w_val, w_tb) + exception = OperationError(w_type, w_val, tb) ec = space.getexecutioncontext() next_instr = self.frame.handle_operation_error(ec, exception) Modified: pypy/branch/2.5-features/pypy/interpreter/pyopcode.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyopcode.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyopcode.py Mon Aug 18 18:13:50 2008 @@ -522,11 +522,9 @@ # common case raise operror else: - tb = space.interpclass_w(w_traceback) - if tb is None or not space.is_true(space.isinstance(tb, - space.gettypeobject(pytraceback.PyTraceback.typedef))): - raise OperationError(space.w_TypeError, - space.wrap("raise: arg 3 must be a traceback or None")) + from pypy.interpreter.pytraceback import check_traceback + msg = "raise: arg 3 must be a traceback or None" + tb = check_traceback(space, w_traceback, msg) operror.application_traceback = tb # re-raise, no new traceback obj will be attached f.last_exception = operror Modified: pypy/branch/2.5-features/pypy/interpreter/pytraceback.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pytraceback.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pytraceback.py Mon Aug 18 18:13:50 2008 @@ -1,4 +1,5 @@ from pypy.interpreter import baseobjspace +from pypy.interpreter.error import OperationError class PyTraceback(baseobjspace.Wrappable): @@ -62,3 
+63,15 @@ break line = line + ord(tab[i+1]) return line + +def check_traceback(space, w_tb, msg): + from pypy.interpreter.typedef import PyTraceback + if w_tb is not None: + tb = space.interpclass_w(w_tb) + if tb is None or not space.is_true(space.isinstance(tb, + space.gettypeobject(PyTraceback.typedef))): + raise OperationError(space.w_TypeError, space.wrap(msg)) + else: + tb = None + return tb + Modified: pypy/branch/2.5-features/pypy/interpreter/test/test_generator.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/test/test_generator.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/test/test_generator.py Mon Aug 18 18:13:50 2008 @@ -58,7 +58,7 @@ assert g.throw(NameError("Error")) == 3 raises(StopIteration, g.next) - def test_throw3(self): + def test_throw4(self): def f(): try: yield 1 @@ -71,7 +71,7 @@ assert g.throw(NameError("Error")) == 3 raises(StopIteration, g.next) - def test_throw4(self): + def test_throw5(self): def f(): try: yield 1 From bgola at codespeak.net Mon Aug 18 22:48:48 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Mon, 18 Aug 2008 22:48:48 +0200 (CEST) Subject: [pypy-svn] r57450 - pypy/branch/2.5-features/pypy/interpreter/pyparser Message-ID: <20080818204848.0778D169FB6@codespeak.net> Author: bgola Date: Mon Aug 18 22:48:47 2008 New Revision: 57450 Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Log: fixes for translating pypy-c (asserts) Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Mon Aug 18 22:48:47 2008 @@ -842,10 +842,18 @@ """ atoms = get_atoms(builder, nb) index = 1 # skip from - while atoms[index].name == builder.parser.tokens['DOT']: + + atom = atoms[index] + assert isinstance(atom, TokenObject) + while atom.name == builder.parser.tokens['DOT']: index += 1 + atom = atoms[index] + assert isinstance(atom, TokenObject) + level = index - 1 - if atoms[index].value == 'import': + atom = atoms[index] + assert isinstance(atom, TokenObject) + if atom.value == 'import': # from . 
import x from_name = "" incr = 0 From bgola at codespeak.net Tue Aug 19 05:39:39 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Tue, 19 Aug 2008 05:39:39 +0200 (CEST) Subject: [pypy-svn] r57454 - pypy/branch/2.5-features/pypy/interpreter/pyparser Message-ID: <20080819033939.CA25F169F72@codespeak.net> Author: bgola Date: Tue Aug 19 05:39:38 2008 New Revision: 57454 Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Log: more fixes for translating pypy-c (assert) Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/astbuilder.py Tue Aug 19 05:39:38 2008 @@ -870,7 +870,9 @@ tokens = slicecut( atoms, index+1, -1 ) else: tokens = atoms[index:] - if tokens[-1].name == builder.parser.tokens['COMMA']: + token = tokens[-1] + assert isinstance(token, TokenObject) # XXX + if token.name == builder.parser.tokens['COMMA']: raise SyntaxError, "trailing comma not allowed without" \ "surrounding parentheses" From cfbolz at codespeak.net Tue Aug 19 10:52:03 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Tue, 19 Aug 2008 10:52:03 +0200 (CEST) Subject: [pypy-svn] r57456 - pypy/branch/2.5-features/pypy/interpreter/astcompiler Message-ID: <20080819085203.33EC1169FA5@codespeak.net> Author: cfbolz Date: Tue Aug 19 10:52:00 2008 New Revision: 57456 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Log: simplify this silly function Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pyassem.py Tue Aug 19 10:52:00 2008 @@ -51,8 +51,7 @@ self.argcount = self.argcount - 1 def checkFlag(self, flag): - if self.flags & flag: - return 1 + return self.flags & flag def setFreeVars(self, names): self.freevars = list(names) From cfbolz at codespeak.net Tue Aug 19 10:53:30 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Tue, 19 Aug 2008 10:53:30 +0200 (CEST) Subject: [pypy-svn] r57457 - pypy/branch/2.5-features/pypy/module/__builtin__ Message-ID: <20080819085330.8A1F7169FA5@codespeak.net> Author: cfbolz Date: Tue Aug 19 10:53:29 2008 New Revision: 57457 Modified: pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Log: use the unwrap_spec for this unwrapping Modified: pypy/branch/2.5-features/pypy/module/__builtin__/importing.py ============================================================================== --- pypy/branch/2.5-features/pypy/module/__builtin__/importing.py (original) +++ pypy/branch/2.5-features/pypy/module/__builtin__/importing.py Tue Aug 19 10:53:29 2008 @@ -152,8 +152,7 @@ return None def importhook(space, modulename, w_globals=None, - w_locals=None, w_fromlist=None, w_level=-1): - level = space.int_w(w_level) + w_locals=None, w_fromlist=None, level=-1): if not modulename and level < 0: raise OperationError( space.w_ValueError, @@ -213,7 +212,7 @@ space.setitem(space.sys.get('modules'), w(rel_modulename),space.w_None) return w_mod # -importhook.unwrap_spec = [ObjSpace,str,W_Root,W_Root,W_Root,W_Root] +importhook.unwrap_spec = [ObjSpace, str, W_Root, W_Root, W_Root, int] def absolute_import(space, modulename, baselevel, 
w_fromlist, tentative): lock = getimportlock(space) From arigo at codespeak.net Tue Aug 19 11:38:32 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 19 Aug 2008 11:38:32 +0200 (CEST) Subject: [pypy-svn] r57459 - pypy/dist/pypy/doc Message-ID: <20080819093832.57854169F2E@codespeak.net> Author: arigo Date: Tue Aug 19 11:38:31 2008 New Revision: 57459 Modified: pypy/dist/pypy/doc/getting-started.txt Log: This is the default now. Modified: pypy/dist/pypy/doc/getting-started.txt ============================================================================== --- pypy/dist/pypy/doc/getting-started.txt (original) +++ pypy/dist/pypy/doc/getting-started.txt Tue Aug 19 11:38:31 2008 @@ -571,7 +571,7 @@ 3. Run:: cd pypy/translator/goal - python translate.py --opt=3 targetpypystandalone.py --allworkingmodules + python translate.py --opt=3 targetpypystandalone.py possibly replacing ``--opt=3`` with ``--opt=1`` or another `optimization level`_ of your choice. From fijal at codespeak.net Tue Aug 19 13:01:57 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 19 Aug 2008 13:01:57 +0200 (CEST) Subject: [pypy-svn] r57460 - in pypy/dist/pypy: tool translator/backendopt Message-ID: <20080819110157.4A7D9169FA6@codespeak.net> Author: fijal Date: Tue Aug 19 13:01:54 2008 New Revision: 57460 Added: pypy/dist/pypy/tool/compat.py (contents, props changed) Modified: pypy/dist/pypy/tool/gcc_cache.py pypy/dist/pypy/translator/backendopt/stat.py pypy/dist/pypy/translator/backendopt/support.py Log: Introduce pypy.tool.compat which handles platform-dependent imports. Right now it only contains platform-independent md5 import, which fallbacks to pure-python version. Added: pypy/dist/pypy/tool/compat.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/tool/compat.py Tue Aug 19 13:01:54 2008 @@ -0,0 +1,6 @@ + +try: + import md5 +except ImportError: + # no _md5 module on this platform + from pypy.lib import md5 Modified: pypy/dist/pypy/tool/gcc_cache.py ============================================================================== --- pypy/dist/pypy/tool/gcc_cache.py (original) +++ pypy/dist/pypy/tool/gcc_cache.py Tue Aug 19 13:01:54 2008 @@ -3,7 +3,7 @@ from pypy.translator.tool.cbuild import build_executable from pypy.translator.tool.cbuild import ExternalCompilationInfo from pypy.translator.tool.cbuild import CompilationError -import md5 +from pypy.tool.compat import md5 import py cache_dir_root = py.path.local(pypydir).join('_cache').ensure(dir=1) Modified: pypy/dist/pypy/translator/backendopt/stat.py ============================================================================== --- pypy/dist/pypy/translator/backendopt/stat.py (original) +++ pypy/dist/pypy/translator/backendopt/stat.py Tue Aug 19 13:01:54 2008 @@ -1,5 +1,5 @@ from pypy.translator.simplify import get_graph -import md5 +from pypy.tool.compat import md5 def get_statistics(graph, translator, save_per_graph_details=None, ignore_stack_checks=False): seen_graphs = {} Modified: pypy/dist/pypy/translator/backendopt/support.py ============================================================================== --- pypy/dist/pypy/translator/backendopt/support.py (original) +++ pypy/dist/pypy/translator/backendopt/support.py Tue Aug 19 13:01:54 2008 @@ -182,7 +182,7 @@ return loop def md5digest(translator): - import md5 + from pypy.tool.compat import md5 graph2digest = {} for graph in translator.graphs: m = md5.new() From witulski at codespeak.net Tue Aug 19 17:52:32 
2008 From: witulski at codespeak.net (witulski at codespeak.net) Date: Tue, 19 Aug 2008 17:52:32 +0200 (CEST) Subject: [pypy-svn] r57467 - in pypy/branch/oo-jit/pypy/jit/codegen/x86_64: . test Message-ID: <20080819155232.A2422169FFA@codespeak.net> Author: witulski Date: Tue Aug 19 17:52:28 2008 New Revision: 57467 Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Log: Interface MOV, ADD ... Changed New DEC Instruction added and tested New PUSH/POP Instruction added but not testet yet. Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/assembler.py Tue Aug 19 17:52:28 2008 @@ -1,4 +1,4 @@ -from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 +from pypy.jit.codegen.x86_64.objmodel import Register64, Immediate32 #Mapping from register to coding (Rex.W or Rex.B , ModRM) REGISTER_MAP = { @@ -21,25 +21,30 @@ } # This method wirtes the bitencodings into -# the memory. imm32 is used when the operation -# has an constant as operand -def make_two_operand_instr(opcode,imm32_mod=None): +# the memory. The parameters are overwritten +# if one of the operands is an register +def make_two_operand_instr(W = None, R = None, X = None, B = None, opcode =None, md1 = None, md2 = None): def quadreg_instr(self, arg1, arg2): + # move the parameter + # to the inner function + modrm1 = md1 + modrm2 = md2 + rexW = W + rexR = R + rexX = X + rexB = B # Todo: other cases e.g memory as operand if isinstance(arg1,Register64): rexR, modrm1 = self.get_register_bits(arg1.reg) if isinstance(arg2,Register64): rexB, modrm2 = self.get_register_bits(arg2.reg) - if isinstance(arg2,Constant32): # e.g IMMEDIATE32 - rexB = 0 - # rexW(1) = 64bitMode rexX(0) = doesn't matter - # exchange the two arguments (rexB/rexR) (modrm2/modrm1) - if isinstance(arg2,Constant32): - self.write_rex_byte(1, rexB, 0, rexR) + # exchange the two arguments (modrm2/modrm1) + if isinstance(arg2,Immediate32): + self.write_rex_byte(rexW, rexR, rexX, rexB) self.write(opcode) - self.write_modRM_byte(3, imm32_mod, modrm1) + self.write_modRM_byte(3, modrm2, modrm1) # FIXME: Bad solution # TODO: support values > 255 if(arg2.value<256): @@ -48,25 +53,35 @@ self.write(chr(0)) self.write(chr(0)) else: - self.write_rex_byte(1, rexB, 0, rexR) + # FIXME: exchange the two arguments (rexB/rexR) + self.write_rex_byte(rexW, rexB, rexX, rexR) self.write(opcode) self.write_modRM_byte(3, modrm2, modrm1) return quadreg_instr # This method wirtes the bitencodings into -# the memory. mod is operation specific -def make_one_operand_instr(opcode,mod = None): - def quadreg_instr(self, arg1): +# the memory. 
The parameters are overwritten +# if one of the operands is an register +def make_one_operand_instr(W = None, R = None, X = None, B = None, opcode = None, md1 = None, md2 = None): + def quadreg_instr(self, arg1): + # move the parameter + # to the inner function + modrm1 = md1 + modrm2 = md2 + rexW = W + rexR = R + rexX = X + rexB = B + # Todo: other cases e.g memory as operand if isinstance(arg1,Register64): rexB, modrm1 = self.get_register_bits(arg1.reg) - rexX = 0 # rexW(1) = 64bitMode - self.write_rex_byte(1, 0, rexX, rexB) + self.write_rex_byte(rexW, rexR, rexX, rexB) self.write(opcode) - self.write_modRM_byte(3, mod, modrm1) + self.write_modRM_byte(3, modrm2, modrm1) return quadreg_instr class X86_64CodeBuilder(object): @@ -79,20 +94,58 @@ """ tells the current position in memory""" raise NotImplementedError - # The opcodes differs depending on the operands - ADD_QWREG_IMM32 = make_two_operand_instr("\x81",2) - ADD_QWREG_QWREG = make_two_operand_instr("\x00") - INC_QWREG = make_one_operand_instr("\xFF",0) + # The opcodes differs depending on the operands - MOV_QWREG_IMM32 = make_two_operand_instr("\xC7",0) - MOV_QWREG_QWREG = make_two_operand_instr("\x89") + # FIXME: rexX,rexB are set + _ADD_QWREG_IMM32 = make_two_operand_instr( 1, 0, 0, 0, "\x81", None, 2) + _ADD_QWREG_QWREG = make_two_operand_instr( 1, None, 0, None, "\x00", None, None) - SUB_QWREG_QWREG = make_two_operand_instr("\x28") + _DEC_QWREG = make_one_operand_instr( 1, 0, 0, None, "\xFF", None, 1) + _INC_QWREG = make_one_operand_instr( 1, 0, 0, None, "\xFF", None, 0) + + + _MOV_QWREG_IMM32 = make_two_operand_instr( 1, 0, 0, None, "\xC7", None, 0) + _MOV_QWREG_QWREG = make_two_operand_instr( 1, None, 0, None, "\x89", None, None) + + # FIXME: rexW is set + _POP_QWREG = make_one_operand_instr( 1, 0, 0, None, "\x8F", None, 0) + _PUSH_QWREG = make_one_operand_instr( 1, 0, 0, None, "\xFF", None, 6) + + _SUB_QWREG_QWREG = make_two_operand_instr( 1, None, 0, None, "\x28", None, None) + + # TODO: maybe a problem with more ore less than two arg. + def ADD(self, op1, op2): + method = getattr(self, "_ADD"+op1.to_string()+op2.to_string()) + method(op1, op2) + + def DEC(self, op1): + method = getattr(self, "_DEC"+op1.to_string()) + method(op1) + + def INC(self, op1): + method = getattr(self, "_INC"+op1.to_string()) + method(op1) + + def POP(self, op1): + method = getattr(self, "_POP"+op1.to_string()) + method(op1) + + def PUSH(self, op1): + method = getattr(self, "_POP"+op1.to_string()) + method(op1) + + def MOV(self, op1, op2): + method = getattr(self, "_MOV"+op1.to_string()+op2.to_string()) + method(op1, op2) def RET(self): self.write("\xC3") + def SUB(self, op1, op2): + method = getattr(self, "_SUB"+op1.to_string()+op2.to_string()) + method(op1, op2) + def get_register_bits(self, register): return REGISTER_MAP[register] Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/objmodel.py Tue Aug 19 17:52:28 2008 @@ -1,13 +1,25 @@ from pypy.jit.codegen import model # Wrapper Classes - +# The opcaodes differ from the type of +# the operand. 
So every wrapper is necessary class Register64(model.GenVar): - _dispatchname = "_QWREG" def __init__(self, reg): self.reg = reg + + def to_string(self): + return "_QWREG" -# TODO: support 64-bit Constants -class Constant32(model.GenConst): - _dispatchname = "_IMM32" +class Immediate32(model.GenConst): + def __init__(self, value): + self.value = value + + def to_string(self): + return "_IMM32" + +# TODO: understand GenConst +class Immediate64(model.GenConst): def __init__(self, value): self.value = value + + def to_string(self): + return "_IMM64" \ No newline at end of file Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/rgenop.py Tue Aug 19 17:52:28 2008 @@ -1,6 +1,6 @@ from pypy.jit.codegen import model from pypy.rlib.objectmodel import specialize -from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 +from pypy.jit.codegen.x86_64.objmodel import Register64, Immediate32 from pypy.jit.codegen.x86_64.codebuf import InMemoryCodeBuilder #TODO: understand llTypesystem from pypy.rpython.lltypesystem import llmemory, lltype @@ -17,27 +17,20 @@ def make_two_argument_method(name): def op_int(self, gv_x, gv_y): gv_z = self.allocate_register() - self.mc.MOV_QWREG_QWREG(gv_z, gv_x) - method = getattr(self.mc, name + type_to_string(gv_x)+type_to_string(gv_y)) + self.mc.MOV(gv_z, gv_x) + method = getattr(self.mc, name) method(gv_z, gv_y) return gv_z return op_int def make_one_argument_method(name): def op_int(self, gv_x): - method = getattr(self.mc, name+type_to_string(gv_x)) + method = getattr(self.mc, name) method(gv_x) return gv_x return op_int - -# helper of "make_two_argument_method" to choose -# the right assembler method -def type_to_string(parse_me): - return parse_me._dispatchname - - # a small helper that provides correct type signature def map_arg(arg): if isinstance(arg, lltype.Ptr): @@ -84,10 +77,13 @@ op_int_add = make_two_argument_method("ADD") op_int_sub = make_two_argument_method("SUB") op_int_inc = make_one_argument_method("INC") + op_int_dec = make_one_argument_method("DEC") + op_int_push = make_one_argument_method("PUSH") + op_int_pop = make_one_argument_method("POP") def finish_and_return(self, sigtoken, gv_returnvar): #self.mc.write("\xB8\x0F\x00\x00\x00") - self.mc.MOV_QWREG_QWREG(Register64("rax"), gv_returnvar) + self.mc.MOV(Register64("rax"), gv_returnvar) self.mc.RET() def allocate_register(self, register=None): @@ -116,7 +112,7 @@ T = lltype.typeOf(llvalue) # TODO: other cases(?) 
if T is lltype.Signed: - return Constant32(llvalue) + return Immediate32(llvalue) def newgraph(self, sigtoken, name): arg_tokens, res_token = sigtoken @@ -128,5 +124,5 @@ register_list = ["rdi","rsi"] inputargs_gv = [builder.allocate_register(register_list[i]) for i in range(len(arg_tokens))] - return builder,Constant32(entrypoint), inputargs_gv + return builder,Immediate32(entrypoint), inputargs_gv Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_assembler.py Tue Aug 19 17:52:28 2008 @@ -1,5 +1,5 @@ from pypy.jit.codegen.x86_64 import assembler -from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 +from pypy.jit.codegen.x86_64.objmodel import Register64, Immediate32 class AsmTest(assembler.X86_64CodeBuilder): def __init__(self): @@ -13,12 +13,12 @@ def test_add(): mc = AsmTest() - mc.ADD_QWREG_QWREG(Register64("rax"), Register64("r11")) + mc.ADD(Register64("rax"), Register64("r11")) assert mc.get_as_string() == "\x4C\x00\xD8" - mc.ADD_QWREG_QWREG(Register64("rbx"), Register64("rbx")) + mc.ADD(Register64("rbx"), Register64("rbx")) assert mc.get_as_string() == "\x4C\x00\xD8\x48\x00\xDB" def test_mov(): mc = AsmTest() - mc.MOV_QWREG_QWREG(Register64("r15"),Register64("rsp")) + mc.MOV(Register64("r15"),Register64("rsp")) assert mc.get_as_string() == "\x49\x89\xE7" \ No newline at end of file Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_rgenop.py Tue Aug 19 17:52:28 2008 @@ -19,6 +19,15 @@ builder.end() return gv_inc +def make_dec(rgenop): + sigtoken = rgenop.sigToken(lltype.FuncType([lltype.Signed], lltype.Signed)) + builder, gv_dec, gv_x = rgenop.newgraph(sigtoken, "dec") + builder.start_writing() + gv_result = builder.genop1("int_dec", gv_x[0]) + builder.finish_and_return(sigtoken, gv_result) + builder.end() + return gv_dec + class TestRGenopDirect(AbstractRGenOpTestsDirect): RGenOp = RX86_64GenOp @@ -29,6 +38,20 @@ res = fnptr(0) assert res == 1 + def test_dec(self): + rgenop = self.RGenOp() + dec_result = make_dec(rgenop) + fnptr = self.cast(dec_result,1) + res = fnptr(2) + assert res == 1 + + #def test_push_and_pop(self): + # rgenop = self.RGenOp() + # push_result = make_push(rgenop) + # fnptr = self.cast(push_result,1) + # res = fnptr(2) + # assert res == 1 + test_directtesthelper_direct = skip test_dummy_compile = skip test_cast_raising = skip Modified: pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/x86_64/test/test_simple.py Tue Aug 19 17:52:28 2008 @@ -2,7 +2,7 @@ from pypy.jit.codegen.x86_64.rgenop import RX86_64GenOp from pypy.rpython.lltypesystem import lltype from ctypes import cast, c_void_p, CFUNCTYPE, c_long, c_double -from pypy.jit.codegen.x86_64.objmodel import Register64, Constant32 +from pypy.jit.codegen.x86_64.objmodel import Register64, Immediate32 from pypy.jit.codegen.test.rgenop_tests import AbstractTestBase from pypy.jit.codegen.test.rgenop_tests import 
AbstractRGenOpTestsDirect From cami at codespeak.net Wed Aug 20 09:45:06 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Wed, 20 Aug 2008 09:45:06 +0200 (CEST) Subject: [pypy-svn] r57493 - in pypy/dist/pypy/lang/gameboy: . test Message-ID: <20080820074506.531CD169FA9@codespeak.net> Author: cami Date: Wed Aug 20 09:45:04 2008 New Revision: 57493 Modified: pypy/dist/pypy/lang/gameboy/cartridge.py pypy/dist/pypy/lang/gameboy/cpu.py pypy/dist/pypy/lang/gameboy/gameboy.py pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py pypy/dist/pypy/lang/gameboy/test/test_video.py pypy/dist/pypy/lang/gameboy/video.py Log: fixed bug in vide.set_line_y_compare, missed a check case fixed bug in cartrdige.mbc1, params not correctly reset adapted test cases for these bugs Modified: pypy/dist/pypy/lang/gameboy/cartridge.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cartridge.py (original) +++ pypy/dist/pypy/lang/gameboy/cartridge.py Wed Aug 20 09:45:04 2008 @@ -16,7 +16,7 @@ # HELPERS ---------------------------------------------------------------------- -def has_cartridge_battery(self, cartridge_type): +def has_cartridge_battery(cartridge_type): return (cartridge_type == constants.TYPE_MBC1_RAM_BATTERY or cartridge_type == constants.TYPE_MBC2_BATTERY or cartridge_type == constants.TYPE_MBC3_RTC_BATTERY @@ -67,7 +67,10 @@ # CARTRIDGE class CartridgeManager(object): - + """ + Delegates the loading to the CartridgeFile, + verifies the Cartridge by calculating the checksums + """ def __init__(self, clock): assert isinstance(clock, Clock) self.clock = clock @@ -188,7 +191,8 @@ class CartridgeFile(object): """ - File mapping. Holds the file contents + File mapping. 
Holds the file contents and is responsible for reading + and writing """ def __init__(self, file=None): self.reset() @@ -288,6 +292,7 @@ self.set_ram(ram) def reset(self): + self.rom_bank = self.rom_bank_size self.ram_bank = 0 self.ram_enable = False self.rom_size = 0 Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Wed Aug 20 09:45:04 2008 @@ -2,6 +2,7 @@ from pypy.lang.gameboy import constants from pypy.lang.gameboy.ram import * from pypy.lang.gameboy.interrupt import * +import pdb # --------------------------------------------------------------------------- Modified: pypy/dist/pypy/lang/gameboy/gameboy.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy.py Wed Aug 20 09:45:04 2008 @@ -92,7 +92,7 @@ def emulate(self, ticks): while ticks > 0: count = self.get_cycles() - #print "emulating", ticks, "cycles, available", count + print "python: ticks", count self.cpu.emulate(count) self.serial.emulate(count) self.timer.emulate(count) @@ -204,5 +204,5 @@ for tile in range(0, 12): self.video.write(0x9904 + tile, tile + 1) self.video.write(0x9924 + tile, tile + 13) - self.video.write(0x9905 + 12, 25) + self.video.write(0x9904 + 12, 25) Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Wed Aug 20 09:45:04 2008 @@ -10,6 +10,7 @@ from pypy.rlib.rsdl import RSDL, RSDL_helper from pypy.rpython.lltypesystem import lltype, rffi from pypy.rlib.objectmodel import specialize +import time # GAMEBOY ---------------------------------------------------------------------- @@ -36,7 +37,8 @@ isRunning = True while isRunning and self.handle_events(): self.emulate(constants.GAMEBOY_CLOCK >> 2) - RSDL.Delay(1) + time.sleep(10/1000) + RSDL.Delay(10) except : lltype.free(self.event, flavor='raw') RSDL.Quit() @@ -88,14 +90,15 @@ RSDL.Flip(self.screen) def draw_pixels(self): - #str = "" + pass + str = "" for y in range(self.height): - #str += "\n" + str += "\n" for x in range(self.width): - #if y%2 == 0 or True: - # px = self.get_pixel_color(x, y) - # str += ["#", "%", "+", " ", " "][px] - RSDL_helper.set_pixel(self.screen, x, y, self.get_pixel_color(x, y)) + if y%2 == 0 or True: + px = self.get_pixel_color(x, y) + str += ["#", "%", "+", " ", " "][px] + #RSDL_helper.set_pixel(self.screen, x, y, self.get_pixel_color(x, y)) #print str; def pixel_map(self, x, y): Modified: pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_memory_bank_controller.py Wed Aug 20 09:45:04 2008 @@ -188,7 +188,7 @@ def test_mbc1_reset(mbc=None): if mbc==None: mbc = get_mbc1() - mbc.rom_bank = constants.ROM_BANK_SIZE +1 + mbc.rom_bank = 0 mbc.memory_model = 1 mbc.ram_enable = True mbc.ram_bank = 1 Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_video.py (original) +++ 
pypy/dist/pypy/lang/gameboy/test/test_video.py Wed Aug 20 09:45:04 2008 @@ -107,13 +107,28 @@ assert video.interrupt.lcd.is_pending() == False video.control = 0x80 - video.line_y = value + video.line_y = 0xF6 + video.stat = 0x04 + video.write(0xFF45, value) + assert video.stat == 0x04 + assert video.interrupt.lcd.is_pending() == False + + video.control = 0x80 + video.line_y = 0xF6 + video.stat = 0x00 + video.write(0xFF45, value) + assert video.stat == 0x04 + assert video.interrupt.lcd.is_pending() == False + + video.control = 0x80 + video.line_y = 0xF6 video.stat = 0x40 video.write(0xFF45, value) assert video.stat == 0x44 assert video.interrupt.lcd.is_pending() == True + def test_control(): video = get_video() Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Wed Aug 20 09:45:04 2008 @@ -6,7 +6,7 @@ from pypy.lang.gameboy import constants from pypy.lang.gameboy.ram import iMemory from pypy.lang.gameboy.cpu import process_2_complement - +import pdb # ----------------------------------------------------------------------------- @@ -169,10 +169,13 @@ def write_oam(self, address, data): if address >= constants.OAM_ADDR and \ address < (constants.OAM_ADDR + constants.OAM_SIZE): - self.oam[address - constants.OAM_ADDR] = data & 0xFF + self.oam[address - constants.OAM_ADDR] = data & 0xFF elif address >= constants.VRAM_ADDR and \ address < (constants.VRAM_ADDR + constants.VRAM_SIZE): - self.vram[address - constants.VRAM_ADDR] = data & 0xFF + if (address - constants.VRAM_ADDR) == 0x1910 or \ + (address - constants.VRAM_ADDR) == 0x1911: + pass + self.vram[address - constants.VRAM_ADDR] = data & 0xFF def read(self, address): address = int(address) @@ -216,14 +219,17 @@ return self.cycles def emulate(self, ticks): + print "python: video emulating" ticks = int(ticks) if (self.control & 0x80) != 0: self.cycles -= ticks self.consume_cycles() + print "python: video emulating DONE" def consume_cycles(self): while self.cycles <= 0: mode = self.stat & 0x03 + print mode if mode == 0: self.emulate_hblank() elif mode == 1: @@ -246,6 +252,7 @@ self.control = data def reset_control(self, data): + print "python reset control" # NOTE: do not reset constants.LY=LYC flag (bit 2) of the STAT register (Mr. Do!) 
self.line_y = 0 self.stat = (self.stat & 0xFC) @@ -261,6 +268,7 @@ return 0x80 | self.stat def set_status(self, data): + print "python set_status" self.stat = (self.stat & 0x87) | (data & 0x78) self.set_status_bug() @@ -293,7 +301,7 @@ self.line_y_compare = data if (self.control & 0x80) == 0: return - self.emulate_hblank_line_y_compare() + self.emulate_hblank_line_y_compare(stat_check=True) def get_dma(self): return self.dma @@ -372,38 +380,46 @@ # mode setting ----------------------------------------------------------- def set_mode_3_begin(self): + print "set_mode_3_begin" self.stat = (self.stat & 0xFC) | 0x03 self.cycles += constants.MODE_3_BEGIN_TICKS self.transfer = True def set_mode_3_end(self): + print "set_mode_3_end" self.stat = (self.stat & 0xFC) | 0x03 self.cycles += constants.MODE_3_END_TICKS self.transfer = False def set_mode_0(self): + print "set_mode_0" self.stat = (self.stat & 0xFC) self.cycles += constants.MODE_0_TICKS self.h_blank_interrupt_check() def set_mode_2(self): + print "set_mode_2" self.stat = (self.stat & 0xFC) | 0x02 self.cycles += constants.MODE_2_TICKS self.oam_interrupt_check() def set_mode_1_begin(self): + print "set_mode_1_begin" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_BEGIN_TICKS def set_mode_1(self): + print "set_mode_1" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_TICKS def set_mode_1_between(self): + print "set_mode_1_between" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_TICKS - constants.MODE_1_BEGIN_TICKS def set_mode_1_end(self): + print "set_mode_1_end" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_END_TICKS @@ -417,9 +433,13 @@ else: self.emulate_hblank_part_2() - def emulate_hblank_line_y_compare(self): + def emulate_hblank_line_y_compare(self, stat_check=False): if self.line_y == self.line_y_compare: - self.line_y_line_y_compare_interrupt_check() + if stat_check: + if (self.stat & 0x04) == 0: + self.line_y_line_y_compare_interrupt_check() + else: + self.line_y_line_y_compare_interrupt_check() else: self.stat &= 0xFB From cami at codespeak.net Wed Aug 20 11:32:26 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Wed, 20 Aug 2008 11:32:26 +0200 (CEST) Subject: [pypy-svn] r57494 - pypy/dist/pypy/lang/gameboy Message-ID: <20080820093226.BC8E516A067@codespeak.net> Author: cami Date: Wed Aug 20 11:32:25 2008 New Revision: 57494 Modified: pypy/dist/pypy/lang/gameboy/cpu.py Log: fixed execution bug. fetch should not consume cycles in emulation Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Wed Aug 20 11:32:25 2008 @@ -347,11 +347,11 @@ self.cycles += ticks self.handle_pending_interrupts() while self.cycles > 0: - self.execute(self.fetch()) + self.execute(self.fetch(use_cycles=False)) def emulate_step(self): self.handle_pending_interrupts() - self.execute(self.fetch()) + self.execute(self.fetch(use_cycles=False)) def handle_pending_interrupts(self): @@ -407,7 +407,8 @@ def fetch(self, use_cycles=True): # Fetching 1 cycle - self.cycles += 1 + if use_cycles: + self.cycles += 1 if self.pc.get(use_cycles) <= 0x3FFF: data = self.rom[self.pc.get(use_cycles)] else: From arigo at codespeak.net Wed Aug 20 12:43:38 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 12:43:38 +0200 (CEST) Subject: [pypy-svn] r57495 - in pypy/dist/pypy/lib: . 
test2 Message-ID: <20080820104338.62EB216A083@codespeak.net> Author: arigo Date: Wed Aug 20 12:43:36 2008 New Revision: 57495 Modified: pypy/dist/pypy/lib/itertools.py pypy/dist/pypy/lib/test2/test_itertools.py Log: Performance issue and fix. Modified: pypy/dist/pypy/lib/itertools.py ============================================================================== --- pypy/dist/pypy/lib/itertools.py (original) +++ pypy/dist/pypy/lib/itertools.py Wed Aug 20 12:43:36 2008 @@ -368,14 +368,14 @@ self.donext = self.it.next except AttributeError: raise TypeError - while self.cnt < self.start: - self.donext() + nextindex = self.start + if self.stop is not None and nextindex >= self.stop: + raise StopIteration + while self.cnt <= nextindex: + nextitem = self.donext() self.cnt += 1 - if self.stop is None or self.cnt < self.stop: - self.start += self.step - self.cnt += 1 - return self.donext() - raise StopIteration + self.start += self.step + return nextitem class izip: """Make an iterator that aggregates elements from each of the Modified: pypy/dist/pypy/lib/test2/test_itertools.py ============================================================================== --- pypy/dist/pypy/lib/test2/test_itertools.py (original) +++ pypy/dist/pypy/lib/test2/test_itertools.py Wed Aug 20 12:43:36 2008 @@ -9,3 +9,15 @@ it = self.itertools.chain([], [1, 2, 3]) lst = list(it) assert lst == [1, 2, 3] + + def test_islice(self): + import sys + itertools = self.itertools + + slic = itertools.islice(itertools.count(), 1, 10, sys.maxint) + assert len(list(slic)) == 1 + + if '__pypy__' not in sys.builtin_module_names: + skip("this takes ages on top of CPython's itertools module") + slic = itertools.islice(itertools.count(), 1, 10, sys.maxint-20) + assert len(list(slic)) == 1 From arigo at codespeak.net Wed Aug 20 13:34:05 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 13:34:05 +0200 (CEST) Subject: [pypy-svn] r57496 - in pypy/dist/pypy/module/itertools: . test Message-ID: <20080820113405.B6C8316A096@codespeak.net> Author: arigo Date: Wed Aug 20 13:34:04 2008 New Revision: 57496 Modified: pypy/dist/pypy/module/itertools/interp_itertools.py pypy/dist/pypy/module/itertools/test/test_itertools.py Log: Tweak our interp itertools tests until they pass in CPython 2.5. Apply a few fixes to interp_itertools to make them pass too. 
Modified: pypy/dist/pypy/module/itertools/interp_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/interp_itertools.py (original) +++ pypy/dist/pypy/module/itertools/interp_itertools.py Wed Aug 20 13:34:04 2008 @@ -9,21 +9,17 @@ def __init__(self, space, firstval): self.space = space self.c = firstval - self.overflowed = False def iter_w(self): return self.space.wrap(self) def next_w(self): - if self.overflowed: - raise OperationError(self.space.w_OverflowError, - self.space.wrap("cannot count beyond sys.maxint")) - c = self.c try: self.c = ovfcheck(self.c + 1) except OverflowError: - self.overflowed = True + raise OperationError(self.space.w_OverflowError, + self.space.wrap("cannot count beyond sys.maxint")) return self.space.wrap(c) @@ -289,7 +285,10 @@ start = 0 w_stop = w_startstop elif num_args <= 2: - start = space.int_w(w_startstop) + if space.is_w(w_startstop, space.w_None): + start = 0 + else: + start = space.int_w(w_startstop) w_stop = args_w[0] else: raise OperationError(space.w_TypeError, space.wrap("islice() takes at most 4 arguments (" + str(num_args) + " given)")) @@ -302,7 +301,11 @@ stoppable = True if num_args == 2: - step = space.int_w(args_w[1]) + w_step = args_w[1] + if space.is_w(w_step, space.w_None): + step = 1 + else: + step = space.int_w(w_step) else: step = 1 Modified: pypy/dist/pypy/module/itertools/test/test_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/test/test_itertools.py (original) +++ pypy/dist/pypy/module/itertools/test/test_itertools.py Wed Aug 20 13:34:04 2008 @@ -21,10 +21,11 @@ def test_count_overflow(self): import itertools, sys - it = itertools.count(sys.maxint) - assert it.next() == sys.maxint - raises(OverflowError, it.next) - raises(OverflowError, it.next) + # this checks for exact implementation details... 
that's 2.5 behavior + it = itertools.count(sys.maxint - 1) + assert it.next() == sys.maxint - 1 + raises(OverflowError, it.next) + raises(OverflowError, it.next) raises(OverflowError, itertools.count, sys.maxint + 1) @@ -41,20 +42,21 @@ import itertools times = 10 - it = itertools.repeat(None, times=times) + it = itertools.repeat(None, times) for i in range(times): it.next() raises(StopIteration, it.next) - it = itertools.repeat(None, times=None) - for x in range(10): - it.next() # Should be no StopIteration + #---does not work in CPython 2.5 + #it = itertools.repeat(None, None) + #for x in range(10): + # it.next() # Should be no StopIteration - it = itertools.repeat(None, times=0) + it = itertools.repeat(None, 0) raises(StopIteration, it.next) raises(StopIteration, it.next) - it = itertools.repeat(None, times=-1) + it = itertools.repeat(None, -1) raises(StopIteration, it.next) raises(StopIteration, it.next) @@ -198,10 +200,16 @@ assert list(itertools.islice(xrange(100), 10, 3)) == [] + # new in 2.5: start=None or step=None + assert list(itertools.islice(xrange(10), None)) == range(10) + assert list(itertools.islice(xrange(10), None,None)) == range(10) + assert list(itertools.islice(xrange(10), None,None,None)) == range(10) + def test_islice_overflow(self): import itertools import sys - + if '__pypy__' not in sys.builtin_module_names: + skip("CPython 2.5 gives a strange ValueError") raises(OverflowError, itertools.islice, [], sys.maxint + 1) def test_islice_wrongargs(self): @@ -216,7 +224,6 @@ raises(ValueError, itertools.islice, [], 0, 0, -1) raises(ValueError, itertools.islice, [], 0, 0, 0) - raises(TypeError, itertools.islice, [], 0, 0, None) raises(TypeError, itertools.islice, [], 0, 0, 0, 0) @@ -260,8 +267,9 @@ def test_imap(self): import itertools - it = itertools.imap(None) - raises(StopIteration, it.next) + #---does not work in CPython 2.5 + #it = itertools.imap(None) + #raises(StopIteration, it.next) obj_list = [object(), object(), object()] it = itertools.imap(None, obj_list) @@ -291,8 +299,6 @@ import itertools # Duplicate python 2.4 behaviour for invalid arguments - it = itertools.imap(0) - raises(StopIteration, it.next) it = itertools.imap(0, []) raises(StopIteration, it.next) it = itertools.imap(0, [0]) @@ -325,9 +331,10 @@ for x in [(1, 5), (2, 6)]: assert it.next() == x raises(StopIteration, it.next) - assert it1.next() == 4 - raises(StopIteration, it.next) - assert it1.next() == 5 + assert it1.next() in [3, 4] + #---does not work in CPython 2.5 + #raises(StopIteration, it.next) + #assert it1.next() in [4, 5] def test_izip_wrongargs(self): import itertools, re @@ -512,7 +519,7 @@ itertools.groupby([]), itertools.ifilter(None, []), itertools.ifilterfalse(None, []), - itertools.imap(None), + itertools.imap(None, []), itertools.islice([], 0), itertools.izip(), itertools.repeat(None), @@ -553,7 +560,13 @@ def test_subclassing(self): import itertools - # Although (mostly) implemented as classes, the itertools functions should not be subclassable + # Although (mostly) implemented as classes, the itertools functions + # should probably not be subclassable. They are in CPython but + # subclassing them is insane if you ask me (and really forbidden + # by the docs, that pretend that they are functions). 
+ import sys + if '__pypy__' not in sys.builtin_module_names: + skip("itertools types are subclassable in CPython") iterables = [ itertools.chain, itertools.count, From arigo at codespeak.net Wed Aug 20 13:57:13 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 13:57:13 +0200 (CEST) Subject: [pypy-svn] r57497 - in pypy/dist/pypy/module/itertools: . test Message-ID: <20080820115713.AB2DD16A035@codespeak.net> Author: arigo Date: Wed Aug 20 13:57:10 2008 New Revision: 57497 Added: pypy/dist/pypy/module/itertools/test/errors.txt (contents, props changed) Modified: pypy/dist/pypy/module/itertools/interp_itertools.py pypy/dist/pypy/module/itertools/test/test_itertools.py Log: CPython 2.5's test_itertools passes, except a number of corner cases documented in errors.txt. Modified: pypy/dist/pypy/module/itertools/interp_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/interp_itertools.py (original) +++ pypy/dist/pypy/module/itertools/interp_itertools.py Wed Aug 20 13:57:10 2008 @@ -804,8 +804,8 @@ self.new_group = True #new group raise StopIteration -def W_GroupBy___new__(space, w_subtype, w_iterable, w_fun=None): - return space.wrap(W_GroupBy(space, w_iterable, w_fun)) +def W_GroupBy___new__(space, w_subtype, w_iterable, w_key=None): + return space.wrap(W_GroupBy(space, w_iterable, w_key)) W_GroupBy.typedef = TypeDef( 'groupby', Added: pypy/dist/pypy/module/itertools/test/errors.txt ============================================================================== --- (empty file) +++ pypy/dist/pypy/module/itertools/test/errors.txt Wed Aug 20 13:57:10 2008 @@ -0,0 +1,64 @@ + + +Here are the remaining errors of CPython 2.5's test_itertools. FWIW I +consider them all as obscure undocumented implementation details. 
+ + + +====================================================================== +ERROR: test_tee (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 376, in test_tee + c = type(a)('def') +TypeError: default __new__ takes no parameters + +====================================================================== +ERROR: test_repeat (__main__.LengthTransparency) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 690, in test_repeat + from test.test_iterlen import len +ImportError: cannot import name 'len' + +====================================================================== +ERROR: test_keywords_in_subclass (__main__.SubclassWithKwargsTest) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 760, in test_keywords_in_subclass + class Subclass(cls): +TypeError: type 'repeat' is not an acceptable base class + +====================================================================== +FAIL: test_count (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 59, in test_count + self.assertEqual(repr(c), 'count(3)') +AssertionError: '' != 'count(3)' + +====================================================================== +FAIL: test_imap (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 231, in test_imap + self.assertRaises(TypeError, imap, operator.neg) +AssertionError: TypeError not raised + +====================================================================== +FAIL: test_izip (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 199, in test_izip + self.assertEqual(min(ids), max(ids)) +AssertionError: 149283404 != 150789644 + +====================================================================== +FAIL: test_repeat (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "itest25.py", line 214, in test_repeat + self.assertEqual(repr(r), 'repeat((1+0j))') +AssertionError: '' != 'repeat((1+0j))' + +---------------------------------------------------------------------- Modified: pypy/dist/pypy/module/itertools/test/test_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/test/test_itertools.py (original) +++ pypy/dist/pypy/module/itertools/test/test_itertools.py Wed Aug 20 13:57:10 2008 @@ -474,6 +474,14 @@ raises(StopIteration, g.next) raises(StopIteration, it.next) + # keyword argument + it = itertools.groupby([0, 1, 2, 3, 4, 5], key = half_floor) + for x in [0, 1, 2]: + k, g = it.next() + assert k == x + assert list(g) == [x*2, x*2+1] + raises(StopIteration, it.next) + # Grouping is not based on key identity class NeverEqual(object): def __eq__(self, other): From cfbolz at codespeak.net Wed Aug 20 15:06:14 2008 From: cfbolz at codespeak.net (cfbolz at codespeak.net) Date: Wed, 20 Aug 2008 15:06:14 +0200 (CEST) Subject: [pypy-svn] r57503 - in pypy/branch/2.5-features/pypy/interpreter: astcompiler pyparser pyparser/test Message-ID: 
<20080820130614.1D67D16A0B3@codespeak.net> Author: cfbolz Date: Wed Aug 20 15:06:12 2008 New Revision: 57503 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py Log: use official name of the abs import flag Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/consts.py Wed Aug 20 15:06:12 2008 @@ -19,5 +19,5 @@ CO_GENERATOR = 0x0020 CO_GENERATOR_ALLOWED = 0x1000 CO_FUTURE_DIVISION = 0x2000 -CO_FUTURE_ABSIMPORT = 0x4000 +CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 CO_FUTURE_WITH_STATEMENT = 0x8000 Modified: pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/astcompiler/pycodegen.py Wed Aug 20 15:06:12 2008 @@ -10,7 +10,7 @@ SC_FREE, SC_CELL, SC_DEFAULT, OP_APPLY, OP_ASSIGN, OP_DELETE, OP_NONE from pypy.interpreter.astcompiler.consts import CO_VARARGS, CO_VARKEYWORDS, \ CO_NEWLOCALS, CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, \ - CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSIMPORT + CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSOLUTE_IMPORT from pypy.interpreter.pyparser.error import SyntaxError from pypy.interpreter.astcompiler.opt import is_constant_false from pypy.interpreter.astcompiler.opt import is_constant_true @@ -151,7 +151,7 @@ elif feature == "with_statement": self.graph.setFlag(CO_FUTURE_WITH_STATEMENT) elif feature == "absolute_import": - self.graph.setFlag(CO_FUTURE_ABSIMPORT) + self.graph.setFlag(CO_FUTURE_ABSOLUTE_IMPORT) def emit(self, inst ): return self.graph.emit( inst ) @@ -853,7 +853,7 @@ def visitImport(self, node): self.set_lineno(node) - if self.graph.checkFlag(CO_FUTURE_ABSIMPORT): + if self.graph.checkFlag(CO_FUTURE_ABSOLUTE_IMPORT): level = 0 else: level = -1 @@ -871,7 +871,7 @@ def visitFrom(self, node): self.set_lineno(node) level = node.level - if level == 0 and not self.graph.checkFlag(CO_FUTURE_ABSIMPORT): + if level == 0 and not self.graph.checkFlag(CO_FUTURE_ABSOLUTE_IMPORT): level = -1 fromlist = [ self.space.wrap(name) for name,alias in node.names ] self.emitop_obj('LOAD_CONST', self.space.wrap(level)) # 2.5 flag Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/future.py Wed Aug 20 15:06:12 2008 @@ -25,7 +25,7 @@ """ from pypy.interpreter.astcompiler.consts import CO_GENERATOR_ALLOWED, \ - CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSIMPORT + CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSOLUTE_IMPORT def getFutures(futureFlags, source): futures = FutureAutomaton(futureFlags, source) Modified: pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py ============================================================================== --- 
pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/pyparser/test/test_futureautomaton.py Wed Aug 20 15:06:12 2008 @@ -145,6 +145,6 @@ s = 'from __future__ import absolute_import\n' f = run(s) assert f.pos == len(s) - assert f.flags == fut.CO_FUTURE_ABSIMPORT + assert f.flags == fut.CO_FUTURE_ABSOLUTE_IMPORT From antocuni at codespeak.net Wed Aug 20 15:36:04 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Wed, 20 Aug 2008 15:36:04 +0200 (CEST) Subject: [pypy-svn] r57507 - in pypy/branch/oo-jit/pypy: annotation jit/codegen/cli jit/rainbow jit/timeshifter rpython/ootypesystem Message-ID: <20080820133604.9C512169E92@codespeak.net> Author: antocuni Date: Wed Aug 20 15:36:03 2008 New Revision: 57507 Modified: pypy/branch/oo-jit/pypy/annotation/builtin.py pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py pypy/branch/oo-jit/pypy/jit/rainbow/interpreter.py pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py Log: some rpython fixes Modified: pypy/branch/oo-jit/pypy/annotation/builtin.py ============================================================================== --- pypy/branch/oo-jit/pypy/annotation/builtin.py (original) +++ pypy/branch/oo-jit/pypy/annotation/builtin.py Wed Aug 20 15:36:03 2008 @@ -577,7 +577,7 @@ if TYPE is ootype.Object: return SomeOOObject() elif TYPE is ootype.Class: - return SomeOOClass(T) + return SomeOOClass(TYPE) elif isinstance(TYPE, ootype.StaticMethod): return SomeOOStaticMeth(TYPE) elif isinstance(TYPE, ootype.OOType): Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py Wed Aug 20 15:36:03 2008 @@ -337,6 +337,12 @@ UNARYOPS = fillops(cli_opcodes.unary_ops, "UnaryOp") BINARYOPS = fillops(cli_opcodes.binary_ops, "BinaryOp") +class XXX(BinaryOp): + pass + +BINARYOPS['oostring'] = XXX +BINARYOPS['subclassof'] = XXX + @specialize.memo() def getopclass1(opname): try: Modified: pypy/branch/oo-jit/pypy/jit/rainbow/interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/interpreter.py Wed Aug 20 15:36:03 2008 @@ -1115,6 +1115,7 @@ @arguments("red", "jumptarget") def opimpl_goto_if_known_class(self, objbox, target): + assert isinstance(objbox, rvalue.AbstractPtrRedBox) known_class = False content = objbox.content if content is not None: @@ -1131,6 +1132,7 @@ def opimpl_const_oosend(self, greenargs, redargs, methname): from pypy.rpython.ootypesystem.rclass import CLASSTYPE selfbox = redargs[0] + assert isinstance(selfbox, rvalue.AbstractPtrRedBox) vstruct = selfbox.content assert vstruct is not None if isinstance(vstruct, rcontainer.PartialDataStruct): Modified: pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py (original) +++ pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py Wed Aug 20 15:36:03 2008 @@ -79,7 +79,7 @@ firstsubstructdesc = None materialize = None StructFieldDesc = None - PtrRedBox = rvalue.PtrRedBox + PtrRedBox = None firstfielddesc = 0 def __init__(self, RGenOp, TYPE): @@ -239,6 +239,7 @@ class 
StructTypeDesc(AbstractStructTypeDesc): StructFieldDesc = None # patched later with StructFieldDesc + PtrRedBox = rvalue.PtrRedBox _attrs_ = [] Modified: pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py ============================================================================== --- pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py (original) +++ pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py Wed Aug 20 15:36:03 2008 @@ -891,6 +891,7 @@ return '%s(%s)' % (self.__class__.__name__, self._INSTANCE) nullruntimeclass = _class(None) +Class._null = nullruntimeclass class _instance(object): From arigo at codespeak.net Wed Aug 20 16:21:04 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 16:21:04 +0200 (CEST) Subject: [pypy-svn] r57513 - in pypy/dist/pypy/module/itertools: . test Message-ID: <20080820142104.0996C16A0E9@codespeak.net> Author: arigo Date: Wed Aug 20 16:21:04 2008 New Revision: 57513 Modified: pypy/dist/pypy/module/itertools/interp_itertools.py pypy/dist/pypy/module/itertools/test/errors.txt pypy/dist/pypy/module/itertools/test/test_itertools.py Log: Test and fix: imap() with no iterables should raise TypeError. Modified: pypy/dist/pypy/module/itertools/interp_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/interp_itertools.py (original) +++ pypy/dist/pypy/module/itertools/interp_itertools.py Wed Aug 20 16:21:04 2008 @@ -443,6 +443,10 @@ self.identity_fun = (self.space.is_w(w_fun, space.w_None)) self.w_fun = w_fun + if len(args_w) == 0: + raise OperationError(space.w_TypeError, + space.wrap("imap() must have at least two arguments")) + iterators_w = [] i = 0 for iterable_w in args_w: @@ -464,16 +468,7 @@ return self.space.wrap(self) def next_w(self): - if not self.iterators_w: - raise OperationError(self.space.w_StopIteration, self.space.w_None) - - try: - w_objects = self.space.newtuple([self.space.next(w_it) for w_it in self.iterators_w]) - except OperationError, e: - if e.match(self.space, self.space.w_StopIteration): - self.iterators_w = None - raise - + w_objects = self.space.newtuple([self.space.next(w_it) for w_it in self.iterators_w]) if self.identity_fun: return w_objects else: Modified: pypy/dist/pypy/module/itertools/test/errors.txt ============================================================================== --- pypy/dist/pypy/module/itertools/test/errors.txt (original) +++ pypy/dist/pypy/module/itertools/test/errors.txt Wed Aug 20 16:21:04 2008 @@ -38,14 +38,6 @@ AssertionError: '' != 'count(3)' ====================================================================== -FAIL: test_imap (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 231, in test_imap - self.assertRaises(TypeError, imap, operator.neg) -AssertionError: TypeError not raised - -====================================================================== FAIL: test_izip (__main__.TestBasicOps) ---------------------------------------------------------------------- Traceback (most recent call last): Modified: pypy/dist/pypy/module/itertools/test/test_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/test/test_itertools.py (original) +++ pypy/dist/pypy/module/itertools/test/test_itertools.py Wed Aug 20 16:21:04 2008 @@ -267,10 +267,6 @@ def test_imap(self): import itertools - #---does not work in CPython 
2.5 - #it = itertools.imap(None) - #raises(StopIteration, it.next) - obj_list = [object(), object(), object()] it = itertools.imap(None, obj_list) for x in obj_list: @@ -305,6 +301,10 @@ raises(TypeError, it.next) raises(TypeError, itertools.imap, None, 0) + raises(TypeError, itertools.imap, None) + raises(TypeError, itertools.imap, bool) + raises(TypeError, itertools.imap, 42) + def test_izip(self): import itertools From arigo at codespeak.net Wed Aug 20 16:24:18 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 16:24:18 +0200 (CEST) Subject: [pypy-svn] r57514 - pypy/dist/pypy/module/itertools/test Message-ID: <20080820142418.10B70169F9D@codespeak.net> Author: arigo Date: Wed Aug 20 16:24:17 2008 New Revision: 57514 Modified: pypy/dist/pypy/module/itertools/test/errors.txt Log: This error got lost somewhere. Modified: pypy/dist/pypy/module/itertools/test/errors.txt ============================================================================== --- pypy/dist/pypy/module/itertools/test/errors.txt (original) +++ pypy/dist/pypy/module/itertools/test/errors.txt Wed Aug 20 16:24:17 2008 @@ -4,6 +4,17 @@ consider them all as obscure undocumented implementation details. +====================================================================== +ERROR: test_islice (__main__.TestBasicOps) +---------------------------------------------------------------------- +Traceback (most recent call last): + File "test_itertools.py", line 285, in test_islice + self.assertRaises(ValueError, islice, xrange(10), 'a') + File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 322, in failUnlessRaises + return + File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 320, in failUnlessRaises + callableObj(*args, **kwargs) +TypeError: expected integer, got str object ====================================================================== ERROR: test_tee (__main__.TestBasicOps) From arigo at codespeak.net Wed Aug 20 16:59:14 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 16:59:14 +0200 (CEST) Subject: [pypy-svn] r57516 - in pypy/dist/pypy: bin config doc/config translator/goal Message-ID: <20080820145914.A616A16A076@codespeak.net> Author: arigo Date: Wed Aug 20 16:59:11 2008 New Revision: 57516 Modified: pypy/dist/pypy/bin/py.py pypy/dist/pypy/config/pypyoption.py pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt pypy/dist/pypy/translator/goal/targetpypystandalone.py Log: Change the handling of --allworkingmodules to let the list of modules be backend-specific. 
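(A minimal illustrative sketch of the intended call pattern, not part of the commit; it assumes only the names introduced in the diff below, get_pypy_config() and enable_allworkingmodules() from pypy.config.pypyoption:)

    from pypy.config.pypyoption import get_pypy_config, enable_allworkingmodules

    config = get_pypy_config()
    # allworkingmodules now defaults to True, but no module is actually enabled
    # until the target asks for the backend-specific list explicitly:
    if config.objspace.allworkingmodules:
        enable_allworkingmodules(config)   # working_oo_modules for ootype,
                                           # working_modules for the C backend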
Modified: pypy/dist/pypy/bin/py.py ============================================================================== --- pypy/dist/pypy/bin/py.py (original) +++ pypy/dist/pypy/bin/py.py Wed Aug 20 16:59:11 2008 @@ -16,6 +16,7 @@ from pypy.interpreter import main, interactive, error, gateway from pypy.config.config import OptionDescription, BoolOption, StrOption from pypy.config.config import Config, to_optparse +from pypy.config import pypyoption import os, sys import time @@ -58,6 +59,10 @@ args = option.process_options(parser, argv[1:]) if interactiveconfig.verbose: error.RECORD_INTERPLEVEL_TRACEBACK = True + # --allworkingmodules takes really long to start up, but can be forced on + config.objspace.suggest(allworkingmodules=False) + if config.objspace.allworkingmodules: + pypyoption.enable_allworkingmodules(config) # create the object space Modified: pypy/dist/pypy/config/pypyoption.py ============================================================================== --- pypy/dist/pypy/config/pypyoption.py (original) +++ pypy/dist/pypy/config/pypyoption.py Wed Aug 20 16:59:11 2008 @@ -31,6 +31,11 @@ "thread"] )) +working_oo_modules = default_modules.copy() +working_oo_modules.update(dict.fromkeys( + [] # XXX at least, this probably works: "md5", "sha", "cStringIO" +)) + if sys.platform == "win32": # unix only modules del working_modules["crypt"] @@ -131,14 +136,13 @@ for modname in all_modules]), BoolOption("allworkingmodules", "use as many working modules as possible", - # NB. defaults to False for py.py and tests, but - # targetpypystandalone suggests True, which can be overridden - # with --no-allworkingmodules. - default=False, + # NB. defaults to True, but in py.py this is overridden by + # a False suggestion because it takes a while to start up. + # Actual module enabling only occurs if + # enable_allworkingmodules() is called, and it depends + # on the selected backend. + default=True, cmdline="--allworkingmodules", - suggests=[("objspace.usemodules.%s" % (modname, ), True) - for modname in working_modules - if modname in all_modules], negation=True), BoolOption("geninterp", "specify whether geninterp should be used", @@ -366,6 +370,17 @@ config.objspace.std.suggest(multimethods="doubledispatch") +def enable_allworkingmodules(config): + if config.translation.type_system == 'ootype': + modules = working_oo_modules + else: + modules = working_modules + # ignore names from 'essential_modules', notably 'exceptions', which + # may not be present in config.objspace.usemodules at all + modules = [name for name in modules if name not in essential_modules] + config.objspace.usemodules.suggest(**dict.fromkeys(modules, True)) + + if __name__ == '__main__': config = get_pypy_config() print config.getpaths() Modified: pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt ============================================================================== --- pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt (original) +++ pypy/dist/pypy/doc/config/objspace.allworkingmodules.txt Wed Aug 20 16:59:11 2008 @@ -1,5 +1,6 @@ This option enables the usage of all modules that are known to be working well and that translate without problems. -Note that this option is set to True by default by targetpypystandalone.py. -To force it to False, use ``--no-allworkingmodules``. +Note that this option defaults to True (except when running +``py.py`` because it takes a long time to start). To force it +to False, use ``--no-allworkingmodules``. 
Modified: pypy/dist/pypy/translator/goal/targetpypystandalone.py ============================================================================== --- pypy/dist/pypy/translator/goal/targetpypystandalone.py (original) +++ pypy/dist/pypy/translator/goal/targetpypystandalone.py Wed Aug 20 16:59:11 2008 @@ -132,14 +132,9 @@ # expose the following variables to ease debugging global space, entry_point - # not really clean, but setting the default of allworkingmodules - # to True has two problems: it doesn't implies its suggests (the - # config machinery doesn't handle that case), and we don't want - # allworkingmodules to be enabled for all spaces by default - # (e.g. in py.py or in tests). Auto-generated helps report the - # default of allworkingmodules to be False, though, which is a - # bit annoying. - config.objspace.suggest(allworkingmodules=True) + if config.objspace.allworkingmodules: + from pypy.config.pypyoption import enable_allworkingmodules + enable_allworkingmodules(config) if config.translation.thread: config.objspace.usemodules.thread = True From arigo at codespeak.net Wed Aug 20 17:30:42 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 17:30:42 +0200 (CEST) Subject: [pypy-svn] r57519 - pypy/dist/pypy/translator/goal Message-ID: <20080820153042.7B8CA169F22@codespeak.net> Author: arigo Date: Wed Aug 20 17:30:40 2008 New Revision: 57519 Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py Log: Oups oups oups. Forgot that the default GC is no longer Boehm if we say --opt=3. Modified: pypy/dist/pypy/translator/goal/bench-cronjob.py ============================================================================== --- pypy/dist/pypy/translator/goal/bench-cronjob.py (original) +++ pypy/dist/pypy/translator/goal/bench-cronjob.py Wed Aug 20 17:30:40 2008 @@ -181,8 +181,8 @@ if backends == []: #_ prefix means target specific option, # prefix to outcomment backends = [backend.strip() for backend in """ c--opt=0--_no-allworkingmodules - c--stackless--opt=3--_no-allworkingmodules - c--opt=3 + c--stackless--gc=boehm--opt=3--_no-allworkingmodules + c--gc=boehm--opt=3 c--thread--gc=hybrid--opt=3--_no-allworkingmodules c--gc=semispace--opt=3--_no-allworkingmodules c--gc=generation--opt=3--_no-allworkingmodules From arigo at codespeak.net Wed Aug 20 17:36:34 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 20 Aug 2008 17:36:34 +0200 (CEST) Subject: [pypy-svn] r57521 - in pypy/dist/pypy: config lib Message-ID: <20080820153634.D779416A0EF@codespeak.net> Author: arigo Date: Wed Aug 20 17:36:32 2008 New Revision: 57521 Modified: pypy/dist/pypy/config/pypyoption.py pypy/dist/pypy/lib/_sre.py pypy/dist/pypy/lib/cStringIO.py pypy/dist/pypy/lib/itertools.py pypy/dist/pypy/lib/marshal.py pypy/dist/pypy/lib/md5.py pypy/dist/pypy/lib/readline.py pypy/dist/pypy/lib/sha.py Log: * itertools works. * a few simple modules seem to compile fine for the CLI. * add a comment in some modules from pypy/lib/ about being shadowed by a built-in module. 
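(Illustration only, not from the commit: what "shadowed by a built-in module" means for application-level code. Assuming a translated pypy-c, the same import works whether or not the interp-level module was compiled in:)

    import md5                          # built-in 'md5' if compiled in,
                                        # otherwise pypy/lib/md5.py is found
    print md5.new('abc').hexdigest()    # identical result with either one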
Modified: pypy/dist/pypy/config/pypyoption.py ============================================================================== --- pypy/dist/pypy/config/pypyoption.py (original) +++ pypy/dist/pypy/config/pypyoption.py Wed Aug 20 17:36:32 2008 @@ -28,12 +28,12 @@ "rctime" , "select", "zipimport", "_lsprof", "crypt", "signal", "dyngram", "_rawffi", "termios", "zlib", "struct", "md5", "sha", "bz2", "_minimal_curses", "cStringIO", - "thread"] + "thread", "itertools"] )) working_oo_modules = default_modules.copy() working_oo_modules.update(dict.fromkeys( - [] # XXX at least, this probably works: "md5", "sha", "cStringIO" + ["md5", "sha", "cStringIO", "itertools"] )) if sys.platform == "win32": Modified: pypy/dist/pypy/lib/_sre.py ============================================================================== --- pypy/dist/pypy/lib/_sre.py (original) +++ pypy/dist/pypy/lib/_sre.py Wed Aug 20 17:36:32 2008 @@ -1,4 +1,8 @@ # NOT_RPYTHON + +# Note that PyPy contains also a built-in module '_sre' which will hide +# this one if compiled in. + """ A pure Python reimplementation of the _sre module from CPython 2.4 Copyright 2005 Nik Haldimann, licensed under the MIT license Modified: pypy/dist/pypy/lib/cStringIO.py ============================================================================== --- pypy/dist/pypy/lib/cStringIO.py (original) +++ pypy/dist/pypy/lib/cStringIO.py Wed Aug 20 17:36:32 2008 @@ -2,6 +2,9 @@ # StringIO-based cStringIO implementation. # +# Note that PyPy contains also a built-in module 'cStringIO' which will hide +# this one if compiled in. + from StringIO import * from StringIO import __doc__ Modified: pypy/dist/pypy/lib/itertools.py ============================================================================== --- pypy/dist/pypy/lib/itertools.py (original) +++ pypy/dist/pypy/lib/itertools.py Wed Aug 20 17:36:32 2008 @@ -1,3 +1,6 @@ +# Note that PyPy contains also a built-in module 'itertools' which will +# hide this one if compiled in. + """Functional tools for creating and using iterators. Infinite iterators: Modified: pypy/dist/pypy/lib/marshal.py ============================================================================== --- pypy/dist/pypy/lib/marshal.py (original) +++ pypy/dist/pypy/lib/marshal.py Wed Aug 20 17:36:32 2008 @@ -1,3 +1,5 @@ -# temporary +# Note that PyPy contains also a built-in module 'marshal' which will +# hide this one if compiled in. + from _marshal import __doc__ from _marshal import * Modified: pypy/dist/pypy/lib/md5.py ============================================================================== --- pypy/dist/pypy/lib/md5.py (original) +++ pypy/dist/pypy/lib/md5.py Wed Aug 20 17:36:32 2008 @@ -1,6 +1,9 @@ #!/usr/bin/env python # -*- coding: iso-8859-1 -*- +# Note that PyPy contains also a built-in module 'md5' which will hide +# this one if compiled in. + """A sample implementation of MD5 in pure Python. This is an implementation of the MD5 hash function, as specified by Modified: pypy/dist/pypy/lib/readline.py ============================================================================== --- pypy/dist/pypy/lib/readline.py (original) +++ pypy/dist/pypy/lib/readline.py Wed Aug 20 17:36:32 2008 @@ -5,4 +5,9 @@ Note that some of the functions present in the CPython module 'readline' are only stubs at the moment. """ + +# Note that PyPy contains also a built-in module 'readline' which will hide +# this one if compiled in. However the built-in module is incomplete; +# don't use it. 
+ from pyrepl.readline import * Modified: pypy/dist/pypy/lib/sha.py ============================================================================== --- pypy/dist/pypy/lib/sha.py (original) +++ pypy/dist/pypy/lib/sha.py Wed Aug 20 17:36:32 2008 @@ -1,6 +1,9 @@ #!/usr/bin/env python # -*- coding: iso-8859-1 +# Note that PyPy contains also a built-in module 'sha' which will hide +# this one if compiled in. + """A sample implementation of SHA-1 in pure Python. Framework adapted from Dinu Gherman's MD5 implementation by From antocuni at codespeak.net Thu Aug 21 12:02:46 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Thu, 21 Aug 2008 12:02:46 +0200 (CEST) Subject: [pypy-svn] r57537 - pypy/branch/oo-jit/pypy/jit/timeshifter Message-ID: <20080821100246.479CD16A189@codespeak.net> Author: antocuni Date: Thu Aug 21 12:02:44 2008 New Revision: 57537 Modified: pypy/branch/oo-jit/pypy/jit/timeshifter/exception.py Log: don't use PtrRedBox with ootype! Modified: pypy/branch/oo-jit/pypy/jit/timeshifter/exception.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/timeshifter/exception.py (original) +++ pypy/branch/oo-jit/pypy/jit/timeshifter/exception.py Thu Aug 21 12:02:44 2008 @@ -24,8 +24,8 @@ null_exc_value = self.etrafo.c_null_evalue.value self.gv_null_exc_type = RGenOp.constPrebuiltGlobal(null_exc_type) self.gv_null_exc_value = RGenOp.constPrebuiltGlobal(null_exc_value) - - self._create_boxes(RGenOp) + self.null_exc_type_box = self.PtrRedBox(self.gv_null_exc_type) + self.null_exc_value_box = self.PtrRedBox(self.gv_null_exc_value) self.lazy_exception_path = lazy_exception_path def _freeze_(self): @@ -37,8 +37,8 @@ gv_evalue = self.genop_get_exc_value(builder) self.genop_set_exc_type (builder, self.gv_null_exc_type ) self.genop_set_exc_value(builder, self.gv_null_exc_value) - etypebox = rvalue.PtrRedBox( gv_etype ) - evaluebox = rvalue.PtrRedBox(gv_evalue) + etypebox = self.PtrRedBox( gv_etype ) + evaluebox = self.PtrRedBox(gv_evalue) etypebox .known_nonzero = known_occurred evaluebox.known_nonzero = known_occurred rtimeshift.setexctypebox (jitstate, etypebox) @@ -59,11 +59,9 @@ class LLTypeExceptionDesc(AbstractExceptionDesc): - - def _create_boxes(self, RGenOp): - self.null_exc_type_box = rvalue.PtrRedBox(self.gv_null_exc_type) - self.null_exc_value_box = rvalue.PtrRedBox(self.gv_null_exc_value) + PtrRedBox = rvalue.PtrRedBox + def genop_get_exc_type(self, builder): return builder.genop_getfield(self.exc_type_token, self.gv_excdata) @@ -82,10 +80,8 @@ class OOTypeExceptionDesc(AbstractExceptionDesc): - def _create_boxes(self, RGenOp): - # XXX: think more about exceptions - self.null_exc_type_box = rvalue.InstanceRedBox(self.gv_null_exc_type) - self.null_exc_value_box = rvalue.InstanceRedBox(self.gv_null_exc_value) + + PtrRedBox = rvalue.InstanceRedBox def genop_get_exc_type(self, builder): return builder.genop_oogetfield(self.exc_type_token, self.gv_excdata) From bgola at codespeak.net Thu Aug 21 13:41:56 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Thu, 21 Aug 2008 13:41:56 +0200 (CEST) Subject: [pypy-svn] r57543 - pypy/branch/2.5-features/pypy/interpreter Message-ID: <20080821114156.9DB9716A164@codespeak.net> Author: bgola Date: Thu Aug 21 13:41:55 2008 New Revision: 57543 Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py Log: w_retval won't be space.None Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py 
============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/generator.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/generator.py Thu Aug 21 13:41:55 2008 @@ -118,6 +118,6 @@ return space.w_None raise - if w_retval is not None or not space.is_w(w_retval, space.None): + if w_retval is not None: msg = "generator ignored GeneratorExit" raise OperationError(space.w_RuntimeError, space.wrap(msg)) From bgola at codespeak.net Thu Aug 21 14:05:25 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Thu, 21 Aug 2008 14:05:25 +0200 (CEST) Subject: [pypy-svn] r57547 - pypy/branch/2.5-features/lib-python Message-ID: <20080821120525.2F69016A09E@codespeak.net> Author: bgola Date: Thu Aug 21 14:05:20 2008 New Revision: 57547 Modified: pypy/branch/2.5-features/lib-python/conftest.py Log: adding some of 2.5 new tests to testmap dict Modified: pypy/branch/2.5-features/lib-python/conftest.py ============================================================================== --- pypy/branch/2.5-features/lib-python/conftest.py (original) +++ pypy/branch/2.5-features/lib-python/conftest.py Thu Aug 21 14:05:20 2008 @@ -448,6 +448,7 @@ RegrTest('test_complex.py', enabled=True, core=True), RegrTest('test_contains.py', enabled=True, dumbtest=1, core=True), + RegrTest('test_contextlib.py', enabled=True, core=True), RegrTest('test_cookie.py', enabled=False), RegrTest('test_cookielib.py', enabled=False), RegrTest('test_copy.py', enabled=True, core=True), @@ -462,6 +463,7 @@ RegrTest('test_dbm.py', enabled=False, dumbtest=1), RegrTest('test_decimal.py', enabled=True), RegrTest('test_decorators.py', enabled=True, core=True), + RegrTest('test_defaultdict.py', enabled=True), RegrTest('test_deque.py', enabled=True, core=True), RegrTest('test_descr.py', enabled=True, core=True, oldstyle=True, usemodules='_weakref'), RegrTest('test_descrtut.py', enabled=True, core=True, oldstyle=True), @@ -600,6 +602,7 @@ # seems to be the only one that invokes run_unittest # and is an unittest RegrTest('test_pep292.py', enabled=True), + RegrTest('test_pep352.py', enabled=True), RegrTest('test_pickle.py', enabled=True, core=True), RegrTest('test_pickletools.py', enabled=True, dumbtest=1, core=False), RegrTest('test_pkg.py', enabled=True, core=True), @@ -743,6 +746,7 @@ RegrTest('test_userlist.py', enabled=True, core=True), RegrTest('test_userstring.py', enabled=True, core=True), RegrTest('test_uu.py', enabled=False), + RegrTest('test_uuid.py', enabled=True), #rev 10840: 1 of 9 test fails RegrTest('test_warnings.py', enabled=True, core=True), @@ -752,6 +756,10 @@ RegrTest('test_whichdb.py', enabled=True), RegrTest('test_winreg.py', enabled=False), RegrTest('test_winsound.py', enabled=False), + RegrTest('test_with.py', enabled=True), + RegrTest('test_wsgiref.py', enabled=True), + RegrTest('test_xdrlib.py', enabled=True), + RegrTest('test_xml_etree.py', enabled=True), RegrTest('test_xmllib.py', enabled=False), RegrTest('test_xmlrpc.py', enabled=False), #rev 10840: 2 of 5 tests fail @@ -759,6 +767,7 @@ RegrTest('test_xpickle.py', enabled=False), RegrTest('test_xrange.py', enabled=True, core=True), RegrTest('test_zipfile.py', enabled=False, dumbtest=1), + RegrTest('test_zipfile64.py', enabled=False, dumbtest=1), RegrTest('test_zipimport.py', enabled=True, usemodules='zlib zipimport'), RegrTest('test_zlib.py', enabled=True, usemodules='zlib'), ] From antocuni at codespeak.net Thu Aug 21 15:28:39 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) 
Date: Thu, 21 Aug 2008 15:28:39 +0200 (CEST) Subject: [pypy-svn] r57553 - in pypy/branch/oo-jit/pypy: annotation rpython/ootypesystem Message-ID: <20080821132839.35E6916A14D@codespeak.net> Author: antocuni Date: Thu Aug 21 15:28:35 2008 New Revision: 57553 Modified: pypy/branch/oo-jit/pypy/annotation/model.py pypy/branch/oo-jit/pypy/annotation/unaryop.py pypy/branch/oo-jit/pypy/rpython/ootypesystem/rootype.py Log: rename the attribute "method" to "ootype" so that we can treat SomeOOStaticMeth polymorphically Modified: pypy/branch/oo-jit/pypy/annotation/model.py ============================================================================== --- pypy/branch/oo-jit/pypy/annotation/model.py (original) +++ pypy/branch/oo-jit/pypy/annotation/model.py Thu Aug 21 15:28:35 2008 @@ -576,7 +576,7 @@ class SomeOOStaticMeth(SomeObject): immutable = True def __init__(self, method): - self.method = method + self.ootype = method NUMBER = object() annotation_to_ll_map = [ @@ -594,7 +594,7 @@ if isinstance(s_val, SomeOOInstance): return s_val.ootype if isinstance(s_val, SomeOOStaticMeth): - return s_val.method + return s_val.ootype if isinstance(s_val, SomeOOClass): return s_val.ootype if isinstance(s_val, SomeInteriorPtr): Modified: pypy/branch/oo-jit/pypy/annotation/unaryop.py ============================================================================== --- pypy/branch/oo-jit/pypy/annotation/unaryop.py (original) +++ pypy/branch/oo-jit/pypy/annotation/unaryop.py Thu Aug 21 15:28:35 2008 @@ -738,7 +738,7 @@ raise Exception("keyword arguments to call to a low-level static method") info = 'argument to ll static method call' llargs = [annotation_to_lltype(s_arg, info)._defl() for s_arg in args_s] - v = m.method._example()(*llargs) + v = m.ootype._example()(*llargs) return ll_to_annotation(v) Modified: pypy/branch/oo-jit/pypy/rpython/ootypesystem/rootype.py ============================================================================== --- pypy/branch/oo-jit/pypy/rpython/ootypesystem/rootype.py (original) +++ pypy/branch/oo-jit/pypy/rpython/ootypesystem/rootype.py Thu Aug 21 15:28:35 2008 @@ -31,9 +31,9 @@ class __extend__(annmodel.SomeOOStaticMeth): def rtyper_makerepr(self, rtyper): - return OOStaticMethRepr(self.method) + return OOStaticMethRepr(self.ootype) def rtyper_makekey(self): - return self.__class__, self.method + return self.__class__, self.ootype class OOObjectRepr(Repr): From antocuni at codespeak.net Thu Aug 21 18:16:45 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Thu, 21 Aug 2008 18:16:45 +0200 (CEST) Subject: [pypy-svn] r57563 - in pypy/branch/oo-jit/pypy: annotation jit/codegen/cli jit/timeshifter Message-ID: <20080821161645.A813A16A118@codespeak.net> Author: antocuni Date: Thu Aug 21 18:16:43 2008 New Revision: 57563 Modified: pypy/branch/oo-jit/pypy/annotation/builtin.py pypy/branch/oo-jit/pypy/annotation/model.py pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py Log: various rpython fixes, trying to translate targettiny2 -b cli --jit Modified: pypy/branch/oo-jit/pypy/annotation/builtin.py ============================================================================== --- pypy/branch/oo-jit/pypy/annotation/builtin.py (original) +++ pypy/branch/oo-jit/pypy/annotation/builtin.py Thu Aug 21 18:16:43 2008 @@ -577,7 +577,7 @@ if TYPE is ootype.Object: return SomeOOObject() elif TYPE is ootype.Class: - return SomeOOClass(TYPE) + return SomeOOClass(ootype.ROOT) # ??? 
elif isinstance(TYPE, ootype.StaticMethod): return SomeOOStaticMeth(TYPE) elif isinstance(TYPE, ootype.OOType): Modified: pypy/branch/oo-jit/pypy/annotation/model.py ============================================================================== --- pypy/branch/oo-jit/pypy/annotation/model.py (original) +++ pypy/branch/oo-jit/pypy/annotation/model.py Thu Aug 21 18:16:43 2008 @@ -596,6 +596,8 @@ if isinstance(s_val, SomeOOStaticMeth): return s_val.ootype if isinstance(s_val, SomeOOClass): + return ootype.Class + if isinstance(s_val, SomeOOObject): return s_val.ootype if isinstance(s_val, SomeInteriorPtr): p = s_val.ll_ptrtype Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py Thu Aug 21 18:16:43 2008 @@ -169,7 +169,7 @@ return cliobj.GetType() def getobj(self): - return self.obj + return dotnet.cast_to_native_object(self.obj) def load(self, meth): assert False, 'XXX' @@ -343,6 +343,9 @@ builder = graph.branches[0] return builder, graph.gv_entrypoint, graph.inputargs_gv[:] + def replay(self, label): + raise NotImplementedError + class GraphInfo: def __init__(self): @@ -621,6 +624,18 @@ op = ops.SetField(self.meth, gv_obj, gv_value, fieldtoken) self.appendop(op) + def genop_oosend(self, methtoken, gv_self, args_gv): + raise NotImplementedError + + def genop_oononnull(self, gv_obj): + raise NotImplementedError + + def genop_ooisnull(self, gv_obj): + raise NotImplementedError + + def genop_new(self, gv_obj): + raise NotImplementedError + def enter_next_block(self, args_gv): seen = {} for i in range(len(args_gv)): Modified: pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py (original) +++ pypy/branch/oo-jit/pypy/jit/timeshifter/rcontainer.py Thu Aug 21 18:16:43 2008 @@ -59,7 +59,7 @@ class AbstractStructTypeDesc(object): __metaclass__ = cachedtype - VirtualStructCls = None # patched later with VirtualStruct + #VirtualStructCls = None # patched later with VirtualStruct _attrs_ = """TYPE PTRTYPE name firstsubstructdesc arrayfielddesc @@ -79,7 +79,7 @@ firstsubstructdesc = None materialize = None StructFieldDesc = None - PtrRedBox = None + #PtrRedBox = None firstfielddesc = 0 def __init__(self, RGenOp, TYPE): @@ -328,7 +328,7 @@ class VirtualizableStructTypeDesc(StructTypeDesc): - VirtualStructCls = None # patched later with VirtualizableStruct + #VirtualStructCls = None # patched later with VirtualizableStruct _attrs_ = """redirected_fielddescs redirected @@ -648,7 +648,7 @@ __metaclass__ = cachedtype _attrs_ = 'structdesc' - PtrRedBox = None + #PtrRedBox = None allow_void = False virtualizable = False gv_default = None From bgola at codespeak.net Thu Aug 21 20:50:11 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Thu, 21 Aug 2008 20:50:11 +0200 (CEST) Subject: [pypy-svn] r57566 - pypy/branch/2.5-features/pypy/interpreter Message-ID: <20080821185011.A9D5816A135@codespeak.net> Author: bgola Date: Thu Aug 21 20:50:08 2008 New Revision: 57566 Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py Log: intmask() on next_instr. 
translating pypy-c (2.5-features) works Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/generator.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/generator.py Thu Aug 21 20:50:08 2008 @@ -1,6 +1,7 @@ from pypy.interpreter.error import OperationError from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.gateway import NoneNotWrapped +from pypy.rlib.rarithmetic import intmask class GeneratorIterator(Wrappable): @@ -99,7 +100,7 @@ ec = space.getexecutioncontext() next_instr = self.frame.handle_operation_error(ec, exception) - self.frame.last_instr = next_instr - 1 + self.frame.last_instr = intmask(next_instr - 1) return self.send_ex(space.w_None, True) From arigo at codespeak.net Fri Aug 22 10:28:49 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Fri, 22 Aug 2008 10:28:49 +0200 (CEST) Subject: [pypy-svn] r57571 - pypy/dist/pypy/module/itertools Message-ID: <20080822082849.1887616A1EB@codespeak.net> Author: arigo Date: Fri Aug 22 10:28:47 2008 New Revision: 57571 Modified: pypy/dist/pypy/module/itertools/interp_itertools.py Log: Sorry, I did not run all the tests. Modified: pypy/dist/pypy/module/itertools/interp_itertools.py ============================================================================== --- pypy/dist/pypy/module/itertools/interp_itertools.py (original) +++ pypy/dist/pypy/module/itertools/interp_itertools.py Fri Aug 22 10:28:47 2008 @@ -443,10 +443,6 @@ self.identity_fun = (self.space.is_w(w_fun, space.w_None)) self.w_fun = w_fun - if len(args_w) == 0: - raise OperationError(space.w_TypeError, - space.wrap("imap() must have at least two arguments")) - iterators_w = [] i = 0 for iterable_w in args_w: @@ -476,6 +472,9 @@ def W_IMap___new__(space, w_subtype, w_fun, args_w): + if len(args_w) == 0: + raise OperationError(space.w_TypeError, + space.wrap("imap() must have at least two arguments")) return space.wrap(W_IMap(space, w_fun, args_w)) W_IMap.typedef = TypeDef( @@ -510,6 +509,14 @@ class W_IZip(W_IMap): _error_name = "izip" + def next_w(self): + # argh. izip(*args) is almost like imap(None, *args) except + # that the former needs a special case for len(args)==0 + # while the latter just raises a TypeError in this situation. 
+ if len(self.iterators_w) == 0: + raise OperationError(self.space.w_StopIteration, self.space.w_None) + return W_IMap.next_w(self) + def W_IZip___new__(space, w_subtype, args_w): return space.wrap(W_IZip(space, space.w_None, args_w)) From bgola at codespeak.net Fri Aug 22 21:34:07 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Fri, 22 Aug 2008 21:34:07 +0200 (CEST) Subject: [pypy-svn] r57592 - pypy/branch/2.5-features/pypy/lib Message-ID: <20080822193407.1E2D316A1D7@codespeak.net> Author: bgola Date: Fri Aug 22 21:34:05 2008 New Revision: 57592 Modified: pypy/branch/2.5-features/pypy/lib/collections.py Log: bugfix (2.5.1/test/test_defaultdict) Modified: pypy/branch/2.5-features/pypy/lib/collections.py ============================================================================== --- pypy/branch/2.5-features/pypy/lib/collections.py (original) +++ pypy/branch/2.5-features/pypy/lib/collections.py Fri Aug 22 21:34:05 2008 @@ -323,7 +323,7 @@ def __missing__(self, key): # from defaultdict docs if self.default_factory is None: - raise KeyError((key,)) + raise KeyError(key) self[key] = value = self.default_factory() return value From exarkun at codespeak.net Sat Aug 23 15:26:05 2008 From: exarkun at codespeak.net (exarkun at codespeak.net) Date: Sat, 23 Aug 2008 15:26:05 +0200 (CEST) Subject: [pypy-svn] r57601 - pypy/build/bot Message-ID: <20080823132605.7190E16A27A@codespeak.net> Author: exarkun Date: Sat Aug 23 15:26:02 2008 New Revision: 57601 Modified: pypy/build/bot/pypybuilders.py Log: put builds in a unique location Modified: pypy/build/bot/pypybuilders.py ============================================================================== --- pypy/build/bot/pypybuilders.py (original) +++ pypy/build/bot/pypybuilders.py Sat Aug 23 15:26:02 2008 @@ -141,7 +141,7 @@ uploadSuffix = "".join(translationArguments + targetArguments) self.addStep(FileUpload, slavesrc="pypy-src/pypy/translator/goal/" + self.executable, - masterdest="public_html/builds/pypy-c-" + uploadSuffix) + masterdest="public_html/builds/" + self.executable + "-" + uploadSuffix) # if pytestArguments is not None: # self.addStep( From exarkun at codespeak.net Sat Aug 23 15:26:35 2008 From: exarkun at codespeak.net (exarkun at codespeak.net) Date: Sat, 23 Aug 2008 15:26:35 +0200 (CEST) Subject: [pypy-svn] r57602 - pypy/build/bot Message-ID: <20080823132635.B0EF816A27A@codespeak.net> Author: exarkun Date: Sat Aug 23 15:26:34 2008 New Revision: 57602 Modified: pypy/build/bot/master.cfg Log: replace --faassen with -O3 Modified: pypy/build/bot/master.cfg ============================================================================== --- pypy/build/bot/master.cfg (original) +++ pypy/build/bot/master.cfg Sat Aug 23 15:26:34 2008 @@ -27,9 +27,9 @@ 'change_source': [], 'schedulers': [Nightly("nightly", [ "pypy-c-allworkingmodules-32", - "pypy-c-allworkingmodules-faassen-32", - "pypy-c-allworkingmodules-faassen-64", - "pypy-c-allworkingmodules-faassen-winxp32"], + "pypy-c-allworkingmodules-O3-32", + "pypy-c-allworkingmodules-O3-64", + "pypy-c-allworkingmodules-winxp32"], hour=19)], 'status': [status], @@ -44,20 +44,20 @@ "factory": POSIXPyPyBuildFactory( ["--boxed"], [], ["--allworkingmodules"])}, - {"name": "pypy-c-allworkingmodules-faassen-32", + {"name": "pypy-c-allworkingmodules-O3-32", "slavenames": ["charm"], "builddir": "pypy-c-allworkingmodules-faassen-32", "factory": POSIXPyPyBuildFactory( - None, ["--gc=hybrid", "--gcrootfinder=asmgcc"], - ["--allworkingmodules", "--faassen"])}, + None, ["--gc=hybrid", 
"--gcrootfinder=asmgcc", "-O3"], + ["--allworkingmodules"])}, - {"name": "pypy-c-allworkingmodules-faassen-64", + {"name": "pypy-c-allworkingmodules-O3-64", "slavenames": ["linux-dvs0"], "builddir": "pypy-c-allworkingmodules-faassen-64", "factory": POSIXPyPyBuildFactory( - ["--boxed"], [], ["--allworkingmodules", "--faassen"])}, + ["--boxed"], ["-O3"], ["--allworkingmodules"])}, - {"name": "pypy-c-allworkingmodules-faassen-winxp32", + {"name": "pypy-c-allworkingmodules-winxp32", "slavenames": ["winxp32-py2.5"], "builddir": "pypy-c-allworkingmodules-faassen-winxp32", "factory": WindowsPyPyBuildFactory( From bgola at codespeak.net Sat Aug 23 16:35:10 2008 From: bgola at codespeak.net (bgola at codespeak.net) Date: Sat, 23 Aug 2008 16:35:10 +0200 (CEST) Subject: [pypy-svn] r57605 - in pypy/branch/2.5-features: lib-python/modified-2.5.1/test pypy/interpreter pypy/interpreter/test pypy/lib Message-ID: <20080823143510.349F916A29D@codespeak.net> Author: bgola Date: Sat Aug 23 16:35:09 2008 New Revision: 57605 Added: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/exception_hierarchy.txt - copied unchanged from r57542, pypy/branch/2.5-features/lib-python/2.5.1/test/exception_hierarchy.txt pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_pep352.py - copied, changed from r57542, pypy/branch/2.5-features/lib-python/2.5.1/test/test_pep352.py Modified: pypy/branch/2.5-features/pypy/interpreter/error.py pypy/branch/2.5-features/pypy/interpreter/generator.py pypy/branch/2.5-features/pypy/interpreter/test/test_raise.py pypy/branch/2.5-features/pypy/lib/_exceptions.py Log: bugfixes (PEP352) Copied: pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_pep352.py (from r57542, pypy/branch/2.5-features/lib-python/2.5.1/test/test_pep352.py) ============================================================================== --- pypy/branch/2.5-features/lib-python/2.5.1/test/test_pep352.py (original) +++ pypy/branch/2.5-features/lib-python/modified-2.5.1/test/test_pep352.py Sat Aug 23 16:35:09 2008 @@ -119,7 +119,8 @@ def tearDown(self): warnings.filters = self._filters[:] - def test_raise_classic(self): + def XXXtest_raise_classic(self): + # PyPy classes are new-style classes by default class ClassicClass: pass try: Modified: pypy/branch/2.5-features/pypy/interpreter/error.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/error.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/error.py Sat Aug 23 16:35:09 2008 @@ -150,8 +150,9 @@ if space.full_exceptions: while space.is_true(space.isinstance(w_type, space.w_tuple)): w_type = space.getitem(w_type, space.wrap(0)) - - if space.is_true(space.abstract_isclass(w_type)): + if space.full_exceptions and ( + space.is_true(space.abstract_isclass(w_type)) and + space.is_true(space.issubtype(w_type, space.w_BaseException))): if space.is_w(w_value, space.w_None): # raise Type: we assume we have to instantiate Type w_value = space.call_function(w_type) @@ -177,25 +178,22 @@ space.w_str): space.warn("raising a string exception is deprecated", space.w_DeprecationWarning) - else: - # raise X: we assume that X is an already-built instance + elif space.full_exceptions and space.is_true(space.isinstance(w_type, + space.w_BaseException)): if not space.is_w(w_value, space.w_None): raise OperationError(space.w_TypeError, space.wrap("instance exception may not " "have a separate value")) w_value = w_type w_type = space.abstract_getclass(w_value) + + else: if 
space.full_exceptions: - # for the sake of language consistency we should not allow - # things like 'raise 1', but it is probably fine (i.e. - # not ambiguous) to allow them in the explicit form - # 'raise int, 1' - if (space.findattr(w_value, space.wrap('__dict__')) is None and - space.findattr(w_value, space.wrap('__slots__')) is None): - msg = ("raising built-in objects can be ambiguous, " - "use 'raise type, value' instead") - raise OperationError(space.w_TypeError, space.wrap(msg)) + msg = ("exceptions must be classes, or instances," + "or strings (deprecated) not %s" % (w_type.typedef.name)) + raise OperationError(space.w_TypeError, space.wrap(msg)) + self.w_type = w_type self.w_value = w_value Modified: pypy/branch/2.5-features/pypy/interpreter/generator.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/generator.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/generator.py Sat Aug 23 16:35:09 2008 @@ -79,27 +79,11 @@ msg = "throw() third argument must be a traceback object" tb = check_traceback(space, w_tb, msg) - if space.is_true(space.abstract_isclass(w_type)) and \ - space.is_true(space.issubtype(w_type, space.w_BaseException)): - exception = OperationError(w_type, w_val, tb) - - elif space.is_true(space.isinstance(w_type, space.w_BaseException)): - if not space.is_w(w_val, space.w_None): - msg = "instance exception may not have a separate value" - raise OperationError(space.w_TypeError, space.wrap(msg)) - else: - exception = OperationError(w_type.getclass(space), w_val, tb) - - else: - if not space.is_true(space.isinstance(w_type, space.w_str)): - msg = "exceptions must be classes, or instances, not %s" % ( - w_type.typedef.name) - raise OperationError(space.w_TypeError, space.wrap(msg)) - else: - exception = OperationError(w_type, w_val, tb) + operr = OperationError(w_type, w_val, tb) + operr.normalize_exception(space) ec = space.getexecutioncontext() - next_instr = self.frame.handle_operation_error(ec, exception) + next_instr = self.frame.handle_operation_error(ec, operr) self.frame.last_instr = intmask(next_instr - 1) return self.send_ex(space.w_None, True) Modified: pypy/branch/2.5-features/pypy/interpreter/test/test_raise.py ============================================================================== --- pypy/branch/2.5-features/pypy/interpreter/test/test_raise.py (original) +++ pypy/branch/2.5-features/pypy/interpreter/test/test_raise.py Sat Aug 23 16:35:09 2008 @@ -1,4 +1,4 @@ - +import py.test class AppTestRaise: def test_arg_as_string(self): @@ -107,32 +107,25 @@ raises(StopIteration, f) def test_userclass(self): + # PyPy classes are new-style so can't be raised + class A: def __init__(self, x=None): self.x = x - class B(A): - pass - try: - raise A - except A, a: - assert a.x == None - try: - raise A(42) - except A, a: - assert a.x == 42 - try: - raise A, 42 - except A, a: - assert a.x == 42 - try: - raise B - except A, b: - assert b.__class__ == B - try: - raise A, B(42) - except B, b: - assert b.__class__ == B - assert b.x == 42 + + def f(): + try: + raise A + except A, a: + assert a.x == None + raises(TypeError, f) + + def f(): + try: + raise A(42) + except A, a: + assert a.x == 42 + raises(TypeError, f) def test_it(self): C = _classobj('C', (), {}) Modified: pypy/branch/2.5-features/pypy/lib/_exceptions.py ============================================================================== --- pypy/branch/2.5-features/pypy/lib/_exceptions.py (original) +++ 
pypy/branch/2.5-features/pypy/lib/_exceptions.py Sat Aug 23 16:35:09 2008 @@ -81,6 +81,10 @@ def __init__(self, *args): self.args = args + if len(args) == 1: + self.message = args[0] + else: + self.message = "" def __str__(self): args = self.args @@ -125,6 +129,7 @@ """Unicode translation error.""" def __init__(self, *args): + BaseException.__init__(self, *args) argc = len(args) self.args = args # modified: always assign args, no error check if argc == 4: @@ -186,6 +191,7 @@ """Base class for I/O related errors.""" def __init__(self, *args): + BaseException.__init__(self, *args) argc = len(args) self.args = args self.errno = None @@ -232,9 +238,6 @@ class NameError(StandardError): """Name not found globally.""" -class OverflowWarning(Warning): - """Base class for warnings about numeric overflow. Won't exist in Python 2.5.""" - class IOError(EnvironmentError): """I/O operation failed.""" @@ -248,6 +251,7 @@ text = None def __init__(self, *args): + BaseException.__init__(self, *args) argc = len(args) self.args = args if argc >= 1: @@ -296,6 +300,7 @@ """Request to exit from the interpreter.""" def __init__(self, *args): + BaseException.__init__(self, *args) argc = len(args) if argc == 0: self.code = None From cami at codespeak.net Sun Aug 24 11:46:58 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Sun, 24 Aug 2008 11:46:58 +0200 (CEST) Subject: [pypy-svn] r57608 - in pypy/dist/pypy/lang/gameboy: . test Message-ID: <20080824094658.67AB616842A@codespeak.net> Author: cami Date: Sun Aug 24 11:46:54 2008 New Revision: 57608 Modified: pypy/dist/pypy/lang/gameboy/cpu.py pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/test/test_cpu.py pypy/dist/pypy/lang/gameboy/test/test_cpu_2.py pypy/dist/pypy/lang/gameboy/video.py Log: extended some tests due to missed cases Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Sun Aug 24 11:46:54 2008 @@ -425,6 +425,7 @@ self.double_register_inverse_call(CPUFetchCaller(self), register) def push(self, data, use_cycles=True): + print hex(data) # Stack, 2 cycles self.sp.dec(use_cycles) # 2 cycles self.memory.write(self.sp.get(use_cycles), data) Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Sun Aug 24 11:46:54 2008 @@ -74,6 +74,8 @@ class VideoDriverImplementation(VideoDriver): + COLOR_MAP = [0xFFFFFF, 0xCCCCCC, 0x666666, 0x000000] + def __init__(self): VideoDriver.__init__(self) self.create_screen() @@ -90,23 +92,23 @@ RSDL.Flip(self.screen) def draw_pixels(self): - pass + #pass str = "" for y in range(self.height): - str += "\n" + # str += "\n" for x in range(self.width): - if y%2 == 0 or True: - px = self.get_pixel_color(x, y) - str += ["#", "%", "+", " ", " "][px] - #RSDL_helper.set_pixel(self.screen, x, y, self.get_pixel_color(x, y)) + #if y%2 == 0 or True: + # px = self.get_pixel_color(x, y) + # str += ["#", "%", "+", " ", " "][px] + RSDL_helper.set_pixel(self.screen, x, y, self.get_pixel_color(x, y)) #print str; - def pixel_map(self, x, y): - return [0xFFFFFF, 0xCCCCCC, 0x666666, 0x000000][self.get_pixel_color(x, y)] - - def get_pixel_color(self, x, y): - return self.pixels[x+self.width*y] - #return 
self.map[self.pixels[x+self.width*y]] + @specialize.arg(3) + def get_pixel_color(self, x, y, string=False): + if string: + return self.pixels[x+self.width*y] + else: + return self.pixels[x+self.width*y] def pixel_to_byte(self, int_number): return struct.pack("B", (int_number) & 0xFF, Modified: pypy/dist/pypy/lang/gameboy/test/test_cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_cpu.py Sun Aug 24 11:46:54 2008 @@ -301,10 +301,19 @@ # CPU Helper Methods Testing --------------------------------------------------- +def test_fetch(): + cpu = get_cpu() + cpu.pc.set(100) + prepare_for_fetch(cpu, 0x12) + assert cpu.fetch() == 0x12 + assert_default_registers(cpu, pc=100+1) + def test_fetch_double_address(): cpu = get_cpu() + cpu.pc.set(100) prepare_for_fetch(cpu, 0x12, 0x34) assert cpu.fetch_double_address() == 0x1234 + assert_default_registers(cpu, pc=100+2) def test_push_double_register(): @@ -320,7 +329,7 @@ def test_call(): cpu = get_cpu() cpu.pc.set(0x1234) - cpu.sp.set(0x03) + cpu.sp.set(3) cpu.call(0x4321) assert_default_registers(cpu, sp=1, pc=0x4321) assert cpu.pop() == 0x34 @@ -1331,13 +1340,29 @@ cpu.f.c_flag = value # call_nnnn -def test_0xCD_call(): +def test_unconditional_call(): cpu = get_cpu() fetchValue = 0x1234 - cpu.sp.set(fetchValue) - prepare_for_fetch(cpu, fetchValue) + cpu.pc.set(0x2244) + cpu.sp.set(3) + prepare_for_fetch(cpu, 0x12, 0x34) + cpu.unconditional_call() + assert_default_registers(cpu, pc=fetchValue, sp=1) + # two fetches happened in between + assert cpu.pop() == 0x44+2 + assert cpu.pop() == 0x22 + +def test_0xCD_unconditional_call(): + cpu = get_cpu(new=True) + fetchValue = 0x1234 + cpu.pc.set(0x2244) + cpu.sp.set(3) + prepare_for_fetch(cpu, 0x12, 0x34) cycle_test(cpu, 0xCD, 6) - assert_default_registers(cpu, pc=fetchValue, sp=fetchValue-2) + assert_default_registers(cpu, pc=fetchValue, sp=1) + # two fetches happened in between + assert cpu.pop() == 0x44+2 + assert cpu.pop() == 0x22 # push_BC to push_AF def test_0xC5_to_0xF5_push_double_register(): Modified: pypy/dist/pypy/lang/gameboy/test/test_cpu_2.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_cpu_2.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_cpu_2.py Sun Aug 24 11:46:54 2008 @@ -290,8 +290,13 @@ def test_and_a(): cpu = get_cpu() - cpu.f.set(0xFF) cpu.sp.set(0xFF) + cpu.f.set(0xFF) + method_value_call(cpu, CPU.and_a, 0x00) + assert cpu.a.get() == 0x00 + assert_flags(cpu, z=True, n=False, h=True, c=False) + + cpu.f.set(0x00) method_value_call(cpu, CPU.and_a, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=True, c=False) Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Sun Aug 24 11:46:54 2008 @@ -6,8 +6,6 @@ from pypy.lang.gameboy import constants from pypy.lang.gameboy.ram import iMemory from pypy.lang.gameboy.cpu import process_2_complement -import pdb - # ----------------------------------------------------------------------------- class VideoCallWraper(object): @@ -114,7 +112,7 @@ # window position self.window_x = 0 self.window_y = 0 - self.window_line_y = 0 + self.window_line_y = 0 self.background_palette = 0xFC self.object_palette_0 = 0xFF self.object_palette_1 = 0xFF @@ -124,11 +122,11 @@ 
self.vblank = True self.dirty = True - self.vram = [0]*constants.VRAM_SIZE - # Object Attribute Memor - self.oam = [0]*constants.OAM_SIZE + self.vram = [0] * constants.VRAM_SIZE + # Object Attribute Memory + self.oam = [0] * constants.OAM_SIZE - self.line = [0]* (8+160+8) + self.line = [0] * (8 + 160 + 8) self.objects = [0] * constants.OBJECTS_PER_LINE self.palette = [0] * 1024 @@ -147,7 +145,7 @@ elif address == constants.SCX: self.set_scroll_x(data) elif address == constants.LY: - # Read Online_y + # Read Only: line_y pass elif address == constants.LYC: self.set_line_y_compare(data) @@ -178,7 +176,7 @@ self.vram[address - constants.VRAM_ADDR] = data & 0xFF def read(self, address): - address = int(address) + print address, hex(address) if address == constants.LCDC: return self.get_control() elif address == constants.STAT: @@ -219,17 +217,14 @@ return self.cycles def emulate(self, ticks): - print "python: video emulating" ticks = int(ticks) if (self.control & 0x80) != 0: self.cycles -= ticks self.consume_cycles() - print "python: video emulating DONE" def consume_cycles(self): while self.cycles <= 0: mode = self.stat & 0x03 - print mode if mode == 0: self.emulate_hblank() elif mode == 1: @@ -252,7 +247,6 @@ self.control = data def reset_control(self, data): - print "python reset control" # NOTE: do not reset constants.LY=LYC flag (bit 2) of the STAT register (Mr. Do!) self.line_y = 0 self.stat = (self.stat & 0xFC) @@ -268,7 +262,6 @@ return 0x80 | self.stat def set_status(self, data): - print "python set_status" self.stat = (self.stat & 0x87) | (data & 0x78) self.set_status_bug() @@ -378,48 +371,40 @@ self.interrupt.raise_interrupt(constants.LCD) # mode setting ----------------------------------------------------------- - + def set_mode_3_begin(self): - print "set_mode_3_begin" self.stat = (self.stat & 0xFC) | 0x03 self.cycles += constants.MODE_3_BEGIN_TICKS self.transfer = True def set_mode_3_end(self): - print "set_mode_3_end" self.stat = (self.stat & 0xFC) | 0x03 self.cycles += constants.MODE_3_END_TICKS self.transfer = False - + def set_mode_0(self): - print "set_mode_0" self.stat = (self.stat & 0xFC) self.cycles += constants.MODE_0_TICKS self.h_blank_interrupt_check() - + def set_mode_2(self): - print "set_mode_2" self.stat = (self.stat & 0xFC) | 0x02 self.cycles += constants.MODE_2_TICKS self.oam_interrupt_check() - + def set_mode_1_begin(self): - print "set_mode_1_begin" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_BEGIN_TICKS def set_mode_1(self): - print "set_mode_1" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_TICKS def set_mode_1_between(self): - print "set_mode_1_between" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_TICKS - constants.MODE_1_BEGIN_TICKS def set_mode_1_end(self): - print "set_mode_1_end" self.stat = (self.stat & 0xFC) | 0x01 self.cycles += constants.MODE_1_END_TICKS @@ -614,7 +599,7 @@ if (self.control & 0x10) != 0: tile = self.vram[tile_map] else: - tile = (process_2_complement(self.vram[tile_map]) ^ 0x80) & 0xFF + tile = (self.vram[tile_map] ^ 0x80) & 0xFF self.draw_tile(x, tile_data + (tile << 4)) tile_map = (tile_map & 0x1FE0) + ((tile_map + 1) & 0x001F) x += 8 @@ -707,8 +692,8 @@ color = (self.object_palette_1 >> ((((pattern >> 4) & 0x02) +\ ((pattern >> 1) & 0x01)) << 1)) & 0x03 index = ((pattern & 0x30) << 4) + (pattern & 0x0F) - #self.palette[index] = constants.COLOR_MAP[color] - self.palette[index] = color + self.palette[index] = constants.COLOR_MAP[color] + #self.palette[index] = 
color self.dirty = False # ------------------------------------------------------------------------------ From cami at codespeak.net Sun Aug 24 15:10:44 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Sun, 24 Aug 2008 15:10:44 +0200 (CEST) Subject: [pypy-svn] r57610 - in pypy/dist/pypy/lang/gameboy: . test Message-ID: <20080824131044.72B20169F7D@codespeak.net> Author: cami Date: Sun Aug 24 15:10:41 2008 New Revision: 57610 Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py pypy/dist/pypy/lang/gameboy/video.py Log: added video control register added test file Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_video.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_video.py Sun Aug 24 15:10:41 2008 @@ -29,7 +29,7 @@ def test_reset(): video = get_video() assert video.cycles == constants.MODE_2_TICKS - assert video.control == 0x91 + assert video.control.read() == 0x91 assert video.stat == 2 assert video.line_y == 0 assert video.line_y_compare == 0 @@ -56,8 +56,7 @@ def test_read_write_properties(): video = get_video() - checks = [(0xFF40, "control"), - (0xFF42, "scroll_y"), + checks = [(0xFF42, "scroll_y"), (0xFF43, "scroll_x"), #(0xFF44, "line_y"), read only (0xFF45, "line_y_compare"), @@ -79,6 +78,14 @@ assert video.read(address) == value counted_value = (counted_value + 1 ) % 0xFF +def test_read_write_control(): + video = get_video() + (0xFF40, "control") + value = 0x2 + video.write(0xFF40, value) + assert video.control.read() == value + assert video.read(0xFF40) == value + def test_set_status(): video = get_video() value = 0x95 @@ -87,7 +94,7 @@ video.write(0xFF41, value) assert video.stat == (valueb & 0x87) | (value & 0x78) - video.control = 0x80 + video.control.write(0x80) video.stat = 0x01 video.write(0xFF41, 0x01) assert video.interrupt.lcd.is_pending() @@ -99,28 +106,28 @@ video.write(0xFF45, value) assert video.line_y_compare == value - video.control = 0x80 + video.control.write(0x80) video.line_y = value -1 video.stat = 0xFF video.write(0xFF45, value) assert video.stat == 0xFB assert video.interrupt.lcd.is_pending() == False - video.control = 0x80 + video.control.write(0x80) video.line_y = 0xF6 video.stat = 0x04 video.write(0xFF45, value) assert video.stat == 0x04 assert video.interrupt.lcd.is_pending() == False - video.control = 0x80 + video.control.write(0x80) video.line_y = 0xF6 video.stat = 0x00 video.write(0xFF45, value) assert video.stat == 0x04 assert video.interrupt.lcd.is_pending() == False - video.control = 0x80 + video.control.write(0x80) video.line_y = 0xF6 video.stat = 0x40 video.write(0xFF45, value) @@ -132,30 +139,30 @@ def test_control(): video = get_video() - video.control = 0x80 + video.control.write(0x80) video.window_line_y = 1 video.write(0xFF40, 0x80) - assert video.control == 0x80 + assert video.control.read() == 0x80 assert video.window_line_y == 1 def test_control_window_draw_skip(): video = get_video() - video.control = 0x80 + video.control.write(0x80) video.window_y = 0 video.line_y = 1 video.window_line_y = 0 video.write(0xFF40, 0x80+0x20) - assert video.control == 0x80+0x20 + assert video.control.read() == 0x80+0x20 assert video.window_line_y == 144 def test_control_reset1(): video = get_video() - video.control = 0 + video.control.write(0) video.stat = 0x30 video.line_y = 1 video.display = True video.write(0xFF40, 0x80) - assert video.control == 0x80 + assert video.control.read() == 0x80 assert 
video.stat == 0x30 + 0x02 assert video.cycles == constants.MODE_2_TICKS assert video.line_y == 0 @@ -163,12 +170,12 @@ def test_control_reset2(): video = get_video() - video.control = 0x80 + video.control.write(0x80) video.stat = 0x30 video.line_y = 1 video.display = True video.write(0xFF40, 0x30) - assert video.control == 0x30 + assert video.control.read() == 0x30 assert video.stat == 0x30 assert video.cycles == constants.MODE_1_TICKS assert video.line_y == 0 Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Sun Aug 24 15:10:41 2008 @@ -31,7 +31,7 @@ # ----------------------------------------------------------------------------- -def VideoStatus(object): +class VideoControl(object): # used for enabled or disabled window or background # Bit 7 - LCD Display Enable (0=Off, 1=On) # Bit 6 - Window Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) @@ -42,41 +42,41 @@ # Bit 1 - OBJ (Sprite) Display Enable (0=Off, 1=On) # Bit 0 - BG Display (for CGB see below) (0=Off, 1=On) - def __init__(self, video): - self.video = video + def __init__(self): self.reset() def reset(self): - self.mode = 0x02 - self.lcd_enable = False - self.window_tile_map_select = False - self.window_enable = False - self.background_and_window_tile_data_select = False - self.background_tile_map_select = False - self.big_sprite_size = False - self.sprite_display_enable = False - self.background_enable = False - #Coincidence Flag (0:LYC<>LY, 1:LYC=LY) + self.lcd_enabled = True + self.window_upper_tile_map_selected = False + self.window_enabled = False + self.background_and_window_lower_tile_data_selected = True + self.background_upper_tile_map_selected = False + self.big_sprite_size_selected = False + self.sprite_display_enabled = False + self.background_enabled = True def read(self): value = 0 - value += int(self.lyc_ly_coincidence) << 7 - value += int(self.h_blank_interrupt) << 6 - value += int(self.oam_interrupt) << 5 - value += int(self.h_blank_interrupt) << 4 - value += int(self.vblank_interrupt) << 3 - value += int(self.coincidence_flag) << 2 - value += self.mode & 0x03 + value += int(self.lcd_enabled) << 7 + value += int(self.window_upper_tile_map_selected) << 6 + value += int(self.window_enabled) << 5 + value += int(self.background_and_window_lower_tile_data_selected) << 4 + value += int(self.background_upper_tile_map_selected) << 3 + value += int(self.big_sprite_size_selected) << 2 + value += int(self.sprite_display_enabled) << 1 + value += int(self.background_enabled) return value def write(self, value): - self.lyc_ly_coincidence = bool(value & (1 << 7)) - self.h_blank_interrupt = bool(value & (1 << 6)) - self.oam_interrupt = bool(value & (1 << 5)) - self.h_blank_interrupt = bool(value & (1 << 4)) - self.vblank_interrupt = bool(value & (1 << 3)) - self.coincidence_flag = bool(value & (1 << 2)) - self.mode = value & 0x03 + self.lcd_enabled = bool(value & (1 << 7)) + self.window_upper_tile_map_selected = bool(value & (1 << 6)) + self.window_enabled = bool(value & (1 << 5)) + self.background_and_window_lower_tile_data_selected = \ + bool(value & (1 << 4)) + self.background_upper_tile_map_selected = bool(value & (1 << 3)) + self.big_sprite_size_selected = bool(value & (1 << 2)) + self.sprite_display_enabled = bool(value & (1 << 1)) + self.background_enabled = bool(value & (1 << 0)) def get_bg_tile_data_address(self): pass @@ -87,9 +87,10 @@ def 
__init__(self, video_driver, interrupt, memory): assert isinstance(video_driver, VideoDriver) - self.driver = video_driver - self.interrupt = interrupt - self.memory = memory + self.driver = video_driver + self.interrupt = interrupt + self.control = VideoControl() + self.memory = memory self.reset() def get_frame_skip(self): @@ -100,7 +101,7 @@ def reset(self): self.cycles = constants.MODE_2_TICKS - self.control = 0x91 + self.control.reset() self.stat = 2 self.line_y = 0 self.line_y_compare = 0 @@ -218,7 +219,7 @@ def emulate(self, ticks): ticks = int(ticks) - if (self.control & 0x80) != 0: + if self.control.lcd_enabled: self.cycles -= ticks self.consume_cycles() @@ -235,16 +236,17 @@ self.emulate_transfer() def get_control(self): - return self.control + return self.control.read() def set_control(self, data): - if (self.control & 0x80) != (data & 0x80): + if (self.control.read() & 0x80) != (data & 0x80): self.reset_control(data) # don't draw window if it was not enabled and not being drawn before - if (self.control & 0x20) == 0 and (data & 0x20) != 0 and \ + if not self.control.window_enabled and \ + (data & 0x20) != 0 and \ self.window_line_y == 0 and self.line_y > self.window_y: - self.window_line_y = 144 - self.control = data + self.window_line_y = 144 + self.control.write(data) def reset_control(self, data): # NOTE: do not reset constants.LY=LYC flag (bit 2) of the STAT register (Mr. Do!) @@ -267,7 +269,7 @@ def set_status_bug(self) : # Gameboy Bug - if (self.control & 0x80) != 0x00 and \ + if self.control.lcd_enabled and \ (self.stat & 0x03) == 0x01 and \ (self.stat & 0x44) != 0x44: self.interrupt.raise_interrupt(constants.LCD) @@ -292,9 +294,8 @@ def set_line_y_compare(self, data): self.line_y_compare = data - if (self.control & 0x80) == 0: - return - self.emulate_hblank_line_y_compare(stat_check=True) + if self.control.lcd_enabled: + self.emulate_hblank_line_y_compare(stat_check=True) def get_dma(self): return self.dma @@ -478,13 +479,13 @@ self.driver.update_display() def draw_line(self): - if (self.control & 0x01) != 0: + if self.control.background_enabled: self.draw_background() else: self.draw_clean_background() - if (self.control & 0x20) != 0: + if self.control.window_enabled: self.draw_window() - if (self.control & 0x02) != 0: + if self.control.sprite_display_enabled: self.draw_objects() self.draw_pixels() @@ -522,13 +523,13 @@ return tile_map, tile_data; def get_tile_map(self, mask): - if (self.control & mask) != 0: + if (self.control.read() & mask) != 0: return constants.VRAM_MAP_B else: return constants.VRAM_MAP_A def get_tile_data(self, mask): - if (self.control & mask) != 0: + if (self.control.read() & mask) != 0: return constants.VRAM_DATA_A else: return constants.VRAM_DATA_B @@ -558,7 +559,7 @@ tile = self.oam[offset + 2] flags = self.oam[offset + 3] y = self.line_y - y + 16 - if ((self.control & 0x04) != 0): + if self.control.big_sprite_size_selected: # 8x16 tile size if (y < 0 or y > 15): continue @@ -596,7 +597,7 @@ def draw_tiles(self, x, tile_map, tile_data): while x < 168: - if (self.control & 0x10) != 0: + if self.control.background_and_window_lower_tile_data_selected: tile = self.vram[tile_map] else: tile = (self.vram[tile_map] ^ 0x80) & 0xFF From cami at codespeak.net Sun Aug 24 15:11:46 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Sun, 24 Aug 2008 15:11:46 +0200 (CEST) Subject: [pypy-svn] r57611 - pypy/dist/pypy/lang/gameboy/test Message-ID: <20080824131146.8E226169F7D@codespeak.net> Author: cami Date: Sun Aug 24 15:11:45 2008 New Revision: 
57611 Added: pypy/dist/pypy/lang/gameboy/test/test_video_control.py Log: added test file for the new video control register Added: pypy/dist/pypy/lang/gameboy/test/test_video_control.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/lang/gameboy/test/test_video_control.py Sun Aug 24 15:11:45 2008 @@ -0,0 +1,45 @@ +from pypy.lang.gameboy import constants +from pypy.lang.gameboy.video import VideoControl +import py + +def test_video_control_reset(): + control = VideoControl() + assert control.read() == 0x91 + control.write(0xFF) + assert control.read() == 0xFF + control.reset() + assert control.read() == 0x91 + + +def test_read_write_properties(): + control = VideoControl() + properties = ["lcd_enabled", + "window_upper_tile_map_selected", + "window_enabled", + "background_and_window_lower_tile_data_selected", + "background_upper_tile_map_selected", + "big_sprite_size_selected", + "sprite_display_enabled", + "background_enabled"] + properties.reverse() + for index in range(8): + property = properties[index]; + control.write(0x00) + assert control.read() == 0x00 + assert control.__getattribute__(property) == False + + control.write(0xFF) + assert control.read() == 0xFF + assert control.__getattribute__(property) == True + + control.write(0x00) + control.__setattr__(property, True) + assert control.__getattribute__(property) == True + assert control.read() & (1 << index) == (1 << index) + assert control.read() & (~(1 << index)) == 0 + + control.write(1 << index) + assert control.__getattribute__(property) == True + assert control.read() & (1 << index) == (1 << index) + assert control.read() & (~(1 << index)) == 0 + From antocuni at codespeak.net Mon Aug 25 10:10:43 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Mon, 25 Aug 2008 10:10:43 +0200 (CEST) Subject: [pypy-svn] r57616 - pypy/dist/pypy/translator/cli/src Message-ID: <20080825081043.2AAA5169F12@codespeak.net> Author: antocuni Date: Mon Aug 25 10:10:41 2008 New Revision: 57616 Modified: pypy/dist/pypy/translator/cli/src/pypylib.cs Log: fix test_rfind_empty_string, which has been failing for a while now Modified: pypy/dist/pypy/translator/cli/src/pypylib.cs ============================================================================== --- pypy/dist/pypy/translator/cli/src/pypylib.cs (original) +++ pypy/dist/pypy/translator/cli/src/pypylib.cs Mon Aug 25 10:10:41 2008 @@ -356,6 +356,8 @@ int count = stop-start; if (start > s1.Length) return -1; + if (s2 == "") + return stop; return s1.LastIndexOf(s2, stop-1, count); } From antocuni at codespeak.net Mon Aug 25 10:20:56 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Mon, 25 Aug 2008 10:20:56 +0200 (CEST) Subject: [pypy-svn] r57617 - in pypy/dist/pypy: rpython/test translator/oosupport/test_template Message-ID: <20080825082056.63595169F1C@codespeak.net> Author: antocuni Date: Mon Aug 25 10:20:54 2008 New Revision: 57617 Modified: pypy/dist/pypy/rpython/test/test_rstr.py pypy/dist/pypy/rpython/test/tool.py pypy/dist/pypy/translator/oosupport/test_template/string.py Log: make test_prebuilt_ll_strings working and passing also for oo backends Modified: pypy/dist/pypy/rpython/test/test_rstr.py ============================================================================== --- pypy/dist/pypy/rpython/test/test_rstr.py (original) +++ pypy/dist/pypy/rpython/test/test_rstr.py Mon Aug 25 10:20:54 2008 @@ -816,7 +816,7 @@ else: return llstr1 res = self.interpret(f, [0]) - assert res == 
self.string_to_ll(None) + assert self.ll_to_string(res) is None res = self.interpret(f, [1]) assert self.ll_to_string(res) == "hello" Modified: pypy/dist/pypy/rpython/test/tool.py ============================================================================== --- pypy/dist/pypy/rpython/test/tool.py (original) +++ pypy/dist/pypy/rpython/test/tool.py Mon Aug 25 10:20:54 2008 @@ -46,6 +46,8 @@ type_system = 'lltype' def ll_to_string(self, s): + if not s: + return None return ''.join(s.chars) def ll_to_unicode(self, s): Modified: pypy/dist/pypy/translator/oosupport/test_template/string.py ============================================================================== --- pypy/dist/pypy/translator/oosupport/test_template/string.py (original) +++ pypy/dist/pypy/translator/oosupport/test_template/string.py Mon Aug 25 10:20:54 2008 @@ -33,6 +33,3 @@ def fn(answer): return 'the answer is %s' % answer assert self.ll_to_string(self.interpret(fn, [42])) == 'the answer is 42' - - def test_prebuilt_ll_strings(self): - py.test.skip("The testing framework doesn't support returning low level strings") From antocuni at codespeak.net Mon Aug 25 10:39:27 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Mon, 25 Aug 2008 10:39:27 +0200 (CEST) Subject: [pypy-svn] r57618 - pypy/dist/pypy/translator/jvm/src/pypy Message-ID: <20080825083927.4553A169F21@codespeak.net> Author: antocuni Date: Mon Aug 25 10:39:23 2008 New Revision: 57618 Modified: pypy/dist/pypy/translator/jvm/src/pypy/PyPy.java Log: fix rfind Modified: pypy/dist/pypy/translator/jvm/src/pypy/PyPy.java ============================================================================== --- pypy/dist/pypy/translator/jvm/src/pypy/PyPy.java (original) +++ pypy/dist/pypy/translator/jvm/src/pypy/PyPy.java Mon Aug 25 10:39:23 2008 @@ -672,6 +672,9 @@ public static int ll_rfind(String haystack, String needle, int start, int end) { + if (start > haystack.length()) + return -1; + haystack = substring(haystack, start, end); int res = haystack.lastIndexOf(needle); if (res == -1) return res; From afa at codespeak.net Mon Aug 25 23:19:29 2008 From: afa at codespeak.net (afa at codespeak.net) Date: Mon, 25 Aug 2008 23:19:29 +0200 (CEST) Subject: [pypy-svn] r57620 - pypy/dist/pypy/rlib Message-ID: <20080825211929.D7C7916A017@codespeak.net> Author: afa Date: Mon Aug 25 23:19:26 2008 New Revision: 57620 Modified: pypy/dist/pypy/rlib/rmmap.py Log: Translation fix for win32. The buildbots seem to use a larget allworkingmodules list. 
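The one-line change below swaps the pointer type passed to VirtualProtect from DWORDP to LPDWORD, both in the declaration and at the call site. As a rough sketch of the calling pattern (assuming rmmap.py's module-level names VirtualProtect, LPDWORD and PAGE_EXECUTE_READWRITE are in scope; addr and map_size are placeholder arguments invented here, not names from the module), the old-protection out-parameter is allocated raw and freed by hand:

    from pypy.rpython.lltypesystem import lltype

    def make_executable(addr, map_size):
        # VirtualProtect needs somewhere to store the previous protection
        # flags, so hand it a raw one-element LPDWORD array.
        old_protect = lltype.malloc(LPDWORD.TO, 1, zero=True, flavor='raw')
        try:
            VirtualProtect(addr, map_size, PAGE_EXECUTE_READWRITE, old_protect)
        finally:
            lltype.free(old_protect, flavor='raw')
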
Modified: pypy/dist/pypy/rlib/rmmap.py ============================================================================== --- pypy/dist/pypy/rlib/rmmap.py (original) +++ pypy/dist/pypy/rlib/rmmap.py Mon Aug 25 23:19:26 2008 @@ -181,7 +181,8 @@ [rffi.VOIDP, rffi.SIZE_T, DWORD, DWORD], rffi.VOIDP) VirtualProtect = winexternal('VirtualProtect', - [rffi.VOIDP, rffi.SIZE_T, DWORD, DWORDP], BOOL) + [rffi.VOIDP, rffi.SIZE_T, DWORD, LPDWORD], + BOOL) VirtualFree = winexternal('VirtualFree', [rffi.VOIDP, rffi.SIZE_T, DWORD], BOOL) @@ -741,7 +742,7 @@ PAGE_EXECUTE_READWRITE) if not res: raise MemoryError - arg = lltype.malloc(DWORDP.TO, 1, zero=True, flavor='raw') + arg = lltype.malloc(LPDWORD.TO, 1, zero=True, flavor='raw') VirtualProtect(res, map_size, PAGE_EXECUTE_READWRITE, arg) lltype.free(arg, flavor='raw') # ignore errors, just try From fijal at codespeak.net Tue Aug 26 11:39:30 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 11:39:30 +0200 (CEST) Subject: [pypy-svn] r57622 - pypy/dist/pypy/interpreter Message-ID: <20080826093930.6E8DA16A022@codespeak.net> Author: fijal Date: Tue Aug 26 11:39:26 2008 New Revision: 57622 Modified: pypy/dist/pypy/interpreter/gateway.py Log: Another place that uses md5 Modified: pypy/dist/pypy/interpreter/gateway.py ============================================================================== --- pypy/dist/pypy/interpreter/gateway.py (original) +++ pypy/dist/pypy/interpreter/gateway.py Tue Aug 26 11:39:26 2008 @@ -7,7 +7,8 @@ """ -import types, sys, md5, os +import types, sys, os +from pypy.tool.compat import md5 NoneNotWrapped = object() From fijal at codespeak.net Tue Aug 26 11:46:16 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 11:46:16 +0200 (CEST) Subject: [pypy-svn] r57623 - pypy/dist/pypy/translator/tool Message-ID: <20080826094616.88B4A16A042@codespeak.net> Author: fijal Date: Tue Aug 26 11:46:15 2008 New Revision: 57623 Modified: pypy/dist/pypy/translator/tool/cbuild.py Log: always pass absolute paths to gcc calls, if possible Modified: pypy/dist/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/cbuild.py (original) +++ pypy/dist/pypy/translator/tool/cbuild.py Tue Aug 26 11:46:15 2008 @@ -600,9 +600,10 @@ old = cfile.dirpath().chdir() try: - res = compiler.compile([cfile.basename], + res = compiler.compile([str(cfile)], include_dirs=self.eci.include_dirs, - extra_preargs=compile_extra) + extra_preargs=compile_extra, + output_dir=str(cfile.dirpath())) assert len(res) == 1 cobjfile = py.path.local(res[0]) assert cobjfile.check() From cami at codespeak.net Tue Aug 26 12:08:17 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 26 Aug 2008 12:08:17 +0200 (CEST) Subject: [pypy-svn] r57624 - in pypy/dist/pypy/lang/gameboy: . 
test Message-ID: <20080826100817.DF8B916A022@codespeak.net> Author: cami Date: Tue Aug 26 12:08:16 2008 New Revision: 57624 Added: pypy/dist/pypy/lang/gameboy/test/test_video_registers.py Removed: pypy/dist/pypy/lang/gameboy/test/test_video_control.py Modified: pypy/dist/pypy/lang/gameboy/cartridge.py pypy/dist/pypy/lang/gameboy/cpu.py pypy/dist/pypy/lang/gameboy/gameboy.py pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/interrupt.py pypy/dist/pypy/lang/gameboy/joypad.py pypy/dist/pypy/lang/gameboy/ram.py pypy/dist/pypy/lang/gameboy/serial.py pypy/dist/pypy/lang/gameboy/sound.py pypy/dist/pypy/lang/gameboy/test/test_video.py pypy/dist/pypy/lang/gameboy/timer.py pypy/dist/pypy/lang/gameboy/video.py Log: added new status register with some doc for video added tests for StatusRegister rename VideoControl to ControlRegister adapted implementation to handle the new register removed some debug print outs Modified: pypy/dist/pypy/lang/gameboy/cartridge.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cartridge.py (original) +++ pypy/dist/pypy/lang/gameboy/cartridge.py Tue Aug 26 12:08:16 2008 @@ -365,7 +365,7 @@ class MBC1(MBC): """ - PyGirl GameBoy (TM) Emulator + PyGirl Emulator Memory Bank Controller 1 (2MB ROM, 32KB RAM) @@ -427,8 +427,7 @@ class MBC2(MBC): """ - PyGirl GameBoy (TM) Emulator - + PyGirl GameBoPyGirl Memory Bank Controller 2 (256KB ROM, 512x4bit RAM) 0000-3FFF ROM Bank 0 (16KB) @@ -488,9 +487,7 @@ class MBC3(MBC): """ - PyGirl GameBoy (TM) Emulator - - Memory Bank Controller 3 (2MB ROM, 32KB RAM, Real Time Clock) + PyGirl GameBoy (TM) EmulatPyGirlBank Controller 3 (2MB ROM, 32KB RAM, Real Time Clock) 0000-3FFF ROM Bank 0 (16KB) 4000-7FFF ROM Bank 1-127 (16KB) @@ -642,7 +639,7 @@ """ PyGirl GameBoy (TM) Emulator - Memory Bank Controller 5 (8MB ROM, 128KB RAM) + MPyGirler 5 (8MB ROM, 128KB RAM) * 0000-3FFF ROM Bank 0 (16KB) 4000-7FFF ROM Bank 1-511 (16KB) @@ -703,7 +700,7 @@ """ PyGirl GameBoy (TM) Emulator - Hudson Memory Bank Controller 3 (2MB ROM, 128KB RAM, RTC) + Hudson Memory PyGirl2MB ROM, 128KB RAM, RTC) 0000-3FFF ROM Bank 0 (16KB) 4000-7FFF ROM Bank 1-127 (16KB) Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Tue Aug 26 12:08:16 2008 @@ -425,7 +425,6 @@ self.double_register_inverse_call(CPUFetchCaller(self), register) def push(self, data, use_cycles=True): - print hex(data) # Stack, 2 cycles self.sp.dec(use_cycles) # 2 cycles self.memory.write(self.sp.get(use_cycles), data) Modified: pypy/dist/pypy/lang/gameboy/gameboy.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy.py Tue Aug 26 12:08:16 2008 @@ -1,5 +1,5 @@ """ -PyGirl GameBoy (TM) Emulator +PyGirl Emulator GameBoy Scheduler and Memory Mapper @@ -92,7 +92,6 @@ def emulate(self, ticks): while ticks > 0: count = self.get_cycles() - print "python: ticks", count self.cpu.emulate(count) self.serial.emulate(count) self.timer.emulate(count) @@ -128,7 +127,7 @@ receiver = self.get_receiver(address) if receiver is None: return - raise Exception("invalid read address given") + #raise Exception("invalid read address given") receiver.write(address, data) if address == constants.STAT or address == 0xFFFF: self.cpu.handle_pending_interrupts() 
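The bulk of this revision (the video.py hunks further down) replaces the raw control and stat bytes with small register objects, ControlRegister and StatusRegister, which unpack a written byte into named boolean flags and pack them back into a byte on read. A minimal, self-contained sketch of that pattern, using made-up field names and bit positions rather than the emulator's real ones:

    class FlagRegister(object):
        # Packs a few named flags into one byte, in the style of
        # ControlRegister/StatusRegister; the fields are illustrative only.
        def __init__(self):
            self.lcd_enabled        = True    # bit 7
            self.window_enabled     = False   # bit 5
            self.background_enabled = True    # bit 0

        def read(self):
            value  = int(self.lcd_enabled)        << 7
            value += int(self.window_enabled)     << 5
            value += int(self.background_enabled)
            return value

        def write(self, value):
            self.lcd_enabled        = bool(value & (1 << 7))
            self.window_enabled     = bool(value & (1 << 5))
            self.background_enabled = bool(value & (1 << 0))

    # round-trip check
    register = FlagRegister()
    assert register.read() == 0x81
    register.write(0x20)
    assert register.window_enabled and not register.lcd_enabled
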
Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Tue Aug 26 12:08:16 2008 @@ -85,7 +85,6 @@ self.screen = RSDL.SetVideoMode(self.width, self.height, 32, 0) def update_display(self): - #print " update_display" RSDL.LockSurface(self.screen) self.draw_pixels() RSDL.UnlockSurface(self.screen) @@ -172,7 +171,9 @@ # SOUND DRIVER ----------------------------------------------------------------- class SoundDriverImplementation(SoundDriver): - + """ + The current implementation doesnt handle sound yet + """ def __init__(self): SoundDriver.__init__(self) self.create_sound_driver() Modified: pypy/dist/pypy/lang/gameboy/interrupt.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/interrupt.py (original) +++ pypy/dist/pypy/lang/gameboy/interrupt.py Tue Aug 26 12:08:16 2008 @@ -30,7 +30,7 @@ class Interrupt(iMemory): """ - PyGirl GameBoy (TM) Emulator + PyGirl Emulator Interrupt Controller """ Modified: pypy/dist/pypy/lang/gameboy/joypad.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/joypad.py (original) +++ pypy/dist/pypy/lang/gameboy/joypad.py Tue Aug 26 12:08:16 2008 @@ -5,7 +5,7 @@ class Joypad(iMemory): """ - PyGirl GameBoy (TM) Emulator + PyGirl Emulator Joypad Input """ @@ -31,7 +31,6 @@ if self.driver.is_raised(): self.update() self.cycles = constants.JOYPAD_CLOCK - #self.cycles = 150 def write(self, address, data): if address == constants.JOYP: @@ -61,6 +60,10 @@ class JoypadDriver(object): """ Maps the Input to the Button and Direction Codes + get_button_code and get_direction_code are called by the system + to check for pressed buttons + On Button change an interrupt flag self.raised is set to send later + and interrupt to inform the system for a pressed button """ def __init__(self): self.raised = False Modified: pypy/dist/pypy/lang/gameboy/ram.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/ram.py (original) +++ pypy/dist/pypy/lang/gameboy/ram.py Tue Aug 26 12:08:16 2008 @@ -1,5 +1,5 @@ """ -PyGirl GameBoy (TM) Emulator +PyGirl Emulator Work and High RAM """ Modified: pypy/dist/pypy/lang/gameboy/serial.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/serial.py (original) +++ pypy/dist/pypy/lang/gameboy/serial.py Tue Aug 26 12:08:16 2008 @@ -5,7 +5,7 @@ class Serial(iMemory): """ - PyGirl GameBoy (TM) Emulator + PyGirl Emulator Serial Link Controller """ Modified: pypy/dist/pypy/lang/gameboy/sound.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/sound.py (original) +++ pypy/dist/pypy/lang/gameboy/sound.py Tue Aug 26 12:08:16 2008 @@ -1,5 +1,5 @@ """ -PyGirl GameBoy (TM) Emulator +PyGirl Emulator Audio Processor Unit (Sharp LR35902 APU) """ Modified: pypy/dist/pypy/lang/gameboy/test/test_video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_video.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_video.py Tue Aug 26 12:08:16 2008 @@ -30,7 +30,8 @@ video = get_video() assert video.cycles == constants.MODE_2_TICKS assert video.control.read() == 0x91 - assert video.stat == 2 + 
assert video.status.read(extend=False) == 2 + assert video.status.read(extend=True) == 2 + 0x80 assert video.line_y == 0 assert video.line_y_compare == 0 assert video.dma == 0xFF @@ -80,7 +81,6 @@ def test_read_write_control(): video = get_video() - (0xFF40, "control") value = 0x2 video.write(0xFF40, value) assert video.control.read() == value @@ -88,14 +88,15 @@ def test_set_status(): video = get_video() - value = 0x95 - valueb = 0xD2 - video.stat = valueb - video.write(0xFF41, value) - assert video.stat == (valueb & 0x87) | (value & 0x78) + valueA = 0x95 + for valueB in range(0, 0xFF): + video.status.write(valueB, write_all=True) + assert video.status.read(extend=True) == valueB + video.write(0xFF41, valueA) + assert video.get_status() == (valueB & 0x87) | (valueA & 0x78) video.control.write(0x80) - video.stat = 0x01 + video.status.write(0x01, write_all=True) video.write(0xFF41, 0x01) assert video.interrupt.lcd.is_pending() @@ -108,30 +109,31 @@ video.control.write(0x80) video.line_y = value -1 - video.stat = 0xFF + video.status.write(0xFF, write_all=True) + assert video.status.read(extend=True) == 0xFF video.write(0xFF45, value) - assert video.stat == 0xFB + assert video.status.read(extend=True) == 0xFB assert video.interrupt.lcd.is_pending() == False video.control.write(0x80) video.line_y = 0xF6 - video.stat = 0x04 + video.status.write(0x04, write_all=True) video.write(0xFF45, value) - assert video.stat == 0x04 + assert video.status.read(extend=True) == 0x04 assert video.interrupt.lcd.is_pending() == False video.control.write(0x80) video.line_y = 0xF6 - video.stat = 0x00 + video.status.write(0x00, write_all=True) video.write(0xFF45, value) - assert video.stat == 0x04 + assert video.status.read(extend=True) == 0x04 assert video.interrupt.lcd.is_pending() == False video.control.write(0x80) video.line_y = 0xF6 - video.stat = 0x40 + video.status.write(0x40, write_all=True) video.write(0xFF45, value) - assert video.stat == 0x44 + assert video.status.read(extend=True) == 0x44 assert video.interrupt.lcd.is_pending() == True @@ -158,12 +160,12 @@ def test_control_reset1(): video = get_video() video.control.write(0) - video.stat = 0x30 + video.status.write(0x30, write_all=True) video.line_y = 1 video.display = True video.write(0xFF40, 0x80) assert video.control.read() == 0x80 - assert video.stat == 0x30 + 0x02 + assert video.status.read(extend=True) == 0x30 + 0x02 assert video.cycles == constants.MODE_2_TICKS assert video.line_y == 0 assert video.display == False @@ -171,12 +173,12 @@ def test_control_reset2(): video = get_video() video.control.write(0x80) - video.stat = 0x30 + video.status.write(0x30, write_all=True) video.line_y = 1 video.display = True video.write(0xFF40, 0x30) assert video.control.read() == 0x30 - assert video.stat == 0x30 + assert video.status.read(extend=True) == 0x30 assert video.cycles == constants.MODE_1_TICKS assert video.line_y == 0 assert video.display == True @@ -223,10 +225,10 @@ def test_emulate_OAM(): video = get_video() video.transfer = False - video.stat = 0xFE + video.status.write(0xFE, write_all=True) video.cycles = 0 video.emulate_oam() - assert video.stat == 0xFF + assert video.status.read(extend=True) == 0xFF assert video.cycles == constants.MODE_3_BEGIN_TICKS assert video.transfer == True @@ -235,42 +237,90 @@ video.transfer = False video.cycles = 0 - video.stat = 0xF0 + video.status.write(0xF0, write_all=True) video.emulate_transfer() - assert video.stat == 0xF0 + assert video.status.read(extend=True) == 0xF0 assert video.cycles == 
constants.MODE_0_TICKS assert not video.interrupt.lcd.is_pending() video.transfer = False video.cycles = 0 - video.stat = 0xF8 + video.status.write(0xF8, write_all=True) assert not video.interrupt.lcd.is_pending() video.emulate_transfer() - assert video.stat == 0xF8 + assert video.status.read(extend=True) == 0xF8 assert video.cycles == constants.MODE_0_TICKS assert video.interrupt.lcd.is_pending() video.transfer = True video.cycles = 0 - video.stat = 0xFC + video.status.write(0xFC, write_all=True) video.emulate_transfer() assert video.cycles == constants.MODE_3_END_TICKS assert video.transfer == False - assert video.stat == 0xFF + assert video.status.read(extend=True) == 0xFF + + +def test_emulate_hblank_line_y_compare(): + video = get_video() + video.line_y = 0x12 + video.line_y_compare = 0x13 + video.status.line_y_compare_flag = True + video.status.line_y_compare_interrupt = False + video.emulate_hblank_line_y_compare() + assert not video.status.line_y_compare_flag + assert not video.interrupt.lcd.is_pending() + + video.reset() + video.line_y = 0x12 + video.line_y_compare = 0x12 + video.status.line_y_compare_flag = False + video.status.line_y_compare_interrupt = False + video.emulate_hblank_line_y_compare() + assert video.status.line_y_compare_flag + assert not video.interrupt.lcd.is_pending() + + video.reset() + video.line_y = 0x12 + video.line_y_compare = 0x12 + video.status.line_y_compare_flag = False + video.status.line_y_compare_interrupt = True + video.emulate_hblank_line_y_compare() + assert video.status.line_y_compare_flag + assert video.interrupt.lcd.is_pending() +def test_emulate_hblank_line_y_compare_status_check(): + video = get_video() + video.line_y = 0x12 + video.line_y_compare = 0x12 + video.status.line_y_compare_flag = True + video.status.line_y_compare_interrupt = True + video.emulate_hblank_line_y_compare(stat_check=True) + assert video.status.line_y_compare_flag + assert not video.interrupt.lcd.is_pending() + + video.reset() + video.line_y = 0x12 + video.line_y_compare = 0x12 + video.status.line_y_compare_flag = False + video.status.line_y_compare_interrupt = True + video.emulate_hblank_line_y_compare(stat_check=True) + assert video.status.line_y_compare_flag + assert video.interrupt.lcd.is_pending() def test_emulate_h_blank_part_1_1(): video = get_video() video.line_y = 0 video.line_y_compare = 1 - video.stat = 0x20 + video.status.write(0x20, write_all=True) video.cycles = 0 video.frames = 0 assert not video.interrupt.lcd.is_pending() video.emulate_hblank() assert video.cycles == constants.MODE_2_TICKS assert video.interrupt.lcd.is_pending() - assert video.stat == 0x20 + 0x04 + 0x2 + assert video.status.get_mode() == 2 + assert video.status.read(extend=True) == 0x20 + 0x04 + 0x2 assert video.line_y == 1 assert video.frames == 0 @@ -279,21 +329,21 @@ video = get_video() video.line_y = 1 video.line_y_compare = 0 - video.stat = 0x0F + video.status.write(0x0F, write_all=True) video.cycles = 0 video.frames = 0 video.emulate_hblank() assert video.line_y == 2 assert video.cycles == constants.MODE_2_TICKS assert not video.interrupt.lcd.is_pending() - assert video.stat == 0x0B&0xFC + 0x2 + assert video.status.read(extend=True) == 0x0B&0xFC + 0x2 assert video.frames == 0 def test_emulate_h_blank_part_2_2(): video = get_video() video.line_y = 144 video.line_y_compare = 0 - video.stat = 0xFB + video.status.write(0xFB, write_all=True) video.cycles = 0 video.frames = 0 video.frame_skip = 20 @@ -303,7 +353,7 @@ assert video.line_y == 145 assert video.cycles == 
constants.MODE_1_BEGIN_TICKS assert not video.interrupt.lcd.is_pending() - assert video.stat == 0xFB & 0xFC + 0x01 + assert video.status.read(extend=True) == 0xFB & 0xFC + 0x01 assert video.frames == 1 assert video.display == False assert video.vblank == True @@ -313,7 +363,7 @@ video = get_video() video.line_y = 144 video.line_y_compare = 0 - video.stat = 0xFB + video.status.write(0xFB, write_all=True) video.cycles = 0 video.frames = 10 video.frame_skip = 10 @@ -323,7 +373,7 @@ assert video.line_y == 145 assert video.cycles == constants.MODE_1_BEGIN_TICKS assert not video.interrupt.lcd.is_pending() - assert video.stat == 0xFB & 0xFC + 0x01 + assert video.status.read(extend=True) == 0xFB & 0xFC + 0x01 assert video.frames == 0 assert video.vblank == True @@ -331,23 +381,24 @@ def test_emulate_v_vblank_1(): video = get_video() video.interrupt.set_interrupt_flag(0) - video.stat = 0xFE + video.status.write(0xFE, write_all=True) video.vblank = True video.cycles = 0 video.emulate_vblank() assert video.vblank == False - assert video.stat == 0xFD + assert video.status.get_mode() == 1 + assert video.status.read(extend=True) == 0xFD assert video.cycles == constants.MODE_1_TICKS - constants.MODE_1_BEGIN_TICKS assert video.interrupt.vblank.is_pending() assert video.interrupt.lcd.is_pending() video.interrupt.set_interrupt_flag(0) - video.stat = 0x00 + video.status.write(0x00, write_all=True) video.vblank = True assert not video.interrupt.vblank.is_pending() assert not video.interrupt.lcd.is_pending() video.emulate_vblank() - assert video.stat == 0x01 + assert video.status.read(extend=True) == 0x01 assert video.interrupt.vblank.is_pending() assert not video.interrupt.lcd.is_pending() @@ -356,22 +407,22 @@ def test_emulate_v_vblank_2(): video = get_video() video.interrupt.set_interrupt_flag(0) - video.stat = 0x2D + video.status.write(0x2D, write_all=True) video.vblank = False video.cycles = 0 video.line_y = 0 video.emulate_vblank() assert video.vblank == False - assert video.stat == 0x2E + assert video.status.read(extend=True) == 0x2E assert video.cycles == constants.MODE_2_TICKS assert not video.interrupt.vblank.is_pending() assert video.interrupt.lcd.is_pending() video.interrupt.set_interrupt_flag(0) video.cycles = 0 - video.stat = 0xFD + video.status.write(0xFD, write_all=True) video.emulate_vblank() assert video.vblank == False - assert video.stat == 0xFE + assert video.status.read(extend=True) == 0xFE assert video.cycles == constants.MODE_2_TICKS assert not video.interrupt.lcd.is_pending() Added: pypy/dist/pypy/lang/gameboy/test/test_video_registers.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/lang/gameboy/test/test_video_registers.py Tue Aug 26 12:08:16 2008 @@ -0,0 +1,75 @@ +from pypy.lang.gameboy import constants +from pypy.lang.gameboy.video import ControlRegister +from pypy.lang.gameboy.video import StatusRegister +import py + + +# ControlRegister -------------------------------------------------------------- + +def test_video_control_reset(): + control = ControlRegister() + assert control.read() == 0x91 + control.write(0xFF) + assert control.read() == 0xFF + control.reset() + assert control.read() == 0x91 + + +def test_video_control_read_write_properties(): + control = ControlRegister() + properties = ["lcd_enabled", + "window_upper_tile_map_selected", + "window_enabled", + "background_and_window_lower_tile_data_selected", + "background_upper_tile_map_selected", + "big_sprite_size_selected", + "sprite_display_enabled", + 
"background_enabled"] + properties.reverse() + for index in range(8): + property = properties[index]; + control.write(0x00) + assert control.read() == 0x00 + assert control.__getattribute__(property) == False + + control.write(0xFF) + assert control.read() == 0xFF + assert control.__getattribute__(property) == True + + control.write(0x00) + control.__setattr__(property, True) + assert control.__getattribute__(property) == True + assert control.read() & (1 << index) == (1 << index) + assert control.read() & (~(1 << index)) == 0 + + control.write(1 << index) + assert control.__getattribute__(property) == True + assert control.read() & (1 << index) == (1 << index) + assert control.read() & (~(1 << index)) == 0 + + +# StatusRegister --------------------------------------------------------------- + +def test_video_status_reset(): + status = StatusRegister() + assert status.read(extend=True) == 0x02 + 0x80 + + status.write(0x00, write_all=True) + assert status.read(extend=True) == 0x00 + status.reset() + assert status.read(extend=True) == 0x02 + 0x80 + + status.write(0xFF, write_all=True) + assert status.read(extend=True) == 0xFF + status.reset() + assert status.read(extend=True) == 0x02 + 0x80 + +def test_video_status_mode(): + status = StatusRegister() + assert status.get_mode() == 2 + + for i in range(3): + status.set_mode(i) + assert status.get_mode() == i + status.set_mode(4) + assert status.get_mode() == 0 \ No newline at end of file Modified: pypy/dist/pypy/lang/gameboy/timer.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/timer.py (original) +++ pypy/dist/pypy/lang/gameboy/timer.py Tue Aug 26 12:08:16 2008 @@ -1,5 +1,5 @@ """ -PyGirl GameBoy (TM) Emulator +PyGirl Emulator Timer and Divider """ Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Tue Aug 26 12:08:16 2008 @@ -1,5 +1,5 @@ """ - PyGirl GameBoy (TM) Emulator + PyGirl Emulator constants.LCD Video Display Processor """ @@ -31,7 +31,7 @@ # ----------------------------------------------------------------------------- -class VideoControl(object): +class ControlRegister(object): # used for enabled or disabled window or background # Bit 7 - LCD Display Enable (0=Off, 1=On) # Bit 6 - Window Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) @@ -47,11 +47,11 @@ def reset(self): self.lcd_enabled = True - self.window_upper_tile_map_selected = False + self.window_upper_tile_map_selected = False self.window_enabled = False self.background_and_window_lower_tile_data_selected = True - self.background_upper_tile_map_selected = False - self.big_sprite_size_selected = False + self.background_upper_tile_map_selected = False + self.big_sprite_size_selected = False self.sprite_display_enabled = False self.background_enabled = True @@ -78,18 +78,79 @@ self.sprite_display_enabled = bool(value & (1 << 1)) self.background_enabled = bool(value & (1 << 0)) - def get_bg_tile_data_address(self): - pass # ----------------------------------------------------------------------------- +class StatusRegister(object): + + """ + Bit 6 - LYC=LY Coincidence Interrupt (1=Enable) (Read/Write) + Bit 5 - Mode 2 OAM Interrupt (1=Enable) (Read/Write) + Bit 4 - Mode 1 V-Blank Interrupt (1=Enable) (Read/Write) + Bit 3 - Mode 0 H-Blank Interrupt (1=Enable) (Read/Write) + Bit 2 - Coincidence Flag (0:LYC<>LY, 1:LYC=LY) (Read Only) + Bit 1-0 - Mode 
Flag (Mode 0-3, see below) (Read Only) + 0: During H-Blank + 1: During V-Blank + 2: During Searching OAM-RAM + 3: During Transfering Data to LCD Driver + """ + + def __init__(self): + self.reset() + + def reset(self): + self._mode = 0x02 + self.line_y_compare_flag = False + self.mode_0_h_blank_interrupt = False + self.mode_1_v_blank_interrupt = False + self.mode_2_oam_interrupt = False + self.line_y_compare_interrupt = False + self.status = True + + + def read(self, extend=False): + value = self._mode + value += self.line_y_compare_flag << 2 + value += self.mode_0_h_blank_interrupt << 3 + value += self.mode_1_v_blank_interrupt << 4 + value += self.mode_2_oam_interrupt << 5 + value += self.line_y_compare_interrupt << 6 + if extend: + value += int(self.status) << 7 + return value + + + def write(self, value, write_all=False, \ + keep_mode_0_h_blank_interrupt=False): + if write_all: + self._mode = value & 0x03 + self.line_y_compare_flag = bool(value & (1 << 2)) + self.status = bool(value & (1 << 7)) + self.mode_0_h_blank_interrupt = bool(value & (1 << 3)) + self.mode_1_v_blank_interrupt = bool(value & (1 << 4)) + self.mode_2_oam_interrupt = bool(value & (1 << 5)) + self.line_y_compare_interrupt = bool(value & (1 << 6)) + + def get_mode(self): + return self._mode + + def set_mode(self, mode): + self._mode = mode & 0x03 + + def line_y_compare_check(self): + return not self.line_y_compare_flag or not self.line_y_compare_interrupt + +# ----------------------------------------------------------------------------- + class Video(iMemory): def __init__(self, video_driver, interrupt, memory): assert isinstance(video_driver, VideoDriver) self.driver = video_driver self.interrupt = interrupt - self.control = VideoControl() + self.control = ControlRegister() + self.status = StatusRegister() self.memory = memory self.reset() @@ -102,7 +163,7 @@ def reset(self): self.cycles = constants.MODE_2_TICKS self.control.reset() - self.stat = 2 + self.status.reset() self.line_y = 0 self.line_y_compare = 0 self.dma = 0xFF @@ -177,7 +238,6 @@ self.vram[address - constants.VRAM_ADDR] = data & 0xFF def read(self, address): - print address, hex(address) if address == constants.LCDC: return self.get_control() elif address == constants.STAT: @@ -225,7 +285,7 @@ def consume_cycles(self): while self.cycles <= 0: - mode = self.stat & 0x03 + mode = self.status.get_mode() if mode == 0: self.emulate_hblank() elif mode == 1: @@ -239,11 +299,10 @@ return self.control.read() def set_control(self, data): - if (self.control.read() & 0x80) != (data & 0x80): + if self.control.lcd_enabled != bool(data & 0x80): self.reset_control(data) # don't draw window if it was not enabled and not being drawn before - if not self.control.window_enabled and \ - (data & 0x20) != 0 and \ + if not self.control.window_enabled and (data & 0x20) != 0 and \ self.window_line_y == 0 and self.line_y > self.window_y: self.window_line_y = 144 self.control.write(data) @@ -251,27 +310,27 @@ def reset_control(self, data): # NOTE: do not reset constants.LY=LYC flag (bit 2) of the STAT register (Mr. Do!) 
self.line_y = 0 - self.stat = (self.stat & 0xFC) if (data & 0x80) != 0: - self.stat |= 0x02 + self.status.set_mode(0x02) self.cycles = constants.MODE_2_TICKS self.display = False else: + self.status.set_mode(0) self.cycles = constants.MODE_1_TICKS self.clear_frame() def get_status(self): - return 0x80 | self.stat + return self.status.read(extend=True) def set_status(self, data): - self.stat = (self.stat & 0x87) | (data & 0x78) + self.status.write(data) self.set_status_bug() def set_status_bug(self) : # Gameboy Bug if self.control.lcd_enabled and \ - (self.stat & 0x03) == 0x01 and \ - (self.stat & 0x44) != 0x44: + self.status.get_mode() == 0x01 and \ + self.status.line_y_compare_check(): self.interrupt.raise_interrupt(constants.LCD) def get_scroll_y(self): @@ -309,25 +368,25 @@ return self.background_palette def set_background_palette(self, data): - if self.background_palette == data: return - self.background_palette = data - self.dirty = True + if self.background_palette != data: + self.background_palette = data + self.dirty = True def get_object_palette_0(self): return self.object_palette_0 def set_object_palette_0(self, data): - if self.object_palette_0 == data: return - self.object_palette_0 = data - self.dirty = True + if self.object_palette_0 != data: + self.object_palette_0 = data + self.dirty = True def get_object_palette_1(self): return self.object_palette_1 def set_object_palette_1(self, data): - if self.object_palette_1 == data: return - self.object_palette_1 = data - self.dirty = True + if self.object_palette_1 != data: + self.object_palette_1 = data + self.dirty = True def get_window_y(self): return self.window_y @@ -355,58 +414,61 @@ # interrupt checks --------------------------------------------------- def h_blank_interrupt_check(self): - if (self.stat & 0x08) != 0 and (self.stat & 0x44) != 0x44: + if self.status.mode_0_h_blank_interrupt and \ + self.status.line_y_compare_check(): self.interrupt.raise_interrupt(constants.LCD) def oam_interrupt_check(self): - if (self.stat & 0x20) != 0 and (self.stat & 0x44) != 0x44: + if self.status.mode_2_oam_interrupt and \ + self.status.line_y_compare_check(): self.interrupt.raise_interrupt(constants.LCD) def v_blank_interrupt_check(self): - if (self.stat & 0x10) != 0: + if self.status.mode_1_v_blank_interrupt: self.interrupt.raise_interrupt(constants.LCD) def line_y_line_y_compare_interrupt_check(self): - self.stat |= 0x04 - if (self.stat & 0x40) != 0: + print "line_y_line_y_compare_interrupt_check" + self.status.line_y_compare_flag = True + if self.status.line_y_compare_interrupt: self.interrupt.raise_interrupt(constants.LCD) # mode setting ----------------------------------------------------------- def set_mode_3_begin(self): - self.stat = (self.stat & 0xFC) | 0x03 + self.status.set_mode(3) self.cycles += constants.MODE_3_BEGIN_TICKS self.transfer = True def set_mode_3_end(self): - self.stat = (self.stat & 0xFC) | 0x03 + self.status.set_mode(3) self.cycles += constants.MODE_3_END_TICKS self.transfer = False def set_mode_0(self): - self.stat = (self.stat & 0xFC) + self.status.set_mode(0) self.cycles += constants.MODE_0_TICKS self.h_blank_interrupt_check() def set_mode_2(self): - self.stat = (self.stat & 0xFC) | 0x02 + self.status.set_mode(2) self.cycles += constants.MODE_2_TICKS self.oam_interrupt_check() def set_mode_1_begin(self): - self.stat = (self.stat & 0xFC) | 0x01 + self.status.set_mode(1) self.cycles += constants.MODE_1_BEGIN_TICKS def set_mode_1(self): - self.stat = (self.stat & 0xFC) | 0x01 + self.status.set_mode(1) 
self.cycles += constants.MODE_1_TICKS def set_mode_1_between(self): - self.stat = (self.stat & 0xFC) | 0x01 + self.status.set_mode(1) self.cycles += constants.MODE_1_TICKS - constants.MODE_1_BEGIN_TICKS def set_mode_1_end(self): - self.stat = (self.stat & 0xFC) | 0x01 + self.status.set_mode(1) self.cycles += constants.MODE_1_END_TICKS # ---------------------------------------------------------------- @@ -422,12 +484,12 @@ def emulate_hblank_line_y_compare(self, stat_check=False): if self.line_y == self.line_y_compare: if stat_check: - if (self.stat & 0x04) == 0: + if not self.status.line_y_compare_flag: self.line_y_line_y_compare_interrupt_check() else: self.line_y_line_y_compare_interrupt_check() else: - self.stat &= 0xFB + self.status.line_y_compare_flag = False def emulate_hblank_part_2(self): if self.display: @@ -507,8 +569,7 @@ return tile_map, tile_data def draw_window(self): - if self.line_y < self.window_y or \ - self.window_x >= 167 or \ + if self.line_y < self.window_y or self.window_x >= 167 or \ self.window_line_y >= 144: return tile_map, tile_data = self.prepare_window_data() From fijal at codespeak.net Tue Aug 26 12:23:49 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 12:23:49 +0200 (CEST) Subject: [pypy-svn] r57625 - in pypy/dist/pypy: tool translator/tool translator/tool/test Message-ID: <20080826102349.93EBA168435@codespeak.net> Author: fijal Date: Tue Aug 26 12:23:46 2008 New Revision: 57625 Modified: pypy/dist/pypy/tool/gcc_cache.py pypy/dist/pypy/translator/tool/cbuild.py pypy/dist/pypy/translator/tool/test/test_cbuild.py Log: Some support for cross-compilation. Eci can now say how to run and how to compile targets for different platform. Modified: pypy/dist/pypy/tool/gcc_cache.py ============================================================================== --- pypy/dist/pypy/tool/gcc_cache.py (original) +++ pypy/dist/pypy/tool/gcc_cache.py Tue Aug 26 12:23:46 2008 @@ -20,7 +20,8 @@ try: return path.read() except py.error.Error: - result = py.process.cmdexec(build_executable(c_files, eci)) + result = py.process.cmdexec(eci.get_emulator_for_platform() + + build_executable(c_files, eci)) path.write(result) return result Modified: pypy/dist/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/cbuild.py (original) +++ pypy/dist/pypy/translator/tool/cbuild.py Tue Aug 26 12:23:46 2008 @@ -23,7 +23,8 @@ _ATTRIBUTES = ['pre_include_bits', 'includes', 'include_dirs', 'post_include_bits', 'libraries', 'library_dirs', 'separate_module_sources', 'separate_module_files', - 'export_symbols', 'compile_extra', 'link_extra', 'frameworks'] + 'export_symbols', 'compile_extra', 'link_extra', + 'frameworks'] _DUPLICATES_OK = ['compile_extra', 'link_extra'] def __init__(self, @@ -38,7 +39,8 @@ export_symbols = [], compile_extra = [], link_extra = [], - frameworks = []): + frameworks = [], + platform = 'host'): """ pre_include_bits: list of pieces of text that should be put at the top of the generated .c files, before any #include. They shouldn't @@ -79,11 +81,15 @@ linker. Use this instead of the 'libraries' parameter if you want to link to a framework bundle. Not suitable for unix-like .dylib installations. + + platform: an unique identifier of compile platform, useful for + caching. 
""" for name in self._ATTRIBUTES: value = locals()[name] assert isinstance(value, (list, tuple)) setattr(self, name, tuple(value)) + self.platform = platform def from_compiler_flags(cls, flags): """Returns a new ExternalCompilationInfo instance by parsing @@ -196,6 +202,11 @@ s.add(elem) attr.append(elem) attrs[name] = attr + for other in others: + if other.platform != self.platform: + raise Exception("Mixing ECI for different platforms %s and %s"% + (other.platform, self.platform)) + attrs['platform'] = self.platform return ExternalCompilationInfo(**attrs) def write_c_header(self, fileobj): @@ -251,6 +262,24 @@ d['separate_module_sources'] = () return ExternalCompilationInfo(**d) + def get_emulator_for_platform(self): + if self.platform == 'host': + return '' + elif self.platform == 'maemo': + # XXX how to do it in better way??? + return '/scratchbox/login ' + else: + raise NotImplementedError("Platform = %s" % (self.platform,)) + + def get_compiler_for_platform(self): + if self.platform == 'host': + return None + elif self.platform == 'maemo': + # XXX this should be settable somehow, not sure exactly how + return '/scratchbox/compilers/cs2005q3.2-glibc-arm/bin/sbox-arm-linux-gcc' + else: + raise NotImplementedError("Platform = %s" % (self.platform,)) + if sys.platform == 'win32': so_ext = '.dll' else: @@ -498,7 +527,10 @@ self.compile_extra = list(eci.compile_extra) self.link_extra = list(eci.link_extra) self.frameworks = list(eci.frameworks) - self.compiler_exe = compiler_exe + if compiler_exe is not None: + self.compiler_exe = compiler_exe + else: + self.compiler_exe = eci.get_compiler_for_platform() self.profbased = profbased if not sys.platform in ('win32', 'darwin'): # xxx if 'm' not in self.libraries: Modified: pypy/dist/pypy/translator/tool/test/test_cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/test/test_cbuild.py (original) +++ pypy/dist/pypy/translator/tool/test/test_cbuild.py Tue Aug 26 12:23:46 2008 @@ -156,3 +156,34 @@ py.test.raises(ImportError, ExternalCompilationInfo.from_config_tool, 'dxowqbncpqympqhe-config') + + def test_platforms(self): + eci = ExternalCompilationInfo(platform='xxx') + eci2 = ExternalCompilationInfo() + py.test.raises(Exception, eci2.merge, eci) + assert eci.merge(eci).platform == 'xxx' + + def test_standalone_maemo(self): + # XXX skip if there is no scratchbox + if not py.path.local('/scratchbox/login').check(): + py.test.skip("No scratchbox detected") + tmpdir = self.tmpdir + c_file = tmpdir.join('stand1.c') + c_file.write(''' + #include + #include + + int main() + { + printf("%f\\n", pow(2.0, 2.0)); + return 0; + }''') + if sys.platform == 'win32': + py.test.skip("No cross-compilation on windows yet") + else: + eci = ExternalCompilationInfo(platform='maemo', + libraries=['m']) + output = build_executable([c_file], eci) + py.test.raises(py.process.cmdexec.Error, py.process.cmdexec, output) + result = py.process.cmdexec(eci.get_emulator_for_platform() + output) + assert result.startswith('4.0') From fijal at codespeak.net Tue Aug 26 12:28:34 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 12:28:34 +0200 (CEST) Subject: [pypy-svn] r57626 - in pypy/dist/pypy: config translator/goal Message-ID: <20080826102834.25573169EF1@codespeak.net> Author: fijal Date: Tue Aug 26 12:28:33 2008 New Revision: 57626 Modified: pypy/dist/pypy/config/translationoption.py pypy/dist/pypy/translator/goal/translate.py Log: Some wild hackery to get cross-compiling to 
work. Probably needs to be changed to something saner. Modified: pypy/dist/pypy/config/translationoption.py ============================================================================== --- pypy/dist/pypy/config/translationoption.py (original) +++ pypy/dist/pypy/config/translationoption.py Tue Aug 26 12:28:33 2008 @@ -337,3 +337,19 @@ config.translation.suggest(list_comprehension_operations=True) else: raise ValueError(word) + +# ---------------------------------------------------------------- + +PLATFORMS = [ + 'host', + 'maemo', +] + +def set_platform(config, platform): + if platform == 'maemo': + from pypy.translator.tool.cbuild import ExternalCompilationInfo + # XXX evil hackery + func_defs = list(ExternalCompilationInfo.__init__.func_defaults) + func_defs[-1] = 'maemo' + ExternalCompilationInfo.__init__.im_func.func_defaults = tuple(func_defs) + Modified: pypy/dist/pypy/translator/goal/translate.py ============================================================================== --- pypy/dist/pypy/translator/goal/translate.py (original) +++ pypy/dist/pypy/translator/goal/translate.py Tue Aug 26 12:28:33 2008 @@ -14,6 +14,7 @@ from pypy.config.translationoption import get_combined_translation_config from pypy.config.translationoption import set_opt_level from pypy.config.translationoption import OPT_LEVELS, DEFAULT_OPT_LEVEL +from pypy.config.translationoption import PLATFORMS, set_platform GOALS= [ @@ -51,6 +52,9 @@ ChoiceOption("opt", "optimization level", OPT_LEVELS, default=DEFAULT_OPT_LEVEL, cmdline="--opt -O"), + ChoiceOption("platform", + "target platform", ['host'] + PLATFORMS, default='host', + cmdline='--platform'), BoolOption("profile", "cProfile (to debug the speed of the translation process)", default=False, @@ -64,6 +68,8 @@ cmdline="--view", negation=False), BoolOption("help", "show this help message and exit", default=False, cmdline="-h --help", negation=False), + BoolOption("fullhelp", "show full help message and exit", default=False, + cmdline="--full-help", negation=False), ArbitraryOption("goals", "XXX", defaultfactory=list), # xxx default goals ['annotate', 'rtype', 'backendopt', 'source', 'compile'] @@ -156,6 +162,9 @@ existing_config=config, translating=True) + # apply the platform settings + set_platform(config, translateconfig.platform) + # apply the optimization level settings set_opt_level(config, translateconfig.opt) From fijal at codespeak.net Tue Aug 26 12:50:28 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 12:50:28 +0200 (CEST) Subject: [pypy-svn] r57627 - pypy/dist/pypy/doc Message-ID: <20080826105028.F051516A05E@codespeak.net> Author: fijal Date: Tue Aug 26 12:50:27 2008 New Revision: 57627 Added: pypy/dist/pypy/doc/maemo.txt (contents, props changed) Log: Add a little document how to cross-compile pypy to maemo Added: pypy/dist/pypy/doc/maemo.txt ============================================================================== --- (empty file) +++ pypy/dist/pypy/doc/maemo.txt Tue Aug 26 12:50:27 2008 @@ -0,0 +1,35 @@ +How to run PyPy on top of maemo platform +======================================== + +This is a short introduction how to install correct environment and +cross-compile pypy to maemo platform emulator under qemu. Note that this +is work-in-progress. + +Installing `scratchbox`_ +------------------------ + +I had no success with installing scratchbox 2.0, but theoretically it +should work. 
Follow these `installation instructions`_, +make sure that you install cpu-transparency modules, otherwise emulation will +not be available. Preferred toolchain is cs2005q3.2-glibc2.5-arm. + +Follow instructions, running sb-menu and selecting cpu-transparency +method (qemu-arm), devkits (debian-lenny, cpu-transparency) and install files +(and not a rootstrap). + +I had to manually edit /scratchbox/devkits/debian-etch/etc/environment +to add ARCH=armel, otherwise things did not work + +Translating pypy +---------------- + +Run:: + ./translate.py --platform=maemo --opt=mem + +This should create pypy-c which is able to run on top of scratchbox. To run it +you need to copy pypy-c, together with libraries to a place accessible +from inside scratchbox. Default is /scratchbox/users//home/ where + is your user id. + +.. _`installation instructions`: http://www.scratchbox.org/documentation/user/scratchbox-1.0/html/installdoc.html +.. _`scratchbox`: http://scratchbox.org From fijal at codespeak.net Tue Aug 26 13:07:38 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 13:07:38 +0200 (CEST) Subject: [pypy-svn] r57628 - pypy/dist/pypy/doc Message-ID: <20080826110738.619D2169FCA@codespeak.net> Author: fijal Date: Tue Aug 26 13:07:36 2008 New Revision: 57628 Modified: pypy/dist/pypy/doc/maemo.txt Log: Add --no-allworkingmodules for now Modified: pypy/dist/pypy/doc/maemo.txt ============================================================================== --- pypy/dist/pypy/doc/maemo.txt (original) +++ pypy/dist/pypy/doc/maemo.txt Tue Aug 26 13:07:36 2008 @@ -24,7 +24,7 @@ ---------------- Run:: - ./translate.py --platform=maemo --opt=mem + ./translate.py --platform=maemo --opt=mem targetpypystandalone --no-allworkingmodules This should create pypy-c which is able to run on top of scratchbox. To run it you need to copy pypy-c, together with libraries to a place accessible From fijal at codespeak.net Tue Aug 26 13:16:37 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Tue, 26 Aug 2008 13:16:37 +0200 (CEST) Subject: [pypy-svn] r57629 - in pypy/dist/pypy/translator/tool: . test Message-ID: <20080826111637.09950169FBF@codespeak.net> Author: fijal Date: Tue Aug 26 13:16:36 2008 New Revision: 57629 Modified: pypy/dist/pypy/translator/tool/cbuild.py pypy/dist/pypy/translator/tool/test/test_cbuild.py Log: Oooops. 
Make sure platform belongs to the key Modified: pypy/dist/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/cbuild.py (original) +++ pypy/dist/pypy/translator/tool/cbuild.py Tue Aug 26 13:16:36 2008 @@ -156,7 +156,8 @@ from_config_tool = classmethod(from_config_tool) def _value(self): - return tuple([getattr(self, x) for x in self._ATTRIBUTES]) + return tuple([getattr(self, x) for x in self._ATTRIBUTES] + + [self.platform]) def __hash__(self): return hash(self._value()) Modified: pypy/dist/pypy/translator/tool/test/test_cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/test/test_cbuild.py (original) +++ pypy/dist/pypy/translator/tool/test/test_cbuild.py Tue Aug 26 13:16:36 2008 @@ -160,6 +160,8 @@ def test_platforms(self): eci = ExternalCompilationInfo(platform='xxx') eci2 = ExternalCompilationInfo() + assert eci != eci2 + assert hash(eci) != hash(eci2) py.test.raises(Exception, eci2.merge, eci) assert eci.merge(eci).platform == 'xxx' From arigo at codespeak.net Tue Aug 26 13:50:07 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 26 Aug 2008 13:50:07 +0200 (CEST) Subject: [pypy-svn] r57630 - pypy/dist/pypy/objspace/std/test Message-ID: <20080826115007.4D19E16A01B@codespeak.net> Author: arigo Date: Tue Aug 26 13:50:05 2008 New Revision: 57630 Modified: pypy/dist/pypy/objspace/std/test/test_typeobject.py Log: Two tests that pass on CPython but crash for us. Modified: pypy/dist/pypy/objspace/std/test/test_typeobject.py ============================================================================== --- pypy/dist/pypy/objspace/std/test/test_typeobject.py (original) +++ pypy/dist/pypy/objspace/std/test/test_typeobject.py Tue Aug 26 13:50:05 2008 @@ -313,6 +313,27 @@ else: raise TestFailed, "didn't catch MRO conflict" + def test_mutable_bases_versus_nonheap_types(self): + skip("in-progress") + class A(int): + __slots__ = [] + class C(int): + pass + raises(TypeError, 'C.__bases__ = (A,)') + raises(TypeError, 'int.__bases__ = (object,)') + C.__bases__ = (int,) + + def test_compatible_slot_layout(self): + skip("in-progress") + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b1', 'b2'] + class C(A): + pass + class D(B, C): # assert does not raise TypeError + pass + def test_builtin_add(self): x = 5 assert x.__add__(6) == 11 From cami at codespeak.net Tue Aug 26 17:29:09 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Tue, 26 Aug 2008 17:29:09 +0200 (CEST) Subject: [pypy-svn] r57631 - pypy/dist/pypy/lang/gameboy Message-ID: <20080826152909.DAC4E169F74@codespeak.net> Author: cami Date: Tue Aug 26 17:29:06 2008 New Revision: 57631 Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py pypy/dist/pypy/lang/gameboy/video.py Log: video: small code cleanup implementation: added checks for finished execution Modified: pypy/dist/pypy/lang/gameboy/gameboy_implementation.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy_implementation.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy_implementation.py Tue Aug 26 17:29:06 2008 @@ -18,6 +18,7 @@ def __init__(self): GameBoy.__init__(self) + self.is_running = False self.init_sdl() def init_sdl(self): @@ -33,13 +34,13 @@ def mainLoop(self): self.reset() + self.is_running = True try: - isRunning = True - while isRunning and self.handle_events(): + while 
self.is_running and self.handle_events(): self.emulate(constants.GAMEBOY_CLOCK >> 2) time.sleep(10/1000) - RSDL.Delay(10) except : + self.is_running = False lltype.free(self.event, flavor='raw') RSDL.Quit() self.handle_execution_error() @@ -49,12 +50,11 @@ pass def handle_events(self): - isRunning = True - while self.poll_event(): + while self.poll_event() and self.is_running: if self.check_for_escape(): - isRunning = False + self.is_running = False self.joypad_driver.update(self.event) - return isRunning + return self.is_running def poll_event(self): Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Tue Aug 26 17:29:06 2008 @@ -428,7 +428,6 @@ self.interrupt.raise_interrupt(constants.LCD) def line_y_line_y_compare_interrupt_check(self): - print "line_y_line_y_compare_interrupt_check" self.status.line_y_compare_flag = True if self.status.line_y_compare_interrupt: self.interrupt.raise_interrupt(constants.LCD) @@ -483,10 +482,7 @@ def emulate_hblank_line_y_compare(self, stat_check=False): if self.line_y == self.line_y_compare: - if stat_check: - if not self.status.line_y_compare_flag: - self.line_y_line_y_compare_interrupt_check() - else: + if not (stat_check and self.status.line_y_compare_flag): self.line_y_line_y_compare_interrupt_check() else: self.status.line_y_compare_flag = False From arigo at codespeak.net Tue Aug 26 17:39:27 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 26 Aug 2008 17:39:27 +0200 (CEST) Subject: [pypy-svn] r57632 - pypy/branch/typeobject-init Message-ID: <20080826153927.66AD8169F74@codespeak.net> Author: arigo Date: Tue Aug 26 17:39:26 2008 New Revision: 57632 Added: pypy/branch/typeobject-init/ - copied from r57631, pypy/dist/ Log: A branch in which to refactor and clean up the initialization code of W_TypeObject. From arigo at codespeak.net Tue Aug 26 17:46:09 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 26 Aug 2008 17:46:09 +0200 (CEST) Subject: [pypy-svn] r57633 - in pypy/branch/typeobject-init/pypy/objspace/std: . test Message-ID: <20080826154609.D8E1916A018@codespeak.net> Author: arigo Date: Tue Aug 26 17:46:08 2008 New Revision: 57633 Modified: pypy/branch/typeobject-init/pypy/objspace/std/test/test_typeobject.py pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py pypy/branch/typeobject-init/pypy/objspace/std/typetype.py Log: * Refactor W_TypeObject.__init__() into a small forest of helpers. * Use some of the same helpers for assignment to __bases__. * More tests. 
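The rule these helpers enforce is that __bases__ may only be re-assigned when the old and the new best base describe the same instance layout: a subclass that adds no slots (and no __dict__ or __weakref__) keeps its parent's layout, while extra slots make the layouts incompatible. A rough application-level sketch of the behaviour the new tests below exercise (simplified; class names are illustrative)::

    class A(object):
        __slots__ = ['a']

    class B(A):
        __slots__ = []          # adds nothing: same layout as A

    class C(B):
        pass

    c = C()
    c.a = 42
    C.__bases__ = (A,)          # accepted: old and new best base share a layout
    assert c.a == 42            # existing instances keep working

    class B2(A):
        __slots__ = ['b1', 'b2']

    class C2(B2):
        pass

    try:
        C2.__bases__ = (A,)     # rejected: C2 instances rely on B2's extra slots
    except TypeError:
        pass
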
Modified: pypy/branch/typeobject-init/pypy/objspace/std/test/test_typeobject.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/test/test_typeobject.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/test/test_typeobject.py Tue Aug 26 17:46:08 2008 @@ -111,6 +111,20 @@ class AppTestTypeObject: + + def test_call_type(self): + assert type(42) is int + C = type('C', (object,), {'x': lambda: 42}) + unbound_meth = C.x + raises(TypeError, unbound_meth) + assert unbound_meth.im_func() == 42 + raises(TypeError, type) + raises(TypeError, type, 'test', (object,)) + raises(TypeError, type, 'test', (object,), {}, 42) + raises(TypeError, type, 42, (object,), {}) + raises(TypeError, type, 'test', 42, {}) + raises(TypeError, type, 'test', (object,), 42) + def test_bases(self): assert int.__bases__ == (object,) class X: @@ -314,17 +328,98 @@ raise TestFailed, "didn't catch MRO conflict" def test_mutable_bases_versus_nonheap_types(self): - skip("in-progress") class A(int): - __slots__ = [] + pass + class B(int): + __slots__ = ['b'] class C(int): pass raises(TypeError, 'C.__bases__ = (A,)') + raises(TypeError, 'C.__bases__ = (B,)') + raises(TypeError, 'C.__bases__ = (C,)') raises(TypeError, 'int.__bases__ = (object,)') C.__bases__ = (int,) + #--- the following raises on CPython but works on PyPy. + #--- I don't see an obvious reason why it should fail... + import sys + if '__pypy__' not in sys.builtin_module_names: + skip("works on PyPy only") + class MostlyLikeInt(int): + __slots__ = [] + C.__bases__ = (MostlyLikeInt,) + + def test_mutable_bases_versus_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b1', 'b2'] + class C(B): + pass + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_versus_weakref(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['__weakref__'] + class C(B): + pass + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_same_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class C(B): + pass + c = C() + c.a = 42 + assert C.__mro__ == (C, B, A, object) + C.__bases__ = (A,) + assert C.__mro__ == (C, A, object) + assert c.a == 42 + + def test_mutable_bases_versus_slots_2(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b1', 'b2'] + class C(B): + __slots__ = ['c'] + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_keeping_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class C(B): + __slots__ = ['c'] + c = C() + c.a = 42 + c.c = 85 + assert C.__mro__ == (C, B, A, object) + C.__bases__ = (A,) + assert C.__mro__ == (C, A, object) + assert c.a == 42 + assert c.c == 85 + + class D(A): + __slots__ = [] + C.__bases__ = (B, D) + assert C.__mro__ == (C, B, D, A, object) + assert c.a == 42 + assert c.c == 85 + raises(TypeError, 'C.__bases__ = (B, D, B)') + + class E(A): + __slots__ = ['e'] + raises(TypeError, 'C.__bases__ = (B, E)') + raises(TypeError, 'C.__bases__ = (E, B)') + raises(TypeError, 'C.__bases__ = (E,)') def test_compatible_slot_layout(self): - skip("in-progress") class A(object): __slots__ = ['a'] class B(A): @@ -410,6 +505,8 @@ assert B_mro().b == 1 assert getattr(B_mro, 'a', None) == None assert getattr(B_mro(), 'a', None) == None + # also check what the built-in mro() method would return for 'B_mro' + assert type.mro(B_mro) == [B_mro, A_mro, object] def test_abstract_mro(self): class A1: # 
old-style class @@ -552,6 +649,53 @@ assert a.__ == 4 assert a.__dict__ == {} + def test_slots_multiple_inheritance(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class E(A): + __slots__ = ['e'] + class C(B, E): + pass + c = C() + c.a = 42 + c.e = 85 + assert c.a == 42 + assert c.e == 85 + + def test_base_attr(self): + # check the '__base__' + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class E(A): + __slots__ = ['e'] + class C(B, E): + pass + class D(A): + __slots__ = [] + class F(B, D): + pass + assert C.__base__ is E + assert F.__base__ is B + assert bool.__base__ is int + assert int.__base__ is object + assert object.__base__ is None + + def test_cannot_subclass(self): + raises(TypeError, type, 'A', (bool,), {}) + + def test_slot_conflict(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b'] + class E(A): + __slots__ = ['e'] + raises(TypeError, type, 'C', (B, E), {}) + def test_repr(self): globals()['__name__'] = 'a' class A(object): Modified: pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py Tue Aug 26 17:46:08 2008 @@ -1,6 +1,5 @@ from pypy.objspace.std.objspace import * from pypy.interpreter.function import Function, StaticMethod -from pypy.interpreter.argument import Arguments from pypy.interpreter import gateway from pypy.interpreter.typedef import weakref_descr from pypy.objspace.std.stdtypedef import std_dict_descr, issubtypedef, Member @@ -60,163 +59,20 @@ w_self.name = name w_self.bases_w = bases_w w_self.dict_w = dict_w - w_self.ensure_static__new__() w_self.nslots = 0 + w_self.hasdict = False w_self.needsdel = False - w_self.w_bestbase = None + w_self.weakrefable = False + w_self.w_same_layout_as = None w_self.weak_subclasses = [] - - # make sure there is a __doc__ in dict_w - if '__doc__' not in dict_w: - dict_w['__doc__'] = space.w_None + w_self.__flags__ = 0 # or _HEAPTYPE + w_self.instancetypedef = overridetypedef if overridetypedef is not None: - w_self.instancetypedef = overridetypedef - w_self.hasdict = overridetypedef.hasdict - w_self.weakrefable = overridetypedef.weakrefable - w_self.__flags__ = 0 # not a heaptype - if overridetypedef.base is not None: - w_self.w_bestbase = space.gettypeobject(overridetypedef.base) + setup_builtin_type(w_self) else: - w_self.__flags__ = _HEAPTYPE - # initialize __module__ in the dict - if '__module__' not in dict_w: - try: - caller = space.getexecutioncontext().framestack.top() - except IndexError: - w_globals = w_locals = space.newdict() - else: - w_globals = caller.w_globals - w_str_name = space.wrap('__name__') - w_name = space.finditem(w_globals, w_str_name) - if w_name is not None: - dict_w['__module__'] = w_name - # find the most specific typedef - instancetypedef = object_typedef - for w_base in bases_w: - if not isinstance(w_base, W_TypeObject): - continue - if issubtypedef(w_base.instancetypedef, instancetypedef): - if instancetypedef is not w_base.instancetypedef: - instancetypedef = w_base.instancetypedef - w_self.w_bestbase = w_base - elif not issubtypedef(instancetypedef, w_base.instancetypedef): - raise OperationError(space.w_TypeError, - space.wrap("instance layout conflicts in " - "multiple inheritance")) - if not instancetypedef.acceptable_as_base_class: - raise OperationError(space.w_TypeError, - 
space.wrap("type '%s' is not an " - "acceptable base class" % - instancetypedef.name)) - w_self.instancetypedef = instancetypedef - w_self.hasdict = False - w_self.weakrefable = False - hasoldstylebase = False - w_most_derived_base_with_slots = None - w_newstyle = None - for w_base in bases_w: - if not isinstance(w_base, W_TypeObject): - hasoldstylebase = True - continue - if not w_newstyle: - w_newstyle = w_base - if w_base.nslots != 0: - if w_most_derived_base_with_slots is None: - w_most_derived_base_with_slots = w_base - else: - if space.is_true(space.issubtype(w_base, w_most_derived_base_with_slots)): - w_most_derived_base_with_slots = w_base - elif not space.is_true(space.issubtype(w_most_derived_base_with_slots, w_base)): - raise OperationError(space.w_TypeError, - space.wrap("instance layout conflicts in " - "multiple inheritance")) - w_self.hasdict = w_self.hasdict or w_base.hasdict - w_self.needsdel = w_self.needsdel or w_base.needsdel - w_self.weakrefable = w_self.weakrefable or w_base.weakrefable - if not w_newstyle: # only classic bases - raise OperationError(space.w_TypeError, - space.wrap("a new-style class can't have only classic bases")) + setup_user_defined_type(w_self) - if w_most_derived_base_with_slots: - nslots = w_most_derived_base_with_slots.nslots - w_self.w_bestbase = w_most_derived_base_with_slots - else: - nslots = 0 - - if w_self.w_bestbase is None: - w_self.w_bestbase = w_newstyle - - wantdict = True - wantweakref = True - if '__slots__' in dict_w: - wantdict = False - wantweakref = False - - w_slots = dict_w['__slots__'] - if space.is_true(space.isinstance(w_slots, space.w_str)): - if space.int_w(space.len(w_slots)) == 0: - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - slot_names_w = [w_slots] - else: - slot_names_w = space.unpackiterable(w_slots) - for w_slot_name in slot_names_w: - slot_name = space.str_w(w_slot_name) - # slot_name should be a valid identifier - if len(slot_name) == 0: - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - first_char = slot_name[0] - if not first_char.isalpha() and first_char != '_': - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - for c in slot_name: - if not c.isalnum() and c!= '_': - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - if slot_name == '__dict__': - if wantdict or w_self.hasdict: - raise OperationError(space.w_TypeError, - space.wrap("__dict__ slot disallowed: we already got one")) - wantdict = True - elif slot_name == '__weakref__': - if wantweakref or w_self.weakrefable: - raise OperationError(space.w_TypeError, - space.wrap("__weakref__ slot disallowed: we already got one")) - - wantweakref = True - else: - # create member - slot_name = _mangle(slot_name, name) - # Force interning of slot names. 
- slot_name = space.str_w(space.new_interned_str(slot_name)) - w_self.dict_w[slot_name] = space.wrap(Member(nslots, slot_name, w_self)) - nslots += 1 - - w_self.nslots = nslots - - wantdict = wantdict or hasoldstylebase - - if wantdict and not w_self.hasdict: - w_self.dict_w['__dict__'] = space.wrap(std_dict_descr) - w_self.hasdict = True - if '__del__' in dict_w: - w_self.needsdel = True - if wantweakref and not w_self.weakrefable: - w_self.dict_w['__weakref__'] = space.wrap(weakref_descr) - w_self.weakrefable = True - w_type = space.type(w_self) - if not space.is_w(w_type, space.w_type): - if space.config.objspace.std.withtypeversion: - w_self.version_tag = None - w_self.mro_w = [] - mro_func = space.lookup(w_self, 'mro') - mro_func_args = Arguments(space, [w_self]) - w_mro = space.call_args(mro_func, mro_func_args) - w_self.mro_w = space.unpackiterable(w_mro) - return - w_self.mro_w = w_self.compute_mro() if space.config.objspace.std.withtypeversion: if w_self.instancetypedef.hasdict: w_self.version_tag = None @@ -250,33 +106,14 @@ continue w_base.add_subclass(w_self) - # compute the most parent class with the same layout as us - def get_layout(w_self): - w_bestbase = w_self.w_bestbase - if w_bestbase is None: # object - return w_self - if w_self.instancetypedef is not w_bestbase.instancetypedef: - return w_self - if w_self.nslots == w_bestbase.nslots: - return w_bestbase.get_layout() - return w_self - # compute a tuple that fully describes the instance layout def get_full_instance_layout(w_self): - w_layout = w_self.get_layout() + w_layout = w_self.w_same_layout_as or w_self return (w_layout, w_self.hasdict, w_self.needsdel, w_self.weakrefable) - def compute_mro(w_self): + def compute_default_mro(w_self): return compute_C3_mro(w_self.space, w_self) - def ensure_static__new__(w_self): - # special-case __new__, as in CPython: - # if it is a Function, turn it into a static method - if '__new__' in w_self.dict_w: - w_new = w_self.dict_w['__new__'] - if isinstance(w_new, Function): - w_self.dict_w['__new__'] = StaticMethod(w_new) - def getdictvalue(w_self, space, w_attr): return w_self.getdictvalue_w(space, space.str_w(w_attr)) @@ -487,6 +324,230 @@ def setweakref(self, space, weakreflifeline): self._lifeline_ = weakreflifeline +# ____________________________________________________________ +# Initialization of type objects + +def get_parent_layout(w_type): + """Compute the most parent class of 'w_type' whose layout + is the same as 'w_type', or None if all parents of 'w_type' + have a different layout than 'w_type'. + """ + w_starttype = w_type + while len(w_type.bases_w) > 0: + w_bestbase = find_best_base(w_type.space, w_type.bases_w) + if w_type.instancetypedef is not w_bestbase.instancetypedef: + break + if w_type.nslots != w_bestbase.nslots: + break + w_type = w_bestbase + if w_type is not w_starttype: + return w_type + else: + return None + +def issublayout(w_layout1, w_layout2): + space = w_layout2.space + while w_layout1 is not w_layout2: + w_layout1 = find_best_base(space, w_layout1.bases_w) + if w_layout1 is None: + return False + w_layout1 = w_layout1.w_same_layout_as or w_layout1 + return True + +def find_best_base(space, bases_w): + """The best base is one of the bases in the given list: the one + whose layout a new type should use as a starting point. 
+ """ + w_bestbase = None + for w_candidate in bases_w: + if not isinstance(w_candidate, W_TypeObject): + continue + if w_bestbase is None: + w_bestbase = w_candidate # for now + continue + candtypedef = w_candidate.instancetypedef + besttypedef = w_bestbase.instancetypedef + if candtypedef is besttypedef: + # two candidates with the same typedef are equivalent unless + # one has extra slots over the other + if w_candidate.nslots > w_bestbase.nslots: + w_bestbase = w_candidate + elif issubtypedef(candtypedef, besttypedef): + w_bestbase = w_candidate + return w_bestbase + +def check_and_find_best_base(space, bases_w): + """The best base is one of the bases in the given list: the one + whose layout a new type should use as a starting point. + This version checks that bases_w is an acceptable tuple of bases. + """ + w_bestbase = find_best_base(space, bases_w) + if w_bestbase is None: + raise OperationError(space.w_TypeError, + space.wrap("a new-style class can't have " + "only classic bases")) + if not w_bestbase.instancetypedef.acceptable_as_base_class: + raise OperationError(space.w_TypeError, + space.wrap("type '%s' is not an " + "acceptable base class" % + w_bestbase.instancetypedef.name)) + + # check that all other bases' layouts are superclasses of the bestbase + w_bestlayout = w_bestbase.w_same_layout_as or w_bestbase + for w_base in bases_w: + if isinstance(w_base, W_TypeObject): + w_layout = w_base.w_same_layout_as or w_base + if not issublayout(w_bestlayout, w_layout): + raise OperationError(space.w_TypeError, + space.wrap("instance layout conflicts in " + "multiple inheritance")) + return w_bestbase + +def copy_flags_from_bases(w_self, w_bestbase): + hasoldstylebase = False + for w_base in w_self.bases_w: + if not isinstance(w_base, W_TypeObject): + hasoldstylebase = True + continue + w_self.hasdict = w_self.hasdict or w_base.hasdict + w_self.needsdel = w_self.needsdel or w_base.needsdel + w_self.weakrefable = w_self.weakrefable or w_base.weakrefable + w_self.nslots = w_bestbase.nslots + return hasoldstylebase + +def create_all_slots(w_self, hasoldstylebase): + space = w_self.space + dict_w = w_self.dict_w + if '__slots__' not in dict_w: + wantdict = True + wantweakref = True + else: + wantdict = False + wantweakref = False + w_slots = dict_w['__slots__'] + if space.is_true(space.isinstance(w_slots, space.w_str)): + slot_names_w = [w_slots] + else: + slot_names_w = space.unpackiterable(w_slots) + for w_slot_name in slot_names_w: + slot_name = space.str_w(w_slot_name) + if slot_name == '__dict__': + if wantdict or w_self.hasdict: + raise OperationError(space.w_TypeError, + space.wrap("__dict__ slot disallowed: " + "we already got one")) + wantdict = True + elif slot_name == '__weakref__': + if wantweakref or w_self.weakrefable: + raise OperationError(space.w_TypeError, + space.wrap("__weakref__ slot disallowed: " + "we already got one")) + wantweakref = True + else: + create_slot(w_self, slot_name) + wantdict = wantdict or hasoldstylebase + if wantdict: create_dict_slot(w_self) + if wantweakref: create_weakref_slot(w_self) + if '__del__' in dict_w: w_self.needsdel = True + +def create_slot(w_self, slot_name): + space = w_self.space + if not valid_slot_name(slot_name): + raise OperationError(space.w_TypeError, + space.wrap('__slots__ must be identifiers')) + # create member + slot_name = _mangle(slot_name, w_self.name) + # Force interning of slot names. 
+ slot_name = space.str_w(space.new_interned_str(slot_name)) + member = Member(w_self.nslots, slot_name, w_self) + w_self.dict_w[slot_name] = space.wrap(member) + w_self.nslots += 1 + +def create_dict_slot(w_self): + if not w_self.hasdict: + w_self.dict_w['__dict__'] = w_self.space.wrap(std_dict_descr) + w_self.hasdict = True + +def create_weakref_slot(w_self): + if not w_self.weakrefable: + w_self.dict_w['__weakref__'] = w_self.space.wrap(weakref_descr) + w_self.weakrefable = True + +def valid_slot_name(slot_name): + if len(slot_name) == 0 or slot_name[0].isdigit(): + return False + for c in slot_name: + if not c.isalnum() and c != '_': + return False + return True + +def setup_user_defined_type(w_self): + if len(w_self.bases_w) == 0: + w_self.bases_w = [w_self.space.w_object] + w_bestbase = check_and_find_best_base(w_self.space, w_self.bases_w) + w_self.instancetypedef = w_bestbase.instancetypedef + w_self.__flags__ = _HEAPTYPE + + hasoldstylebase = copy_flags_from_bases(w_self, w_bestbase) + create_all_slots(w_self, hasoldstylebase) + + w_self.w_same_layout_as = get_parent_layout(w_self) + ensure_common_attributes(w_self) + +def setup_builtin_type(w_self): + w_self.hasdict = w_self.instancetypedef.hasdict + w_self.weakrefable = w_self.instancetypedef.weakrefable + ensure_common_attributes(w_self) + +def ensure_common_attributes(w_self): + ensure_static_new(w_self) + ensure_doc_attr(w_self) + if w_self.is_heaptype(): + ensure_module_attr(w_self) + w_self.mro_w = [] # temporarily + compute_mro(w_self) + +def ensure_static_new(w_self): + # special-case __new__, as in CPython: + # if it is a Function, turn it into a static method + if '__new__' in w_self.dict_w: + w_new = w_self.dict_w['__new__'] + if isinstance(w_new, Function): + w_self.dict_w['__new__'] = StaticMethod(w_new) + +def ensure_doc_attr(w_self): + # make sure there is a __doc__ in dict_w + w_self.dict_w.setdefault('__doc__', w_self.space.w_None) + +def ensure_module_attr(w_self): + # initialize __module__ in the dict (user-defined types only) + if '__module__' not in w_self.dict_w: + space = w_self.space + try: + caller = space.getexecutioncontext().framestack.top() + except IndexError: + pass + else: + w_globals = caller.w_globals + w_name = space.finditem(w_globals, space.wrap('__name__')) + if w_name is not None: + w_self.dict_w['__module__'] = w_name + +def compute_mro(w_self): + if w_self.is_heaptype(): + space = w_self.space + w_metaclass = space.type(w_self) + w_where, w_mro_func = space.lookup_in_type_where(w_metaclass, 'mro') + assert w_mro_func is not None # because there is one in 'type' + if not space.is_w(w_where, space.w_type): + w_mro_meth = space.get(w_mro_func, w_self) + w_mro = space.call_function(w_mro_meth) + w_self.mro_w = space.unpackiterable(w_mro) + # do some checking here + return # done + w_self.mro_w = w_self.compute_default_mro() + +# ____________________________________________________________ def call__Type(space, w_type, __args__): # special case for type(x) Modified: pypy/branch/typeobject-init/pypy/objspace/std/typetype.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/typetype.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/typetype.py Tue Aug 26 17:46:08 2008 @@ -56,6 +56,8 @@ (space.type(w_type).getname(space, '?')))) return w_type +# ____________________________________________________________ + def _check(space, w_type, msg=None): from pypy.objspace.std.typeobject import W_TypeObject if not 
isinstance(w_type, W_TypeObject): @@ -78,62 +80,30 @@ def descr_get__mro__(space, w_type): w_type = _check(space, w_type) - # XXX this should be inside typeobject.py return space.newtuple(w_type.mro_w) def descr_mro(space, w_type): """Return a type's method resolution order.""" w_type = _check(space, w_type,"expected type") - return space.newlist(w_type.compute_mro()) + return space.newlist(w_type.compute_default_mro()) def descr_get__bases__(space, w_type): w_type = _check(space, w_type) return space.newtuple(w_type.bases_w) def mro_subclasses(space, w_type, temp): - from pypy.objspace.std.typeobject import W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject, compute_mro + temp.append((w_type, w_type.mro_w)) + compute_mro(w_type) for w_sc in w_type.get_subclasses(): assert isinstance(w_sc, W_TypeObject) - temp.append((w_sc, w_sc.mro_w)) - mro_internal(space, w_sc) mro_subclasses(space, w_sc, temp) -# should be a W_TypeObject method i guess -def mro_internal(space, w_type): - if not space.is_w(space.type(w_type), space.w_type): - #w_type.mro_w = [] - mro_func = space.lookup(w_type, 'mro') - mro_func_args = Arguments(space, [w_type]) - w_mro = space.call_args(mro_func, mro_func_args) - w_type.mro_w = space.unpackiterable(w_mro) - # do some checking here - else: - w_type.mro_w = w_type.compute_mro() - -def best_base(space, newstyle_bases_w): - if not newstyle_bases_w: - raise OperationError(space.w_TypeError, - space.wrap("a new-style class can't have only classic bases")) - w_bestbase = None - w_winner = None - for w_base in newstyle_bases_w: - w_candidate = w_base.get_layout() - if w_winner is None: - w_winner = w_candidate - w_bestbase = w_base - elif space.is_true(space.issubtype(w_winner, w_candidate)): - pass - elif space.is_true(space.issubtype(w_candidate, w_winner)): - w_winner = w_candidate - w_bestbase = w_base - else: - raise OperationError(space.w_TypeError, - space.wrap("multiple bases have instance lay-out conflict")) - return w_bestbase - def descr_set__bases__(space, w_type, w_value): - from pypy.objspace.std.typeobject import W_TypeObject # this assumes all app-level type objects are W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject + from pypy.objspace.std.typeobject import check_and_find_best_base + from pypy.objspace.std.typeobject import get_parent_layout w_type = _check(space, w_type) if not w_type.is_heaptype(): raise OperationError(space.w_TypeError, @@ -143,73 +113,61 @@ raise OperationError(space.w_TypeError, space.wrap("can only assign tuple" " to %s.__bases__, not %s"% - (w_type.name, - space.type(w_value).getname(space, '?')))) - if space.int_w(space.len(w_value)) == 0: + (w_type.name, + space.type(w_value).getname(space, '?')))) + newbases_w = space.unpackiterable(w_value) + if len(newbases_w) == 0: raise OperationError(space.w_TypeError, - space.wrap("can only assign non-empty tuple to %s.__bases__, not ()"% + space.wrap("can only assign non-empty tuple" + " to %s.__bases__, not ()"% w_type.name)) - new_newstyle_bases = [] - for w_base in space.unpackiterable(w_value): - if not isinstance(w_base, W_TypeObject): - w_typ = space.type(w_base) - if not space.is_w(w_typ, space.w_classobj): - raise OperationError(space.w_TypeError, - space.wrap("%s.__bases__ must be tuple " - "of old- or new-style classes" - ", not '%s'"% - (w_type.name, - w_typ.getname(space, '?')))) - else: - new_newstyle_bases.append(w_base) - if space.is_true(space.issubtype(w_base, w_type)): + + for w_newbase in newbases_w: + if isinstance(w_newbase, 
W_TypeObject): + if w_type in w_newbase.compute_default_mro(): raise OperationError(space.w_TypeError, - space.wrap("a __bases__ item causes an inheritance cycle")) + space.wrap("a __bases__ item causes" + " an inheritance cycle")) - new_base = best_base(space, new_newstyle_bases) + w_oldbestbase = check_and_find_best_base(space, w_type.bases_w) + w_newbestbase = check_and_find_best_base(space, newbases_w) + oldlayout = w_oldbestbase.get_full_instance_layout() + newlayout = w_newbestbase.get_full_instance_layout() - if w_type.w_bestbase.get_full_instance_layout() != new_base.get_full_instance_layout(): + if oldlayout != newlayout: raise OperationError(space.w_TypeError, - space.wrap("__bases__ assignment: '%s' object layout differs from '%s'" % - (w_type.getname(space, '?'), new_base.getname(space, '?')))) + space.wrap("__bases__ assignment: '%s' object layout" + " differs from '%s'" % + (w_newbestbase.getname(space, '?'), + w_oldbestbase.getname(space, '?')))) # invalidate the version_tag of all the current subclasses w_type.mutated() - saved_bases = w_type.bases_w - saved_base = w_type.w_bestbase - saved_mro = w_type.mro_w - - w_type.bases_w = space.unpackiterable(w_value) - w_type.w_bestbase = new_base - + # now we can go ahead and change 'w_type.bases_w' + saved_bases_w = w_type.bases_w temp = [] try: - mro_internal(space, w_type) - + for w_oldbase in saved_bases_w: + if isinstance(w_oldbase, W_TypeObject): + w_oldbase.remove_subclass(w_type) + w_type.bases_w = newbases_w + for w_newbase in newbases_w: + if isinstance(w_newbase, W_TypeObject): + w_newbase.add_subclass(w_type) + # try to recompute all MROs mro_subclasses(space, w_type, temp) - - for old_base in saved_bases: - if isinstance(old_base, W_TypeObject): - old_base.remove_subclass(w_type) - for new_base in new_newstyle_bases: - new_base.add_subclass(w_type) except: for cls, old_mro in temp: cls.mro_w = old_mro - w_type.bases_w = saved_bases - w_type.w_bestbase = saved_base - w_type.mro_w = saved_mro + w_type.bases_w = saved_bases_w raise - + assert w_type.w_same_layout_as is get_parent_layout(w_type) # invariant + def descr__base(space, w_type): + from pypy.objspace.std.typeobject import find_best_base w_type = _check(space, w_type) - if w_type.w_bestbase is not None: - return w_type.w_bestbase - elif w_type is not space.w_object: - return space.w_object - else: - return space.w_None + return find_best_base(space, w_type.bases_w) def descr__doc(space, w_type): if space.is_w(w_type, space.w_type): From arigo at codespeak.net Tue Aug 26 18:39:57 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 26 Aug 2008 18:39:57 +0200 (CEST) Subject: [pypy-svn] r57634 - pypy/branch/typeobject-init/pypy/objspace/std Message-ID: <20080826163957.BC06D169F82@codespeak.net> Author: arigo Date: Tue Aug 26 18:39:56 2008 New Revision: 57634 Modified: pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py Log: Fix test_shadowtracking. 
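The descr_set__bases__ rewrite in r57633 follows a save-and-roll-back shape: the old bases and every affected mro are remembered before anything is mutated, and are restored wholesale if recomputing an MRO fails. Stripped of the interp-level details, the pattern is roughly (illustrative names, not the actual PyPy code)::

    def reassign_bases(cls, new_bases, recompute_mros):
        saved_bases = cls.bases
        saved_mros = []                      # (class, old_mro) pairs, like 'temp'
        try:
            cls.bases = new_bases
            recompute_mros(cls, saved_mros)  # may raise for an inconsistent MRO
        except Exception:
            for klass, old_mro in saved_mros:
                klass.mro = old_mro          # undo each partially updated subclass
            cls.bases = saved_bases
            raise
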
Modified: pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py Tue Aug 26 18:39:56 2008 @@ -70,11 +70,13 @@ if overridetypedef is not None: setup_builtin_type(w_self) + custom_metaclass = False else: setup_user_defined_type(w_self) + custom_metaclass = not space.is_w(space.type(w_self), space.w_type) if space.config.objspace.std.withtypeversion: - if w_self.instancetypedef.hasdict: + if w_self.instancetypedef.hasdict or custom_metaclass: w_self.version_tag = None else: w_self.version_tag = VersionTag() From arigo at codespeak.net Tue Aug 26 19:22:29 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Tue, 26 Aug 2008 19:22:29 +0200 (CEST) Subject: [pypy-svn] r57635 - pypy/branch/typeobject-init/pypy/objspace/std Message-ID: <20080826172229.94EC5169EA6@codespeak.net> Author: arigo Date: Tue Aug 26 19:22:27 2008 New Revision: 57635 Modified: pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py Log: Fix for test_versionedtype. Modified: pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/typeobject.py Tue Aug 26 19:22:27 2008 @@ -48,6 +48,7 @@ from pypy.objspace.std.typetype import type_typedef as typedef lazyloaders = {} # can be overridden by specific instances + version_tag = None uses_object_getattribute = False # ^^^ for config.objspace.std.getattributeshortcut @@ -77,7 +78,7 @@ if space.config.objspace.std.withtypeversion: if w_self.instancetypedef.hasdict or custom_metaclass: - w_self.version_tag = None + pass else: w_self.version_tag = VersionTag() From cami at codespeak.net Wed Aug 27 10:48:51 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Wed, 27 Aug 2008 10:48:51 +0200 (CEST) Subject: [pypy-svn] r57638 - pypy/dist/pypy/lang/gameboy Message-ID: <20080827084851.7CA3C169FCA@codespeak.net> Author: cami Date: Wed Aug 27 10:48:49 2008 New Revision: 57638 Modified: pypy/dist/pypy/lang/gameboy/cartridge.py pypy/dist/pypy/lang/gameboy/gameboy.py pypy/dist/pypy/lang/gameboy/interrupt.py pypy/dist/pypy/lang/gameboy/joypad.py pypy/dist/pypy/lang/gameboy/video.py Log: addde comments and explanations from http://nocash.emubase.de/pandocs.txt Modified: pypy/dist/pypy/lang/gameboy/cartridge.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cartridge.py (original) +++ pypy/dist/pypy/lang/gameboy/cartridge.py Wed Aug 27 10:48:49 2008 @@ -169,6 +169,9 @@ return (checksum == self.get_checksum()) def verify_header(self): + """ + The memory at 0100-014F contains the cartridge header. 
+ """ if len(self.rom) < 0x0150: return False checksum = 0xE7 Modified: pypy/dist/pypy/lang/gameboy/gameboy.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/gameboy.py (original) +++ pypy/dist/pypy/lang/gameboy/gameboy.py Wed Aug 27 10:48:49 2008 @@ -144,6 +144,21 @@ pass def get_receiver(self, address): + """ + General Memory Map + 0000-3FFF 16KB ROM Bank 00 (in cartridge, fixed at bank 00) + 4000-7FFF 16KB ROM Bank 01..NN (in cartridge, switchable bank number) + 8000-9FFF 8KB Video RAM (VRAM) + A000-BFFF 8KB External RAM (in cartridge, switchable bank, if any) + C000-CFFF 4KB Work RAM Bank 0 (WRAM) + D000-DFFF 4KB Work RAM Bank 1 (WRAM) + E000-FDFF Same as C000-DDFF (ECHO) (typically not used) + FE00-FE9F Sprite Attribute Table (OAM) + FEA0-FEFF Not Usable + FF00-FF7F I/O Ports + FF80-FFFE High RAM (HRAM) + FFFF Interrupt Enable Register + """ if 0x0000 <= address <= 0x7FFF: self.print_receiver_msg(address, "memoryBank") return self.cartridge_manager.get_memory_bank() Modified: pypy/dist/pypy/lang/gameboy/interrupt.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/interrupt.py (original) +++ pypy/dist/pypy/lang/gameboy/interrupt.py Wed Aug 27 10:48:49 2008 @@ -3,7 +3,9 @@ class InterruptFlag(object): - + """ + An Interrupt Flag handles a single interrupt channel + """ def __init__(self, _reset, mask, call_code): self._reset = _reset self.mask = mask @@ -32,6 +34,28 @@ """ PyGirl Emulator Interrupt Controller + + V-Blank Interrupt + The V-Blank interrupt occurs ca. 59.7 times a second on a regular GB and ca. + 61.1 times a second on a Super GB (SGB). This interrupt occurs at the + beginning of the V-Blank period (LY=144). + During this period video hardware is not using video ram so it may be freely + accessed. This period lasts approximately 1.1 milliseconds. + + LCDC Status Interrupt + There are various reasons for this interrupt to occur as described by the STAT + register ($FF40). One very popular reason is to indicate to the user when the + video hardware is about to redraw a given LCD line. This can be useful for + dynamically controlling the SCX/SCY registers ($FF43/$FF42) to perform special + video effects. + + Joypad interrupt is requested when any of the above Input lines changes from + High to Low. Generally this should happen when a key becomes pressed + (provided that the button/direction key is enabled by above Bit4/5), + however, because of switch bounce, one or more High to Low transitions are + usually produced both when pressing or releasing a key. + + """ def __init__(self): Modified: pypy/dist/pypy/lang/gameboy/joypad.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/joypad.py (original) +++ pypy/dist/pypy/lang/gameboy/joypad.py Wed Aug 27 10:48:49 2008 @@ -6,8 +6,22 @@ class Joypad(iMemory): """ PyGirl Emulator - Joypad Input + + The eight gameboy buttons/direction keys are arranged in form of a 2x4 + matrix. Select either button or direction keys by writing to this register, + then read-out bit 0-3. 
+ Bit 7 - Not used + Bit 6 - Not used + Bit 5 - P15 Select Button Keys (0=Select) + Bit 4 - P14 Select Direction Keys (0=Select) + Bit 3 - P13 Input Down or Start (0=Pressed) (Read Only) + Bit 2 - P12 Input Up or Select (0=Pressed) (Read Only) + Bit 1 - P11 Input Left or Button B (0=Pressed) (Read Only) + Bit 0 - P10 Input Right or Button A (0=Pressed) (Read Only) + Note: Most programs are repeatedly reading from this port several times (the + first reads used as short delay, allowing the inputs to stabilize, and only + the value from the last read actually used). """ def __init__(self, joypad_driver, interrupt): Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Wed Aug 27 10:48:49 2008 @@ -32,16 +32,17 @@ # ----------------------------------------------------------------------------- class ControlRegister(object): - # used for enabled or disabled window or background - # Bit 7 - LCD Display Enable (0=Off, 1=On) - # Bit 6 - Window Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) - # Bit 5 - Window Display Enable (0=Off, 1=On) - # Bit 4 - BG & Window Tile Data Select (0=8800-97FF, 1=8000-8FFF) - # Bit 3 - BG Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) - # Bit 2 - OBJ (Sprite) Size (0=8x8, 1=8x16) - # Bit 1 - OBJ (Sprite) Display Enable (0=Off, 1=On) - # Bit 0 - BG Display (for CGB see below) (0=Off, 1=On) - + """ + used for enabled or disabled window or background + Bit 7 - LCD Display Enable (0=Off, 1=On) + Bit 6 - Window Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) + Bit 5 - Window Display Enable (0=Off, 1=On) + Bit 4 - BG & Window Tile Data Select (0=8800-97FF, 1=8000-8FFF) + Bit 3 - BG Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF) + Bit 2 - OBJ (Sprite) Size (0=8x8, 1=8x16) + Bit 1 - OBJ (Sprite) Display Enable (0=Off, 1=On) + Bit 0 - BG Display (for CGB see below) (0=Off, 1=On) + """ def __init__(self): self.reset() @@ -82,7 +83,6 @@ # ----------------------------------------------------------------------------- class StatusRegister(object): - """ Bit 6 - LYC=LY Coincidence Interrupt (1=Enable) (Read/Write) Bit 5 - Mode 2 OAM Interrupt (1=Enable) (Read/Write) @@ -95,7 +95,6 @@ 2: During Searching OAM-RAM 3: During Transfering Data to LCD Driver """ - def __init__(self): self.reset() @@ -194,7 +193,9 @@ self.frames = 0 self.frame_skip = 0 - + + # Read Write shared memory ------------------------------------------------- + def write(self, address, data): address = int(address) # assert data >= 0x00 and data <= 0xFF @@ -274,27 +275,11 @@ return self.vram[address - constants.VRAM_ADDR] return 0xFF + # Getters and Setters ------------------------------------------------------ + def get_cycles(self): return self.cycles - def emulate(self, ticks): - ticks = int(ticks) - if self.control.lcd_enabled: - self.cycles -= ticks - self.consume_cycles() - - def consume_cycles(self): - while self.cycles <= 0: - mode = self.status.get_mode() - if mode == 0: - self.emulate_hblank() - elif mode == 1: - self.emulate_vblank() - elif mode == 2: - self.emulate_oam() - else: - self.emulate_transfer() - def get_control(self): return self.control.read() @@ -332,26 +317,51 @@ self.status.get_mode() == 0x01 and \ self.status.line_y_compare_check(): self.interrupt.raise_interrupt(constants.LCD) - - def get_scroll_y(self): - return self.scroll_y - - def set_scroll_y(self, data): - self.scroll_y = data def get_scroll_x(self): + """ 
see set_scroll_x """ return self.scroll_x def set_scroll_x(self, data): + """ + Specifies the position in the 256x256 pixels BG map (32x32 tiles) which + is to be displayed at the upper/left LCD display position. + Values in range from 0-255 may be used for X/Y each, the video + controller automatically wraps back to the upper (left) position in BG + map when drawing exceeds the lower (right) border of the BG map area. + """ self.scroll_x = data + def get_scroll_y(self): + """ see set_scroll_x """ + return self.scroll_y + + def set_scroll_y(self, data): + """ see set_scroll_x """ + self.scroll_y = data def get_line_y(self): + """ see set_line_y """ return self.line_y + + def set_line_y(self): + """ + The LY indicates the vertical line to which the present data is + transferred to the LCD Driver. The LY can take on any value between 0 + through 153. The values between 144 and 153 indicate the V-Blank period. + Writing will reset the counter. + """ + pass def get_line_y_compare(self): + """ see set_line_y_compare""" return self.line_y_compare def set_line_y_compare(self, data): + """ + The gameboy permanently compares the value of the LYC and LY registers. + When both values are identical, the coincident bit in the STAT register + becomes set, and (if enabled) a STAT interrupt is requested. + """ self.line_y_compare = data if self.control.lcd_enabled: self.emulate_hblank_line_y_compare(stat_check=True) @@ -360,14 +370,50 @@ return self.dma def set_dma(self, data): + """ + Writing to this register launches a DMA transfer from ROM or RAM to OAM + memory (sprite attribute table). The written value specifies the + transfer source address divided by 100h, ie. source & destination are: + Source: XX00-XX9F ;XX in range from 00-F1h + Destination: FE00-FE9F + It takes 160 microseconds until the transfer has completed, during this + time the CPU can access only HRAM (memory at FF80-FFFE). For this + reason, the programmer must copy a short procedure into HRAM, and use + this procedure to start the transfer from inside HRAM, and wait until + the transfer has finished: + ld (0FF46h),a ;start DMA transfer, a=start address/100h + ld a,28h ;delay... + wait: ;total 5x40 cycles, approx 200ms + dec a ;1 cycle + jr nz,wait ;4 cycles + Most programs are executing this procedure from inside of their VBlank + procedure, but it is possible to execute it during display redraw also, + allowing to display more than 40 sprites on the screen (ie. for example + 40 sprites in upper half, and other 40 sprites in lower half of the + screen). + """ self.dma = data for index in range(constants.OAM_SIZE): self.oam[index] = self.memory.read((self.dma << 8) + index) def get_background_palette(self): + """ see set_background_palette""" return self.background_palette def set_background_palette(self, data): + """ + This register assigns gray shades to the color numbers of the BG and + Window tiles. + Bit 7-6 - Shade for Color Number 3 + Bit 5-4 - Shade for Color Number 2 + Bit 3-2 - Shade for Color Number 1 + Bit 1-0 - Shade for Color Number 0 + The four possible gray shades are: + 0 White + 1 Light gray + 2 Dark gray + 3 Black + """ if self.background_palette != data: self.background_palette = data self.dirty = True @@ -376,6 +422,11 @@ return self.object_palette_0 def set_object_palette_0(self, data): + """ + This register assigns gray shades for sprite palette 0. It works exactly + as BGP (FF47), except that the lower two bits aren't used because sprite + data 00 is transparent. 
+ """ if self.object_palette_0 != data: self.object_palette_0 = data self.dirty = True @@ -384,14 +435,29 @@ return self.object_palette_1 def set_object_palette_1(self, data): + """ + This register assigns gray shades for sprite palette 1. It works exactly + as BGP (FF47), except that the lower two bits aren't used because sprite + data 00 is transparent. + """ if self.object_palette_1 != data: self.object_palette_1 = data self.dirty = True def get_window_y(self): + """ see set_window_y """ return self.window_y def set_window_y(self, data): + """ + Specifies the upper/left positions of the Window area. (The window is an + alternate background area which can be displayed above of the normal + background. OBJs (sprites) may be still displayed above or behinf the + window, just as for normal BG.) + The window becomes visible (if enabled) when positions are set in range + WX=0..166, WY=0..143. A postion of WX=7, WY=0 locates the window at + upper left, it is then completly covering normal background. + """ self.window_y = data def get_window_x(self): @@ -400,16 +466,6 @@ def set_window_x(self, data): self.window_x = data - def emulate_oam(self): - self.set_mode_3_begin() - - def emulate_transfer(self): - if self.transfer: - if self.display: - self.draw_line() - self.set_mode_3_end() - else: - self.set_mode_0() # interrupt checks --------------------------------------------------- @@ -433,7 +489,24 @@ self.interrupt.raise_interrupt(constants.LCD) # mode setting ----------------------------------------------------------- - + """ + The two lower STAT bits show the current status of the LCD controller. + Mode 0: The LCD controller is in the H-Blank period and + the CPU can access both the display RAM (8000h-9FFFh) + and OAM (FE00h-FE9Fh) + + Mode 1: The LCD contoller is in the V-Blank period (or the + display is disabled) and the CPU can access both the + display RAM (8000h-9FFFh) and OAM (FE00h-FE9Fh) + + Mode 2: The LCD controller is reading from OAM memory. + The CPU access OAM memory (FE00h-FE9Fh) + during this period. + + Mode 3: The LCD controller is reading from both OAM and VRAM, + The CPU access OAM and VRAM during this period. + CGB Mode: Cannot access Palette Data (FF69,FF6B) either. 
+ """ def set_mode_3_begin(self): self.status.set_mode(3) self.cycles += constants.MODE_3_BEGIN_TICKS @@ -470,7 +543,36 @@ self.status.set_mode(1) self.cycles += constants.MODE_1_END_TICKS - # ---------------------------------------------------------------- + # emulation ---------------------------------------------------------------- + + def emulate(self, ticks): + ticks = int(ticks) + if self.control.lcd_enabled: + self.cycles -= ticks + self.consume_cycles() + + def consume_cycles(self): + while self.cycles <= 0: + mode = self.status.get_mode() + if mode == 0: + self.emulate_hblank() + elif mode == 1: + self.emulate_vblank() + elif mode == 2: + self.emulate_oam() + else: + self.emulate_transfer() + + def emulate_oam(self): + self.set_mode_3_begin() + + def emulate_transfer(self): + if self.transfer: + if self.display: + self.draw_line() + self.set_mode_3_end() + else: + self.set_mode_0() def emulate_hblank(self): self.line_y+=1 @@ -529,6 +631,8 @@ else: self.set_mode_1() + # graphics handling -------------------------------------------------------- + def draw_frame(self): self.driver.update_display() From antocuni at codespeak.net Wed Aug 27 14:47:56 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Wed, 27 Aug 2008 14:47:56 +0200 (CEST) Subject: [pypy-svn] r57644 - in pypy/branch/oo-jit/pypy: jit/codegen jit/codegen/cli/test jit/rainbow/test translator/cli/test Message-ID: <20080827124756.91770169E7B@codespeak.net> Author: antocuni Date: Wed Aug 27 14:47:55 2008 New Revision: 57644 Added: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (contents, props changed) Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_promotion.py pypy/branch/oo-jit/pypy/jit/codegen/model.py pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py Log: add a new mixin to compile and test rainbow tests through gencli; probably lot of tests in test_gencli_interpreter.py still fails Added: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- (empty file) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Wed Aug 27 14:47:55 2008 @@ -0,0 +1,26 @@ +import py +from pypy.jit.codegen.cli.rgenop import RCliGenOp +from pypy.jit.rainbow.test.test_interpreter import TestOOType as RainbowTest +from pypy.translator.cli.test.runtest import compile_graph + + +class CompiledCliMixin(object): + RGenOp = RCliGenOp + translate_support_code = True + + def interpret(self, ll_function, values, opt_consts=[], *args, **kwds): + values, writer, jitcode = self.convert_and_serialize(ll_function, values, **kwds) + translator = self.rtyper.annotator.translator + func = compile_graph(self.rewriter.portal_entry_graph, translator) + return func(*values) + + + def check_insns(self, expected=None, **counts): + "Cannot check instructions in the generated assembler." 
+ +class TestRainbowCli(CompiledCliMixin, RainbowTest): + + # for the individual tests see + # ====> ../../../rainbow/test/test_interpreter.py + + pass Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_promotion.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_promotion.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_promotion.py Wed Aug 27 14:47:55 2008 @@ -31,3 +31,5 @@ test_vstruct_unfreeze = skip test_promote_after_call = skip test_merge_then_promote = skip + test_promote_class = skip + test_read___class___after_promotion = skip Modified: pypy/branch/oo-jit/pypy/jit/codegen/model.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/model.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/model.py Wed Aug 27 14:47:55 2008 @@ -97,6 +97,8 @@ ## def genop_oonewarray(self, alloctoken, gv_length): ## def genop_oosend(self, methtoken, gv_self, args_gv): ## def genop_oononnull(self, gv_obj): +## def genop_ooisnull(self, gv_obj): + ## def genop_oogetfield(self, fieldtoken, gv_obj): ## def genop_oosetfield(self, fieldtoken, gv_obj, gv_value): Modified: pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Wed Aug 27 14:47:55 2008 @@ -194,12 +194,16 @@ self._cache_order.append(key) return self.writer, self.jitcode - def interpret(self, ll_function, values, opt_consts=[], *args, **kwds): + def convert_and_serialize(self, ll_function, values, **kwds): if hasattr(ll_function, 'convert_arguments'): assert len(ll_function.convert_arguments) == len(values) values = [decoder(value) for decoder, value in zip( ll_function.convert_arguments, values)] writer, jitcode= self.serialize(ll_function, values, **kwds) + return values, writer, jitcode + + def interpret(self, ll_function, values, opt_consts=[], *args, **kwds): + values, writer, jitcode = self.convert_and_serialize(ll_function, values, **kwds) argcolors = [] for i, ll_val in enumerate(values): color = writer.varcolor(self.graph.startblock.inputargs[i]) Modified: pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py ============================================================================== --- pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py (original) +++ pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py Wed Aug 27 14:47:55 2008 @@ -150,6 +150,15 @@ unpatch_os(olddefs) # restore original values return CliFunctionWrapper(exe_name, func.__name__, auto_raise_exc) +def compile_graph(graph, translator, auto_raise_exc=False, + exctrans=False, nowrap=False): + gen = _build_gen_from_graph(graph, translator, exctrans, nowrap) + gen.generate_source() + exe_name = gen.build_exe() + name = getattr(graph, 'name', '') + return CliFunctionWrapper(exe_name, name, auto_raise_exc) + + def _build_gen(func, annotation, graph=None, backendopt=True, exctrans=False, annotatorpolicy=None, nowrap=False): try: @@ -180,12 +189,15 @@ if getoption('view'): t.view() + return _build_gen_from_graph(main_graph, t, exctrans, nowrap) + +def _build_gen_from_graph(graph, t, exctrans=False, nowrap=False): if getoption('wd'): tmpdir = py.path.local('.') else: tmpdir = udir - return GenCli(tmpdir, t, TestEntryPoint(main_graph, not nowrap), exctrans=exctrans) + 
return GenCli(tmpdir, t, TestEntryPoint(graph, not nowrap), exctrans=exctrans) class CliFunctionWrapper(object): def __init__(self, exe_name, name=None, auto_raise_exc=False): From arigo at codespeak.net Wed Aug 27 15:01:40 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 15:01:40 +0200 (CEST) Subject: [pypy-svn] r57645 - pypy/dist/pypy/translator/tool Message-ID: <20080827130140.86705169F33@codespeak.net> Author: arigo Date: Wed Aug 27 15:01:36 2008 New Revision: 57645 Modified: pypy/dist/pypy/translator/tool/cbuild.py Log: Revert r57623 because its effect is probably not as intended: the .o files end up in a strange subdirectory of where they used to end up. It breaks a few things -- e.g. the generated Makefile no longer matches what cbuild does. fijal: this should probably be done on branch Modified: pypy/dist/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/dist/pypy/translator/tool/cbuild.py (original) +++ pypy/dist/pypy/translator/tool/cbuild.py Wed Aug 27 15:01:36 2008 @@ -633,10 +633,9 @@ old = cfile.dirpath().chdir() try: - res = compiler.compile([str(cfile)], + res = compiler.compile([cfile.basename], include_dirs=self.eci.include_dirs, - extra_preargs=compile_extra, - output_dir=str(cfile.dirpath())) + extra_preargs=compile_extra) assert len(res) == 1 cobjfile = py.path.local(res[0]) assert cobjfile.check() From arigo at codespeak.net Wed Aug 27 15:19:00 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 15:19:00 +0200 (CEST) Subject: [pypy-svn] r57646 - pypy/branch/typeobject-init/pypy/objspace/std Message-ID: <20080827131900.27946169F44@codespeak.net> Author: arigo Date: Wed Aug 27 15:18:58 2008 New Revision: 57646 Modified: pypy/branch/typeobject-init/pypy/objspace/std/typetype.py Log: Minor cleanup. Modified: pypy/branch/typeobject-init/pypy/objspace/std/typetype.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/std/typetype.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/std/typetype.py Wed Aug 27 15:18:58 2008 @@ -108,7 +108,7 @@ if not w_type.is_heaptype(): raise OperationError(space.w_TypeError, space.wrap("can't set %s.__bases__" % - w_type.name)) + (w_type.name,))) if not space.is_true(space.isinstance(w_value, space.w_tuple)): raise OperationError(space.w_TypeError, space.wrap("can only assign tuple" @@ -120,7 +120,7 @@ raise OperationError(space.w_TypeError, space.wrap("can only assign non-empty tuple" " to %s.__bases__, not ()"% - w_type.name)) + (w_type.name,))) for w_newbase in newbases_w: if isinstance(w_newbase, W_TypeObject): From arigo at codespeak.net Wed Aug 27 15:19:14 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 15:19:14 +0200 (CEST) Subject: [pypy-svn] r57647 - in pypy/branch/typeobject-init/pypy/objspace/flow: . test Message-ID: <20080827131914.7DF11169F44@codespeak.net> Author: arigo Date: Wed Aug 27 15:19:13 2008 New Revision: 57647 Modified: pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py Log: Test and fix in the flow object space (duh). 
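The pattern this is about, as a minimal sketch in plain Python (illustrative only, not taken from the commit): a bare "except:" re-raising around a loop. During flow graph construction the space's next() can raise RuntimeError implicitly, and re-raising it from the bare except made it explicit, which previously aborted graph construction.

    def f(lst):
        try:
            for x in lst:    # next() may raise an implicit RuntimeError here
                pass
        except:
            raise            # re-raising makes that exception explicit

    f([1, 2, 3])             # harmless when executed normally, outside the flow space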
Modified: pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py Wed Aug 27 15:19:13 2008 @@ -292,7 +292,8 @@ e.w_type is self.space.w_ImportError): raise ImportError('import statement always raises %s' % ( e,)) - if e.w_type is self.space.w_RuntimeError: + if (e.w_type is self.space.w_RuntimeError and + not hasattr(e, '_comes_from_implicit')): raise RuntimeError('during flow graph construction: %r' % ( e.w_value,)) link = self.make_link([e.w_type, e.w_value], self.graph.exceptblock) @@ -382,7 +383,9 @@ operr = ExecutionContext.sys_exc_info(self) if isinstance(operr, ImplicitOperationError): # re-raising an implicit operation makes it an explicit one + src = operr operr = OperationError(operr.w_type, operr.w_value) + operr._comes_from_implicit = src return operr # hack for unrolling iterables, don't use this Modified: pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py Wed Aug 27 15:19:13 2008 @@ -337,6 +337,18 @@ traverse(find_exceptions, x) assert found == {ValueError: True, ZeroDivisionError: True, OverflowError: True} + def loop_in_bare_except_bug(lst): + try: + for x in lst: + pass + except: + raise + + def test_loop_in_bare_except_bug(self): + x = self.codetest(self.loop_in_bare_except_bug) + simplify_graph(x) + self.show(x) + #__________________________________________________________ def freevar(self, x): def adder(y): From arigo at codespeak.net Wed Aug 27 15:47:08 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 15:47:08 +0200 (CEST) Subject: [pypy-svn] r57649 - in pypy/branch/typeobject-init/pypy/objspace/flow: . test Message-ID: <20080827134708.5362E16A039@codespeak.net> Author: arigo Date: Wed Aug 27 15:47:07 2008 New Revision: 57649 Modified: pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py pypy/branch/typeobject-init/pypy/objspace/flow/objspace.py pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py Log: Improve the test, showing that r57647 didn't fix anything. Revert r57647 and try another approach. 
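The new approach boils down to a small idiom: the space no longer keeps a prebuilt w_RuntimeError constant, and the attribute becomes a property that raises as soon as anything touches it during flow graph construction. A simplified, self-contained sketch of that idiom (FakeSpace is a made-up name here; the real change is in the diff below):

    class FakeSpace(object):
        def w_RuntimeError(self):
            # looked up as a plain attribute; any code path that needs the
            # constant during flow graph construction fails loudly instead
            raise RuntimeError("the interpreter raises RuntimeError during "
                               "flow graph construction")
        w_RuntimeError = property(w_RuntimeError)

    space = FakeSpace()
    # space.w_RuntimeError   <- raises RuntimeError immediately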
Modified: pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/flow/flowcontext.py Wed Aug 27 15:47:07 2008 @@ -292,10 +292,6 @@ e.w_type is self.space.w_ImportError): raise ImportError('import statement always raises %s' % ( e,)) - if (e.w_type is self.space.w_RuntimeError and - not hasattr(e, '_comes_from_implicit')): - raise RuntimeError('during flow graph construction: %r' % ( - e.w_value,)) link = self.make_link([e.w_type, e.w_value], self.graph.exceptblock) self.recorder.crnt_block.closeblock(link) @@ -383,9 +379,7 @@ operr = ExecutionContext.sys_exc_info(self) if isinstance(operr, ImplicitOperationError): # re-raising an implicit operation makes it an explicit one - src = operr operr = OperationError(operr.w_type, operr.w_value) - operr._comes_from_implicit = src return operr # hack for unrolling iterables, don't use this Modified: pypy/branch/typeobject-init/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/flow/objspace.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/flow/objspace.py Wed Aug 27 15:47:07 2008 @@ -51,8 +51,7 @@ self.w_tuple = Constant(tuple) self.concrete_mode = 0 for exc in [KeyError, ValueError, IndexError, StopIteration, - AssertionError, TypeError, AttributeError, ImportError, - RuntimeError]: + AssertionError, TypeError, AttributeError, ImportError]: clsname = exc.__name__ setattr(self, 'w_'+clsname, Constant(exc)) # the following exceptions are the ones that should not show up @@ -374,7 +373,7 @@ if outcome is StopIteration: raise OperationError(self.w_StopIteration, w_exc_value) elif outcome is RuntimeError: - raise flowcontext.ImplicitOperationError(self.w_RuntimeError, + raise flowcontext.ImplicitOperationError(Constant(RuntimeError), w_exc_value) else: return w_item @@ -472,6 +471,12 @@ raise KeyboardInterrupt w_KeyboardInterrupt = property(w_KeyboardInterrupt) + def w_RuntimeError(self): + # XXX same as w_KeyboardInterrupt() + raise RuntimeError("the interpreter raises RuntimeError during " + "flow graph construction") + w_RuntimeError = property(w_RuntimeError) + # the following gives us easy access to declare more for applications: NOT_REALLY_CONST = { Constant(sys): { Modified: pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py ============================================================================== --- pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py (original) +++ pypy/branch/typeobject-init/pypy/objspace/flow/test/test_objspace.py Wed Aug 27 15:47:07 2008 @@ -342,6 +342,7 @@ for x in lst: pass except: + lst.append(5) raise def test_loop_in_bare_except_bug(self): From antocuni at codespeak.net Wed Aug 27 15:59:22 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Wed, 27 Aug 2008 15:59:22 +0200 (CEST) Subject: [pypy-svn] r57650 - pypy/branch/oo-jit/pypy/jit/codegen/cli/test Message-ID: <20080827135922.9A22A169FE1@codespeak.net> Author: antocuni Date: Wed Aug 27 15:59:21 2008 New Revision: 57650 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Log: skip failing tests Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- 
pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Wed Aug 27 15:59:21 2008 @@ -23,4 +23,57 @@ # for the individual tests see # ====> ../../../rainbow/test/test_interpreter.py - pass + def skip(self): + py.test.skip('in progress') + + test_simple_struct = skip + test_complex_struct = skip + test_degenerate_with_voids = skip + test_arith_plus_minus = skip + test_plus_minus = skip + test_red_virtual_container = skip + test_red_propagate = skip + test_merge_structures = skip + test_green_with_side_effects = skip + test_compile_time_const_tuple = skip + test_green_deepfrozen_oosend = skip + test_direct_oosend_with_green_self = skip + test_builtin_oosend_with_green_args = skip + test_residual_red_call = skip + test_residual_red_call_with_exc = skip + test_simple_meth = skip + test_simple_red_meth = skip + test_simple_red_meth_vars_around = skip + test_yellow_meth_with_green_result = skip + test_simple_indirect_call = skip + test_normalize_indirect_call = skip + test_normalize_indirect_call_more = skip + test_green_char_at_merge = skip + test_self_referential_structures = skip + test_known_nonzero = skip + test_debug_assert_ptr_nonzero = skip + test_indirect_red_call = skip + test_indirect_red_call_with_exc = skip + test_indirect_gray_call = skip + test_indirect_residual_red_call = skip + test_constant_indirect_red_call = skip + test_constant_indirect_red_call_no_result = skip + test_indirect_sometimes_residual_pure_red_call = skip + test_red_int_add_ovf = skip + test_nonzeroness_assert_while_compiling = skip + test_segfault_while_compiling = skip + test_learn_nonzeroness = skip + test_freeze_booleffects_correctly = skip + test_ptrequality = skip + test_void_args = skip + test_red_isinstance = skip + test_red_isinstance_degenerated = skip + test_simple_array = skip + test_arraysize = skip + test_setarrayitem = skip + test_red_array = skip + test_degenerated_before_return = skip + test_degenerated_before_return_2 = skip + test_degenerated_at_return = skip + test_degenerated_via_substructure = skip + test_red_subclass = skip From arigo at codespeak.net Wed Aug 27 17:36:03 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 17:36:03 +0200 (CEST) Subject: [pypy-svn] r57652 - pypy/branch/typeobject-init/pypy/translator/tool Message-ID: <20080827153603.F2B57169E95@codespeak.net> Author: arigo Date: Wed Aug 27 17:36:01 2008 New Revision: 57652 Modified: pypy/branch/typeobject-init/pypy/translator/tool/cbuild.py Log: Revert r57623 (i.e. merge r57645 from trunk). Modified: pypy/branch/typeobject-init/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/branch/typeobject-init/pypy/translator/tool/cbuild.py (original) +++ pypy/branch/typeobject-init/pypy/translator/tool/cbuild.py Wed Aug 27 17:36:01 2008 @@ -633,10 +633,9 @@ old = cfile.dirpath().chdir() try: - res = compiler.compile([str(cfile)], + res = compiler.compile([cfile.basename], include_dirs=self.eci.include_dirs, - extra_preargs=compile_extra, - output_dir=str(cfile.dirpath())) + extra_preargs=compile_extra) assert len(res) == 1 cobjfile = py.path.local(res[0]) assert cobjfile.check() From arigo at codespeak.net Wed Aug 27 17:41:02 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 17:41:02 +0200 (CEST) Subject: [pypy-svn] r57653 - in pypy/dist/pypy/objspace/flow: . 
test Message-ID: <20080827154102.CE6DD16A079@codespeak.net> Author: arigo Date: Wed Aug 27 17:41:01 2008 New Revision: 57653 Modified: pypy/dist/pypy/objspace/flow/flowcontext.py pypy/dist/pypy/objspace/flow/objspace.py pypy/dist/pypy/objspace/flow/test/test_objspace.py Log: A test and a somewhat hackish fix for the flow object space (duh). Modified: pypy/dist/pypy/objspace/flow/flowcontext.py ============================================================================== --- pypy/dist/pypy/objspace/flow/flowcontext.py (original) +++ pypy/dist/pypy/objspace/flow/flowcontext.py Wed Aug 27 17:41:01 2008 @@ -292,9 +292,6 @@ e.w_type is self.space.w_ImportError): raise ImportError('import statement always raises %s' % ( e,)) - if e.w_type is self.space.w_RuntimeError: - raise RuntimeError('during flow graph construction: %r' % ( - e.w_value,)) link = self.make_link([e.w_type, e.w_value], self.graph.exceptblock) self.recorder.crnt_block.closeblock(link) Modified: pypy/dist/pypy/objspace/flow/objspace.py ============================================================================== --- pypy/dist/pypy/objspace/flow/objspace.py (original) +++ pypy/dist/pypy/objspace/flow/objspace.py Wed Aug 27 17:41:01 2008 @@ -51,8 +51,7 @@ self.w_tuple = Constant(tuple) self.concrete_mode = 0 for exc in [KeyError, ValueError, IndexError, StopIteration, - AssertionError, TypeError, AttributeError, ImportError, - RuntimeError]: + AssertionError, TypeError, AttributeError, ImportError]: clsname = exc.__name__ setattr(self, 'w_'+clsname, Constant(exc)) # the following exceptions are the ones that should not show up @@ -374,7 +373,7 @@ if outcome is StopIteration: raise OperationError(self.w_StopIteration, w_exc_value) elif outcome is RuntimeError: - raise flowcontext.ImplicitOperationError(self.w_RuntimeError, + raise flowcontext.ImplicitOperationError(Constant(RuntimeError), w_exc_value) else: return w_item @@ -472,6 +471,12 @@ raise KeyboardInterrupt w_KeyboardInterrupt = property(w_KeyboardInterrupt) + def w_RuntimeError(self): + # XXX same as w_KeyboardInterrupt() + raise RuntimeError("the interpreter raises RuntimeError during " + "flow graph construction") + w_RuntimeError = property(w_RuntimeError) + # the following gives us easy access to declare more for applications: NOT_REALLY_CONST = { Constant(sys): { Modified: pypy/dist/pypy/objspace/flow/test/test_objspace.py ============================================================================== --- pypy/dist/pypy/objspace/flow/test/test_objspace.py (original) +++ pypy/dist/pypy/objspace/flow/test/test_objspace.py Wed Aug 27 17:41:01 2008 @@ -337,6 +337,19 @@ traverse(find_exceptions, x) assert found == {ValueError: True, ZeroDivisionError: True, OverflowError: True} + def loop_in_bare_except_bug(lst): + try: + for x in lst: + pass + except: + lst.append(5) + raise + + def test_loop_in_bare_except_bug(self): + x = self.codetest(self.loop_in_bare_except_bug) + simplify_graph(x) + self.show(x) + #__________________________________________________________ def freevar(self, x): def adder(y): From arigo at codespeak.net Wed Aug 27 17:42:32 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 17:42:32 +0200 (CEST) Subject: [pypy-svn] r57654 - in pypy/dist/pypy/objspace/std: . 
test Message-ID: <20080827154232.490E016A085@codespeak.net> Author: arigo Date: Wed Aug 27 17:42:31 2008 New Revision: 57654 Modified: pypy/dist/pypy/objspace/std/test/test_typeobject.py pypy/dist/pypy/objspace/std/typeobject.py pypy/dist/pypy/objspace/std/typetype.py Log: issue390 resolved Merge of the typeobject-init branch: * Refactor W_TypeObject.__init__() into a small forest of helpers. * Use some of the same helpers for assignment to __bases__. * More tests. Modified: pypy/dist/pypy/objspace/std/test/test_typeobject.py ============================================================================== --- pypy/dist/pypy/objspace/std/test/test_typeobject.py (original) +++ pypy/dist/pypy/objspace/std/test/test_typeobject.py Wed Aug 27 17:42:31 2008 @@ -111,6 +111,20 @@ class AppTestTypeObject: + + def test_call_type(self): + assert type(42) is int + C = type('C', (object,), {'x': lambda: 42}) + unbound_meth = C.x + raises(TypeError, unbound_meth) + assert unbound_meth.im_func() == 42 + raises(TypeError, type) + raises(TypeError, type, 'test', (object,)) + raises(TypeError, type, 'test', (object,), {}, 42) + raises(TypeError, type, 42, (object,), {}) + raises(TypeError, type, 'test', 42, {}) + raises(TypeError, type, 'test', (object,), 42) + def test_bases(self): assert int.__bases__ == (object,) class X: @@ -314,17 +328,98 @@ raise TestFailed, "didn't catch MRO conflict" def test_mutable_bases_versus_nonheap_types(self): - skip("in-progress") class A(int): - __slots__ = [] + pass + class B(int): + __slots__ = ['b'] class C(int): pass raises(TypeError, 'C.__bases__ = (A,)') + raises(TypeError, 'C.__bases__ = (B,)') + raises(TypeError, 'C.__bases__ = (C,)') raises(TypeError, 'int.__bases__ = (object,)') C.__bases__ = (int,) + #--- the following raises on CPython but works on PyPy. + #--- I don't see an obvious reason why it should fail... 
+ import sys + if '__pypy__' not in sys.builtin_module_names: + skip("works on PyPy only") + class MostlyLikeInt(int): + __slots__ = [] + C.__bases__ = (MostlyLikeInt,) + + def test_mutable_bases_versus_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b1', 'b2'] + class C(B): + pass + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_versus_weakref(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['__weakref__'] + class C(B): + pass + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_same_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class C(B): + pass + c = C() + c.a = 42 + assert C.__mro__ == (C, B, A, object) + C.__bases__ = (A,) + assert C.__mro__ == (C, A, object) + assert c.a == 42 + + def test_mutable_bases_versus_slots_2(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b1', 'b2'] + class C(B): + __slots__ = ['c'] + raises(TypeError, 'C.__bases__ = (A,)') + + def test_mutable_bases_keeping_slots(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class C(B): + __slots__ = ['c'] + c = C() + c.a = 42 + c.c = 85 + assert C.__mro__ == (C, B, A, object) + C.__bases__ = (A,) + assert C.__mro__ == (C, A, object) + assert c.a == 42 + assert c.c == 85 + + class D(A): + __slots__ = [] + C.__bases__ = (B, D) + assert C.__mro__ == (C, B, D, A, object) + assert c.a == 42 + assert c.c == 85 + raises(TypeError, 'C.__bases__ = (B, D, B)') + + class E(A): + __slots__ = ['e'] + raises(TypeError, 'C.__bases__ = (B, E)') + raises(TypeError, 'C.__bases__ = (E, B)') + raises(TypeError, 'C.__bases__ = (E,)') def test_compatible_slot_layout(self): - skip("in-progress") class A(object): __slots__ = ['a'] class B(A): @@ -410,6 +505,8 @@ assert B_mro().b == 1 assert getattr(B_mro, 'a', None) == None assert getattr(B_mro(), 'a', None) == None + # also check what the built-in mro() method would return for 'B_mro' + assert type.mro(B_mro) == [B_mro, A_mro, object] def test_abstract_mro(self): class A1: # old-style class @@ -552,6 +649,53 @@ assert a.__ == 4 assert a.__dict__ == {} + def test_slots_multiple_inheritance(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class E(A): + __slots__ = ['e'] + class C(B, E): + pass + c = C() + c.a = 42 + c.e = 85 + assert c.a == 42 + assert c.e == 85 + + def test_base_attr(self): + # check the '__base__' + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = [] + class E(A): + __slots__ = ['e'] + class C(B, E): + pass + class D(A): + __slots__ = [] + class F(B, D): + pass + assert C.__base__ is E + assert F.__base__ is B + assert bool.__base__ is int + assert int.__base__ is object + assert object.__base__ is None + + def test_cannot_subclass(self): + raises(TypeError, type, 'A', (bool,), {}) + + def test_slot_conflict(self): + class A(object): + __slots__ = ['a'] + class B(A): + __slots__ = ['b'] + class E(A): + __slots__ = ['e'] + raises(TypeError, type, 'C', (B, E), {}) + def test_repr(self): globals()['__name__'] = 'a' class A(object): Modified: pypy/dist/pypy/objspace/std/typeobject.py ============================================================================== --- pypy/dist/pypy/objspace/std/typeobject.py (original) +++ pypy/dist/pypy/objspace/std/typeobject.py Wed Aug 27 17:42:31 2008 @@ -1,6 +1,5 @@ from pypy.objspace.std.objspace import * from pypy.interpreter.function import Function, StaticMethod -from pypy.interpreter.argument import 
Arguments from pypy.interpreter import gateway from pypy.interpreter.typedef import weakref_descr from pypy.objspace.std.stdtypedef import std_dict_descr, issubtypedef, Member @@ -49,6 +48,7 @@ from pypy.objspace.std.typetype import type_typedef as typedef lazyloaders = {} # can be overridden by specific instances + version_tag = None uses_object_getattribute = False # ^^^ for config.objspace.std.getattributeshortcut @@ -60,166 +60,25 @@ w_self.name = name w_self.bases_w = bases_w w_self.dict_w = dict_w - w_self.ensure_static__new__() w_self.nslots = 0 + w_self.hasdict = False w_self.needsdel = False - w_self.w_bestbase = None + w_self.weakrefable = False + w_self.w_same_layout_as = None w_self.weak_subclasses = [] - - # make sure there is a __doc__ in dict_w - if '__doc__' not in dict_w: - dict_w['__doc__'] = space.w_None + w_self.__flags__ = 0 # or _HEAPTYPE + w_self.instancetypedef = overridetypedef if overridetypedef is not None: - w_self.instancetypedef = overridetypedef - w_self.hasdict = overridetypedef.hasdict - w_self.weakrefable = overridetypedef.weakrefable - w_self.__flags__ = 0 # not a heaptype - if overridetypedef.base is not None: - w_self.w_bestbase = space.gettypeobject(overridetypedef.base) + setup_builtin_type(w_self) + custom_metaclass = False else: - w_self.__flags__ = _HEAPTYPE - # initialize __module__ in the dict - if '__module__' not in dict_w: - try: - caller = space.getexecutioncontext().framestack.top() - except IndexError: - w_globals = w_locals = space.newdict() - else: - w_globals = caller.w_globals - w_str_name = space.wrap('__name__') - w_name = space.finditem(w_globals, w_str_name) - if w_name is not None: - dict_w['__module__'] = w_name - # find the most specific typedef - instancetypedef = object_typedef - for w_base in bases_w: - if not isinstance(w_base, W_TypeObject): - continue - if issubtypedef(w_base.instancetypedef, instancetypedef): - if instancetypedef is not w_base.instancetypedef: - instancetypedef = w_base.instancetypedef - w_self.w_bestbase = w_base - elif not issubtypedef(instancetypedef, w_base.instancetypedef): - raise OperationError(space.w_TypeError, - space.wrap("instance layout conflicts in " - "multiple inheritance")) - if not instancetypedef.acceptable_as_base_class: - raise OperationError(space.w_TypeError, - space.wrap("type '%s' is not an " - "acceptable base class" % - instancetypedef.name)) - w_self.instancetypedef = instancetypedef - w_self.hasdict = False - w_self.weakrefable = False - hasoldstylebase = False - w_most_derived_base_with_slots = None - w_newstyle = None - for w_base in bases_w: - if not isinstance(w_base, W_TypeObject): - hasoldstylebase = True - continue - if not w_newstyle: - w_newstyle = w_base - if w_base.nslots != 0: - if w_most_derived_base_with_slots is None: - w_most_derived_base_with_slots = w_base - else: - if space.is_true(space.issubtype(w_base, w_most_derived_base_with_slots)): - w_most_derived_base_with_slots = w_base - elif not space.is_true(space.issubtype(w_most_derived_base_with_slots, w_base)): - raise OperationError(space.w_TypeError, - space.wrap("instance layout conflicts in " - "multiple inheritance")) - w_self.hasdict = w_self.hasdict or w_base.hasdict - w_self.needsdel = w_self.needsdel or w_base.needsdel - w_self.weakrefable = w_self.weakrefable or w_base.weakrefable - if not w_newstyle: # only classic bases - raise OperationError(space.w_TypeError, - space.wrap("a new-style class can't have only classic bases")) + setup_user_defined_type(w_self) + custom_metaclass = not 
space.is_w(space.type(w_self), space.w_type) - if w_most_derived_base_with_slots: - nslots = w_most_derived_base_with_slots.nslots - w_self.w_bestbase = w_most_derived_base_with_slots - else: - nslots = 0 - - if w_self.w_bestbase is None: - w_self.w_bestbase = w_newstyle - - wantdict = True - wantweakref = True - if '__slots__' in dict_w: - wantdict = False - wantweakref = False - - w_slots = dict_w['__slots__'] - if space.is_true(space.isinstance(w_slots, space.w_str)): - if space.int_w(space.len(w_slots)) == 0: - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - slot_names_w = [w_slots] - else: - slot_names_w = space.unpackiterable(w_slots) - for w_slot_name in slot_names_w: - slot_name = space.str_w(w_slot_name) - # slot_name should be a valid identifier - if len(slot_name) == 0: - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - first_char = slot_name[0] - if not first_char.isalpha() and first_char != '_': - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - for c in slot_name: - if not c.isalnum() and c!= '_': - raise OperationError(space.w_TypeError, - space.wrap('__slots__ must be identifiers')) - if slot_name == '__dict__': - if wantdict or w_self.hasdict: - raise OperationError(space.w_TypeError, - space.wrap("__dict__ slot disallowed: we already got one")) - wantdict = True - elif slot_name == '__weakref__': - if wantweakref or w_self.weakrefable: - raise OperationError(space.w_TypeError, - space.wrap("__weakref__ slot disallowed: we already got one")) - - wantweakref = True - else: - # create member - slot_name = _mangle(slot_name, name) - # Force interning of slot names. - slot_name = space.str_w(space.new_interned_str(slot_name)) - w_self.dict_w[slot_name] = space.wrap(Member(nslots, slot_name, w_self)) - nslots += 1 - - w_self.nslots = nslots - - wantdict = wantdict or hasoldstylebase - - if wantdict and not w_self.hasdict: - w_self.dict_w['__dict__'] = space.wrap(std_dict_descr) - w_self.hasdict = True - if '__del__' in dict_w: - w_self.needsdel = True - if wantweakref and not w_self.weakrefable: - w_self.dict_w['__weakref__'] = space.wrap(weakref_descr) - w_self.weakrefable = True - w_type = space.type(w_self) - if not space.is_w(w_type, space.w_type): - if space.config.objspace.std.withtypeversion: - w_self.version_tag = None - w_self.mro_w = [] - mro_func = space.lookup(w_self, 'mro') - mro_func_args = Arguments(space, [w_self]) - w_mro = space.call_args(mro_func, mro_func_args) - w_self.mro_w = space.unpackiterable(w_mro) - return - w_self.mro_w = w_self.compute_mro() if space.config.objspace.std.withtypeversion: - if w_self.instancetypedef.hasdict: - w_self.version_tag = None + if w_self.instancetypedef.hasdict or custom_metaclass: + pass else: w_self.version_tag = VersionTag() @@ -250,33 +109,14 @@ continue w_base.add_subclass(w_self) - # compute the most parent class with the same layout as us - def get_layout(w_self): - w_bestbase = w_self.w_bestbase - if w_bestbase is None: # object - return w_self - if w_self.instancetypedef is not w_bestbase.instancetypedef: - return w_self - if w_self.nslots == w_bestbase.nslots: - return w_bestbase.get_layout() - return w_self - # compute a tuple that fully describes the instance layout def get_full_instance_layout(w_self): - w_layout = w_self.get_layout() + w_layout = w_self.w_same_layout_as or w_self return (w_layout, w_self.hasdict, w_self.needsdel, w_self.weakrefable) - def compute_mro(w_self): + def 
compute_default_mro(w_self): return compute_C3_mro(w_self.space, w_self) - def ensure_static__new__(w_self): - # special-case __new__, as in CPython: - # if it is a Function, turn it into a static method - if '__new__' in w_self.dict_w: - w_new = w_self.dict_w['__new__'] - if isinstance(w_new, Function): - w_self.dict_w['__new__'] = StaticMethod(w_new) - def getdictvalue(w_self, space, w_attr): return w_self.getdictvalue_w(space, space.str_w(w_attr)) @@ -487,6 +327,230 @@ def setweakref(self, space, weakreflifeline): self._lifeline_ = weakreflifeline +# ____________________________________________________________ +# Initialization of type objects + +def get_parent_layout(w_type): + """Compute the most parent class of 'w_type' whose layout + is the same as 'w_type', or None if all parents of 'w_type' + have a different layout than 'w_type'. + """ + w_starttype = w_type + while len(w_type.bases_w) > 0: + w_bestbase = find_best_base(w_type.space, w_type.bases_w) + if w_type.instancetypedef is not w_bestbase.instancetypedef: + break + if w_type.nslots != w_bestbase.nslots: + break + w_type = w_bestbase + if w_type is not w_starttype: + return w_type + else: + return None + +def issublayout(w_layout1, w_layout2): + space = w_layout2.space + while w_layout1 is not w_layout2: + w_layout1 = find_best_base(space, w_layout1.bases_w) + if w_layout1 is None: + return False + w_layout1 = w_layout1.w_same_layout_as or w_layout1 + return True + +def find_best_base(space, bases_w): + """The best base is one of the bases in the given list: the one + whose layout a new type should use as a starting point. + """ + w_bestbase = None + for w_candidate in bases_w: + if not isinstance(w_candidate, W_TypeObject): + continue + if w_bestbase is None: + w_bestbase = w_candidate # for now + continue + candtypedef = w_candidate.instancetypedef + besttypedef = w_bestbase.instancetypedef + if candtypedef is besttypedef: + # two candidates with the same typedef are equivalent unless + # one has extra slots over the other + if w_candidate.nslots > w_bestbase.nslots: + w_bestbase = w_candidate + elif issubtypedef(candtypedef, besttypedef): + w_bestbase = w_candidate + return w_bestbase + +def check_and_find_best_base(space, bases_w): + """The best base is one of the bases in the given list: the one + whose layout a new type should use as a starting point. + This version checks that bases_w is an acceptable tuple of bases. 
+ """ + w_bestbase = find_best_base(space, bases_w) + if w_bestbase is None: + raise OperationError(space.w_TypeError, + space.wrap("a new-style class can't have " + "only classic bases")) + if not w_bestbase.instancetypedef.acceptable_as_base_class: + raise OperationError(space.w_TypeError, + space.wrap("type '%s' is not an " + "acceptable base class" % + w_bestbase.instancetypedef.name)) + + # check that all other bases' layouts are superclasses of the bestbase + w_bestlayout = w_bestbase.w_same_layout_as or w_bestbase + for w_base in bases_w: + if isinstance(w_base, W_TypeObject): + w_layout = w_base.w_same_layout_as or w_base + if not issublayout(w_bestlayout, w_layout): + raise OperationError(space.w_TypeError, + space.wrap("instance layout conflicts in " + "multiple inheritance")) + return w_bestbase + +def copy_flags_from_bases(w_self, w_bestbase): + hasoldstylebase = False + for w_base in w_self.bases_w: + if not isinstance(w_base, W_TypeObject): + hasoldstylebase = True + continue + w_self.hasdict = w_self.hasdict or w_base.hasdict + w_self.needsdel = w_self.needsdel or w_base.needsdel + w_self.weakrefable = w_self.weakrefable or w_base.weakrefable + w_self.nslots = w_bestbase.nslots + return hasoldstylebase + +def create_all_slots(w_self, hasoldstylebase): + space = w_self.space + dict_w = w_self.dict_w + if '__slots__' not in dict_w: + wantdict = True + wantweakref = True + else: + wantdict = False + wantweakref = False + w_slots = dict_w['__slots__'] + if space.is_true(space.isinstance(w_slots, space.w_str)): + slot_names_w = [w_slots] + else: + slot_names_w = space.unpackiterable(w_slots) + for w_slot_name in slot_names_w: + slot_name = space.str_w(w_slot_name) + if slot_name == '__dict__': + if wantdict or w_self.hasdict: + raise OperationError(space.w_TypeError, + space.wrap("__dict__ slot disallowed: " + "we already got one")) + wantdict = True + elif slot_name == '__weakref__': + if wantweakref or w_self.weakrefable: + raise OperationError(space.w_TypeError, + space.wrap("__weakref__ slot disallowed: " + "we already got one")) + wantweakref = True + else: + create_slot(w_self, slot_name) + wantdict = wantdict or hasoldstylebase + if wantdict: create_dict_slot(w_self) + if wantweakref: create_weakref_slot(w_self) + if '__del__' in dict_w: w_self.needsdel = True + +def create_slot(w_self, slot_name): + space = w_self.space + if not valid_slot_name(slot_name): + raise OperationError(space.w_TypeError, + space.wrap('__slots__ must be identifiers')) + # create member + slot_name = _mangle(slot_name, w_self.name) + # Force interning of slot names. 
+ slot_name = space.str_w(space.new_interned_str(slot_name)) + member = Member(w_self.nslots, slot_name, w_self) + w_self.dict_w[slot_name] = space.wrap(member) + w_self.nslots += 1 + +def create_dict_slot(w_self): + if not w_self.hasdict: + w_self.dict_w['__dict__'] = w_self.space.wrap(std_dict_descr) + w_self.hasdict = True + +def create_weakref_slot(w_self): + if not w_self.weakrefable: + w_self.dict_w['__weakref__'] = w_self.space.wrap(weakref_descr) + w_self.weakrefable = True + +def valid_slot_name(slot_name): + if len(slot_name) == 0 or slot_name[0].isdigit(): + return False + for c in slot_name: + if not c.isalnum() and c != '_': + return False + return True + +def setup_user_defined_type(w_self): + if len(w_self.bases_w) == 0: + w_self.bases_w = [w_self.space.w_object] + w_bestbase = check_and_find_best_base(w_self.space, w_self.bases_w) + w_self.instancetypedef = w_bestbase.instancetypedef + w_self.__flags__ = _HEAPTYPE + + hasoldstylebase = copy_flags_from_bases(w_self, w_bestbase) + create_all_slots(w_self, hasoldstylebase) + + w_self.w_same_layout_as = get_parent_layout(w_self) + ensure_common_attributes(w_self) + +def setup_builtin_type(w_self): + w_self.hasdict = w_self.instancetypedef.hasdict + w_self.weakrefable = w_self.instancetypedef.weakrefable + ensure_common_attributes(w_self) + +def ensure_common_attributes(w_self): + ensure_static_new(w_self) + ensure_doc_attr(w_self) + if w_self.is_heaptype(): + ensure_module_attr(w_self) + w_self.mro_w = [] # temporarily + compute_mro(w_self) + +def ensure_static_new(w_self): + # special-case __new__, as in CPython: + # if it is a Function, turn it into a static method + if '__new__' in w_self.dict_w: + w_new = w_self.dict_w['__new__'] + if isinstance(w_new, Function): + w_self.dict_w['__new__'] = StaticMethod(w_new) + +def ensure_doc_attr(w_self): + # make sure there is a __doc__ in dict_w + w_self.dict_w.setdefault('__doc__', w_self.space.w_None) + +def ensure_module_attr(w_self): + # initialize __module__ in the dict (user-defined types only) + if '__module__' not in w_self.dict_w: + space = w_self.space + try: + caller = space.getexecutioncontext().framestack.top() + except IndexError: + pass + else: + w_globals = caller.w_globals + w_name = space.finditem(w_globals, space.wrap('__name__')) + if w_name is not None: + w_self.dict_w['__module__'] = w_name + +def compute_mro(w_self): + if w_self.is_heaptype(): + space = w_self.space + w_metaclass = space.type(w_self) + w_where, w_mro_func = space.lookup_in_type_where(w_metaclass, 'mro') + assert w_mro_func is not None # because there is one in 'type' + if not space.is_w(w_where, space.w_type): + w_mro_meth = space.get(w_mro_func, w_self) + w_mro = space.call_function(w_mro_meth) + w_self.mro_w = space.unpackiterable(w_mro) + # do some checking here + return # done + w_self.mro_w = w_self.compute_default_mro() + +# ____________________________________________________________ def call__Type(space, w_type, __args__): # special case for type(x) Modified: pypy/dist/pypy/objspace/std/typetype.py ============================================================================== --- pypy/dist/pypy/objspace/std/typetype.py (original) +++ pypy/dist/pypy/objspace/std/typetype.py Wed Aug 27 17:42:31 2008 @@ -56,6 +56,8 @@ (space.type(w_type).getname(space, '?')))) return w_type +# ____________________________________________________________ + def _check(space, w_type, msg=None): from pypy.objspace.std.typeobject import W_TypeObject if not isinstance(w_type, W_TypeObject): @@ -78,138 +80,94 @@ 
def descr_get__mro__(space, w_type): w_type = _check(space, w_type) - # XXX this should be inside typeobject.py return space.newtuple(w_type.mro_w) def descr_mro(space, w_type): """Return a type's method resolution order.""" w_type = _check(space, w_type,"expected type") - return space.newlist(w_type.compute_mro()) + return space.newlist(w_type.compute_default_mro()) def descr_get__bases__(space, w_type): w_type = _check(space, w_type) return space.newtuple(w_type.bases_w) def mro_subclasses(space, w_type, temp): - from pypy.objspace.std.typeobject import W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject, compute_mro + temp.append((w_type, w_type.mro_w)) + compute_mro(w_type) for w_sc in w_type.get_subclasses(): assert isinstance(w_sc, W_TypeObject) - temp.append((w_sc, w_sc.mro_w)) - mro_internal(space, w_sc) mro_subclasses(space, w_sc, temp) -# should be a W_TypeObject method i guess -def mro_internal(space, w_type): - if not space.is_w(space.type(w_type), space.w_type): - #w_type.mro_w = [] - mro_func = space.lookup(w_type, 'mro') - mro_func_args = Arguments(space, [w_type]) - w_mro = space.call_args(mro_func, mro_func_args) - w_type.mro_w = space.unpackiterable(w_mro) - # do some checking here - else: - w_type.mro_w = w_type.compute_mro() - -def best_base(space, newstyle_bases_w): - if not newstyle_bases_w: - raise OperationError(space.w_TypeError, - space.wrap("a new-style class can't have only classic bases")) - w_bestbase = None - w_winner = None - for w_base in newstyle_bases_w: - w_candidate = w_base.get_layout() - if w_winner is None: - w_winner = w_candidate - w_bestbase = w_base - elif space.is_true(space.issubtype(w_winner, w_candidate)): - pass - elif space.is_true(space.issubtype(w_candidate, w_winner)): - w_winner = w_candidate - w_bestbase = w_base - else: - raise OperationError(space.w_TypeError, - space.wrap("multiple bases have instance lay-out conflict")) - return w_bestbase - def descr_set__bases__(space, w_type, w_value): - from pypy.objspace.std.typeobject import W_TypeObject # this assumes all app-level type objects are W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject + from pypy.objspace.std.typeobject import check_and_find_best_base + from pypy.objspace.std.typeobject import get_parent_layout w_type = _check(space, w_type) if not w_type.is_heaptype(): raise OperationError(space.w_TypeError, space.wrap("can't set %s.__bases__" % - w_type.name)) + (w_type.name,))) if not space.is_true(space.isinstance(w_value, space.w_tuple)): raise OperationError(space.w_TypeError, space.wrap("can only assign tuple" " to %s.__bases__, not %s"% - (w_type.name, - space.type(w_value).getname(space, '?')))) - if space.int_w(space.len(w_value)) == 0: + (w_type.name, + space.type(w_value).getname(space, '?')))) + newbases_w = space.unpackiterable(w_value) + if len(newbases_w) == 0: raise OperationError(space.w_TypeError, - space.wrap("can only assign non-empty tuple to %s.__bases__, not ()"% - w_type.name)) - new_newstyle_bases = [] - for w_base in space.unpackiterable(w_value): - if not isinstance(w_base, W_TypeObject): - w_typ = space.type(w_base) - if not space.is_w(w_typ, space.w_classobj): - raise OperationError(space.w_TypeError, - space.wrap("%s.__bases__ must be tuple " - "of old- or new-style classes" - ", not '%s'"% - (w_type.name, - w_typ.getname(space, '?')))) - else: - new_newstyle_bases.append(w_base) - if space.is_true(space.issubtype(w_base, w_type)): + space.wrap("can only assign non-empty tuple" + " to %s.__bases__, not ()"% + 
(w_type.name,))) + + for w_newbase in newbases_w: + if isinstance(w_newbase, W_TypeObject): + if w_type in w_newbase.compute_default_mro(): raise OperationError(space.w_TypeError, - space.wrap("a __bases__ item causes an inheritance cycle")) + space.wrap("a __bases__ item causes" + " an inheritance cycle")) - new_base = best_base(space, new_newstyle_bases) + w_oldbestbase = check_and_find_best_base(space, w_type.bases_w) + w_newbestbase = check_and_find_best_base(space, newbases_w) + oldlayout = w_oldbestbase.get_full_instance_layout() + newlayout = w_newbestbase.get_full_instance_layout() - if w_type.w_bestbase.get_full_instance_layout() != new_base.get_full_instance_layout(): + if oldlayout != newlayout: raise OperationError(space.w_TypeError, - space.wrap("__bases__ assignment: '%s' object layout differs from '%s'" % - (w_type.getname(space, '?'), new_base.getname(space, '?')))) + space.wrap("__bases__ assignment: '%s' object layout" + " differs from '%s'" % + (w_newbestbase.getname(space, '?'), + w_oldbestbase.getname(space, '?')))) # invalidate the version_tag of all the current subclasses w_type.mutated() - saved_bases = w_type.bases_w - saved_base = w_type.w_bestbase - saved_mro = w_type.mro_w - - w_type.bases_w = space.unpackiterable(w_value) - w_type.w_bestbase = new_base - + # now we can go ahead and change 'w_type.bases_w' + saved_bases_w = w_type.bases_w temp = [] try: - mro_internal(space, w_type) - + for w_oldbase in saved_bases_w: + if isinstance(w_oldbase, W_TypeObject): + w_oldbase.remove_subclass(w_type) + w_type.bases_w = newbases_w + for w_newbase in newbases_w: + if isinstance(w_newbase, W_TypeObject): + w_newbase.add_subclass(w_type) + # try to recompute all MROs mro_subclasses(space, w_type, temp) - - for old_base in saved_bases: - if isinstance(old_base, W_TypeObject): - old_base.remove_subclass(w_type) - for new_base in new_newstyle_bases: - new_base.add_subclass(w_type) except: for cls, old_mro in temp: cls.mro_w = old_mro - w_type.bases_w = saved_bases - w_type.w_bestbase = saved_base - w_type.mro_w = saved_mro + w_type.bases_w = saved_bases_w raise - + assert w_type.w_same_layout_as is get_parent_layout(w_type) # invariant + def descr__base(space, w_type): + from pypy.objspace.std.typeobject import find_best_base w_type = _check(space, w_type) - if w_type.w_bestbase is not None: - return w_type.w_bestbase - elif w_type is not space.w_object: - return space.w_object - else: - return space.w_None + return find_best_base(space, w_type.bases_w) def descr__doc(space, w_type): if space.is_w(w_type, space.w_type): From arigo at codespeak.net Wed Aug 27 17:42:44 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 17:42:44 +0200 (CEST) Subject: [pypy-svn] r57655 - pypy/branch/typeobject-init Message-ID: <20080827154244.69EEC16A09B@codespeak.net> Author: arigo Date: Wed Aug 27 17:42:41 2008 New Revision: 57655 Removed: pypy/branch/typeobject-init/ Log: Remove merged branch. From arigo at codespeak.net Wed Aug 27 19:15:32 2008 From: arigo at codespeak.net (arigo at codespeak.net) Date: Wed, 27 Aug 2008 19:15:32 +0200 (CEST) Subject: [pypy-svn] r57656 - in pypy/dist/pypy/interpreter/pyparser: . 
test test/samples Message-ID: <20080827171532.1846C169E55@codespeak.net> Author: arigo Date: Wed Aug 27 19:15:29 2008 New Revision: 57656 Added: pypy/dist/pypy/interpreter/pyparser/test/samples/snippet_decorators_2.py Modified: pypy/dist/pypy/interpreter/pyparser/astbuilder.py pypy/dist/pypy/interpreter/pyparser/asthelper.py pypy/dist/pypy/interpreter/pyparser/test/test_astbuilder.py pypy/dist/pypy/interpreter/pyparser/test/test_samples.py Log: Fix for decorators of the form @f() with an empty argument list. Modified: pypy/dist/pypy/interpreter/pyparser/astbuilder.py ============================================================================== --- pypy/dist/pypy/interpreter/pyparser/astbuilder.py (original) +++ pypy/dist/pypy/interpreter/pyparser/astbuilder.py Wed Aug 27 19:15:29 2008 @@ -580,18 +580,23 @@ def build_decorator(builder, nb): """decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE""" atoms = get_atoms(builder, nb) - nodes = [] - # remove '@', '(' and ')' from atoms and use parse_attraccess - for token in atoms[1:]: - if isinstance(token, TokenObject) and ( - token.name == builder.parser.tokens['LPAR'] - or token.name == builder.parser.tokens['RPAR'] - or token.name == builder.parser.tokens['NEWLINE']): - # skip those ones - continue - else: - nodes.append(token) - obj = parse_attraccess(nodes, builder) + # collect all nodes up to '(' or NEWLINE + end = 1 + while True: + token = atoms[end] + if isinstance(token, TokenObject): + if token.name == builder.parser.tokens['NEWLINE']: + arglist = None + break + if token.name == builder.parser.tokens['LPAR']: + arglist = atoms[end+1] + if not isinstance(arglist, ArglistObject): # because it's RPAR + arglist = ArglistObject([], None, None, token.lineno) + break + end += 1 + obj = parse_attraccess(atoms[1:end], builder) + if arglist is not None: + obj = reduce_callfunc(obj, arglist) builder.push(obj) def build_funcdef(builder, nb): Modified: pypy/dist/pypy/interpreter/pyparser/asthelper.py ============================================================================== --- pypy/dist/pypy/interpreter/pyparser/asthelper.py (original) +++ pypy/dist/pypy/interpreter/pyparser/asthelper.py Wed Aug 27 19:15:29 2008 @@ -494,6 +494,9 @@ """parses token list like ['a', '.', 'b', '.', 'c', ...] and returns an ast node : ast.Getattr(Getattr(Name('a'), 'b'), 'c' ...) + + Well, no, that's lying. In reality this is also parsing everything + that goes in the grammar 'trailer' rule. """ token = tokens[0] # XXX HACK for when parse_attraccess is called from build_decorator @@ -614,10 +617,12 @@ self.lineno = lineno def __str__(self): - return "" % self.value + return repr(self) def __repr__(self): - return "" % self.value + return "" % (self.arguments, + self.stararg, + self.dstararg) class SubscriptObject(ObjectAccessor): """helper class to build subscript list Added: pypy/dist/pypy/interpreter/pyparser/test/samples/snippet_decorators_2.py ============================================================================== --- (empty file) +++ pypy/dist/pypy/interpreter/pyparser/test/samples/snippet_decorators_2.py Wed Aug 27 19:15:29 2008 @@ -0,0 +1,7 @@ + + +# this one makes the "stablecompiler" explode. Ha ha. 
+ + at spam.egg() +def f(): + pass Modified: pypy/dist/pypy/interpreter/pyparser/test/test_astbuilder.py ============================================================================== --- pypy/dist/pypy/interpreter/pyparser/test/test_astbuilder.py (original) +++ pypy/dist/pypy/interpreter/pyparser/test/test_astbuilder.py Wed Aug 27 19:15:29 2008 @@ -257,6 +257,7 @@ 'snippet_whitespaces.py', 'snippet_samples.py', 'snippet_decorators.py', + 'snippet_decorators_2.py', 'snippet_listlinenos.py', 'snippet_whilelineno.py', ] Modified: pypy/dist/pypy/interpreter/pyparser/test/test_samples.py ============================================================================== --- pypy/dist/pypy/interpreter/pyparser/test/test_samples.py (original) +++ pypy/dist/pypy/interpreter/pyparser/test/test_samples.py Wed Aug 27 19:15:29 2008 @@ -23,6 +23,7 @@ SKIP_ALWAYS = [ "snippet_with_1.py", "snippet_with_2.py", + "snippet_decorators_2.py", ] REAL_EXPECTED_OUTPUT = { # for snippets that show bugs of Python's compiler package @@ -79,11 +80,11 @@ for path in sample_paths: fname = path.basename if fname in SKIP_ALWAYS: - yield lambda: py.test.skip( + yield lambda fname=fname: py.test.skip( "%r is set to always skip." % (fname,)) continue if GRAMMAR_MISMATCH and fname in SKIP_IF_NOT_NATIVE: - yield lambda: py.test.skip( + yield lambda fname=fname: py.test.skip( "Grammar mismatch and %s is not native" % (fname,)) continue yield check_parse, str(path) From fijal at codespeak.net Wed Aug 27 19:45:12 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Wed, 27 Aug 2008 19:45:12 +0200 (CEST) Subject: [pypy-svn] r57658 - pypy/branch/cross-compilation Message-ID: <20080827174512.C38DA16A0C2@codespeak.net> Author: fijal Date: Wed Aug 27 19:45:09 2008 New Revision: 57658 Added: pypy/branch/cross-compilation/ - copied from r57657, pypy/dist/ Log: Branch for experimenting with cross-compilation From fijal at codespeak.net Wed Aug 27 20:10:37 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Wed, 27 Aug 2008 20:10:37 +0200 (CEST) Subject: [pypy-svn] r57659 - pypy/branch/cross-compilation/pypy/translator/c Message-ID: <20080827181037.6973C16A02E@codespeak.net> Author: fijal Date: Wed Aug 27 20:10:36 2008 New Revision: 57659 Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py Log: Use the same CC in Makefile as in cbuild. 
Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/genc.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/genc.py Wed Aug 27 20:10:36 2008 @@ -399,7 +399,9 @@ if self.config.translation.cc: cc = self.config.translation.cc else: - cc = 'gcc' + cc = eci.get_compiler_for_platform() + if cc is None: + cc = 'gcc' make_no_prof = '' if self.has_profopt(): profopt = self.config.translation.profopt From fijal at codespeak.net Wed Aug 27 20:12:28 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Wed, 27 Aug 2008 20:12:28 +0200 (CEST) Subject: [pypy-svn] r57660 - pypy/branch/cross-compilation/pypy/translator/c Message-ID: <20080827181228.4DB4916A035@codespeak.net> Author: fijal Date: Wed Aug 27 20:12:27 2008 New Revision: 57660 Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py Log: Leave a note about config.translation.cc not affecting cbuild.py Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/genc.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/genc.py Wed Aug 27 20:12:27 2008 @@ -312,7 +312,10 @@ bk = self.translator.annotator.bookkeeper return getfunctionptr(bk.getdesc(self.entrypoint).getuniquegraph()) - def getccompiler(self): + def getccompiler(self): + # XXX note that overwritten cc here will not affect already + # performed steps in cbuild.py + # completely unsure how to get rid of this inconsistency cc = self.config.translation.cc # Copy extrafiles to target directory, if needed extrafiles = [] From fijal at codespeak.net Wed Aug 27 20:19:14 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Wed, 27 Aug 2008 20:19:14 +0200 (CEST) Subject: [pypy-svn] r57661 - pypy/branch/cross-compilation/pypy/translator/c Message-ID: <20080827181914.F057916A0B4@codespeak.net> Author: fijal Date: Wed Aug 27 20:19:11 2008 New Revision: 57661 Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py Log: Ooops, a typo and untested code Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/genc.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/genc.py Wed Aug 27 20:19:11 2008 @@ -402,7 +402,7 @@ if self.config.translation.cc: cc = self.config.translation.cc else: - cc = eci.get_compiler_for_platform() + cc = self.eci.get_compiler_for_platform() if cc is None: cc = 'gcc' make_no_prof = '' From fijal at codespeak.net Thu Aug 28 11:51:43 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 28 Aug 2008 11:51:43 +0200 (CEST) Subject: [pypy-svn] r57665 - in pypy/branch/cross-compilation/pypy: config tool translator/c translator/tool translator/tool/test Message-ID: <20080828095143.30AB816A110@codespeak.net> Author: fijal Date: Thu Aug 28 11:51:40 2008 New Revision: 57665 Added: pypy/branch/cross-compilation/pypy/tool/pyplatform.py (contents, props changed) Modified: pypy/branch/cross-compilation/pypy/config/translationoption.py pypy/branch/cross-compilation/pypy/tool/gcc_cache.py pypy/branch/cross-compilation/pypy/translator/c/genc.py pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py Log: 
Refactor a bit cross-compilation details to have a single place where platform info is kept. Not much is there, but it's a good start Modified: pypy/branch/cross-compilation/pypy/config/translationoption.py ============================================================================== --- pypy/branch/cross-compilation/pypy/config/translationoption.py (original) +++ pypy/branch/cross-compilation/pypy/config/translationoption.py Thu Aug 28 11:51:40 2008 @@ -348,8 +348,11 @@ def set_platform(config, platform): if platform == 'maemo': from pypy.translator.tool.cbuild import ExternalCompilationInfo + from pypy.tool.pyplatform import Maemo # XXX evil hackery func_defs = list(ExternalCompilationInfo.__init__.func_defaults) - func_defs[-1] = 'maemo' + func_defs[-1] = Maemo() ExternalCompilationInfo.__init__.im_func.func_defaults = tuple(func_defs) + elif platform != 'host': + raise NotImplementedError('Platform = %s' % (platform,)) Modified: pypy/branch/cross-compilation/pypy/tool/gcc_cache.py ============================================================================== --- pypy/branch/cross-compilation/pypy/tool/gcc_cache.py (original) +++ pypy/branch/cross-compilation/pypy/tool/gcc_cache.py Thu Aug 28 11:51:40 2008 @@ -20,8 +20,7 @@ try: return path.read() except py.error.Error: - result = py.process.cmdexec(eci.get_emulator_for_platform() + - build_executable(c_files, eci)) + result = eci.platform.execute(build_executable(c_files, eci)) path.write(result) return result Added: pypy/branch/cross-compilation/pypy/tool/pyplatform.py ============================================================================== --- (empty file) +++ pypy/branch/cross-compilation/pypy/tool/pyplatform.py Thu Aug 28 11:51:40 2008 @@ -0,0 +1,31 @@ + +""" This file contains various platform-specific profiles for +pypy's cross compilation +""" + +import py + +class Platform(object): + def get_compiler(self): + return None + + def execute(self, cmd): + return py.process.cmdexec(cmd) + + # platform objects are immutable + + def __hash__(self): + return hash(self.__class__.__name__) + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + return self.__class__.__name__ == other.__class__.__name__ + +class Maemo(Platform): + def get_compiler(self): + return '/scratchbox/compilers/cs2005q3.2-glibc-arm/bin/sbox-arm-linux-gcc' + + def execute(self, cmd): + return py.process.cmdexec('/scratchbox/login ' + cmd) Modified: pypy/branch/cross-compilation/pypy/translator/c/genc.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/genc.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/genc.py Thu Aug 28 11:51:40 2008 @@ -402,7 +402,7 @@ if self.config.translation.cc: cc = self.config.translation.cc else: - cc = self.eci.get_compiler_for_platform() + cc = self.eci.platform.get_compiler() if cc is None: cc = 'gcc' make_no_prof = '' Modified: pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py (original) +++ pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py Thu Aug 28 11:51:40 2008 @@ -9,6 +9,7 @@ log = py.log.Producer("cbuild") py.log.setconsumer("cbuild", ansi_log) from pypy.tool.udir import udir +from pypy.tool.pyplatform import Platform debug = 0 @@ -40,7 +41,7 @@ compile_extra = [], link_extra = [], frameworks = [], - platform = 'host'): + platform 
= Platform()): """ pre_include_bits: list of pieces of text that should be put at the top of the generated .c files, before any #include. They shouldn't @@ -82,8 +83,7 @@ link to a framework bundle. Not suitable for unix-like .dylib installations. - platform: an unique identifier of compile platform, useful for - caching. + platform: an object that can identify the platform """ for name in self._ATTRIBUTES: value = locals()[name] @@ -174,6 +174,7 @@ for attr in self._ATTRIBUTES: val = getattr(self, attr) info.append("%s=%s" % (attr, repr(val))) + info.append("platform=%s" % self.platform.__class__.__name__) return "" % ", ".join(info) def merge(self, *others): @@ -263,24 +264,6 @@ d['separate_module_sources'] = () return ExternalCompilationInfo(**d) - def get_emulator_for_platform(self): - if self.platform == 'host': - return '' - elif self.platform == 'maemo': - # XXX how to do it in better way??? - return '/scratchbox/login ' - else: - raise NotImplementedError("Platform = %s" % (self.platform,)) - - def get_compiler_for_platform(self): - if self.platform == 'host': - return None - elif self.platform == 'maemo': - # XXX this should be settable somehow, not sure exactly how - return '/scratchbox/compilers/cs2005q3.2-glibc-arm/bin/sbox-arm-linux-gcc' - else: - raise NotImplementedError("Platform = %s" % (self.platform,)) - if sys.platform == 'win32': so_ext = '.dll' else: @@ -531,7 +514,7 @@ if compiler_exe is not None: self.compiler_exe = compiler_exe else: - self.compiler_exe = eci.get_compiler_for_platform() + self.compiler_exe = eci.platform.get_compiler() self.profbased = profbased if not sys.platform in ('win32', 'darwin'): # xxx if 'm' not in self.libraries: Modified: pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py (original) +++ pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py Thu Aug 28 11:51:40 2008 @@ -158,14 +158,38 @@ 'dxowqbncpqympqhe-config') def test_platforms(self): - eci = ExternalCompilationInfo(platform='xxx') + from pypy.tool.pyplatform import Maemo + eci = ExternalCompilationInfo(platform=Maemo()) eci2 = ExternalCompilationInfo() assert eci != eci2 assert hash(eci) != hash(eci2) + assert repr(eci) != repr(eci2) py.test.raises(Exception, eci2.merge, eci) - assert eci.merge(eci).platform == 'xxx' + assert eci.merge(eci).platform == Maemo() + + def test_platform(self): + from pypy.tool.pyplatform import Platform + class Expected(Exception): + pass + + class X(Platform): + def get_compiler(self): + raise Expected + + def execute(self): + return 3 + + eci = ExternalCompilationInfo(platform=X()) + try: + build_executable([self.modfile], eci) + except Expected: + pass + else: + py.test.fail("Did not raise") + assert eci.platform.execute() == 3 def test_standalone_maemo(self): + from pypy.tool.pyplatform import Maemo # XXX skip if there is no scratchbox if not py.path.local('/scratchbox/login').check(): py.test.skip("No scratchbox detected") @@ -183,9 +207,9 @@ if sys.platform == 'win32': py.test.skip("No cross-compilation on windows yet") else: - eci = ExternalCompilationInfo(platform='maemo', + eci = ExternalCompilationInfo(platform=Maemo(), libraries=['m']) output = build_executable([c_file], eci) py.test.raises(py.process.cmdexec.Error, py.process.cmdexec, output) - result = py.process.cmdexec(eci.get_emulator_for_platform() + output) + result = eci.platform.execute(output) 
assert result.startswith('4.0') From fijal at codespeak.net Thu Aug 28 12:01:59 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 28 Aug 2008 12:01:59 +0200 (CEST) Subject: [pypy-svn] r57666 - pypy/branch/cross-compilation/pypy/translator/c/test Message-ID: <20080828100159.BB93416A0EA@codespeak.net> Author: fijal Date: Thu Aug 28 12:01:57 2008 New Revision: 57666 Modified: pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py Log: Some tests for standalone cross-compilation Modified: pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py Thu Aug 28 12:01:57 2008 @@ -1,5 +1,5 @@ import py -import sys, os +import sys, os, re from pypy.rlib.rarithmetic import r_longlong from pypy.translator.translator import TranslationContext @@ -226,3 +226,30 @@ assert " ll_strtod.h" in makefile assert " ll_strtod.o" in makefile +def test_cross_compilation(): + from pypy.tool.pyplatform import Platform + from pypy.config.translationoption import set_platform + + class X(Platform): + def get_compiler(self): + return 'x' + + def entry_point(argv): + return 0 + + t = TranslationContext() + t.buildannotator().build_types(entry_point, [s_list_of_strings]) + t.buildrtyper().specialize() + + set_platform(t.config, X()) + try: + eci = ExternalCompilationInfo(platform=X()) + + cbuilder = CStandaloneBuilder(t, entry_point, t.config) + cbuilder.generate_source() + + makefile = udir.join(cbuilder.modulename, 'Makefile').read() + + m = re.search('^CC\s*=\s*x$', makefile) + finally: + set_platform(t.config, Platform()) From fijal at codespeak.net Thu Aug 28 20:03:28 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Thu, 28 Aug 2008 20:03:28 +0200 (CEST) Subject: [pypy-svn] r57668 - in pypy/branch/cross-compilation/pypy: config rlib tool translator/c/test translator/tool translator/tool/test Message-ID: <20080828180328.0967A16A0DB@codespeak.net> Author: fijal Date: Thu Aug 28 20:03:24 2008 New Revision: 57668 Added: pypy/branch/cross-compilation/pypy/rlib/pyplatform.py - copied unchanged from r57665, pypy/branch/cross-compilation/pypy/tool/pyplatform.py Removed: pypy/branch/cross-compilation/pypy/tool/pyplatform.py Modified: pypy/branch/cross-compilation/pypy/config/translationoption.py pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py Log: Move pyplatform from tool to rlib Modified: pypy/branch/cross-compilation/pypy/config/translationoption.py ============================================================================== --- pypy/branch/cross-compilation/pypy/config/translationoption.py (original) +++ pypy/branch/cross-compilation/pypy/config/translationoption.py Thu Aug 28 20:03:24 2008 @@ -346,13 +346,19 @@ ] def set_platform(config, platform): - if platform == 'maemo': - from pypy.translator.tool.cbuild import ExternalCompilationInfo - from pypy.tool.pyplatform import Maemo - # XXX evil hackery - func_defs = list(ExternalCompilationInfo.__init__.func_defaults) - func_defs[-1] = Maemo() - ExternalCompilationInfo.__init__.im_func.func_defaults = tuple(func_defs) - elif platform != 'host': - raise NotImplementedError('Platform = %s' % (platform,)) + from 
pypy.rlib.pyplatform import Platform, Maemo + from pypy.translator.tool.cbuild import ExternalCompilationInfo + if isinstance(platform, str): + if platform == 'maemo': + platform = Maemo() + elif platform == 'host': + return + else: + raise NotImplementedError('Platform = %s' % (platform,)) + assert isinstance(platform, Platform) + # XXX evil hackery + func_defs = list(ExternalCompilationInfo.__init__.func_defaults) + func_defs[-1] = platform + ExternalCompilationInfo.__init__.im_func.func_defaults = tuple(func_defs) + Modified: pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py (original) +++ pypy/branch/cross-compilation/pypy/translator/c/test/test_standalone.py Thu Aug 28 20:03:24 2008 @@ -227,7 +227,7 @@ assert " ll_strtod.o" in makefile def test_cross_compilation(): - from pypy.tool.pyplatform import Platform + from pypy.rlib.pyplatform import Platform from pypy.config.translationoption import set_platform class X(Platform): Modified: pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py (original) +++ pypy/branch/cross-compilation/pypy/translator/tool/cbuild.py Thu Aug 28 20:03:24 2008 @@ -9,7 +9,7 @@ log = py.log.Producer("cbuild") py.log.setconsumer("cbuild", ansi_log) from pypy.tool.udir import udir -from pypy.tool.pyplatform import Platform +from pypy.rlib.pyplatform import Platform debug = 0 Modified: pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py ============================================================================== --- pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py (original) +++ pypy/branch/cross-compilation/pypy/translator/tool/test/test_cbuild.py Thu Aug 28 20:03:24 2008 @@ -158,7 +158,7 @@ 'dxowqbncpqympqhe-config') def test_platforms(self): - from pypy.tool.pyplatform import Maemo + from pypy.rlib.pyplatform import Maemo eci = ExternalCompilationInfo(platform=Maemo()) eci2 = ExternalCompilationInfo() assert eci != eci2 @@ -168,7 +168,7 @@ assert eci.merge(eci).platform == Maemo() def test_platform(self): - from pypy.tool.pyplatform import Platform + from pypy.rlib.pyplatform import Platform class Expected(Exception): pass @@ -189,7 +189,7 @@ assert eci.platform.execute() == 3 def test_standalone_maemo(self): - from pypy.tool.pyplatform import Maemo + from pypy.rlib.pyplatform import Maemo # XXX skip if there is no scratchbox if not py.path.local('/scratchbox/login').check(): py.test.skip("No scratchbox detected") From antocuni at codespeak.net Fri Aug 29 12:02:52 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Fri, 29 Aug 2008 12:02:52 +0200 (CEST) Subject: [pypy-svn] r57673 - pypy/branch/oo-jit/pypy/translator/cli/test Message-ID: <20080829100252.63C6616A136@codespeak.net> Author: antocuni Date: Fri Aug 29 12:02:49 2008 New Revision: 57673 Modified: pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py pypy/branch/oo-jit/pypy/translator/cli/test/test_runtest.py Log: add support to pass strings as input arguments to compiled functions Modified: pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py ============================================================================== --- pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py (original) +++ 
pypy/branch/oo-jit/pypy/translator/cli/test/runtest.py Fri Aug 29 12:02:49 2008 @@ -8,7 +8,7 @@ from pypy.rpython.test.tool import BaseRtypingTest, OORtypeMixin from pypy.rpython.lltypesystem.lltype import typeOf from pypy.rpython.ootypesystem import ootype -from pypy.annotation.model import lltype_to_annotation +from pypy.annotation.model import lltype_to_annotation, SomeString from pypy.translator.backendopt.all import backend_optimizations from pypy.translator.backendopt.checkvirtual import check_virtual_methods from pypy.rpython.ootypesystem import ootype @@ -132,6 +132,7 @@ CTS.types.uint64: 'ToUInt64', CTS.types.bool: 'ToBoolean', CTS.types.char: 'ToChar', + CTS.types.string: 'ToString', } try: @@ -264,6 +265,12 @@ def __repr__(self): return 'ExceptionWrapper(%s)' % repr(self.class_name) +def get_annotation(x): + if isinstance(x, basestring) and len(x) > 1: + return SomeString() + else: + return lltype_to_annotation(typeOf(x)) + class CliTest(BaseRtypingTest, OORtypeMixin): def __init__(self): self._func = None @@ -272,7 +279,7 @@ def _compile(self, fn, args, ann=None, backendopt=True, auto_raise_exc=False, exctrans=False): if ann is None: - ann = [lltype_to_annotation(typeOf(x)) for x in args] + ann = [get_annotation(x) for x in args] if self._func is fn and self._ann == ann: return self._cli_func else: Modified: pypy/branch/oo-jit/pypy/translator/cli/test/test_runtest.py ============================================================================== --- pypy/branch/oo-jit/pypy/translator/cli/test/test_runtest.py (original) +++ pypy/branch/oo-jit/pypy/translator/cli/test/test_runtest.py Fri Aug 29 12:02:49 2008 @@ -15,4 +15,10 @@ return a0 res = self.interpret(fn, [42]*10) assert res == 42 - + + def test_input_string(self): + def fn(s): + return len(s) + + res = self.interpret(fn, ["hello"]) + assert res == 5 From antocuni at codespeak.net Fri Aug 29 12:04:31 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Fri, 29 Aug 2008 12:04:31 +0200 (CEST) Subject: [pypy-svn] r57674 - pypy/branch/oo-jit/pypy/jit/codegen/cli/test Message-ID: <20080829100431.C1E84169E70@codespeak.net> Author: antocuni Date: Fri Aug 29 12:04:30 2008 New Revision: 57674 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Log: use convert_arguments also for compiled functions Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Fri Aug 29 12:04:30 2008 @@ -1,17 +1,44 @@ import py from pypy.jit.codegen.cli.rgenop import RCliGenOp from pypy.jit.rainbow.test.test_interpreter import TestOOType as RainbowTest -from pypy.translator.cli.test.runtest import compile_graph +from pypy.translator.cli.test.runtest import compile_graph, get_annotation +from pypy.annotation import model as annmodel +def wrap_convert_arguments(callee, convert_arguments): + indexes = range(len(convert_arguments)) + convnames = ['conv%d' % i for i in indexes] + argnames = ['arg%d' % i for i in indexes] + varnames = ['var%d' % i for i in indexes] + lines = [] + lines.append('def fn(%s):' % ', '.join(argnames)) + for var, conv, arg in zip(varnames, convnames, argnames): + lines.append(' %s = %s(%s)' % (var, conv, arg)) + lines.append(' return callee(%s)' % ', '.join(varnames)) + + src = py.code.Source('\n'.join(lines)) + mydict = 
(dict(zip(convnames, convert_arguments))) + mydict['callee'] = callee + exec src.compile() in mydict + return mydict['fn'] class CompiledCliMixin(object): RGenOp = RCliGenOp translate_support_code = True def interpret(self, ll_function, values, opt_consts=[], *args, **kwds): - values, writer, jitcode = self.convert_and_serialize(ll_function, values, **kwds) + newvalues, writer, jitcode = self.convert_and_serialize(ll_function, values, **kwds) translator = self.rtyper.annotator.translator - func = compile_graph(self.rewriter.portal_entry_graph, translator) + graph = self.rewriter.portal_entry_graph + + if hasattr(ll_function, 'convert_arguments'): + fn = wrap_convert_arguments(self.rewriter.portal_entry, ll_function.convert_arguments) + FUNC = self.rewriter.PORTAL_FUNCTYPE + args_s = [get_annotation(value) for value in values] + s_result = annmodel.lltype_to_annotation(FUNC.RESULT) + graph = self.rewriter.annhelper.getgraph(fn, args_s, s_result) + self.rewriter.annhelper.finish() + + func = compile_graph(graph, translator, nowrap=True) return func(*values) @@ -26,6 +53,16 @@ def skip(self): py.test.skip('in progress') + def test_convert_arguments(self): + def ll_function(x): + return x+40 + def getlen(string): + return len(string) + ll_function.convert_arguments = [getlen] + res = self.interpret(ll_function, ["xx"], []) + assert res == 42 + + test_simple_struct = skip test_complex_struct = skip test_degenerate_with_voids = skip From cami at codespeak.net Fri Aug 29 12:54:39 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Fri, 29 Aug 2008 12:54:39 +0200 (CEST) Subject: [pypy-svn] r57676 - in pypy/dist/pypy/lang/gameboy: . test Message-ID: <20080829105439.4FBA9169F9D@codespeak.net> Author: cami Date: Fri Aug 29 12:54:35 2008 New Revision: 57676 Modified: pypy/dist/pypy/lang/gameboy/cpu.py pypy/dist/pypy/lang/gameboy/test/test_cpu.py pypy/dist/pypy/lang/gameboy/test/test_cpu_2.py pypy/dist/pypy/lang/gameboy/test/test_rom.py pypy/dist/pypy/lang/gameboy/timer.py pypy/dist/pypy/lang/gameboy/video.py Log: renamed flag register for better code understanding added doc text for flag register Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Fri Aug 29 12:54:35 2008 @@ -124,7 +124,41 @@ # ------------------------------------------------------------------------------ class FlagRegister(Register): - + """ + The Flag Register (lower 8bit of AF register) + Bit Name Set Clr Expl. + 7 zf Z NZ Zero Flag + 6 n - - Add/Sub-Flag (BCD) + 5 h - - Half Carry Flag (BCD) + 4 cy C NC Carry Flag + 3-0 - - - Not used (always zero) + Conatins the result from the recent instruction which has affected flags. + + The Zero Flag (Z) + This bit becomes set (1) if the result of an operation has been zero (0). + Used for conditional jumps. + + The Carry Flag (C, or Cy) + Becomes set when the result of an addition became bigger than FFh (8bit) or + FFFFh (16bit). Or when the result of a subtraction or comparision became + less than zero (much as for Z80 and 80x86 CPUs, but unlike as for 65XX and + ARM CPUs). Also the flag becomes set when a rotate/shift operation has + shifted-out a "1"-bit. + Used for conditional jumps, and for instructions such like ADC, SBC, RL, + RLA, etc. 
+ + The BCD Flags (N, H) + These flags are (rarely) used for the DAA instruction only, N Indicates + whether the previous instruction has been an addition or subtraction, and H + indicates carry for lower 4bits of the result, also for DAA, the C flag must + indicate carry for upper 8bits. + After adding/subtracting two BCD numbers, DAA is intended to convert the + result into BCD format; BCD numbers are ranged from 00h to 99h rather than + 00h to FFh. + Because C and H flags must contain carry-outs for each digit, DAA cannot be + used for 16bit operations (which have 4 digits), or for INC/DEC operations + (which do not affect C-flag). + """ def __init__(self, cpu, reset_value): assert isinstance(cpu, CPU) self.cpu = cpu @@ -134,16 +168,17 @@ def reset(self): self.partial_reset() - def partial_reset(self, keep_z=False, keep_n=False, keep_h=False, keep_c=False,\ + def partial_reset(self, keep_is_zero=False, keep_is_subtraction=False, + keep_is_half_carry=False, keep_is_carry=False,\ keep_p=False, keep_s=False): - if not keep_z: - self.z_flag = False - if not keep_n: - self.n_flag = False - if not keep_h: - self.h_flag = False - if not keep_c: - self.c_flag = False + if not keep_is_zero: + self.is_zero = False + if not keep_is_subtraction: + self.is_subtraction = False + if not keep_is_half_carry: + self.is_half_carry = False + if not keep_is_carry: + self.is_carry = False if not keep_p: self.p_flag = False if not keep_s: @@ -152,41 +187,41 @@ def get(self, use_cycles=True): value = 0 - value += (int(self.c_flag) << 4) - value += (int(self.h_flag) << 5) - value += (int(self.n_flag) << 6) - value += (int(self.z_flag) << 7) + value += (int(self.is_carry) << 4) + value += (int(self.is_half_carry) << 5) + value += (int(self.is_subtraction) << 6) + value += (int(self.is_zero) << 7) return value + self.lower def set(self, value, use_cycles=True): - self.c_flag = bool(value & (1 << 4)) - self.h_flag = bool(value & (1 << 5)) - self.n_flag = bool(value & (1 << 6)) - self.z_flag = bool(value & (1 << 7)) - self.lower = value & 0x0F + self.is_carry = bool(value & (1 << 4)) + self.is_half_carry = bool(value & (1 << 5)) + self.is_subtraction = bool(value & (1 << 6)) + self.is_zero = bool(value & (1 << 7)) + self.lower = value & 0x0F if use_cycles: self.cpu.cycles -= 1 - def z_flag_compare(self, a, reset=False): + def is_zero_check(self, a, reset=False): if reset: self.reset() if isinstance(a, (Register)): a = a.get() - self.z_flag = ((a & 0xFF) == 0) + self.is_zero = ((a & 0xFF) == 0) - def c_flag_compare(self, value, compare_and=0x01, reset=False): + def is_carry_compare(self, value, compare_and=0x01, reset=False): if reset: self.reset() - self.c_flag = ((value & compare_and) != 0) + self.is_carry = ((value & compare_and) != 0) - def h_flag_compare(self, value, a, inverted=False): + def is_half_carry_compare(self, value, a, inverted=False): if inverted: - self.h_flag = ((value & 0x0F) < (a & 0x0F)) + self.is_half_carry = ((value & 0x0F) < (a & 0x0F)) else: - self.h_flag = ((value & 0x0F) > (a & 0x0F)) + self.is_half_carry = ((value & 0x0F) > (a & 0x0F)) - #def c_flag_compare(self, a, b): - # self.c_flag = (a < b) + #def is_carry_compare(self, a, b): + # self.is_carry = (a < b) # # ------------------------------------------------------------------------------ @@ -211,41 +246,41 @@ self.reset() def ini_registers(self): - self.b = Register(self) - self.c = Register(self) - self.bc = DoubleRegister(self, self.b, self.c, constants.RESET_BC) - - self.d = Register(self) - self.e = Register(self) - self.de = 
DoubleRegister(self, self.d, self.e, constants.RESET_DE) - - self.h = Register(self) - self.l = Register(self) - self.hl = DoubleRegister(self, self.h, self.l, constants.RESET_HL) - - self.hli = ImmediatePseudoRegister(self, self.hl) - self.pc = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_PC) - self.sp = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_SP) - - self.a = Register(self, constants.RESET_A) - self.f = FlagRegister(self, constants.RESET_F) - self.af = DoubleRegister(self, self.a, self.f) + self.b = Register(self) + self.c = Register(self) + self.bc = DoubleRegister(self, self.b, self.c, constants.RESET_BC) + + self.d = Register(self) + self.e = Register(self) + self.de = DoubleRegister(self, self.d, self.e, constants.RESET_DE) + + self.h = Register(self) + self.l = Register(self) + self.hl = DoubleRegister(self, self.h, self.l, constants.RESET_HL) + + self.hli = ImmediatePseudoRegister(self, self.hl) + self.pc = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_PC) + self.sp = DoubleRegister(self, Register(self), Register(self), reset_value=constants.RESET_SP) + + self.a = Register(self, constants.RESET_A) + self.flag = FlagRegister(self, constants.RESET_F) + self.af = DoubleRegister(self, self.a, self.flag) def reset(self): self.reset_registers() - self.f.reset() - self.f.z_flag = True - self.ime = False - self.halted = False - self.cycles = 0 + self.flag.reset() + self.flag.is_zero = True + self.ime = False + self.halted = False + self.cycles = 0 self.instruction_counter = 0 self.last_op_code = -1 self.last_fetch_execute_op_code = -1 def reset_registers(self): self.a.reset() - self.f.reset() + self.flag.reset() self.bc.reset() self.de.reset() self.hl.reset() @@ -306,25 +341,25 @@ def is_z(self): """ zero flag""" - return self.f.z_flag + return self.flag.is_zero def is_c(self): """ carry flag, true if the result did not fit in the register""" - return self.f.c_flag + return self.flag.is_carry def is_h(self): """ half carry, carry from bit 3 to 4""" - return self.f.h_flag + return self.flag.is_half_carry def is_n(self): """ subtract flag, true if the last operation was a subtraction""" - return self.f.n_flag + return self.flag.is_subtraction def isS(self): - return self.f.s_flag + return self.flag.s_flag def is_p(self): - return self.f.p_flag + return self.flag.p_flag def is_not_z(self): return not self.is_z() @@ -483,31 +518,31 @@ # 2 cycles data = register.get() added = (self.hl.get() + data) # 1 cycle - self.f.partial_reset(keep_z=True) - self.f.h_flag = (((added ^ self.hl.get() ^ data) & 0x1000) != 0) - self.f.c_flag = (added >= 0x10000 or added < 0) + self.flag.partial_reset(keep_is_zero=True) + self.flag.is_half_carry = (((added ^ self.hl.get() ^ data) & 0x1000) != 0) + self.flag.is_carry = (added >= 0x10000 or added < 0) self.hl.set(added & 0xFFFF) self.cycles -= 1 def add_a_with_carry(self, getCaller, setCaller=None): # 1 cycle data = getCaller.get() - s = self.a.get() + data + int(self.f.c_flag) + s = self.a.get() + data + int(self.flag.is_carry) self.add_sub_flag_finish(s,data) def subtract_with_carry_a(self, getCaller, setCaller=None): # 1 cycle data = getCaller.get() - s = self.a.get() - data - int(self.f.c_flag) + s = self.a.get() - data - int(self.flag.is_carry) self.add_sub_flag_finish(s, data) - self.f.n_flag = True + self.flag.is_subtraction = True def add_sub_flag_finish(self, s, data): - self.f.reset() + self.flag.reset() # set the h flag if the 0x10 bit was 
affected - self.f.h_flag = (((s ^ self.a.get() ^ data) & 0x10) != 0) - self.f.c_flag = (s >= 0x100 or s < 0) - self.f.z_flag_compare(s) + self.flag.is_half_carry = (((s ^ self.a.get() ^ data) & 0x10) != 0) + self.flag.is_carry = (s >= 0x100 or s < 0) + self.flag.is_zero_check(s) self.a.set(s & 0xFF) # 1 cycle def subtract_a(self, getCaller, setCaller=None): @@ -528,32 +563,32 @@ def compare_a_simple(self, s): s = (self.a.get() - s) & 0xFF - self.f.reset() - self.f.n_flag = True - self.f.z_flag_compare(s) - self.subtract_hc_flag_finish(s) + self.flag.reset() + self.flag.is_subtraction = True + self.flag.is_zero_check(s) + self.subtract_his_carry_finish(s) self.cycles -= 1 - def subtract_hc_flag_finish(self, data): - self.f.c_flag = (data > self.a.get()) - self.f.h_flag_compare(data, self.a.get()) + def subtract_his_carry_finish(self, data): + self.flag.is_carry = (data > self.a.get()) + self.flag.is_half_carry_compare(data, self.a.get()) def and_a(self, getCaller, setCaller=None): # 1 cycle self.a.set(self.a.get() & getCaller.get()) # 1 cycle - self.f.reset() - self.f.z_flag_compare(self.a.get()) - self.f.h_flag = True + self.flag.reset() + self.flag.is_zero_check(self.a.get()) + self.flag.is_half_carry = True def xor_a(self, getCaller, setCaller=None): # 1 cycle self.a.set( self.a.get() ^ getCaller.get()) # 1 cycle - self.f.z_flag_compare(self.a.get(), reset=True) + self.flag.is_zero_check(self.a.get(), reset=True) def or_a(self, getCaller, setCaller=None): # 1 cycle self.a.set(self.a.get() | getCaller.get()) # 1 cycle - self.f.z_flag_compare(self.a.get(), reset=True) + self.flag.is_zero_check(self.a.get(), reset=True) def inc_double_register(self, register): # INC rr @@ -566,18 +601,18 @@ def inc(self, getCaller, setCaller): # 1 cycle data = (getCaller.get() + 1) & 0xFF - self.dec_inc_flag_finish(data, setCaller, 0x00) + self.dec_inis_carry_finish(data, setCaller, 0x00) def dec(self, getCaller, setCaller): # 1 cycle data = (getCaller.get() - 1) & 0xFF - self.dec_inc_flag_finish(data, setCaller, 0x0F) - self.f.n_flag = True + self.dec_inis_carry_finish(data, setCaller, 0x0F) + self.flag.is_subtraction = True - def dec_inc_flag_finish(self, data, setCaller, compare): - self.f.partial_reset(keep_c=True) - self.f.z_flag_compare(data) - self.f.h_flag = ((data & 0x0F) == compare) + def dec_inis_carry_finish(self, data, setCaller, compare): + self.flag.partial_reset(keep_is_carry=True) + self.flag.is_zero_check(data) + self.flag.is_half_carry = ((data & 0x0F) == compare) setCaller.set(data) # 1 cycle def rotate_left_circular(self, getCaller, setCaller): @@ -595,7 +630,7 @@ def rotate_left(self, getCaller, setCaller): # 1 cycle data = getCaller.get() - s = ((data & 0x7F) << 1) + int(self.f.c_flag) + s = ((data & 0x7F) << 1) + int(self.flag.is_carry) self.flags_and_setter_finish(s, data, setCaller, 0x80) # 1 cycle def rotate_left_a(self): @@ -618,7 +653,7 @@ # 1 cycle data = getCaller.get() s = (data >> 1) - if self.f.c_flag: + if self.flag.is_carry: s += 0x80 self.flags_and_setter_finish(s, data, setCaller) # 1 cycle @@ -648,25 +683,24 @@ def flags_and_setter_finish(self, s, data, setCaller, compare_and=0x01): # 2 cycles s &= 0xFF - self.f.reset() - self.f.z_flag_compare(s) - self.f.c_flag_compare(data, compare_and) + self.flag.reset() + self.flag.is_zero_check(s) + self.flag.is_carry_compare(data, compare_and) setCaller.set(s) # 1 cycle def swap(self, getCaller, setCaller): data = getCaller.get() # 1 cycle s = ((data << 4) + (data >> 4)) & 0xFF - self.f.z_flag_compare(s, reset=True) + 
self.flag.is_zero_check(s, reset=True) setCaller.set(s) def test_bit(self, getCaller, setCaller, n): # 2 cycles - self.f.partial_reset(keep_c=True) - self.f.h_flag = True - self.f.z_flag = False - self.f.z_flag = ((getCaller.get() & (1 << n)) == 0) + self.flag.partial_reset(keep_is_carry=True) + self.flag.is_half_carry = True + self.flag.is_zero = ((getCaller.get() & (1 << n)) == 0) self.cycles -= 1 def set_bit(self, getCaller, setCaller, n): @@ -733,7 +767,7 @@ # LDH (nn),A 3 cycles self.write(0xFF00 + self.fetch(), self.a.get()) # 2 + 1 cycles - def write_a_at_expaded_c_address(self): + def write_a_at_expanded_c_address(self): # LDH (C),A 2 cycles self.write(0xFF00 + self.c.get(), self.a.get()) # 2 cycles @@ -757,8 +791,8 @@ def complement_a(self): # CPA self.a.set(self.a.get() ^ 0xFF) - self.f.n_flag = True - self.f.h_flag = True + self.flag.is_subtraction = True + self.flag.is_half_carry = True def decimal_adjust_a(self): # DAA 1 cycle @@ -777,10 +811,10 @@ self.a.set((self.a.get() + delta) & 0xFF) # 1 cycle else: self.a.set((self.a.get() - delta) & 0xFF) # 1 cycle - self.f.partial_reset(keep_n=True) + self.flag.partial_reset(keep_is_subtraction=True) if delta >= 0x60: - self.f.c_flag = True - self.f.z_flag_compare(self.a.get()) + self.flag.is_carry = True + self.flag.is_zero_check(self.a.get()) def increment_sp_by_fetch(self): # ADD SP,nn 4 cycles @@ -796,25 +830,25 @@ # 1 cycle offset = process_2_complement(self.fetch()) # 1 cycle s = (self.sp.get() + offset) & 0xFFFF - self.f.reset() + self.flag.reset() if (offset >= 0): - self.f.c_flag = (s < self.sp.get()) + self.flag.is_carry = (s < self.sp.get()) if (s & 0x0F00) < (self.sp.get() & 0x0F00): - self.f.h_flag = True + self.flag.is_half_carry = True else: - self.f.c_flag = (s > self.sp.get()) + self.flag.is_carry = (s > self.sp.get()) if (s & 0x0F00) > (self.sp.get() & 0x0F00): - self.f.h_flag = True + self.flag.is_half_carry = True return s def complement_carry_flag(self): # CCF/SCF - self.f.partial_reset(keep_z=True, keep_c=True) - self.f.c_flag = not self.f.c_flag + self.flag.partial_reset(keep_is_zero=True, keep_is_carry=True) + self.flag.is_carry = not self.flag.is_carry def set_carry_flag(self): - self.f.partial_reset(keep_z=True) - self.f.c_flag = True + self.flag.partial_reset(keep_is_zero=True) + self.flag.is_carry = True def nop(self): # NOP 1 cycle @@ -880,7 +914,7 @@ # RST nn 4 cycles self.call(nn) # 4 cycles - def disable_interrups(self): + def disable_interrupts(self): # DI/EI 1 cycle self.ime = False self.cycles -= 1 @@ -1072,9 +1106,9 @@ (0x37, CPU.set_carry_flag), (0x3F, CPU.complement_carry_flag), (0x76, CPU.halt), - (0xF3, CPU.disable_interrups), + (0xF3, CPU.disable_interrupts), (0xFB, CPU.enable_interrupts), - (0xE2, CPU.write_a_at_expaded_c_address), + (0xE2, CPU.write_a_at_expanded_c_address), (0xEA, CPU.store_a_at_fetched_address), (0xF2, CPU.store_expanded_c_in_a), (0xFA, CPU.store_fetched_memory_in_a), Modified: pypy/dist/pypy/lang/gameboy/test/test_cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_cpu.py Fri Aug 29 12:54:35 2008 @@ -34,7 +34,7 @@ def test_reset(): cpu = get_cpu() assert cpu.a.get() == 0x01 - #assert cpu.f.get() == 0xB0 + #assert cpu.flag.get() == 0xB0 assert cpu.b.get() == 0x00 assert cpu.c.get() == 0x13 assert cpu.de.get() == 0x00D8 @@ -46,7 +46,7 @@ assert_default_registers(cpu) assert cpu.af.cpu == cpu assert cpu.a.cpu == cpu - assert cpu.f.cpu == cpu 
+ assert cpu.flag.cpu == cpu assert cpu.bc.cpu == cpu assert cpu.b.cpu == cpu @@ -120,23 +120,23 @@ def test_flags(): cpu = get_cpu() - cpu.f.set(constants.Z_FLAG) + cpu.flag.set(constants.Z_FLAG) assert cpu.is_z() == True assert cpu.is_not_z() == False - cpu.f.set(~constants.Z_FLAG) + cpu.flag.set(~constants.Z_FLAG) assert cpu.is_z() == False assert cpu.is_not_z() == True - cpu.f.set(constants.C_FLAG) + cpu.flag.set(constants.C_FLAG) assert cpu.is_c() == True assert cpu.is_not_c() == False - cpu.f.set(~constants.C_FLAG) + cpu.flag.set(~constants.C_FLAG) assert cpu.is_c() == False assert cpu.is_not_c() == True def test_flags_memory_access(): cpu = get_cpu() - cpu.f.set(constants.Z_FLAG) + cpu.flag.set(constants.Z_FLAG) assert cpu.is_z() == True prepare_for_fetch(cpu, 0x12, 0x12) cpu.memory.write(0x1234, 0x12) @@ -220,7 +220,7 @@ if de is not None: assert cpu.de.get() == de, "Register de is %s but should be %s" % (hex(cpu.de.get()),hex(de)) if f is not None: - assert cpu.f.get() == f, "Register f is %s but should be %s" % (hex(cpu.f.get()),hex(f)) + assert cpu.flag.get() == f, "Register f is %s but should be %s" % (hex(cpu.flag.get()),hex(f)) if hl is not None: assert cpu.hl.get() == hl, "Register hl is %s but should be %s" % (hex(cpu.hl.get()), hex(hl)) if sp is not None: @@ -234,17 +234,17 @@ def assert_flags(cpu, z_flag=None, n_flag=None, h_flag=None, c_flag=None, p_flag=None, s_flag=None): if z_flag is not None: - assert cpu.f.z_flag == z_flag, "Z-Flag is %s but should be %s" % (cpu.f.z_flag, z_flag) + assert cpu.flag.is_zero == z_flag, "Z-Flag is %s but should be %s" % (cpu.flag.is_zero, z_flag) if n_flag is not None: - assert cpu.f.n_flag == n_flag, "N-Flag is %s but should be %s" % (cpu.f.n_flag, n_flag) + assert cpu.flag.is_subtraction == n_flag, "N-Flag is %s but should be %s" % (cpu.flag.is_subtraction, n_flag) if h_flag is not None: - assert cpu.f.h_flag == h_flag, "H-Flag is %s but should be %s" % (cpu.f.h_flag, h_flag) + assert cpu.flag.is_half_carry == h_flag, "H-Flag is %s but should be %s" % (cpu.flag.is_half_carry, h_flag) if c_flag is not None: - assert cpu.f.c_flag == c_flag, "C-Flag is %s but should be %s" % (cpu.f.c_flag, c_flag) + assert cpu.flag.is_carry == c_flag, "C-Flag is %s but should be %s" % (cpu.flag.is_carry, c_flag) if p_flag is not None: - assert cpu.f.p_flag == p_flag, "P-Flag is %s but should be %s" % (cpu.f.p_flag, p_flag) + assert cpu.flag.p_flag == p_flag, "P-Flag is %s but should be %s" % (cpu.flag.p_flag, p_flag) if s_flag is not None: - assert cpu.f.s_flag == s_flag, "S-Flag is %s but should be %s" % (cpu.f.s_flag, s_flag) + assert cpu.flag.s_flag == s_flag, "S-Flag is %s but should be %s" % (cpu.flag.s_flag, s_flag) def prepare_for_fetch(cpu, value, valueLo=None): pc = cpu.pc.get() @@ -373,7 +373,7 @@ cycle_test(cpu, 0x18, 3) # relative offset + one fetch assert cpu.pc.get() == 0x1234 + jump + 1 - assert_default_registers(cpu, f=cpu.f.get(), pc=0x1234+jump+1) + assert_default_registers(cpu, f=cpu.flag.get(), pc=0x1234+jump+1) # jr_NZ_nn see test_jr_cc_nn def test_0x20_to_0x38_relative_conditional_jump(): @@ -385,17 +385,17 @@ for jump in range(-128,128): cpu.pc.set(0x1234) prepare_for_fetch(cpu, jump & 0xFF) - cpu.f.set(flags[i]) + cpu.flag.set(flags[i]) cycle_test(cpu, opCode, 3) # relative offset + one fetch assert cpu.pc.get() == 0x1234 + jump + 1 - assert_default_registers(cpu, f=cpu.f.get(), pc=0x1234+jump+1) + assert_default_registers(cpu, f=cpu.flag.get(), pc=0x1234+jump+1) pc = cpu.pc.get() - cpu.f.set(~flags[i]) + cpu.flag.set(~flags[i]) 
cycle_test(cpu, opCode, 2) assert cpu.pc.get() == pc+1 - assert_default_registers(cpu, f=cpu.f.get(), pc=pc+1) + assert_default_registers(cpu, f=cpu.flag.get(), pc=pc+1) value += 3 opCode += 0x08 @@ -544,7 +544,7 @@ # cycle testing is done in the other tests a = cpu.a a.set(0xFF) - cpu.f.c_flag = True + cpu.flag.is_carry = True cpu.inc(RegisterCallWrapper(a), RegisterCallWrapper(a)) assert_default_flags(cpu, z_flag=True, h_flag=True, c_flag=True) @@ -589,22 +589,22 @@ # cycle testing is done in the other tests a = cpu.a a.set(1) - cpu.f.c_flag = True + cpu.flag.is_carry = True cpu.dec(RegisterCallWrapper(a), RegisterCallWrapper(a)) assert_default_flags(cpu, z_flag=True, h_flag=False, n_flag=True, c_flag=True) a.set(1) - cpu.f.c_flag = False + cpu.flag.is_carry = False cpu.dec(RegisterCallWrapper(a), RegisterCallWrapper(a)) assert_default_flags(cpu, z_flag=True, h_flag=False, n_flag=True, c_flag=False) a.set(0x0F+1) - cpu.f.c_flag = True + cpu.flag.is_carry = True cpu.dec(RegisterCallWrapper(a), RegisterCallWrapper(a)) assert_default_flags(cpu, z_flag=False, h_flag=True, n_flag=True, c_flag=True) a.set(0x0F+1) - cpu.f.c_flag = False + cpu.flag.is_carry = False cpu.dec(RegisterCallWrapper(a), RegisterCallWrapper(a)) assert_default_flags(cpu, z_flag=False, h_flag=True, n_flag=True, c_flag=False) @@ -701,7 +701,7 @@ def test_0x17(): cpu = get_cpu() value = 0x01 - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(value) cycle_test(cpu, 0x17, 1) assert_default_registers(cpu, a=(value << 1) & 0xFF, f=None); @@ -710,20 +710,20 @@ def test_0x1F(): cpu = get_cpu() value = 0x40 - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(value) cycle_test(cpu, 0x1F, 1) assert_default_registers(cpu, a=(value >> 1) & 0xFF, f=None); cpu.reset() - cpu.f.set(0x00) + cpu.flag.set(0x00) value = 0x40 cpu.a.set(value) cycle_test(cpu, 0x1F, 1) assert_default_registers(cpu, a=(value >> 1) & 0xFF, f=None); cpu.reset() - cpu.f.set(0x00) + cpu.flag.set(0x00) value = 0x02 cpu.a.set(value) cycle_test(cpu, 0x1F, 1) @@ -738,9 +738,9 @@ def test_0x2F_complement_a(): cpu = get_cpu() value = 0x12 - fValue = cpu.f.get() - cpu.f.n_flag = False - cpu.f.h_flag = False + fValue = cpu.flag.get() + cpu.flag.is_subtraction = False + cpu.flag.is_half_carry = False cpu.a.set(value) cycle_test(cpu, 0x2F, 1) assert_default_registers(cpu, a=value^0xFF, f=None) @@ -748,12 +748,12 @@ # scf def test_0x37(): cpu = get_cpu() - cpu.f.c_flag = False + cpu.flag.is_carry = False cycle_test(cpu, 0x37, 0) assert_default_registers(cpu, f=None) assert_default_flags(cpu, c_flag=True) - cpu.f.c_flag = True + cpu.flag.is_carry = True cycle_test(cpu, 0x37, 0) assert_default_registers(cpu, f=None) assert_default_flags(cpu, c_flag=True) @@ -761,12 +761,12 @@ # ccf def test_0x3F(): cpu = get_cpu() - cpu.f.c_flag = True + cpu.flag.is_carry = True cycle_test(cpu, 0x3F, 0) assert_default_registers(cpu, f=None) assert_default_flags(cpu, c_flag=False) - cpu.f.c_flag = False + cpu.flag.is_carry = False cycle_test(cpu, 0x3F, 0) assert_default_registers(cpu, f=None) assert_default_flags(cpu, c_flag=True) @@ -844,7 +844,7 @@ assert cpu.a.get() == 2*value cpu.reset() - cpu.f.c_flag = True + cpu.flag.is_carry = True cpu.a.set(value-1) register.set(value) numCycles= 1 @@ -893,7 +893,7 @@ assert cpu.a.get() == 0 cpu.reset() - cpu.f.c_flag = True + cpu.flag.is_carry = True cpu.a.set(value+1) register.set(value) numCycles= 1 @@ -989,9 +989,9 @@ numCycles = 2 cycle_test(cpu, opCode, numCycles) if register == cpu.a: - assert cpu.f.z_flag == True + assert cpu.flag.is_zero == True else: 
- assert cpu.f.z_flag == False + assert cpu.flag.is_zero == False cpu.a.set(0x12) register.set(0x12) @@ -999,7 +999,7 @@ if register == cpu.hli: numCycles = 2 cycle_test(cpu, opCode, numCycles) - assert cpu.f.z_flag == True + assert cpu.flag.is_zero == True opCode += 0x01 @@ -1012,15 +1012,15 @@ for i in range(0, 4): cpu.reset() prepare_for_pop(cpu, value >> 8, value & 0xFF) - cpu.f.set(flags[i]) + cpu.flag.set(flags[i]) cycle_test(cpu, opCode, 5) assert cpu.pc.get() == value cpu.reset() prepare_for_pop(cpu, value >> 8, value & 0xFF) - cpu.f.set(~flags[i]) + cpu.flag.set(~flags[i]) cycle_test(cpu, opCode, 2) - assert_default_registers(cpu, f=cpu.f.get()) + assert_default_registers(cpu, f=cpu.flag.get()) value += 3 opCode += 0x08 @@ -1088,7 +1088,7 @@ cpu.sp.set(valueSp) pc = cpu.pc.get() cycle_test(cpu, 0xF8, 3) - f = cpu.f.get(); + f = cpu.flag.get(); assert_default_registers(cpu, hl=valueSp+value, f=f, sp=valueSp, pc=pc+1) # pop_BC to pop_AF @@ -1195,13 +1195,13 @@ cpu.reset() prepare_for_fetch(cpu, value >> 8, value & 0xFF) pc = cpu.pc.get() - cpu.f.set(flags[i]) + cpu.flag.set(flags[i]) cycle_test(cpu, opCode, 4) assert_default_registers(cpu, f=flags[i] & 0xFF, pc=value) cpu.reset() prepare_for_fetch(cpu, value >> 8, value & 0xFF) - cpu.f.set(~flags[i]) + cpu.flag.set(~flags[i]) pc = cpu.pc.get() cycle_test(cpu, opCode, 3) assert_default_registers(cpu, f=~flags[i] & 0xFF, pc=pc+2) @@ -1289,7 +1289,7 @@ # set the condition to false and dont call flagSetter(cpu, False) cpu.pc.set(0) - f = cpu.f.get() + f = cpu.flag.get() cycle_test(cpu, opCode, 3) assert_default_registers(cpu, pc=2, f=f) # set the condition to true: unconditional_call @@ -1300,7 +1300,7 @@ cpu.sp.set(0x03) prepare_for_fetch(cpu, fetchValue >> 8, fetchValue & 0xFF) assert cpu.sp.get() == 0x03 - f = cpu.f.get() + f = cpu.flag.get() cycle_test(cpu, opCode, 6) assert_default_registers(cpu, pc=fetchValue, sp=1, f=f) # 2 fetches happen before the pc is pushed on the stack @@ -1313,7 +1313,7 @@ conditional_call_test(cpu, 0xC4, set_flag_0xC4) def set_flag_0xC4(cpu, value): - cpu.f.z_flag = not value + cpu.flag.is_zero = not value # call_Z_nnnn def test_0xCC_call_z_nnn(): @@ -1321,7 +1321,7 @@ conditional_call_test(cpu, 0xCC, set_flag_0xCC) def set_flag_0xCC(cpu, value): - cpu.f.z_flag = value + cpu.flag.is_zero = value # call_NC_nnnn def test_0xD4_call_nc_nnn(): @@ -1329,7 +1329,7 @@ conditional_call_test(cpu, 0xD4, set_flag_0xD4) def set_flag_0xD4(cpu, value): - cpu.f.c_flag = not value + cpu.flag.is_carry = not value # call_C_nnnn def test_0xDC_call_C_nnnn(): @@ -1337,7 +1337,7 @@ conditional_call_test(cpu, 0xDC, set_flag_0xDC) def set_flag_0xDC(cpu, value): - cpu.f.c_flag = value + cpu.flag.is_carry = value # call_nnnn def test_unconditional_call(): @@ -1394,7 +1394,7 @@ pc = cpu.pc.get() cycle_test(cpu, opCode, cycles) - assert_default_registers(cpu, a=opCaller(value,valueAdd, cpu), pc=pc+1, f=cpu.f.get()) + assert_default_registers(cpu, a=opCaller(value,valueAdd, cpu), pc=pc+1, f=cpu.flag.get()) return cpu # add_A_nn @@ -1436,8 +1436,8 @@ cycle_test(cpu, 0xFE, 2) - assert_default_registers(cpu, a=valueA, pc=pc+1, f=cpu.f.get()) - assert cpu.f.z_flag == True + assert_default_registers(cpu, a=valueA, pc=pc+1, f=cpu.flag.get()) + assert cpu.flag.is_zero == True # rst(0x00) to rst(0x38) def test_0xC7_to_0xFF_reset(): @@ -1528,12 +1528,12 @@ cpu.reset() register.set(0) fetch_execute_cycle_test_second_order(cpu, registerOpCode, cycles) - assert cpu.f.z_flag == True + assert cpu.flag.is_zero == True cpu.reset() register.set((1<> 
8, value & 0x00FF) @@ -125,7 +125,7 @@ def test_add_with_carry(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_value_call(cpu, CPU.add_a_with_carry, 0x00) assert cpu.a.get() == 0x01 @@ -135,7 +135,7 @@ def test_add_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_value_call(cpu, CPU.add_a, 0x00) assert cpu.a.get() == 0x00 @@ -144,25 +144,25 @@ add_flag_test(cpu, CPU.add_a) def add_flag_test(cpu, method): - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.add_a_with_carry, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x0F) method_value_call(cpu, CPU.add_a_with_carry, 0x01) assert cpu.a.get() == 0x10 assert_flags(cpu, z=False, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_value_call(cpu, CPU.add_a_with_carry, 0xF0) assert cpu.a.get() == 0xEF assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_value_call(cpu, CPU.add_a_with_carry, 0x01) assert cpu.a.get() == 0x00 @@ -170,37 +170,37 @@ def test_add_hl(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.hl.set(0x0000) method_value_call(cpu, CPU.add_hl, 0x0000) assert cpu.hl.get() == 0x0000 assert_flags(cpu, z=True, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.hl.set(0x0000) method_value_call(cpu, CPU.add_hl, 0x0000) assert cpu.hl.get() == 0x0000 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.hl.set(0x0000) method_value_call(cpu, CPU.add_hl, 0x00) assert cpu.hl.get() == 0x0000 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.hl.set(0x0F00) method_value_call(cpu, CPU.add_hl, 0x0100) assert cpu.hl.get() == 0x1000 assert_flags(cpu, z=False, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.hl.set(0xFF00) method_value_call(cpu, CPU.add_hl, 0xF000) assert cpu.hl.get() == 0xEF00 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.hl.set(0xFF00) method_value_call(cpu, CPU.add_hl, 0x0100) assert cpu.hl.get() == 0x0000 @@ -208,7 +208,7 @@ def test_add_sp(): cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) for i in range(0, 0x7F): cpu.sp.set(0x00) prepare_for_fetch(cpu, i); @@ -225,35 +225,35 @@ def test_add_sp_carry(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0xFF) prepare_for_fetch(cpu, 0xFF) cpu.increment_sp_by_fetch() assert cpu.sp.get() == 0xFE assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.sp.set(0xFF) prepare_for_fetch(cpu, 0xFF) cpu.increment_sp_by_fetch() assert cpu.sp.get() == 0xFE assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.sp.set(0x00) prepare_for_fetch(cpu, 0x01) cpu.increment_sp_by_fetch() assert cpu.sp.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0x00) prepare_for_fetch(cpu, 0x01) cpu.increment_sp_by_fetch() assert cpu.sp.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0x02) prepare_for_fetch(cpu, 0xFE) cpu.increment_sp_by_fetch() @@ -262,7 +262,7 @@ def test_add_sp_carry_flags(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0x0FFF) prepare_for_fetch(cpu, 0x01) 
cpu.increment_sp_by_fetch() @@ -291,12 +291,12 @@ def test_and_a(): cpu = get_cpu() cpu.sp.set(0xFF) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) method_value_call(cpu, CPU.and_a, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) method_value_call(cpu, CPU.and_a, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=True, c=False) @@ -306,7 +306,7 @@ assert cpu.a.get() == 0x12 assert_flags(cpu, z=False, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_value_call(cpu, CPU.and_a, 0x12) assert cpu.a.get() == 0x12 @@ -314,31 +314,31 @@ def test_or_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_value_call(cpu, CPU.or_a, 0xFF) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.or_a, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_value_call(cpu, CPU.or_a, 0x00) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.or_a, 0x00) assert cpu.a.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.or_a, 0xFF) assert cpu.a.get() == 0xFF @@ -346,31 +346,31 @@ def test_xor_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_value_call(cpu, CPU.xor_a, 0xFF) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_value_call(cpu, CPU.xor_a, 0xFF) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.xor_a, 0x00) assert cpu.a.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.xor_a, 0xFF) assert cpu.a.get() == 0xFF - 0x01 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.xor_a, 0x00) assert cpu.a.get() == 0x00 @@ -378,19 +378,19 @@ def test_bit(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_value_call(cpu, CPU.test_bit, cpu.a, 0x00) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=True, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_value_call(cpu, CPU.test_bit, cpu.a, 0x00) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x40) method_register_value_call(cpu, CPU.test_bit, cpu.a, 0x05) assert_flags(cpu, z=True, n=False, h=True, c=False) @@ -404,118 +404,118 @@ def test_set_bit(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_register_value_call(cpu, CPU.set_bit, cpu.a, 0x00) assert cpu.a.get() == 0x01 - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF for i in range(8): cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_register_value_call(cpu, CPU.set_bit, cpu.a, i) assert cpu.a.get() == 0x01 << i - assert cpu.f.get() == 0x00 + assert cpu.flag.get() == 0x00 def test_reset_bit(): cpu = get_cpu() - 
cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x01) method_register_value_call(cpu, CPU.reset_bit, cpu.a, 0x00) assert cpu.a.get() == 0x00 - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF for i in range(8): cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_value_call(cpu, CPU.reset_bit, cpu.a, i) assert cpu.a.get() == 0xFF - (0x01 << i) - assert cpu.f.get() == 0x00 + assert cpu.flag.get() == 0x00 def test_unconditional_call(): cpu = get_cpu() - cpu.f.set(0x12) + cpu.flag.set(0x12) cpu.pc.set(0x1234) assert cpu.pc.get_hi() == 0x12 assert cpu.pc.get_lo() == 0x34 prepare_for_double_fetch(cpu, 0x5678) cpu.unconditional_call() - assert cpu.f.get() == 0x12 + assert cpu.flag.get() == 0x12 assert cpu.pop() == 0x34+2 assert cpu.pop() == 0x12 assert cpu.pc.get() == 0x5678 def test_conditional_call(): cpu = get_cpu() - cpu.f.set(0x12) + cpu.flag.set(0x12) cpu.pc.set(0x1234) cpu.conditional_call(False) assert cpu.pc.get() == 0x1234+2 - assert cpu.f.get() == 0x12 + assert cpu.flag.get() == 0x12 cpu.reset() - cpu.f.set(0x12) + cpu.flag.set(0x12) cpu.pc.set(0x1234) assert cpu.pc.get_hi() == 0x12 assert cpu.pc.get_lo() == 0x34 prepare_for_double_fetch(cpu, 0x5678) cpu.conditional_call(True) - assert cpu.f.get() == 0x12 + assert cpu.flag.get() == 0x12 assert cpu.pop() == 0x34+2 assert cpu.pop() == 0x12 assert cpu.pc.get() == 0x5678 def test_complement_carry_flag(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.complement_carry_flag() assert_flags(cpu, z=True, n=False, h=False, c=False) cpu.complement_carry_flag() assert_flags(cpu, z=True, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.complement_carry_flag() assert_flags(cpu, z=False, n=False, h=False, c=True) def test_compare_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_value_call(cpu, CPU.compare_a, 0x00) assert_flags(cpu, z=True, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.compare_a, 0x00) assert_flags(cpu, z=True, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x11) method_value_call(cpu, CPU.compare_a, 0x02) assert_flags(cpu, z=False, n=True, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x0F) method_value_call(cpu, CPU.compare_a, 0xFF) assert_flags(cpu, z=False, n=True, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.compare_a, 0x01) assert_flags(cpu, z=False, n=True, h=True, c=True) def test_complement_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xF0) cpu.complement_a() assert cpu.a.get() == 0x0F assert_flags(cpu, z=True, n=True, h=True, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.complement_a() assert cpu.a.get() == 0xF0 assert_flags(cpu, z=False, n=True, h=True, c=False) @@ -523,36 +523,36 @@ def test_decimal_adjust_a(): py.test.skip("not yet implemented") cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0) cpu.decimal_adjust_a() assert_flags(cpu, z=False, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0) cpu.decimal_adjust_a() assert_flags(cpu, z=False, n=False, h=False, c=False) def test_decrement_register(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_call(cpu, CPU.dec, cpu.a) assert cpu.a.get() == 0xFE assert_flags(cpu, z=False, n=True, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) method_register_call(cpu, CPU.dec, cpu.a) assert 
cpu.a.get() == 0xFD assert_flags(cpu, z=False, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_register_call(cpu, CPU.dec, cpu.a) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x10) method_register_call(cpu, CPU.dec, cpu.a) assert cpu.a.get() == 0x0F @@ -560,24 +560,24 @@ def test_increment_register(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xF1) method_register_call(cpu, CPU.inc, cpu.a) assert cpu.a.get() == 0xF2 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) method_register_call(cpu, CPU.inc, cpu.a) assert cpu.a.get() == 0xF3 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x0F) method_register_call(cpu, CPU.inc, cpu.a) assert cpu.a.get() == 0x10 assert_flags(cpu, z=False, n=False, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_call(cpu, CPU.inc, cpu.a) assert cpu.a.get() == 0x00 @@ -585,81 +585,81 @@ def test_decrement_double_register(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.bc.set(0xFFFF) cpu.dec_double_register(cpu.bc) assert cpu.bc.get() == 0xFFFE - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.dec_double_register(cpu.bc) assert cpu.bc.get() == 0xFFFD - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.bc.set(0x0000) cpu.dec_double_register(cpu.bc) assert cpu.bc.get() == 0xFFFF - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF def test_increment_double_register(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.bc.set(0xFFFD) cpu.inc_double_register(cpu.bc) assert cpu.bc.get() == 0xFFFE - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.inc_double_register(cpu.bc) assert cpu.bc.get() == 0xFFFF - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.inc_double_register(cpu.bc) assert cpu.bc.get() == 0x0000 - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF def test_disable_interrupts(): cpu = get_cpu() - cpu.f.set(0xFF) - cpu.disable_interrups() - assert cpu.f.get() == 0xFF - - cpu.f.set(0x00) - cpu.disable_interrups() - assert cpu.f.get() == 0x00 + cpu.flag.set(0xFF) + cpu.disable_interrupts() + assert cpu.flag.get() == 0xFF + + cpu.flag.set(0x00) + cpu.disable_interrupts() + assert cpu.flag.get() == 0x00 def test_enable_interrupts(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.enable_interrupts() - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.enable_interrupts() - assert cpu.f.get() == 0x00 + assert cpu.flag.get() == 0x00 def test_jump(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) prepare_for_double_fetch(cpu, 0x1234) cpu.jump() - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF assert cpu.pc.get() == 0x1234 def test_conditional_jump(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) prepare_for_double_fetch(cpu, 0x1234) cpu.conditional_jump(True) - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF assert cpu.pc.get() == 0x1234 cpu.pc.set(0x1234) prepare_for_double_fetch(cpu, 0x1234) cpu.conditional_jump(False) - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF assert cpu.pc.get() == 0x1234+2 def test_process_2_complement(): 
@@ -674,12 +674,12 @@ def test_relative_jump(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) for i in range(0x7F): cpu.pc.set(0x1234) prepare_for_fetch(cpu, i) cpu.relative_jump() - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF #+1 for a single fetch assert cpu.pc.get() == 0x1234+1 + i @@ -687,30 +687,30 @@ cpu.pc.set(0x1234) prepare_for_fetch(cpu, 0xFF - i+1) cpu.relative_jump() - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF #+1 for a single fetch assert cpu.pc.get() == 0x1234+1 - i def test_conditional_relative_jump(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) for i in range(0x7F): cpu.pc.set(0x1234) prepare_for_fetch(cpu, i) cpu.relative_conditional_jump(True) - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF #+1 for a single fetch assert cpu.pc.get() == 0x1234+1 + i cpu.pc.set(0x1234) prepare_for_fetch(cpu, 0x12) cpu.relative_conditional_jump(False) - assert cpu.f.get() == 0xFF + assert cpu.flag.get() == 0xFF assert cpu.pc.get() == 0x1234+1 def store_fetch_added_sp_in_hl(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0x1234) prepare_for_fetch(0x02) cpu.store_fetch_added_sp_in_hl() @@ -718,7 +718,7 @@ assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.sp.set(0x1234) prepare_for_fetch(0x02) cpu.store_fetch_added_sp_in_hl() @@ -726,7 +726,7 @@ assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.sp.set(0x1234) prepare_for_fetch(0xFF) cpu.store_fetch_added_sp_in_hl() @@ -735,44 +735,44 @@ def test_rotate_left(): cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0xFE assert_flags(cpu, z=False, n=False, h=False, c=True) cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0xFE+1 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0x02 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x80) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=True) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x80) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x40) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0x80 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x7F) method_register_call(cpu, CPU.rotate_left, cpu.a) assert cpu.a.get() == 0xFE @@ -780,39 +780,39 @@ def test_rotate_right(): cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x7F assert_flags(cpu, z=False, n=False, h=False, c=True) cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x7F + 0x80 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=True) - cpu.f.set(0xFF) + 
cpu.flag.set(0xFF) cpu.a.set(0x01) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x80 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x08) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x04 assert_flags(cpu, z=False, n=False, h=False, c=False) for i in range(0, 7): - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x80 >> i) method_register_call(cpu, CPU.rotate_right, cpu.a) assert cpu.a.get() == 0x80 >> (i+1) @@ -820,26 +820,26 @@ def test_rotate_left_circular(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_left_circular, cpu.a) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=True) cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_left_circular, cpu.a) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x80) method_register_call(cpu, CPU.rotate_left_circular, cpu.a) assert cpu.a.get() == 0x01 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x01) method_register_call(cpu, CPU.rotate_left_circular, cpu.a) assert cpu.a.get() == 0x02 @@ -847,26 +847,26 @@ def test_rotate_right_circular(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_right_circular, cpu.a) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=True) cpu = get_cpu() - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_register_call(cpu, CPU.rotate_right_circular, cpu.a) assert cpu.a.get() == 0xFF assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_register_call(cpu, CPU.rotate_right_circular, cpu.a) assert cpu.a.get() == 0x80 assert_flags(cpu, z=False, n=False, h=False, c=True) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x02) method_register_call(cpu, CPU.rotate_right_circular, cpu.a) assert cpu.a.get() == 0x01 @@ -874,25 +874,25 @@ def test_subtract_with_carry_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x01) method_value_call(cpu, CPU.subtract_with_carry_a, 0x00) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.subtract_with_carry_a, 0x00) assert cpu.a.get() == 0x01 assert_flags(cpu, z=False, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x10) method_value_call(cpu, CPU.subtract_with_carry_a, 0x01) assert cpu.a.get() == 0x0F assert_flags(cpu, z=False, n=True, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.subtract_with_carry_a, 0x01) assert cpu.a.get() == 0xFF @@ -902,13 +902,13 @@ def test_subtract_a(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0xFF) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0xFE assert_flags(cpu, z=False, n=True, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x01) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0x00 @@ -917,25 +917,25 @@ subtract_flag_test(cpu, CPU.subtract_a) def subtract_flag_test(cpu, method): - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0xFF) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0xFE assert_flags(cpu, z=False, 
n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x01) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=True, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x10) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0x0F assert_flags(cpu, z=False, n=True, h=True, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x00) method_value_call(cpu, CPU.subtract_a, 0x01) assert cpu.a.get() == 0xFF @@ -943,19 +943,19 @@ def test_swap(): cpu = get_cpu() - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x12) method_register_call(cpu, CPU.swap, cpu.a) assert cpu.a.get() == 0x21 assert_flags(cpu, z=False, n=False, h=False, c=False) - cpu.f.set(0xFF) + cpu.flag.set(0xFF) cpu.a.set(0x00) method_register_call(cpu, CPU.swap, cpu.a) assert cpu.a.get() == 0x00 assert_flags(cpu, z=True, n=False, h=False, c=False) - cpu.f.set(0x00) + cpu.flag.set(0x00) cpu.a.set(0x34) method_register_call(cpu, CPU.swap, cpu.a) assert cpu.a.get() == 0x43 Modified: pypy/dist/pypy/lang/gameboy/test/test_rom.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/test/test_rom.py (original) +++ pypy/dist/pypy/lang/gameboy/test/test_rom.py Fri Aug 29 12:54:35 2008 @@ -114,29 +114,29 @@ emulate_step_op_codes_test(gameboy, [0xDD, 0xAF, 0xC6]) pc = cpu.pc.get() assert cpu.a.get() == 1 - assert cpu.f.c_flag == False + assert cpu.flag.c_flag == False # check jr in .loop2 emulate_step_op_codes_test(gameboy, [0x30]) assert cpu.pc.get() == pc-2 # looping in .loop2 emulate_step_op_codes_test(gameboy, [0xC6, 0x30]*0xFF) assert cpu.a.get() == 0 - assert cpu.f.c_flag == True + assert cpu.flag.c_flag == True # debugg call reseting emulate_step_op_codes_test(gameboy, [0xDD, 0xAF]) assert cpu.a.get() == 0 - assert cpu.f.c_flag == False + assert cpu.flag.c_flag == False pc = cpu.pc.get() # enter .loop3 - c_flag = cpu.f.c_flag + c_flag = cpu.flag.c_flag emulate_step_op_codes_test(gameboy, [0x3C, 0xD2]) - assert cpu.f.c_flag == c_flag + assert cpu.flag.c_flag == c_flag assert cpu.a.get() == 1 assert cpu.pc.get() == pc # looping in .loop3 emulate_step_op_codes_test(gameboy, [0x3C, 0xD2]*255) assert cpu.a.get() == 0 - assert cpu.f.c_flag == False + assert cpu.flag.c_flag == False emulate_step_op_codes_test(gameboy, [0xDD, 0x76, 0x76]) @@ -192,7 +192,7 @@ emulate_step_op_codes_test(gameboy, [0x0D, 0x20]) assert cpu.c.get() == c-1 - while not cpu.f.z_flag: + while not cpu.flag.is_zero: hl = cpu.hl.get() emulate_step_op_codes_test(gameboy, [0x22]) assert cpu.hl.get() == hl+1 Modified: pypy/dist/pypy/lang/gameboy/timer.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/timer.py (original) +++ pypy/dist/pypy/lang/gameboy/timer.py Fri Aug 29 12:54:35 2008 @@ -60,13 +60,23 @@ return self.divider def set_divider(self, data): - """ DIV register resets on write """ + """ + This register is incremented at rate of 16384Hz (~16779Hz on SGB). + Writing any value to this register resets it to 00h. + """ self.divider = 0 def get_timer_counter(self): return self.timer_counter def set_timer_counter(self, data): + """ + TIMA + This timer is incremented by a clock frequency specified by the TAC + register ($FF07). When the value overflows (gets bigger than FFh) then + it will be reset to the value specified in TMA (FF06), and an interrupt + will be requested, as described below. 
+ """ self.timer_counter = data @@ -74,6 +84,9 @@ return self.timer_modulo def set_timer_modulo(self, data): + """ + When the TIMA overflows, this data will be loaded. + """ self.timer_modulo = data @@ -81,6 +94,14 @@ return 0xF8 | self.timer_control def set_timer_control(self, data): + """ + Bit 2 - Timer Stop (0=Stop, 1=Start) + Bits 1-0 - Input Clock Select + 00: 4096 Hz + 01: 262144 Hz + 10: 65536 Hz + 11: 16384 Hz + """ if (self.timer_control & 0x03) != (data & 0x03): self.timer_clock = constants.TIMER_CLOCK[data & 0x03] self.timer_cycles = constants.TIMER_CLOCK[data & 0x03] @@ -113,9 +134,18 @@ while self.timer_cycles <= 0: self.timer_counter = (self.timer_counter + 1) & 0xFF self.timer_cycles += self.timer_clock - if self.timer_counter == 0x00: - self.timer_counter = self.timer_modulo - self.interrupt.raise_interrupt(constants.TIMER) + self.timer_interrupt_check() + + def timer_interrupt_check(self): + """ + Each time when the timer overflows (ie. when TIMA gets bigger than FFh), + then an interrupt is requested by setting Bit 2 in the IF Register + (FF0F). When that interrupt is enabled, then the CPU will execute it by + calling the timer interrupt vector at 0050h. + """ + if self.timer_counter == 0x00: + self.timer_counter = self.timer_modulo + self.interrupt.raise_interrupt(constants.TIMER) #def emulate_timer(self, ticks): # if (self.timer_control & 0x04) == 0: Modified: pypy/dist/pypy/lang/gameboy/video.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/video.py (original) +++ pypy/dist/pypy/lang/gameboy/video.py Fri Aug 29 12:54:35 2008 @@ -142,6 +142,56 @@ # ----------------------------------------------------------------------------- +class Sprite(object): + + def __init__(self): + self.big_size = False + self.reset() + + def reset(self): + self.x = 0 + self.y = 0 + self._tile_number = 0 + self.object_behind_background = False + self.x_flipped = False + self.y_flipped = False + self.use_object_pallette_1 = False + + def get_tile_number(self): + return self._tile_number + + def set_tile_number(self, patter_number): + self._tile_number = patter_number & 0xFF + + def get_width(self): + return 8 + + def get_height(self): + if self.big_size: + return 16 + else: + return 8 + + def overlaps(self, sprite): + return False + +# ----------------------------------------------------------------------------- + + +class Tile(object): + + def __init__(self): + pass + + + def set_tile_data(self, rom, height): + self.height = height + + def get_tile_dta(self): + pass + +# ----------------------------------------------------------------------------- + class Video(iMemory): def __init__(self, video_driver, interrupt, memory): From cami at codespeak.net Fri Aug 29 13:51:51 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Fri, 29 Aug 2008 13:51:51 +0200 (CEST) Subject: [pypy-svn] r57680 - pypy/dist/pypy/lang/gameboy Message-ID: <20080829115151.0862C169F83@codespeak.net> Author: cami Date: Fri Aug 29 13:51:50 2008 New Revision: 57680 Modified: pypy/dist/pypy/lang/gameboy/cpu.py Log: added source code examples to the implementaztion section. 
Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Fri Aug 29 13:51:50 2008 @@ -202,7 +202,7 @@ if use_cycles: self.cpu.cycles -= 1 - def is_zero_check(self, a, reset=False): + def zero_check(self, a, reset=False): if reset: self.reset() if isinstance(a, (Register)): @@ -542,7 +542,7 @@ # set the h flag if the 0x10 bit was affected self.flag.is_half_carry = (((s ^ self.a.get() ^ data) & 0x10) != 0) self.flag.is_carry = (s >= 0x100 or s < 0) - self.flag.is_zero_check(s) + self.flag.zero_check(s) self.a.set(s & 0xFF) # 1 cycle def subtract_a(self, getCaller, setCaller=None): @@ -565,7 +565,7 @@ s = (self.a.get() - s) & 0xFF self.flag.reset() self.flag.is_subtraction = True - self.flag.is_zero_check(s) + self.flag.zero_check(s) self.subtract_his_carry_finish(s) self.cycles -= 1 @@ -577,18 +577,18 @@ # 1 cycle self.a.set(self.a.get() & getCaller.get()) # 1 cycle self.flag.reset() - self.flag.is_zero_check(self.a.get()) + self.flag.zero_check(self.a.get()) self.flag.is_half_carry = True def xor_a(self, getCaller, setCaller=None): # 1 cycle self.a.set( self.a.get() ^ getCaller.get()) # 1 cycle - self.flag.is_zero_check(self.a.get(), reset=True) + self.flag.zero_check(self.a.get(), reset=True) def or_a(self, getCaller, setCaller=None): # 1 cycle self.a.set(self.a.get() | getCaller.get()) # 1 cycle - self.flag.is_zero_check(self.a.get(), reset=True) + self.flag.zero_check(self.a.get(), reset=True) def inc_double_register(self, register): # INC rr @@ -611,7 +611,7 @@ def dec_inis_carry_finish(self, data, setCaller, compare): self.flag.partial_reset(keep_is_carry=True) - self.flag.is_zero_check(data) + self.flag.zero_check(data) self.flag.is_half_carry = ((data & 0x0F) == compare) setCaller.set(data) # 1 cycle @@ -684,7 +684,7 @@ # 2 cycles s &= 0xFF self.flag.reset() - self.flag.is_zero_check(s) + self.flag.zero_check(s) self.flag.is_carry_compare(data, compare_and) setCaller.set(s) # 1 cycle @@ -692,7 +692,7 @@ data = getCaller.get() # 1 cycle s = ((data << 4) + (data >> 4)) & 0xFF - self.flag.is_zero_check(s, reset=True) + self.flag.zero_check(s, reset=True) setCaller.set(s) @@ -814,7 +814,7 @@ self.flag.partial_reset(keep_is_subtraction=True) if delta >= 0x60: self.flag.is_carry = True - self.flag.is_zero_check(self.a.get()) + self.flag.zero_check(self.a.get()) def increment_sp_by_fetch(self): # ADD SP,nn 4 cycles @@ -1054,7 +1054,7 @@ def create_register_op_codes(table): op_codes = [] for entry in table: - op_code = entry[0] + op_code = entry[0] step = entry[1] function = entry[2] for registerOrGetter in entry[3]: From cami at codespeak.net Fri Aug 29 13:54:54 2008 From: cami at codespeak.net (cami at codespeak.net) Date: Fri, 29 Aug 2008 13:54:54 +0200 (CEST) Subject: [pypy-svn] r57681 - pypy/dist/pypy/lang/gameboy Message-ID: <20080829115454.CC43A169F83@codespeak.net> Author: cami Date: Fri Aug 29 13:54:53 2008 New Revision: 57681 Modified: pypy/dist/pypy/lang/gameboy/cpu.py Log: corrigendum: the last commit was in the wrong svn ;) removed some useless pass isnstruction from the CallWrappers Modified: pypy/dist/pypy/lang/gameboy/cpu.py ============================================================================== --- pypy/dist/pypy/lang/gameboy/cpu.py (original) +++ pypy/dist/pypy/lang/gameboy/cpu.py Fri Aug 29 13:54:53 2008 @@ -941,17 +941,13 @@ # ------------------------------------------------------------------------------ class 
CallWrapper(object): - def get(self, use_cycles=True): raise Exception("called CallWrapper.get") - return 0 def set(self, value, use_cycles=True): raise Exception("called CallWrapper.set") - pass class NumberCallWrapper(CallWrapper): - def __init__(self, number): self.number = number @@ -960,7 +956,6 @@ def set(self, value, use_cycles=True): raise Exception("called CallWrapper.set") - pass class RegisterCallWrapper(CallWrapper): def __init__(self, register): From antocuni at codespeak.net Fri Aug 29 16:31:15 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Fri, 29 Aug 2008 16:31:15 +0200 (CEST) Subject: [pypy-svn] r57682 - in pypy/branch/oo-jit/pypy/jit: codegen/cli codegen/cli/test rainbow Message-ID: <20080829143115.AD00C169F08@codespeak.net> Author: antocuni Date: Fri Aug 29 16:31:12 2008 New Revision: 57682 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py pypy/branch/oo-jit/pypy/jit/rainbow/portal.py Log: keep both the type and the attribute name as a fieldtoken. test_simple_struct passes Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py Fri Aug 29 16:31:12 2008 @@ -176,11 +176,15 @@ class GetField(Operation): - def __init__(self, meth, gv_obj, fieldname): + def __init__(self, meth, gv_obj, fieldtoken): + from pypy.jit.codegen.cli.rgenop import class2type self.meth = meth self.gv_obj = gv_obj - clitype = gv_obj.getCliType() - self.fieldinfo = clitype.GetField(fieldname) + if fieldtoken.cls is None: + clitype = gv_obj.getCliType() # XXX: it's a Record, need to think how to fix + else: + clitype = class2type(fieldtoken.cls) + self.fieldinfo = clitype.GetField(fieldtoken.name) def restype(self): return self.fieldinfo.get_FieldType() @@ -193,12 +197,16 @@ class SetField(Operation): - def __init__(self, meth, gv_obj, gv_value, fieldname): + def __init__(self, meth, gv_obj, gv_value, fieldtoken): + from pypy.jit.codegen.cli.rgenop import class2type self.meth = meth self.gv_obj = gv_obj self.gv_value = gv_value - clitype = gv_obj.getCliType() - self.fieldinfo = clitype.GetField(fieldname) + if fieldtoken.cls is None: + clitype = gv_obj.getCliType() # XXX: it's a Record, need to think how to fix + else: + clitype = class2type(fieldtoken.cls) + self.fieldinfo = clitype.GetField(fieldtoken.name) def restype(self): return None Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py Fri Aug 29 16:31:12 2008 @@ -34,6 +34,11 @@ self.res = res self.funcclass = funcclass +class FieldToken: + def __init__(self, cls, name): + self.cls = cls + self.name = name + def class2type(cls): 'Cast a PBC of type ootype.Class into a System.Type instance' if cls is cVoid: @@ -323,8 +328,11 @@ @staticmethod @specialize.memo() def fieldToken(T, name): - _, FIELD = T._lookup_field(name) - return name #, RCliGenOp.kindToken(FIELD) + if isinstance(T, ootype.Record): + cls = ootype.nullruntimeclass + else: + cls = ootype.runtimeClass(T) + return FieldToken(cls, name) @staticmethod @specialize.memo() Modified: 
pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Fri Aug 29 16:31:12 2008 @@ -63,7 +63,6 @@ assert res == 42 - test_simple_struct = skip test_complex_struct = skip test_degenerate_with_voids = skip test_arith_plus_minus = skip Modified: pypy/branch/oo-jit/pypy/jit/rainbow/portal.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/portal.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/portal.py Fri Aug 29 16:31:12 2008 @@ -90,6 +90,7 @@ PTR_RESFUNC = self.PTR_RESIDUAL_FUNCTYPE args_s = [annmodel.lltype_to_annotation(ARG) for ARG in FUNC.ARGS] s_result = annmodel.lltype_to_annotation(FUNC.RESULT) + self.annhelper = annhelper self.portal_entry_graph = annhelper.getgraph( self.portal_entry, args_s, s_result) portal_entry_graph_ptr = annhelper.graph2delayed( From antocuni at codespeak.net Fri Aug 29 16:36:24 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Fri, 29 Aug 2008 16:36:24 +0200 (CEST) Subject: [pypy-svn] r57683 - in pypy/branch/oo-jit/pypy/jit: codegen/cli/test rainbow/test Message-ID: <20080829143624.6DC98169F13@codespeak.net> Author: antocuni Date: Fri Aug 29 16:36:23 2008 New Revision: 57683 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Log: if we use only single chars for field names, gencli gets confused when we try to pass them to a native method which expects a string. As a workaround, we make a field name longer than one so that they all gets annotated as string. 
Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Fri Aug 29 16:36:23 2008 @@ -63,7 +63,6 @@ assert res == 42 - test_complex_struct = skip test_degenerate_with_voids = skip test_arith_plus_minus = skip test_plus_minus = skip Modified: pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Fri Aug 29 16:36:23 2008 @@ -724,15 +724,15 @@ def test_complex_struct(self): S = self.GcStruct('S', ('n', lltype.Signed)) PTRS = self.Ptr(S) - T = self.GcStruct('T', ('s', PTRS), ('n', lltype.Signed)) + T = self.GcStruct('T', ('succ', PTRS), ('n', lltype.Signed)) malloc = self.malloc def ll_function(x, y): t = malloc(T) - t.s = malloc(S) - t.s.n = y + t.succ = malloc(S) + t.succ.n = y t.n = x - return t.n + t.s.n + return t.n + t.succ.n res = self.interpret(ll_function, [20, 22]) assert res == 42 From antocuni at codespeak.net Fri Aug 29 16:45:56 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Fri, 29 Aug 2008 16:45:56 +0200 (CEST) Subject: [pypy-svn] r57684 - in pypy/branch/oo-jit/pypy/jit: codegen/cli rainbow/test Message-ID: <20080829144556.343F516A03A@codespeak.net> Author: antocuni Date: Fri Aug 29 16:45:55 2008 New Revision: 57684 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Log: revert r57683, and put a workaround in operation.{GetField, SetField} Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py Fri Aug 29 16:45:55 2008 @@ -184,7 +184,7 @@ clitype = gv_obj.getCliType() # XXX: it's a Record, need to think how to fix else: clitype = class2type(fieldtoken.cls) - self.fieldinfo = clitype.GetField(fieldtoken.name) + self.fieldinfo = clitype.GetField(str(fieldtoken.name)) def restype(self): return self.fieldinfo.get_FieldType() @@ -206,7 +206,7 @@ clitype = gv_obj.getCliType() # XXX: it's a Record, need to think how to fix else: clitype = class2type(fieldtoken.cls) - self.fieldinfo = clitype.GetField(fieldtoken.name) + self.fieldinfo = clitype.GetField(str(fieldtoken.name)) def restype(self): return None Modified: pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Fri Aug 29 16:45:55 2008 @@ -724,15 +724,15 @@ def test_complex_struct(self): S = self.GcStruct('S', ('n', lltype.Signed)) PTRS = self.Ptr(S) - T = self.GcStruct('T', ('succ', PTRS), ('n', lltype.Signed)) + T = self.GcStruct('T', ('s', PTRS), ('n', lltype.Signed)) malloc = self.malloc def ll_function(x, y): t = malloc(T) - t.succ = malloc(S) - t.succ.n = y + t.s = malloc(S) + t.s.n = y t.n = x - return t.n + t.succ.n + return t.n + t.s.n res = self.interpret(ll_function, [20, 22]) assert res == 42 @@ -778,7 +778,7 
@@ malloc = self.malloc def ll_function(): s = malloc(S) - s.x = 123 + s.xx = 123 return s res = self.interpret(ll_function, [], []) assert res.x == 123 From fijal at codespeak.net Fri Aug 29 18:38:31 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Fri, 29 Aug 2008 18:38:31 +0200 (CEST) Subject: [pypy-svn] r57685 - pypy/extradoc/sprintinfo/october-2008 Message-ID: <20080829163831.817E116A001@codespeak.net> Author: fijal Date: Fri Aug 29 18:38:28 2008 New Revision: 57685 Added: pypy/extradoc/sprintinfo/october-2008/ pypy/extradoc/sprintinfo/october-2008/cz-info.txt (contents, props changed) Log: * A directory for next sprint * A file with some details regarding possible location Added: pypy/extradoc/sprintinfo/october-2008/cz-info.txt ============================================================================== --- (empty file) +++ pypy/extradoc/sprintinfo/october-2008/cz-info.txt Fri Aug 29 18:38:28 2008 @@ -0,0 +1,19 @@ +Decin is a small town at the entrance of Ceske Svycarsko (czech side of +Sachsische Schweiz). + +How to get there: Easy. 1:30 train from Prague or 1h train from Dresden. +Goes quite often. + +Czech republic (at least countryside) is a fairly cheap place, +Example prices: + +Beer in pub - 25 kC (1 euro) +Dinner - 100-150 kC (4-6 euros) +Rooms - from 10 euros to 40, depends on hotel class. probably one can find +something even cheaper. + +Possible sprint venue - hotel Ceska Koruna Decin. They want us to pay +at least 150 kC per person per day in order for a room to get for free. +This is roughly less than 7 euros, which covers more or less coffee and +snacks for whole day. + From fijal at codespeak.net Fri Aug 29 18:53:54 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Fri, 29 Aug 2008 18:53:54 +0200 (CEST) Subject: [pypy-svn] r57686 - pypy/extradoc/sprintinfo/october-2008 Message-ID: <20080829165354.33A78169FCE@codespeak.net> Author: fijal Date: Fri Aug 29 18:53:53 2008 New Revision: 57686 Modified: pypy/extradoc/sprintinfo/october-2008/cz-info.txt Log: add english name Modified: pypy/extradoc/sprintinfo/october-2008/cz-info.txt ============================================================================== --- pypy/extradoc/sprintinfo/october-2008/cz-info.txt (original) +++ pypy/extradoc/sprintinfo/october-2008/cz-info.txt Fri Aug 29 18:53:53 2008 @@ -1,5 +1,5 @@ Decin is a small town at the entrance of Ceske Svycarsko (czech side of -Sachsische Schweiz). +Sachsische Schweiz or Saxon Switzerland). How to get there: Easy. 1:30 train from Prague or 1h train from Dresden. Goes quite often. 
From pedronis at codespeak.net Sat Aug 30 13:19:00 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 30 Aug 2008 13:19:00 +0200 (CEST) Subject: [pypy-svn] r57691 - pypy/dist/pypy/translator/test Message-ID: <20080830111900.D1D31169EFE@codespeak.net> Author: pedronis Date: Sat Aug 30 13:18:58 2008 New Revision: 57691 Modified: pypy/dist/pypy/translator/test/snippet.py pypy/dist/pypy/translator/test/test_geninterp.py Log: some more geninterp tests just because Modified: pypy/dist/pypy/translator/test/snippet.py ============================================================================== --- pypy/dist/pypy/translator/test/snippet.py (original) +++ pypy/dist/pypy/translator/test/snippet.py Sat Aug 30 13:18:58 2008 @@ -553,6 +553,30 @@ else: return (None, None) +def star_args0(*args): + return args[0] / 2 + +def call_star_args0(z): + return star_args0(z) + +def star_args1(a, *args): + return a + args[0] / 2 + +def call_star_args1(z): + return star_args1(z, 20) + +def star_args1def(a=4, *args): + if args: + return a + args[0] / 2 + else: + return a*3 + +def call_star_args1def(z): + a = star_args1def(z, 22) + b = star_args1def(5) + c = star_args1def() + return a+b+c + def star_args(x, y, *args): return x + args[0] Modified: pypy/dist/pypy/translator/test/test_geninterp.py ============================================================================== --- pypy/dist/pypy/translator/test/test_geninterp.py (original) +++ pypy/dist/pypy/translator/test/test_geninterp.py Sat Aug 30 13:18:58 2008 @@ -191,6 +191,18 @@ multiple_inheritance = self.build_interpfunc(snippet.multiple_inheritance) assert multiple_inheritance() == 1+2+3+4 + def test_call_star_args0(self): + call_star_args = self.build_interpfunc(snippet.call_star_args0) + assert call_star_args(42) == 21 + + def test_call_star_args1(self): + call_star_args = self.build_interpfunc(snippet.call_star_args1) + assert call_star_args(30) == 40 + + def test_call_star_args1def(self): + call_star_args = self.build_interpfunc(snippet.call_star_args1def) + assert call_star_args(7) == 45 + def test_call_star_args(self): call_star_args = self.build_interpfunc(snippet.call_star_args) assert call_star_args(42) == 52 From pedronis at codespeak.net Sat Aug 30 13:19:43 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sat, 30 Aug 2008 13:19:43 +0200 (CEST) Subject: [pypy-svn] r57692 - pypy/dist/pypy/translator/microbench Message-ID: <20080830111943.E0C20169EFE@codespeak.net> Author: pedronis Date: Sat Aug 30 13:19:43 2008 New Revision: 57692 Modified: pypy/dist/pypy/translator/microbench/test_bltn.py pypy/dist/pypy/translator/microbench/test_dict.py Log: - make the tests originally about new-style classes use them again - introduce when it makes sense old-style versions Modified: pypy/dist/pypy/translator/microbench/test_bltn.py ============================================================================== --- pypy/dist/pypy/translator/microbench/test_bltn.py (original) +++ pypy/dist/pypy/translator/microbench/test_bltn.py Sat Aug 30 13:19:43 2008 @@ -21,7 +21,7 @@ x = fabs(x) c += 1 -class foo: +class foo(object): pass class bar(foo): @@ -55,3 +55,33 @@ isinstance(b2, (bar, baz)) isinstance(b2, (bar, baz)) +# old-style + +class Foo: + pass + +class Bar(Foo): + pass + +class Baz(Bar): + pass + +def test_isinstance1_old_style(): + f = Foo() + b1 = Bar() + b2 = Baz() + for x in xrange(100000): + isinstance(b1, Foo) + isinstance(b1, Baz) + isinstance(f, Bar) + isinstance(b2, Foo) + +def test_isinstance3_old_style(): + 
b2 = Baz() + for x in xrange(100000): + isinstance(b2, (Bar, Baz)) + isinstance(b2, (Bar, Baz)) + isinstance(b2, (Bar, Baz)) + isinstance(b2, (Bar, Baz)) + isinstance(b2, (Bar, Baz)) + Modified: pypy/dist/pypy/translator/microbench/test_dict.py ============================================================================== --- pypy/dist/pypy/translator/microbench/test_dict.py (original) +++ pypy/dist/pypy/translator/microbench/test_dict.py Sat Aug 30 13:19:43 2008 @@ -49,7 +49,9 @@ for x in range(iterations): pass -class A: +# class and attrs + +class NewStyle(object): def __init__(self): self.a = 3 self.b = 4 @@ -60,7 +62,7 @@ def test_dict_class_dict_getmethod(): - a = A() + a = NewStyle() for x in range(iterations): a.f a.f @@ -68,7 +70,7 @@ a.f def test_dict_instance_getattr_instance_dict(): - a = A() + a = NewStyle() for x in range(iterations): a.a a.b @@ -76,7 +78,7 @@ a.b def test_dict_instance_setattr_instance_dict(): - a = A() + a = NewStyle() for x in range(iterations): a.a = 3 a.b = 4 @@ -84,7 +86,51 @@ a.b = 4 def test_dict_instance_setnewattr_instance_dict(): - a = A() + a = NewStyle() + for x in range(iterations): + a.c = 3 + a.d = 4 + a.e = 5 + a.f = 6 + +# old-style + +class OldStyle: + def __init__(self): + self.a = 3 + self.b = 4 + def f(self): + pass + def g(self): + pass + + +def test_dict_class_dict_getmethod_old_style(): + a = OldStyle() + for x in range(iterations): + a.f + a.f + a.f + a.f + +def test_dict_instance_getattr_instance_dict_old_style(): + a = OldStyle() + for x in range(iterations): + a.a + a.b + a.a + a.b + +def test_dict_instance_setattr_instance_dict_old_style(): + a = OldStyle() + for x in range(iterations): + a.a = 3 + a.b = 4 + a.a = 3 + a.b = 4 + +def test_dict_instance_setnewattr_instance_dict_old_style(): + a = OldStyle() for x in range(iterations): a.c = 3 a.d = 4 From antocuni at codespeak.net Sun Aug 31 13:16:53 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Sun, 31 Aug 2008 13:16:53 +0200 (CEST) Subject: [pypy-svn] r57699 - in pypy/branch/oo-jit/pypy/rpython/ootypesystem: . test Message-ID: <20080831111653.5017E169E0B@codespeak.net> Author: antocuni Date: Sun Aug 31 13:16:50 2008 New Revision: 57699 Modified: pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py pypy/branch/oo-jit/pypy/rpython/ootypesystem/test/test_ootype.py Log: associate a Class object to each Record, as we do for Instances Modified: pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py ============================================================================== --- pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py (original) +++ pypy/branch/oo-jit/pypy/rpython/ootypesystem/ootype.py Sun Aug 31 13:16:50 2008 @@ -307,6 +307,8 @@ # We try to keep Record as similar to Instance as possible, so backends # can treat them polymorphically, if they choose to do so. 
+ + _classes = {} def __init__(self, fields, _hints={}): self._fields = frozendict() @@ -315,6 +317,15 @@ self._null = _null_record(self) self._hints = frozendict(_hints) + @property + def _class(self): + try: + return self._classes[self] + except KeyError: + cls = _class(self) + self._classes[self] = cls + return cls + def _defl(self): return self._null @@ -1767,7 +1778,11 @@ def runtimenew(class_): assert isinstance(class_, _class) assert class_ is not nullruntimeclass - return make_instance(class_._INSTANCE) + TYPE = class_._INSTANCE + if isinstance(TYPE, Record): + return _record(TYPE) + else: + return make_instance(TYPE) def static_meth(FUNCTION, name, **attrs): return _static_meth(FUNCTION, _name=name, **attrs) @@ -1812,7 +1827,7 @@ INSTANCE._override_default_for_fields(fields) def runtimeClass(INSTANCE): - assert isinstance(INSTANCE, Instance) + assert isinstance(INSTANCE, (Instance, Record)) return INSTANCE._class def isSubclass(C1, C2): Modified: pypy/branch/oo-jit/pypy/rpython/ootypesystem/test/test_ootype.py ============================================================================== --- pypy/branch/oo-jit/pypy/rpython/ootypesystem/test/test_ootype.py (original) +++ pypy/branch/oo-jit/pypy/rpython/ootypesystem/test/test_ootype.py Sun Aug 31 13:16:50 2008 @@ -32,7 +32,7 @@ assert d.a == 1 -def test_runtime_instanciation(): +def test_runtime_instantiation(): I = Instance("test", ROOT, {"a": Signed}) c = runtimeClass(I) i = runtimenew(c) @@ -40,6 +40,27 @@ assert typeOf(i) == I assert typeOf(c) == Class +def test_record_equivalence(): + R1 = Record({"a": Signed}) + R2 = Record({"a": Signed}) + assert R1 == R2 + assert hash(R1) == hash(R2) + assert R1._class is R2._class + +def test_runtime_record_instantiation(): + R = Record({"a": Signed}) + c = runtimeClass(R) + r = runtimenew(c) + + assert typeOf(r) == R + assert typeOf(c) == Class + +def test_class_records(): + R1 = Record({"a": Signed}) + R2 = Record({"a": Signed}) + assert R1 == R2 + assert runtimeClass(R1) is runtimeClass(R2) + def test_classof(): I = Instance("test", ROOT, {"a": Signed}) c = runtimeClass(I) From antocuni at codespeak.net Sun Aug 31 13:45:55 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Sun, 31 Aug 2008 13:45:55 +0200 (CEST) Subject: [pypy-svn] r57700 - pypy/branch/oo-jit/pypy/jit/rainbow/test Message-ID: <20080831114555.5599B169F03@codespeak.net> Author: antocuni Date: Sun Aug 31 13:45:53 2008 New Revision: 57700 Modified: pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Log: typo Modified: pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/rainbow/test/test_interpreter.py Sun Aug 31 13:45:53 2008 @@ -778,7 +778,7 @@ malloc = self.malloc def ll_function(): s = malloc(S) - s.xx = 123 + s.x = 123 return s res = self.interpret(ll_function, [], []) assert res.x == 123 From antocuni at codespeak.net Sun Aug 31 13:46:36 2008 From: antocuni at codespeak.net (antocuni at codespeak.net) Date: Sun, 31 Aug 2008 13:46:36 +0200 (CEST) Subject: [pypy-svn] r57701 - in pypy/branch/oo-jit/pypy/jit/codegen/cli: . 
test Message-ID: <20080831114636.13FD6169F03@codespeak.net> Author: antocuni Date: Sun Aug 31 13:46:36 2008 New Revision: 57701 Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Log: implement genop_new; a modified version of test_degenerate_with_voids passes Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/operation.py Sun Aug 31 13:46:36 2008 @@ -1,7 +1,7 @@ import py from pypy.rlib.objectmodel import specialize from pypy.rpython.ootypesystem import ootype -from pypy.translator.cli.dotnet import CLR, typeof +from pypy.translator.cli.dotnet import CLR, typeof, new_array from pypy.translator.cli import opcodes as cli_opcodes System = CLR.System OpCodes = System.Reflection.Emit.OpCodes @@ -216,6 +216,20 @@ self.gv_value.load(self.meth) self.meth.il.Emit(OpCodes.Stfld, self.fieldinfo) +class New(Operation): + + def __init__(self, meth, alloctoken): + self.meth = meth + self.clitype = alloctoken.getCliType() + + def restype(self): + return self.clitype + + def emit(self): + ctor = self.clitype.GetConstructor(new_array(System.Type, 0)) + self.meth.il.Emit(OpCodes.Newobj, ctor) + self.storeResult() + def mark(il, s): il.Emit(OpCodes.Ldstr, s) il.Emit(OpCodes.Pop) Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/rgenop.py Sun Aug 31 13:46:36 2008 @@ -39,6 +39,13 @@ self.cls = cls self.name = name +class AllocToken: + def __init__(self, ooclass): + self.ooclass = ooclass + + def getCliType(self): + return class2type(self.ooclass) + def class2type(cls): 'Cast a PBC of type ootype.Class into a System.Type instance' if cls is cVoid: @@ -337,7 +344,7 @@ @staticmethod @specialize.memo() def allocToken(T): - return RCliGenOp.kindToken(T) + return AllocToken(ootype.runtimeClass(T)) def check_no_open_mc(self): pass @@ -641,8 +648,10 @@ def genop_ooisnull(self, gv_obj): raise NotImplementedError - def genop_new(self, gv_obj): - raise NotImplementedError + def genop_new(self, alloctoken): + op = ops.New(self.meth, alloctoken) + self.appendop(op) + return op.gv_res() def enter_next_block(self, args_gv): seen = {} Modified: pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py ============================================================================== --- pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py (original) +++ pypy/branch/oo-jit/pypy/jit/codegen/cli/test/test_gencli_interpreter.py Sun Aug 31 13:46:36 2008 @@ -1,4 +1,5 @@ import py +from pypy.rpython.lltypesystem import lltype from pypy.jit.codegen.cli.rgenop import RCliGenOp from pypy.jit.rainbow.test.test_interpreter import TestOOType as RainbowTest from pypy.translator.cli.test.runtest import compile_graph, get_annotation @@ -62,8 +63,20 @@ res = self.interpret(ll_function, ["xx"], []) assert res == 42 + def test_degenerate_with_voids(self): + # the original test can't be executed when compiled because we can't + # inspect the content of an instance return an instance as a result; + # instead, we just check the class name + S = self.GcStruct('S', ('y', lltype.Void), + ('x', 
lltype.Signed)) + malloc = self.malloc + def ll_function(): + s = malloc(S) + s.x = 123 + return s + res = self.interpret(ll_function, [], []) + assert res.class_name == 'S' - test_degenerate_with_voids = skip test_arith_plus_minus = skip test_plus_minus = skip test_red_virtual_container = skip From pedronis at codespeak.net Sun Aug 31 15:28:46 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 31 Aug 2008 15:28:46 +0200 (CEST) Subject: [pypy-svn] r57702 - pypy/branch/garden-call-code-2 Message-ID: <20080831132846.ADAE1169E48@codespeak.net> Author: pedronis Date: Sun Aug 31 15:28:41 2008 New Revision: 57702 Added: pypy/branch/garden-call-code-2/ - copied from r57701, pypy/dist/ Log: another branch in which to work on our call code From fijal at codespeak.net Sun Aug 31 19:07:35 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Sun, 31 Aug 2008 19:07:35 +0200 (CEST) Subject: [pypy-svn] r57703 - in pypy/dist/pypy/lib: . app_test Message-ID: <20080831170735.C4506169E70@codespeak.net> Author: fijal Date: Sun Aug 31 19:07:32 2008 New Revision: 57703 Modified: pypy/dist/pypy/lib/app_test/test_pyexpat.py pypy/dist/pypy/lib/pyexpat.py Log: A test and a fix for error reporting with parser error Modified: pypy/dist/pypy/lib/app_test/test_pyexpat.py ============================================================================== --- pypy/dist/pypy/lib/app_test/test_pyexpat.py (original) +++ pypy/dist/pypy/lib/app_test/test_pyexpat.py Sun Aug 31 19:07:32 2008 @@ -646,3 +646,16 @@ def test_segfault(self): py.test.raises(TypeError, expat.ParserCreate, 1234123123) + +def test_invalid_data(): + parser = expat.ParserCreate() + parser.Parse('invalid.xml', 0) + try: + parser.Parse("", 1) + except expat.ExpatError, e: + assert e.code == 2 # XXX is this reliable? + assert e.lineno == 1 + assert e.message.startswith('syntax error') + else: + py.test.fail("Did not raise") + Modified: pypy/dist/pypy/lib/pyexpat.py ============================================================================== --- pypy/dist/pypy/lib/pyexpat.py (original) +++ pypy/dist/pypy/lib/pyexpat.py Sun Aug 31 19:07:32 2008 @@ -148,6 +148,7 @@ self.buffer_size = 8192 self.character_data_handler = None self.intern = {} + self.__exc_info = None def _flush_character_buffer(self): if not self.buffer: @@ -189,13 +190,19 @@ e.lineno = lineno err = XML_ErrorString(code)[:200] e.s = "%s: line: %d, column: %d" % (err, lineno, colno) + e.message = e.s self._error = e def Parse(self, data, is_final=0): res = XML_Parse(self.itself, data, len(data), is_final) if res == 0: self._set_error(XML_GetErrorCode(self.itself)) - raise self.__exc_info[0], self.__exc_info[1], self.__exc_info[2] + if self.__exc_info: + exc_info = self.__exc_info + self.__exc_info = None + raise exc_info[0], exc_info[1], exc_info[2] + else: + raise self._error self._flush_character_buffer() return res From fijal at codespeak.net Sun Aug 31 19:50:50 2008 From: fijal at codespeak.net (fijal at codespeak.net) Date: Sun, 31 Aug 2008 19:50:50 +0200 (CEST) Subject: [pypy-svn] r57704 - pypy/dist/pypy/lib Message-ID: <20080831175050.E4D3D169ECB@codespeak.net> Author: fijal Date: Sun Aug 31 19:50:47 2008 New Revision: 57704 Modified: pypy/dist/pypy/lib/pyexpat.py Log: I'm stupid as hell. And ctypes does not help me. I wonder why it did not bite before, but result is called restype in ctypes, not result. Fixes 64bit segfault in xml parser. 
Modified: pypy/dist/pypy/lib/pyexpat.py ============================================================================== --- pypy/dist/pypy/lib/pyexpat.py (original) +++ pypy/dist/pypy/lib/pyexpat.py Sun Aug 31 19:50:47 2008 @@ -50,7 +50,7 @@ def declare_external(name, args, res): func = getattr(lib, name) func.args = args - func.result = res + func.restype = res globals()[name] = func declare_external('XML_ParserCreate', [c_char_p], XML_Parser) @@ -61,15 +61,14 @@ for name in currents: func = getattr(lib, 'XML_Get' + name) func.args = [XML_Parser] - func.result = c_int + func.restype = c_int declare_external('XML_SetReturnNSTriplet', [XML_Parser, c_int], None) declare_external('XML_GetSpecifiedAttributeCount', [XML_Parser], c_int) declare_external('XML_SetParamEntityParsing', [XML_Parser, c_int], None) declare_external('XML_GetErrorCode', [XML_Parser], c_int) declare_external('XML_StopParser', [XML_Parser, c_int], None) -lib.XML_ErrorString.args = [c_int] -lib.XML_ErrorString.result = c_int +declare_external('XML_ErrorString', [c_int], c_char_p) declare_external('XML_SetBase', [XML_Parser, c_char_p], None) declare_external('XML_SetUnknownEncodingHandler', [XML_Parser, c_void_p, @@ -77,12 +76,6 @@ declare_external('XML_FreeContentModel', [XML_Parser, POINTER(XML_Content)], None) -def XML_ErrorString(code): - res = lib.XML_ErrorString(code) - p = c_char_p() - p.value = res - return p.value - handler_names = [ 'StartElement', 'EndElement', From pedronis at codespeak.net Sun Aug 31 23:32:18 2008 From: pedronis at codespeak.net (pedronis at codespeak.net) Date: Sun, 31 Aug 2008 23:32:18 +0200 (CEST) Subject: [pypy-svn] r57705 - pypy/branch/garden-call-code-2/pypy/interpreter Message-ID: <20080831213218.00B14168533@codespeak.net> Author: pedronis Date: Sun Aug 31 23:32:15 2008 New Revision: 57705 Modified: pypy/branch/garden-call-code-2/pypy/interpreter/function.py Log: WIP: start of removing fastcall* on PyCode and the ArgumentsFormValuestack this expectedly break test in test_function.py about fastcall being used Modified: pypy/branch/garden-call-code-2/pypy/interpreter/function.py ============================================================================== --- pypy/branch/garden-call-code-2/pypy/interpreter/function.py (original) +++ pypy/branch/garden-call-code-2/pypy/interpreter/function.py Sun Aug 31 23:32:15 2008 @@ -69,25 +69,39 @@ return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack + from pypy.interpreter import gateway code = self.getcode() # hook for the jit fast_natural_arity = code.fast_natural_arity - if nargs == fast_natural_arity: - if nargs == 0: - return code.fastcall_0(self.space, self) - elif nargs == 1: - return code.fastcall_1(self.space, self, frame.peekvalue(0)) - elif nargs == 2: - return code.fastcall_2(self.space, self, frame.peekvalue(1), - frame.peekvalue(0)) - elif nargs == 3: - return code.fastcall_3(self.space, self, frame.peekvalue(2), - frame.peekvalue(1), frame.peekvalue(0)) - elif nargs == 4: - return code.fastcall_4(self.space, self, frame.peekvalue(3), - frame.peekvalue(2), frame.peekvalue(1), - frame.peekvalue(0)) + if nargs == fast_natural_arity: + from pypy.interpreter.pycode import PyCode + if type(code) is PyCode: + new_frame = self.space.createframe(code, self.w_func_globals, + self.closure) + for i in xrange(nargs): + w_arg = frame.peekvalue(nargs-1-i) + new_frame.fastlocals_w[i] = w_arg + return new_frame.run() + else: + if nargs == 0: + assert isinstance(code, gateway.BuiltinCode0) + 
return code.fastcall_0(self.space, self) + elif nargs == 1: + assert isinstance(code, gateway.BuiltinCode1) + return code.fastcall_1(self.space, self, frame.peekvalue(0)) + elif nargs == 2: + assert isinstance(code, gateway.BuiltinCode2) + return code.fastcall_2(self.space, self, frame.peekvalue(1), + frame.peekvalue(0)) + elif nargs == 3: + assert isinstance(code, gateway.BuiltinCode3) + return code.fastcall_3(self.space, self, frame.peekvalue(2), + frame.peekvalue(1), frame.peekvalue(0)) + elif nargs == 4: + assert isinstance(code, gateway.BuiltinCode4) + return code.fastcall_4(self.space, self, frame.peekvalue(3), + frame.peekvalue(2), frame.peekvalue(1), + frame.peekvalue(0)) elif fast_natural_arity == -1 and nargs >= 1: - from pypy.interpreter import gateway assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) w_obj = frame.peekvalue(nargs-1) args = frame.make_arguments(nargs-1)