From noreply at buildbot.pypy.org Tue Jul 1 00:13:43 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 00:13:43 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Small optimization for one case
Message-ID: <20140630221343.90A4B1C33F0@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72301:e6db9b63d6e6
Date: 2014-06-30 22:00 +0200
http://bitbucket.org/pypy/pypy/changeset/e6db9b63d6e6/
Log: Small optimization for one case
diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py
--- a/rpython/jit/backend/x86/assembler.py
+++ b/rpython/jit/backend/x86/assembler.py
@@ -2353,8 +2353,12 @@
assert isinstance(loc_index, ImmedLoc)
cardindex = loc_index.value >> card_bits
if isinstance(loc_base, RegLoc):
- mc.MOV_ri(r11.value, cardindex << 4) # 32/64bit
- mc.ADD_rr(r11.value, loc_base.value)
+ if rx86.fits_in_32bits(write_locks_base + cardindex):
+ write_locks_base += cardindex
+ mc.MOV_rr(r11.value, loc_base.value)
+ else:
+ mc.MOV_ri(r11.value, cardindex << 4) # 32/64bit
+ mc.ADD_rr(r11.value, loc_base.value)
mc.SHR_ri(r11.value, 4)
else:
mc.MOV_ri(r11.value, cardindex + (loc_base.value >> 4))
From noreply at buildbot.pypy.org Tue Jul 1 06:11:50 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Tue, 1 Jul 2014 06:11:50 +0200 (CEST)
Subject: [pypy-commit] pypy default: don't import stuuf from conftest for no
bloody reason
Message-ID: <20140701041150.12F051C33F0@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch:
Changeset: r72302:13859c071d0f
Date: 2014-07-01 05:11 +0100
http://bitbucket.org/pypy/pypy/changeset/13859c071d0f/
Log: don't import stuuf from conftest for no bloody reason
diff --git a/rpython/conftest.py b/rpython/conftest.py
--- a/rpython/conftest.py
+++ b/rpython/conftest.py
@@ -1,10 +1,8 @@
-from os.path import *
import py, pytest
from rpython.tool import leakfinder
pytest_plugins = 'rpython.tool.pytest.expecttest'
-cdir = realpath(join(dirname(__file__), 'translator', 'c'))
option = None
def braindead_deindent(self):
diff --git a/rpython/rlib/_rffi_stacklet.py b/rpython/rlib/_rffi_stacklet.py
--- a/rpython/rlib/_rffi_stacklet.py
+++ b/rpython/rlib/_rffi_stacklet.py
@@ -3,7 +3,7 @@
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.rtyper.tool import rffi_platform
from rpython.rlib.rarithmetic import is_emulated_long
-from rpython.conftest import cdir
+from rpython.translator import cdir
cdir = py.path.local(cdir)
diff --git a/rpython/rlib/clibffi.py b/rpython/rlib/clibffi.py
--- a/rpython/rlib/clibffi.py
+++ b/rpython/rlib/clibffi.py
@@ -15,7 +15,7 @@
from rpython.rlib.objectmodel import specialize
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.platform import platform
-from rpython.conftest import cdir
+from rpython.translator import cdir
from platform import machine
import py
import os
diff --git a/rpython/rlib/rdtoa.py b/rpython/rlib/rdtoa.py
--- a/rpython/rlib/rdtoa.py
+++ b/rpython/rlib/rdtoa.py
@@ -1,7 +1,7 @@
from __future__ import with_statement
from rpython.rlib import rfloat
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib import jit
from rpython.rlib.rstring import StringBuilder
diff --git a/rpython/rlib/rgil.py b/rpython/rlib/rgil.py
--- a/rpython/rlib/rgil.py
+++ b/rpython/rlib/rgil.py
@@ -1,5 +1,5 @@
import py
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
diff --git a/rpython/rlib/rsignal.py b/rpython/rlib/rsignal.py
--- a/rpython/rlib/rsignal.py
+++ b/rpython/rlib/rsignal.py
@@ -1,7 +1,7 @@
import signal as cpy_signal
import sys
import py
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rtyper.tool import rffi_platform
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.translator.tool.cbuild import ExternalCompilationInfo
diff --git a/rpython/rlib/rstack.py b/rpython/rlib/rstack.py
--- a/rpython/rlib/rstack.py
+++ b/rpython/rlib/rstack.py
@@ -10,7 +10,7 @@
from rpython.rlib import rgc
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper.lltypesystem.lloperation import llop
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
# ____________________________________________________________
diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py
--- a/rpython/rlib/rthread.py
+++ b/rpython/rlib/rthread.py
@@ -1,6 +1,6 @@
from rpython.rtyper.lltypesystem import rffi, lltype, llmemory
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
import py
from rpython.rlib import jit, rgc
from rpython.rlib.debug import ll_assert
@@ -59,7 +59,7 @@
c_thread_acquirelock = llexternal('RPyThreadAcquireLock', [TLOCKP, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
-c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
+c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
[TLOCKP, rffi.LONGLONG, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
diff --git a/rpython/rtyper/lltypesystem/module/ll_math.py b/rpython/rtyper/lltypesystem/module/ll_math.py
--- a/rpython/rtyper/lltypesystem/module/ll_math.py
+++ b/rpython/rtyper/lltypesystem/module/ll_math.py
@@ -3,7 +3,7 @@
import py
import sys
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rlib import jit, rposix
from rpython.rlib.rfloat import INFINITY, NAN, isfinite, isinf, isnan
from rpython.rtyper.lltypesystem import lltype, rffi
diff --git a/rpython/translator/__init__.py b/rpython/translator/__init__.py
--- a/rpython/translator/__init__.py
+++ b/rpython/translator/__init__.py
@@ -0,0 +1,3 @@
+from os.path import realpath, join, dirname
+cdir = realpath(join(dirname(__file__), 'c'))
+del realpath, join, dirname
diff --git a/rpython/translator/c/test/test_standalone.py b/rpython/translator/c/test/test_standalone.py
--- a/rpython/translator/c/test/test_standalone.py
+++ b/rpython/translator/c/test/test_standalone.py
@@ -13,7 +13,7 @@
from rpython.translator.c.genc import CStandaloneBuilder, ExternalCompilationInfo
from rpython.annotator.listdef import s_list_of_strings
from rpython.tool.udir import udir
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.conftest import option
def setup_module(module):
@@ -382,7 +382,7 @@
if str(path).find(':')>=0:
# bad choice of udir, there is a ':' in it which messes up the test
pass
- else:
+ else:
out, err = cbuilder.cmdexec("", err=True, env={'PYPYLOG': str(path)})
size = os.stat(str(path)).st_size
assert out.strip() == 'got:a.' + str(size) + '.'
From noreply at buildbot.pypy.org Tue Jul 1 10:39:15 2014
From: noreply at buildbot.pypy.org (Raemi)
Date: Tue, 1 Jul 2014 10:39:15 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: handle everything in
collect_cardrefs_to_nursery
Message-ID: <20140701083915.C12F21C024A@cobra.cs.uni-duesseldorf.de>
Author: Remi Meier
Branch: card-marking
Changeset: r1265:1f04257b1db7
Date: 2014-07-01 10:40 +0200
http://bitbucket.org/pypy/stmgc/changeset/1f04257b1db7/
Log: handle everything in collect_cardrefs_to_nursery
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -67,6 +67,7 @@
/* Card marking. Don't remove GCFLAG_WRITE_BARRIER because we
need to come back to _stm_write_slowpath_card() for every
card to mark. Add GCFLAG_CARDS_SET. */
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
obj->stm_flags |= GCFLAG_CARDS_SET;
assert(STM_PSEGMENT->old_objects_with_cards);
LIST_APPEND(STM_PSEGMENT->old_objects_with_cards, obj);
diff --git a/c7/stm/nursery.c b/c7/stm/nursery.c
--- a/c7/stm/nursery.c
+++ b/c7/stm/nursery.c
@@ -328,6 +328,7 @@
static inline void _collect_now(object_t *obj, bool was_definitely_young)
{
assert(!_is_young(obj));
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
dprintf(("_collect_now: %p\n", obj));
@@ -339,22 +340,6 @@
stmcb_trace((struct object_s *)realobj, &minor_trace_if_young);
obj->stm_flags |= GCFLAG_WRITE_BARRIER;
- if (obj->stm_flags & GCFLAG_CARDS_SET) {
- /* all objects that had WB cleared need to be fully synchronised
- on commit, so we have to mark all their cards */
- struct stm_priv_segment_info_s *pseg = get_priv_segment(
- STM_SEGMENT->segment_num);
-
- /* stm_wb-slowpath should never have triggered for young objs */
- assert(!was_definitely_young);
-
- if (!IS_OVERFLOW_OBJ(STM_PSEGMENT, obj)) {
- _reset_object_cards(pseg, obj, CARD_MARKED_OLD, true); /* mark all */
- } else {
- /* simply clear overflow */
- _reset_object_cards(pseg, obj, CARD_CLEAR, false);
- }
- }
}
/* else traced in collect_cardrefs_to_nursery if necessary */
}
@@ -371,12 +356,11 @@
assert(!_is_young(obj));
if (!(obj->stm_flags & GCFLAG_CARDS_SET)) {
- /* handled in _collect_now() */
+ /* sometimes we remove the CARDS_SET in the WB slowpath, see core.c */
continue;
}
- /* traces cards, clears marked cards or marks them old if
- necessary */
+ /* traces cards, clears marked cards or marks them old if necessary */
_trace_card_object(obj);
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
@@ -551,6 +535,7 @@
if (!commit && STM_PSEGMENT->large_overflow_objects == NULL)
STM_PSEGMENT->large_overflow_objects = list_create();
+
/* All the objects we move out of the nursery become "overflow"
objects. We use the list 'objects_pointing_to_nursery'
to hold the ones we didn't trace so far. */
@@ -558,6 +543,11 @@
if (STM_PSEGMENT->objects_pointing_to_nursery == NULL) {
STM_PSEGMENT->objects_pointing_to_nursery = list_create();
+ /* collect objs with cards, adds to objects_pointing_to_nursery
+ and makes sure there are no objs with cards left in
+ modified_old_objs */
+ collect_cardrefs_to_nursery();
+
/* See the doc of 'objects_pointing_to_nursery': if it is NULL,
then it is implicitly understood to be equal to
'modified_old_objects'. We could copy modified_old_objects
@@ -567,6 +557,7 @@
num_old = 0;
}
else {
+ collect_cardrefs_to_nursery();
num_old = STM_PSEGMENT->modified_old_objects_markers_num_old;
}
@@ -574,7 +565,6 @@
collect_roots_in_nursery();
- collect_cardrefs_to_nursery();
collect_oldrefs_to_nursery();
assert(list_is_empty(STM_PSEGMENT->old_objects_with_cards));
From noreply at buildbot.pypy.org Tue Jul 1 10:45:33 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 10:45:33 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: Another interface needed for the
PyPy JIT
Message-ID: <20140701084533.295531C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1266:2f4d07820293
Date: 2014-06-30 17:48 +0200
http://bitbucket.org/pypy/stmgc/changeset/2f4d07820293/
Log: Another interface needed for the PyPy JIT
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -244,6 +244,14 @@
return mark_card;
}
+char *_stm_write_slowpath_card_extra_base(void)
+{
+ /* for the PyPy JIT: _stm_write_slowpath_card_extra_base[obj >> 4]
+ is the byte that must be set to CARD_MARKED. The logic below
+ does the same, but more explicitly. */
+ return (char *)write_locks - WRITELOCK_START + 1;
+}
+
void _stm_write_slowpath_card(object_t *obj, uintptr_t index)
{
/* If CARDS_SET is not set so far, issue a normal write barrier.
diff --git a/c7/stmgc.h b/c7/stmgc.h
--- a/c7/stmgc.h
+++ b/c7/stmgc.h
@@ -109,6 +109,7 @@
void _stm_write_slowpath(object_t *);
void _stm_write_slowpath_card(object_t *, uintptr_t);
char _stm_write_slowpath_card_extra(object_t *);
+char *_stm_write_slowpath_card_extra_base(void);
object_t *_stm_allocate_slowpath(ssize_t);
object_t *_stm_allocate_external(ssize_t);
void _stm_become_inevitable(const char*);
From noreply at buildbot.pypy.org Tue Jul 1 10:45:34 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 10:45:34 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: Expose this value 100 too
Message-ID: <20140701084534.4EC211C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1267:e1df81263680
Date: 2014-06-30 17:57 +0200
http://bitbucket.org/pypy/stmgc/changeset/e1df81263680/
Log: Expose this value 100 too
diff --git a/c7/stm/core.h b/c7/stm/core.h
--- a/c7/stm/core.h
+++ b/c7/stm/core.h
@@ -225,10 +225,10 @@
static uint8_t write_locks[WRITELOCK_END - WRITELOCK_START];
enum /* card values for write_locks */ {
- CARD_CLEAR = 0, /* card not used at all */
- CARD_MARKED = 100, /* card marked for tracing in the next gc */
- CARD_MARKED_OLD = 101, /* card was marked before, but cleared
- in a GC */
+ CARD_CLEAR = 0, /* card not used at all */
+ CARD_MARKED = _STM_CARD_MARKED, /* card marked for tracing in the next gc */
+ CARD_MARKED_OLD = 101, /* card was marked before, but cleared
+ in a GC */
};
diff --git a/c7/stmgc.h b/c7/stmgc.h
--- a/c7/stmgc.h
+++ b/c7/stmgc.h
@@ -110,6 +110,7 @@
void _stm_write_slowpath_card(object_t *, uintptr_t);
char _stm_write_slowpath_card_extra(object_t *);
char *_stm_write_slowpath_card_extra_base(void);
+#define _STM_CARD_MARKED 100
object_t *_stm_allocate_slowpath(ssize_t);
object_t *_stm_allocate_external(ssize_t);
void _stm_become_inevitable(const char*);
From noreply at buildbot.pypy.org Tue Jul 1 10:45:35 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 10:45:35 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: merge heads
Message-ID: <20140701084535.786651C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1268:b9101a55e80d
Date: 2014-07-01 10:45 +0200
http://bitbucket.org/pypy/stmgc/changeset/b9101a55e80d/
Log: merge heads
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -67,6 +67,7 @@
/* Card marking. Don't remove GCFLAG_WRITE_BARRIER because we
need to come back to _stm_write_slowpath_card() for every
card to mark. Add GCFLAG_CARDS_SET. */
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
obj->stm_flags |= GCFLAG_CARDS_SET;
assert(STM_PSEGMENT->old_objects_with_cards);
LIST_APPEND(STM_PSEGMENT->old_objects_with_cards, obj);
diff --git a/c7/stm/nursery.c b/c7/stm/nursery.c
--- a/c7/stm/nursery.c
+++ b/c7/stm/nursery.c
@@ -328,6 +328,7 @@
static inline void _collect_now(object_t *obj, bool was_definitely_young)
{
assert(!_is_young(obj));
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
dprintf(("_collect_now: %p\n", obj));
@@ -339,22 +340,6 @@
stmcb_trace((struct object_s *)realobj, &minor_trace_if_young);
obj->stm_flags |= GCFLAG_WRITE_BARRIER;
- if (obj->stm_flags & GCFLAG_CARDS_SET) {
- /* all objects that had WB cleared need to be fully synchronised
- on commit, so we have to mark all their cards */
- struct stm_priv_segment_info_s *pseg = get_priv_segment(
- STM_SEGMENT->segment_num);
-
- /* stm_wb-slowpath should never have triggered for young objs */
- assert(!was_definitely_young);
-
- if (!IS_OVERFLOW_OBJ(STM_PSEGMENT, obj)) {
- _reset_object_cards(pseg, obj, CARD_MARKED_OLD, true); /* mark all */
- } else {
- /* simply clear overflow */
- _reset_object_cards(pseg, obj, CARD_CLEAR, false);
- }
- }
}
/* else traced in collect_cardrefs_to_nursery if necessary */
}
@@ -371,12 +356,11 @@
assert(!_is_young(obj));
if (!(obj->stm_flags & GCFLAG_CARDS_SET)) {
- /* handled in _collect_now() */
+ /* sometimes we remove the CARDS_SET in the WB slowpath, see core.c */
continue;
}
- /* traces cards, clears marked cards or marks them old if
- necessary */
+ /* traces cards, clears marked cards or marks them old if necessary */
_trace_card_object(obj);
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
@@ -551,6 +535,7 @@
if (!commit && STM_PSEGMENT->large_overflow_objects == NULL)
STM_PSEGMENT->large_overflow_objects = list_create();
+
/* All the objects we move out of the nursery become "overflow"
objects. We use the list 'objects_pointing_to_nursery'
to hold the ones we didn't trace so far. */
@@ -558,6 +543,11 @@
if (STM_PSEGMENT->objects_pointing_to_nursery == NULL) {
STM_PSEGMENT->objects_pointing_to_nursery = list_create();
+ /* collect objs with cards, adds to objects_pointing_to_nursery
+ and makes sure there are no objs with cards left in
+ modified_old_objs */
+ collect_cardrefs_to_nursery();
+
/* See the doc of 'objects_pointing_to_nursery': if it is NULL,
then it is implicitly understood to be equal to
'modified_old_objects'. We could copy modified_old_objects
@@ -567,6 +557,7 @@
num_old = 0;
}
else {
+ collect_cardrefs_to_nursery();
num_old = STM_PSEGMENT->modified_old_objects_markers_num_old;
}
@@ -574,7 +565,6 @@
collect_roots_in_nursery();
- collect_cardrefs_to_nursery();
collect_oldrefs_to_nursery();
assert(list_is_empty(STM_PSEGMENT->old_objects_with_cards));
From noreply at buildbot.pypy.org Tue Jul 1 11:09:55 2014
From: noreply at buildbot.pypy.org (Raemi)
Date: Tue, 1 Jul 2014 11:09:55 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: reset cards on overflow objs only
needed when aborting (otherwise they are already cleared by normal minor
collections)
Message-ID: <20140701090955.2E3C11C31F4@cobra.cs.uni-duesseldorf.de>
Author: Remi Meier
Branch: card-marking
Changeset: r1269:664aca4f69ca
Date: 2014-07-01 11:10 +0200
http://bitbucket.org/pypy/stmgc/changeset/664aca4f69ca/
Log: reset cards on overflow objs only needed when aborting (otherwise
they are already cleared by normal minor collections)
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -946,6 +946,24 @@
/* throw away the content of the nursery */
long bytes_in_nursery = throw_away_nursery(pseg);
+ /* modified_old_objects' cards get cleared in
+ reset_modified_from_other_segments. Objs in old_objs_with_cards but not
+ in modified_old_objs are overflow objects and handled here: */
+ if (pseg->large_overflow_objects != NULL) {
+ /* some overflow objects may have cards when aborting, clear them too */
+ LIST_FOREACH_R(pseg->large_overflow_objects, object_t * /*item*/,
+ {
+ struct object_s *realobj = (struct object_s *)
+ REAL_ADDRESS(pseg->pub.segment_base, item);
+
+ if (realobj->stm_flags & GCFLAG_CARDS_SET) {
+ /* CARDS_SET is enough since other HAS_CARDS objs
+ are already cleared */
+ _reset_object_cards(pseg, item, CARD_CLEAR, false);
+ }
+ });
+ }
+
/* reset all the modified objects (incl. re-adding GCFLAG_WRITE_BARRIER) */
reset_modified_from_other_segments(segment_num);
_verify_cards_cleared_in_all_lists(pseg);
diff --git a/c7/stm/nursery.c b/c7/stm/nursery.c
--- a/c7/stm/nursery.c
+++ b/c7/stm/nursery.c
@@ -325,7 +325,7 @@
-static inline void _collect_now(object_t *obj, bool was_definitely_young)
+static inline void _collect_now(object_t *obj)
{
assert(!_is_young(obj));
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
@@ -376,8 +376,7 @@
uintptr_t obj_sync_now = list_pop_item(lst);
object_t *obj = (object_t *)(obj_sync_now & ~FLAG_SYNC_LARGE);
- bool was_definitely_young = (obj_sync_now & FLAG_SYNC_LARGE);
- _collect_now(obj, was_definitely_young);
+ _collect_now(obj);
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
if (obj_sync_now & FLAG_SYNC_LARGE) {
@@ -407,7 +406,7 @@
dprintf(("collect_modified_old_objects\n"));
LIST_FOREACH_R(
STM_PSEGMENT->modified_old_objects, object_t * /*item*/,
- _collect_now(item, false));
+ _collect_now(item));
}
static void collect_roots_from_markers(uintptr_t num_old)
@@ -475,25 +474,6 @@
tree_clear(pseg->nursery_objects_shadows);
-
- /* modified_old_objects' cards get cleared in push_modified_to_other_segments
- or reset_modified_from_other_segments. Objs in old_objs_with_cards but not
- in modified_old_objs are overflow objects and handled here: */
- if (pseg->large_overflow_objects != NULL) {
- /* some overflow objects may have cards when aborting, clear them too */
- LIST_FOREACH_R(pseg->large_overflow_objects, object_t * /*item*/,
- {
- struct object_s *realobj = (struct object_s *)
- REAL_ADDRESS(pseg->pub.segment_base, item);
-
- if (realobj->stm_flags & GCFLAG_CARDS_SET) {
- /* CARDS_SET is enough since other HAS_CARDS objs
- are already cleared */
- _reset_object_cards(pseg, item, CARD_CLEAR, false);
- }
- });
- }
-
return nursery_used;
#pragma pop_macro("STM_SEGMENT")
#pragma pop_macro("STM_PSEGMENT")
From noreply at buildbot.pypy.org Tue Jul 1 11:39:39 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 11:39:39 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: Workaround for what seems like a
clang bug (I'm sure people would argue
Message-ID: <20140701093939.131831C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1270:6d6832a447c3
Date: 2014-07-01 11:37 +0200
http://bitbucket.org/pypy/stmgc/changeset/6d6832a447c3/
Log: Workaround for what seems like a clang bug (I'm sure people would
argue otherwise, but I don't care: it needs a workaround).
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -245,12 +245,13 @@
return mark_card;
}
-char *_stm_write_slowpath_card_extra_base(void)
+long _stm_write_slowpath_card_extra_base(void)
{
/* for the PyPy JIT: _stm_write_slowpath_card_extra_base[obj >> 4]
is the byte that must be set to CARD_MARKED. The logic below
does the same, but more explicitly. */
- return (char *)write_locks - WRITELOCK_START + 1;
+ return (((long)write_locks) - WRITELOCK_START + 1)
+ + 0x4000000000000000L; // <- workaround for a clang bug :-(
}
void _stm_write_slowpath_card(object_t *obj, uintptr_t index)
diff --git a/c7/stmgc.h b/c7/stmgc.h
--- a/c7/stmgc.h
+++ b/c7/stmgc.h
@@ -109,7 +109,7 @@
void _stm_write_slowpath(object_t *);
void _stm_write_slowpath_card(object_t *, uintptr_t);
char _stm_write_slowpath_card_extra(object_t *);
-char *_stm_write_slowpath_card_extra_base(void);
+long _stm_write_slowpath_card_extra_base(void);
#define _STM_CARD_MARKED 100
object_t *_stm_allocate_slowpath(ssize_t);
object_t *_stm_allocate_external(ssize_t);
From noreply at buildbot.pypy.org Tue Jul 1 11:42:45 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 11:42:45 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: import stmgc/6d6832a447c3 (branch
card-marking) and fix the call to _stm_write_slowpath_card_extra_base()
Message-ID: <20140701094245.1DF611C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72303:46d55a933c65
Date: 2014-07-01 11:42 +0200
http://bitbucket.org/pypy/pypy/changeset/46d55a933c65/
Log: import stmgc/6d6832a447c3 (branch card-marking) and fix the call to
_stm_write_slowpath_card_extra_base()
diff --git a/rpython/rlib/rstm.py b/rpython/rlib/rstm.py
--- a/rpython/rlib/rstm.py
+++ b/rpython/rlib/rstm.py
@@ -28,7 +28,7 @@
adr_write_slowpath_card_extra = (
CFlexSymbolic('((long)&_stm_write_slowpath_card_extra)'))
adr__stm_write_slowpath_card_extra_base = (
- CFlexSymbolic('((long)_stm_write_slowpath_card_extra_base())'))
+ CFlexSymbolic('(_stm_write_slowpath_card_extra_base()-0x4000000000000000L)'))
CARD_MARKED = CFlexSymbolic('_STM_CARD_MARKED')
CARD_SIZE = CFlexSymbolic('_STM_CARD_SIZE')
diff --git a/rpython/translator/stm/src_stm/revision b/rpython/translator/stm/src_stm/revision
--- a/rpython/translator/stm/src_stm/revision
+++ b/rpython/translator/stm/src_stm/revision
@@ -1,1 +1,1 @@
-e1df81263680
+6d6832a447c3
diff --git a/rpython/translator/stm/src_stm/stm/core.c b/rpython/translator/stm/src_stm/stm/core.c
--- a/rpython/translator/stm/src_stm/stm/core.c
+++ b/rpython/translator/stm/src_stm/stm/core.c
@@ -68,6 +68,7 @@
/* Card marking. Don't remove GCFLAG_WRITE_BARRIER because we
need to come back to _stm_write_slowpath_card() for every
card to mark. Add GCFLAG_CARDS_SET. */
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
obj->stm_flags |= GCFLAG_CARDS_SET;
assert(STM_PSEGMENT->old_objects_with_cards);
LIST_APPEND(STM_PSEGMENT->old_objects_with_cards, obj);
@@ -245,12 +246,13 @@
return mark_card;
}
-char *_stm_write_slowpath_card_extra_base(void)
+long _stm_write_slowpath_card_extra_base(void)
{
/* for the PyPy JIT: _stm_write_slowpath_card_extra_base[obj >> 4]
is the byte that must be set to CARD_MARKED. The logic below
does the same, but more explicitly. */
- return (char *)write_locks - WRITELOCK_START + 1;
+ return (((long)write_locks) - WRITELOCK_START + 1)
+ + 0x4000000000000000L; // <- workaround for a clang bug :-(
}
void _stm_write_slowpath_card(object_t *obj, uintptr_t index)
@@ -946,6 +948,24 @@
/* throw away the content of the nursery */
long bytes_in_nursery = throw_away_nursery(pseg);
+ /* modified_old_objects' cards get cleared in
+ reset_modified_from_other_segments. Objs in old_objs_with_cards but not
+ in modified_old_objs are overflow objects and handled here: */
+ if (pseg->large_overflow_objects != NULL) {
+ /* some overflow objects may have cards when aborting, clear them too */
+ LIST_FOREACH_R(pseg->large_overflow_objects, object_t * /*item*/,
+ {
+ struct object_s *realobj = (struct object_s *)
+ REAL_ADDRESS(pseg->pub.segment_base, item);
+
+ if (realobj->stm_flags & GCFLAG_CARDS_SET) {
+ /* CARDS_SET is enough since other HAS_CARDS objs
+ are already cleared */
+ _reset_object_cards(pseg, item, CARD_CLEAR, false);
+ }
+ });
+ }
+
/* reset all the modified objects (incl. re-adding GCFLAG_WRITE_BARRIER) */
reset_modified_from_other_segments(segment_num);
_verify_cards_cleared_in_all_lists(pseg);
diff --git a/rpython/translator/stm/src_stm/stm/nursery.c b/rpython/translator/stm/src_stm/stm/nursery.c
--- a/rpython/translator/stm/src_stm/stm/nursery.c
+++ b/rpython/translator/stm/src_stm/stm/nursery.c
@@ -326,9 +326,10 @@
-static inline void _collect_now(object_t *obj, bool was_definitely_young)
+static inline void _collect_now(object_t *obj)
{
assert(!_is_young(obj));
+ assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
dprintf(("_collect_now: %p\n", obj));
@@ -340,22 +341,6 @@
stmcb_trace((struct object_s *)realobj, &minor_trace_if_young);
obj->stm_flags |= GCFLAG_WRITE_BARRIER;
- if (obj->stm_flags & GCFLAG_CARDS_SET) {
- /* all objects that had WB cleared need to be fully synchronised
- on commit, so we have to mark all their cards */
- struct stm_priv_segment_info_s *pseg = get_priv_segment(
- STM_SEGMENT->segment_num);
-
- /* stm_wb-slowpath should never have triggered for young objs */
- assert(!was_definitely_young);
-
- if (!IS_OVERFLOW_OBJ(STM_PSEGMENT, obj)) {
- _reset_object_cards(pseg, obj, CARD_MARKED_OLD, true); /* mark all */
- } else {
- /* simply clear overflow */
- _reset_object_cards(pseg, obj, CARD_CLEAR, false);
- }
- }
}
/* else traced in collect_cardrefs_to_nursery if necessary */
}
@@ -372,12 +357,11 @@
assert(!_is_young(obj));
if (!(obj->stm_flags & GCFLAG_CARDS_SET)) {
- /* handled in _collect_now() */
+ /* sometimes we remove the CARDS_SET in the WB slowpath, see core.c */
continue;
}
- /* traces cards, clears marked cards or marks them old if
- necessary */
+ /* traces cards, clears marked cards or marks them old if necessary */
_trace_card_object(obj);
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
@@ -393,8 +377,7 @@
uintptr_t obj_sync_now = list_pop_item(lst);
object_t *obj = (object_t *)(obj_sync_now & ~FLAG_SYNC_LARGE);
- bool was_definitely_young = (obj_sync_now & FLAG_SYNC_LARGE);
- _collect_now(obj, was_definitely_young);
+ _collect_now(obj);
assert(!(obj->stm_flags & GCFLAG_CARDS_SET));
if (obj_sync_now & FLAG_SYNC_LARGE) {
@@ -424,7 +407,7 @@
dprintf(("collect_modified_old_objects\n"));
LIST_FOREACH_R(
STM_PSEGMENT->modified_old_objects, object_t * /*item*/,
- _collect_now(item, false));
+ _collect_now(item));
}
static void collect_roots_from_markers(uintptr_t num_old)
@@ -492,25 +475,6 @@
tree_clear(pseg->nursery_objects_shadows);
-
- /* modified_old_objects' cards get cleared in push_modified_to_other_segments
- or reset_modified_from_other_segments. Objs in old_objs_with_cards but not
- in modified_old_objs are overflow objects and handled here: */
- if (pseg->large_overflow_objects != NULL) {
- /* some overflow objects may have cards when aborting, clear them too */
- LIST_FOREACH_R(pseg->large_overflow_objects, object_t * /*item*/,
- {
- struct object_s *realobj = (struct object_s *)
- REAL_ADDRESS(pseg->pub.segment_base, item);
-
- if (realobj->stm_flags & GCFLAG_CARDS_SET) {
- /* CARDS_SET is enough since other HAS_CARDS objs
- are already cleared */
- _reset_object_cards(pseg, item, CARD_CLEAR, false);
- }
- });
- }
-
return nursery_used;
#pragma pop_macro("STM_SEGMENT")
#pragma pop_macro("STM_PSEGMENT")
@@ -552,6 +516,7 @@
if (!commit && STM_PSEGMENT->large_overflow_objects == NULL)
STM_PSEGMENT->large_overflow_objects = list_create();
+
/* All the objects we move out of the nursery become "overflow"
objects. We use the list 'objects_pointing_to_nursery'
to hold the ones we didn't trace so far. */
@@ -559,6 +524,11 @@
if (STM_PSEGMENT->objects_pointing_to_nursery == NULL) {
STM_PSEGMENT->objects_pointing_to_nursery = list_create();
+ /* collect objs with cards, adds to objects_pointing_to_nursery
+ and makes sure there are no objs with cards left in
+ modified_old_objs */
+ collect_cardrefs_to_nursery();
+
/* See the doc of 'objects_pointing_to_nursery': if it is NULL,
then it is implicitly understood to be equal to
'modified_old_objects'. We could copy modified_old_objects
@@ -568,6 +538,7 @@
num_old = 0;
}
else {
+ collect_cardrefs_to_nursery();
num_old = STM_PSEGMENT->modified_old_objects_markers_num_old;
}
@@ -575,7 +546,6 @@
collect_roots_in_nursery();
- collect_cardrefs_to_nursery();
collect_oldrefs_to_nursery();
assert(list_is_empty(STM_PSEGMENT->old_objects_with_cards));
diff --git a/rpython/translator/stm/src_stm/stmgc.h b/rpython/translator/stm/src_stm/stmgc.h
--- a/rpython/translator/stm/src_stm/stmgc.h
+++ b/rpython/translator/stm/src_stm/stmgc.h
@@ -110,7 +110,7 @@
void _stm_write_slowpath(object_t *);
void _stm_write_slowpath_card(object_t *, uintptr_t);
char _stm_write_slowpath_card_extra(object_t *);
-char *_stm_write_slowpath_card_extra_base(void);
+long _stm_write_slowpath_card_extra_base(void);
#define _STM_CARD_MARKED 100
object_t *_stm_allocate_slowpath(ssize_t);
object_t *_stm_allocate_external(ssize_t);
From noreply at buildbot.pypy.org Tue Jul 1 12:06:41 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 12:06:41 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: Increase the usable memory to
24GB (from 1.5GB). I'd like to increase
Message-ID: <20140701100641.BD7081C3331@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1271:f90f884585dd
Date: 2014-07-01 12:06 +0200
http://bitbucket.org/pypy/stmgc/changeset/f90f884585dd/
Log: Increase the usable memory to 24GB (from 1.5GB). I'd like to
increase it more but I'm getting again clang linking errors...
diff --git a/c7/demo/demo2.c b/c7/demo/demo2.c
--- a/c7/demo/demo2.c
+++ b/c7/demo/demo2.c
@@ -303,7 +303,7 @@
unregister_thread_local();
- stm_teardown();
+ //stm_teardown();
return 0;
}
diff --git a/c7/stm/core.h b/c7/stm/core.h
--- a/c7/stm/core.h
+++ b/c7/stm/core.h
@@ -14,7 +14,7 @@
#endif
-#define NB_PAGES (1500*256) // 1500MB
+#define NB_PAGES (24000*256) // 24GB
#define NB_SEGMENTS STM_NB_SEGMENTS
#define NB_SEGMENTS_MAX 240 /* don't increase NB_SEGMENTS past this */
#define MAP_PAGES_FLAGS (MAP_SHARED | MAP_ANONYMOUS | MAP_NORESERVE)
From noreply at buildbot.pypy.org Tue Jul 1 12:12:38 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 12:12:38 +0200 (CEST)
Subject: [pypy-commit] pypy default: Clarify to the compiler that this path
is not a fall-through
Message-ID: <20140701101238.BD1EC1C31F4@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72304:ef7d6396c2e4
Date: 2014-07-01 12:12 +0200
http://bitbucket.org/pypy/pypy/changeset/ef7d6396c2e4/
Log: Clarify to the compiler that this path is not a fall-through
diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py
--- a/rpython/translator/c/funcgen.py
+++ b/rpython/translator/c/funcgen.py
@@ -267,7 +267,7 @@
# Emit default case
yield 'default:'
if defaultlink is None:
- yield '\tassert(!"bad switch!!");'
+ yield '\tassert(!"bad switch!!"); abort();'
else:
for op in self.gen_link(defaultlink):
yield '\t' + op
From noreply at buildbot.pypy.org Tue Jul 1 13:07:03 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 13:07:03 +0200 (CEST)
Subject: [pypy-commit] stmgc card-marking: Reduce the limit to 2.5GB again.
We're getting relocation errors on
Message-ID: <20140701110703.097861C31F4@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: card-marking
Changeset: r1272:f18bff5ab704
Date: 2014-07-01 13:07 +0200
http://bitbucket.org/pypy/stmgc/changeset/f18bff5ab704/
Log: Reduce the limit to 2.5GB again. We're getting relocation errors on
pypy, and fork() takes forever...
diff --git a/c7/stm/core.h b/c7/stm/core.h
--- a/c7/stm/core.h
+++ b/c7/stm/core.h
@@ -14,7 +14,7 @@
#endif
-#define NB_PAGES (24000*256) // 24GB
+#define NB_PAGES (2500*256) // 2500MB
#define NB_SEGMENTS STM_NB_SEGMENTS
#define NB_SEGMENTS_MAX 240 /* don't increase NB_SEGMENTS past this */
#define MAP_PAGES_FLAGS (MAP_SHARED | MAP_ANONYMOUS | MAP_NORESERVE)
From noreply at buildbot.pypy.org Tue Jul 1 13:08:11 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 13:08:11 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: import stmgc/f18bff5ab704 (branch
card-marking)
Message-ID: <20140701110811.292141C31F4@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72305:2903b643cad6
Date: 2014-07-01 13:07 +0200
http://bitbucket.org/pypy/pypy/changeset/2903b643cad6/
Log: import stmgc/f18bff5ab704 (branch card-marking)
diff --git a/rpython/translator/stm/src_stm/revision b/rpython/translator/stm/src_stm/revision
--- a/rpython/translator/stm/src_stm/revision
+++ b/rpython/translator/stm/src_stm/revision
@@ -1,1 +1,1 @@
-6d6832a447c3
+f18bff5ab704
diff --git a/rpython/translator/stm/src_stm/stm/core.h b/rpython/translator/stm/src_stm/stm/core.h
--- a/rpython/translator/stm/src_stm/stm/core.h
+++ b/rpython/translator/stm/src_stm/stm/core.h
@@ -15,7 +15,7 @@
#endif
-#define NB_PAGES (1500*256) // 1500MB
+#define NB_PAGES (2500*256) // 2500MB
#define NB_SEGMENTS STM_NB_SEGMENTS
#define NB_SEGMENTS_MAX 240 /* don't increase NB_SEGMENTS past this */
#define MAP_PAGES_FLAGS (MAP_SHARED | MAP_ANONYMOUS | MAP_NORESERVE)
From noreply at buildbot.pypy.org Tue Jul 1 16:14:40 2014
From: noreply at buildbot.pypy.org (Raemi)
Date: Tue, 1 Jul 2014 16:14:40 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: be sure to initialize the
transaction_length when starting a thread
Message-ID: <20140701141440.ADA391C024A@cobra.cs.uni-duesseldorf.de>
Author: Remi Meier
Branch: stmgc-c7
Changeset: r72306:f1bc1a8a5ae1
Date: 2014-07-01 16:14 +0200
http://bitbucket.org/pypy/pypy/changeset/f1bc1a8a5ae1/
Log: be sure to initialize the transaction_length when starting a thread
diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py
--- a/rpython/rlib/rthread.py
+++ b/rpython/rlib/rthread.py
@@ -62,7 +62,7 @@
c_thread_acquirelock = llexternal('RPyThreadAcquireLock', [TLOCKP, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
-c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
+c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
[TLOCKP, rffi.LONGLONG, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
@@ -97,9 +97,11 @@
@specialize.arg(0)
def ll_start_new_thread(func):
- if rgc.stm_is_enabled:
- from rpython.rlib.rstm import register_invoke_around_extcall
+ if rgc.stm_is_enabled():
+ from rpython.rlib.rstm import (register_invoke_around_extcall,
+ set_transaction_length)
register_invoke_around_extcall()
+ set_transaction_length(1.0)
ident = c_thread_start(func)
if ident == -1:
raise error("can't start new thread")
From noreply at buildbot.pypy.org Tue Jul 1 16:57:52 2014
From: noreply at buildbot.pypy.org (Raemi)
Date: Tue, 1 Jul 2014 16:57:52 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: adapt some tests and make
hint_commit_soon do something again even if transaction_length is set to
unlimited. It previously didn't do anything in that case,
so now we will get more breaks again since we call stmcb_commit_soon in
minor collections.
Message-ID: <20140701145752.E05731C0083@cobra.cs.uni-duesseldorf.de>
Author: Remi Meier
Branch: stmgc-c7
Changeset: r72307:3e144ed1d5b7
Date: 2014-07-01 16:58 +0200
http://bitbucket.org/pypy/pypy/changeset/3e144ed1d5b7/
Log: adapt some tests and make hint_commit_soon do something again even
if transaction_length is set to unlimited. It previously didn't do
anything in that case, so now we will get more breaks again since we
call stmcb_commit_soon in minor collections.
diff --git a/rpython/translator/stm/src_stm/stmgcintf.c b/rpython/translator/stm/src_stm/stmgcintf.c
--- a/rpython/translator/stm/src_stm/stmgcintf.c
+++ b/rpython/translator/stm/src_stm/stmgcintf.c
@@ -42,7 +42,8 @@
if (((long)pypy_stm_nursery_low_fill_mark_saved) > 0) {
pypy_stm_nursery_low_fill_mark_saved = 0;
}
- } else if (((long)pypy_stm_nursery_low_fill_mark) > 0) {
+ } else {
+ /* if (((long)pypy_stm_nursery_low_fill_mark) > 0) */
/* if not set to unlimited by pypy_stm_setup() (s.b.) */
pypy_stm_nursery_low_fill_mark = 0;
}
diff --git a/rpython/translator/stm/test/test_ztranslated.py b/rpython/translator/stm/test/test_ztranslated.py
--- a/rpython/translator/stm/test/test_ztranslated.py
+++ b/rpython/translator/stm/test/test_ztranslated.py
@@ -90,6 +90,7 @@
def test_should_break_transaction(self):
def entry_point(argv):
+ rstm.hint_commit_soon()
print '<', int(rstm.should_break_transaction()), '>'
return 0
t, cbuilder = self.compile(entry_point)
@@ -213,7 +214,7 @@
S = lltype.GcStruct('S', ('got_exception', OBJECTPTR))
PS = lltype.Ptr(S)
perform_transaction = rstm.make_perform_transaction(check, PS)
-
+
from rpython.rtyper.lltypesystem import lltype
R = lltype.GcStruct('R', ('x', lltype.Signed))
S1 = lltype.Struct('S1', ('r', lltype.Ptr(R)))
@@ -281,6 +282,11 @@
Parent().xy = 0
globf.xy = -2
globf.yx = 'hi there %d' % len(argv)
+
+ # make sure perform_transaction breaks the transaction:
+ rstm.hint_commit_soon()
+ assert rstm.should_break_transaction()
+
perform_transaction(lltype.nullptr(PS.TO))
return 0
t, cbuilder = self.compile(main)
@@ -378,6 +384,9 @@
perform_transaction = rstm.make_perform_transaction(check, PS)
def main(argv):
+ # make sure perform_transaction breaks the transaction:
+ rstm.hint_commit_soon()
+ assert rstm.should_break_transaction()
perform_transaction(lltype.nullptr(PS.TO))
return 0
@@ -589,8 +598,8 @@
'File "/tmp/foobaz.py", line 73, in bar\n'
'stopping bar\n') in data
assert ('starting some_extremely_longish_and_boring_function_name\n'
- 'File "...bla/br/project/foobaz.py", line 81,'
- ' in some_extremely_longish_a...\n') in data
+ 'File "\n') in data
def test_pypy_marker_2(self):
import time
@@ -619,6 +628,11 @@
llop.stm_setup_expand_marker_for_pypy(
lltype.Void, pycode1,
"co_filename", "co_name", "co_firstlineno", "co_lnotab")
+
+ # make sure perform_transaction breaks the transaction:
+ rstm.hint_commit_soon()
+ assert rstm.should_break_transaction()
+
perform_transaction(lltype.malloc(S))
return 0
#
From noreply at buildbot.pypy.org Tue Jul 1 20:47:02 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 1 Jul 2014 20:47:02 +0200 (CEST)
Subject: [pypy-commit] cffi default: Add malloc.h,
needed for alloca() in this file.
Message-ID: <20140701184702.84FC21C0083@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1533:003b8ea084ef
Date: 2014-07-01 20:47 +0200
http://bitbucket.org/cffi/cffi/changeset/003b8ea084ef/
Log: Add malloc.h, needed for alloca() in this file.
diff --git a/c/misc_win32.h b/c/misc_win32.h
--- a/c/misc_win32.h
+++ b/c/misc_win32.h
@@ -1,3 +1,4 @@
+#include <malloc.h>  /* for alloca() */
/************************************************************/
/* errno and GetLastError support */
From noreply at buildbot.pypy.org Wed Jul 2 00:17:59 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Wed, 2 Jul 2014 00:17:59 +0200 (CEST)
Subject: [pypy-commit] pypy default: fix imports
Message-ID: <20140701221800.0AB091C3331@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch:
Changeset: r72308:ec8a85b373b9
Date: 2014-07-01 23:17 +0100
http://bitbucket.org/pypy/pypy/changeset/ec8a85b373b9/
Log: fix imports
diff --git a/pypy/module/_lsprof/interp_lsprof.py b/pypy/module/_lsprof/interp_lsprof.py
--- a/pypy/module/_lsprof/interp_lsprof.py
+++ b/pypy/module/_lsprof/interp_lsprof.py
@@ -11,7 +11,7 @@
from rpython.rlib.rtimer import read_timestamp, _is_64_bit
from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rlib.rarithmetic import r_longlong
import time, sys
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -10,7 +10,7 @@
from rpython.rtyper.lltypesystem import ll2ctypes
from rpython.rtyper.annlowlevel import llhelper
from rpython.rlib.objectmodel import we_are_translated
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.gensupp import NameManager
from rpython.tool.udir import udir
From noreply at buildbot.pypy.org Wed Jul 2 08:24:49 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 08:24:49 +0200 (CEST)
Subject: [pypy-commit] pypy default: Detect the x32 mode. (Note that there
are compilation issues too
Message-ID: <20140702062449.069F61C3225@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72309:049f1e96b1de
Date: 2014-07-02 08:24 +0200
http://bitbucket.org/pypy/pypy/changeset/049f1e96b1de/
Log: Detect the x32 mode. (Note that there are compilation issues too
which may prevent a translate.py from ever reaching this point, but
well, if we fix these, then we'll hit this barrier rather than
compile a buggy executable.)
diff --git a/rpython/jit/backend/detect_cpu.py b/rpython/jit/backend/detect_cpu.py
--- a/rpython/jit/backend/detect_cpu.py
+++ b/rpython/jit/backend/detect_cpu.py
@@ -73,11 +73,14 @@
result = MODEL_X86_64
else:
assert sys.maxint == 2**31-1
- from rpython.jit.backend.x86.detect_sse2 import detect_sse2
- if detect_sse2():
+ from rpython.jit.backend.x86 import detect_sse2
+ if detect_sse2.detect_sse2():
result = MODEL_X86
else:
result = MODEL_X86_NO_SSE2
+ if detect_sse2.detect_x32_mode():
+ raise ProcessorAutodetectError(
+ 'JITting in x32 mode is not implemented')
#
if result.startswith('arm'):
from rpython.jit.backend.arm.detect import detect_float
diff --git a/rpython/jit/backend/x86/detect_sse2.py b/rpython/jit/backend/x86/detect_sse2.py
--- a/rpython/jit/backend/x86/detect_sse2.py
+++ b/rpython/jit/backend/x86/detect_sse2.py
@@ -1,3 +1,4 @@
+import sys
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib.rmmap import alloc, free
@@ -18,9 +19,26 @@
free(data, 4096)
return bool(code & (1<<25)) and bool(code & (1<<26))
+def detect_x32_mode():
+ data = alloc(4096)
+ pos = 0 # 32-bit 64-bit / x32
+ for c in ("\x48" # DEC EAX
+ "\xB8\xC8\x00\x00\x00"# MOV EAX, 200 MOV RAX, 0x40404040000000C8
+ "\x40\x40\x40\x40" # 4x INC EAX
+ "\xC3"): # RET RET
+ data[pos] = c
+ pos += 1
+ fnptr = rffi.cast(lltype.Ptr(lltype.FuncType([], lltype.Signed)), data)
+ code = fnptr()
+ free(data, 4096)
+ assert code in (200, 204, 0x40404040000000C8)
+ return code == 200
+
if __name__ == '__main__':
if detect_sse2():
print 'Processor supports sse2.'
else:
print 'Missing processor support for sse2.'
+ if detect_x32_mode():
+ print 'Process is running in "x32" mode.'
From noreply at buildbot.pypy.org Wed Jul 2 08:49:38 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 08:49:38 +0200 (CEST)
Subject: [pypy-commit] pypy default: test: doing a large number of
ping-pongs between two threads, using locks,
Message-ID: <20140702064938.43C4C1C3225@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72310:1a8c4f5e30da
Date: 2014-07-02 08:49 +0200
http://bitbucket.org/pypy/pypy/changeset/1a8c4f5e30da/
Log: test: doing a large number of ping-pongs between two threads, using
locks, should complete in a reasonable time on a translated pypy
with -A.
diff --git a/pypy/module/thread/test/support.py b/pypy/module/thread/test/support.py
--- a/pypy/module/thread/test/support.py
+++ b/pypy/module/thread/test/support.py
@@ -44,6 +44,7 @@
spaceconfig = dict(usemodules=('thread', 'rctime', 'signal'))
def setup_class(cls):
+ cls.w_runappdirect = cls.space.wrap(cls.runappdirect)
if cls.runappdirect:
def plain_waitfor(self, condition, delay=1):
adaptivedelay = 0.04
diff --git a/pypy/module/thread/test/test_lock.py b/pypy/module/thread/test/test_lock.py
--- a/pypy/module/thread/test/test_lock.py
+++ b/pypy/module/thread/test/test_lock.py
@@ -57,8 +57,34 @@
assert lock.acquire() is True
assert lock.acquire(False) is False
raises(TypeError, lock.acquire, True, timeout=.1)
- lock._py3k_acquire(True, timeout=.01)
- lock._py3k_acquire(True, .01)
+ if hasattr(lock, '_py3k_acquire'):
+ lock._py3k_acquire(True, timeout=.01)
+ lock._py3k_acquire(True, .01)
+ else:
+ assert self.runappdirect, "missing lock._py3k_acquire()"
+
+ def test_ping_pong(self):
+ # The purpose of this test is that doing a large number of ping-pongs
+ # between two threads, using locks, should complete in a reasonable
+ # time on a translated pypy with -A. If the GIL logic causes too
+ # much sleeping, then it will fail.
+ import thread, time
+ COUNT = 100000 if self.runappdirect else 50
+ lock1 = thread.allocate_lock()
+ lock2 = thread.allocate_lock()
+ def fn():
+ for i in range(COUNT):
+ lock1.acquire()
+ lock2.release()
+ lock2.acquire()
+ print "STARTING"
+ start = time.time()
+ thread.start_new_thread(fn, ())
+ for i in range(COUNT):
+ lock2.acquire()
+ lock1.release()
+ stop = time.time()
+ assert stop - start < 30.0 # ~0.6 sec on pypy-c-jit
def test_compile_lock():
From noreply at buildbot.pypy.org Wed Jul 2 10:12:29 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 10:12:29 +0200 (CEST)
Subject: [pypy-commit] pypy default: Optimize array.extend() and make it
support directly lists of ints or floats.
Message-ID: <20140702081229.9D4391C0083@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72311:6b30b8d83c24
Date: 2014-07-02 10:11 +0200
http://bitbucket.org/pypy/pypy/changeset/6b30b8d83c24/
Log: Optimize array.extend() and make it support directly lists of ints
or floats.
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -674,6 +674,10 @@
return rffi.cast(mytype.itemtype, item)
#
# "regular" case: it fits in an rpython integer (lltype.Signed)
+ # or it is a float
+ return self.item_from_int_or_float(item)
+
+ def item_from_int_or_float(self, item):
result = rffi.cast(mytype.itemtype, item)
if mytype.canoverflow:
if rffi.cast(lltype.Signed, result) != item:
@@ -686,8 +690,8 @@
% mytype.bytes)
if not mytype.signed:
msg = 'un' + msg # 'signed' => 'unsigned'
- raise OperationError(space.w_OverflowError,
- space.wrap(msg))
+ raise OperationError(self.space.w_OverflowError,
+ self.space.wrap(msg))
return result
def __del__(self):
@@ -734,27 +738,32 @@
def fromsequence(self, w_seq):
space = self.space
oldlen = self.len
+ newlen = oldlen
try:
- new = space.len_w(w_seq)
- self.setlen(self.len + new)
- except OperationError:
- pass
-
- i = 0
- try:
- if mytype.typecode == 'u':
- myiter = space.unpackiterable
+ # optimized case for arrays of integers or floats
+ if mytype.unwrap == 'int_w':
+ lst = space.listview_int(w_seq)
+ elif mytype.unwrap == 'float_w':
+ lst = space.listview_float(w_seq)
else:
- myiter = space.listview
- for w_i in myiter(w_seq):
- if oldlen + i >= self.len:
- self.setlen(oldlen + i + 1)
- self.buffer[oldlen + i] = self.item_w(w_i)
- i += 1
- except OperationError:
- self.setlen(oldlen + i)
- raise
- self.setlen(oldlen + i)
+ lst = None
+ if lst is not None:
+ self.setlen(oldlen + len(lst))
+ buf = self.buffer
+ for num in lst:
+ buf[newlen] = self.item_from_int_or_float(num)
+ newlen += 1
+ return
+ #
+ # this is the general case
+ lst_w = space.listview(w_seq)
+ self.setlen(oldlen + len(lst_w))
+ for w_num in lst_w:
+ self.buffer[newlen] = self.item_w(w_num)
+ newlen += 1
+ finally:
+ if self.len != newlen:
+ self.setlen(newlen)
def extend(self, w_iterable, accept_different_array=False):
space = self.space
From noreply at buildbot.pypy.org Wed Jul 2 11:18:48 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 11:18:48 +0200 (CEST)
Subject: [pypy-commit] pypy default: Issue #1783
Message-ID: <20140702091848.9B2C61C0083@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72312:93a8f5aeb3bc
Date: 2014-07-02 11:18 +0200
http://bitbucket.org/pypy/pypy/changeset/93a8f5aeb3bc/
Log: Issue #1783
Improve array.extend(x) by not requiring an intermediate list in
case there isn't one, e.g. if x is a generator or iterator.
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -967,6 +967,13 @@
"""
return self.unpackiterable(w_iterable, expected_length)
+ def listview_no_unpack(self, w_iterable):
+ """ Same as listview() if cheap. If 'w_iterable' is something like
+ a generator, for example, then return None instead.
+ May return None anyway.
+ """
+ return None
+
def listview_bytes(self, w_list):
""" Return a list of unwrapped strings out of a list of strings. If the
argument is not a list or does not contain only strings, return None.
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -15,6 +15,7 @@
interp2app, interpindirect2app, unwrap_spec)
from pypy.interpreter.typedef import (
GetSetProperty, TypeDef, make_weakref_descr)
+from pypy.interpreter.generator import GeneratorIterator
from pypy.module._file.interp_file import W_File
from pypy.objspace.std.floatobject import W_FloatObject
@@ -630,6 +631,10 @@
def make_array(mytype):
W_ArrayBase = globals()['W_ArrayBase']
+ unpack_driver = jit.JitDriver(name='unpack_array',
+ greens=['tp'],
+ reds=['self', 'w_iterator'])
+
class W_Array(W_ArrayBase):
itemsize = mytype.bytes
typecode = mytype.typecode
@@ -739,31 +744,64 @@
space = self.space
oldlen = self.len
newlen = oldlen
- try:
- # optimized case for arrays of integers or floats
- if mytype.unwrap == 'int_w':
- lst = space.listview_int(w_seq)
- elif mytype.unwrap == 'float_w':
- lst = space.listview_float(w_seq)
- else:
- lst = None
- if lst is not None:
- self.setlen(oldlen + len(lst))
+
+ # optimized case for arrays of integers or floats
+ if mytype.unwrap == 'int_w':
+ lst = space.listview_int(w_seq)
+ elif mytype.unwrap == 'float_w':
+ lst = space.listview_float(w_seq)
+ else:
+ lst = None
+ if lst is not None:
+ self.setlen(oldlen + len(lst))
+ try:
buf = self.buffer
for num in lst:
buf[newlen] = self.item_from_int_or_float(num)
newlen += 1
- return
- #
- # this is the general case
- lst_w = space.listview(w_seq)
+ except OperationError:
+ self.setlen(newlen)
+ raise
+ return
+
+ # this is the common case: w_seq is a list or a tuple
+ lst_w = space.listview_no_unpack(w_seq)
+ if lst_w is not None:
self.setlen(oldlen + len(lst_w))
- for w_num in lst_w:
- self.buffer[newlen] = self.item_w(w_num)
- newlen += 1
- finally:
- if self.len != newlen:
- self.setlen(newlen)
+ buf = self.buffer
+ try:
+ for w_num in lst_w:
+ # note: self.item_w() might invoke arbitrary code.
+ # In case it resizes the same array, then strange
+ # things may happen, but as we don't reload 'buf'
+ # we know that one is big enough for all items
+ # (so at least we avoid crashes)
+ buf[newlen] = self.item_w(w_num)
+ newlen += 1
+ except OperationError:
+ if buf == self.buffer:
+ self.setlen(newlen)
+ raise
+ return
+
+ self._fromiterable(w_seq)
+
+ def _fromiterable(self, w_seq):
+ # a more careful case if w_seq happens to be a very large
+ # iterable: don't copy the items into some intermediate list
+ w_iterator = self.space.iter(w_seq)
+ tp = self.space.type(w_iterator)
+ while True:
+ unpack_driver.jit_merge_point(tp=tp, self=self,
+ w_iterator=w_iterator)
+ space = self.space
+ try:
+ w_item = space.next(w_iterator)
+ except OperationError, e:
+ if not e.match(space, space.w_StopIteration):
+ raise
+ break # done
+ self.descr_append(space, w_item)
def extend(self, w_iterable, accept_different_array=False):
space = self.space
@@ -806,8 +844,9 @@
def descr_append(self, space, w_x):
x = self.item_w(w_x)
- self.setlen(self.len + 1)
- self.buffer[self.len - 1] = x
+ index = self.len
+ self.setlen(index + 1)
+ self.buffer[index] = x
# List interface
def descr_count(self, space, w_val):
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -421,14 +421,19 @@
assert expected_length >= 0
return self.fixedview(w_obj, expected_length, unroll=True)
+ def listview_no_unpack(self, w_obj):
+ if type(w_obj) is W_ListObject:
+ return w_obj.getitems()
+ elif isinstance(w_obj, W_AbstractTupleObject) and self._uses_tuple_iter(w_obj):
+ return w_obj.getitems_copy()
+ elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems()
+ else:
+ return None
+
def listview(self, w_obj, expected_length=-1):
- if type(w_obj) is W_ListObject:
- t = w_obj.getitems()
- elif isinstance(w_obj, W_AbstractTupleObject) and self._uses_tuple_iter(w_obj):
- t = w_obj.getitems_copy()
- elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
- t = w_obj.getitems()
- else:
+ t = self.listview_no_unpack(w_obj)
+ if t is None:
return ObjSpace.unpackiterable(self, w_obj, expected_length)
if expected_length != -1 and len(t) != expected_length:
raise self._wrap_expected_length(expected_length, len(t))
From noreply at buildbot.pypy.org Wed Jul 2 15:51:07 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 15:51:07 +0200 (CEST)
Subject: [pypy-commit] pypy default: Add some logic in an attempt to fix
issue #1782. There are cases where it slows things down; see comments.
Message-ID: <20140702135107.3B54F1D3522@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72313:640d0a2fedc6
Date: 2014-07-02 14:33 +0200
http://bitbucket.org/pypy/pypy/changeset/640d0a2fedc6/
Log: Add some logic in an attempt to fix issue #1782. There are cases
where it slows things down; see comments.
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -61,6 +61,13 @@
return self.send_ex(w_arg)
def send_ex(self, w_arg, operr=None):
+ pycode = self.pycode
+ if jit.we_are_jitted() and should_not_inline(pycode):
+ generatorentry_driver.jit_merge_point(gen=self, w_arg=w_arg,
+ operr=operr, pycode=pycode)
+ return self._send_ex(w_arg, operr)
+
+ def _send_ex(self, w_arg, operr):
space = self.space
if self.running:
raise OperationError(space.w_ValueError,
@@ -72,8 +79,7 @@
if operr is None:
operr = OperationError(space.w_StopIteration, space.w_None)
raise operr
- # XXX it's not clear that last_instr should be promoted at all
- # but as long as it is necessary for call_assembler, let's do it early
+
last_instr = jit.promote(frame.last_instr)
if last_instr == -1:
if w_arg and not space.is_w(w_arg, space.w_None):
@@ -214,3 +220,38 @@
"interrupting generator of ")
break
block = block.previous
+
+
+
+def get_printable_location_genentry(bytecode):
+ return '%s ' % (bytecode.get_repr(),)
+generatorentry_driver = jit.JitDriver(greens=['pycode'],
+ reds=['gen', 'w_arg', 'operr'],
+ get_printable_location =
+ get_printable_location_genentry,
+ name='generatorentry')
+
+from pypy.tool.stdlib_opcode import HAVE_ARGUMENT, opmap
+YIELD_VALUE = opmap['YIELD_VALUE']
+
+ at jit.elidable_promote()
+def should_not_inline(pycode):
+ # Should not inline generators with more than one "yield",
+ # as an approximative fix (see issue #1782). There are cases
+ # where it slows things down; for example calls to a simple
+ # generator that just produces a few simple values with a few
+ # consecutive "yield" statements. It fixes the near-infinite
+ # slow-down in issue #1782, though...
+ count_yields = 0
+ code = pycode.co_code
+ n = len(code)
+ i = 0
+ while i < n:
+ c = code[i]
+ op = ord(c)
+ if op == YIELD_VALUE:
+ count_yields += 1
+ i += 1
+ if op >= HAVE_ARGUMENT:
+ i += 2
+ return count_yields >= 2
diff --git a/pypy/interpreter/test/test_generator.py b/pypy/interpreter/test/test_generator.py
--- a/pypy/interpreter/test/test_generator.py
+++ b/pypy/interpreter/test/test_generator.py
@@ -278,4 +278,21 @@
def f():
yield 1
raise StopIteration
- assert tuple(f()) == (1,)
\ No newline at end of file
+ assert tuple(f()) == (1,)
+
+
+def test_should_not_inline(space):
+ from pypy.interpreter.generator import should_not_inline
+ w_co = space.appexec([], '''():
+ def g(x):
+ yield x + 5
+ return g.func_code
+ ''')
+ assert should_not_inline(w_co) == False
+ w_co = space.appexec([], '''():
+ def g(x):
+ yield x + 5
+ yield x + 6
+ return g.func_code
+ ''')
+ assert should_not_inline(w_co) == True
From noreply at buildbot.pypy.org Wed Jul 2 16:26:16 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 16:26:16 +0200 (CEST)
Subject: [pypy-commit] pypy default: Issue #1779: PyList_GetItem() took a
time proportional to the length of
Message-ID: <20140702142616.AABEA1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72314:07de89e151e9
Date: 2014-07-02 16:24 +0200
http://bitbucket.org/pypy/pypy/changeset/07de89e151e9/
Log: Issue #1779: PyList_GetItem() took a time proportional to the length
of the list in case the list's strategy is not the default one.
diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py
--- a/pypy/module/cpyext/listobject.py
+++ b/pypy/module/cpyext/listobject.py
@@ -46,11 +46,11 @@
IndexError exception."""
if not isinstance(w_list, W_ListObject):
PyErr_BadInternalCall(space)
- wrappeditems = w_list.getitems()
- if index < 0 or index >= len(wrappeditems):
+ if index < 0 or index >= w_list.length():
raise OperationError(space.w_IndexError, space.wrap(
"list index out of range"))
- return borrow_from(w_list, wrappeditems[index])
+ w_item = w_list.getitem(index)
+ return borrow_from(w_list, w_item)
@cpython_api([PyObject, PyObject], rffi.INT_real, error=-1)
From noreply at buildbot.pypy.org Wed Jul 2 17:07:17 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 17:07:17 +0200 (CEST)
Subject: [pypy-commit] pypy default: Tweaks to timeit.py:
Message-ID: <20140702150717.6B46B1D350D@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72315:bbabcc9974eb
Date: 2014-07-02 17:06 +0200
http://bitbucket.org/pypy/pypy/changeset/bbabcc9974eb/
Log: Tweaks to timeit.py:
* don't use itertools.repeat(), just plainly do "while n > 0: n -=
1".
* recompile the source code each time before calling inner(). There
are situations like Issue #1776 where PyPy tries to reuse the JIT
code from before, but that's not going to work: the first thing
the function does is the "-s" statement, which may declare new
classes (here a namedtuple). We end up with bridges from the
inner loop; more and more of them every time we call inner().
diff --git a/lib-python/2.7/timeit.py b/lib-python/2.7/timeit.py
--- a/lib-python/2.7/timeit.py
+++ b/lib-python/2.7/timeit.py
@@ -55,11 +55,6 @@
import gc
import sys
import time
-try:
- import itertools
-except ImportError:
- # Must be an older Python version (see timeit() below)
- itertools = None
__all__ = ["Timer"]
@@ -81,7 +76,8 @@
def inner(_it, _timer):
%(setup)s
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
%(stmt)s
_t1 = _timer()
return _t1 - _t0
@@ -96,7 +92,8 @@
def inner(_it, _timer, _func=func):
setup()
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
_func()
_t1 = _timer()
return _t1 - _t0
@@ -133,9 +130,11 @@
else:
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
- code = compile(src, dummy_src_name, "exec")
- exec code in globals(), ns
- self.inner = ns["inner"]
+ def make_inner():
+ code = compile(src, dummy_src_name, "exec")
+ exec code in globals(), ns
+ return ns["inner"]
+ self.make_inner = make_inner
elif hasattr(stmt, '__call__'):
self.src = None
if isinstance(setup, basestring):
@@ -144,7 +143,8 @@
exec _setup in globals(), ns
elif not hasattr(setup, '__call__'):
raise ValueError("setup is neither a string nor callable")
- self.inner = _template_func(setup, stmt)
+ inner = _template_func(setup, stmt)
+ self.make_inner = lambda: inner
else:
raise ValueError("stmt is neither a string nor callable")
@@ -185,15 +185,12 @@
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
- if itertools:
- it = itertools.repeat(None, number)
- else:
- it = [None] * number
+ inner = self.make_inner()
gcold = gc.isenabled()
if '__pypy__' not in sys.builtin_module_names:
gc.disable() # only do that on CPython
try:
- timing = self.inner(it, self.timer)
+ timing = inner(number, self.timer)
finally:
if gcold:
gc.enable()
From noreply at buildbot.pypy.org Wed Jul 2 17:44:14 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 17:44:14 +0200 (CEST)
Subject: [pypy-commit] pypy default: Issue #1790: implement numpy.empty()
differently than numpy.zeros().
Message-ID: <20140702154414.A1AB81D34FF@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72316:4d1d1c2d78ae
Date: 2014-07-02 17:37 +0200
http://bitbucket.org/pypy/pypy/changeset/4d1d1c2d78ae/
Log: Issue #1790: implement numpy.empty() differently than numpy.zeros().
diff --git a/pypy/module/micronumpy/__init__.py b/pypy/module/micronumpy/__init__.py
--- a/pypy/module/micronumpy/__init__.py
+++ b/pypy/module/micronumpy/__init__.py
@@ -12,7 +12,7 @@
'scalar' : 'ctors.build_scalar',
'array': 'ctors.array',
'zeros': 'ctors.zeros',
- 'empty': 'ctors.zeros',
+ 'empty': 'ctors.empty',
'empty_like': 'ctors.empty_like',
'fromstring': 'ctors.fromstring',
'frombuffer': 'ctors.frombuffer',
diff --git a/pypy/module/micronumpy/base.py b/pypy/module/micronumpy/base.py
--- a/pypy/module/micronumpy/base.py
+++ b/pypy/module/micronumpy/base.py
@@ -28,12 +28,12 @@
self.implementation = implementation
@staticmethod
- def from_shape(space, shape, dtype, order='C', w_instance=None):
+ def from_shape(space, shape, dtype, order='C', w_instance=None, zero=True):
from pypy.module.micronumpy import concrete
from pypy.module.micronumpy.strides import calc_strides
strides, backstrides = calc_strides(shape, dtype.base, order)
impl = concrete.ConcreteArray(shape, dtype.base, order, strides,
- backstrides)
+ backstrides, zero=zero)
if w_instance:
return wrap_impl(space, space.type(w_instance), w_instance, impl)
return W_NDimArray(impl)
diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py
--- a/pypy/module/micronumpy/concrete.py
+++ b/pypy/module/micronumpy/concrete.py
@@ -369,9 +369,11 @@
class ConcreteArray(ConcreteArrayNotOwning):
- def __init__(self, shape, dtype, order, strides, backstrides, storage=lltype.nullptr(RAW_STORAGE)):
+ def __init__(self, shape, dtype, order, strides, backstrides,
+ storage=lltype.nullptr(RAW_STORAGE), zero=True):
if storage == lltype.nullptr(RAW_STORAGE):
- storage = dtype.itemtype.malloc(support.product(shape) * dtype.elsize)
+ storage = dtype.itemtype.malloc(support.product(shape) *
+ dtype.elsize, zero=zero)
ConcreteArrayNotOwning.__init__(self, shape, dtype, order, strides, backstrides,
storage)
diff --git a/pypy/module/micronumpy/ctors.py b/pypy/module/micronumpy/ctors.py
--- a/pypy/module/micronumpy/ctors.py
+++ b/pypy/module/micronumpy/ctors.py
@@ -91,13 +91,19 @@
return w_arr
-def zeros(space, w_shape, w_dtype=None, w_order=None):
+def _zeros_or_empty(space, w_shape, w_dtype, w_order, zero):
dtype = space.interp_w(descriptor.W_Dtype,
space.call_function(space.gettypefor(descriptor.W_Dtype), w_dtype))
if dtype.is_str_or_unicode() and dtype.elsize < 1:
dtype = descriptor.variable_dtype(space, dtype.char + '1')
shape = shape_converter(space, w_shape, dtype)
- return W_NDimArray.from_shape(space, shape, dtype=dtype)
+ return W_NDimArray.from_shape(space, shape, dtype=dtype, zero=zero)
+
+def empty(space, w_shape, w_dtype=None, w_order=None):
+ return _zeros_or_empty(space, w_shape, w_dtype, w_order, zero=False)
+
+def zeros(space, w_shape, w_dtype=None, w_order=None):
+ return _zeros_or_empty(space, w_shape, w_dtype, w_order, zero=True)
@unwrap_spec(subok=bool)
@@ -111,7 +117,8 @@
if dtype.is_str_or_unicode() and dtype.elsize < 1:
dtype = descriptor.variable_dtype(space, dtype.char + '1')
return W_NDimArray.from_shape(space, w_a.get_shape(), dtype=dtype,
- w_instance=w_a if subok else None)
+ w_instance=w_a if subok else None,
+ zero=False)
def _fromstring_text(space, s, count, sep, length, dtype):
diff --git a/pypy/module/micronumpy/test/test_arrayops.py b/pypy/module/micronumpy/test/test_arrayops.py
--- a/pypy/module/micronumpy/test/test_arrayops.py
+++ b/pypy/module/micronumpy/test/test_arrayops.py
@@ -2,6 +2,20 @@
class AppTestNumSupport(BaseNumpyAppTest):
+ def test_zeros(self):
+ from numpypy import zeros, empty
+ a = zeros(3)
+ assert len(a) == 3
+ assert a[0] == a[1] == a[2] == 0
+ a = empty(1000)
+ assert len(a) == 1000
+ for i in range(1000):
+ if a[i] != 0:
+ break
+ else:
+ raise AssertionError(
+ "empty() returned a zeroed out array of length 1000 (unlikely)")
+
def test_where(self):
from numpypy import where, ones, zeros, array
a = [1, 2, 3, 0, -3]
diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py
--- a/pypy/module/micronumpy/test/test_ndarray.py
+++ b/pypy/module/micronumpy/test/test_ndarray.py
@@ -11,7 +11,7 @@
class MockDtype(object):
class itemtype(object):
@staticmethod
- def malloc(size):
+ def malloc(size, zero=True):
return None
def __init__(self):
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -117,8 +117,11 @@
def __repr__(self):
return self.__class__.__name__
- def malloc(self, size):
- return alloc_raw_storage(size, track_allocation=False, zero=True)
+ def malloc(self, size, zero=True):
+ if zero:
+ return alloc_raw_storage(size, track_allocation=False, zero=True)
+ else:
+ return alloc_raw_storage(size, track_allocation=False, zero=False)
class Primitive(object):
_mixin_ = True
From noreply at buildbot.pypy.org Wed Jul 2 17:52:50 2014
From: noreply at buildbot.pypy.org (Hubert Hesse)
Date: Wed, 2 Jul 2014 17:52:50 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk 64bit-c2: Float asString produced
wrong results because image instances were read wrongly
Message-ID: <20140702155250.9CEDF1D34FF@cobra.cs.uni-duesseldorf.de>
Author: Hubert Hesse
Branch: 64bit-c2
Changeset: r847:54b5ca0cfbd4
Date: 2014-06-12 20:14 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/54b5ca0cfbd4/
Log: Float asString produced wrong results because image instances were
read wrongly
diff --git a/spyvm/squeakimage.py b/spyvm/squeakimage.py
--- a/spyvm/squeakimage.py
+++ b/spyvm/squeakimage.py
@@ -561,8 +561,9 @@
return bytes[:stop] # omit odd bytes
def get_ruints(self, required_len=-1):
- from rpython.rlib.rarithmetic import r_uint
- words = [r_uint(x) for x in self.chunk.data]
+ from rpython.rlib.rarithmetic import r_uint32
+ # XXX: Fix for 64bit image support
+ words = [r_uint32(x) for x in self.chunk.data]
if required_len != -1 and len(words) != required_len:
raise CorruptImageError("Expected %d words, got %d" % (required_len, len(words)))
return words
From noreply at buildbot.pypy.org Wed Jul 2 17:52:51 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 2 Jul 2014 17:52:51 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk vref: commit first translating version
where all senders are vrefs
Message-ID: <20140702155251.DF91D1D34FF@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: vref
Changeset: r848:a0a057d2e444
Date: 2014-07-02 13:05 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/a0a057d2e444/
Log: commit first translating version where all senders are vrefs
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -86,6 +86,7 @@
s_new_context = self.c_loop(s_new_context)
except StackOverflow, e:
s_new_context = e.s_context
+ s_new_context.unvirtualize_sender()
except Return, nlr:
s_new_context = s_sender
while s_new_context is not nlr.s_target_context:
@@ -98,9 +99,11 @@
except ProcessSwitch, p:
if self.trace:
print "====== Switch from: %s to: %s ======" % (s_new_context.short_str(), p.s_new_context.short_str())
+ s_new_context.unvirtualize_sender()
s_new_context = p.s_new_context
def c_loop(self, s_context, may_context_switch=True):
+ assert isinstance(s_context, ContextPartShadow)
old_pc = 0
if not jit.we_are_jitted() and may_context_switch:
self.quick_check_for_interrupt(s_context)
diff --git a/spyvm/model.py b/spyvm/model.py
--- a/spyvm/model.py
+++ b/spyvm/model.py
@@ -203,7 +203,7 @@
return r_uint(val)
- @jit.elidable
+ # @jit.elidable
def as_repr_string(self):
return "W_SmallInteger(%d)" % self.value
@@ -457,7 +457,7 @@
name = self.s_class.name
return "a %s" % (name or '?',)
- @jit.elidable
+ # @jit.elidable
def as_repr_string(self):
return self.as_embellished_string("W_O /w Class", "")
@@ -491,7 +491,7 @@
class W_AbstractPointersObject(W_AbstractObjectWithClassReference):
"""Common object."""
_attrs_ = ['shadow']
-
+
def changed(self):
# This is invoked when an instance-variable is changed.
# Kept here in case it might be usefull in the future.
@@ -550,7 +550,7 @@
def _get_shadow(self):
return self.shadow
-
+
@objectmodel.specialize.arg(2)
def attach_shadow_of_class(self, space, TheClass):
shadow = TheClass(space, self)
@@ -632,7 +632,7 @@
w_other.changed()
return True
- @jit.elidable
+ # @jit.elidable
def as_repr_string(self):
return W_AbstractObjectWithClassReference.as_embellished_string(self,
className='W_PointersObject',
@@ -651,11 +651,11 @@
self.fieldtypes = fieldtypes_of_length(self.s_class, size)
for i in range(size): # do it by hand for the JIT's sake
vars[i] = w_nil
-
+
def set_vars(self, new_vars):
self._vars = new_vars
make_sure_not_resized(self._vars)
-
+
def fillin(self, space, g_self):
W_AbstractPointersObject.fillin(self, space, g_self)
from spyvm.fieldtypes import fieldtypes_of
@@ -1013,7 +1013,7 @@
_immutable_fields_ = ['_realsize', 'display', '_depth', '_real_depth_buffer']
pixelbuffer = None
-
+
@staticmethod
def create(space, w_class, size, depth, display):
if depth < 8:
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1315,7 +1315,7 @@
# Set some fields
s_block_ctx.store_pc(s_block_ctx.initialip())
try:
- s_block_ctx.store_s_sender(s_frame)
+ s_block_ctx.store_s_sender(virtual=jit.virtual_ref(s_frame))
except SenderChainManipulation, e:
assert e.s_context == s_block_ctx
return s_block_ctx
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -34,7 +34,7 @@
import_from_mixin(version.VersionMixin)
version = None
-
+
def __init__(self, space, w_self):
AbstractShadow.__init__(self, space, w_self)
self.changed()
@@ -78,7 +78,7 @@
_attrs_ = ["name", "_instance_size", "instance_varsized", "instance_kind",
"_s_methoddict", "_s_superclass", "subclass_s"]
-
+
def __init__(self, space, w_self):
# fields added here should also be in objspace.py:56ff, 300ff
self.name = ''
@@ -449,17 +449,19 @@
class ContextPartShadow(AbstractRedirectingShadow):
__metaclass__ = extendabletype
- _attrs_ = ['_s_sender', '_pc', '_temps_and_stack',
+ _attrs_ = ['_direct_s_sender', '_virtual_s_sender', '_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
_virtualizable_ = [
- "_s_sender", "_pc",
+ "_virtual_s_sender", "_direct_s_sender",
+ "_pc",
"_temps_and_stack[*]", "_stack_ptr",
"_w_self", "_w_self_size"
]
def __init__(self, space, w_self):
- self._s_sender = None
+ self._virtual_s_sender = jit.vref_None
+ self._direct_s_sender = None
AbstractRedirectingShadow.__init__(self, space, w_self)
self.instances_w = {}
@@ -541,25 +543,35 @@
" Return self of the method, or the method that contains the block "
return self.s_home().w_receiver()
- def store_s_sender(self, s_sender):
- assert s_sender is None or isinstance(s_sender, ContextPartShadow)
- self._s_sender = s_sender
- raise error.SenderChainManipulation(self)
+ def store_s_sender(self, direct=None, virtual=jit.vref_None, raiseError=True):
+ assert direct is None or virtual is jit.vref_None # can only set one or the other
+ if self._virtual_s_sender is not jit.vref_None and virtual is jit.vref_None:
+ # if we have a vref but we're removing it...
+ sender = self._virtual_s_sender()
+ jit.virtual_ref_finish(self._virtual_s_sender, sender)
+ self._virtual_s_sender = virtual
+ self._direct_s_sender = direct
+ if raiseError:
+ raise error.SenderChainManipulation(self)
def store_w_sender(self, w_sender):
assert isinstance(w_sender, model.W_PointersObject)
if w_sender.is_same_object(self.space.w_nil):
- self._s_sender = None
+ self.store_s_sender(raiseError=False)
else:
- self.store_s_sender(w_sender.as_context_get_shadow(self.space))
+ self.store_s_sender(direct=w_sender.as_context_get_shadow(self.space))
def w_sender(self):
- if self._s_sender is None:
+ sender = self.s_sender()
+ if sender is None:
return self.space.w_nil
- return self._s_sender.w_self()
+ return sender.w_self()
def s_sender(self):
- return self._s_sender
+ if self._direct_s_sender:
+ return self._direct_s_sender
+ else:
+ return self._virtual_s_sender()
def store_unwrap_pc(self, w_pc):
if w_pc.is_same_object(self.space.w_nil):
@@ -592,10 +604,16 @@
def mark_returned(self):
self.store_pc(-1)
try:
- self.store_s_sender(None)
+ self.store_s_sender()
except error.SenderChainManipulation, e:
assert self == e.s_context
+ def unvirtualize_sender(self):
+ sender = self.s_sender()
+ self.store_s_sender(direct=sender, raiseError=False)
+ if sender:
+ sender.unvirtualize_sender()
+
def is_returned(self):
return self.pc() == -1 and self.w_sender is self.space.w_nil
@@ -879,7 +897,7 @@
s_new_context.store_w_method(s_method.w_self())
if s_sender:
try:
- s_new_context.store_s_sender(s_sender)
+ s_new_context.store_s_sender(virtual=jit.virtual_ref(s_sender))
except error.SenderChainManipulation, e:
assert s_new_context == e.s_context
s_new_context.store_w_receiver(w_receiver)
From noreply at buildbot.pypy.org Wed Jul 2 17:52:53 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 2 Jul 2014 17:52:53 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk vref: virtualize sender only around
c_loop
Message-ID: <20140702155253.00D1E1D34FF@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: vref
Changeset: r849:2f3aaab262c5
Date: 2014-07-02 17:48 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/2f3aaab262c5/
Log: virtualize sender only around c_loop
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -86,7 +86,6 @@
s_new_context = self.c_loop(s_new_context)
except StackOverflow, e:
s_new_context = e.s_context
- s_new_context.unvirtualize_sender()
except Return, nlr:
s_new_context = s_sender
while s_new_context is not nlr.s_target_context:
@@ -99,15 +98,28 @@
except ProcessSwitch, p:
if self.trace:
print "====== Switch from: %s to: %s ======" % (s_new_context.short_str(), p.s_new_context.short_str())
- s_new_context.unvirtualize_sender()
s_new_context = p.s_new_context
def c_loop(self, s_context, may_context_switch=True):
+ s_sender = s_context.s_sender()
+ s_sender_ref = jit.vref_None
+ if s_sender:
+ s_sender_ref = jit.virtual_ref(s_sender)
+ s_context.store_s_sender(virtual=s_sender_ref, raiseError=False)
+ try:
+ self._c_loop_virtual(s_context, may_context_switch=may_context_switch)
+ finally:
+ if s_sender:
+ jit.virtual_ref_finish(s_sender_ref, s_sender)
+ s_context.restore_s_sender(s_sender)
+
+ def _c_loop_virtual(self, s_context, may_context_switch=True):
assert isinstance(s_context, ContextPartShadow)
old_pc = 0
if not jit.we_are_jitted() and may_context_switch:
self.quick_check_for_interrupt(s_context)
method = s_context.s_method()
+
while True:
pc = s_context.pc()
if pc < old_pc:
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1315,7 +1315,7 @@
# Set some fields
s_block_ctx.store_pc(s_block_ctx.initialip())
try:
- s_block_ctx.store_s_sender(virtual=jit.virtual_ref(s_frame))
+ s_block_ctx.store_s_sender(direct=s_frame)
except SenderChainManipulation, e:
assert e.s_context == s_block_ctx
return s_block_ctx
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -543,12 +543,14 @@
" Return self of the method, or the method that contains the block "
return self.s_home().w_receiver()
+ def restore_s_sender(self, s_direct):
+ if self._virtual_s_sender is not jit.vref_None:
+ # virtual sender wasn't already cleared by e.g. mark_returned
+ self._virtual_s_sender = jit.vref_None
+ self._direct_s_sender = s_direct
+
def store_s_sender(self, direct=None, virtual=jit.vref_None, raiseError=True):
assert direct is None or virtual is jit.vref_None # can only set one or the other
- if self._virtual_s_sender is not jit.vref_None and virtual is jit.vref_None:
- # if we have a vref but we're removing it...
- sender = self._virtual_s_sender()
- jit.virtual_ref_finish(self._virtual_s_sender, sender)
self._virtual_s_sender = virtual
self._direct_s_sender = direct
if raiseError:
@@ -608,12 +610,6 @@
except error.SenderChainManipulation, e:
assert self == e.s_context
- def unvirtualize_sender(self):
- sender = self.s_sender()
- self.store_s_sender(direct=sender, raiseError=False)
- if sender:
- sender.unvirtualize_sender()
-
def is_returned(self):
return self.pc() == -1 and self.w_sender is self.space.w_nil
@@ -897,7 +893,7 @@
s_new_context.store_w_method(s_method.w_self())
if s_sender:
try:
- s_new_context.store_s_sender(virtual=jit.virtual_ref(s_sender))
+ s_new_context.store_s_sender(direct=s_sender)
except error.SenderChainManipulation, e:
assert s_new_context == e.s_context
s_new_context.store_w_receiver(w_receiver)
From noreply at buildbot.pypy.org Wed Jul 2 17:53:08 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 2 Jul 2014 17:53:08 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk vref: stub bitblt primitives so they
don't error
Message-ID: <20140702155308.5310A1D34FF@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: vref
Changeset: r850:f5a42c0946bf
Date: 2014-07-02 17:49 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/f5a42c0946bf/
Log: stub bitblt primitives so they don't error
diff too long, truncating to 2000 out of 351216 lines
diff --git a/images/Squeak4.5-noBitBlt.changes b/images/Squeak4.5-noBitBlt.changes
old mode 100644
new mode 100755
--- a/images/Squeak4.5-noBitBlt.changes
+++ b/images/Squeak4.5-noBitBlt.changes
@@ -36,4 +36,144 @@
Workspace allInstances do: [:w | w topView delete].
ReleaseBuilderFor4dot4 prepareNewBuild.
Smalltalk snapshot: true andQuit: true.
-!
----End fileIn of a stream----!
----SNAPSHOT----{31 March 2013 . 3:27:34 pm} Squeak4.5-12327.image priorSource: 7430688!
!Installer methodsFor: 'squeakmap' stamp: 'fbs 1/28/2013 19:25' prior: 57597950!
packageAndVersionFrom: pkg
| p |
p := ReadStream on: pkg .
^{(p upTo: $(). p upTo: $)} collect: [:s | s withBlanksTrimmed].! !
"Installer-Core"!
!Categorizer methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 16:58'!
scanFrom: aStream environment: anEnvironment
^ self scanFrom: aStream! !
!ClassCategoryReader methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 17:21'!
scanFrom: aStream environment: anEnvironment
"File in methods from the stream, aStream."
| methodText |
[methodText := aStream nextChunkText.
methodText size > 0] whileTrue:
[class
compile: methodText
environment: anEnvironment
classified: category
withStamp: changeStamp
notifying: nil]! !
!ClassCommentReader methodsFor: 'as yet unclassified' stamp: 'cwp 6/20/2012 17:22'!
scanFrom: aStream environment: anEnvironment
^ self scanFrom: aStream! !
!Metaclass methodsFor: 'compiling' stamp: 'cwp 6/20/2012 17:29'!
bindingOf: varName environment: anEnvironment
^ thisClass classBindingOf: varName environment: anEnvironment! !
!LargePositiveInteger methodsFor: 'arithmetic' stamp: 'nice 12/30/2012 20:03' prior: 22505876!
\\ aNumber
"Primitive. Take the receiver modulo the argument. The result is the
remainder rounded towards negative infinity, of the receiver divided
by the argument. Fail if the argument is 0. Fail if either the argument
or the result is not a SmallInteger or a LargePositiveInteger less than
2-to-the-30th (1073741824). Optional. See Object documentation whatIsAPrimitive."
aNumber isInteger
ifTrue:
[| neg qr q r |
neg := self negative == aNumber negative == false.
qr := (self digitDiv:
(aNumber class == SmallInteger
ifTrue: [aNumber abs]
ifFalse: [aNumber])
neg: neg).
q := qr first normalize.
r := qr last normalize.
^(q negative
ifTrue: [r isZero not]
ifFalse: [q isZero and: [neg]])
ifTrue: [r + aNumber]
ifFalse: [r]].
^super \\ aNumber
! !
!LargePositiveInteger methodsFor: 'converting' stamp: 'nice 1/27/2012 22:41' prior: 37616324!
asFloat
"Answer a Float that best approximates the value of the receiver.
This algorithm is optimized to process only the significant digits of a LargeInteger.
And it does honour IEEE 754 round to nearest even mode in case of excess precision (see details below)."
"How numbers are rounded in IEEE 754 default rounding mode:
A shift is applied so that the highest 53 bits are placed before the floating point to form a mantissa.
The trailing bits form the fraction part placed after the floating point.
This fractional number must be rounded to the nearest integer.
If fraction part is 2r0.1, exactly between two consecutive integers, there is a tie.
The nearest even integer is chosen in this case.
Examples (First 52bits of mantissa are omitted for brevity):
2r0.00001 is rounded downward to 2r0
2r1.00001 is rounded downward to 2r1
2r0.1 is a tie and rounded to 2r0 (nearest even)
2r1.1 is a tie and rounded to 2r10 (nearest even)
2r0.10001 is rounded upward to 2r1
2r1.10001 is rounded upward to 2r10
Thus, if the next bit after floating point is 0, the mantissa is left unchanged.
If next bit after floating point is 1, an odd mantissa is always rounded upper.
An even mantissa is rounded upper only if the fraction part is not a tie."
	"Algorithm details:
The floating point hardware can perform the rounding correctly with several excess bits as long as there is a single inexact operation.
This can be obtained by splitting the mantissa plus excess bits in two part with less bits than Float precision.
Note 1: the inexact flag in floating point hardware must not be trusted because in some cases the operations would be exact but would not take into account some bits that were truncated before the Floating point operations.
Note 2: the floating point hardware is presumed configured in default rounding mode."
| mantissa shift excess result n |
"Check how many bits excess the maximum precision of a Float mantissa."
excess := self highBitOfMagnitude - Float precision.
excess > 7
ifTrue:
["Remove the excess bits but seven."
mantissa := self bitShiftMagnitude: 7 - excess.
shift := excess - 7.
"An even mantissa with a single excess bit immediately following would be truncated.
But this would not be correct if above shift has truncated some extra bits.
Check this case, and round excess bits upper manually."
((mantissa digitAt: 1) = 2r01000000 and: [self anyBitOfMagnitudeFrom: 1 to: shift])
ifTrue: [mantissa := mantissa + 1]]
ifFalse:
[mantissa := self.
shift := 0].
"There will be a single inexact round off at last iteration"
result := (mantissa digitAt: (n := mantissa digitLength)) asFloat.
[(n := n - 1) > 0] whileTrue: [
result := 256.0 * result + (mantissa digitAt: n) asFloat].
^result timesTwoPower: shift.! !
!LargePositiveInteger methodsFor: 'private' stamp: 'nice 12/30/2012 14:25'!
primitiveQuo: anInteger
"Primitive. Divide the receiver by the argument and return the result.
Round the result down towards zero to make it a whole integer. Fail if
the argument is 0. Fail if either the argument or the result is not a
SmallInteger or a LargePositiveInteger less than 2-to-the-30th (1073741824). Optional. See
Object documentation whatIsAPrimitive."
^nil! !
!LargePositiveInteger methodsFor: 'arithmetic' stamp: 'nice 12/30/2012 14:34'!
rem: aNumber
"Remainder defined in terms of quo:. See super rem:.
This is defined only to speed up case of very large integers."
(self primitiveQuo: aNumber)
ifNotNil: [:quo | ^self - (quo * aNumber)].
aNumber isInteger
ifTrue:
[| ng rem |
ng := self negative == aNumber negative == false.
rem := (self digitDiv:
(aNumber class == SmallInteger
ifTrue: [aNumber abs]
ifFalse: [aNumber])
neg: ng) at: 2.
^ rem normalize].
^super rem: aNumber! !
!LargeNegativeInteger methodsFor: 'converting' stamp: 'nice 1/1/2013 15:42' prior: 37616204!
asFloat
^super asFloat negated! !
!UndefinedObject methodsFor: 'class hierarchy' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
^ scannedLiteral! !
!Behavior methodsFor: 'testing method dictionary' stamp: 'cwp 6/20/2012 17:32'!
bindingOf: varName environment: anEnvironment
^superclass bindingOf: varName environment: anEnvironment! !
!Behavior methodsFor: 'testing method dictionary' stamp: 'cwp 6/20/2012 17:30'!
classBindingOf: varName environment: anEnvironment
^self bindingOf: varName environment: anEnvironment! !
!Behavior methodsFor: 'printing' stamp: 'cwp 6/22/2012 15:37'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
"Postprocesses a literal scanned by Scanner scanToken (esp. xLitQuote).
If scannedLiteral is not an association, answer it.
Else, if it is of the form:
nil->#NameOfMetaclass
answer nil->theMetaclass, if any has that name, else report an error.
Else, if it is of the form:
		#NameOfGlobalVariable->anything
	answer the global, class, or pool association with that name, if any, else
	add it to Undeclared and answer the new Association."
| key value |
(scannedLiteral isVariableBinding)
ifFalse: [^ scannedLiteral].
key := scannedLiteral key.
value := scannedLiteral value.
key ifNil: "###"
[(self bindingOf: value environment: anEnvironment) ifNotNil:
[:assoc|
(assoc value isKindOf: Behavior) ifTrue:
[^ nil->assoc value class]].
requestor notify: 'No such metaclass'.
^false].
(key isSymbol) ifTrue: "##"
[(self bindingOf: key environment: anEnvironment) ifNotNil:
[:assoc | ^assoc].
^ anEnvironment undeclared: key].
requestor notify: '## must be followed by a non-local variable name'.
^false
" Form literalScannedAs: 14 notifying: nil 14
	Form literalScannedAs: #OneBitForm notifying: nil  OneBitForm
Form literalScannedAs: ##OneBitForm notifying: nil OneBitForm->a Form
Form literalScannedAs: ##Form notifying: nil Form->Form
	Form literalScannedAs: ###Form notifying: nil   nil->Form class
"! !
!Fraction methodsFor: 'converting' stamp: 'nice 11/21/2011 22:34' prior: 37619655!
asFloat
"Answer a Float that closely approximates the value of the receiver.
This implementation will answer the closest floating point number to the receiver.
In case of a tie, it will use the IEEE 754 round to nearest even mode.
In case of overflow, it will answer +/- Float infinity."
| a b mantissa exponent hasTruncatedBits lostBit n ha hb hm |
a := numerator abs.
b := denominator. "denominator is always positive"
ha := a highBitOfMagnitude.
hb := b highBitOfMagnitude.
"Number of bits to keep in mantissa plus one to handle rounding."
n := 1 + Float precision.
"If both numerator and denominator are represented exactly in floating point number,
then fastest thing to do is to use hardwired float division."
(ha < n and: [hb < n]) ifTrue: [^numerator asFloat / denominator asFloat].
"Shift the fraction by a power of two exponent so as to obtain a mantissa with n bits.
First guess is rough, the mantissa might have n+1 bits."
exponent := ha - hb - n.
exponent >= 0
ifTrue: [b := b bitShift: exponent]
ifFalse: [a := a bitShift: exponent negated].
mantissa := a quo: b.
hasTruncatedBits := a > (mantissa * b).
hm := mantissa highBit.
	"Check for gradual underflow, in which case the mantissa will lose bits.
Keep at least one bit to let underflow preserve the sign of zero."
lostBit := Float emin - (exponent + hm - 1).
lostBit > 0 ifTrue: [n := n - lostBit max: 1].
"Remove excess bits in the mantissa."
hm > n
ifTrue:
[exponent := exponent + hm - n.
hasTruncatedBits := hasTruncatedBits or: [mantissa anyBitOfMagnitudeFrom: 1 to: hm - n].
mantissa := mantissa bitShift: n - hm].
"Check if mantissa must be rounded upward.
The case of tie (mantissa odd & hasTruncatedBits not)
will be handled by Integer>>asFloat."
(hasTruncatedBits and: [mantissa odd])
ifTrue: [mantissa := mantissa + 1].
^ (self positive
ifTrue: [mantissa asFloat]
ifFalse: [mantissa asFloat negated])
timesTwoPower: exponent! !
!Float methodsFor: 'arithmetic' stamp: 'nice 12/20/2012 23:16' prior: 20878776!
negated
"Answer a Number that is the negation of the receiver.
Implementation note: this version cares of negativeZero."
^-1.0 * self! !
!ClassDescription methodsFor: 'compiling' stamp: 'cwp 6/20/2012 17:21'!
compile: text environment: anEnvironment classified: category withStamp: changeStamp notifying: requestor
^ self
compile: text
environment: anEnvironment
classified: category
withStamp: changeStamp
notifying: requestor
logSource: self acceptsLoggingOfCompilation! !
!ClassDescription methodsFor: 'compiling' stamp: 'cwp 12/27/2012 13:17'!
compile: text environment: anEnvironment classified: category withStamp: changeStamp notifying: requestor logSource: logSource
| methodAndNode context methodNode |
context := CompilationCue
source: text
class: self
environment: anEnvironment
category: category
requestor: requestor.
methodNode := self newCompiler compile: context ifFail: [^ nil].
methodAndNode := CompiledMethodWithNode
generateMethodFromNode: methodNode
trailer: self defaultMethodTrailer.
logSource ifTrue: [
self logMethodSource: text forMethodWithNode: methodAndNode
inCategory: category withStamp: changeStamp notifying: requestor.
].
self addAndClassifySelector: methodAndNode selector withMethod: methodAndNode
method inProtocol: category notifying: requestor.
self instanceSide noteCompilationOf: methodAndNode selector meta: self isClassSide.
^ methodAndNode selector! !
!Class methodsFor: 'compiling' stamp: 'cwp 6/20/2012 09:47'!
bindingOf: varName environment: anEnvironment
"Answer the binding of some variable resolved in the scope of the receiver"
| aSymbol binding |
aSymbol := varName asSymbol.
"First look in classVar dictionary."
binding := self classPool bindingOf: aSymbol.
binding ifNotNil:[^binding].
"Next look in shared pools."
self sharedPools do:[:pool |
binding := pool bindingOf: aSymbol.
binding ifNotNil:[^binding].
].
"Next look in declared environment."
binding := anEnvironment bindingOf: aSymbol.
binding ifNotNil:[^binding].
"Finally look higher up the superclass chain and fail at the end."
superclass == nil
ifTrue: [^ nil]
ifFalse: [^ superclass bindingOf: aSymbol].
! !
"Kernel"!
ParseNode subclass: #Encoder
instanceVariableNames: 'scopeTable nTemps supered requestor class selector literalStream selectorSet litIndSet litSet sourceRanges globalSourceRanges addedSelectorAndMethodClassLiterals optimizedSelectors cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Encoder commentStamp: 'cwp 12/26/2012 23:29' prior: 36323851!
I encode names and literals into tree nodes with byte codes for the compiler. Byte codes for literals are not assigned until the tree-sizing pass of the compiler, because only then is it known which literals are actually needed. I also keep track of sourceCode ranges during parsing and code generation so I can provide an inverse map for the debugger.!
Scanner subclass: #Parser
instanceVariableNames: 'here hereType hereMark hereEnd prevMark prevEnd encoder requestor parseNode failBlock requestorOffset tempsMark doitFlag properties category queriedUnusedTemporaries cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Parser commentStamp: 'cwp 12/26/2012 23:34' prior: 38557958!
I parse Smalltalk syntax and create a MethodNode that is the root of the parse tree. I look one token ahead.!
Object subclass: #CompilationCue
instanceVariableNames: 'source context receiver class environment category requestor'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
Object subclass: #Compiler
instanceVariableNames: 'sourceStream requestor class category context parser cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Compiler commentStamp: 'cwp 12/26/2012 23:17' prior: 59257505!
The compiler accepts Smalltalk source code and compiles it with respect to a given class. The user of the compiler supplies a context so that temporary variables are accessible during compilation. If there is an error, a requestor (usually a kind of StringHolderController) is sent the message notify:at:in: so that the error message can be displayed. If there is no error, then the result of compilation is a MethodNode, which is the root of a parse tree whose nodes are kinds of ParseNodes. The parse tree can be sent messages to (1) generate code for a CompiledMethod (this is done for compiling methods or evaluating expressions); (2) pretty-print the code (for formatting); or (3) produce a map from object code back to source code (used by debugger program-counter selection). See also Parser, Encoder, ParseNode.!
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/26/2012 23:34'!
init: aCue notifying: anObject
"The use of the variable requestor is a bit confusing here. This is
*not* the original requestor, which is available through the cue.
It's the Parser instance that is using the encoder."
self setCue: aCue.
requestor := anObject.
nTemps := 0.
supered := false.
self initScopeAndLiteralTables.
cue getClass variablesAndOffsetsDo:
[:variable "" :offset "" |
offset isNil
ifTrue: [scopeTable at: variable name put: (FieldNode new fieldDefinition: variable)]
ifFalse: [scopeTable
at: variable
put: (offset >= 0
ifTrue: [InstanceVariableNode new
name: variable index: offset]
ifFalse: [MaybeContextInstanceVariableNode new
name: variable index: offset negated])]].
cue context ~~ nil ifTrue:
[| homeNode |
homeNode := self bindTemp: self doItInContextName.
"0th temp = aContext passed as arg"
cue context tempNames withIndexDo:
[:variable :index|
scopeTable
at: variable
put: (MessageAsTempNode new
receiver: homeNode
selector: #namedTempAt:
arguments: (Array with: (self encodeLiteral: index))
precedence: 3
from: self)]].
sourceRanges := Dictionary new: 32.
globalSourceRanges := OrderedCollection new: 32
! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/26/2012 23:30'!
setCue: aCue
"Adopt aCue as the source of compilation parameters."
cue := aCue.
"Also set legacy instance variables for methods that
don't use cue yet"
class := cue getClass.! !
!Dictionary methodsFor: '*Compiler' stamp: 'cwp 6/22/2012 09:17'!
bindingOf: varName ifAbsent: aBlock
"Answer the association for varName, or the value of aBlock when absent."
^self associationAt: varName ifAbsent: aBlock! !
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:37'!
init: sourceStream cue: aCue failBlock: aBlock
"Prepare to parse sourceStream: adopt the cue, remember the fail block,
start the scanner on the stream and advance to the first token."
self setCue: aCue.
failBlock := aBlock.
requestorOffset := 0.
super scan: sourceStream.
prevMark := hereMark := mark.
self advance
! !
!Parser methodsFor: 'public access' stamp: 'cwp 12/26/2012 23:41'!
parse: sourceStream cue: aCue noPattern: noPattern ifFail: aBlock
"Answer a MethodNode for the argument, sourceStream, that is the root of
a parse tree. Parsing is done with respect to the CompilationCue to
resolve variables. Errors in parsing are reported to the cue's requestor;
otherwise aBlock is evaluated. The argument noPattern is a Boolean that is
true if the sourceStream does not contain a method header (i.e., for DoIts)."
| methNode repeatNeeded myStream s p subSelection |
myStream := sourceStream.
[repeatNeeded := false.
p := myStream position.
s := myStream upToEnd.
myStream position: p.
subSelection := aCue requestor notNil and: [aCue requestor selectionInterval = (p + 1 to: p + s size)].
self encoder init: aCue notifying: self.
self init: myStream cue: aCue failBlock: [^ aBlock value].
doitFlag := noPattern.
failBlock:= aBlock.
"Interactive correction may edit the source; that raises
ReparseAfterSourceEditing, and the whole parse is retried on the
edited requestor text (restricted to the selection when appropriate)."
[methNode := self method: noPattern context: cue context]
on: ReparseAfterSourceEditing
do: [ :ex |
repeatNeeded := true.
myStream := subSelection
ifTrue:
[ReadStream
on: cue requestor text string
from: cue requestor selectionInterval first
to: cue requestor selectionInterval last]
ifFalse:
[ReadStream on: cue requestor text string]].
repeatNeeded] whileTrue:
[encoder := self encoder class new].
methNode sourceText: s.
^methNode
! !
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:35'!
setCue: aCue
"Adopt aCue as the source of compilation parameters."
cue := aCue.
"Also set legacy variables for methods that don't use cue yet."
requestor := cue requestor.
category := cue category.! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
class: aClass
"Answer a cue for compiling in aClass with no context and no requestor."
^ self
context: nil
class: aClass
requestor: nil! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
context: aContext class: aClass requestor: anObject
"Answer a cue for aClass/aContext; the environment defaults to the
class's own environment when aClass is not nil."
^ self
source: nil
context: aContext
receiver: nil
class: aClass
environment: (aClass ifNotNil: [aClass environment])
category: nil
requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:16'!
source: aTextOrStream class: aClass environment: anEnvironment category: aString requestor: anObject
"Answer a cue for compiling aTextOrStream in aClass within anEnvironment,
with no context and no receiver."
^ self
source: aTextOrStream
context: nil
receiver: nil
class: aClass
environment: anEnvironment
category: aString
requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
source: aTextOrStream context: aContext class: aClass category: aString requestor: anObject
"Answer a cue whose receiver is taken from aContext (when present) and
whose environment defaults to aClass's environment."
^ self
source: aTextOrStream
context: aContext
receiver: (aContext ifNotNil: [aContext receiver])
class: aClass
environment: (aClass ifNotNil: [aClass environment])
category: aString
requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:54'!
source: aTextOrStream context: aContext class: aClass requestor: anObject
"Answer a cue with no category; delegates to the fuller constructor."
^ self
source: aTextOrStream
context: aContext
class: aClass
category: nil
requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:55'!
source: aTextOrStream context: aContext receiver: recObject class: aClass environment: anEnvironment category: aString requestor: reqObject
"Full constructor: every other instance-creation method funnels here."
^ self basicNew
initializeWithSource: aTextOrStream
context: aContext
receiver: recObject
class: aClass
environment: anEnvironment
category: aString
requestor: reqObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:16'!
source: aString environment: anEnvironment
"Answer a cue for evaluating aString in anEnvironment; UndefinedObject
stands in as the compilation class for plain expressions."
^ self
source: aString
context: nil
receiver: nil
class: UndefinedObject
environment: anEnvironment
category: nil
requestor: nil! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:54'!
source: aTextOrStream requestor: anObject
"Answer a cue with only source and requestor; class and context are nil."
^ self
source: aTextOrStream
context: nil
class: nil
requestor: anObject! !
!CompilationCue methodsFor: 'binding' stamp: 'cwp 6/20/2012 09:39'!
bindingOf: aSymbol
"Resolve aSymbol through the compilation class within my environment."
^ class bindingOf: aSymbol environment: environment! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
category
"Answer the method category supplied when this cue was created."
^ category! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 12/26/2012 23:19'!
context
"Answer the context (possibly nil) whose temps are visible to the compile."
^ context! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
environment
"Answer the environment in which globals are resolved."
^ environment! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:16'!
getClass
"Answer the class compiled against (named getClass because class is taken)."
^ class! !
!CompilationCue methodsFor: 'initialization' stamp: 'cwp 12/26/2012 23:16'!
initializeWithSource: aTextOrString context: aContext receiver: recObject class: aClass environment: anEnvironment category: aString requestor: reqObject
"Set every slot of the cue; a stream source is flattened to its contents
so the cue can hand out fresh read streams later."
self initialize.
source := aTextOrString isStream ifTrue: [aTextOrString contents] ifFalse: [aTextOrString].
context := aContext.
receiver := recObject.
class := aClass.
environment := anEnvironment.
category := aString.
requestor := reqObject! !
!CompilationCue methodsFor: 'binding' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: anObject notifying: anEncoder
"Let the compilation class post-process a scanned literal in my environment."
^ class literalScannedAs: anObject environment: environment notifying: anEncoder! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
receiver
"Answer the receiver an evaluated expression is run against."
^ receiver! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:16'!
requestor
"Answer the object (possibly nil) notified of compilation errors."
^ requestor! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
source
"Answer the source text held by this cue."
^ source! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:44'!
sourceStream
"Answer a fresh read stream over the source text."
^ source readStream! !
!Compiler class methodsFor: 'evaluating' stamp: 'cwp 6/20/2012 17:25'!
evaluate: aString environment: anEnvironment
"Evaluate aString in anEnvironment without logging the change."
^ self
evaluate: aString
environment: anEnvironment
logged: false! !
!Compiler class methodsFor: 'evaluating' stamp: 'cwp 12/27/2012 12:36'!
evaluate: aString environment: anEnvironment logged: aBoolean
"Evaluate aString in anEnvironment, answering nil if compilation fails."
| cue |
cue := CompilationCue
source: aString
environment: anEnvironment.
^ self new
evaluate: aString
cue: cue
ifFail: [^ nil]
logged: aBoolean! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 13:18'!
compile: aCue ifFail: failBlock
"Answer a MethodNode. If the MethodNode can not be created, notify
the requestor in the context. If the requestor is nil, evaluate failBlock
instead. The MethodNode is the root of a parse tree. It can be told
to generate a CompiledMethod to be installed in the method dictionary
of the class specified by the context."
self setCue: aCue.
self source: cue source.
^self
translate: sourceStream
noPattern: false
ifFail: failBlock! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 00:06'!
evaluate: textOrStream cue: aCue ifFail: failBlock logged: logFlag
"Compiles the sourceStream into a parse tree, then generates code into
a method. Finally, the compiled method is invoked from here via withArgs:executeMethod:, hence the system no longer creates Doit method
litter on errors."
| methodNode method value toLog itsSelection itsSelectionString |
self setCue: aCue.
self source: textOrStream.
methodNode := self translate: sourceStream noPattern: true ifFail: [^failBlock value].
"Interactive compiles keep temp names so the debugger can show them."
method := self interactive
ifTrue: [methodNode generateWithTempNames]
ifFalse: [methodNode generate].
"The cue's context, when present, is passed as the single argument
(the 0th temp bound by Encoder>>init:notifying:)."
value := cue receiver
withArgs: (cue context ifNil: [#()] ifNotNil: [{cue context}])
executeMethod: method.
"Log the requestor's selection when available, else the whole source."
logFlag ifTrue:
[toLog := ((cue requestor respondsTo: #selection)
and:[(itsSelection := cue requestor selection) notNil
and:[(itsSelectionString := itsSelection asString) isEmptyOrNil not]])
ifTrue:[itsSelectionString]
ifFalse:[sourceStream contents].
SystemChangeNotifier uniqueInstance evaluated: toLog context: cue context].
^ value
! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/26/2012 23:20'!
setCue: aCue
"Adopt aCue as the source of compilation parameters."
cue := aCue.
"Set legacy instance variables for methods that don't use cue yet."
requestor := cue requestor.
class := cue getClass.
category := cue category.
context := cue context.! !
!Compiler methodsFor: 'private' stamp: 'cwp 6/19/2012 21:58'!
source: textOrStream
"Set sourceStream, wrapping plain text in a ReadStream when needed."
sourceStream := (textOrStream isKindOf: PositionableStream)
ifTrue: [ textOrStream ]
ifFalse: [ ReadStream on: textOrStream asString ]! !
"Compiler"!
!SmartRefStream class methodsFor: 'i/o' stamp: 'cwp 6/20/2012 17:42'!
scanFrom: aByteStream environment: anEnvironment
"Environment-aware entry point; the environment is ignored here."
^ self scanFrom: aByteStream! !
!SmartRefStream methodsFor: 'read write' stamp: 'cwp 6/20/2012 17:41'!
scanFrom: aByteStream environment: anEnvironment
"Environment-aware entry point; the environment is ignored here."
^ self scanFrom: aByteStream! !
!ImageSegment methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 17:23'!
scanFrom: aStream environment: anEnvironment
"Environment-aware entry point; the environment is ignored here."
^ self scanFrom: aStream! !
!PseudoClass methodsFor: 'printing' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
"Pseudo classes do not post-process literals; answer it unchanged."
^ scannedLiteral! !
!InternalTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:34'!
scanFrom: aStream environment: anEnvironment
"Read a definition of dictionary.
Make sure the current locale corresponds to my locale id."
| aString newTranslations assoc currentPlatform |
newTranslations := Dictionary new.
currentPlatform := Locale currentPlatform.
"Temporarily switch the platform locale; the ensure block restores it."
[Locale
currentPlatform: (Locale localeID: id).
[aString := aStream nextChunk withSqueakLineEndings.
aString size > 0] whileTrue:
[assoc := Compiler evaluate: aString environment: anEnvironment.
"An empty value means an untranslated phrase: register it instead."
assoc value = ''
ifTrue: [self class registerPhrase: assoc key]
ifFalse: [newTranslations add: assoc]]]
ensure: [Locale currentPlatform: currentPlatform].
self mergeTranslations: newTranslations! !
!NaturalLanguageTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:26'!
scanFrom: aStream environment: anEnvironment
"Read a definition of dictionary.
Make sure the current locale corresponds to my locale id."
| newTranslations currentPlatform |
newTranslations := Dictionary new.
currentPlatform := Locale currentPlatform.
"Temporarily switch the platform locale; the ensure block restores it."
[| aString assoc |
Locale currentPlatform: (Locale localeID: id).
[aString := aStream nextChunk withSqueakLineEndings.
aString size > 0] whileTrue:
[assoc := Compiler evaluate: aString environment: anEnvironment.
"An empty value means an untranslated phrase: register it instead."
assoc value = ''
ifTrue: [self class registerPhrase: assoc key]
ifFalse: [newTranslations add: assoc]]]
ensure: [Locale currentPlatform: currentPlatform].
self mergeTranslations: newTranslations! !
!ObjectScanner methodsFor: 'scanning' stamp: 'cwp 6/20/2012 17:39'!
scanFrom: aByteStream environment: anEnvironment
"This should probably be reimplemented using an environment
for compilation. For now, don't change anything"
^ self scanFrom: aByteStream! !
!SystemDictionary methodsFor: 'accessing' stamp: 'cwp 6/22/2012 09:16'!
bindingOf: varName ifAbsent: aBlock
"SystemDictionary includes Symbols only"
"Coerce varName so String lookups also hit the Symbol keys."
^super bindingOf: varName asSymbol ifAbsent: aBlock! !
!SystemDictionary methodsFor: 'accessing' stamp: 'cwp 6/22/2012 15:48'!
undeclared
"Answer the pool of undeclared variables held under #Undeclared."
^ self at: #Undeclared! !
"System"!
!ExceptionTests methodsFor: 'testing-outer' stamp: 'fbs 1/1/2013 22:14' prior: 40840955!
expectedFailures
"No tests in this case are expected to fail."
^ #().! !
"Tests"!
"Define the concrete release builder used to produce the Squeak 4.5 release."
ReleaseBuilder subclass: #ReleaseBuilderFor4dot5
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'ReleaseBuilder'!
!ReleaseBuilderFor4dot5 commentStamp: 'fbs 1/1/2013 20:25' prior: 0!
The release builder for Squeak 4.5!
!ReleaseBuilder class methodsFor: 'scripts' stamp: 'fbs 12/31/2012 20:43'!
transferCurrentPackagesAsUser: username password: password
"Copy the packages currently loaded in the image from the trunk repository to my releaseRepository."
| trunkRep releaseRep |
trunkRep := self trunkRepository.
releaseRep := self releaseRepository
user: username;
password: password;
yourself.
MCWorkingCopy allManagers do:
[ : eachWorkingCopy | eachWorkingCopy ancestors do:
[ : eachVersionInfo | (releaseRep includesVersionNamed: eachVersionInfo versionName) ifFalse:
[ (trunkRep versionWithInfo: eachVersionInfo)
"Use the repository's description: concatenating the repository
object itself into the string would make the warning signal fail."
ifNil: [ Warning signal: eachVersionInfo name , ' not found in ', trunkRep description ]
ifNotNilDo: [ : ver | releaseRep storeVersion: ver ] ] ] ]! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
openWelcomeWorkspaces
"Open the five standard welcome windows, cascading each one a further
40 pixels down and to the right, all sized 500 by 300."
#(
(squeakUserInterface 'Squeak User Interface')
(workingWithSqueak 'Working With Squeak')
(licenseInformation 'License Information')
(welcomeFutureDirections 'Future Directions')
(welcomeToSqueak 'Welcome to Squeak 4.5')
) withIndexDo:
[:spec :index |
TheWorldMainDockingBar instance
showWelcomeText: spec first
label: spec second
in: (40 * index @ (40 * index) extent: 500 @ 300)]! !
!ReleaseBuilderFor4dot5 class methodsFor: 'scripts' stamp: 'fbs 1/1/2013 20:22'!
prepareNewBuild
"Run the generic preparation, then reset the mock package metadata."
super prepareNewBuild.
MCMockPackageInfo initialize.! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:24'!
releaseRepository
"At release time, change 'trunk' to 'squeak45'."
^ MCHttpRepository
location: 'http://source.squeak.org/trunk'
user: 'squeak'
password: 'squeak'! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:22'!
setDisplayExtent: extent
"Uncomment next line when the primitives become available in the Squeak VM."
" DisplayScreen hostWindowSize: extent."
"extent is a Point: it must be printed before concatenation, otherwise
signalling the warning itself would raise a secondary error."
Display extent = extent ifFalse: [ Warning signal: 'Display extent not set to ', extent printString ]! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
setPreferences
"Apply the standard look-and-feel settings for the 4.5 release."
Preferences
installBrightWindowColors ;
setPreference: #scrollBarsWithoutMenuButton toValue: true ;
setPreference: #swapMouseButtons toValue: true ;
setPreference: #annotationPanes toValue: true ;
setPreference: #showSplitterHandles toValue: false ;
setPreference: #showBoundsInHalo toValue: true ;
setPreference: #alternateHandlesLook toValue: false ;
setPreference: #roundedMenuCorners toValue: false ;
setPreference: #roundedWindowCorners toValue: false.
PluggableButtonMorph roundedButtonCorners: false.
FillInTheBlankMorph roundedDialogCorners: false.
Workspace shouldStyle: false.
NetNameResolver enableIPv6: true.! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
switchToNewRepository
"Point the updater at the release repository and, in the default
repository group, replace any lingering squeak44 repository with it."
| group |
MCMcmUpdater defaultUpdateURL: self releaseRepository description.
group := MCRepositoryGroup default.
(group repositories
detect: [:each | each description includesSubString: 'squeak44']
ifNone: [nil])
ifNotNil: [:obsolete | group removeRepository: obsolete].
group addRepository: self releaseRepository! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
versionString
"Answer the version identifier for this release."
^ 'Squeak4.5'.! !
ReleaseBuilder class removeSelector: #transferCurrentPackages!
"ReleaseBuilder"!
!Environment class methodsFor: 'as yet unclassified' stamp: 'cwp 1/1/2013 18:52' prior: 40834114!
initialize
"Class-side initialization: install the environment support."
self install! !
"Environments"!
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:59' prior: 52081878!
initPattern: aString notifying: req return: aBlock
"Parse just the message pattern of aString and answer the value of
aBlock applied to the parsed pattern; answer nil on failure."
| result |
self
init: (ReadStream on: aString asString)
cue: (CompilationCue source: aString requestor: req)
failBlock: [^nil].
"The parser acts as its own encoder while only the pattern is parsed."
encoder := self.
result := aBlock value: (self pattern: false inContext: nil).
encoder := failBlock := nil. "break cycles"
^result! !
!Parser methodsFor: 'public access' stamp: 'cwp 12/27/2012 00:01' prior: 34175471!
parse: sourceStream class: class category: aCategory noPattern: noPattern context: aContext notifying: req ifFail: aBlock
"Compatibility entry point: bundle the loose arguments into a
CompilationCue and delegate to the cue-based parse."
^ self
parse: sourceStream
cue: (CompilationCue
source: sourceStream
context: aContext
class: class
category: aCategory
requestor: req)
noPattern: noPattern
ifFail: aBlock! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 09:11' prior: 34183963!
evaluate: textOrStream in: aContext to: receiver notifying: aRequestor ifFail: failBlock logged: logFlag
"Compiles the sourceStream into a parse tree, then generates code into
a method. If aContext is not nil, the text can refer to temporaries in that
context (the Debugger uses this). If aRequestor is not nil, then it will receive
a notify:at: message before the attempt to evaluate is aborted. Finally, the
compiled method is invoked from here via withArgs:executeMethod:, hence
the system no longer creates Doit method litter on errors."
| theClass |
"Compile against the context's receiver class when a context is given."
theClass := ((aContext == nil ifTrue: [receiver] ifFalse: [aContext receiver]) class).
self setCue: (CompilationCue
source: textOrStream
context: aContext
receiver: receiver
class: theClass
environment: theClass environment
category: nil
requestor: aRequestor).
^ self evaluate: textOrStream cue: cue ifFail: failBlock logged: logFlag! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 09:17' prior: 34185488!
from: textOrStream class: aClass classified: aCategory context: aContext notifying: req
"Compatibility entry point: set the source and build the cue from the
loose arguments."
self source: textOrStream.
self setCue:
(CompilationCue
source: textOrStream
context: aContext
class: aClass
category: aCategory
requestor: req)! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/26/2012 23:55' prior: 50781309!
from: textOrStream class: aClass context: aContext notifying: req
"Compatibility entry point without a category: set the source and
build the cue from the loose arguments."
self source: textOrStream.
self setCue:
(CompilationCue
source: textOrStream
context: aContext
class: aClass
requestor: req)
! !
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/27/2012 09:41' prior: 50996506!
init: aClass context: aContext notifying: anObject
"Compatibility entry point: wrap aClass and aContext in a cue (with no
requestor) and delegate to the cue-based initialization."
self
init: (CompilationCue
context: aContext
class: aClass
requestor: nil)
notifying: anObject! !
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/26/2012 23:58' prior: 39061698!
temps: tempVars literals: lits class: cl
"Initialize this encoder for decompilation."
self setCue: (CompilationCue class: cl).
supered := false.
nTemps := tempVars size.
tempVars do: [:node | scopeTable at: node name put: node].
"Preload the literal stream with the decompiled method's literals."
literalStream := WriteStream on: (Array new: lits size).
literalStream nextPutAll: lits.
sourceRanges := Dictionary new: 32.
globalSourceRanges := OrderedCollection new: 32.! !
"Compiler"!
!Class methodsFor: 'class variables' stamp: 'cwp 6/22/2012 15:48' prior: 36026010!
addClassVarName: aString
"Add the argument, aString, as a class variable of the receiver.
Signal an error if the first character of aString is not capitalized,
or if it is already a variable named in the class."
| symbol oldState |
oldState := self copy.
aString first canBeGlobalVarInitial
ifFalse: [^self error: aString, ' class variable name should be capitalized; proceed to include anyway.'].
symbol := aString asSymbol.
self withAllSubclasses do:
[:subclass |
"NOTE(review): this test sends canFindWithoutEnvironment: to self,
not to subclass, so the identical check repeats for every subclass.
Confirm whether subclass was intended here."
(self canFindWithoutEnvironment: symbol) ifTrue: [
(DuplicateVariableError new)
superclass: superclass; "fake!!!!!!"
variable: aString;
signal: aString, ' is already defined']].
classPool == nil ifTrue: [classPool := Dictionary new].
(classPool includesKey: symbol) ifFalse:
["Pick up any refs in Undeclared"
classPool declare: symbol from: environment undeclared.
SystemChangeNotifier uniqueInstance classDefinitionChangedFrom: oldState to: self]! !
!Class methodsFor: 'compiling' stamp: 'cwp 6/20/2012 09:48' prior: 54782024!
bindingOf: varName
"Resolve varName in the receiver's own environment."
^ self bindingOf: varName environment: self environment! !
!Class methodsFor: 'organization' stamp: 'cwp 6/25/2012 18:25' prior: 54785804!
category
"Answer the system organization category for the receiver. First check whether the
category name stored in the ivar is still correct and only if this fails look it up
(latter is much more expensive)"
category ifNotNil: [ :symbol |
((self environment organization listAtCategoryNamed: symbol) includes: self name)
ifTrue: [ ^symbol ] ].
"Cached name was stale (or nil): refresh it from the organization."
category := self environment organization categoryOfElement: self name.
^category! !
!Class methodsFor: 'initialize-release' stamp: 'cwp 6/22/2012 15:49' prior: 36027730!
declare: varString
"Declare class variables common to all instances. Answer whether
recompilation is advisable."
| newVars conflicts |
newVars :=
(Scanner new scanFieldNames: varString)
collect: [:x | x asSymbol].
newVars do:
[:var | var first canBeGlobalVarInitial
ifFalse: [self error: var, ' class variable name should be capitalized; proceed to include anyway.']].
conflicts := false.
"Remove class variables not named in the new declaration."
classPool == nil
ifFalse: [(classPool keys reject: [:x | newVars includes: x]) do:
[:var | self removeClassVarName: var]].
(newVars reject: [:var | self classPool includesKey: var])
do: [:var | "adding"
"check if new vars defined elsewhere"
(self canFindWithoutEnvironment: var) ifTrue: [
(DuplicateVariableError new)
superclass: superclass; "fake!!!!!!"
variable: var;
signal: var, ' is already defined'.
conflicts := true]].
newVars size > 0
ifTrue:
[classPool := self classPool.
"in case it was nil"
newVars do: [:var | classPool declare: var from: environment undeclared]].
^conflicts! !
!Class methodsFor: 'class variables' stamp: 'cwp 6/22/2012 15:49' prior: 54802475!
removeClassVarName: aString
"Remove the class variable whose name is the argument, aString, from
the names defined in the receiver, a class. Create an error notification if
aString is not a class variable or if it is still being used in the code of
the class."
| aSymbol |
aSymbol := aString asSymbol.
(classPool includesKey: aSymbol)
ifFalse: [^self error: aString, ' is not a class variable'].
"Scan instance and class sides of every subclass for remaining references."
self withAllSubclasses do:[:subclass |
(Array with: subclass with: subclass class) do:[:classOrMeta |
(classOrMeta whichSelectorsReferTo: (classPool associationAt: aSymbol))
isEmpty ifFalse: [
"During a fileIn, do not prompt: silently move to Undeclared."
InMidstOfFileinNotification signal ifTrue: [
Transcript cr; show: self name, ' (' , aString , ' is Undeclared) '.
^ environment undeclared declare: aSymbol from: classPool].
(self confirm: (aString,' is still used in code of class ', classOrMeta name,
'.\Is it okay to move it to Undeclared?') withCRs)
ifTrue:[^Undeclared declare: aSymbol from: classPool]
ifFalse:[^self]]]].
classPool removeKey: aSymbol.
classPool isEmpty ifTrue: [classPool := nil].
! !
!Class methodsFor: 'class name' stamp: 'cwp 6/22/2012 15:49' prior: 54796206!
rename: aString
"The new name of the receiver is the argument, aString."
| oldName newName |
(newName := aString asSymbol) = (oldName := self name)
ifTrue: [^ self].
"Refuse to shadow an existing global; warn about Undeclared references."
(self environment includesKey: newName)
ifTrue: [^ self error: newName , ' already exists'].
(environment undeclared includesKey: newName)
ifTrue: [self inform: 'There are references to, ' , aString printString , '
from Undeclared. Check them after this change.'].
name := newName.
self environment renameClass: self from: oldName! !
!ClassBuilder methodsFor: 'class definition' stamp: 'cwp 6/22/2012 01:05' prior: 39054430!
name: className inEnvironment: env subclassOf: newSuper type: type instanceVariableNames: instVarString classVariableNames: classVarString poolDictionaries: poolString category: category unsafe: unsafe
"Define a new class in the given environment.
If unsafe is true do not run any validation checks.
This facility is provided to implement important system changes."
| oldClass instVars classVars copyOfOldClass newClass |
environ := env.
instVars := Scanner new scanFieldNames: instVarString.
classVars := (Scanner new scanFieldNames: classVarString) collect: [:x | x asSymbol].
"Validate the proposed name"
unsafe ifFalse:[(self validateClassName: className) ifFalse:[^nil]].
oldClass := env at: className ifAbsent:[nil].
oldClass isBehavior
ifFalse: [oldClass := nil] "Already checked in #validateClassName:"
ifTrue: [
"Keep a copy of the old definition (linked into the hierarchy) so
change notification can diff against it; the ensure block unlinks it."
copyOfOldClass := oldClass copy.
copyOfOldClass superclass addSubclass: copyOfOldClass].
[ | newCategory needNew force organization oldCategory |
unsafe ifFalse:[
"Run validation checks so we know that we have a good chance for recompilation"
(self validateSuperclass: newSuper forSubclass: oldClass) ifFalse:[^nil].
(self validateInstvars: instVars from: oldClass forSuper: newSuper) ifFalse:[^nil].
(self validateClassvars: classVars from: oldClass forSuper: newSuper) ifFalse:[^nil].
(self validateSubclassFormat: type from: oldClass forSuper: newSuper extra: instVars size) ifFalse:[^nil]].
"See if we need a new subclass"
needNew := self needsSubclassOf: newSuper type: type instanceVariables: instVars from: oldClass.
needNew == nil ifTrue:[^nil]. "some error"
(needNew and:[unsafe not]) ifTrue:[
"Make sure we don't redefine any dangerous classes"
(self tooDangerousClasses includes: oldClass name) ifTrue:[
self error: oldClass name, ' cannot be changed'.
].
"Check if the receiver should not be redefined"
(oldClass ~~ nil and:[oldClass shouldNotBeRedefined]) ifTrue:[
self notify: oldClass name asText allBold,
' should not be redefined. \Proceed to store over it.' withCRs]].
needNew ifTrue:[
"Create the new class"
newClass := self
newSubclassOf: newSuper
type: type
instanceVariables: instVars
from: oldClass.
newClass == nil ifTrue:[^nil]. "Some error"
newClass setName: className.
newClass environment: environ.
] ifFalse:[
"Reuse the old class"
newClass := oldClass.
].
"Install the class variables and pool dictionaries... "
force := (newClass declare: classVarString) | (newClass sharing: poolString).
"... classify ..."
newCategory := category asSymbol.
organization := environ ifNotNil:[environ organization].
oldClass isNil ifFalse: [oldCategory := (organization categoryOfElement: oldClass name) asSymbol].
organization classify: newClass name under: newCategory suppressIfDefault: true.
"... recompile ..."
newClass := self recompile: force from: oldClass to: newClass mutate: false.
"... export if not yet done ..."
(environ at: newClass name ifAbsent:[nil]) == newClass ifFalse:[
[environ at: newClass name put: newClass]
on: AttemptToWriteReadOnlyGlobal do:[:ex| ex resume: true].
environ flushClassNameCache.
].
newClass doneCompiling.
"... notify interested clients ..."
oldClass isNil ifTrue: [
SystemChangeNotifier uniqueInstance classAdded: newClass inCategory: newCategory.
^ newClass].
"NOTE(review): the recategorized notification passes the raw category
argument while the comparison uses newCategory (its asSymbol form);
confirm whether to: newCategory was intended."
newCategory ~= oldCategory
ifTrue: [SystemChangeNotifier uniqueInstance class: newClass recategorizedFrom: oldCategory to: category]
ifFalse: [SystemChangeNotifier uniqueInstance classDefinitionChangedFrom: copyOfOldClass to: newClass.].
] ensure:
[copyOfOldClass ifNotNil: [copyOfOldClass superclass removeSubclass: copyOfOldClass].
Behavior flushObsoleteSubclasses.
].
^newClass! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 22:57' prior: 18572019!
superclass: newSuper
subclass: t instanceVariableNames: f
classVariableNames: d poolDictionaries: s category: cat
"This is the standard initialization message for creating a new class as a
subclass of an existing class."
| env |
"Ask for an environment; default to the superclass's when none answers."
env := EnvironmentRequest signal ifNil: [newSuper environment].
^self
name: t
inEnvironment: env
subclassOf: newSuper
type: newSuper typeOfClass
instanceVariableNames: f
classVariableNames: d
poolDictionaries: s
category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:01' prior: 50629912!
superclass: aClass
variableByteSubclass: t instanceVariableNames: f
classVariableNames: d poolDictionaries: s category: cat
"This is the standard initialization message for creating a new class as a
subclass of an existing class in which the subclass is to
have indexable byte-sized nonpointer variables."
| oldClassOrNil actualType env |
(aClass instSize > 0)
ifTrue: [^self error: 'cannot make a byte subclass of a class with named fields'].
(aClass isVariable and: [aClass isWords])
ifTrue: [^self error: 'cannot make a byte subclass of a class with word fields'].
(aClass isVariable and: [aClass isPointers])
ifTrue: [^self error: 'cannot make a byte subclass of a class with pointer fields'].
"Preserve the #compiledMethod format if the class being redefined has it."
oldClassOrNil := aClass environment at: t ifAbsent:[nil].
actualType := (oldClassOrNil notNil
and: [oldClassOrNil typeOfClass == #compiledMethod])
ifTrue: [#compiledMethod]
ifFalse: [#bytes].
env := EnvironmentRequest signal ifNil: [aClass environment].
^self
name: t
inEnvironment: env
subclassOf: aClass
type: actualType
instanceVariableNames: f
classVariableNames: d
poolDictionaries: s
category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:03' prior: 18573442!
superclass: aClass
variableSubclass: t instanceVariableNames: f
classVariableNames: d poolDictionaries: s category: cat
"This is the standard initialization message for creating a new class as a
subclass of an existing class in which the subclass is to
have indexable pointer variables."
| env |
aClass isBits ifTrue:
[^self error: 'cannot make a pointer subclass of a class with non-pointer fields'].
env := EnvironmentRequest signal ifNil: [aClass environment].
^self
name: t
inEnvironment: env
subclassOf: aClass
type: #variable
instanceVariableNames: f
classVariableNames: d
poolDictionaries: s
category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:04' prior: 18574098!
superclass: aClass
variableWordSubclass: t instanceVariableNames: f
classVariableNames: d poolDictionaries: s category: cat
"This is the standard initialization message for creating a new class as a
subclass of an existing class in which the subclass is to
have indexable word-sized nonpointer variables."
| env |
(aClass instSize > 0)
ifTrue: [^self error: 'cannot make a word subclass of a class with named fields'].
(aClass isVariable and: [aClass isBytes])
ifTrue: [^self error: 'cannot make a word subclass of a class with byte fields'].
(aClass isVariable and: [aClass isPointers])
ifTrue: [^self error: 'cannot make a word subclass of a class with pointer fields'].
env := EnvironmentRequest signal ifNil: [aClass environment].
^self
name: t
inEnvironment: env
subclassOf: aClass
type: #words
instanceVariableNames: f
classVariableNames: d
poolDictionaries: s
category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:04' prior: 18575028!
superclass: aClass
weakSubclass: t instanceVariableNames: f
classVariableNames: d poolDictionaries: s category: cat
"This is the standard initialization message for creating a new class as a
subclass of an existing class (the receiver) in which the subclass is to
have weak indexable pointer variables."
| env |
aClass isBits
ifTrue: [^self error: 'cannot make a pointer subclass of a class with non-pointer fields'].
env := EnvironmentRequest signal ifNil: [aClass environment].
^self
name: t
inEnvironment: env
subclassOf: aClass
type: #weak
instanceVariableNames: f
classVariableNames: d
poolDictionaries: s
category: cat! !
"Kernel"!
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:21' prior: 59135029!
ambiguousSelector: aString inRange: anInterval
"Resolve an ambiguous use of $- (binary minus versus negative literal)
by asking the user interactively; fall back to the legacy reading when
no user is available."
| correctedSelector userSelection offset intervalWithOffset |
self interactive ifFalse: [
"In non interactive mode, compile with backward compatibility: $- is part of literal argument"
Transcript cr; store: encoder classEncoding; nextPutAll:#'>>';store: encoder selector; show: ' would send ' , token , '-'.
^super ambiguousSelector: aString inRange: anInterval].
"handle the text selection"
userSelection := cue requestor selectionInterval.
intervalWithOffset := anInterval first + requestorOffset to: anInterval last + requestorOffset.
cue requestor selectFrom: intervalWithOffset first to: intervalWithOffset last.
cue requestor select.
"Build the menu with alternatives"
correctedSelector := AmbiguousSelector
signalName: aString
inRange: intervalWithOffset.
correctedSelector ifNil: [^self fail].
"Execute the selected action"
offset := self substituteWord: correctedSelector wordInterval: intervalWithOffset offset: 0.
cue requestor deselect.
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last + offset.
token := (correctedSelector readStream upTo: Character space) asSymbol! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:21' prior: 38558136!
collectTemporaryDeclarationsFrom: methodNode
"Answer the nodes in methodNode (plus possibly the parser itself) that
carry a valid temporaries-declaration mark (a $| in the requestor text),
sorted by descending position."
| tempsMarks str |
tempsMarks := OrderedCollection new.
str := cue requestor text asString.
methodNode accept: (ParseNodeEnumerator
ofBlock: [ :aNode |
| mark |
(aNode class canUnderstand: #tempsMark)
ifTrue:
[mark := aNode tempsMark.
"Only accept marks that still point at a $| in the current text."
(mark notNil and: [ mark between: 1 and: str size ] and: [ (str at: mark) = $| ])
ifTrue: [ tempsMarks addLast: aNode ]]]).
(tempsMark notNil and: [ tempsMark between: 1 and: str size ] and: [ (str at: tempsMark) = $| ])
ifTrue: [ tempsMarks addLast: self ].
^ tempsMarks sorted: [ :a :b | a tempsMark > b tempsMark ]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:20' prior: 52096606!
correctSelector: proposedKeyword wordIntervals: spots exprInterval: expInt ifAbort: abortAction
"Correct the proposedKeyword to some selector symbol, correcting the original text if such action is indicated. abortAction is invoked if the proposedKeyword could not be converted into a valid selector. Spots is an ordered collection of intervals within the source stream, one for each of the keyword parts."
| correctSelector userSelection |
"If we can't ask the user, assume that the keyword will be defined later"
self interactive ifFalse: [^proposedKeyword asSymbol].
userSelection := cue requestor selectionInterval.
cue requestor selectFrom: spots first first to: spots last last.
cue requestor select.
correctSelector := UnknownSelector name: proposedKeyword.
correctSelector ifNil: [^abortAction value].
cue requestor deselect.
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last.
self substituteSelector: correctSelector keywords wordIntervals: spots.
"Abort when the correction changed a non-keyword selector into a keyword
one: the shape of the send no longer matches what was parsed."
^(proposedKeyword last ~~ $:
and: [correctSelector last == $:])
ifTrue: [abortAction value]
ifFalse: [correctSelector]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:20' prior: 33907242!
correctVariable: proposedVariable interval: spot
"Correct the proposedVariable to a known variable, or declare it as a new
variable if such action is requested. We support declaring lowercase
variables as temps or inst-vars, and uppercase variables as Globals or
ClassVars, depending on whether the context is nil (class=UndefinedObject).
Spot is the interval within the source stream of the variable.
rr 3/4/2004 10:26 : adds the option to define a new class. "
"Check if this is an i-var, that has been corrected already (ugly)"
"Display the pop-up menu"
| binding userSelection action |
(encoder classEncoding instVarNames includes: proposedVariable) ifTrue:
[^InstanceVariableNode new
name: proposedVariable
index: (encoder classEncoding allInstVarNames indexOf: proposedVariable)].
"If we can't ask the user for correction, make it undeclared"
self interactive ifFalse: [^encoder undeclared: proposedVariable].
"First check to see if the requestor knows anything about the variable"
(binding := cue requestor bindingOf: proposedVariable)
ifNotNil: [^encoder global: binding name: proposedVariable].
userSelection := cue requestor selectionInterval.
cue requestor selectFrom: spot first to: spot last.
cue requestor select.
"Build the menu with alternatives; nil means the user declined every option."
action := UndeclaredVariable
signalFor: self
name: proposedVariable
inRange: spot.
action ifNil: [^self fail].
"Execute the selected action"
cue requestor deselect.
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last.
^action value! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:19' prior: 34172921!
declareUndeclaredTemps: methodNode
"Declare any undeclared temps, declaring them at the smallest enclosing scope.
Edits the requestor text in place and finishes by signalling a reparse."
| undeclared userSelection blocksToVars |
(undeclared := encoder undeclaredTemps) isEmpty ifTrue:
[^self].
userSelection := cue requestor selectionInterval.
blocksToVars := IdentityDictionary new.
"Map each declaration scope (the method-level block, or the innermost
enclosing block found by VariableScopeFinder) to the names to declare there."
undeclared do:
[:var|
(blocksToVars
at: (var tag == #method
ifTrue: [methodNode block]
ifFalse: [methodNode accept: (VariableScopeFinder new ofVariable: var)])
ifAbsentPut: [SortedCollection new]) add: var name].
"Method-level temps use the dedicated paste; block-level temps are spliced
into each block's existing (or new) temp declaration below."
(blocksToVars removeKey: methodNode block ifAbsent: []) ifNotNil:
[:rootVars|
rootVars do: [:varName| self pasteTempAtMethodLevel: varName]].
(blocksToVars keys sorted: [:a :b| a tempsMark < b tempsMark]) do:
[:block| | decl |
decl := (blocksToVars at: block) reduce: [:a :b| a, ' ', b].
block temporaries isEmpty
"No declaration yet: insert a complete | ... | pair after the mark;
otherwise extend the existing declaration in place."
ifTrue:
[self substituteWord: ' | ', decl, ' |'
wordInterval: (block tempsMark + 1 to: block tempsMark)
offset: requestorOffset]
ifFalse:
[self substituteWord: decl, ' '
wordInterval: (block tempsMark to: block tempsMark - 1)
offset: requestorOffset]].
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last + requestorOffset.
ReparseAfterSourceEditing signal! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 11:45' prior: 37183770!
defineClass: className
"Prompts the user to define a new class,
asks for its category, and lets the user edit further
the definition. Evaluates the edited definition, then answers a global
node bound to the freshly created class in the cue's environment."
| sym cat def d2 |
sym := className asSymbol.
cat := UIManager default request: 'Enter class category : ' initialAnswer: self encoder classEncoding theNonMetaClass category.
cat
ifEmpty: [cat := 'Unknown'].
def := 'Object subclass: #' , sym , '
instanceVariableNames: ''''
classVariableNames: ''''
poolDictionaries: ''''
category: ''' , cat , ''''.
d2 := UIManager default request: 'Edit class definition : ' initialAnswer: def.
d2
ifEmpty: [d2 := def].
Compiler evaluate: d2.
^ encoder
global: (cue environment bindingOf: sym)
name: sym! !
!Parser methodsFor: 'primitives' stamp: 'cwp 12/27/2012 11:46' prior: 37184567!
externalFunctionDeclaration
"Parse the function declaration for a call to an external library.
Answers true when a declaration was consumed (adding the corresponding
primitive pragma and, when available, an ExternalLibraryFunction literal),
false when the tokens do not start an external declaration."
| descriptorClass callType modifier retType externalName args argType module fn |
descriptorClass := cue environment
valueOf: #ExternalFunction
ifAbsent: [^ false].
callType := descriptorClass callingConventionFor: here.
callType == nil ifTrue:[^false].
"Accumulate optional calling-convention modifier bits into callType."
[modifier := descriptorClass callingConventionModifierFor: token.
modifier notNil] whileTrue:
[self advance.
callType := callType bitOr: modifier].
"Parse return type"
self advance.
retType := self externalType: descriptorClass.
retType == nil ifTrue:[^self expected:'return type'].
"Parse function name or index"
externalName := here.
(self match: #string)
ifTrue:[externalName := externalName asSymbol]
ifFalse:[(self match:#number) ifFalse:[^self expected:'function name or index']].
(self matchToken: #'(') ifFalse:[^self expected:'argument list'].
args := WriteStream on: Array new.
[here == #')'] whileFalse:[
argType := self externalType: descriptorClass.
argType == nil ifTrue:[^self expected:'argument'].
"Keep every argument type except a plain void; a void pointer is kept."
argType isVoid & argType isPointerType not ifFalse:[args nextPut: argType].
].
(self matchToken: #')') ifFalse:[^self expected:')'].
(self matchToken: 'module:') ifTrue:[
module := here.
(self match: #string) ifFalse:[^self expected: 'String'].
module := module asSymbol].
Smalltalk at: #ExternalLibraryFunction ifPresent:[:xfn|
fn := xfn name: externalName
module: module
callType: callType
returnType: retType
argumentTypes: args contents.
self allocateLiteral: fn.
].
"Primitive number 120 - presumably the VM's external-call primitive;
confirm against the VM primitive table."
(self matchToken: 'error:')
ifTrue:
[| errorCodeVariable |
errorCodeVariable := here.
(hereType == #string
or: [hereType == #word]) ifFalse:[^self expected: 'error code (a variable or string)'].
self advance.
self addPragma: (Pragma keyword: #primitive:error: arguments: (Array with: 120 with: errorCodeVariable)).
fn ifNotNil: [fn setErrorCodeName: errorCodeVariable]]
ifFalse:
[self addPragma: (Pragma keyword: #primitive: arguments: #(120))].
^true
! !
!Parser methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:19' prior: 58306169!
interactive
"Answer true if compilation is interactive, i.e. the compilation cue carries
a requestor (typically a code editor) that can be asked about corrections."
^ cue requestor notNil! !
!Parser methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:22' prior: 58137223!
notify: string at: location
"Report a syntax error at location. Without a requestor, raise a
SyntaxErrorNotification whose source text has a bold red marker inserted at
the failure position; with a requestor, delegate the report to it. In every
case the parse is failed."
cue requestor isNil
ifTrue: [(encoder == self or: [encoder isNil]) ifTrue: [^ self fail "failure setting up syntax error"].
SyntaxErrorNotification
inClass: encoder classEncoding
category: cue category
withCode:
"copyReplaceFrom: location to: location - 1 is an empty replacement
range, i.e. a pure insertion of the marker text at location."
(source contents asText
copyReplaceFrom: location
to: location - 1
with: ((string , ' ->') asText allBold
addAttribute: TextColor red; yourself))
doitFlag: doitFlag
errorMessage: string
location: location]
ifFalse: [cue requestor
notify: string , ' ->'
at: location
in: source].
^self fail! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:17' prior: 34177108!
pasteTempAtMethodLevel: name
"Insert a declaration for the temp called name at the method-level temps
mark, creating a fresh | ... | pair when the method has none, and advance
tempsMark past the inserted text."
| insertion delta theTextString characterBeforeMark |
theTextString := cue requestor text string.
characterBeforeMark := theTextString at: tempsMark-1 ifAbsent: [$ ].
(theTextString at: tempsMark) = $| ifTrue: [
"Paste it before the second vertical bar"
insertion := name, ' '.
characterBeforeMark isSeparator ifFalse: [
insertion := ' ', insertion].
delta := 0.
] ifFalse: [
"No bars - insert some with CR, tab"
insertion := '| ' , name , ' |',String cr.
delta := 2. "the bar and CR"
characterBeforeMark = Character tab ifTrue: [
insertion := insertion , String tab.
delta := delta + 1. "the tab"
].
].
"delta counts inserted characters that lie after the new mark position."
tempsMark := tempsMark +
(self substituteWord: insertion
wordInterval: (tempsMark to: tempsMark-1)
offset: 0) - delta! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:16' prior: 52095305!
queryUndefined
"Highlight the just-parsed undefined variable in the requestor and ask the
user, via the UndefinedVariable notification, whether to proceed; fail the
compilation when they decline."
| varStart varName |
varName := parseNode key.
varStart := self endOfLastToken + requestorOffset - varName size + 1.
cue requestor selectFrom: varStart to: varStart + varName size - 1; select.
(UndefinedVariable name: varName) ifFalse: [^ self fail]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:15' prior: 38599341!
removeEmptyTempDeclarationsFrom: methodNode
"Delete temp declaration pairs left empty after unused temps were removed:
both the adjacent form || and the spaced form | | (the bare $ below is the
space character literal). When the pair is the only content on its line,
the whole line is removed. Signals a reparse when anything changed."
| sourceCode madeChanges tempsMarkHolder |
sourceCode := cue requestor text asString.
tempsMarkHolder := self collectTemporaryDeclarationsFrom: methodNode.
madeChanges := false.
tempsMarkHolder do: [ :currentBlock | | tempsMarkChar0 tempsMarkChar1 tempsMarkChar2 end start |
tempsMarkChar0 := (sourceCode at: currentBlock tempsMark).
tempsMarkChar1 := (sourceCode at: currentBlock tempsMark - 1).
tempsMarkChar2 := (sourceCode at: currentBlock tempsMark - 2).
tempsMarkChar0 = $| & tempsMarkChar1 = $|
ifTrue:
[ end := currentBlock tempsMark.
start := end - 1].
tempsMarkChar0 = $| & tempsMarkChar1 = $ & tempsMarkChar2 = $|
ifTrue:
[ end := currentBlock tempsMark.
start := end - 2].
start notNil & end notNil ifTrue: [
"Widen the deletion to the whole line when the pair is alone on it."
| lineStart lineEnd |
lineStart := 1 + (sourceCode
lastIndexOf: Character cr
startingAt: start - 1
ifAbsent: [ 0 ]).
lineEnd := sourceCode
indexOf: Character cr
startingAt: end + 1
ifAbsent: [ sourceCode size ].
((sourceCode indexOfAnyOf: CharacterSet nonSeparators startingAt: lineStart) >= start
and: [ (sourceCode indexOfAnyOf: CharacterSet nonSeparators startingAt: end + 1) > lineEnd ]) ifTrue: [
start := lineStart.
end := lineEnd ].
cue requestor correctFrom: start to: end with: ''.
madeChanges := true.
currentBlock tempsMark: nil ] ].
madeChanges ifTrue: [ReparseAfterSourceEditing signal]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:15' prior: 38561281!
removeUnusedTemporaryNamed: temp from: str lookingAt: currentBlock movingTempMarksOf: someBlocks
"Scan leftwards from currentBlock's closing temp bar in str for a
whole-word occurrence of temp; delete it (plus one leading space, if any),
shift the temps marks of someBlocks by the deleted length, and answer true.
Answer false when the declaration was not found before the opening bar.
NOTE(review): relies on the Boolean or:or: extension for chained
short-circuiting - confirm the target dialect provides it."
| start end |
end := currentBlock tempsMark - 1.
["Beginning at right temp marker..."
start := end - temp size + 1.
end < temp size or: [ (str at: start) = $| ]
or: [ temp = (str copyFrom: start to: end)
and: [ ((str at: start - 1) = $| | (str at: start - 1) isSeparator)
& ((str at: end + 1) = $| | (str at: end + 1) isSeparator) ] ]]
whileFalse: [
"Search left for the unused temp"
end := cue requestor nextTokenFrom: end direction: -1 ].
"NOTE(review): the character literal after the $ two lines below was most
likely a space that the mail archive stripped - verify against the
original fileout before filing this chunk in."
(end < temp size or: [ (str at: start) = $| ])
ifFalse:
[(str at: start - 1) = $
ifTrue: [ start := start - 1 ].
cue requestor correctFrom: start to: end with: ''.
someBlocks do: [ :aBlock | aBlock tempsMark: aBlock tempsMark - (end - start + 1)].
^true ].
^false! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:14' prior: 38562194!
removeUnusedTemps: methodNode
"Scan for unused temp names, and prompt the user about the prospect of removing each one found"
| madeChanges tempsMarkHolder unusedTempNames tempMarkHoldersToChange |
madeChanges := false.
tempMarkHoldersToChange := OrderedCollection new.
tempsMarkHolder := self collectTemporaryDeclarationsFrom: methodNode.
"Keep only temps that are genuinely undefined and that the user (cached in
queriedUnusedTemporaries) has agreed to remove."
unusedTempNames := encoder unusedTempNames select:
[ :temp | (encoder lookupVariable: temp ifAbsent: [ ]) isUndefTemp
and: [ self queriedUnusedTemporaries at: temp ifAbsentPut: [UnusedVariable name: temp] ]].
tempsMarkHolder do: [ :currentBlock |
tempMarkHoldersToChange add: currentBlock.
unusedTempNames do:
[ :temp |
(self
removeUnusedTemporaryNamed: temp
from: cue requestor text asString
lookingAt: currentBlock
movingTempMarksOf: tempMarkHoldersToChange) ifTrue: [ madeChanges := true ]]].
madeChanges
ifTrue: [ self removeEmptyTempDeclarationsFrom: methodNode.
ReparseAfterSourceEditing signal ]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:14' prior: 34179326!
substituteWord: correctWord wordInterval: spot offset: o
"Substitute correctWord for the characters in spot (shifted by o) in the
(presumed interactive) requestor's text. Update requestorOffset by the size
delta and answer the updated offset."
cue requestor correctFrom: spot first + o to: spot last + o with: correctWord.
requestorOffset := requestorOffset + correctWord size - spot size.
^o + correctWord size - spot size! !
!Parser methodsFor: 'expression types' stamp: 'cwp 12/27/2012 10:14' prior: 34179807!
temporaries
" [ '|' (variable)* '|' ]
Parse an optional temporary declaration, binding each name in the encoder
and recording tempsMark (where a declaration could later be inserted).
Answers the collection of bound variables, empty when there are none."
| vars theActualText |
(self match: #verticalBar) ifFalse:
["no temps"
doitFlag ifTrue:
[tempsMark := self interactive
ifTrue: [cue requestor selectionInterval first]
ifFalse: [1].
^ #()].
tempsMark := hereMark "formerly --> prevMark + prevToken".
tempsMark > 0 ifTrue:
["Skip leading separators so insertions land on the first code position."
theActualText := source contents.
[tempsMark < theActualText size and: [(theActualText at: tempsMark) isSeparator]]
whileTrue: [tempsMark := tempsMark + 1]].
^ #()].
vars := OrderedCollection new.
[hereType == #word]
whileTrue: [vars addLast: (encoder bindTemp: self advance)].
(self match: #verticalBar) ifTrue:
[tempsMark := prevMark.
^ vars].
^ self expected: 'Vertical bar'
! !
!Parser methodsFor: 'expression types' stamp: 'cwp 12/27/2012 10:14' prior: 34180638!
temporariesIn: methodSelector
" [ '|' (variable)* '|' ]
As #temporaries, but binds each temp in the context of methodSelector so
shadowing warnings can name the method being compiled."
| vars theActualText |
(self match: #verticalBar) ifFalse:
["no temps"
doitFlag ifTrue:
[tempsMark := self interactive
ifTrue: [cue requestor selectionInterval first]
ifFalse: [1].
^ #()].
tempsMark := hereMark "formerly --> prevMark + prevToken".
tempsMark > 0 ifTrue:
["Skip leading separators so insertions land on the first code position."
theActualText := source contents.
[tempsMark < theActualText size and: [(theActualText at: tempsMark) isSeparator]]
whileTrue: [tempsMark := tempsMark + 1]].
^ #()].
vars := OrderedCollection new.
[hereType == #word]
whileTrue: [vars addLast: (encoder bindTemp: self advance in: methodSelector)].
(self match: #verticalBar) ifTrue:
[tempsMark := prevMark.
^ vars].
^ self expected: 'Vertical bar'! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 10:11' prior: 53971863!
compiledMethodFor: textOrStream in: aContext to: receiver notifying: aRequestor ifFail: failBlock logged: logFlag
"Compiles the sourceStream into a parse tree, then generates code
into a method, and answers it. If receiver is not nil, then the text can
refer to instance variables of that receiver (the Inspector uses this).
If aContext is not nil, the text can refer to temporaries in that context
(the Debugger uses this). If aRequestor is not nil, then it will receive a
notify:at: message before the attempt to evaluate is aborted."
| methodNode method theClass |
theClass := (aContext == nil ifTrue: [receiver] ifFalse: [aContext receiver]) class.
self from: textOrStream class: theClass context: aContext notifying: aRequestor.
methodNode := self translate: sourceStream noPattern: true ifFail: [^failBlock value].
"generateWithTempNames keeps temp names in the method - presumably for
tools such as the debugger; confirm against MethodNode."
method := self interactive ifTrue: [ methodNode generateWithTempNames ]
ifFalse: [methodNode generate].
logFlag ifTrue:
[SystemChangeNotifier uniqueInstance evaluated: sourceStream contents context: aContext].
^method! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 11:33' prior: 34363593!
format: aStream noPattern: noPattern ifFail: failBlock
"Parse aStream for formatting purposes only and answer the preened parse
tree; evaluate failBlock (non-locally) on parse failure."
^(self parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^failBlock value]) preen! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 10:08' prior: 58306325!
interactive
"Answer true if compilation is interactive, i.e. the compilation cue carries
a requestor that can be consulted about errors and corrections."
^ cue requestor notNil! !
!Compiler methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:10' prior: 50779387!
notify: aString at: location
"Refer to the comment in Object|notify:. Without a requestor, raise a
SyntaxErrorNotification with aString inserted into the source at location
(the empty copyReplaceFrom:to: range is a pure insertion); otherwise
delegate the report to the requestor."
^ cue requestor == nil
ifTrue: [SyntaxErrorNotification
inClass: cue getClass
category: cue category
withCode:
(sourceStream contents
copyReplaceFrom: location
to: location - 1
with: aString)
doitFlag: false
errorMessage: aString
location: location]
ifFalse: [cue requestor
notify: aString
at: location
in: sourceStream]! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 11:34' prior: 50777201!
parse: textOrStream in: aClass notifying: req
"Compile the argument, textOrStream, with respect to the class, aClass, and
answer the MethodNode that is the root of the resulting parse tree. Notify the
argument, req, if an error occurs. The failBlock is defaulted to an empty block."
self from: textOrStream class: aClass context: nil notifying: req.
^self parser
parse: sourceStream
cue: cue
noPattern: false
ifFail: []! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 10:09' prior: 36332471!
parser
"Lazily create and answer the parser, asking the class under compilation
(or this compiler's class when the cue has no class) for its parser."
parser ifNil: [parser := (cue getClass ifNil: [self class]) newParser].
^parser! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 11:37' prior: 50780779!
translate: aStream noPattern: noPattern ifFail: failBlock
"Parse aStream with this compiler's parser and cue and answer the resulting
method node; evaluate failBlock (non-locally) on parse failure."
^self parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^failBlock value]! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 11:37' prior: 19124095!
translate: aStream noPattern: noPattern ifFail: failBlock parser: parser
"Parse aStream with the caller-supplied parser (rather than this compiler's
own) and answer the resulting tree; evaluate failBlock on parse failure."
| tree |
tree := parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^ failBlock value].
^ tree! !
!Encoder methodsFor: 'results' stamp: 'cwp 12/27/2012 10:26' prior: 50999892!
associationForClass
"Answer an association for the class under compilation: the environment's
own binding when it still points at that class, otherwise a fresh value-only
Association (presumably for classes not installed under their name -
confirm)."
| assoc |
assoc := self environment associationAt: cue getClass name ifAbsent: [nil].
^assoc value == cue getClass
ifTrue: [assoc]
ifFalse: [Association new value: cue getClass]! !
!Encoder methodsFor: 'temps' stamp: 'cwp 12/27/2012 10:25' prior: 20148386!
bindTemp: name in: methodSelector
"Declare a temporary called name. A clash with an existing temp always
notifies; a clash with a non-temp (e.g. a field or class variable) notifies
only when interactive, and otherwise just logs a shadowing warning."
scopeTable at: name ifPresent:[:node|
"When non-interactive raise the error only if it's a duplicate"
(node isTemp or:[requestor interactive])
ifTrue:[^self notify:'Name is already defined']
ifFalse:[Transcript
show: '(', name, ' is shadowed in "' , cue getClass printString , '>>' , methodSelector printString , '")']].
^self reallyBind: name! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:25' prior: 20149084!
classEncoding
"This is a hack so that the parser may find out what class it was parsing for when it wants to create a syntax error view."
^ cue getClass! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:39' prior: 20138819!
encodeLiteral: object
"Answer a LiteralNode for object, shared via litSet; the cue decides how
the scanned literal is read in the current environment."
^self
name: object
key: (cue literalScannedAs: object notifying: self)
class: LiteralNode
type: LdLitType
set: litSet! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:40' prior: 20139010!
encodeSelector: aSelector
"Answer a SelectorNode for aSelector, shared via selectorSet."
^self
name: aSelector
key: aSelector
class: SelectorNode
type: SendType
set: selectorSet! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:39' prior: 58545123!
environment
"Answer the environment of the current compilation context,
be it in a class or global (e.g. a workspace). Delegates to the cue."
^cue environment! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 11:41' prior: 50994497!
lookupInPools: varName ifFound: assocBlock
"Look varName up among the cue's bindings. Answer false when the name has no
binding (or was never interned - hasInterned:ifTrue: avoids creating a
symbol for never-seen names); otherwise evaluate assocBlock with the
association found and answer its value."
^Symbol
hasInterned: varName
ifTrue:
[:sym|
(cue bindingOf: sym)
ifNil: [^false]
ifNotNil: [:assoc| assocBlock value: assoc]]! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:24' prior: 51004306!
possibleNamesFor: proposedName
"Answer spelling-correction candidates for proposedName, drawn from the
variables visible to the class under compilation."
| results |
results := cue getClass
possibleVariablesFor: proposedName
continuedFrom: nil.
^ proposedName correctAgainst: nil continuedFrom: results.
! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:24' prior: 50995012!
possibleVariablesFor: proposedVariable
"Answer spelling-correction candidates for proposedVariable: the local scope
table first and, when the name could start a global, the class's visible
variables as well."
| results |
results := proposedVariable correctAgainstDictionary: scopeTable
continuedFrom: nil.
proposedVariable first canBeGlobalVarInitial ifTrue:
[ results := cue getClass possibleVariablesFor: proposedVariable
continuedFrom: results ].
^ proposedVariable correctAgainst: nil continuedFrom: results.
! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:42' prior: 51002830!
undeclared: name
"Handle a reference to an undeclared variable. Interactively this is an
error or a notification; otherwise a resumable UndeclaredVariableWarning
decides whether the name is registered in the environment's Undeclared
dictionary or merely bound to a loose Association."
| sym |
requestor interactive ifTrue:
[requestor requestor == #error: ifTrue:
[requestor error: 'Undeclared'].
^self notify: 'Undeclared'].
"Allow knowledgeable clients to squash the undeclared warning if they want (e.g.
Diffing pretty printers that are simply formatting text). As this breaks
compilation it should only be used by clients that want to discard the result
of the compilation. To squash the warning use e.g.
[Compiler format: code in: class notifying: nil decorated: false]
on: UndeclaredVariableWarning
do: [:ex| ex resume: false]"
sym := name asSymbol.
^(UndeclaredVariableWarning new name: name selector: selector class: cue getClass) signal
ifTrue:
[| undeclared |
undeclared := cue environment undeclared.
undeclared at: sym put: nil.
self global: (undeclared associationAt: sym) name: sym]
ifFalse:
[self global: (Association key: sym) name: sym]! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:23' prior: 51006007!
warnAboutShadowed: name
"Report that name shadows an outer binding: add a warning via the requestor
and, when the compiled selector is known, log the context to the Transcript."
requestor addWarning: name,' is shadowed'.
selector ifNotNil:
[Transcript cr; show: cue getClass name,'>>', selector, '(', name,' is shadowed)']! !
"Compiler"!
!SmalltalkImage methodsFor: 'housekeeping' stamp: 'cwp 6/22/2012 15:56' prior: 58497062!
cleanOutUndeclared
"Drop entries from the globals' Undeclared dictionary that are no longer
referenced anywhere."
globals undeclared removeUnreferencedKeys! !
!SmalltalkImage methodsFor: 'special objects' stamp: 'cwp 6/22/2012 09:01' prior: 40515090!
recreateSpecialObjectsArray
"Smalltalk recreateSpecialObjectsArray"
"To external package developers:
**** DO NOT OVERRIDE THIS METHOD. *****
If you are writing a plugin and need additional special object(s) for your own use,
use addGCRoot() function and use own, separate special objects registry "
"The Special Objects Array is an array of objects used by the Squeak virtual machine.
Its contents are critical and accesses to it by the VM are unchecked, so don't even
think of playing here unless you know what you are doing."
"Entries fetched from the current array (indices 18, 25, 30, 31, 36, 38, 39
and 42) carry live VM state and are deliberately preserved, not recreated."
| newArray |
newArray := Array new: 56.
"Nil false and true get used throughout the interpreter"
newArray at: 1 put: nil.
newArray at: 2 put: false.
newArray at: 3 put: true.
"This association holds the active process (a ProcessScheduler)"
newArray at: 4 put: (self bindingOf: #Processor).
"Numerous classes below used for type checking and instantiation"
newArray at: 5 put: Bitmap.
newArray at: 6 put: SmallInteger.
newArray at: 7 put: ByteString.
newArray at: 8 put: Array.
newArray at: 9 put: Smalltalk.
newArray at: 10 put: Float.
newArray at: 11 put: MethodContext.
newArray at: 12 put: BlockContext.
newArray at: 13 put: Point.
newArray at: 14 put: LargePositiveInteger.
newArray at: 15 put: Display.
newArray at: 16 put: Message.
newArray at: 17 put: CompiledMethod.
newArray at: 18 put: (self specialObjectsArray at: 18).
"(low space Semaphore)"
newArray at: 19 put: Semaphore.
newArray at: 20 put: Character.
newArray at: 21 put: #doesNotUnderstand:.
newArray at: 22 put: #cannotReturn:.
newArray at: 23 put: nil. "This is the process signalling low space."
"An array of the 32 selectors that are compiled as special bytecodes,
paired alternately with the number of arguments each takes."
newArray at: 24 put: #( #+ 1 #- 1 #< 1 #> 1 #<= 1 #>= 1 #= 1 #~= 1
#* 1 #/ 1 #\\ 1 #@ 1 #bitShift: 1 #// 1 #bitAnd: 1 #bitOr: 1
#at: 1 #at:put: 2 #size 0 #next 0 #nextPut: 1 #atEnd 0 #== 1 #class 0
#blockCopy: 1 #value 0 #value: 1 #do: 1 #new 0 #new: 1 #x 0 #y 0 ).
"An array of the 255 Characters in ascii order.
Cog inlines table into machine code at: prim so do not regenerate it."
newArray at: 25 put: (self specialObjectsArray at: 25).
newArray at: 26 put: #mustBeBoolean.
newArray at: 27 put: ByteArray.
newArray at: 28 put: Process.
"An array of up to 31 classes whose instances will have compact headers"
newArray at: 29 put: self compactClassesArray.
newArray at: 30 put: (self specialObjectsArray at: 30). "(delay Semaphore)"
newArray at: 31 put: (self specialObjectsArray at: 31). "(user interrupt Semaphore)"
"Entries 32 - 34 unreferenced. Previously these contained prototype instances to be copied for fast initialization"
newArray at: 32 put: nil. "was (Float new: 2)"
newArray at: 33 put: nil. "was (LargePositiveInteger new: 4)"
newArray at: 34 put: nil. "was Point new"
newArray at: 35 put: #cannotInterpret:.
"Note: This must be fixed once we start using context prototypes (yeah, right)"
"(MethodContext new: CompiledMethod fullFrameSize)."
newArray at: 36 put: (self specialObjectsArray at: 36). "Is the prototype MethodContext (unused by the VM)"
newArray at: 37 put: BlockClosure.
"(BlockContext new: CompiledMethod fullFrameSize)."
newArray at: 38 put: (self specialObjectsArray at: 38). "Is the prototype BlockContext (unused by the VM)"
"array of objects referred to by external code"
newArray at: 39 put: (self specialObjectsArray at: 39). "preserve external semaphores"
newArray at: 40 put: nil. "Reserved for Mutex in Cog VMs"
newArray at: 41 put: nil. "Reserved for a LinkedList instance for overlapped calls in CogMT"
"finalization Semaphore"
newArray at: 42 put: ((self specialObjectsArray at: 42) ifNil: [Semaphore new]).
newArray at: 43 put: LargeNegativeInteger.
"External objects for callout.
Note: Written so that one can actually completely remove the FFI."
newArray at: 44 put: (self at: #ExternalAddress ifAbsent: []).
newArray at: 45 put: (self at: #ExternalStructure ifAbsent: []).
newArray at: 46 put: (self at: #ExternalData ifAbsent: []).
newArray at: 47 put: (self at: #ExternalFunction ifAbsent: []).
newArray at: 48 put: (self at: #ExternalLibrary ifAbsent: []).
newArray at: 49 put: #aboutToReturn:through:.
newArray at: 50 put: #run:with:in:.
"51 reserved for immutability message"
"newArray at: 51 put: #attemptToAssign:withIndex:."
newArray at: 52 put: #(nil "nil => generic error" #'bad receiver'
#'bad argument' #'bad index'
#'bad number of arguments'
#'inappropriate operation' #'unsupported operation'
#'no modification' #'insufficient object memory'
#'insufficient C memory' #'not found' #'bad method'
#'internal error in named primitive machinery'
#'object may move').
"53 to 55 are for Alien"
newArray at: 53 put: (self at: #Alien ifAbsent: []).
newArray at: 54 put: #invokeCallback:stack:registers:jmpbuf:.
newArray at: 55 put: (self at: #UnsafeAlien ifAbsent: []).
"Weak reference finalization"
newArray at: 56 put: (self at: #WeakFinalizationList ifAbsent: []).
"Now replace the interpreter's reference in one atomic operation"
self specialObjectsArray becomeForward: newArray
! !
!SmalltalkImage methodsFor: 'shrinking' stamp: 'cwp 6/22/2012 15:57' prior: 37288071!
unloadAllKnownPackages
"Unload all packages we know how to unload and reload.
Destructive image-shrinking operation (see the method category): it removes
the listed tool and test packages and then performs extensive cleanup."
"Prepare unloading"
Smalltalk zapMVCprojects.
Flaps disableGlobalFlaps: false.
StandardScriptingSystem removeUnreferencedPlayers.
Project removeAllButCurrent.
#('Morphic-UserObjects' 'EToy-UserObjects' 'Morphic-Imported' )
do: [:each | SystemOrganization removeSystemCategory: each].
Smalltalk at: #ServiceRegistry ifPresent:[:aClass|
SystemChangeNotifier uniqueInstance
noMoreNotificationsFor: aClass.
].
World removeAllMorphs.
"Go unloading"
#( 'ReleaseBuilder' 'ScriptLoader'
'311Deprecated' '39Deprecated'
'Universes' 'SMLoader' 'SMBase' 'Installer-Core'
'VersionNumberTests' 'VersionNumber'
'Services-Base' 'PreferenceBrowser' 'Nebraska'
'ToolBuilder-MVC' 'ST80'
'CollectionsTests' 'GraphicsTests' 'KernelTests' 'MorphicTests'
'MultilingualTests' 'NetworkTests' 'ToolsTests' 'TraitsTests'
'SystemChangeNotification-Tests' 'FlexibleVocabularies'
'EToys' 'Protocols' 'XML-Parser' 'Tests' 'SUnitGUI'
'Help-Squeak' 'HelpSystem' 'SystemReporter'
) do: [:pkgName|
"Also disable further updates so the package is not silently reloaded."
(MCPackage named: pkgName) unload.
MCMcmUpdater disableUpdatesOfPackage: pkgName.
].
"Traits use custom unload"
Smalltalk at: #Trait ifPresent:[:aClass| aClass unloadTraits].
"Post-unload cleanup"
MCWorkingCopy flushObsoletePackageInfos.
SystemOrganization removeSystemCategory: 'UserObjects'.
Presenter defaultPresenterClass: nil.
World dumpPresenter.
ScheduledControllers := nil.
Preferences removePreference: #allowEtoyUserCustomEvents.
SystemOrganization removeEmptyCategories.
ChangeSet removeChangeSetsNamedSuchThat:[:cs | (cs == ChangeSet current) not].
globals undeclared removeUnreferencedKeys.
StandardScriptingSystem initialize.
MCFileBasedRepository flushAllCaches.
MCDefinition clearInstances.
Behavior flushObsoleteSubclasses.
ChangeSet current clear.
ChangeSet current name: 'Unnamed1'.
Smalltalk flushClassNameCache.
Smalltalk at: #Browser ifPresent:[:br| br initialize].
DebuggerMethodMap voidMapCache.
DataStream initialize.
AppRegistry removeObsolete.
FileServices removeObsolete.
Preferences removeObsolete.
TheWorldMenu removeObsolete.
Smalltalk garbageCollect.
Symbol compactSymbolTable.
TheWorldMainDockingBar updateInstances.
MorphicProject defaultFill: (Color gray: 0.9).
World color: (Color gray: 0.9).
! !
!InternalTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:34' prior: 40472775!
scanFrom: aStream
"File in from aStream, using the default environment."
^ self scanFrom: aStream environment: Environment default! !
!NaturalLanguageTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:27' prior: 40496770!
scanFrom: aStream
"File in from aStream, using the default environment."
^ self scanFrom: aStream environment: Environment default! !
!SystemDictionary methodsFor: 'dictionary access' stamp: 'cwp 6/22/2012 15:58' prior: 30574136!
at: aKey put: anObject
"Override from Dictionary to check Undeclared and fix up
references to undeclared variables. Answers anObject, as Dictionary does.
(The previously declared temporaries index and element were unused and
have been removed.)"
(self includesKey: aKey) ifFalse:
["New key: migrate any forward references from Undeclared and flush
the class-name cache (presumably stale once a new global appears)."
self declare: aKey from: (self at: #Undeclared).
self flushClassNameCache].
super at: aKey put: anObject.
^ anObject! !
"System"!
"Browser class definition; note the environment instance variable, which
scopes this browser to a specific Environment."
CodeHolder subclass: #Browser
instanceVariableNames: 'environment systemOrganizer classOrganizer metaClassOrganizer editSelection metaClassIndicated selectedSystemCategory selectedClassName selectedMessageName selectedMessageCategoryName'
classVariableNames: 'ListClassesHierarchically RecentClasses'
poolDictionaries: ''
category: 'Tools-Browser'!
!Browser commentStamp: 'cwp 12/27/2012 11:09' prior: 36419432!
I represent a query path into the class descriptions, the software of the system.!
!Browser methodsFor: 'accessing' stamp: 'cwp 6/24/2012 23:20'!
selectEnvironment: anEnvironment
environment := anEnvironment.
From noreply at buildbot.pypy.org Wed Jul 2 19:57:08 2014
From: noreply at buildbot.pypy.org (ISF)
Date: Wed, 2 Jul 2014 19:57:08 +0200 (CEST)
Subject: [pypy-commit] pypy ppc-updated-backend: Fix frame_depth calculated
size
Message-ID: <20140702175708.E8E251C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Ivan Sichmann Freitas
Branch: ppc-updated-backend
Changeset: r72317:034413cecc37
Date: 2014-07-02 17:56 +0000
http://bitbucket.org/pypy/pypy/changeset/034413cecc37/
Log: Fix frame_depth calculated size
diff --git a/rpython/jit/backend/ppc/ppc_assembler.py b/rpython/jit/backend/ppc/ppc_assembler.py
--- a/rpython/jit/backend/ppc/ppc_assembler.py
+++ b/rpython/jit/backend/ppc/ppc_assembler.py
@@ -9,7 +9,8 @@
FPR_SAVE_AREA, NONVOLATILES_FLOAT,
FLOAT_INT_CONVERSION, FORCE_INDEX,
SIZE_LOAD_IMM_PATCH_SP,
- FORCE_INDEX_OFS, LR_BC_OFFSET)
+ FORCE_INDEX_OFS, LR_BC_OFFSET,
+ JITFRAME_FIXED_SIZE)
from rpython.jit.backend.ppc.helper.assembler import Saved_Volatiles
from rpython.jit.backend.ppc.helper.regalloc import _check_imm_arg
import rpython.jit.backend.ppc.register as r
@@ -783,6 +784,10 @@
operations = newoperations
return operations
+ def update_frame_depth(self, frame_depth):
+ baseofs = self.cpu.get_baseofs_of_frame_field()
+ self.current_clt.frame_info.update_frame_depth(baseofs, frame_depth)
+
def assemble_loop(self, loopname, inputargs, operations, looptoken, log):
clt = CompiledLoopToken(self.cpu, looptoken.number)
clt.allgcrefs = []
@@ -813,11 +818,11 @@
frame_info = self.datablockwrapper.malloc_aligned(jitframe.JITFRAMEINFO_SIZE,
alignment=WORD)
clt.frame_info = rffi.cast(jitframe.JITFRAMEINFOPTR, frame_info)
- clt.allgcreafs = []
- clt.frame_info.clear()
direct_bootstrap_code = self.mc.currpos()
frame_depth = self.compute_frame_depth(spilling_area, param_depth)
+ frame_depth += JITFRAME_FIXED_SIZE
+ self.update_frame_depth(frame_depth)
self.gen_bootstrap_code(start_pos, frame_depth)
self.write_pending_failure_recoveries()
From noreply at buildbot.pypy.org Wed Jul 2 20:59:11 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 20:59:11 +0200 (CEST)
Subject: [pypy-commit] pypy default: Prescale the dictionary in
ll_dict_update().
Message-ID: <20140702185911.9C3FE1D34FF@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72318:f63fbf006d6f
Date: 2014-07-02 19:53 +0200
http://bitbucket.org/pypy/pypy/changeset/f63fbf006d6f/
Log: Prescale the dictionary in ll_dict_update().
diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py
--- a/rpython/rtyper/lltypesystem/rdict.py
+++ b/rpython/rtyper/lltypesystem/rdict.py
@@ -540,18 +540,21 @@
# avoid extra branches.
def ll_dict_resize(d):
- old_entries = d.entries
- old_size = len(old_entries)
# make a 'new_size' estimate and shrink it if there are many
# deleted entry markers. See CPython for why it is a good idea to
# quadruple the dictionary size as long as it's not too big.
num_items = d.num_items + 1
if num_items > 50000: new_estimate = num_items * 2
else: new_estimate = num_items * 4
+ _ll_dict_resize_to(d, new_estimate)
+ll_dict_resize.oopspec = 'dict.resize(d)'
+
+def _ll_dict_resize_to(d, new_estimate):
new_size = DICT_INITSIZE
while new_size <= new_estimate:
new_size *= 2
- #
+ old_entries = d.entries
+ old_size = len(d.entries)
d.entries = lltype.typeOf(old_entries).TO.allocate(new_size)
d.num_items = 0
d.resize_counter = new_size * 2
@@ -563,7 +566,6 @@
ll_dict_insertclean(d, entry.key, entry.value, hash)
i += 1
old_entries.delete()
-ll_dict_resize.oopspec = 'dict.resize(d)'
# ------- a port of CPython's dictobject.c's lookdict implementation -------
PERTURB_SHIFT = 5
@@ -816,6 +818,16 @@
ll_clear.oopspec = 'dict.clear(d)'
def ll_update(dic1, dic2):
+ # Prescale 'dic1', assuming that most items don't collide.
+ # If this assumption is false, 'dic1' becomes at most two times too large.
+ # * dic2.num_items = upper bound on the number of items added
+ # * (dic1.resize_counter - 1) // 3 = room left in dic1
+ # so, if dic2 has 1 item, we need dic1.resize_counter > 3
+ # if dic2 has 2 items we need dic1.resize_counter > 6 etc.
+ if not (dic1.resize_counter > dic2.num_items * 3):
+ new_estimate = (dic1.num_items + dic2.num_items) * 2
+ _ll_dict_resize_to(dic1, new_estimate)
+ #
entries = dic2.entries
d2len = len(entries)
i = 0
From noreply at buildbot.pypy.org Wed Jul 2 20:59:44 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 20:59:44 +0200 (CEST)
Subject: [pypy-commit] pypy default: Update: now stm is 'only' 2x slower
rather than 10x on translate.py.
Message-ID: <20140702185944.579E31D3500@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72319:787e204c5c92
Date: 2014-07-02 20:10 +0200
http://bitbucket.org/pypy/pypy/changeset/787e204c5c92/
Log: Update: now stm is 'only' 2x slower rather than 10x on translate.py.
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -92,9 +92,9 @@
We're busy fixing them as we find them; feel free to `report bugs`_.
* It runs with an overhead as low as 20% on examples like "richards".
- There are also other examples with higher overheads --up to 10x for
- "translate.py"-- which we are still trying to understand. One suspect
- is our partial GC implementation, see below.
+ There are also other examples with higher overheads --currently up to
+ 2x for "translate.py"-- which we are still trying to understand.
+ One suspect is our partial GC implementation, see below.
* Currently limited to 1.5 GB of RAM (this is just a parameter in
`core.h`__). Memory overflows are not correctly handled; they cause
@@ -111,9 +111,8 @@
* The GC is new; although clearly inspired by PyPy's regular GC, it
misses a number of optimizations for now. Programs allocating large
- numbers of small objects that don't immediately die, as well as
- programs that modify large lists or dicts, suffer from these missing
- optimizations.
+ numbers of small objects that don't immediately die (surely a common
+ situation) suffer from these missing optimizations.
* The GC has no support for destructors: the ``__del__`` method is never
called (including on file objects, which won't be closed for you).
From noreply at buildbot.pypy.org Wed Jul 2 20:59:51 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 20:59:51 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: hg merge default
Message-ID: <20140702185951.C77D11D3500@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72320:dd8e2f69fe96
Date: 2014-07-02 20:44 +0200
http://bitbucket.org/pypy/pypy/changeset/dd8e2f69fe96/
Log: hg merge default
diff too long, truncating to 2000 out of 35820 lines
diff --git a/.hgtags b/.hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -6,3 +6,11 @@
9b623bc48b5950cf07184462a0e48f2c4df0d720 pypy-2.1-beta1-arm
9b623bc48b5950cf07184462a0e48f2c4df0d720 pypy-2.1-beta1-arm
ab0dd631c22015ed88e583d9fdd4c43eebf0be21 pypy-2.1-beta1-arm
+20e51c4389ed4469b66bb9d6289ce0ecfc82c4b9 release-2.3.0
+20e51c4389ed4469b66bb9d6289ce0ecfc82c4b9 release-2.3.0
+0000000000000000000000000000000000000000 release-2.3.0
+394146e9bb673514c61f0150ab2013ccf78e8de7 release-2.3
+32f35069a16d819b58c1b6efb17c44e3e53397b2 release-2.2=3.1
+32f35069a16d819b58c1b6efb17c44e3e53397b2 release-2.3.1
+32f35069a16d819b58c1b6efb17c44e3e53397b2 release-2.2=3.1
+0000000000000000000000000000000000000000 release-2.2=3.1
diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -44,31 +44,33 @@
Alex Gaynor
Michael Hudson
David Schneider
+ Matti Picus
+ Brian Kearns
+ Philip Jenvey
Holger Krekel
Christian Tismer
Hakan Ardo
Benjamin Peterson
- Matti Picus
- Philip Jenvey
+ Manuel Jacob
Anders Chrigstrom
- Brian Kearns
Eric van Riet Paap
+ Wim Lavrijsen
+ Ronan Lamy
Richard Emslie
Alexander Schremmer
- Wim Lavrijsen
Dan Villiom Podlaski Christiansen
- Manuel Jacob
Lukas Diekmann
Sven Hager
Anders Lehmann
Aurelien Campeas
Niklaus Haldimann
- Ronan Lamy
Camillo Bruni
Laura Creighton
Toon Verwaest
+ Remi Meier
Leonardo Santagada
Seo Sanghyeon
+ Romain Guillebert
Justin Peel
Ronny Pfannschmidt
David Edelsohn
@@ -80,52 +82,62 @@
Daniel Roberts
Niko Matsakis
Adrien Di Mascio
+ Alexander Hesse
Ludovic Aubry
- Alexander Hesse
Jacob Hallen
- Romain Guillebert
Jason Creighton
Alex Martelli
Michal Bendowski
Jan de Mooij
+ stian
Michael Foord
Stephan Diehl
Stefan Schwarzer
Valentino Volonghi
Tomek Meka
Patrick Maupin
- stian
Bob Ippolito
Bruno Gola
Jean-Paul Calderone
Timo Paulssen
+ Squeaky
Alexandre Fayolle
Simon Burton
Marius Gedminas
John Witulski
+ Konstantin Lopuhin
Greg Price
Dario Bertini
Mark Pearse
Simon Cross
- Konstantin Lopuhin
Andreas Stührk
Jean-Philippe St. Pierre
Guido van Rossum
Pavel Vinogradov
+ Paweł Piotr Przeradowski
Paul deGrandis
Ilya Osadchiy
+ Tobias Oberstein
Adrian Kuhn
Boris Feigin
+ Stefano Rivera
tav
+ Taavi Burns
Georg Brandl
Bert Freudenberg
Stian Andreassen
- Stefano Rivera
+ Laurence Tratt
Wanja Saatkamp
+ Ivan Sichmann Freitas
Gerald Klix
Mike Blume
- Taavi Burns
Oscar Nierstrasz
+ Stefan H. Muller
+ Jeremy Thurgood
+ Gregor Wegberg
+ Rami Chowdhury
+ Tobias Pape
+ Edd Barrett
David Malcolm
Eugene Oden
Henry Mason
@@ -135,18 +147,16 @@
Dusty Phillips
Lukas Renggli
Guenter Jantzen
- Tobias Oberstein
- Remi Meier
Ned Batchelder
Amit Regmi
Ben Young
Nicolas Chauvat
Andrew Durdin
+ Andrew Chambers
Michael Schneider
Nicholas Riley
Jason Chu
Igor Trindade Oliveira
- Jeremy Thurgood
Rocco Moretti
Gintautas Miliauskas
Michael Twomey
@@ -159,18 +169,19 @@
Karl Bartel
Brian Dorsey
Victor Stinner
+ Andrews Medina
Stuart Williams
Jasper Schulz
+ Christian Hudon
Toby Watson
Antoine Pitrou
Aaron Iles
Michael Cheng
Justas Sadzevicius
+ Mikael Schönenberg
Gasper Zejn
Neil Shepperd
- Mikael Schönenberg
Elmo Mäntynen
- Tobias Pape
Jonathan David Riehl
Stanislaw Halik
Anders Qvist
@@ -182,19 +193,18 @@
Alexander Sedov
Corbin Simpson
Christopher Pope
- Laurence Tratt
- Guillebert Romain
+ wenzhuman
Christian Tismer
+ Marc Abramowitz
Dan Stromberg
Stefano Parmesan
- Christian Hudon
Alexis Daboville
Jens-Uwe Mager
Carl Meyer
Karl Ramm
Pieter Zieschang
Gabriel
- Paweł Piotr Przeradowski
+ Lukas Vacek
Andrew Dalke
Sylvain Thenault
Nathan Taylor
@@ -203,8 +213,11 @@
Alejandro J. Cura
Jacob Oscarson
Travis Francis Athougies
+ Ryan Gonzalez
Kristjan Valur Jonsson
+ Sebastian Pawluś
Neil Blakey-Milner
+ anatoly techtonik
Lutz Paelike
Lucio Torre
Lars Wassermann
@@ -218,13 +231,14 @@
Martin Blais
Lene Wagner
Tomo Cocoa
- Andrews Medina
roberto at goyle
+ Yury V. Zaytsev
+ Anna Katrina Dominguez
William Leslie
Bobby Impollonia
timo at eistee.fritz.box
Andrew Thompson
- Yusei Tahara
+ Ben Darnell
Roberto De Ioris
Juan Francisco Cantero Hurtado
Godefroid Chappelle
@@ -234,28 +248,39 @@
Michael Hudson-Doyle
Anders Sigfridsson
Yasir Suhail
+ rafalgalczynski at gmail.com
Floris Bruynooghe
+ Laurens Van Houtven
Akira Li
Gustavo Niemeyer
Stephan Busemann
- Anna Katrina Dominguez
+ Rafał Gałczyński
+ Yusei Tahara
Christian Muirhead
James Lan
shoma hosaka
- Daniel Neuhäuser
+ Daniel Neuh?user
+ Matthew Miller
Buck Golemon
Konrad Delong
Dinu Gherman
Chris Lambacher
coolbutuseless at gmail.com
+ Rodrigo Araújo
+ w31rd0
Jim Baker
- Rodrigo Araújo
+ James Robert
Armin Ronacher
Brett Cannon
yrttyr
+ aliceinwire
+ OlivierBlanvillain
Zooko Wilcox-O Hearn
Tomer Chachamu
Christopher Groskopf
+ Asmo Soinio
+ Stefan Marr
+ jiaaro
opassembler.py
Antony Lee
Jim Hunziker
@@ -263,12 +288,13 @@
Even Wiik Thomassen
jbs
soareschen
+ Kurt Griffiths
+ Mike Bayer
Flavio Percoco
Kristoffer Kleine
yasirs
Michael Chermside
Anna Ravencroft
- Andrew Chambers
Julien Phalip
Dan Loewenherz
diff --git a/_pytest/resultlog.py b/_pytest/resultlog.py
--- a/_pytest/resultlog.py
+++ b/_pytest/resultlog.py
@@ -56,6 +56,9 @@
for line in longrepr.splitlines():
py.builtin.print_(" %s" % line, file=self.logfile)
for key, text in sections:
+ # py.io.StdCaptureFD may send in unicode
+ if isinstance(text, unicode):
+ text = text.encode('utf-8')
py.builtin.print_(" ", file=self.logfile)
py.builtin.print_(" -------------------- %s --------------------"
% key.rstrip(), file=self.logfile)
diff --git a/lib-python/2.7/ctypes/__init__.py b/lib-python/2.7/ctypes/__init__.py
--- a/lib-python/2.7/ctypes/__init__.py
+++ b/lib-python/2.7/ctypes/__init__.py
@@ -389,12 +389,13 @@
func.__name__ = name_or_ordinal
return func
-class PyDLL(CDLL):
- """This class represents the Python library itself. It allows to
- access Python API functions. The GIL is not released, and
- Python exceptions are handled correctly.
- """
- _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
+# Not in PyPy
+#class PyDLL(CDLL):
+# """This class represents the Python library itself. It allows to
+# access Python API functions. The GIL is not released, and
+# Python exceptions are handled correctly.
+# """
+# _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
if _os.name in ("nt", "ce"):
@@ -447,15 +448,8 @@
return self._dlltype(name)
cdll = LibraryLoader(CDLL)
-pydll = LibraryLoader(PyDLL)
-
-if _os.name in ("nt", "ce"):
- pythonapi = PyDLL("python dll", None, _sys.dllhandle)
-elif _sys.platform == "cygwin":
- pythonapi = PyDLL("libpython%d.%d.dll" % _sys.version_info[:2])
-else:
- pythonapi = PyDLL(None)
-
+# not on PyPy
+#pydll = LibraryLoader(PyDLL)
if _os.name in ("nt", "ce"):
windll = LibraryLoader(WinDLL)
diff --git a/lib-python/2.7/ctypes/test/test_values.py b/lib-python/2.7/ctypes/test/test_values.py
--- a/lib-python/2.7/ctypes/test/test_values.py
+++ b/lib-python/2.7/ctypes/test/test_values.py
@@ -4,6 +4,7 @@
import unittest
from ctypes import *
+from ctypes.test import xfail
import _ctypes_test
@@ -23,7 +24,8 @@
class Win_ValuesTestCase(unittest.TestCase):
"""This test only works when python itself is a dll/shared library"""
-
+
+ @xfail
def test_optimizeflag(self):
# This test accesses the Py_OptimizeFlag intger, which is
# exported by the Python dll.
@@ -40,6 +42,7 @@
else:
self.assertEqual(opt, 2)
+ @xfail
def test_frozentable(self):
# Python exports a PyImport_FrozenModules symbol. This is a
# pointer to an array of struct _frozen entries. The end of the
@@ -75,6 +78,7 @@
from ctypes import _pointer_type_cache
del _pointer_type_cache[struct_frozen]
+ @xfail
def test_undefined(self):
self.assertRaises(ValueError, c_int.in_dll, pydll, "Undefined_Symbol")
diff --git a/lib-python/2.7/imputil.py b/lib-python/2.7/imputil.py
--- a/lib-python/2.7/imputil.py
+++ b/lib-python/2.7/imputil.py
@@ -422,7 +422,8 @@
saved back to the filesystem for future imports. The source file's
modification timestamp must be provided as a Long value.
"""
- codestring = open(pathname, 'rU').read()
+ with open(pathname, 'rU') as fp:
+ codestring = fp.read()
if codestring and codestring[-1] != '\n':
codestring = codestring + '\n'
code = __builtin__.compile(codestring, pathname, 'exec')
@@ -603,8 +604,8 @@
self.desc = desc
def import_file(self, filename, finfo, fqname):
- fp = open(filename, self.desc[1])
- module = imp.load_module(fqname, fp, filename, self.desc)
+ with open(filename, self.desc[1]) as fp:
+ module = imp.load_module(fqname, fp, filename, self.desc)
module.__file__ = filename
return 0, module, { }
diff --git a/lib-python/2.7/modulefinder.py b/lib-python/2.7/modulefinder.py
--- a/lib-python/2.7/modulefinder.py
+++ b/lib-python/2.7/modulefinder.py
@@ -109,16 +109,16 @@
def run_script(self, pathname):
self.msg(2, "run_script", pathname)
- fp = open(pathname, READ_MODE)
- stuff = ("", "r", imp.PY_SOURCE)
- self.load_module('__main__', fp, pathname, stuff)
+ with open(pathname, READ_MODE) as fp:
+ stuff = ("", "r", imp.PY_SOURCE)
+ self.load_module('__main__', fp, pathname, stuff)
def load_file(self, pathname):
dir, name = os.path.split(pathname)
name, ext = os.path.splitext(name)
- fp = open(pathname, READ_MODE)
- stuff = (ext, "r", imp.PY_SOURCE)
- self.load_module(name, fp, pathname, stuff)
+ with open(pathname, READ_MODE) as fp:
+ stuff = (ext, "r", imp.PY_SOURCE)
+ self.load_module(name, fp, pathname, stuff)
def import_hook(self, name, caller=None, fromlist=None, level=-1):
self.msg(3, "import_hook", name, caller, fromlist, level)
@@ -461,6 +461,8 @@
fp, buf, stuff = self.find_module("__init__", m.__path__)
self.load_module(fqname, fp, buf, stuff)
self.msgout(2, "load_package ->", m)
+ if fp:
+ fp.close()
return m
def add_module(self, fqname):
diff --git a/lib-python/2.7/test/test_argparse.py b/lib-python/2.7/test/test_argparse.py
--- a/lib-python/2.7/test/test_argparse.py
+++ b/lib-python/2.7/test/test_argparse.py
@@ -48,6 +48,9 @@
def tearDown(self):
os.chdir(self.old_dir)
+ import gc
+ # Force a collection which should close FileType() options
+ gc.collect()
for root, dirs, files in os.walk(self.temp_dir, topdown=False):
for name in files:
os.chmod(os.path.join(self.temp_dir, name), stat.S_IWRITE)
diff --git a/lib-python/2.7/test/test_gdbm.py b/lib-python/2.7/test/test_gdbm.py
--- a/lib-python/2.7/test/test_gdbm.py
+++ b/lib-python/2.7/test/test_gdbm.py
@@ -74,6 +74,29 @@
size2 = os.path.getsize(filename)
self.assertTrue(size1 > size2 >= size0)
+ def test_sync(self):
+ # check if sync works at all, not sure how to check it
+ self.g = gdbm.open(filename, 'cf')
+ self.g['x'] = 'x' * 10000
+ self.g.sync()
+
+ def test_get_key(self):
+ self.g = gdbm.open(filename, 'cf')
+ self.g['x'] = 'x' * 10000
+ self.g.close()
+ self.g = gdbm.open(filename, 'r')
+ self.assertEquals(self.g['x'], 'x' * 10000)
+
+ def test_key_with_null_bytes(self):
+ key = 'a\x00b'
+ value = 'c\x00d'
+ self.g = gdbm.open(filename, 'cf')
+ self.g[key] = value
+ self.g.close()
+ self.g = gdbm.open(filename, 'r')
+ self.assertEquals(self.g[key], value)
+ self.assertTrue(key in self.g)
+ self.assertTrue(self.g.has_key(key))
def test_main():
run_unittest(TestGdbm)
diff --git a/lib-python/2.7/timeit.py b/lib-python/2.7/timeit.py
--- a/lib-python/2.7/timeit.py
+++ b/lib-python/2.7/timeit.py
@@ -55,11 +55,6 @@
import gc
import sys
import time
-try:
- import itertools
-except ImportError:
- # Must be an older Python version (see timeit() below)
- itertools = None
__all__ = ["Timer"]
@@ -81,7 +76,8 @@
def inner(_it, _timer):
%(setup)s
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
%(stmt)s
_t1 = _timer()
return _t1 - _t0
@@ -96,7 +92,8 @@
def inner(_it, _timer, _func=func):
setup()
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
_func()
_t1 = _timer()
return _t1 - _t0
@@ -133,9 +130,11 @@
else:
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
- code = compile(src, dummy_src_name, "exec")
- exec code in globals(), ns
- self.inner = ns["inner"]
+ def make_inner():
+ code = compile(src, dummy_src_name, "exec")
+ exec code in globals(), ns
+ return ns["inner"]
+ self.make_inner = make_inner
elif hasattr(stmt, '__call__'):
self.src = None
if isinstance(setup, basestring):
@@ -144,7 +143,8 @@
exec _setup in globals(), ns
elif not hasattr(setup, '__call__'):
raise ValueError("setup is neither a string nor callable")
- self.inner = _template_func(setup, stmt)
+ inner = _template_func(setup, stmt)
+ self.make_inner = lambda: inner
else:
raise ValueError("stmt is neither a string nor callable")
@@ -185,15 +185,12 @@
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
- if itertools:
- it = itertools.repeat(None, number)
- else:
- it = [None] * number
+ inner = self.make_inner()
gcold = gc.isenabled()
if '__pypy__' not in sys.builtin_module_names:
gc.disable() # only do that on CPython
try:
- timing = self.inner(it, self.timer)
+ timing = inner(number, self.timer)
finally:
if gcold:
gc.enable()
diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py
--- a/lib_pypy/_ctypes/function.py
+++ b/lib_pypy/_ctypes/function.py
@@ -219,6 +219,8 @@
if restype is None:
import ctypes
restype = ctypes.c_int
+ if self._argtypes_ is None:
+ self._argtypes_ = []
self._ptr = self._getfuncptr_fromaddress(self._argtypes_, restype)
self._check_argtypes_for_fastpath()
return
diff --git a/lib_pypy/_pypy_interact.py b/lib_pypy/_pypy_interact.py
--- a/lib_pypy/_pypy_interact.py
+++ b/lib_pypy/_pypy_interact.py
@@ -3,6 +3,8 @@
import sys
import os
+irc_header = "And now for something completely different"
+
def interactive_console(mainmodule=None, quiet=False):
# set sys.{ps1,ps2} just before invoking the interactive interpreter. This
@@ -15,8 +17,7 @@
if not quiet:
try:
from _pypy_irc_topic import some_topic
- text = "And now for something completely different: ``%s''" % (
- some_topic(),)
+ text = "%s: ``%s''" % ( irc_header, some_topic())
while len(text) >= 80:
i = text[:80].rfind(' ')
print(text[:i])
diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py
--- a/lib_pypy/_pypy_testcapi.py
+++ b/lib_pypy/_pypy_testcapi.py
@@ -13,7 +13,15 @@
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
k2 = k2.lstrip('0').rstrip('L')
- output_dir = tempfile.gettempdir() + os.path.sep + 'tmp_%s%s' %(k1, k2)
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = os.getuid()
+ output_dir = tempfile.gettempdir() + os.path.sep + 'tmp_%s_%s%s' % (
+ username, k1, k2)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
return output_dir
diff --git a/lib_pypy/_tkinter/license.terms b/lib_pypy/_tkinter/license.terms
new file mode 100644
--- /dev/null
+++ b/lib_pypy/_tkinter/license.terms
@@ -0,0 +1,39 @@
+This software is copyrighted by the Regents of the University of
+California, Sun Microsystems, Inc., and other parties. The following
+terms apply to all files associated with the software unless explicitly
+disclaimed in individual files.
+
+The authors hereby grant permission to use, copy, modify, distribute,
+and license this software and its documentation for any purpose, provided
+that existing copyright notices are retained in all copies and that this
+notice is included verbatim in any distributions. No written agreement,
+license, or royalty fee is required for any of the authorized uses.
+Modifications to this software may be copyrighted by their authors
+and need not follow the licensing terms described here, provided that
+the new terms are clearly indicated on the first page of each file where
+they apply.
+
+IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
+FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
+DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
+IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
+NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
+MODIFICATIONS.
+
+GOVERNMENT USE: If you are acquiring this software on behalf of the
+U.S. government, the Government shall have only "Restricted Rights"
+in the software and related documentation as defined in the Federal
+Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2). If you
+are acquiring the software on behalf of the Department of Defense, the
+software shall be classified as "Commercial Computer Software" and the
+Government shall have only "Restricted Rights" as defined in Clause
+252.227-7013 (c) (1) of DFARs. Notwithstanding the foregoing, the
+authors grant the U.S. Government and others acting in its behalf
+permission to use and distribute the software in accordance with the
+terms specified in this license.
diff --git a/lib_pypy/_tkinter/tklib.py b/lib_pypy/_tkinter/tklib.py
--- a/lib_pypy/_tkinter/tklib.py
+++ b/lib_pypy/_tkinter/tklib.py
@@ -121,6 +121,10 @@
incdirs = []
linklibs = ['tcl85', 'tk85']
libdirs = []
+elif sys.platform == 'darwin':
+ incdirs = ['/System/Library/Frameworks/Tk.framework/Versions/Current/Headers/']
+ linklibs = ['tcl', 'tk']
+ libdirs = []
else:
incdirs=['/usr/include/tcl']
linklibs=['tcl', 'tk']
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -443,6 +443,10 @@
for enumname, enumval in zip(tp.enumerators, tp.enumvalues):
if enumname not in library.__dict__:
library.__dict__[enumname] = enumval
+ for key, val in ffi._parser._int_constants.items():
+ if key not in library.__dict__:
+ library.__dict__[key] = val
+
copied_enums.append(True)
if name in library.__dict__:
return
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -24,6 +24,7 @@
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
_r_words = re.compile(r"\w+|\S")
_parser_cache = None
+_r_int_literal = re.compile(r"^0?x?[0-9a-f]+u?l?$", re.IGNORECASE)
def _get_parser():
global _parser_cache
@@ -99,6 +100,7 @@
self._structnode2type = weakref.WeakKeyDictionary()
self._override = False
self._packed = False
+ self._int_constants = {}
def _parse(self, csource):
csource, macros = _preprocess(csource)
@@ -128,9 +130,10 @@
finally:
if lock is not None:
lock.release()
- return ast, macros
+ # csource will be used to find buggy source text
+ return ast, macros, csource
- def convert_pycparser_error(self, e, csource):
+ def _convert_pycparser_error(self, e, csource):
# xxx look for ":NUM:" at the start of str(e) and try to interpret
# it as a line number
line = None
@@ -142,6 +145,12 @@
csourcelines = csource.splitlines()
if 1 <= linenum <= len(csourcelines):
line = csourcelines[linenum-1]
+ return line
+
+ def convert_pycparser_error(self, e, csource):
+ line = self._convert_pycparser_error(e, csource)
+
+ msg = str(e)
if line:
msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
else:
@@ -160,14 +169,9 @@
self._packed = prev_packed
def _internal_parse(self, csource):
- ast, macros = self._parse(csource)
+ ast, macros, csource = self._parse(csource)
# add the macros
- for key, value in macros.items():
- value = value.strip()
- if value != '...':
- raise api.CDefError('only supports the syntax "#define '
- '%s ..." for now (literally)' % key)
- self._declare('macro ' + key, value)
+ self._process_macros(macros)
# find the first "__dotdotdot__" and use that as a separator
# between the repeated typedefs and the real csource
iterator = iter(ast.ext)
@@ -175,27 +179,61 @@
if decl.name == '__dotdotdot__':
break
#
- for decl in iterator:
- if isinstance(decl, pycparser.c_ast.Decl):
- self._parse_decl(decl)
- elif isinstance(decl, pycparser.c_ast.Typedef):
- if not decl.name:
- raise api.CDefError("typedef does not declare any name",
- decl)
- if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
- and decl.type.type.names == ['__dotdotdot__']):
- realtype = model.unknown_type(decl.name)
- elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
- isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
- isinstance(decl.type.type.type,
- pycparser.c_ast.IdentifierType) and
- decl.type.type.type.names == ['__dotdotdot__']):
- realtype = model.unknown_ptr_type(decl.name)
+ try:
+ for decl in iterator:
+ if isinstance(decl, pycparser.c_ast.Decl):
+ self._parse_decl(decl)
+ elif isinstance(decl, pycparser.c_ast.Typedef):
+ if not decl.name:
+ raise api.CDefError("typedef does not declare any name",
+ decl)
+ if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
+ and decl.type.type.names == ['__dotdotdot__']):
+ realtype = model.unknown_type(decl.name)
+ elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
+ isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
+ isinstance(decl.type.type.type,
+ pycparser.c_ast.IdentifierType) and
+ decl.type.type.type.names == ['__dotdotdot__']):
+ realtype = model.unknown_ptr_type(decl.name)
+ else:
+ realtype = self._get_type(decl.type, name=decl.name)
+ self._declare('typedef ' + decl.name, realtype)
else:
- realtype = self._get_type(decl.type, name=decl.name)
- self._declare('typedef ' + decl.name, realtype)
+ raise api.CDefError("unrecognized construct", decl)
+ except api.FFIError as e:
+ msg = self._convert_pycparser_error(e, csource)
+ if msg:
+ e.args = (e.args[0] + "\n *** Err: %s" % msg,)
+ raise
+
+ def _add_constants(self, key, val):
+ if key in self._int_constants:
+ raise api.FFIError(
+ "multiple declarations of constant: %s" % (key,))
+ self._int_constants[key] = val
+
+ def _process_macros(self, macros):
+ for key, value in macros.items():
+ value = value.strip()
+ match = _r_int_literal.search(value)
+ if match is not None:
+ int_str = match.group(0).lower().rstrip("ul")
+
+ # "010" is not valid oct in py3
+ if (int_str.startswith("0") and
+ int_str != "0" and
+ not int_str.startswith("0x")):
+ int_str = "0o" + int_str[1:]
+
+ pyvalue = int(int_str, 0)
+ self._add_constants(key, pyvalue)
+ elif value == '...':
+ self._declare('macro ' + key, value)
else:
- raise api.CDefError("unrecognized construct", decl)
+ raise api.CDefError('only supports the syntax "#define '
+ '%s ..." (literally) or "#define '
+ '%s 0x1FF" for now' % (key, key))
def _parse_decl(self, decl):
node = decl.type
@@ -227,7 +265,7 @@
self._declare('variable ' + decl.name, tp)
def parse_type(self, cdecl):
- ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)
+ ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
assert not macros
exprnode = ast.ext[-1].type.args.params[0]
if isinstance(exprnode, pycparser.c_ast.ID):
@@ -306,7 +344,8 @@
if ident == 'void':
return model.void_type
if ident == '__dotdotdot__':
- raise api.FFIError('bad usage of "..."')
+ raise api.FFIError(':%d: bad usage of "..."' %
+ typenode.coord.line)
return resolve_common_type(ident)
#
if isinstance(type, pycparser.c_ast.Struct):
@@ -333,7 +372,8 @@
return self._get_struct_union_enum_type('union', typenode, name,
nested=True)
#
- raise api.FFIError("bad or unsupported type declaration")
+ raise api.FFIError(":%d: bad or unsupported type declaration" %
+ typenode.coord.line)
def _parse_function_type(self, typenode, funcname=None):
params = list(getattr(typenode.args, 'params', []))
@@ -499,6 +539,10 @@
if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
exprnode.op == '-'):
return -self._parse_constant(exprnode.expr)
+ # load previously defined int constant
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name in self._int_constants):
+ return self._int_constants[exprnode.name]
#
if partial_length_ok:
if (isinstance(exprnode, pycparser.c_ast.ID) and
@@ -506,8 +550,8 @@
self._partial_length = True
return '...'
#
- raise api.FFIError("unsupported expression: expected a "
- "simple numeric constant")
+ raise api.FFIError(":%d: unsupported expression: expected a "
+ "simple numeric constant" % exprnode.coord.line)
def _build_enum_type(self, explicit_name, decls):
if decls is not None:
@@ -522,6 +566,7 @@
if enum.value is not None:
nextenumvalue = self._parse_constant(enum.value)
enumvalues.append(nextenumvalue)
+ self._add_constants(enum.name, nextenumvalue)
nextenumvalue += 1
enumvalues = tuple(enumvalues)
tp = model.EnumType(explicit_name, enumerators, enumvalues)
@@ -535,3 +580,5 @@
kind = name.split(' ', 1)[0]
if kind in ('typedef', 'struct', 'union', 'enum'):
self._declare(name, tp)
+ for k, v in other._int_constants.items():
+ self._add_constants(k, v)
diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py
--- a/lib_pypy/cffi/ffiplatform.py
+++ b/lib_pypy/cffi/ffiplatform.py
@@ -38,6 +38,7 @@
import distutils.errors
#
dist = Distribution({'ext_modules': [ext]})
+ dist.parse_config_files()
options = dist.get_option_dict('build_ext')
options['force'] = ('ffiplatform', True)
options['build_lib'] = ('ffiplatform', tmpdir)
diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py
--- a/lib_pypy/cffi/vengine_cpy.py
+++ b/lib_pypy/cffi/vengine_cpy.py
@@ -89,43 +89,54 @@
# by generate_cpy_function_method().
prnt('static PyMethodDef _cffi_methods[] = {')
self._generate("method")
- prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS},')
- prnt(' {NULL, NULL} /* Sentinel */')
+ prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+ prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
prnt('};')
prnt()
#
# standard init.
modname = self.verifier.get_module_name()
- if sys.version_info >= (3,):
- prnt('static struct PyModuleDef _cffi_module_def = {')
- prnt(' PyModuleDef_HEAD_INIT,')
- prnt(' "%s",' % modname)
- prnt(' NULL,')
- prnt(' -1,')
- prnt(' _cffi_methods,')
- prnt(' NULL, NULL, NULL, NULL')
- prnt('};')
- prnt()
- initname = 'PyInit_%s' % modname
- createmod = 'PyModule_Create(&_cffi_module_def)'
- errorcase = 'return NULL'
- finalreturn = 'return lib'
- else:
- initname = 'init%s' % modname
- createmod = 'Py_InitModule("%s", _cffi_methods)' % modname
- errorcase = 'return'
- finalreturn = 'return'
+ constants = self._chained_list_constants[False]
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt()
+ prnt('static struct PyModuleDef _cffi_module_def = {')
+ prnt(' PyModuleDef_HEAD_INIT,')
+ prnt(' "%s",' % modname)
+ prnt(' NULL,')
+ prnt(' -1,')
+ prnt(' _cffi_methods,')
+ prnt(' NULL, NULL, NULL, NULL')
+ prnt('};')
+ prnt()
prnt('PyMODINIT_FUNC')
- prnt('%s(void)' % initname)
+ prnt('PyInit_%s(void)' % modname)
prnt('{')
prnt(' PyObject *lib;')
- prnt(' lib = %s;' % createmod)
- prnt(' if (lib == NULL || %s < 0)' % (
- self._chained_list_constants[False],))
- prnt(' %s;' % errorcase)
- prnt(' _cffi_init();')
- prnt(' %s;' % finalreturn)
+ prnt(' lib = PyModule_Create(&_cffi_module_def);')
+ prnt(' if (lib == NULL)')
+ prnt(' return NULL;')
+ prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+ prnt(' Py_DECREF(lib);')
+ prnt(' return NULL;')
+ prnt(' }')
+ prnt(' return lib;')
prnt('}')
+ prnt()
+ prnt('#else')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
+ prnt(' if (lib == NULL)')
+ prnt(' return;')
+ prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
+ prnt(' return;')
+ prnt(' return;')
+ prnt('}')
+ prnt()
+ prnt('#endif')
def load_library(self):
# XXX review all usages of 'self' here!
@@ -394,7 +405,7 @@
meth = 'METH_O'
else:
meth = 'METH_VARARGS'
- self._prnt(' {"%s", _cffi_f_%s, %s},' % (name, name, meth))
+ self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
_loading_cpy_function = _loaded_noop
@@ -481,8 +492,8 @@
if tp.fldnames is None:
return # nothing to do with opaque structs
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- self._prnt(' {"%s", %s, METH_NOARGS},' % (layoutfuncname,
- layoutfuncname))
+ self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
+ layoutfuncname))
def _loading_struct_or_union(self, tp, prefix, name, module):
if tp.fldnames is None:
@@ -589,13 +600,7 @@
'variable type'),))
assert delayed
else:
- prnt(' if (LONG_MIN <= (%s) && (%s) <= LONG_MAX)' % (name, name))
- prnt(' o = PyInt_FromLong((long)(%s));' % (name,))
- prnt(' else if ((%s) <= 0)' % (name,))
- prnt(' o = PyLong_FromLongLong((long long)(%s));' % (name,))
- prnt(' else')
- prnt(' o = PyLong_FromUnsignedLongLong('
- '(unsigned long long)(%s));' % (name,))
+ prnt(' o = _cffi_from_c_int_const(%s);' % name)
prnt(' if (o == NULL)')
prnt(' return -1;')
if size_too:
@@ -632,13 +637,18 @@
# ----------
# enums
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
if tp.partial:
for enumerator in tp.enumerators:
self._generate_cpy_const(True, enumerator, delayed=False)
return
#
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
prnt = self._prnt
prnt('static int %s(PyObject *lib)' % funcname)
prnt('{')
@@ -760,17 +770,30 @@
#include <Python.h>
#include <stddef.h>
-#ifdef MS_WIN32
-#include /* for alloca() */
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-typedef unsigned char _Bool;
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+ typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# include <alloca.h>
+# endif
#endif
#if PY_MAJOR_VERSION < 3
@@ -795,6 +818,15 @@
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
+#define _cffi_from_c_int_const(x) \
+ (((x) > 0) ? \
+ ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
+ ((long long)(x) >= (long long)LONG_MIN) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromLongLong((long long)(x)))
+
#define _cffi_from_c_int(x, type) \
(((type)-1) > 0 ? /* unsigned */ \
(sizeof(type) < sizeof(long) ? PyInt_FromLong(x) : \
@@ -804,14 +836,14 @@
PyLong_FromLongLong(x)))
#define _cffi_to_c_int(o, type) \
- (sizeof(type) == 1 ? (((type)-1) > 0 ? _cffi_to_c_u8(o) \
- : _cffi_to_c_i8(o)) : \
- sizeof(type) == 2 ? (((type)-1) > 0 ? _cffi_to_c_u16(o) \
- : _cffi_to_c_i16(o)) : \
- sizeof(type) == 4 ? (((type)-1) > 0 ? _cffi_to_c_u32(o) \
- : _cffi_to_c_i32(o)) : \
- sizeof(type) == 8 ? (((type)-1) > 0 ? _cffi_to_c_u64(o) \
- : _cffi_to_c_i64(o)) : \
+ (sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
(Py_FatalError("unsupported size for type " #type), 0))
#define _cffi_to_c_i8 \
@@ -885,25 +917,32 @@
return PyBool_FromLong(was_alive);
}
-static void _cffi_init(void)
+static int _cffi_init(void)
{
- PyObject *module = PyImport_ImportModule("_cffi_backend");
- PyObject *c_api_object;
+ PyObject *module, *c_api_object = NULL;
+ module = PyImport_ImportModule("_cffi_backend");
if (module == NULL)
- return;
+ goto failure;
c_api_object = PyObject_GetAttrString(module, "_C_API");
if (c_api_object == NULL)
- return;
+ goto failure;
if (!PyCapsule_CheckExact(c_api_object)) {
- Py_DECREF(c_api_object);
PyErr_SetNone(PyExc_ImportError);
- return;
+ goto failure;
}
memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
_CFFI_NUM_EXPORTS * sizeof(void *));
+
+ Py_DECREF(module);
Py_DECREF(c_api_object);
+ return 0;
+
+ failure:
+ Py_XDECREF(module);
+ Py_XDECREF(c_api_object);
+ return -1;
}
#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py
--- a/lib_pypy/cffi/vengine_gen.py
+++ b/lib_pypy/cffi/vengine_gen.py
@@ -249,10 +249,10 @@
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
prnt('}')
self.export_symbols.append(layoutfuncname)
- prnt('ssize_t %s(ssize_t i)' % (layoutfuncname,))
+ prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
prnt('{')
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
- prnt(' static ssize_t nums[] = {')
+ prnt(' static intptr_t nums[] = {')
prnt(' sizeof(%s),' % cname)
prnt(' offsetof(struct _cffi_aligncheck, y),')
for fname, ftype, fbitsize in tp.enumfields():
@@ -276,7 +276,7 @@
return # nothing to do with opaque structs
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
#
- BFunc = self.ffi._typeof_locked("ssize_t(*)(ssize_t)")[0]
+ BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
function = module.load_function(BFunc, layoutfuncname)
layout = []
num = 0
@@ -410,13 +410,18 @@
# ----------
# enums
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
if tp.partial:
for enumerator in tp.enumerators:
self._generate_gen_const(True, enumerator)
return
#
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
self.export_symbols.append(funcname)
prnt = self._prnt
prnt('int %s(char *out_error)' % funcname)
@@ -453,7 +458,7 @@
else:
BType = self.ffi._typeof_locked("char[]")[0]
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
function = module.load_function(BFunc, funcname)
p = self.ffi.new(BType, 256)
if function(p) < 0:
@@ -547,20 +552,29 @@
#include <stdio.h>
#include <stddef.h>   /* XXX for ssize_t on some platforms */
-#ifdef _WIN32
-# include <malloc.h>   /* for alloca() */
-# define snprintf _snprintf
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-typedef SSIZE_T ssize_t;
-typedef unsigned char _Bool;
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+ typedef unsigned char _Bool;
+# endif
#else
-# include <stdint.h>
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# include <alloca.h>
+# endif
#endif
'''
diff --git a/lib_pypy/gdbm.py b/lib_pypy/gdbm.py
new file mode 100644
--- /dev/null
+++ b/lib_pypy/gdbm.py
@@ -0,0 +1,174 @@
+import cffi, os
+
+ffi = cffi.FFI()
+ffi.cdef('''
+#define GDBM_READER ...
+#define GDBM_WRITER ...
+#define GDBM_WRCREAT ...
+#define GDBM_NEWDB ...
+#define GDBM_FAST ...
+#define GDBM_SYNC ...
+#define GDBM_NOLOCK ...
+#define GDBM_REPLACE ...
+
+void* gdbm_open(char *, int, int, int, void (*)());
+void gdbm_close(void*);
+
+typedef struct {
+ char *dptr;
+ int dsize;
+} datum;
+
+datum gdbm_fetch(void*, datum);
+int gdbm_delete(void*, datum);
+int gdbm_store(void*, datum, datum, int);
+int gdbm_exists(void*, datum);
+
+int gdbm_reorganize(void*);
+
+datum gdbm_firstkey(void*);
+datum gdbm_nextkey(void*, datum);
+void gdbm_sync(void*);
+
+char* gdbm_strerror(int);
+int gdbm_errno;
+
+void free(void*);
+''')
+
+try:
+ lib = ffi.verify('''
+ #include "gdbm.h"
+ ''', libraries=['gdbm'])
+except cffi.VerificationError as e:
+ # distutils does not preserve the actual message,
+ # but the verification is simple enough that the
+ # failure must be due to missing gdbm dev libs
+ raise ImportError('%s: %s' %(e.__class__.__name__, e))
+
+class error(Exception):
+ pass
+
+def _fromstr(key):
+ if not isinstance(key, str):
+ raise TypeError("gdbm mappings have string indices only")
+ return {'dptr': ffi.new("char[]", key), 'dsize': len(key)}
+
+class gdbm(object):
+ ll_dbm = None
+
+ def __init__(self, filename, iflags, mode):
+ res = lib.gdbm_open(filename, 0, iflags, mode, ffi.NULL)
+ self.size = -1
+ if not res:
+ self._raise_from_errno()
+ self.ll_dbm = res
+
+ def close(self):
+ if self.ll_dbm:
+ lib.gdbm_close(self.ll_dbm)
+ self.ll_dbm = None
+
+ def _raise_from_errno(self):
+ if ffi.errno:
+ raise error(os.strerror(ffi.errno))
+ raise error(lib.gdbm_strerror(lib.gdbm_errno))
+
+ def __len__(self):
+ if self.size < 0:
+ self.size = len(self.keys())
+ return self.size
+
+ def __setitem__(self, key, value):
+ self._check_closed()
+ self._size = -1
+ r = lib.gdbm_store(self.ll_dbm, _fromstr(key), _fromstr(value),
+ lib.GDBM_REPLACE)
+ if r < 0:
+ self._raise_from_errno()
+
+ def __delitem__(self, key):
+ self._check_closed()
+ res = lib.gdbm_delete(self.ll_dbm, _fromstr(key))
+ if res < 0:
+ raise KeyError(key)
+
+ def __contains__(self, key):
+ self._check_closed()
+ return lib.gdbm_exists(self.ll_dbm, _fromstr(key))
+ has_key = __contains__
+
+ def __getitem__(self, key):
+ self._check_closed()
+ drec = lib.gdbm_fetch(self.ll_dbm, _fromstr(key))
+ if not drec.dptr:
+ raise KeyError(key)
+ res = str(ffi.buffer(drec.dptr, drec.dsize))
+ lib.free(drec.dptr)
+ return res
+
+ def keys(self):
+ self._check_closed()
+ l = []
+ key = lib.gdbm_firstkey(self.ll_dbm)
+ while key.dptr:
+ l.append(str(ffi.buffer(key.dptr, key.dsize)))
+ nextkey = lib.gdbm_nextkey(self.ll_dbm, key)
+ lib.free(key.dptr)
+ key = nextkey
+ return l
+
+ def firstkey(self):
+ self._check_closed()
+ key = lib.gdbm_firstkey(self.ll_dbm)
+ if key.dptr:
+ res = str(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
+
+ def nextkey(self, key):
+ self._check_closed()
+ key = lib.gdbm_nextkey(self.ll_dbm, _fromstr(key))
+ if key.dptr:
+ res = str(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
+
+ def reorganize(self):
+ self._check_closed()
+ if lib.gdbm_reorganize(self.ll_dbm) < 0:
+ self._raise_from_errno()
+
+ def _check_closed(self):
+ if not self.ll_dbm:
+ raise error("GDBM object has already been closed")
+
+ __del__ = close
+
+ def sync(self):
+ self._check_closed()
+ lib.gdbm_sync(self.ll_dbm)
+
+def open(filename, flags='r', mode=0666):
+ if flags[0] == 'r':
+ iflags = lib.GDBM_READER
+ elif flags[0] == 'w':
+ iflags = lib.GDBM_WRITER
+ elif flags[0] == 'c':
+ iflags = lib.GDBM_WRCREAT
+ elif flags[0] == 'n':
+ iflags = lib.GDBM_NEWDB
+ else:
+ raise error("First flag must be one of 'r', 'w', 'c' or 'n'")
+ for flag in flags[1:]:
+ if flag == 'f':
+ iflags |= lib.GDBM_FAST
+ elif flag == 's':
+ iflags |= lib.GDBM_SYNC
+ elif flag == 'u':
+ iflags |= lib.GDBM_NOLOCK
+ else:
+ raise error("Flag '%s' not supported" % flag)
+ return gdbm(filename, iflags, mode)
+
+open_flags = "rwcnfsu"
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -113,7 +113,7 @@
try:
for name in modlist:
__import__(name)
- except (ImportError, CompilationError, py.test.skip.Exception), e:
+ except (ImportError, CompilationError, py.test.skip.Exception) as e:
errcls = e.__class__.__name__
raise Exception(
"The module %r is disabled\n" % (modname,) +
diff --git a/pypy/doc/Makefile b/pypy/doc/Makefile
--- a/pypy/doc/Makefile
+++ b/pypy/doc/Makefile
@@ -7,63 +7,80 @@
PAPER =
BUILDDIR = _build
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex man changes linkcheck doctest
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " dirhtml to make HTML files named index.html in directories"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " qthelp to make HTML files and a qthelp project"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " man to make manual pages"
- @echo " changes to make an overview of all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
- -rm -rf $(BUILDDIR)/*
+ rm -rf $(BUILDDIR)/*
html:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
pickle:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
@@ -72,35 +89,89 @@
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyPy.qhc"
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/PyPy"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyPy"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
latex:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
- @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
- "run these through (pdf)latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
- @echo "Build finished. The manual pages are in $(BUILDDIR)/man"
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
- # python config/generate.py #readthedocs will not run this Makefile
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst
--- a/pypy/doc/coding-guide.rst
+++ b/pypy/doc/coding-guide.rst
@@ -105,7 +105,7 @@
while True:
try:
w_key = space.next(w_iter)
- except OperationError, e:
+ except OperationError as e:
if not e.match(space, space.w_StopIteration):
raise # re-raise other app-level exceptions
break
@@ -348,8 +348,12 @@
**objects**
- Normal rules apply. Special methods are not honoured, except ``__init__``,
- ``__del__`` and ``__iter__``.
+ Normal rules apply. The only special methods that are honoured are
+ ``__init__``, ``__del__``, ``__len__``, ``__getitem__``, ``__setitem__``,
+ ``__getslice__``, ``__setslice__``, and ``__iter__``. To handle slicing,
+ ``__getslice__`` and ``__setslice__`` must be used; using ``__getitem__`` and
+ ``__setitem__`` for slicing isn't supported. Additionally, using negative
+ indices for slicing is still not supported, even when using ``__getslice__``.
This layout makes the number of types to take care about quite limited.
@@ -567,7 +571,7 @@
try:
...
- except OperationError, e:
+ except OperationError as e:
if not e.match(space, space.w_XxxError):
raise
...
diff --git a/pypy/doc/conf.py b/pypy/doc/conf.py
--- a/pypy/doc/conf.py
+++ b/pypy/doc/conf.py
@@ -18,11 +18,31 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('.'))
+
+# -- Read The Docs theme config ------------------------------------------------
+
+# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+if not on_rtd: # only import and set the theme if we're building docs locally
+ try:
+ import sphinx_rtd_theme
+ html_theme = 'sphinx_rtd_theme'
+ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+ except ImportError:
+ print('sphinx_rtd_theme is not installed')
+ html_theme = 'default'
+
+# otherwise, readthedocs.org uses their theme by default, so no need to specify it
+
+
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.ifconfig', 'sphinx.ext.graphviz', 'pypyconfig']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
+ 'sphinx.ext.todo', 'sphinx.ext.ifconfig', 'sphinx.ext.graphviz',
+ 'pypyconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -45,9 +65,9 @@
# built documents.
#
# The short X.Y version.
-version = '2.2'
+version = '2.3'
# The full version, including alpha/beta/rc tags.
-release = '2.2.1'
+release = '2.3.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -91,7 +111,7 @@
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'default'
+#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt
--- a/pypy/doc/config/translation.log.txt
+++ b/pypy/doc/config/translation.log.txt
@@ -2,4 +2,4 @@
These must be enabled by setting the PYPYLOG environment variable.
The exact set of features supported by PYPYLOG is described in
-pypy/translation/c/src/debug_print.h.
+rpython/translator/c/src/debug_print.h.
diff --git a/pypy/doc/contributor.rst b/pypy/doc/contributor.rst
--- a/pypy/doc/contributor.rst
+++ b/pypy/doc/contributor.rst
@@ -99,6 +99,7 @@
Stian Andreassen
Laurence Tratt
Wanja Saatkamp
+ Ivan Sichmann Freitas
Gerald Klix
Mike Blume
Oscar Nierstrasz
@@ -183,7 +184,9 @@
Alejandro J. Cura
Jacob Oscarson
Travis Francis Athougies
+ Ryan Gonzalez
Kristjan Valur Jonsson
+ Sebastian Pawluś
Neil Blakey-Milner
anatoly techtonik
Lutz Paelike
@@ -216,6 +219,7 @@
Michael Hudson-Doyle
Anders Sigfridsson
Yasir Suhail
+ rafalgalczynski at gmail.com
Floris Bruynooghe
Laurens Van Houtven
Akira Li
@@ -245,6 +249,8 @@
Zooko Wilcox-O Hearn
Tomer Chachamu
Christopher Groskopf
+ Asmo Soinio
+ Stefan Marr
jiaaro
opassembler.py
Antony Lee
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -348,4 +348,9 @@
type and vice versa. For builtin types, a dictionary will be returned that
cannot be changed (but still looks and behaves like a normal dictionary).
+* PyPy prints a random line from past #pypy IRC topics at startup in
+ interactive mode. In a released version, this behaviour is suppressed, but
+ setting the environment variable PYPY_IRC_TOPIC will bring it back. Note that
+ downstream package providers have been known to totally disable this feature.
+
.. include:: _ref.txt
diff --git a/pypy/doc/ctypes-implementation.rst b/pypy/doc/ctypes-implementation.rst
--- a/pypy/doc/ctypes-implementation.rst
+++ b/pypy/doc/ctypes-implementation.rst
@@ -72,13 +72,11 @@
Here is a list of the limitations and missing features of the
current implementation:
-* ``ctypes.pythonapi`` lets you access the CPython C API emulation layer
- of PyPy, at your own risks and without doing anything sensible about
- the GIL. Since PyPy 2.3, these functions are also named with an extra
- "Py", for example ``PyPyInt_FromLong()``. Basically, don't use this,
- but it might more or less work in simple cases if you do. (Obviously,
- assuming the PyObject pointers you get have any particular fields in
- any particular order is just going to crash.)
+* ``ctypes.pythonapi`` is missing. In previous versions, it was present
+ and redirected to the `cpyext` C API emulation layer, but our
+ implementation did not do anything sensible about the GIL and the
+ functions were named with an extra "Py", for example
+ ``PyPyInt_FromLong()``. It was removed for being unhelpful.
* We copy Python strings instead of having pointers to raw buffers
diff --git a/pypy/doc/extradoc.rst b/pypy/doc/extradoc.rst
--- a/pypy/doc/extradoc.rst
+++ b/pypy/doc/extradoc.rst
@@ -8,6 +8,9 @@
*Articles about PyPy published so far, most recent first:* (bibtex_ file)
+* `A Way Forward in Parallelising Dynamic Languages`_,
+ R. Meier, A. Rigo
+
* `Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages`_,
C.F. Bolz, A. Cuni, M. Fijalkowski, M. Leuschel, S. Pedroni, A. Rigo
@@ -71,6 +74,7 @@
.. _bibtex: https://bitbucket.org/pypy/extradoc/raw/tip/talk/bibtex.bib
+.. _`A Way Forward in Parallelising Dynamic Languages`: https://bitbucket.org/pypy/extradoc/raw/extradoc/talk/icooolps2014/position-paper.pdf
.. _`Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages`: https://bitbucket.org/pypy/extradoc/raw/extradoc/talk/icooolps2011/jit-hints.pdf
.. _`Allocation Removal by Partial Evaluation in a Tracing JIT`: https://bitbucket.org/pypy/extradoc/raw/extradoc/talk/pepm2011/bolz-allocation-removal.pdf
.. _`Towards a Jitting VM for Prolog Execution`: http://www.stups.uni-duesseldorf.de/mediawiki/images/a/a7/Pub-BoLeSch2010.pdf
@@ -93,6 +97,11 @@
Talks and Presentations
----------------------------------
+*This part is no longer updated.* The complete list is here__ (in
+alphabetical order).
+
+.. __: https://bitbucket.org/pypy/extradoc/src/extradoc/talk/
+
Talks in 2010
+++++++++++++
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -465,9 +465,13 @@
This is documented (here__ and here__). It needs 4 GB of RAM to run
"rpython targetpypystandalone" on top of PyPy, a bit more when running
-on CPython. If you have less than 4 GB it will just swap forever (or
-fail if you don't have enough swap). On 32-bit, divide the numbers by
-two.
+on top of CPython. If you have less than 4 GB free, it will just swap
+forever (or fail if you don't have enough swap). And we mean *free:*
+if the machine has 4 GB *in total,* then it will swap.
+
+On 32-bit, divide the numbers by two. (We didn't try recently, but in
+the past it was possible to compile a 32-bit version on a 2 GB Linux
+machine with nothing else running: no Gnome/KDE, for example.)
.. __: http://pypy.org/download.html#building-from-source
.. __: https://pypy.readthedocs.org/en/latest/getting-started-python.html#translating-the-pypy-python-interpreter
diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst
--- a/pypy/doc/how-to-release.rst
+++ b/pypy/doc/how-to-release.rst
@@ -28,11 +28,6 @@
pypy/doc/tool/makecontributor.py generates the list of contributors
* rename pypy/doc/whatsnew_head.rst to whatsnew_VERSION.rst
and create a fresh whatsnew_head.rst after the release
-* merge PYPY_IRC_TOPIC environment variable handling from previous release
- in pypy/doc/getting-started-dev.rst, pypy/doc/man/pypy.1.rst, and
- pypy/interpreter/app_main.py so release versions will not print a random
- IRC topic by default.
-* change the tracker to have a new release tag to file bugs against
* go to pypy/tool/release and run:
force-builds.py
* wait for builds to complete, make sure there are no failures
diff --git a/pypy/doc/index-of-release-notes.rst b/pypy/doc/index-of-release-notes.rst
--- a/pypy/doc/index-of-release-notes.rst
+++ b/pypy/doc/index-of-release-notes.rst
@@ -1,19 +1,43 @@
Historical release notes
-------------------------
+========================
+
+Cpython 2.7 compatible versions
+===============================
.. toctree::
+ release-2.3.1.rst
+ release-2.3.0.rst
+ release-2.2.1.rst
+ release-2.2.0.rst
+ release-2.1.0.rst
+ release-2.1.0-beta2.rst
+ release-2.1.0-beta1.rst
+ release-2.1.0.rst
+ release-2.0.2.rst
+ release-2.0.1.rst
+ release-2.0.0.rst
+ release-2.0.0-beta2.rst
+ release-2.0.0-beta1.rst
+ release-1.9.0.rst
+ release-1.8.0.rst
+ release-1.7.0.rst
+ release-1.6.0.rst
+ release-1.5.0.rst
+ release-1.4.1.rst
+ release-1.4.0beta.rst
+ release-1.4.0.rst
+ release-1.3.0.rst
+ release-1.2.0.rst
+ release-1.1.0.rst
+ release-1.0.0.rst
+ release-0.99.0.rst
+ release-0.9.0.rst
+ release-0.8.0.rst
+ release-0.7.0.rst
release-0.6
- release-0.7.0.rst
- release-0.8.0.rst
- release-0.9.0.rst
- release-0.99.0.rst
- release-1.0.0.rst
- release-1.1.0.rst
- release-1.2.0.rst
- release-1.3.0.rst
- release-1.4.0.rst
- release-1.4.0beta.rst
- release-1.4.1.rst
- release-1.5.0.rst
- release-1.6.0.rst
+
+Cpython 3.2 compatible versions
+===============================
+.. toctree::
+ release-pypy3-2.1.0-beta1.rst
diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst
--- a/pypy/doc/index.rst
+++ b/pypy/doc/index.rst
@@ -40,7 +40,7 @@
* `FAQ`_: some frequently asked questions.
-* `Release 2.2.1`_: the latest official release
+* `Release 2.3.1`_: the latest official release
* `PyPy Blog`_: news and status info about PyPy
@@ -110,7 +110,7 @@
.. _`Getting Started`: getting-started.html
.. _`Papers`: extradoc.html
.. _`Videos`: video-index.html
-.. _`Release 2.2.1`: http://pypy.org/download.html
+.. _`Release 2.3.1`: http://pypy.org/download.html
.. _`speed.pypy.org`: http://speed.pypy.org
.. _`RPython toolchain`: translation.html
.. _`potential project ideas`: project-ideas.html
diff --git a/pypy/doc/make.bat b/pypy/doc/make.bat
--- a/pypy/doc/make.bat
+++ b/pypy/doc/make.bat
@@ -2,11 +2,15 @@
REM Command file for Sphinx documentation
-set SPHINXBUILD=sphinx-build
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
@@ -14,16 +18,25 @@
if "%1" == "help" (
:help
echo.Please use `make ^` where ^ is one of
- echo. html to make standalone HTML files
From noreply at buildbot.pypy.org Wed Jul 2 20:59:53 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 20:59:53 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Fix
Message-ID: <20140702185953.1CF021D3500@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72321:5d850e01c23b
Date: 2014-07-02 20:53 +0200
http://bitbucket.org/pypy/pypy/changeset/5d850e01c23b/
Log: Fix
diff --git a/rpython/annotator/builtin.py b/rpython/annotator/builtin.py
--- a/rpython/annotator/builtin.py
+++ b/rpython/annotator/builtin.py
@@ -344,29 +344,6 @@
def robjmodel_keepalive_until_here(*args_s):
return immutablevalue(None)
- at analyzer_for(rpython.rtyper.lltypesystem.llmemory.cast_ptr_to_adr)
-def llmemory_cast_ptr_to_adr(s):
- from rpython.rtyper.llannotation import SomeInteriorPtr
- assert not isinstance(s, SomeInteriorPtr)
- return SomeAddress()
-
- at analyzer_for(rpython.rtyper.lltypesystem.llmemory.cast_adr_to_ptr)
-def llmemory_cast_adr_to_ptr(s, s_type):
- assert s_type.is_constant()
- return SomePtr(s_type.const)
-
- at analyzer_for(rpython.rtyper.lltypesystem.llmemory.cast_adr_to_int)
-def llmemory_cast_adr_to_int(s, s_mode=None):
- return SomeInteger() # xxx
-
- at analyzer_for(rpython.rtyper.lltypesystem.llmemory.cast_adr_to_uint_symbolic)
-def llmemory_cast_adr_to_uint_symbolic(s):
- return SomeInteger(unsigned=True)
-
- at analyzer_for(rpython.rtyper.lltypesystem.llmemory.cast_int_to_adr)
-def llmemory_cast_int_to_adr(s):
- return SomeAddress()
-
try:
import unicodedata
except ImportError:
diff --git a/rpython/rtyper/lltypesystem/llmemory.py b/rpython/rtyper/lltypesystem/llmemory.py
--- a/rpython/rtyper/lltypesystem/llmemory.py
+++ b/rpython/rtyper/lltypesystem/llmemory.py
@@ -769,12 +769,16 @@
res = cast(lltype.Signed, res)
return res
+ at analyzer_for(cast_adr_to_int)
+def ann_cast_adr_to_int(s, s_mode=None):
+ return SomeInteger() # xxx
+
def cast_adr_to_uint_symbolic(adr):
return adr._cast_to_uint()
- at analyzer_for(cast_adr_to_int)
-def ann_cast_adr_to_int(s, s_mode=None):
- return SomeInteger() # xxx
+ at analyzer_for(cast_adr_to_uint_symbolic)
+def ann_cast_adr_to_uint_symbolic(s):
+ return SomeInteger(unsigned=True)
_NONGCREF = lltype.Ptr(lltype.OpaqueType('NONGCREF'))
From noreply at buildbot.pypy.org Wed Jul 2 21:28:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 21:28:46 +0200 (CEST)
Subject: [pypy-commit] pypy default: Details from the stm branch
Message-ID: <20140702192846.3F8221C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72322:659b814b797a
Date: 2014-07-02 21:28 +0200
http://bitbucket.org/pypy/pypy/changeset/659b814b797a/
Log: Details from the stm branch
diff --git a/rpython/jit/backend/x86/callbuilder.py b/rpython/jit/backend/x86/callbuilder.py
--- a/rpython/jit/backend/x86/callbuilder.py
+++ b/rpython/jit/backend/x86/callbuilder.py
@@ -132,7 +132,6 @@
self.mc.ADD(ebp, imm(1)) # ebp any more; and ignore 'fastgil'
def move_real_result_and_call_reacqgil_addr(self, fastgil):
- from rpython.jit.backend.x86.assembler import heap
from rpython.jit.backend.x86 import rx86
#
# check if we need to call the reacqgil() function or not
diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py
--- a/rpython/jit/metainterp/resoperation.py
+++ b/rpython/jit/metainterp/resoperation.py
@@ -499,7 +499,7 @@
'SETARRAYITEM_GC/3d',
'SETARRAYITEM_RAW/3d',
'SETINTERIORFIELD_GC/3d',
- 'SETINTERIORFIELD_RAW/3d', # only used by llsupport/rewrite.py
+ 'SETINTERIORFIELD_RAW/3d', # right now, only used by tests
'RAW_STORE/3d',
'SETFIELD_GC/2d',
'SETFIELD_RAW/2d',
diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py
--- a/rpython/translator/c/genc.py
+++ b/rpython/translator/c/genc.py
@@ -328,6 +328,11 @@
#Since there is no GetErrorMode, do a double Set
old_mode = SetErrorMode(flags)
SetErrorMode(old_mode | flags)
+ if env is None:
+ envrepr = ''
+ else:
+ envrepr = ' [env=%r]' % (env,)
+ log.cmdexec('%s %s%s' % (self.executable_name, args, envrepr))
res = self.translator.platform.execute(self.executable_name, args,
env=env)
if sys.platform == 'win32':
diff --git a/rpython/translator/c/src/mem.h b/rpython/translator/c/src/mem.h
--- a/rpython/translator/c/src/mem.h
+++ b/rpython/translator/c/src/mem.h
@@ -117,7 +117,9 @@
#define OP_BOEHM_DISAPPEARING_LINK(link, obj, r) /* nothing */
#define OP_GC__DISABLE_FINALIZERS(r) /* nothing */
#define OP_GC__ENABLE_FINALIZERS(r) /* nothing */
-#define GC_REGISTER_FINALIZER(a,b,c,d,e) /* nothing */
+#define GC_REGISTER_FINALIZER(a, b, c, d, e) /* nothing */
+#define GC_gcollect() /* nothing */
+#define GC_set_max_heap_size(a) /* nothing */
#endif
/************************************************************/
From noreply at buildbot.pypy.org Wed Jul 2 21:29:52 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 2 Jul 2014 21:29:52 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: fixes
Message-ID: <20140702192952.E19D31C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72323:84e3bd9b16ad
Date: 2014-07-02 21:28 +0200
http://bitbucket.org/pypy/pypy/changeset/84e3bd9b16ad/
Log: fixes
diff --git a/rpython/jit/backend/llsupport/llmodel.py b/rpython/jit/backend/llsupport/llmodel.py
--- a/rpython/jit/backend/llsupport/llmodel.py
+++ b/rpython/jit/backend/llsupport/llmodel.py
@@ -180,7 +180,7 @@
STACK_CHECK_SLOWPATH = lltype.Ptr(lltype.FuncType([lltype.Signed],
lltype.Void))
def insert_stack_check():
- assert not self.cpu.gc_ll_descr.stm
+ assert not self.gc_ll_descr.stm
endaddr = rstack._stack_get_end_adr()
lengthaddr = rstack._stack_get_length_adr()
f = llhelper(STACK_CHECK_SLOWPATH, rstack.stack_check_slowpath)
diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py
--- a/rpython/jit/backend/x86/assembler.py
+++ b/rpython/jit/backend/x86/assembler.py
@@ -2743,9 +2743,7 @@
addr0 = stmtlocal.threadlocal_base()
addr = addr1 - addr0
assert rx86.fits_in_32bits(addr)
- mc = self.mc
- mc.writechar(stmtlocal.SEGMENT_TL) # prefix: %fs or %gs
- mc.MOV_rj(resloc.value, addr) # memory read
+ self.mc.MOV_rj(resloc.value, (stmtlocal.SEGMENT_TL, addr))
def get_set_errno(self, op, loc, issue_a_write):
# this function is only called on Linux
@@ -2753,23 +2751,23 @@
addr = stmtlocal.get_errno_tl()
assert rx86.fits_in_32bits(addr)
mc = self.mc
- mc.writechar(stmtlocal.SEGMENT_TL) # prefix: %fs or %gs
- # !!important: the *next* instruction must be the one using 'addr'!!
+ SEGTL = stmtlocal.SEGMENT_TL
if issue_a_write:
if isinstance(loc, RegLoc):
- mc.MOV32_jr(addr, loc.value) # memory write from reg
+ mc.MOV32_jr((SEGTL, addr), loc.value) # memory write from reg
else:
assert isinstance(loc, ImmedLoc)
newvalue = loc.value
newvalue = rffi.cast(rffi.INT, newvalue)
newvalue = rffi.cast(lltype.Signed, newvalue)
- mc.MOV32_ji(addr, newvalue) # memory write immediate
+ mc.MOV32_ji((SEGTL, addr), newvalue) # memory write immediate
else:
assert isinstance(loc, RegLoc)
if IS_X86_32:
- mc.MOV_rj(loc.value, addr) # memory read
+ mc.MOV_rj(loc.value, (SEGTL, addr)) # memory read
elif IS_X86_64:
- mc.MOVSX32_rj(loc.value, addr) # memory read, sign-extend
+ mc.MOVSX32_rj(loc.value,
+ (SEGTL, addr)) # memory read, sign-extend
genop_discard_list = [Assembler386.not_implemented_op_discard] * rop._LAST
diff --git a/rpython/jit/backend/x86/callbuilder.py b/rpython/jit/backend/x86/callbuilder.py
--- a/rpython/jit/backend/x86/callbuilder.py
+++ b/rpython/jit/backend/x86/callbuilder.py
@@ -126,13 +126,12 @@
self.asm.set_extra_stack_depth(self.mc, -delta * WORD)
css_value = eax
#
- self.mc.MOV(heap(fastgil), css_value)
+ self.mc.MOV(heap(self.asm.SEGMENT_NO, fastgil), css_value)
#
if not we_are_translated(): # for testing: we should not access
self.mc.ADD(ebp, imm(1)) # ebp any more; and ignore 'fastgil'
def move_real_result_and_call_reacqgil_addr(self, fastgil):
- from rpython.jit.backend.x86.assembler import heap
from rpython.jit.backend.x86 import rx86
#
# check if we need to call the reacqgil() function or not
@@ -161,10 +160,11 @@
#
mc.MOV(old_value, imm(1))
if rx86.fits_in_32bits(fastgil):
- mc.XCHG_rj(old_value.value, fastgil)
+ mc.XCHG_rj(old_value.value, (self.asm.SEGMENT_NO, fastgil))
else:
mc.MOV_ri(X86_64_SCRATCH_REG.value, fastgil)
- mc.XCHG_rm(old_value.value, (X86_64_SCRATCH_REG.value, 0))
+ mc.XCHG_rm(old_value.value,
+ (self.asm.SEGMENT_NO, X86_64_SCRATCH_REG.value, 0))
mc.CMP(old_value, css_value)
mc.J_il8(rx86.Conditions['E'], 0)
je_location = mc.get_relative_pos()
diff --git a/rpython/jit/metainterp/executor.py b/rpython/jit/metainterp/executor.py
--- a/rpython/jit/metainterp/executor.py
+++ b/rpython/jit/metainterp/executor.py
@@ -357,6 +357,7 @@
rop.DEBUG_MERGE_POINT,
rop.JIT_DEBUG,
rop.SETARRAYITEM_RAW,
+ rop.SETINTERIORFIELD_RAW,
rop.CALL_PURE,
rop.CALL_RELEASE_GIL,
rop.QUASIIMMUT_FIELD,
diff --git a/rpython/jit/metainterp/optimizeopt/heap.py b/rpython/jit/metainterp/optimizeopt/heap.py
--- a/rpython/jit/metainterp/optimizeopt/heap.py
+++ b/rpython/jit/metainterp/optimizeopt/heap.py
@@ -267,6 +267,7 @@
opnum == rop.SETFIELD_RAW or # no effect on GC struct/array
opnum == rop.SETARRAYITEM_GC or # handled specially
opnum == rop.SETARRAYITEM_RAW or # no effect on GC struct
+ opnum == rop.SETINTERIORFIELD_RAW or # no effect on GC struct
opnum == rop.RAW_STORE or # no effect on GC struct
opnum == rop.STRSETITEM or # no effect on GC struct/array
opnum == rop.UNICODESETITEM or # no effect on GC struct/array
diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py
--- a/rpython/jit/metainterp/resoperation.py
+++ b/rpython/jit/metainterp/resoperation.py
@@ -505,6 +505,7 @@
'SETARRAYITEM_GC/3d',
'SETARRAYITEM_RAW/3d',
'SETINTERIORFIELD_GC/3d',
+ 'SETINTERIORFIELD_RAW/3d', # right now, only used by tests
'RAW_STORE/3d',
'SETFIELD_GC/2d',
'SETFIELD_RAW/2d',
From noreply at buildbot.pypy.org Thu Jul 3 00:28:52 2014
From: noreply at buildbot.pypy.org (alex_gaynor)
Date: Thu, 3 Jul 2014 00:28:52 +0200 (CEST)
Subject: [pypy-commit] pypy.org extradoc: Twisted with SSL works these days
Message-ID: <20140702222852.7B2D51C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Alex Gaynor
Branch: extradoc
Changeset: r514:836e9b3f5226
Date: 2014-07-02 15:28 -0700
http://bitbucket.org/pypy/pypy.org/changeset/836e9b3f5226/
Log: Twisted with SSL works these days
diff --git a/compat.html b/compat.html
--- a/compat.html
+++ b/compat.html
@@ -74,7 +74,7 @@
django
sqlalchemy
flask
-twisted (without ssl support)
+twisted
pylons
divmod's nevow
pyglet
diff --git a/source/compat.txt b/source/compat.txt
--- a/source/compat.txt
+++ b/source/compat.txt
@@ -42,7 +42,7 @@
* flask
-* twisted (without ssl support)
+* twisted
* pylons
From noreply at buildbot.pypy.org Thu Jul 3 05:26:32 2014
From: noreply at buildbot.pypy.org (mattip)
Date: Thu, 3 Jul 2014 05:26:32 +0200 (CEST)
Subject: [pypy-commit] pypy default: allow uint ops as ok_ops since
INVALID_SOCKET is uint on MSVC
Message-ID: <20140703032632.38A491C0ECA@cobra.cs.uni-duesseldorf.de>
Author: mattip
Branch:
Changeset: r72324:eb50bceda86c
Date: 2014-07-03 13:25 +1000
http://bitbucket.org/pypy/pypy/changeset/eb50bceda86c/
Log: allow uint ops as ok_ops since INVALID_SOCKET is uint on MSVC
diff --git a/rpython/translator/backendopt/finalizer.py b/rpython/translator/backendopt/finalizer.py
--- a/rpython/translator/backendopt/finalizer.py
+++ b/rpython/translator/backendopt/finalizer.py
@@ -31,7 +31,7 @@
if op.opname in self.ok_operations:
return self.bottom_result()
if (op.opname.startswith('int_') or op.opname.startswith('float_')
- or op.opname.startswith('cast_')):
+ or op.opname.startswith('uint_') or op.opname.startswith('cast_')):
return self.bottom_result()
if op.opname == 'setfield' or op.opname == 'bare_setfield':
TP = op.args[2].concretetype
From noreply at buildbot.pypy.org Thu Jul 3 11:40:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 11:40:46 +0200 (CEST)
Subject: [pypy-commit] pypy default: Improve the test,
which fails on "pypy -A": try repeatedly to get a
Message-ID: <20140703094046.9221B1C0257@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72325:28a1ebabc3e4
Date: 2014-07-03 11:40 +0200
http://bitbucket.org/pypy/pypy/changeset/28a1ebabc3e4/
Log: Improve the test, which fails on "pypy -A": try repeatedly to get a
small array, fill it with garbage, and then free it. It's likely we
end up at the same location with still the garbage.
diff --git a/pypy/module/micronumpy/test/test_arrayops.py b/pypy/module/micronumpy/test/test_arrayops.py
--- a/pypy/module/micronumpy/test/test_arrayops.py
+++ b/pypy/module/micronumpy/test/test_arrayops.py
@@ -3,18 +3,27 @@
class AppTestNumSupport(BaseNumpyAppTest):
def test_zeros(self):
- from numpypy import zeros, empty
+ from numpypy import zeros
a = zeros(3)
assert len(a) == 3
assert a[0] == a[1] == a[2] == 0
- a = empty(1000)
- assert len(a) == 1000
+
+ def test_empty(self):
+ from numpypy import empty
+ import gc
for i in range(1000):
- if a[i] != 0:
- break
+ a = empty(3)
+ assert len(a) == 3
+ if not (a[0] == a[1] == a[2] == 0):
+ break # done
+ a[0] = 1.23
+ a[1] = 4.56
+ a[2] = 7.89
+ del a
+ gc.collect()
else:
raise AssertionError(
- "empty() returned a zeroed out array of length 1000 (unlikely)")
+ "empty() returned a zeroed out array every time")
def test_where(self):
from numpypy import where, ones, zeros, array
From noreply at buildbot.pypy.org Thu Jul 3 15:31:30 2014
From: noreply at buildbot.pypy.org (Patrick Rein)
Date: Thu, 3 Jul 2014 15:31:30 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk stmgc-c7: Added OSLock class to access
rthread.lock and corresponding primitives.
Message-ID: <20140703133130.3099A1D353B@cobra.cs.uni-duesseldorf.de>
Author: Patrick Rein
Branch: stmgc-c7
Changeset: r851:3f97fdcac997
Date: 2014-07-03 15:26 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/3f97fdcac997/
Log: Added OSLock class to access rthread.lock and corresponding
primitives.
diff --git a/images/Squeak4.5-12568.changes b/images/Squeak4.5-12568.changes
--- a/images/Squeak4.5-12568.changes
+++ b/images/Squeak4.5-12568.changes
@@ -758,4 +758,4 @@
self fieldNew: swapField.
].
- ^ self field! !
----QUIT----{22 May 2014 . 3:33:07 pm} Squeak4.5-12568.image priorSource: 93437!
----STARTUP----{22 May 2014 . 3:33:13 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:33'!
gameLifeOfLife
STMSimulation benchmark.! !
----QUIT----{22 May 2014 . 3:34:03 pm} Squeak4.5-12568.image priorSource: 110218!
----STARTUP----{22 May 2014 . 3:34:57 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:35'!
gameOfLife
STMSimulation benchmark.! !
Integer removeSelector: #gameLifeOfLife!
----QUIT----{22 May 2014 . 3:35:14 pm} Squeak4.5-12568.image priorSource: 110526!
----STARTUP----{22 May 2014 . 3:36:22 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:36' prior: 33665224!
gameOfLife
STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:36:45 pm} Squeak4.5-12568.image priorSource: 110873!
----STARTUP----{22 May 2014 . 3:36:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:36:53 pm} Squeak4.5-12568.image priorSource: 111195!
----STARTUP----{22 May 2014 . 3:36:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:37' prior: 33665587!
gameOfLife
SPyVM print: STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:37:32 pm} Squeak4.5-12568.image priorSource: 111392!
----STARTUP----{22 May 2014 . 3:38:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:38:35 pm} Squeak4.5-12568.image priorSource: 111727!
----STARTUP----{2 June 2014 . 12:57:18 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 12:58'!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 12:58:21 pm} Squeak4.5-12568.image priorSource: 111924!
\ No newline at end of file
+ ^ self field! !
----QUIT----{22 May 2014 . 3:33:07 pm} Squeak4.5-12568.image priorSource: 93437!
----STARTUP----{22 May 2014 . 3:33:13 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:33'!
gameLifeOfLife
STMSimulation benchmark.! !
----QUIT----{22 May 2014 . 3:34:03 pm} Squeak4.5-12568.image priorSource: 110218!
----STARTUP----{22 May 2014 . 3:34:57 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:35'!
gameOfLife
STMSimulation benchmark.! !
Integer removeSelector: #gameLifeOfLife!
----QUIT----{22 May 2014 . 3:35:14 pm} Squeak4.5-12568.image priorSource: 110526!
----STARTUP----{22 May 2014 . 3:36:22 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:36' prior: 33665224!
gameOfLife
STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:36:45 pm} Squeak4.5-12568.image priorSource: 110873!
----STARTUP----{22 May 2014 . 3:36:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:36:53 pm} Squeak4.5-12568.image priorSource: 111195!
----STARTUP----{22 May 2014 . 3:36:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:37' prior: 33665587!
gameOfLife
SPyVM print: STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:37:32 pm} Squeak4.5-12568.image priorSource: 111392!
----STARTUP----{22 May 2014 . 3:38:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:38:35 pm} Squeak4.5-12568.image priorSource: 111727!
----STARTUP----{2 June 2014 . 12:57:18 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 12:58'!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 12:58:21 pm} Squeak4.5-12568.image priorSource: 111924!
----STARTUP----{26 June 2014 . 2:47:09 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:54 pm'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:25'!
lock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:26'!
release
! !
----End fileIn of a stream----!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33647508!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----QUIT----{26 June 2014 . 2:47:49 pm} Squeak4.5-12568.image priorSource: 112268!
----STARTUP----{26 June 2014 . 2:49:11 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33667646!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----SNAPSHOT----{26 June 2014 . 2:49:57 pm} Squeak4.5-12568.image priorSource: 113431!
----QUIT----{26 June 2014 . 2:50 pm} Squeak4.5-12568.image priorSource: 114022!
----STARTUP----{26 June 2014 . 2:52:02 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:51:52 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 14:43'!
osLockTest
| lock process1 process2 process2lock |
lock := OSLock new.
lock lock.
process2lock := OSLock new.
process2lock lock.
process1 := [ SPyVM print: 'First process start'.
lock lock.
process2lock release.
SPyVM print: 'First process after lock'.
lock release. ] parallelFork .
process2 := [ SPyVM print: 'Second process start'.
process2lock lock.
lock lock.
SPyVM print: 'Second process after lock'.
process2lock release.
lock release. ] parallelFork .
SPyVM print: 'Processes initialized.'.
lock release.
process1 wait.
process2 wait.! !
----End fileIn of a stream----!
----QUIT----{26 June 2014 . 2:53:18 pm} Squeak4.5-12568.image priorSource: 114110!
----STARTUP----{26 June 2014 . 3:02:03 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:02'!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:02:52 pm} Squeak4.5-12568.image priorSource: 115096!
----STARTUP----{26 June 2014 . 3:03:40 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667310!
lock
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667409!
release
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670408!
release
SPyVM print: '* OS Lock could not be released *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670196!
lock
SPyVM print: '* OS Lock could not be locked *' , Character cr.
self primitiveFailed.
self resume! !
----QUIT----{26 June 2014 . 3:04:45 pm} Squeak4.5-12568.image priorSource: 115476!
----STARTUP----{26 June 2014 . 3:08:07 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:08' prior: 33669797!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:08:24 pm} Squeak4.5-12568.image priorSource: 116537!
----STARTUP----{26 June 2014 . 3:09:05 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:09' prior: 33671254!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:09:17 pm} Squeak4.5-12568.image priorSource: 116916!
----STARTUP----{26 June 2014 . 3:57:38 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:57' prior: 33671633!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'
lock release.
SPyVM print: 'Survived lock.'! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:58' prior: 33672027!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'.
lock release.
SPyVM print: 'Survived lock.'.
^ self! !
----QUIT----{26 June 2014 . 3:58:58 pm} Squeak4.5-12568.image priorSource: 117310!
----STARTUP----{26 June 2014 . 3:59:21 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672253!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived locking.'.
lock release.
SPyVM print: 'Survived releasing.'.
^ self! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672690!
osLockEasyTest
| lock1 |
lock1 := OSLock new.
lock1 lock.
SPyVM print: 'Survived locking.'.
lock1 release.
SPyVM print: 'Survived releasing.'.
^ self! !
----QUIT----{26 June 2014 . 4:00 pm} Squeak4.5-12568.image priorSource: 117973!
----STARTUP----{26 June 2014 . 4:07:56 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09' prior: 33670843!
lock
self internalLock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09'!
internalLock
SPyVM print: '* OS Lock could not be locked *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09'!
internalRelease
SPyVM print: '* OS Lock could not be released *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09' prior: 33670623!
release
self internalRelease! !
----QUIT----{26 June 2014 . 4:09:34 pm} Squeak4.5-12568.image priorSource: 118668!
\ No newline at end of file
diff --git a/images/Squeak4.5-12568.image b/images/Squeak4.5-12568.image
index 0d24c0b0506552d59e3dd4db7be72f7e565a8d7c..6628e9cf95b4860c2c865166b07278b10aec9b80
GIT binary patch
[cut]
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1335,6 +1335,10 @@
STM_ATOMIC_ENTER = 1302 # 790
STM_ATOMIC_LEAVE = 1303 # 791
+# OS Lock Primitives
+OS_LOCK_LOCK = 1304 # 792
+OS_LOCK_RELEASE = 1305 # 793
+
@expose_primitive(BLOCK_COPY, unwrap_spec=[object, int])
def func(interp, s_frame, w_context, argcnt):
@@ -1542,6 +1546,26 @@
rstm.decrement_atomic()
+ at expose_primitive(OS_LOCK_LOCK, unwrap_spec=[object], no_result=True)
+def func(interp, s_frame, w_rcvr):
+ from rpython.rlib import rthread
+
+ if not isinstance(w_rcvr, model.W_PointersObject):
+ raise PrimitiveFailedError("OS_LOCK_LOCK primitive was not called on an OSLock Object")
+
+ lock_shadow = w_rcvr.as_special_get_shadow(interp.space, shadow.OSLockShadow)
+ lock_shadow.os_lock()
+
+ at expose_primitive(OS_LOCK_RELEASE, unwrap_spec=[object], no_result=True)
+def func(interp, s_frame, w_rcvr):
+ from rpython.rlib import rthread
+
+ if not isinstance(w_rcvr, model.W_PointersObject):
+ raise PrimitiveFailedError("OS_LOCK_LOCK primitive was not called on an OSLock Object")
+
+ lock_shadow = w_rcvr.as_special_get_shadow(interp.space, shadow.OSLockShadow)
+ lock_shadow.os_release()
+
# ___________________________________________________________________________
# BlockClosure Primitives
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -1142,4 +1142,17 @@
return lock_result
def signal(self):
- self.lock.release()
\ No newline at end of file
+ self.lock.release()
+
+class OSLockShadow(AbstractShadow):
+
+ def __init__(self, space, w_self):
+ AbstractShadow.__init__(self, space, w_self)
+ self.intern_lock = rthread.allocate_lock()
+
+ def os_lock(self):
+ self.intern_lock.acquire(True)
+
+ def os_release(self):
+ self.intern_lock.release()
+
From noreply at buildbot.pypy.org Thu Jul 3 16:32:24 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Thu, 3 Jul 2014 16:32:24 +0200 (CEST)
Subject: [pypy-commit] pypy scalar-operations: avoid converting scalars to
arrays when calling unary ufuncs
Message-ID: <20140703143224.AB2851D2D3D@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch: scalar-operations
Changeset: r72326:e32c6b0eff6c
Date: 2014-07-03 15:31 +0100
http://bitbucket.org/pypy/pypy/changeset/e32c6b0eff6c/
Log: avoid converting scalars to arrays when calling unary ufuncs
diff --git a/pypy/module/micronumpy/ufuncs.py b/pypy/module/micronumpy/ufuncs.py
--- a/pypy/module/micronumpy/ufuncs.py
+++ b/pypy/module/micronumpy/ufuncs.py
@@ -312,8 +312,8 @@
out = args_w[1]
if space.is_w(out, space.w_None):
out = None
- w_obj = convert_to_array(space, w_obj)
- dtype = w_obj.get_dtype()
+ w_obj = numpify(space, w_obj)
+ dtype = _get_dtype(space, w_obj)
if dtype.is_flexible():
raise OperationError(space.w_TypeError,
space.wrap('Not implemented for this type'))
@@ -323,7 +323,7 @@
raise oefmt(space.w_TypeError,
"ufunc %s not supported for the input type", self.name)
calc_dtype = find_unaryop_result_dtype(space,
- w_obj.get_dtype(),
+ dtype,
promote_to_float=self.promote_to_float,
promote_bools=self.promote_bools)
if out is not None:
@@ -353,6 +353,7 @@
else:
out.fill(space, w_val)
return out
+ assert isinstance(w_obj, W_NDimArray)
shape = shape_agreement(space, w_obj.get_shape(), out,
broadcast_down=False)
return loop.call1(space, shape, self.func, calc_dtype, res_dtype,
From noreply at buildbot.pypy.org Thu Jul 3 16:42:02 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Thu, 3 Jul 2014 16:42:02 +0200 (CEST)
Subject: [pypy-commit] pypy scalar-operations: hg merge default
Message-ID: <20140703144202.AF14E1D2D3D@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch: scalar-operations
Changeset: r72327:58201d7159b3
Date: 2014-07-03 15:41 +0100
http://bitbucket.org/pypy/pypy/changeset/58201d7159b3/
Log: hg merge default
diff too long, truncating to 2000 out of 2386 lines
diff --git a/lib-python/2.7/ctypes/__init__.py b/lib-python/2.7/ctypes/__init__.py
--- a/lib-python/2.7/ctypes/__init__.py
+++ b/lib-python/2.7/ctypes/__init__.py
@@ -389,12 +389,13 @@
func.__name__ = name_or_ordinal
return func
-class PyDLL(CDLL):
- """This class represents the Python library itself. It allows to
- access Python API functions. The GIL is not released, and
- Python exceptions are handled correctly.
- """
- _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
+# Not in PyPy
+#class PyDLL(CDLL):
+# """This class represents the Python library itself. It allows to
+# access Python API functions. The GIL is not released, and
+# Python exceptions are handled correctly.
+# """
+# _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
if _os.name in ("nt", "ce"):
@@ -447,15 +448,8 @@
return self._dlltype(name)
cdll = LibraryLoader(CDLL)
-pydll = LibraryLoader(PyDLL)
-
-if _os.name in ("nt", "ce"):
- pythonapi = PyDLL("python dll", None, _sys.dllhandle)
-elif _sys.platform == "cygwin":
- pythonapi = PyDLL("libpython%d.%d.dll" % _sys.version_info[:2])
-else:
- pythonapi = PyDLL(None)
-
+# not on PyPy
+#pydll = LibraryLoader(PyDLL)
if _os.name in ("nt", "ce"):
windll = LibraryLoader(WinDLL)
diff --git a/lib-python/2.7/ctypes/test/test_values.py b/lib-python/2.7/ctypes/test/test_values.py
--- a/lib-python/2.7/ctypes/test/test_values.py
+++ b/lib-python/2.7/ctypes/test/test_values.py
@@ -4,6 +4,7 @@
import unittest
from ctypes import *
+from ctypes.test import xfail
import _ctypes_test
@@ -23,7 +24,8 @@
class Win_ValuesTestCase(unittest.TestCase):
"""This test only works when python itself is a dll/shared library"""
-
+
+ @xfail
def test_optimizeflag(self):
        # This test accesses the Py_OptimizeFlag integer, which is
# exported by the Python dll.
@@ -40,6 +42,7 @@
else:
self.assertEqual(opt, 2)
+ @xfail
def test_frozentable(self):
# Python exports a PyImport_FrozenModules symbol. This is a
# pointer to an array of struct _frozen entries. The end of the
@@ -75,6 +78,7 @@
from ctypes import _pointer_type_cache
del _pointer_type_cache[struct_frozen]
+ @xfail
def test_undefined(self):
self.assertRaises(ValueError, c_int.in_dll, pydll, "Undefined_Symbol")
diff --git a/lib-python/2.7/timeit.py b/lib-python/2.7/timeit.py
--- a/lib-python/2.7/timeit.py
+++ b/lib-python/2.7/timeit.py
@@ -55,11 +55,6 @@
import gc
import sys
import time
-try:
- import itertools
-except ImportError:
- # Must be an older Python version (see timeit() below)
- itertools = None
__all__ = ["Timer"]
@@ -81,7 +76,8 @@
def inner(_it, _timer):
%(setup)s
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
%(stmt)s
_t1 = _timer()
return _t1 - _t0
@@ -96,7 +92,8 @@
def inner(_it, _timer, _func=func):
setup()
_t0 = _timer()
- for _i in _it:
+ while _it > 0:
+ _it -= 1
_func()
_t1 = _timer()
return _t1 - _t0
@@ -133,9 +130,11 @@
else:
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
- code = compile(src, dummy_src_name, "exec")
- exec code in globals(), ns
- self.inner = ns["inner"]
+ def make_inner():
+ code = compile(src, dummy_src_name, "exec")
+ exec code in globals(), ns
+ return ns["inner"]
+ self.make_inner = make_inner
elif hasattr(stmt, '__call__'):
self.src = None
if isinstance(setup, basestring):
@@ -144,7 +143,8 @@
exec _setup in globals(), ns
elif not hasattr(setup, '__call__'):
raise ValueError("setup is neither a string nor callable")
- self.inner = _template_func(setup, stmt)
+ inner = _template_func(setup, stmt)
+ self.make_inner = lambda: inner
else:
raise ValueError("stmt is neither a string nor callable")
@@ -185,15 +185,12 @@
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
- if itertools:
- it = itertools.repeat(None, number)
- else:
- it = [None] * number
+ inner = self.make_inner()
gcold = gc.isenabled()
if '__pypy__' not in sys.builtin_module_names:
gc.disable() # only do that on CPython
try:
- timing = self.inner(it, self.timer)
+ timing = inner(number, self.timer)
finally:
if gcold:
gc.enable()
diff --git a/pypy/doc/ctypes-implementation.rst b/pypy/doc/ctypes-implementation.rst
--- a/pypy/doc/ctypes-implementation.rst
+++ b/pypy/doc/ctypes-implementation.rst
@@ -72,13 +72,11 @@
Here is a list of the limitations and missing features of the
current implementation:
-* ``ctypes.pythonapi`` lets you access the CPython C API emulation layer
- of PyPy, at your own risks and without doing anything sensible about
- the GIL. Since PyPy 2.3, these functions are also named with an extra
- "Py", for example ``PyPyInt_FromLong()``. Basically, don't use this,
- but it might more or less work in simple cases if you do. (Obviously,
- assuming the PyObject pointers you get have any particular fields in
- any particular order is just going to crash.)
+* ``ctypes.pythonapi`` is missing. In previous versions, it was present
+ and redirected to the `cpyext` C API emulation layer, but our
+ implementation did not do anything sensible about the GIL and the
+ functions were named with an extra "Py", for example
+ ``PyPyInt_FromLong()``. It was removed for being unhelpful.
* We copy Python strings instead of having pointers to raw buffers
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -465,9 +465,13 @@
This is documented (here__ and here__). It needs 4 GB of RAM to run
"rpython targetpypystandalone" on top of PyPy, a bit more when running
-on CPython. If you have less than 4 GB it will just swap forever (or
-fail if you don't have enough swap). On 32-bit, divide the numbers by
-two.
+on top of CPython. If you have less than 4 GB free, it will just swap
+forever (or fail if you don't have enough swap). And we mean *free:*
+if the machine has 4 GB *in total,* then it will swap.
+
+On 32-bit, divide the numbers by two. (We didn't try recently, but in
+the past it was possible to compile a 32-bit version on a 2 GB Linux
+machine with nothing else running: no Gnome/KDE, for example.)
.. __: http://pypy.org/download.html#building-from-source
.. __: https://pypy.readthedocs.org/en/latest/getting-started-python.html#translating-the-pypy-python-interpreter
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -92,9 +92,9 @@
We're busy fixing them as we find them; feel free to `report bugs`_.
* It runs with an overhead as low as 20% on examples like "richards".
- There are also other examples with higher overheads --up to 10x for
- "translate.py"-- which we are still trying to understand. One suspect
- is our partial GC implementation, see below.
+ There are also other examples with higher overheads --currently up to
+ 2x for "translate.py"-- which we are still trying to understand.
+ One suspect is our partial GC implementation, see below.
* Currently limited to 1.5 GB of RAM (this is just a parameter in
`core.h`__). Memory overflows are not correctly handled; they cause
@@ -111,9 +111,8 @@
* The GC is new; although clearly inspired by PyPy's regular GC, it
misses a number of optimizations for now. Programs allocating large
- numbers of small objects that don't immediately die, as well as
- programs that modify large lists or dicts, suffer from these missing
- optimizations.
+ numbers of small objects that don't immediately die (surely a common
+ situation) suffer from these missing optimizations.
* The GC has no support for destructors: the ``__del__`` method is never
called (including on file objects, which won't be closed for you).
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -43,3 +43,7 @@
.. branch: jit-get-errno
Optimize the errno handling in the JIT, notably around external
function calls. Linux-only.
+
+.. branch: disable_pythonapi
+Remove non-functioning ctypes.pythonapi and ctypes.PyDLL, document this
+incompatibility with cpython. Recast sys.dllhandle to an int.
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -967,6 +967,13 @@
"""
return self.unpackiterable(w_iterable, expected_length)
+ def listview_no_unpack(self, w_iterable):
+ """ Same as listview() if cheap. If 'w_iterable' is something like
+ a generator, for example, then return None instead.
+ May return None anyway.
+ """
+ return None
+
def listview_bytes(self, w_list):
""" Return a list of unwrapped strings out of a list of strings. If the
argument is not a list or does not contain only strings, return None.
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -496,6 +496,13 @@
"""
+class UserDelCallback(object):
+ def __init__(self, w_obj, callback, descrname):
+ self.w_obj = w_obj
+ self.callback = callback
+ self.descrname = descrname
+ self.next = None
+
class UserDelAction(AsyncAction):
"""An action that invokes all pending app-level __del__() method.
This is done as an action instead of immediately when the
@@ -506,12 +513,18 @@
def __init__(self, space):
AsyncAction.__init__(self, space)
- self.dying_objects = []
+ self.dying_objects = None
+ self.dying_objects_last = None
self.finalizers_lock_count = 0
self.enabled_at_app_level = True
def register_callback(self, w_obj, callback, descrname):
- self.dying_objects.append((w_obj, callback, descrname))
+ cb = UserDelCallback(w_obj, callback, descrname)
+ if self.dying_objects_last is None:
+ self.dying_objects = cb
+ else:
+ self.dying_objects_last.next = cb
+ self.dying_objects_last = cb
self.fire()
def perform(self, executioncontext, frame):
@@ -525,13 +538,33 @@
# avoid too deep recursions of the kind of __del__ being called
# while in the middle of another __del__ call.
pending = self.dying_objects
- self.dying_objects = []
+ self.dying_objects = None
+ self.dying_objects_last = None
space = self.space
- for i in range(len(pending)):
- w_obj, callback, descrname = pending[i]
- pending[i] = (None, None, None)
+ while pending is not None:
try:
- callback(w_obj)
+ pending.callback(pending.w_obj)
except OperationError, e:
- e.write_unraisable(space, descrname, w_obj)
+ e.write_unraisable(space, pending.descrname, pending.w_obj)
e.clear(space) # break up reference cycles
+ pending = pending.next
+ #
+ # Note: 'dying_objects' used to be just a regular list instead
+ # of a chained list. This was the cause of "leaks" if we have a
+ # program that constantly creates new objects with finalizers.
+ # Here is why: say 'dying_objects' is a long list, and there
+ # are n instances in it. Then we spend some time in this
+ # function, possibly triggering more GCs, but keeping the list
+ # of length n alive. Then the list is suddenly freed at the
+ # end, and we return to the user program. At this point the
+ # GC limit is still very high, because just before, there was
+ # a list of length n alive. Assume that the program continues
+ # to allocate a lot of instances with finalizers. The high GC
+ # limit means that it could allocate a lot of instances before
+ # reaching it --- possibly more than n. So the whole procedure
+ # repeats with higher and higher values of n.
+ #
+ # This does not occur in the current implementation because
+ # there is no list of length n: if n is large, then the GC
+ # will run several times while walking the list, but it will
+ # see lower and lower memory usage, with no lower bound of n.
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -61,6 +61,13 @@
return self.send_ex(w_arg)
def send_ex(self, w_arg, operr=None):
+ pycode = self.pycode
+ if jit.we_are_jitted() and should_not_inline(pycode):
+ generatorentry_driver.jit_merge_point(gen=self, w_arg=w_arg,
+ operr=operr, pycode=pycode)
+ return self._send_ex(w_arg, operr)
+
+ def _send_ex(self, w_arg, operr):
space = self.space
if self.running:
raise OperationError(space.w_ValueError,
@@ -72,8 +79,7 @@
if operr is None:
operr = OperationError(space.w_StopIteration, space.w_None)
raise operr
- # XXX it's not clear that last_instr should be promoted at all
- # but as long as it is necessary for call_assembler, let's do it early
+
last_instr = jit.promote(frame.last_instr)
if last_instr == -1:
if w_arg and not space.is_w(w_arg, space.w_None):
@@ -214,3 +220,38 @@
"interrupting generator of ")
break
block = block.previous
+
+
+
+def get_printable_location_genentry(bytecode):
+ return '%s ' % (bytecode.get_repr(),)
+generatorentry_driver = jit.JitDriver(greens=['pycode'],
+ reds=['gen', 'w_arg', 'operr'],
+ get_printable_location =
+ get_printable_location_genentry,
+ name='generatorentry')
+
+from pypy.tool.stdlib_opcode import HAVE_ARGUMENT, opmap
+YIELD_VALUE = opmap['YIELD_VALUE']
+
+ at jit.elidable_promote()
+def should_not_inline(pycode):
+ # Should not inline generators with more than one "yield",
+ # as an approximative fix (see issue #1782). There are cases
+ # where it slows things down; for example calls to a simple
+ # generator that just produces a few simple values with a few
+ # consecutive "yield" statements. It fixes the near-infinite
+ # slow-down in issue #1782, though...
+ count_yields = 0
+ code = pycode.co_code
+ n = len(code)
+ i = 0
+ while i < n:
+ c = code[i]
+ op = ord(c)
+ if op == YIELD_VALUE:
+ count_yields += 1
+ i += 1
+ if op >= HAVE_ARGUMENT:
+ i += 2
+ return count_yields >= 2
diff --git a/pypy/interpreter/test/test_generator.py b/pypy/interpreter/test/test_generator.py
--- a/pypy/interpreter/test/test_generator.py
+++ b/pypy/interpreter/test/test_generator.py
@@ -278,4 +278,21 @@
def f():
yield 1
raise StopIteration
- assert tuple(f()) == (1,)
\ No newline at end of file
+ assert tuple(f()) == (1,)
+
+
+def test_should_not_inline(space):
+ from pypy.interpreter.generator import should_not_inline
+ w_co = space.appexec([], '''():
+ def g(x):
+ yield x + 5
+ return g.func_code
+ ''')
+ assert should_not_inline(w_co) == False
+ w_co = space.appexec([], '''():
+ def g(x):
+ yield x + 5
+ yield x + 6
+ return g.func_code
+ ''')
+ assert should_not_inline(w_co) == True
diff --git a/pypy/module/_cffi_backend/ctypefunc.py b/pypy/module/_cffi_backend/ctypefunc.py
--- a/pypy/module/_cffi_backend/ctypefunc.py
+++ b/pypy/module/_cffi_backend/ctypefunc.py
@@ -4,7 +4,7 @@
import sys
-from rpython.rlib import jit, clibffi, jit_libffi
+from rpython.rlib import jit, clibffi, jit_libffi, rgc
from rpython.rlib.jit_libffi import (CIF_DESCRIPTION, CIF_DESCRIPTION_P,
FFI_TYPE, FFI_TYPE_P, FFI_TYPE_PP, SIZE_OF_FFI_ARG)
from rpython.rlib.objectmodel import we_are_translated, instantiate
@@ -63,6 +63,7 @@
CifDescrBuilder(fvarargs, self.ctitem).rawallocate(ctypefunc)
return ctypefunc
+ @rgc.must_be_light_finalizer
def __del__(self):
if self.cif_descr:
lltype.free(self.cif_descr, flavor='raw')
diff --git a/pypy/module/_io/interp_textio.py b/pypy/module/_io/interp_textio.py
--- a/pypy/module/_io/interp_textio.py
+++ b/pypy/module/_io/interp_textio.py
@@ -458,6 +458,10 @@
self._check_init(space)
return space.call_method(self.w_buffer, "seekable")
+ def isatty_w(self, space):
+ self._check_init(space)
+ return space.call_method(self.w_buffer, "isatty")
+
def fileno_w(self, space):
self._check_init(space)
return space.call_method(self.w_buffer, "fileno")
@@ -1035,6 +1039,7 @@
readable = interp2app(W_TextIOWrapper.readable_w),
writable = interp2app(W_TextIOWrapper.writable_w),
seekable = interp2app(W_TextIOWrapper.seekable_w),
+ isatty = interp2app(W_TextIOWrapper.isatty_w),
fileno = interp2app(W_TextIOWrapper.fileno_w),
name = GetSetProperty(W_TextIOWrapper.name_get_w),
buffer = interp_attrproperty_w("w_buffer", cls=W_TextIOWrapper),
diff --git a/pypy/module/_io/test/test_textio.py b/pypy/module/_io/test/test_textio.py
--- a/pypy/module/_io/test/test_textio.py
+++ b/pypy/module/_io/test/test_textio.py
@@ -25,6 +25,12 @@
t = _io.TextIOWrapper(b)
assert t.readable()
assert t.seekable()
+ #
+ class CustomFile(object):
+ def isatty(self): return 'YES'
+ readable = writable = seekable = lambda self: False
+ t = _io.TextIOWrapper(CustomFile())
+ assert t.isatty() == 'YES'
def test_default_implementations(self):
import _io
diff --git a/pypy/module/_lsprof/interp_lsprof.py b/pypy/module/_lsprof/interp_lsprof.py
--- a/pypy/module/_lsprof/interp_lsprof.py
+++ b/pypy/module/_lsprof/interp_lsprof.py
@@ -11,7 +11,7 @@
from rpython.rlib.rtimer import read_timestamp, _is_64_bit
from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rlib.rarithmetic import r_longlong
import time, sys
diff --git a/pypy/module/_socket/__init__.py b/pypy/module/_socket/__init__.py
--- a/pypy/module/_socket/__init__.py
+++ b/pypy/module/_socket/__init__.py
@@ -6,8 +6,8 @@
}
interpleveldefs = {
- 'SocketType': 'interp_socket.W_RSocket',
- 'socket' : 'interp_socket.W_RSocket',
+ 'SocketType': 'interp_socket.W_Socket',
+ 'socket' : 'interp_socket.W_Socket',
'error' : 'interp_socket.get_error(space, "error")',
'herror' : 'interp_socket.get_error(space, "herror")',
'gaierror' : 'interp_socket.get_error(space, "gaierror")',
diff --git a/pypy/module/_socket/interp_func.py b/pypy/module/_socket/interp_func.py
--- a/pypy/module/_socket/interp_func.py
+++ b/pypy/module/_socket/interp_func.py
@@ -1,8 +1,12 @@
-from pypy.interpreter.gateway import unwrap_spec, WrappedDefault
-from pypy.module._socket.interp_socket import converted_error, W_RSocket, addr_as_object, ipaddr_from_object
from rpython.rlib import rsocket
from rpython.rlib.rsocket import SocketError, INVALID_SOCKET
+
from pypy.interpreter.error import OperationError
+from pypy.interpreter.gateway import unwrap_spec, WrappedDefault
+from pypy.module._socket.interp_socket import (
+ converted_error, W_Socket, addr_as_object, ipaddr_from_object
+)
+
def gethostname(space):
"""gethostname() -> string
@@ -136,10 +140,10 @@
The remaining arguments are the same as for socket().
"""
try:
- sock = rsocket.fromfd(fd, family, type, proto, W_RSocket)
+ sock = rsocket.fromfd(fd, family, type, proto)
except SocketError, e:
raise converted_error(space, e)
- return space.wrap(sock)
+ return space.wrap(W_Socket(sock))
@unwrap_spec(family=int, type=int, proto=int)
def socketpair(space, family=rsocket.socketpair_default_family,
@@ -153,10 +157,13 @@
AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
"""
try:
- sock1, sock2 = rsocket.socketpair(family, type, proto, W_RSocket)
+ sock1, sock2 = rsocket.socketpair(family, type, proto)
except SocketError, e:
raise converted_error(space, e)
- return space.newtuple([space.wrap(sock1), space.wrap(sock2)])
+ return space.newtuple([
+ space.wrap(W_Socket(sock1)),
+ space.wrap(W_Socket(sock2))
+ ])
# The following 4 functions refuse all negative numbers, like CPython 2.6.
# They could also check that the argument is not too large, but CPython 2.6
diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py
--- a/pypy/module/_socket/interp_socket.py
+++ b/pypy/module/_socket/interp_socket.py
@@ -1,14 +1,18 @@
+from rpython.rlib import rsocket
+from rpython.rlib.rarithmetic import intmask
+from rpython.rlib.rsocket import (
+ RSocket, AF_INET, SOCK_STREAM, SocketError, SocketErrorWithErrno,
+ RSocketError
+)
+from rpython.rtyper.lltypesystem import lltype, rffi
+
+from pypy.interpreter import gateway
from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.typedef import TypeDef, make_weakref_descr,\
- interp_attrproperty
+from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import interp2app, unwrap_spec, WrappedDefault
-from rpython.rlib.rarithmetic import intmask
-from rpython.rtyper.lltypesystem import lltype, rffi
-from rpython.rlib import rsocket
-from rpython.rlib.rsocket import RSocket, AF_INET, SOCK_STREAM
-from rpython.rlib.rsocket import SocketError, SocketErrorWithErrno, RSocketError
-from pypy.interpreter.error import OperationError, oefmt
-from pypy.interpreter import gateway
+from pypy.interpreter.typedef import (
+ GetSetProperty, TypeDef, make_weakref_descr
+)
# XXX Hack to separate rpython and pypy
@@ -124,10 +128,18 @@
return addr
-class W_RSocket(W_Root, RSocket):
- def __del__(self):
- self.clear_all_weakrefs()
- RSocket.__del__(self)
+class W_Socket(W_Root):
+ def __init__(self, sock):
+ self.sock = sock
+
+ def get_type_w(self, space):
+ return space.wrap(self.sock.type)
+
+ def get_proto_w(self, space):
+ return space.wrap(self.sock.proto)
+
+ def get_family_w(self, space):
+ return space.wrap(self.sock.family)
def accept_w(self, space):
"""accept() -> (socket object, address info)
@@ -137,22 +149,22 @@
info is a pair (hostaddr, port).
"""
try:
- fd, addr = self.accept()
+ fd, addr = self.sock.accept()
sock = rsocket.make_socket(
- fd, self.family, self.type, self.proto, W_RSocket)
- return space.newtuple([space.wrap(sock),
+ fd, self.sock.family, self.sock.type, self.sock.proto)
+ return space.newtuple([space.wrap(W_Socket(sock)),
addr_as_object(addr, sock.fd, space)])
- except SocketError, e:
+ except SocketError as e:
raise converted_error(space, e)
# convert an Address into an app-level object
def addr_as_object(self, space, address):
- return addr_as_object(address, self.fd, space)
+ return addr_as_object(address, self.sock.fd, space)
# convert an app-level object into an Address
# based on the current socket's family
def addr_from_object(self, space, w_address):
- return addr_from_object(self.family, space, w_address)
+ return addr_from_object(self.sock.family, space, w_address)
def bind_w(self, space, w_addr):
"""bind(address)
@@ -162,8 +174,8 @@
sockets the address is a tuple (ifname, proto [,pkttype [,hatype]])
"""
try:
- self.bind(self.addr_from_object(space, w_addr))
- except SocketError, e:
+ self.sock.bind(self.addr_from_object(space, w_addr))
+ except SocketError as e:
raise converted_error(space, e)
def close_w(self, space):
@@ -172,7 +184,7 @@
Close the socket. It cannot be used after this call.
"""
try:
- self.close()
+ self.sock.close()
except SocketError:
# cpython doesn't return any errors on close
pass
@@ -184,8 +196,8 @@
is a pair (host, port).
"""
try:
- self.connect(self.addr_from_object(space, w_addr))
- except SocketError, e:
+ self.sock.connect(self.addr_from_object(space, w_addr))
+ except SocketError as e:
raise converted_error(space, e)
def connect_ex_w(self, space, w_addr):
@@ -196,15 +208,16 @@
"""
try:
addr = self.addr_from_object(space, w_addr)
- except SocketError, e:
+ except SocketError as e:
raise converted_error(space, e)
- error = self.connect_ex(addr)
+ error = self.sock.connect_ex(addr)
return space.wrap(error)
def dup_w(self, space):
try:
- return self.dup(W_RSocket)
- except SocketError, e:
+ sock = self.sock.dup()
+ return W_Socket(sock)
+ except SocketError as e:
raise converted_error(space, e)
def fileno_w(self, space):
@@ -212,7 +225,7 @@
Return the integer file descriptor of the socket.
"""
- return space.wrap(intmask(self.fd))
+ return space.wrap(intmask(self.sock.fd))
def getpeername_w(self, space):
"""getpeername() -> address info
@@ -221,9 +234,9 @@
info is a pair (hostaddr, port).
"""
try:
- addr = self.getpeername()
- return addr_as_object(addr, self.fd, space)
- except SocketError, e:
+ addr = self.sock.getpeername()
+ return addr_as_object(addr, self.sock.fd, space)
+ except SocketError as e:
raise converted_error(space, e)
def getsockname_w(self, space):
@@ -233,9 +246,9 @@
info is a pair (hostaddr, port).
"""
try:
- addr = self.getsockname()
- return addr_as_object(addr, self.fd, space)
- except SocketError, e:
+ addr = self.sock.getsockname()
+ return addr_as_object(addr, self.sock.fd, space)
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(level=int, optname=int)
@@ -248,11 +261,11 @@
"""
if w_buflen is None:
try:
- return space.wrap(self.getsockopt_int(level, optname))
- except SocketError, e:
+ return space.wrap(self.sock.getsockopt_int(level, optname))
+ except SocketError as e:
raise converted_error(space, e)
buflen = space.int_w(w_buflen)
- return space.wrap(self.getsockopt(level, optname, buflen))
+ return space.wrap(self.sock.getsockopt(level, optname, buflen))
def gettimeout_w(self, space):
"""gettimeout() -> timeout
@@ -260,7 +273,7 @@
Returns the timeout in floating seconds associated with socket
operations. A timeout of None indicates that timeouts on socket
"""
- timeout = self.gettimeout()
+ timeout = self.sock.gettimeout()
if timeout < 0.0:
return space.w_None
return space.wrap(timeout)
@@ -274,8 +287,8 @@
will allow before refusing new connections.
"""
try:
- self.listen(backlog)
- except SocketError, e:
+ self.sock.listen(backlog)
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(w_mode = WrappedDefault("r"),
@@ -298,8 +311,8 @@
the remote end is closed and all data is read, return the empty string.
"""
try:
- data = self.recv(buffersize, flags)
- except SocketError, e:
+ data = self.sock.recv(buffersize, flags)
+ except SocketError as e:
raise converted_error(space, e)
return space.wrap(data)
@@ -310,13 +323,13 @@
Like recv(buffersize, flags) but also return the sender's address info.
"""
try:
- data, addr = self.recvfrom(buffersize, flags)
+ data, addr = self.sock.recvfrom(buffersize, flags)
if addr:
- w_addr = addr_as_object(addr, self.fd, space)
+ w_addr = addr_as_object(addr, self.sock.fd, space)
else:
w_addr = space.w_None
return space.newtuple([space.wrap(data), w_addr])
- except SocketError, e:
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(data='bufferstr', flags=int)
@@ -328,8 +341,8 @@
sent; this may be less than len(data) if the network is busy.
"""
try:
- count = self.send(data, flags)
- except SocketError, e:
+ count = self.sock.send(data, flags)
+ except SocketError as e:
raise converted_error(space, e)
return space.wrap(count)
@@ -343,8 +356,9 @@
to tell how much data has been sent.
"""
try:
- self.sendall(data, flags, space.getexecutioncontext().checksignals)
- except SocketError, e:
+ self.sock.sendall(
+ data, flags, space.getexecutioncontext().checksignals)
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(data='bufferstr')
@@ -364,8 +378,8 @@
w_addr = w_param3
try:
addr = self.addr_from_object(space, w_addr)
- count = self.sendto(data, flags, addr)
- except SocketError, e:
+ count = self.sock.sendto(data, flags, addr)
+ except SocketError as e:
raise converted_error(space, e)
return space.wrap(count)
@@ -377,7 +391,7 @@
setblocking(True) is equivalent to settimeout(None);
setblocking(False) is equivalent to settimeout(0.0).
"""
- self.setblocking(flag)
+ self.sock.setblocking(flag)
@unwrap_spec(level=int, optname=int)
def setsockopt_w(self, space, level, optname, w_optval):
@@ -391,13 +405,13 @@
except:
optval = space.str_w(w_optval)
try:
- self.setsockopt(level, optname, optval)
- except SocketError, e:
+ self.sock.setsockopt(level, optname, optval)
+ except SocketError as e:
raise converted_error(space, e)
return
try:
- self.setsockopt_int(level, optname, optval)
- except SocketError, e:
+ self.sock.setsockopt_int(level, optname, optval)
+ except SocketError as e:
raise converted_error(space, e)
def settimeout_w(self, space, w_timeout):
@@ -415,7 +429,7 @@
if timeout < 0.0:
raise OperationError(space.w_ValueError,
space.wrap('Timeout value out of range'))
- self.settimeout(timeout)
+ self.sock.settimeout(timeout)
@unwrap_spec(nbytes=int, flags=int)
def recv_into_w(self, space, w_buffer, nbytes=0, flags=0):
@@ -424,8 +438,8 @@
if nbytes == 0 or nbytes > lgt:
nbytes = lgt
try:
- return space.wrap(self.recvinto(rwbuffer, nbytes, flags))
- except SocketError, e:
+ return space.wrap(self.sock.recvinto(rwbuffer, nbytes, flags))
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(nbytes=int, flags=int)
@@ -435,13 +449,13 @@
if nbytes == 0 or nbytes > lgt:
nbytes = lgt
try:
- readlgt, addr = self.recvfrom_into(rwbuffer, nbytes, flags)
+ readlgt, addr = self.sock.recvfrom_into(rwbuffer, nbytes, flags)
if addr:
- w_addr = addr_as_object(addr, self.fd, space)
+ w_addr = addr_as_object(addr, self.sock.fd, space)
else:
w_addr = space.w_None
return space.newtuple([space.wrap(readlgt), w_addr])
- except SocketError, e:
+ except SocketError as e:
raise converted_error(space, e)
@unwrap_spec(cmd=int)
@@ -473,7 +487,7 @@
option_ptr.c_keepaliveinterval = space.uint_w(w_interval)
res = _c.WSAIoctl(
- self.fd, cmd, value_ptr, value_size,
+ self.sock.fd, cmd, value_ptr, value_size,
rffi.NULL, 0, recv_ptr, rffi.NULL, rffi.NULL)
if res < 0:
raise converted_error(space, rsocket.last_error())
@@ -494,8 +508,8 @@
(flag == SHUT_RDWR).
"""
try:
- self.shutdown(how)
- except SocketError, e:
+ self.sock.shutdown(how)
+ except SocketError as e:
raise converted_error(space, e)
#------------------------------------------------------------
@@ -536,12 +550,13 @@
@unwrap_spec(family=int, type=int, proto=int)
def newsocket(space, w_subtype, family=AF_INET,
type=SOCK_STREAM, proto=0):
- sock = space.allocate_instance(W_RSocket, w_subtype)
+ self = space.allocate_instance(W_Socket, w_subtype)
try:
- W_RSocket.__init__(sock, family, type, proto)
- except SocketError, e:
+ sock = RSocket(family, type, proto)
+ except SocketError as e:
raise converted_error(space, e)
- return space.wrap(sock)
+ W_Socket.__init__(self, sock)
+ return space.wrap(self)
descr_socket_new = interp2app(newsocket)
# ____________________________________________________________
@@ -597,10 +612,10 @@
socketmethods = {}
for methodname in socketmethodnames:
- method = getattr(W_RSocket, methodname + '_w')
+ method = getattr(W_Socket, methodname + '_w')
socketmethods[methodname] = interp2app(method)
-W_RSocket.typedef = TypeDef("_socket.socket",
+W_Socket.typedef = TypeDef("_socket.socket",
__doc__ = """\
socket([family[, type[, proto]]]) -> socket object
@@ -639,9 +654,9 @@
[*] not available on all platforms!""",
__new__ = descr_socket_new,
- __weakref__ = make_weakref_descr(W_RSocket),
- type = interp_attrproperty('type', W_RSocket),
- proto = interp_attrproperty('proto', W_RSocket),
- family = interp_attrproperty('family', W_RSocket),
+ __weakref__ = make_weakref_descr(W_Socket),
+ type = GetSetProperty(W_Socket.get_type_w),
+ proto = GetSetProperty(W_Socket.get_proto_w),
+ family = GetSetProperty(W_Socket.get_family_w),
** socketmethods
)
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -15,6 +15,7 @@
interp2app, interpindirect2app, unwrap_spec)
from pypy.interpreter.typedef import (
GetSetProperty, TypeDef, make_weakref_descr)
+from pypy.interpreter.generator import GeneratorIterator
from pypy.module._file.interp_file import W_File
from pypy.objspace.std.floatobject import W_FloatObject
@@ -630,6 +631,10 @@
def make_array(mytype):
W_ArrayBase = globals()['W_ArrayBase']
+ unpack_driver = jit.JitDriver(name='unpack_array',
+ greens=['tp'],
+ reds=['self', 'w_iterator'])
+
class W_Array(W_ArrayBase):
itemsize = mytype.bytes
typecode = mytype.typecode
@@ -674,6 +679,10 @@
return rffi.cast(mytype.itemtype, item)
#
# "regular" case: it fits in an rpython integer (lltype.Signed)
+ # or it is a float
+ return self.item_from_int_or_float(item)
+
+ def item_from_int_or_float(self, item):
result = rffi.cast(mytype.itemtype, item)
if mytype.canoverflow:
if rffi.cast(lltype.Signed, result) != item:
@@ -686,8 +695,8 @@
% mytype.bytes)
if not mytype.signed:
msg = 'un' + msg # 'signed' => 'unsigned'
- raise OperationError(space.w_OverflowError,
- space.wrap(msg))
+ raise OperationError(self.space.w_OverflowError,
+ self.space.wrap(msg))
return result
def __del__(self):
@@ -734,27 +743,65 @@
def fromsequence(self, w_seq):
space = self.space
oldlen = self.len
- try:
- new = space.len_w(w_seq)
- self.setlen(self.len + new)
- except OperationError:
- pass
+ newlen = oldlen
- i = 0
- try:
- if mytype.typecode == 'u':
- myiter = space.unpackiterable
- else:
- myiter = space.listview
- for w_i in myiter(w_seq):
- if oldlen + i >= self.len:
- self.setlen(oldlen + i + 1)
- self.buffer[oldlen + i] = self.item_w(w_i)
- i += 1
- except OperationError:
- self.setlen(oldlen + i)
- raise
- self.setlen(oldlen + i)
+ # optimized case for arrays of integers or floats
+ if mytype.unwrap == 'int_w':
+ lst = space.listview_int(w_seq)
+ elif mytype.unwrap == 'float_w':
+ lst = space.listview_float(w_seq)
+ else:
+ lst = None
+ if lst is not None:
+ self.setlen(oldlen + len(lst))
+ try:
+ buf = self.buffer
+ for num in lst:
+ buf[newlen] = self.item_from_int_or_float(num)
+ newlen += 1
+ except OperationError:
+ self.setlen(newlen)
+ raise
+ return
+
+ # this is the common case: w_seq is a list or a tuple
+ lst_w = space.listview_no_unpack(w_seq)
+ if lst_w is not None:
+ self.setlen(oldlen + len(lst_w))
+ buf = self.buffer
+ try:
+ for w_num in lst_w:
+ # note: self.item_w() might invoke arbitrary code.
+ # In case it resizes the same array, then strange
+ # things may happen, but as we don't reload 'buf'
+ # we know that one is big enough for all items
+ # (so at least we avoid crashes)
+ buf[newlen] = self.item_w(w_num)
+ newlen += 1
+ except OperationError:
+ if buf == self.buffer:
+ self.setlen(newlen)
+ raise
+ return
+
+ self._fromiterable(w_seq)
+
+ def _fromiterable(self, w_seq):
+ # a more careful case if w_seq happens to be a very large
+ # iterable: don't copy the items into some intermediate list
+ w_iterator = self.space.iter(w_seq)
+ tp = self.space.type(w_iterator)
+ while True:
+ unpack_driver.jit_merge_point(tp=tp, self=self,
+ w_iterator=w_iterator)
+ space = self.space
+ try:
+ w_item = space.next(w_iterator)
+ except OperationError, e:
+ if not e.match(space, space.w_StopIteration):
+ raise
+ break # done
+ self.descr_append(space, w_item)
def extend(self, w_iterable, accept_different_array=False):
space = self.space
@@ -797,8 +844,9 @@
def descr_append(self, space, w_x):
x = self.item_w(w_x)
- self.setlen(self.len + 1)
- self.buffer[self.len - 1] = x
+ index = self.len
+ self.setlen(index + 1)
+ self.buffer[index] = x
# List interface
def descr_count(self, space, w_val):
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -10,7 +10,7 @@
from rpython.rtyper.lltypesystem import ll2ctypes
from rpython.rtyper.annlowlevel import llhelper
from rpython.rlib.objectmodel import we_are_translated
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.gensupp import NameManager
from rpython.tool.udir import udir
diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py
--- a/pypy/module/cpyext/listobject.py
+++ b/pypy/module/cpyext/listobject.py
@@ -46,11 +46,11 @@
IndexError exception."""
if not isinstance(w_list, W_ListObject):
PyErr_BadInternalCall(space)
- wrappeditems = w_list.getitems()
- if index < 0 or index >= len(wrappeditems):
+ if index < 0 or index >= w_list.length():
raise OperationError(space.w_IndexError, space.wrap(
"list index out of range"))
- return borrow_from(w_list, wrappeditems[index])
+ w_item = w_list.getitem(index)
+ return borrow_from(w_list, w_item)
@cpython_api([PyObject, PyObject], rffi.INT_real, error=-1)
diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py
--- a/pypy/module/cpyext/test/test_cpyext.py
+++ b/pypy/module/cpyext/test/test_cpyext.py
@@ -205,12 +205,7 @@
import sys
if sys.platform != "win32" or sys.version_info < (2, 6):
skip("Windows Python >= 2.6 only")
- assert sys.dllhandle
- assert sys.dllhandle.getaddressindll('PyPyErr_NewException')
- import ctypes # slow
- PyUnicode_GetDefaultEncoding = ctypes.pythonapi.PyPyUnicode_GetDefaultEncoding
- PyUnicode_GetDefaultEncoding.restype = ctypes.c_char_p
- assert PyUnicode_GetDefaultEncoding() == 'ascii'
+ assert isinstance(sys.dllhandle, int)
class AppTestCpythonExtensionBase(LeakCheckingTest):
diff --git a/pypy/module/micronumpy/__init__.py b/pypy/module/micronumpy/__init__.py
--- a/pypy/module/micronumpy/__init__.py
+++ b/pypy/module/micronumpy/__init__.py
@@ -12,7 +12,7 @@
'scalar' : 'ctors.build_scalar',
'array': 'ctors.array',
'zeros': 'ctors.zeros',
- 'empty': 'ctors.zeros',
+ 'empty': 'ctors.empty',
'empty_like': 'ctors.empty_like',
'fromstring': 'ctors.fromstring',
'frombuffer': 'ctors.frombuffer',
diff --git a/pypy/module/micronumpy/base.py b/pypy/module/micronumpy/base.py
--- a/pypy/module/micronumpy/base.py
+++ b/pypy/module/micronumpy/base.py
@@ -33,12 +33,12 @@
self.implementation = implementation
@staticmethod
- def from_shape(space, shape, dtype, order='C', w_instance=None):
+ def from_shape(space, shape, dtype, order='C', w_instance=None, zero=True):
from pypy.module.micronumpy import concrete
from pypy.module.micronumpy.strides import calc_strides
strides, backstrides = calc_strides(shape, dtype.base, order)
impl = concrete.ConcreteArray(shape, dtype.base, order, strides,
- backstrides)
+ backstrides, zero=zero)
if w_instance:
return wrap_impl(space, space.type(w_instance), w_instance, impl)
return W_NDimArray(impl)
diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py
--- a/pypy/module/micronumpy/concrete.py
+++ b/pypy/module/micronumpy/concrete.py
@@ -369,9 +369,11 @@
class ConcreteArray(ConcreteArrayNotOwning):
- def __init__(self, shape, dtype, order, strides, backstrides, storage=lltype.nullptr(RAW_STORAGE)):
+ def __init__(self, shape, dtype, order, strides, backstrides,
+ storage=lltype.nullptr(RAW_STORAGE), zero=True):
if storage == lltype.nullptr(RAW_STORAGE):
- storage = dtype.itemtype.malloc(support.product(shape) * dtype.elsize)
+ storage = dtype.itemtype.malloc(support.product(shape) *
+ dtype.elsize, zero=zero)
ConcreteArrayNotOwning.__init__(self, shape, dtype, order, strides, backstrides,
storage)
diff --git a/pypy/module/micronumpy/ctors.py b/pypy/module/micronumpy/ctors.py
--- a/pypy/module/micronumpy/ctors.py
+++ b/pypy/module/micronumpy/ctors.py
@@ -128,13 +128,19 @@
return w_arr
-def zeros(space, w_shape, w_dtype=None, w_order=None):
+def _zeros_or_empty(space, w_shape, w_dtype, w_order, zero):
dtype = space.interp_w(descriptor.W_Dtype,
space.call_function(space.gettypefor(descriptor.W_Dtype), w_dtype))
if dtype.is_str_or_unicode() and dtype.elsize < 1:
dtype = descriptor.variable_dtype(space, dtype.char + '1')
shape = shape_converter(space, w_shape, dtype)
- return W_NDimArray.from_shape(space, shape, dtype=dtype)
+ return W_NDimArray.from_shape(space, shape, dtype=dtype, zero=zero)
+
+def empty(space, w_shape, w_dtype=None, w_order=None):
+ return _zeros_or_empty(space, w_shape, w_dtype, w_order, zero=False)
+
+def zeros(space, w_shape, w_dtype=None, w_order=None):
+ return _zeros_or_empty(space, w_shape, w_dtype, w_order, zero=True)
@unwrap_spec(subok=bool)
@@ -148,7 +154,8 @@
if dtype.is_str_or_unicode() and dtype.elsize < 1:
dtype = descriptor.variable_dtype(space, dtype.char + '1')
return W_NDimArray.from_shape(space, w_a.get_shape(), dtype=dtype,
- w_instance=w_a if subok else None)
+ w_instance=w_a if subok else None,
+ zero=False)
def _fromstring_text(space, s, count, sep, length, dtype):
diff --git a/pypy/module/micronumpy/test/test_arrayops.py b/pypy/module/micronumpy/test/test_arrayops.py
--- a/pypy/module/micronumpy/test/test_arrayops.py
+++ b/pypy/module/micronumpy/test/test_arrayops.py
@@ -2,6 +2,29 @@
class AppTestNumSupport(BaseNumpyAppTest):
+ def test_zeros(self):
+ from numpypy import zeros
+ a = zeros(3)
+ assert len(a) == 3
+ assert a[0] == a[1] == a[2] == 0
+
+ def test_empty(self):
+ from numpypy import empty
+ import gc
+ for i in range(1000):
+ a = empty(3)
+ assert len(a) == 3
+ if not (a[0] == a[1] == a[2] == 0):
+ break # done
+ a[0] = 1.23
+ a[1] = 4.56
+ a[2] = 7.89
+ del a
+ gc.collect()
+ else:
+ raise AssertionError(
+ "empty() returned a zeroed out array every time")
+
def test_where(self):
from numpypy import where, ones, zeros, array
a = [1, 2, 3, 0, -3]
diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py
--- a/pypy/module/micronumpy/test/test_ndarray.py
+++ b/pypy/module/micronumpy/test/test_ndarray.py
@@ -11,7 +11,7 @@
class MockDtype(object):
class itemtype(object):
@staticmethod
- def malloc(size):
+ def malloc(size, zero=True):
return None
def __init__(self):
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -117,8 +117,11 @@
def __repr__(self):
return self.__class__.__name__
- def malloc(self, size):
- return alloc_raw_storage(size, track_allocation=False, zero=True)
+ def malloc(self, size, zero=True):
+ if zero:
+ return alloc_raw_storage(size, track_allocation=False, zero=True)
+ else:
+ return alloc_raw_storage(size, track_allocation=False, zero=False)
class Primitive(object):
_mixin_ = True
diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
--- a/pypy/module/posix/interp_posix.py
+++ b/pypy/module/posix/interp_posix.py
@@ -578,13 +578,15 @@
except OperationError, e:
# fall back to the original byte string
result_w[i] = w_bytes
+ return space.newlist(result_w)
else:
dirname = space.str0_w(w_dirname)
result = rposix.listdir(dirname)
- result_w = [space.wrap(s) for s in result]
+ # The list comprehension is a workaround for an obscure translation
+ # bug.
+ return space.newlist_bytes([x for x in result])
except OSError, e:
raise wrap_oserror2(space, e, w_dirname)
- return space.newlist(result_w)
def pipe(space):
"Create a pipe. Returns (read_end, write_end)."
diff --git a/pypy/module/sys/test/test_sysmodule.py b/pypy/module/sys/test/test_sysmodule.py
--- a/pypy/module/sys/test/test_sysmodule.py
+++ b/pypy/module/sys/test/test_sysmodule.py
@@ -391,7 +391,8 @@
import sys
if hasattr(sys, "getwindowsversion"):
v = sys.getwindowsversion()
- assert isinstance(v, tuple)
+ if '__pypy__' in sys.builtin_module_names:
+ assert isinstance(v, tuple)
assert len(v) == 5
assert isinstance(v[0], int)
assert isinstance(v[1], int)
@@ -419,6 +420,10 @@
if hasattr(sys, "winver"):
assert sys.winver == sys.version[:3]
+ def test_dllhandle(self):
+ import sys
+ assert hasattr(sys, 'dllhandle') == (sys.platform == 'win32')
+
def test_dlopenflags(self):
import sys
if hasattr(sys, "setdlopenflags"):
@@ -486,7 +491,8 @@
assert isinstance(sys.version, basestring)
assert isinstance(sys.warnoptions, list)
vi = sys.version_info
- assert isinstance(vi, tuple)
+ if '__pypy__' in sys.builtin_module_names:
+ assert isinstance(vi, tuple)
assert len(vi) == 5
assert isinstance(vi[0], int)
assert isinstance(vi[1], int)
@@ -512,6 +518,8 @@
def test_pypy_attributes(self):
import sys
+ if '__pypy__' not in sys.builtin_module_names:
+ skip("only on PyPy")
assert isinstance(sys.pypy_objspaceclass, str)
vi = sys.pypy_version_info
assert isinstance(vi, tuple)
@@ -528,10 +536,14 @@
def test_subversion(self):
import sys
+ if '__pypy__' not in sys.builtin_module_names:
+ skip("only on PyPy")
assert sys.subversion == ('PyPy', '', '')
def test__mercurial(self):
import sys, re
+ if '__pypy__' not in sys.builtin_module_names:
+ skip("only on PyPy")
project, hgtag, hgid = sys._mercurial
assert project == 'PyPy'
# the tag or branch may be anything, including the empty string
diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py
--- a/pypy/module/sys/vm.py
+++ b/pypy/module/sys/vm.py
@@ -233,8 +233,6 @@
def get_dllhandle(space):
if not space.config.objspace.usemodules.cpyext:
return space.wrap(0)
- if not space.config.objspace.usemodules._rawffi:
- return space.wrap(0)
return _get_dllhandle(space)
@@ -243,11 +241,14 @@
from pypy.module.cpyext.api import State
handle = space.fromcache(State).get_pythonapi_handle()
- # Make a dll object with it
- from pypy.module._rawffi.interp_rawffi import W_CDLL
- from rpython.rlib.clibffi import RawCDLL
- cdll = RawCDLL(handle)
- return space.wrap(W_CDLL(space, "python api", cdll))
+ # It used to be a CDLL
+ # from pypy.module._rawffi.interp_rawffi import W_CDLL
+ # from rpython.rlib.clibffi import RawCDLL
+ # cdll = RawCDLL(handle)
+ # return space.wrap(W_CDLL(space, "python api", cdll))
+ # Provide a cpython-compatible int
+ from rpython.rtyper.lltypesystem import lltype, rffi
+ return space.wrap(rffi.cast(lltype.Signed, handle))
def getsizeof(space, w_object, w_default=None):
"""Not implemented on PyPy."""
diff --git a/pypy/module/thread/test/support.py b/pypy/module/thread/test/support.py
--- a/pypy/module/thread/test/support.py
+++ b/pypy/module/thread/test/support.py
@@ -44,6 +44,7 @@
spaceconfig = dict(usemodules=('thread', 'rctime', 'signal'))
def setup_class(cls):
+ cls.w_runappdirect = cls.space.wrap(cls.runappdirect)
if cls.runappdirect:
def plain_waitfor(self, condition, delay=1):
adaptivedelay = 0.04
diff --git a/pypy/module/thread/test/test_lock.py b/pypy/module/thread/test/test_lock.py
--- a/pypy/module/thread/test/test_lock.py
+++ b/pypy/module/thread/test/test_lock.py
@@ -57,8 +57,34 @@
assert lock.acquire() is True
assert lock.acquire(False) is False
raises(TypeError, lock.acquire, True, timeout=.1)
- lock._py3k_acquire(True, timeout=.01)
- lock._py3k_acquire(True, .01)
+ if hasattr(lock, '_py3k_acquire'):
+ lock._py3k_acquire(True, timeout=.01)
+ lock._py3k_acquire(True, .01)
+ else:
+ assert self.runappdirect, "missing lock._py3k_acquire()"
+
+ def test_ping_pong(self):
+ # The purpose of this test is that doing a large number of ping-pongs
+ # between two threads, using locks, should complete in a reasonable
+ # time on a translated pypy with -A. If the GIL logic causes too
+ # much sleeping, then it will fail.
+ import thread, time
+ COUNT = 100000 if self.runappdirect else 50
+ lock1 = thread.allocate_lock()
+ lock2 = thread.allocate_lock()
+ def fn():
+ for i in range(COUNT):
+ lock1.acquire()
+ lock2.release()
+ lock2.acquire()
+ print "STARTING"
+ start = time.time()
+ thread.start_new_thread(fn, ())
+ for i in range(COUNT):
+ lock2.acquire()
+ lock1.release()
+ stop = time.time()
+ assert stop - start < 30.0 # ~0.6 sec on pypy-c-jit
def test_compile_lock():
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -421,14 +421,19 @@
assert expected_length >= 0
return self.fixedview(w_obj, expected_length, unroll=True)
+ def listview_no_unpack(self, w_obj):
+ if type(w_obj) is W_ListObject:
+ return w_obj.getitems()
+ elif isinstance(w_obj, W_AbstractTupleObject) and self._uses_tuple_iter(w_obj):
+ return w_obj.getitems_copy()
+ elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems()
+ else:
+ return None
+
def listview(self, w_obj, expected_length=-1):
- if type(w_obj) is W_ListObject:
- t = w_obj.getitems()
- elif isinstance(w_obj, W_AbstractTupleObject) and self._uses_tuple_iter(w_obj):
- t = w_obj.getitems_copy()
- elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
- t = w_obj.getitems()
- else:
+ t = self.listview_no_unpack(w_obj)
+ if t is None:
return ObjSpace.unpackiterable(self, w_obj, expected_length)
if expected_length != -1 and len(t) != expected_length:
raise self._wrap_expected_length(expected_length, len(t))
diff --git a/pypy/tool/gcdump.py b/pypy/tool/gcdump.py
--- a/pypy/tool/gcdump.py
+++ b/pypy/tool/gcdump.py
@@ -43,7 +43,7 @@
def print_summary(self):
items = self.summary.items()
- items.sort(key=lambda(typenum, stat): stat[1]) # sort by totalsize
+ items.sort(key=lambda (typenum, stat): stat[1]) # sort by totalsize
totalsize = 0
for typenum, stat in items:
totalsize += stat[1]
diff --git a/rpython/conftest.py b/rpython/conftest.py
--- a/rpython/conftest.py
+++ b/rpython/conftest.py
@@ -1,10 +1,8 @@
-from os.path import *
import py, pytest
from rpython.tool import leakfinder
pytest_plugins = 'rpython.tool.pytest.expecttest'
-cdir = realpath(join(dirname(__file__), 'translator', 'c'))
option = None
def braindead_deindent(self):
diff --git a/rpython/jit/backend/detect_cpu.py b/rpython/jit/backend/detect_cpu.py
--- a/rpython/jit/backend/detect_cpu.py
+++ b/rpython/jit/backend/detect_cpu.py
@@ -73,11 +73,14 @@
result = MODEL_X86_64
else:
assert sys.maxint == 2**31-1
- from rpython.jit.backend.x86.detect_sse2 import detect_sse2
- if detect_sse2():
+ from rpython.jit.backend.x86 import detect_sse2
+ if detect_sse2.detect_sse2():
result = MODEL_X86
else:
result = MODEL_X86_NO_SSE2
+ if detect_sse2.detect_x32_mode():
+ raise ProcessorAutodetectError(
+ 'JITting in x32 mode is not implemented')
#
if result.startswith('arm'):
from rpython.jit.backend.arm.detect import detect_float
diff --git a/rpython/jit/backend/llsupport/rewrite.py b/rpython/jit/backend/llsupport/rewrite.py
--- a/rpython/jit/backend/llsupport/rewrite.py
+++ b/rpython/jit/backend/llsupport/rewrite.py
@@ -396,16 +396,6 @@
#op = op.copy_and_change(rop.SETFIELD_RAW)
self.newops.append(op)
- def handle_write_barrier_setinteriorfield(self, op):
- val = op.getarg(0)
- if val not in self.write_barrier_applied:
- v = op.getarg(2)
- if isinstance(v, BoxPtr) or (isinstance(v, ConstPtr) and
- bool(v.value)): # store a non-NULL
- self.gen_write_barrier(val)
- #op = op.copy_and_change(rop.SETINTERIORFIELD_RAW)
- self.newops.append(op)
-
def handle_write_barrier_setarrayitem(self, op):
val = op.getarg(0)
if val not in self.write_barrier_applied:
@@ -413,9 +403,11 @@
if isinstance(v, BoxPtr) or (isinstance(v, ConstPtr) and
bool(v.value)): # store a non-NULL
self.gen_write_barrier_array(val, op.getarg(1))
- #op = op.copy_and_change(rop.SETARRAYITEM_RAW)
+ #op = op.copy_and_change(rop.SET{ARRAYITEM,INTERIORFIELD}_RAW)
self.newops.append(op)
+ handle_write_barrier_setinteriorfield = handle_write_barrier_setarrayitem
+
def gen_write_barrier(self, v_base):
write_barrier_descr = self.gc_ll_descr.write_barrier_descr
args = [v_base]
diff --git a/rpython/jit/backend/llsupport/test/test_rewrite.py b/rpython/jit/backend/llsupport/test/test_rewrite.py
--- a/rpython/jit/backend/llsupport/test/test_rewrite.py
+++ b/rpython/jit/backend/llsupport/test/test_rewrite.py
@@ -669,7 +669,7 @@
jump(p1, p2)
""", """
[p1, p2]
- cond_call_gc_wb(p1, descr=wbdescr)
+ cond_call_gc_wb_array(p1, 0, descr=wbdescr)
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", interiorzdescr=interiorzdescr)
diff --git a/rpython/jit/backend/llsupport/test/ztranslation_test.py b/rpython/jit/backend/llsupport/test/ztranslation_test.py
--- a/rpython/jit/backend/llsupport/test/ztranslation_test.py
+++ b/rpython/jit/backend/llsupport/test/ztranslation_test.py
@@ -10,6 +10,9 @@
from rpython.jit.backend.test.support import CCompiledMixin
from rpython.jit.codewriter.policy import StopAtXPolicy
from rpython.config.config import ConfigError
+from rpython.translator.tool.cbuild import ExternalCompilationInfo
+from rpython.rtyper.lltypesystem import lltype, rffi
+
class TranslationTest(CCompiledMixin):
CPUClass = getcpuclass()
@@ -25,6 +28,7 @@
# - floats neg and abs
# - threadlocalref_get
# - get_errno, set_errno
+ # - llexternal with macro=True
class Frame(object):
_virtualizable_ = ['i']
@@ -36,9 +40,15 @@
pass
t = ThreadLocalReference(Foo)
- @dont_look_inside
- def myabs(x):
- return abs(x)
+ eci = ExternalCompilationInfo(post_include_bits=['''
+#define pypy_my_fabs(x) fabs(x)
+'''])
+ myabs1 = rffi.llexternal('pypy_my_fabs', [lltype.Float],
+ lltype.Float, macro=True, releasegil=False,
+ compilation_info=eci)
+ myabs2 = rffi.llexternal('pypy_my_fabs', [lltype.Float],
+ lltype.Float, macro=True, releasegil=True,
+ compilation_info=eci)
jitdriver = JitDriver(greens = [],
reds = ['total', 'frame', 'j'],
@@ -61,7 +71,7 @@
frame.i -= 1
j *= -0.712
if j + (-j): raise ValueError
- k = myabs(j)
+ k = myabs1(myabs2(j))
if k - abs(j): raise ValueError
if k - abs(-j): raise ValueError
if t.get().nine != 9: raise ValueError
@@ -69,7 +79,6 @@
if rposix.get_errno() != total: raise ValueError
return chr(total % 253)
#
- from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib.libffi import types, CDLL, ArgChain
from rpython.rlib.test.test_clibffi import get_libm_name
libm_name = get_libm_name(sys.platform)
diff --git a/rpython/jit/backend/x86/callbuilder.py b/rpython/jit/backend/x86/callbuilder.py
--- a/rpython/jit/backend/x86/callbuilder.py
+++ b/rpython/jit/backend/x86/callbuilder.py
@@ -132,7 +132,6 @@
self.mc.ADD(ebp, imm(1)) # ebp any more; and ignore 'fastgil'
def move_real_result_and_call_reacqgil_addr(self, fastgil):
- from rpython.jit.backend.x86.assembler import heap
from rpython.jit.backend.x86 import rx86
#
# check if we need to call the reacqgil() function or not
diff --git a/rpython/jit/backend/x86/detect_sse2.py b/rpython/jit/backend/x86/detect_sse2.py
--- a/rpython/jit/backend/x86/detect_sse2.py
+++ b/rpython/jit/backend/x86/detect_sse2.py
@@ -1,3 +1,4 @@
+import sys
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib.rmmap import alloc, free
@@ -18,9 +19,26 @@
free(data, 4096)
return bool(code & (1<<25)) and bool(code & (1<<26))
+def detect_x32_mode():
+ data = alloc(4096)
+ pos = 0 # 32-bit 64-bit / x32
+ for c in ("\x48" # DEC EAX
+ "\xB8\xC8\x00\x00\x00"# MOV EAX, 200 MOV RAX, 0x40404040000000C8
+ "\x40\x40\x40\x40" # 4x INC EAX
+ "\xC3"): # RET RET
+ data[pos] = c
+ pos += 1
+ fnptr = rffi.cast(lltype.Ptr(lltype.FuncType([], lltype.Signed)), data)
+ code = fnptr()
+ free(data, 4096)
+ assert code in (200, 204, 0x40404040000000C8)
+ return code == 200
+
if __name__ == '__main__':
if detect_sse2():
print 'Processor supports sse2.'
else:
print 'Missing processor support for sse2.'
+ if detect_x32_mode():
+ print 'Process is running in "x32" mode.'
diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py
--- a/rpython/jit/metainterp/resoperation.py
+++ b/rpython/jit/metainterp/resoperation.py
@@ -499,7 +499,7 @@
'SETARRAYITEM_GC/3d',
'SETARRAYITEM_RAW/3d',
'SETINTERIORFIELD_GC/3d',
- 'SETINTERIORFIELD_RAW/3d', # only used by llsupport/rewrite.py
+ 'SETINTERIORFIELD_RAW/3d', # right now, only used by tests
'RAW_STORE/3d',
'SETFIELD_GC/2d',
'SETFIELD_RAW/2d',
diff --git a/rpython/memory/test/snippet.py b/rpython/memory/test/snippet.py
--- a/rpython/memory/test/snippet.py
+++ b/rpython/memory/test/snippet.py
@@ -47,22 +47,33 @@
class State:
pass
state = State()
+ def age_of(c):
+ return state.age[ord(c) - ord('a')]
+ def set_age_of(c, newvalue):
+ # NB. this used to be a dictionary, but setting into a dict
+ # consumes memory. This has the effect that this test's
+ # __del__ methods can consume more memory and potentially
+ # cause another collection. This would result in objects
+ # being unexpectedly destroyed at the same 'state.time'.
+ state.age[ord(c) - ord('a')] = newvalue
+
class A:
def __init__(self, key):
self.key = key
self.refs = []
def __del__(self):
- assert state.age[self.key] == -1
- state.age[self.key] = state.time
+ from rpython.rlib.debug import debug_print
+ debug_print("DEL:", self.key)
+ assert age_of(self.key) == -1
+ set_age_of(self.key, state.time)
state.progress = True
def build_example(input):
state.time = 0
- state.age = {}
+ state.age = [-1] * len(letters)
vertices = {}
for c in letters:
vertices[c] = A(c)
- state.age[c] = -1
for c, d in input:
vertices[c].refs.append(vertices[d])
@@ -72,6 +83,8 @@
input, components, strict = examples[i]
build_example(input)
while state.time < len(letters):
+ from rpython.rlib.debug import debug_print
+ debug_print("STATE.TIME:", state.time)
state.progress = False
llop.gc__collect(lltype.Void)
if not state.progress:
@@ -80,16 +93,16 @@
# summarize the finalization order
lst = []
for c in letters:
- lst.append('%s:%d' % (c, state.age[c]))
+ lst.append('%s:%d' % (c, age_of(c)))
summary = ', '.join(lst)
# check that all instances have been finalized
- if -1 in state.age.values():
+ if -1 in state.age:
return error(i, summary, "not all instances finalized")
# check that if a -> b and a and b are not in the same
# strong component, then a is finalized strictly before b
for c, d in strict:
- if state.age[c] >= state.age[d]:
+ if age_of(c) >= age_of(d):
return error(i, summary,
"%s should be finalized before %s"
% (c, d))
@@ -98,7 +111,7 @@
for component in components:
seen = {}
for c in component:
- age = state.age[c]
+ age = age_of(c)
if age in seen:
d = seen[age]
return error(i, summary,
diff --git a/rpython/rlib/_rffi_stacklet.py b/rpython/rlib/_rffi_stacklet.py
--- a/rpython/rlib/_rffi_stacklet.py
+++ b/rpython/rlib/_rffi_stacklet.py
@@ -3,7 +3,7 @@
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.rtyper.tool import rffi_platform
from rpython.rlib.rarithmetic import is_emulated_long
-from rpython.conftest import cdir
+from rpython.translator import cdir
cdir = py.path.local(cdir)
diff --git a/rpython/rlib/_rsocket_rffi.py b/rpython/rlib/_rsocket_rffi.py
--- a/rpython/rlib/_rsocket_rffi.py
+++ b/rpython/rlib/_rsocket_rffi.py
@@ -493,10 +493,16 @@
getnameinfo = external('getnameinfo', [sockaddr_ptr, socklen_t, CCHARP,
size_t, CCHARP, size_t, rffi.INT], rffi.INT)
-htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False)
-htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False)
-ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False)
-ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False)
+if sys.platform.startswith("openbsd"):
+ htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True)
+ htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True)
+ ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True)
+ ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True)
+else:
+ htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False)
+ htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False)
+ ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False)
+ ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False)
if _POSIX:
inet_aton = external('inet_aton', [CCHARP, lltype.Ptr(in_addr)],
diff --git a/rpython/rlib/clibffi.py b/rpython/rlib/clibffi.py
--- a/rpython/rlib/clibffi.py
+++ b/rpython/rlib/clibffi.py
@@ -15,7 +15,7 @@
from rpython.rlib.objectmodel import specialize
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.platform import platform
-from rpython.conftest import cdir
+from rpython.translator import cdir
from platform import machine
import py
import os
diff --git a/rpython/rlib/rdtoa.py b/rpython/rlib/rdtoa.py
--- a/rpython/rlib/rdtoa.py
+++ b/rpython/rlib/rdtoa.py
@@ -1,7 +1,7 @@
from __future__ import with_statement
from rpython.rlib import rfloat
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib import jit
from rpython.rlib.rstring import StringBuilder
diff --git a/rpython/rlib/rgil.py b/rpython/rlib/rgil.py
--- a/rpython/rlib/rgil.py
+++ b/rpython/rlib/rgil.py
@@ -1,5 +1,5 @@
import py
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
diff --git a/rpython/rlib/rsignal.py b/rpython/rlib/rsignal.py
--- a/rpython/rlib/rsignal.py
+++ b/rpython/rlib/rsignal.py
@@ -1,7 +1,7 @@
import signal as cpy_signal
import sys
import py
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rtyper.tool import rffi_platform
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.translator.tool.cbuild import ExternalCompilationInfo
diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py
--- a/rpython/rlib/rsocket.py
+++ b/rpython/rlib/rsocket.py
@@ -15,17 +15,18 @@
# It's unclear if makefile() and SSL support belong here or only as
# app-level code for PyPy.
+from rpython.rlib import _rsocket_rffi as _c, jit, rgc
from rpython.rlib.objectmodel import instantiate, keepalive_until_here
-from rpython.rlib import _rsocket_rffi as _c
from rpython.rlib.rarithmetic import intmask, r_uint
from rpython.rlib.rthread import dummy_lock
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper.lltypesystem.rffi import sizeof, offsetof
-INVALID_SOCKET = _c.INVALID_SOCKET
-from rpython.rlib import jit
+
+
# Usage of @jit.dont_look_inside in this file is possibly temporary
# and only because some lltypes declared in _rsocket_rffi choke the
# JIT's codewriter right now (notably, FixedSizeArray).
+INVALID_SOCKET = _c.INVALID_SOCKET
def mallocbuf(buffersize):
@@ -86,6 +87,7 @@
self.addr_p = addr
self.addrlen = addrlen
+ @rgc.must_be_light_finalizer
def __del__(self):
if self.addr_p:
lltype.free(self.addr_p, flavor='raw', track_allocation=False)
@@ -493,8 +495,8 @@
class RSocket(object):
"""RPython-level socket object.
"""
- _mixin_ = True # for interp_socket.py
fd = _c.INVALID_SOCKET
+
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0,
fd=_c.INVALID_SOCKET):
"""Create a new socket."""
@@ -509,6 +511,7 @@
self.proto = proto
self.timeout = defaults.timeout
+ @rgc.must_be_light_finalizer
def __del__(self):
fd = self.fd
if fd != _c.INVALID_SOCKET:
diff --git a/rpython/rlib/rstack.py b/rpython/rlib/rstack.py
--- a/rpython/rlib/rstack.py
+++ b/rpython/rlib/rstack.py
@@ -10,7 +10,7 @@
from rpython.rlib import rgc
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper.lltypesystem.lloperation import llop
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
# ____________________________________________________________
diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py
--- a/rpython/rlib/rthread.py
+++ b/rpython/rlib/rthread.py
@@ -1,6 +1,6 @@
from rpython.rtyper.lltypesystem import rffi, lltype, llmemory
from rpython.translator.tool.cbuild import ExternalCompilationInfo
-from rpython.conftest import cdir
+from rpython.translator import cdir
import py
from rpython.rlib import jit, rgc
from rpython.rlib.debug import ll_assert
@@ -59,7 +59,7 @@
c_thread_acquirelock = llexternal('RPyThreadAcquireLock', [TLOCKP, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
-c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
+c_thread_acquirelock_timed = llexternal('RPyThreadAcquireLockTimed',
[TLOCKP, rffi.LONGLONG, rffi.INT],
rffi.INT,
releasegil=True) # release the GIL
diff --git a/rpython/rtyper/lltypesystem/module/ll_math.py b/rpython/rtyper/lltypesystem/module/ll_math.py
--- a/rpython/rtyper/lltypesystem/module/ll_math.py
+++ b/rpython/rtyper/lltypesystem/module/ll_math.py
@@ -3,7 +3,7 @@
import py
import sys
-from rpython.conftest import cdir
+from rpython.translator import cdir
from rpython.rlib import jit, rposix
from rpython.rlib.rfloat import INFINITY, NAN, isfinite, isinf, isnan
from rpython.rtyper.lltypesystem import lltype, rffi
diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py
--- a/rpython/rtyper/lltypesystem/rdict.py
+++ b/rpython/rtyper/lltypesystem/rdict.py
@@ -540,18 +540,21 @@
# avoid extra branches.
def ll_dict_resize(d):
- old_entries = d.entries
- old_size = len(old_entries)
# make a 'new_size' estimate and shrink it if there are many
# deleted entry markers. See CPython for why it is a good idea to
# quadruple the dictionary size as long as it's not too big.
num_items = d.num_items + 1
if num_items > 50000: new_estimate = num_items * 2
else: new_estimate = num_items * 4
+ _ll_dict_resize_to(d, new_estimate)
+ll_dict_resize.oopspec = 'dict.resize(d)'
+
+def _ll_dict_resize_to(d, new_estimate):
new_size = DICT_INITSIZE
while new_size <= new_estimate:
new_size *= 2
- #
+ old_entries = d.entries
+ old_size = len(d.entries)
d.entries = lltype.typeOf(old_entries).TO.allocate(new_size)
d.num_items = 0
d.resize_counter = new_size * 2
@@ -563,7 +566,6 @@
ll_dict_insertclean(d, entry.key, entry.value, hash)
i += 1
old_entries.delete()
-ll_dict_resize.oopspec = 'dict.resize(d)'
# ------- a port of CPython's dictobject.c's lookdict implementation -------
PERTURB_SHIFT = 5
@@ -816,6 +818,16 @@
ll_clear.oopspec = 'dict.clear(d)'
def ll_update(dic1, dic2):
+ # Prescale 'dic1', assuming that most items don't collide.
+ # If this assumption is false, 'dic1' becomes at most two times too large.
+ # * dic2.num_items = upper bound on the number of items added
+ # * (dic1.resize_counter - 1) // 3 = room left in dic1
+ # so, if dic2 has 1 item, we need dic1.resize_counter > 3
+ # if dic2 has 2 items we need dic1.resize_counter > 6 etc.
+ if not (dic1.resize_counter > dic2.num_items * 3):
+ new_estimate = (dic1.num_items + dic2.num_items) * 2
+ _ll_dict_resize_to(dic1, new_estimate)
+ #
entries = dic2.entries
d2len = len(entries)
i = 0
diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
--- a/rpython/rtyper/lltypesystem/rffi.py
+++ b/rpython/rtyper/lltypesystem/rffi.py
@@ -95,6 +95,8 @@
name, macro, ext_type, compilation_info)
else:
_callable = ll2ctypes.LL2CtypesCallable(ext_type, calling_conv)
+ else:
From noreply at buildbot.pypy.org Thu Jul 3 16:52:12 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Thu, 3 Jul 2014 16:52:12 +0200 (CEST)
Subject: [pypy-commit] pypy scalar-operations: Re-enable important test
Message-ID: <20140703145212.D93B51D3493@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch: scalar-operations
Changeset: r72328:210fb8550c7c
Date: 2014-07-03 15:51 +0100
http://bitbucket.org/pypy/pypy/changeset/210fb8550c7c/
Log: Re-enable important test
diff --git a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
--- a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
@@ -30,6 +30,7 @@
""")
def test_array_getitem_accumulate(self):
+ """Check that operations/ufuncs on array items are jitted correctly"""
def main():
import _numpypy.multiarray as np
arr = np.zeros((300, 300))
@@ -43,7 +44,6 @@
log = self.run(main, [])
assert log.result == 0
loop, = log.loops_by_filename(self.filepath)
- skip('used to pass on 69421-f3e717c94913')
assert loop.match("""
i81 = int_lt(i76, 300)
guard_true(i81, descr=...)
From noreply at buildbot.pypy.org Thu Jul 3 20:03:38 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:38 +0200 (CEST)
Subject: [pypy-commit] pypy default: Tweak to dict.update().
Message-ID: <20140703180338.646DF1D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72329:b5a2e5eb59dc
Date: 2014-07-03 12:36 +0200
http://bitbucket.org/pypy/pypy/changeset/b5a2e5eb59dc/
Log: Tweak to dict.update().
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -492,6 +492,23 @@
def view_as_kwargs(self, w_dict):
return (None, None)
+ def getiterkeys(self, w_dict):
+ raise NotImplementedError
+
+ def getitervalues(self, w_dict):
+ raise NotImplementedError
+
+ def getiteritems(self, w_dict):
+ raise NotImplementedError
+
+ def rev_update1_dict_dict(self, w_dict, w_updatedict):
+ iteritems = self.iteritems(w_dict)
+ while True:
+ w_key, w_value = iteritems.next_item()
+ if w_key is None:
+ break
+ w_updatedict.setitem(w_key, w_value)
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -593,11 +610,13 @@
# ---------- iterator interface ----------------
def getiterkeys(self, w_dict):
- return iter([None])
- getitervalues = getiterkeys
+ return iter([])
+
+ def getitervalues(self, w_dict):
+ return iter([])
def getiteritems(self, w_dict):
- return iter([(None, None)])
+ return iter([])
# Iterator Implementation base classes
@@ -725,9 +744,29 @@
def iteritems(self, w_dict):
return IterClassItems(self.space, self, w_dict)
+
+ @jit.look_inside_iff(lambda self, w_dict, w_updatedict:
+ w_dict_unrolling_heuristic(w_dict))
+ def rev_update1_dict_dict(self, w_dict, w_updatedict):
+ if override_next_item is not None:
+ # this is very similar to the general version, but the difference
+ # is that it is specialized to call a specific next_item()
+ iteritems = IterClassItems(self.space, self, w_dict)
+ while True:
+ w_key, w_value = iteritems.next_item()
+ if w_key is None:
+ break
+ w_updatedict.setitem(w_key, w_value)
+ else:
+ for key, value in self.getiteritems(w_dict):
+ w_key = wrapkey(self.space, key)
+ w_value = wrapvalue(self.space, value)
+ w_updatedict.setitem(w_key, w_value)
+
dictimpl.iterkeys = iterkeys
dictimpl.itervalues = itervalues
dictimpl.iteritems = iteritems
+ dictimpl.rev_update1_dict_dict = rev_update1_dict_dict
create_iterator_classes(EmptyDictStrategy)
@@ -1063,15 +1102,8 @@
update1_keys(space, w_dict, w_data, data_w)
-@jit.look_inside_iff(lambda space, w_dict, w_data:
- w_dict_unrolling_heuristic(w_data))
def update1_dict_dict(space, w_dict, w_data):
- iterator = w_data.iteritems()
- while True:
- w_key, w_value = iterator.next_item()
- if w_key is None:
- break
- w_dict.setitem(w_key, w_value)
+ w_data.strategy.rev_update1_dict_dict(w_data, w_dict)
def update1_pairs(space, w_dict, data_w):
From noreply at buildbot.pypy.org Thu Jul 3 20:03:40 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:40 +0200 (CEST)
Subject: [pypy-commit] pypy default: For dict.update(),
pre-scale the dictionary from RPython code in
Message-ID: <20140703180340.9C7701D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72330:0a347de43469
Date: 2014-07-03 17:52 +0200
http://bitbucket.org/pypy/pypy/changeset/0a347de43469/
Log: For dict.update(), pre-scale the dictionary from RPython code in
dictmultiobject.py.
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -1,8 +1,9 @@
"""The builtin dict implementation"""
-from rpython.rlib import jit, rerased
+from rpython.rlib import jit, rerased, objectmodel
from rpython.rlib.debug import mark_dict_non_null
from rpython.rlib.objectmodel import newlist_hint, r_dict, specialize
+from rpython.rlib.unroll import SpecTag
from rpython.tool.sourcetools import func_renamer, func_with_new_name
from pypy.interpreter.baseobjspace import W_Root
@@ -509,6 +510,9 @@
break
w_updatedict.setitem(w_key, w_value)
+ def prepare_update(self, w_dict, num_extra):
+ pass
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -748,20 +752,32 @@
@jit.look_inside_iff(lambda self, w_dict, w_updatedict:
w_dict_unrolling_heuristic(w_dict))
def rev_update1_dict_dict(self, w_dict, w_updatedict):
+ # the logic is to call prepare_dict_update() after the first setitem():
+ # it gives the w_updatedict a chance to switch its strategy.
if override_next_item is not None:
# this is very similar to the general version, but the difference
# is that it is specialized to call a specific next_item()
iteritems = IterClassItems(self.space, self, w_dict)
+ spec = _SPEC1
while True:
w_key, w_value = iteritems.next_item()
if w_key is None:
break
w_updatedict.setitem(w_key, w_value)
+ if spec is _SPEC1:
+ spec = _SPEC2
+ w_updatedict.strategy.prepare_update(w_updatedict,
+ w_dict.length() - 1)
else:
+ spec = _SPEC1
for key, value in self.getiteritems(w_dict):
w_key = wrapkey(self.space, key)
w_value = wrapvalue(self.space, value)
w_updatedict.setitem(w_key, w_value)
+ if spec is _SPEC1:
+ spec = _SPEC2
+ w_updatedict.strategy.prepare_update(w_updatedict,
+ w_dict.length() - 1)
dictimpl.iterkeys = iterkeys
dictimpl.itervalues = itervalues
@@ -770,6 +786,9 @@
create_iterator_classes(EmptyDictStrategy)
+_SPEC1 = SpecTag()
+_SPEC2 = SpecTag()
+
# concrete subclasses of the above
@@ -884,6 +903,10 @@
def getiteritems(self, w_dict):
return self.unerase(w_dict.dstorage).iteritems()
+ def prepare_update(self, w_dict, num_extra):
+ objectmodel.prepare_dict_update(self.unerase(w_dict.dstorage),
+ num_extra)
+
class ObjectDictStrategy(AbstractTypedStrategy, DictStrategy):
erase, unerase = rerased.new_erasing_pair("object")
diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py
--- a/rpython/annotator/unaryop.py
+++ b/rpython/annotator/unaryop.py
@@ -388,6 +388,9 @@
return SomeImpossibleValue()
dct1.dictdef.union(dct2.dictdef)
+ def method__prepare_dict_update(dct, num):
+ pass
+
def method_keys(self):
return getbookkeeper().newlist(self.dictdef.read_key())
diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
--- a/rpython/rlib/objectmodel.py
+++ b/rpython/rlib/objectmodel.py
@@ -740,6 +740,14 @@
return repr(self.key)
+def prepare_dict_update(dict, n_elements):
+ """RPython hint that the given dict (or r_dict) will soon be
+ enlarged by n_elements."""
+ if we_are_translated():
+ dict._prepare_dict_update(n_elements)
+ # ^^ call an extra method that doesn't exist before translation
+
+
# ____________________________________________________________
def import_from_mixin(M, special_methods=['__init__', '__del__']):
diff --git a/rpython/rlib/test/test_objectmodel.py b/rpython/rlib/test/test_objectmodel.py
--- a/rpython/rlib/test/test_objectmodel.py
+++ b/rpython/rlib/test/test_objectmodel.py
@@ -321,6 +321,14 @@
res = self.interpret(g, [3])
assert res == 77
+ def test_prepare_dict_update(self):
+ def g(n):
+ d = {}
+ prepare_dict_update(d, n)
+ return 42
+ res = self.interpret(g, [3])
+ assert res == 42 # "did not crash"
+
def test_compute_hash(self):
class Foo(object):
pass
diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py
--- a/rpython/rtyper/lltypesystem/rdict.py
+++ b/rpython/rtyper/lltypesystem/rdict.py
@@ -286,6 +286,11 @@
hop.exception_cannot_occur()
return hop.gendirectcall(ll_update, v_dic1, v_dic2)
+ def rtype_method__prepare_dict_update(self, hop):
+ v_dict, v_num = hop.inputargs(self, lltype.Signed)
+ hop.exception_cannot_occur()
+ hop.gendirectcall(ll_prepare_dict_update, v_dict, v_num)
+
def _rtype_method_kvi(self, hop, ll_func):
v_dic, = hop.inputargs(self)
r_list = hop.r_result
@@ -543,13 +548,14 @@
# make a 'new_size' estimate and shrink it if there are many
# deleted entry markers. See CPython for why it is a good idea to
# quadruple the dictionary size as long as it's not too big.
- num_items = d.num_items + 1
- if num_items > 50000: new_estimate = num_items * 2
- else: new_estimate = num_items * 4
- _ll_dict_resize_to(d, new_estimate)
+ # (Quadrupling comes from '(d.num_items + d.num_items + 1) * 2'
+ # as long as num_items is not too large.)
+ num_extra = min(d.num_items + 1, 30000)
+ _ll_dict_resize_to(d, num_extra)
ll_dict_resize.oopspec = 'dict.resize(d)'
-def _ll_dict_resize_to(d, new_estimate):
+def _ll_dict_resize_to(d, num_extra):
+ new_estimate = (d.num_items + num_extra) * 2
new_size = DICT_INITSIZE
while new_size <= new_estimate:
new_size *= 2
@@ -818,16 +824,7 @@
ll_clear.oopspec = 'dict.clear(d)'
def ll_update(dic1, dic2):
- # Prescale 'dic1', assuming that most items don't collide.
- # If this assumption is false, 'dic1' becomes at most two times too large.
- # * dic2.num_items = upper bound on the number of items added
- # * (dic1.resize_counter - 1) // 3 = room left in dic1
- # so, if dic2 has 1 item, we need dic1.resize_counter > 3
- # if dic2 has 2 items we need dic1.resize_counter > 6 etc.
- if not (dic1.resize_counter > dic2.num_items * 3):
- new_estimate = (dic1.num_items + dic2.num_items) * 2
- _ll_dict_resize_to(dic1, new_estimate)
- #
+ ll_prepare_dict_update(dic1, dic2.num_items)
entries = dic2.entries
d2len = len(entries)
i = 0
@@ -842,6 +839,16 @@
i += 1
ll_update.oopspec = 'dict.update(dic1, dic2)'
+def ll_prepare_dict_update(d, num_extra):
+ # Prescale 'd' for 'num_extra' items, assuming that most items don't
+ # collide. If this assumption is false, 'd' becomes too large by at
+ # most 'num_extra'. The logic is based on:
+ # (d.resize_counter - 1) // 3 = room left in d
+ # so, if num_extra == 1, we need d.resize_counter > 3
+ # if num_extra == 2, we need d.resize_counter > 6 etc.
+ jit.conditional_call(d.resize_counter <= num_extra * 3,
+ _ll_dict_resize_to, d, num_extra)
+
# this is an implementation of keys(), values() and items()
# in a single function.
# note that by specialization on func, three different
From noreply at buildbot.pypy.org Thu Jul 3 20:03:42 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:42 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Abandon the merge: needs more work to
fix e.g. the threadlocalref
Message-ID: <20140703180342.3BED21D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72331:2aef0e942480
Date: 2014-07-03 18:16 +0200
http://bitbucket.org/pypy/pypy/changeset/2aef0e942480/
Log: Abandon the merge: needs more work to fix e.g. the threadlocalref
and I don't want to do it right now :-/
From noreply at buildbot.pypy.org Thu Jul 3 20:03:43 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:43 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Import stm.rst from trunk again. (This
checkin is also here to
Message-ID: <20140703180343.DEA7D1D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72332:c65fdedbe74a
Date: 2014-07-03 18:17 +0200
http://bitbucket.org/pypy/pypy/changeset/c65fdedbe74a/
Log: Import stm.rst from trunk again. (This checkin is also here to mark
the current head of the stmgc-c7 branch.)
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -30,7 +30,8 @@
``pypy-stm`` is a variant of the regular PyPy interpreter. With caveats_
listed below, it should be in theory within 20%-50% slower than a
-regular PyPy, comparing the JIT version in both cases. It is called
+regular PyPy, comparing the JIT version in both cases (but see below!).
+It is called
STM for Software Transactional Memory, which is the internal technique
used (see `Reference to implementation details`_).
@@ -90,6 +91,11 @@
* So far, small examples work fine, but there are still a few bugs.
We're busy fixing them as we find them; feel free to `report bugs`_.
+* It runs with an overhead as low as 20% on examples like "richards".
+ There are also other examples with higher overheads --currently up to
+ 2x for "translate.py"-- which we are still trying to understand.
+ One suspect is our partial GC implementation, see below.
+
* Currently limited to 1.5 GB of RAM (this is just a parameter in
`core.h`__). Memory overflows are not correctly handled; they cause
segfaults.
@@ -105,9 +111,8 @@
* The GC is new; although clearly inspired by PyPy's regular GC, it
misses a number of optimizations for now. Programs allocating large
- numbers of small objects that don't immediately die, as well as
- programs that modify large lists or dicts, suffer from these missing
- optimizations.
+ numbers of small objects that don't immediately die (surely a common
+ situation) suffer from these missing optimizations.
* The GC has no support for destructors: the ``__del__`` method is never
called (including on file objects, which won't be closed for you).
From noreply at buildbot.pypy.org Thu Jul 3 20:03:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:46 +0200 (CEST)
Subject: [pypy-commit] pypy default: Translation fixes
Message-ID: <20140703180346.A7AEA1D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72333:bc07df429f7b
Date: 2014-07-03 18:20 +0200
http://bitbucket.org/pypy/pypy/changeset/bc07df429f7b/
Log: Translation fixes
diff --git a/pypy/module/pypyjit/policy.py b/pypy/module/pypyjit/policy.py
--- a/pypy/module/pypyjit/policy.py
+++ b/pypy/module/pypyjit/policy.py
@@ -13,19 +13,21 @@
cache = space.fromcache(Cache)
if cache.in_recursion:
return
- if space.is_true(cache.w_abort_hook):
+ w_abort_hook = cache.w_abort_hook
+ assert w_abort_hook is not None
+ if space.is_true(w_abort_hook):
cache.in_recursion = True
oplist_w = wrap_oplist(space, logops, operations)
try:
try:
- space.call_function(cache.w_abort_hook,
+ space.call_function(w_abort_hook,
space.wrap(jitdriver.name),
wrap_greenkey(space, jitdriver, greenkey, greenkey_repr),
space.wrap(Counters.counter_names[reason]),
space.newlist(oplist_w)
)
except OperationError, e:
- e.write_unraisable(space, "jit hook ", cache.w_abort_hook)
+ e.write_unraisable(space, "jit hook ", w_abort_hook)
finally:
cache.in_recursion = False
diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
--- a/rpython/rlib/objectmodel.py
+++ b/rpython/rlib/objectmodel.py
@@ -740,6 +740,7 @@
return repr(self.key)
+@specialize.call_location()
def prepare_dict_update(dict, n_elements):
"""RPython hint that the given dict (or r_dict) will soon be
enlarged by n_elements."""
From noreply at buildbot.pypy.org Thu Jul 3 20:03:52 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:52 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: dummy merge, abandon 2aef0e942480
Message-ID: <20140703180352.9E71F1D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72334:d0483f8d8fcd
Date: 2014-07-03 18:23 +0200
http://bitbucket.org/pypy/pypy/changeset/d0483f8d8fcd/
Log: dummy merge, abandon 2aef0e942480
From noreply at buildbot.pypy.org Thu Jul 3 20:03:54 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 3 Jul 2014 20:03:54 +0200 (CEST)
Subject: [pypy-commit] pypy default: More attempts at translation fixes
Message-ID: <20140703180354.64AAD1D34B9@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72335:d4f1606fbfe7
Date: 2014-07-03 20:02 +0200
http://bitbucket.org/pypy/pypy/changeset/d4f1606fbfe7/
Log: More attempts at translation fixes
diff --git a/pypy/module/pypyjit/interp_resop.py b/pypy/module/pypyjit/interp_resop.py
--- a/pypy/module/pypyjit/interp_resop.py
+++ b/pypy/module/pypyjit/interp_resop.py
@@ -56,6 +56,7 @@
jit hook won't be called for that.
"""
cache = space.fromcache(Cache)
+ assert w_hook is not None
cache.w_compile_hook = w_hook
cache.in_recursion = NonConstant(False)
@@ -90,6 +91,7 @@
as attributes on JitLoopInfo object.
"""
cache = space.fromcache(Cache)
+ assert w_hook is not None
cache.w_abort_hook = w_hook
cache.in_recursion = NonConstant(False)
diff --git a/pypy/module/pypyjit/policy.py b/pypy/module/pypyjit/policy.py
--- a/pypy/module/pypyjit/policy.py
+++ b/pypy/module/pypyjit/policy.py
@@ -13,21 +13,19 @@
cache = space.fromcache(Cache)
if cache.in_recursion:
return
- w_abort_hook = cache.w_abort_hook
- assert w_abort_hook is not None
- if space.is_true(w_abort_hook):
+ if space.is_true(cache.w_abort_hook):
cache.in_recursion = True
oplist_w = wrap_oplist(space, logops, operations)
try:
try:
- space.call_function(w_abort_hook,
+ space.call_function(cache.w_abort_hook,
space.wrap(jitdriver.name),
wrap_greenkey(space, jitdriver, greenkey, greenkey_repr),
space.wrap(Counters.counter_names[reason]),
space.newlist(oplist_w)
)
except OperationError, e:
- e.write_unraisable(space, "jit hook ", w_abort_hook)
+ e.write_unraisable(space, "jit hook ", cache.w_abort_hook)
finally:
cache.in_recursion = False
From noreply at buildbot.pypy.org Thu Jul 3 20:36:04 2014
From: noreply at buildbot.pypy.org (Corbin Simpson)
Date: Thu, 3 Jul 2014 20:36:04 +0200 (CEST)
Subject: [pypy-commit] pypy promote-unicode: Introduce promote_unicode().
Message-ID: <20140703183604.7309F1D34C3@cobra.cs.uni-duesseldorf.de>
Author: Corbin Simpson
Branch: promote-unicode
Changeset: r72336:3489a054a745
Date: 2014-07-03 11:35 -0700
http://bitbucket.org/pypy/pypy/changeset/3489a054a745/
Log: Introduce promote_unicode().
This is a companion to promote_string() which promotes Unicode
strings by value. To use, simply ``from rpython.rlib.jit import
promote_unicode`` and then ``promote_unicode(any_unicode_string)``.
A few tests are included, and a couple bits of testing mocks were
improved to permit tests to not fail.
diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py
--- a/rpython/jit/codewriter/jtransform.py
+++ b/rpython/jit/codewriter/jtransform.py
@@ -571,6 +571,23 @@
op1 = SpaceOperation('str_guard_value', [op.args[0], c, descr],
op.result)
return [SpaceOperation('-live-', [], None), op1, None]
+ if (hints.get('promote_unicode') and
+ op.args[0].concretetype is not lltype.Void):
+ U = lltype.Ptr(rstr.UNICODE)
+ assert op.args[0].concretetype == U
+ self._register_extra_helper(EffectInfo.OS_UNIEQ_NONNULL,
+ "str.eq_nonnull",
+ [U, U],
+ lltype.Signed,
+ EffectInfo.EF_ELIDABLE_CANNOT_RAISE)
+ descr, p = self.callcontrol.callinfocollection.callinfo_for_oopspec(
+ EffectInfo.OS_UNIEQ_NONNULL)
+ # XXX this is fairly ugly way of creating a constant,
+ # however, callinfocollection has no better interface
+ c = Constant(p.adr.ptr, lltype.typeOf(p.adr.ptr))
+ op1 = SpaceOperation('str_guard_value', [op.args[0], c, descr],
+ op.result)
+ return [SpaceOperation('-live-', [], None), op1, None]
if hints.get('force_virtualizable'):
return SpaceOperation('hint_force_virtualizable', [op.args[0]], None)
if hints.get('force_no_const'): # for tests only
diff --git a/rpython/jit/codewriter/test/test_jtransform.py b/rpython/jit/codewriter/test/test_jtransform.py
--- a/rpython/jit/codewriter/test/test_jtransform.py
+++ b/rpython/jit/codewriter/test/test_jtransform.py
@@ -107,7 +107,7 @@
return True
return False
def callinfo_for_oopspec(self, oopspecindex):
- assert oopspecindex == effectinfo.EffectInfo.OS_STREQ_NONNULL
+ # assert oopspecindex == effectinfo.EffectInfo.OS_STREQ_NONNULL
class c:
class adr:
ptr = 1
@@ -1059,6 +1059,21 @@
assert op1.result == v2
assert op0.opname == '-live-'
+def test_unicode_promote():
+ PUNICODE = lltype.Ptr(rstr.UNICODE)
+ v1 = varoftype(PUNICODE)
+ v2 = varoftype(PUNICODE)
+ op = SpaceOperation('hint',
+ [v1, Constant({'promote_unicode': True}, lltype.Void)],
+ v2)
+ tr = Transformer(FakeCPU(), FakeBuiltinCallControl())
+ op0, op1, _ = tr.rewrite_operation(op)
+ assert op1.opname == 'str_guard_value'
+ assert op1.args[0] == v1
+ assert op1.args[2] == 'calldescr'
+ assert op1.result == v2
+ assert op0.opname == '-live-'
+
def test_double_promote_str():
PSTR = lltype.Ptr(rstr.STR)
v1 = varoftype(PSTR)
diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py
--- a/rpython/rlib/jit.py
+++ b/rpython/rlib/jit.py
@@ -60,6 +60,7 @@
* promote - promote the argument from a variable into a constant
* promote_string - same, but promote string by *value*
+ * promote_unicode - same, but promote unicode string by *value*
* access_directly - directly access a virtualizable, as a structure
and don't treat it as a virtualizable
* fresh_virtualizable - means that virtualizable was just allocated.
@@ -79,6 +80,9 @@
def promote_string(x):
return hint(x, promote_string=True)
+def promote_unicode(x):
+ return hint(x, promote_unicode=True)
+
def dont_look_inside(func):
""" Make sure the JIT does not trace inside decorated function
(it becomes a call instead)
From noreply at buildbot.pypy.org Thu Jul 3 23:06:38 2014
From: noreply at buildbot.pypy.org (wenzhuman)
Date: Thu, 3 Jul 2014 23:06:38 +0200 (CEST)
Subject: [pypy-commit] pypy gc_no_cleanup_nursery: simplify the malloc
method logic
Message-ID: <20140703210638.0C00D1D3493@cobra.cs.uni-duesseldorf.de>
Author: wenzhuman
Branch: gc_no_cleanup_nursery
Changeset: r72337:6dd33f47d74f
Date: 2014-07-03 12:48 +0000
http://bitbucket.org/pypy/pypy/changeset/6dd33f47d74f/
Log: simplify the malloc method logic
diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py
--- a/rpython/memory/gc/base.py
+++ b/rpython/memory/gc/base.py
@@ -151,14 +151,14 @@
assert not needs_finalizer
itemsize = self.varsize_item_sizes(typeid)
offset_to_length = self.varsize_offset_to_length(typeid)
- if not hasattr(self, 'malloc_varsize'):
+ if self.malloc_zero_filled:
malloc_varsize = self.malloc_varsize_clear
else:
malloc_varsize = self.malloc_varsize
ref = malloc_varsize(typeid, length, size, itemsize,
offset_to_length)
else:
- if not hasattr(self, 'malloc_fixedsize'):
+ if self.malloc_zero_filled:
malloc_fixedsize = self.malloc_fixedsize_clear
else:
malloc_fixedsize = self.malloc_fixedsize
diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py
--- a/rpython/memory/gctransform/framework.py
+++ b/rpython/memory/gctransform/framework.py
@@ -273,7 +273,11 @@
s_gcref = SomePtr(llmemory.GCREF)
gcdata = self.gcdata
translator = self.translator
- if hasattr(GCClass, 'malloc_fixedsize_clear'):
+ #use the GC flag to find which malloc method to use
#malloc_zero_filled == True -> malloc_fixedsize/varsize_clear
#malloc_zero_filled == False -> malloc_fixedsize/varsize
+ malloc_fixedsize_meth = None
+ if GCClass.malloc_zero_filled:
malloc_fixedsize_clear_meth = GCClass.malloc_fixedsize_clear.im_func
self.malloc_fixedsize_clear_ptr = getfn(
malloc_fixedsize_clear_meth,
@@ -283,8 +287,13 @@
annmodel.SomeBool(),
annmodel.SomeBool()], s_gcref,
inline = False)
+ self.malloc_fixedsize_ptr = self.malloc_fixedsize_clear_ptr
+ self.malloc_varsize_ptr = getfn(
+ GCClass.malloc_varsize_clear.im_func,
+ [s_gc, s_typeid16]
+ + [annmodel.SomeInteger(nonneg=True) for i in range(4)], s_gcref)
- if hasattr(GCClass, 'malloc_fixedsize'):
+ else:
malloc_fixedsize_meth = GCClass.malloc_fixedsize.im_func
self.malloc_fixedsize_ptr = getfn(
malloc_fixedsize_meth,
@@ -294,19 +303,11 @@
annmodel.SomeBool(),
annmodel.SomeBool()], s_gcref,
inline = False)
- else:
- malloc_fixedsize_meth = None
- self.malloc_fixedsize_ptr = self.malloc_fixedsize_clear_ptr
- if hasattr(GCClass, 'malloc_varsize'):
- self.malloc_varsize_ptr = getfn(
+ self.malloc_varsize_ptr = getfn(
GCClass.malloc_varsize.im_func,
[s_gc, s_typeid16]
+ [annmodel.SomeInteger(nonneg=True) for i in range(4)], s_gcref)
- else:
- self.malloc_varsize_ptr = getfn(
- GCClass.malloc_varsize_clear.im_func,
- [s_gc, s_typeid16]
- + [annmodel.SomeInteger(nonneg=True) for i in range(4)], s_gcref)
+
self.collect_ptr = getfn(GCClass.collect.im_func,
[s_gc, annmodel.SomeInteger()], annmodel.s_None)
self.can_move_ptr = getfn(GCClass.can_move.im_func,
From noreply at buildbot.pypy.org Thu Jul 3 23:06:39 2014
From: noreply at buildbot.pypy.org (wenzhuman)
Date: Thu, 3 Jul 2014 23:06:39 +0200 (CEST)
Subject: [pypy-commit] pypy gc_no_cleanup_nursery: passed the backend test
Message-ID: <20140703210639.7398D1D3493@cobra.cs.uni-duesseldorf.de>
Author: wenzhuman
Branch: gc_no_cleanup_nursery
Changeset: r72338:d3b83138e0d7
Date: 2014-07-03 17:03 +0000
http://bitbucket.org/pypy/pypy/changeset/d3b83138e0d7/
Log: passed the backend test
diff --git a/rpython/jit/backend/llsupport/test/test_gc.py b/rpython/jit/backend/llsupport/test/test_gc.py
--- a/rpython/jit/backend/llsupport/test/test_gc.py
+++ b/rpython/jit/backend/llsupport/test/test_gc.py
@@ -59,7 +59,7 @@
x += self.gcheaderbuilder.size_gc_header
return x, tid
- def do_malloc_fixedsize_clear(self, RESTYPE, type_id, size,
+ def do_malloc_fixedsize(self, RESTYPE, type_id, size,
has_finalizer, has_light_finalizer,
contains_weakptr):
assert not contains_weakptr
@@ -70,7 +70,7 @@
self.record.append(("fixedsize", repr(size), tid, p))
return p
- def do_malloc_varsize_clear(self, RESTYPE, type_id, length, size,
+ def do_malloc_varsize(self, RESTYPE, type_id, length, size,
itemsize, offset_to_length):
p, tid = self._malloc(type_id, size + itemsize * length)
(p + offset_to_length).signed[0] = length
diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py
--- a/rpython/memory/gctransform/framework.py
+++ b/rpython/memory/gctransform/framework.py
@@ -279,7 +279,7 @@
malloc_fixedsize_meth = None
if GCClass.malloc_zero_filled:
malloc_fixedsize_clear_meth = GCClass.malloc_fixedsize_clear.im_func
- self.malloc_fixedsize_clear_ptr = getfn(
+ self.malloc_fixedsize_ptr = getfn(
malloc_fixedsize_clear_meth,
[s_gc, s_typeid16,
annmodel.SomeInteger(nonneg=True),
@@ -287,7 +287,6 @@
annmodel.SomeBool(),
annmodel.SomeBool()], s_gcref,
inline = False)
- self.malloc_fixedsize_ptr = self.malloc_fixedsize_clear_ptr
self.malloc_varsize_ptr = getfn(
GCClass.malloc_varsize_clear.im_func,
[s_gc, s_typeid16]
From noreply at buildbot.pypy.org Thu Jul 3 23:58:09 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:09 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Implement "yield from" opcode
Message-ID: <20140703215809.5853A1D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72339:02025d9b1f7d
Date: 2014-06-22 19:08 +0200
http://bitbucket.org/pypy/pypy/changeset/02025d9b1f7d/
Log: Implement "yield from" opcode
diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py
--- a/pypy/interpreter/astcompiler/codegen.py
+++ b/pypy/interpreter/astcompiler/codegen.py
@@ -871,11 +871,12 @@
self.load_const(self.space.w_None)
self.emit_op(ops.YIELD_VALUE)
- def visit_YieldFrom(self, yie):
- # XXX not correctly implemented.
- self.update_position(yie.lineno)
- yie.value.walkabout(self)
- self.emit_op(ops.YIELD_VALUE)
+ def visit_YieldFrom(self, yfr):
+ self.update_position(yfr.lineno)
+ yfr.value.walkabout(self)
+ self.emit_op(ops.GET_ITER)
+ self.load_const(self.space.w_None)
+ self.emit_op(ops.YIELD_FROM)
def visit_Num(self, num):
self.update_position(num.lineno)
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -431,6 +431,10 @@
self.scope.note_yield(yie)
ast.GenericASTVisitor.visit_Yield(self, yie)
+ def visit_YieldFrom(self, yfr):
+ self.scope.note_yield(yfr)
+ ast.GenericASTVisitor.visit_YieldFrom(self, yfr)
+
def visit_Global(self, glob):
for name in glob.names:
old_role = self.scope.lookup_role(name)
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -928,7 +928,6 @@
expr = self.get_first_expr("yield")
assert isinstance(expr, ast.Yield)
assert expr.value is None
- assert expr.is_from == 0
expr = self.get_first_expr("yield x")
assert isinstance(expr.value, ast.Name)
assign = self.get_first_stmt("x = yield x")
@@ -937,8 +936,8 @@
def test_yield_from(self):
expr = self.get_first_expr("yield from x")
+ assert isinstance(expr, ast.YieldFrom)
assert isinstance(expr.value, ast.Name)
- assert expr.is_from == 1
def test_unaryop(self):
unary_ops = (
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -965,6 +965,15 @@
yield self.st, 'x = list(d for d in [1] or [])', 'x', [1]
yield self.st, 'y = [d for d in [1] or []]', 'y', [1]
+ def test_yield_from(self):
+ test = """if 1:
+ def f():
+ yield from range(3)
+ def g():
+ return list(f())
+ """
+ yield self.st, test, "g()", range(3)
+
class AppTestCompiler:
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -401,6 +401,8 @@
self.WITH_CLEANUP(oparg, next_instr)
elif opcode == opcodedesc.YIELD_VALUE.index:
self.YIELD_VALUE(oparg, next_instr)
+ elif opcode == opcodedesc.YIELD_FROM.index:
+ self.YIELD_FROM(oparg, next_instr)
else:
self.MISSING_OPCODE(oparg, next_instr)
@@ -1000,6 +1002,34 @@
def YIELD_VALUE(self, oparg, next_instr):
raise Yield
+ def YIELD_FROM(self, oparg, next_instr):
+ space = self.space
+ w_value = self.popvalue()
+ w_gen = self.peekvalue()
+ try:
+ if space.is_none(w_value):
+ w_retval = space.next(w_gen)
+ else:
+ w_retval = space.call_method(w_gen, "send", w_value)
+ except OperationError as e:
+ if not e.match(self.space, self.space.w_StopIteration):
+ raise
+ self.popvalue() # Remove iter from stack
+ try:
+ w_value = space.getattr(e.get_w_value(space), space.wrap("value"))
+ except OperationError as e:
+ if not e.match(self.space, self.space.w_AttributeError):
+ raise
+ w_value = space.w_None
+ self.pushvalue(w_value)
+ return next_instr
+ else:
+ # iter remains on stack, w_retval is value to be yielded.
+ self.pushvalue(w_retval)
+ # and repeat...
+ self.last_instr = self.last_instr - 1
+ raise Yield
+
def jump_absolute(self, jumpto, ec):
# this function is overridden by pypy.module.pypyjit.interp_jit
check_nonneg(jumpto)
From noreply at buildbot.pypy.org Thu Jul 3 23:58:10 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:10 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Make memoryview objects weakrefable
Message-ID: <20140703215810.A72341D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72340:b82a4b6b39d3
Date: 2014-06-22 20:18 +0200
http://bitbucket.org/pypy/pypy/changeset/b82a4b6b39d3/
Log: Make memoryview objects weakrefable
diff --git a/pypy/objspace/std/memoryobject.py b/pypy/objspace/std/memoryobject.py
--- a/pypy/objspace/std/memoryobject.py
+++ b/pypy/objspace/std/memoryobject.py
@@ -7,7 +7,7 @@
from pypy.interpreter.baseobjspace import W_Root
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import interp2app
-from pypy.interpreter.typedef import TypeDef, GetSetProperty
+from pypy.interpreter.typedef import TypeDef, GetSetProperty, make_weakref_descr
def _buffer_setitem(space, buf, w_index, w_obj):
@@ -173,6 +173,7 @@
__repr__ = interp2app(W_MemoryView.descr_repr),
__enter__ = interp2app(W_MemoryView.descr_enter),
__exit__ = interp2app(W_MemoryView.descr_exit),
+ __weakref__ = make_weakref_descr(W_MemoryView),
tobytes = interp2app(W_MemoryView.descr_tobytes),
tolist = interp2app(W_MemoryView.descr_tolist),
release = interp2app(W_MemoryView.descr_release),
diff --git a/pypy/objspace/std/test/test_memoryobject.py b/pypy/objspace/std/test/test_memoryobject.py
--- a/pypy/objspace/std/test/test_memoryobject.py
+++ b/pypy/objspace/std/test/test_memoryobject.py
@@ -87,6 +87,11 @@
def test_hash(self):
raises(TypeError, "hash(memoryview(b'hello'))")
+ def test_weakref(self):
+ import weakref
+ m = memoryview(b'hello')
+ weakref.ref(m)
+
def test_getitem_only_ints(self):
class MyInt(object):
def __init__(self, x):
From noreply at buildbot.pypy.org Thu Jul 3 23:58:12 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:12 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Memoryview objects are now hashable.
Message-ID: <20140703215812.0EA731D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72341:b8bb27a1905f
Date: 2014-06-22 20:34 +0200
http://bitbucket.org/pypy/pypy/changeset/b8bb27a1905f/
Log: Memoryview objects are now hashable.
diff --git a/pypy/objspace/std/memoryobject.py b/pypy/objspace/std/memoryobject.py
--- a/pypy/objspace/std/memoryobject.py
+++ b/pypy/objspace/std/memoryobject.py
@@ -4,6 +4,7 @@
import operator
from rpython.rlib.buffer import Buffer, SubBuffer
+from rpython.rlib.objectmodel import compute_hash
from pypy.interpreter.baseobjspace import W_Root
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import interp2app
@@ -34,6 +35,7 @@
def __init__(self, buf):
assert isinstance(buf, Buffer)
self.buf = buf
+ self._hash = -1
def buffer_w(self, space, flags):
self._check_released(space)
@@ -142,6 +144,15 @@
else:
return self.getrepr(space, u'memory')
+ def descr_hash(self, space):
+ if self._hash == -1:
+ self._check_released(space)
+ if not self.buf.readonly:
+ raise OperationError(space.w_ValueError, space.wrap(
+ "cannot hash writable memoryview object"))
+ self._hash = compute_hash(self.buf.as_str())
+ return space.wrap(self._hash)
+
def descr_release(self, space):
self.buf = None
@@ -171,6 +182,7 @@
__ne__ = interp2app(W_MemoryView.descr_ne),
__setitem__ = interp2app(W_MemoryView.descr_setitem),
__repr__ = interp2app(W_MemoryView.descr_repr),
+ __hash__ = interp2app(W_MemoryView.descr_hash),
__enter__ = interp2app(W_MemoryView.descr_enter),
__exit__ = interp2app(W_MemoryView.descr_exit),
__weakref__ = make_weakref_descr(W_MemoryView),
diff --git a/pypy/objspace/std/test/test_memoryobject.py b/pypy/objspace/std/test/test_memoryobject.py
--- a/pypy/objspace/std/test/test_memoryobject.py
+++ b/pypy/objspace/std/test/test_memoryobject.py
@@ -85,7 +85,7 @@
assert repr(memoryview(b'hello')).startswith('<memory at ')
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72342:4deadf46db89
Date: 2014-06-22 20:41 +0200
http://bitbucket.org/pypy/pypy/changeset/4deadf46db89/
Log: mmap.error is OSError
diff --git a/pypy/module/mmap/__init__.py b/pypy/module/mmap/__init__.py
--- a/pypy/module/mmap/__init__.py
+++ b/pypy/module/mmap/__init__.py
@@ -8,7 +8,7 @@
'ACCESS_WRITE': 'space.wrap(interp_mmap.ACCESS_WRITE)',
'ACCESS_COPY' : 'space.wrap(interp_mmap.ACCESS_COPY)',
'mmap': 'interp_mmap.W_MMap',
- 'error': 'space.fromcache(interp_mmap.Cache).w_error',
+ 'error': 'space.w_OSError',
}
appleveldefs = {
diff --git a/pypy/module/mmap/interp_mmap.py b/pypy/module/mmap/interp_mmap.py
--- a/pypy/module/mmap/interp_mmap.py
+++ b/pypy/module/mmap/interp_mmap.py
@@ -283,10 +283,6 @@
ACCESS_WRITE = rmmap.ACCESS_WRITE
ACCESS_COPY = rmmap.ACCESS_COPY
-class Cache:
- def __init__(self, space):
- self.w_error = space.new_exception_class("mmap.error",
- space.w_EnvironmentError)
def mmap_error(space, e):
if isinstance(e, RValueError):
@@ -296,8 +292,7 @@
return OperationError(space.w_TypeError,
space.wrap(e.message))
elif isinstance(e, OSError):
- w_error = space.fromcache(Cache).w_error
- return wrap_oserror(space, e, w_exception_class=w_error)
+ return wrap_oserror(space, e)
else:
# bogus 'e'?
return OperationError(space.w_SystemError, space.wrap('%s' % e))
diff --git a/pypy/module/mmap/test/test_mmap.py b/pypy/module/mmap/test/test_mmap.py
--- a/pypy/module/mmap/test/test_mmap.py
+++ b/pypy/module/mmap/test/test_mmap.py
@@ -31,9 +31,7 @@
assert isinstance(mmap.PROT_READ, int)
assert isinstance(mmap.PROT_WRITE, int)
- assert 'mmap.error' in str(mmap.error)
- assert mmap.error is not EnvironmentError
- assert issubclass(mmap.error, EnvironmentError)
+ assert mmap.error is OSError
def test_args(self):
from mmap import mmap
From noreply at buildbot.pypy.org Thu Jul 3 23:58:14 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:14 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: BlockingIOError is now in the exceptions
module.
Message-ID: <20140703215814.AA51E1D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72343:0e4c3da62a4b
Date: 2014-06-23 10:03 +0200
http://bitbucket.org/pypy/pypy/changeset/0e4c3da62a4b/
Log: BlockingIOError is now in the exceptions module.
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -1807,6 +1807,7 @@
'BaseException',
'BufferError',
'BytesWarning',
+ 'BlockingIOError',
'DeprecationWarning',
'EOFError',
'EnvironmentError',
diff --git a/pypy/module/_io/__init__.py b/pypy/module/_io/__init__.py
--- a/pypy/module/_io/__init__.py
+++ b/pypy/module/_io/__init__.py
@@ -7,7 +7,7 @@
interpleveldefs = {
'DEFAULT_BUFFER_SIZE': 'space.wrap(interp_iobase.DEFAULT_BUFFER_SIZE)',
- 'BlockingIOError': 'interp_io.W_BlockingIOError',
+ 'BlockingIOError': 'space.w_BlockingIOError',
'UnsupportedOperation':
'space.fromcache(interp_io.Cache).w_unsupportedoperation',
'_IOBase': 'interp_iobase.W_IOBase',
diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
--- a/pypy/module/_io/interp_bufferedio.py
+++ b/pypy/module/_io/interp_bufferedio.py
@@ -12,20 +12,18 @@
from pypy.module._io.interp_iobase import (
W_IOBase, DEFAULT_BUFFER_SIZE, convert_size, trap_eintr,
check_readable_w, check_writable_w, check_seekable_w)
-from pypy.module._io.interp_io import W_BlockingIOError
from rpython.rlib import rthread
STATE_ZERO, STATE_OK, STATE_DETACHED = range(3)
def make_write_blocking_error(space, written):
- w_type = space.gettypeobject(W_BlockingIOError.typedef)
w_value = space.call_function(
- w_type,
+ space.w_BlockingIOError,
space.wrap(rposix.get_errno()),
space.wrap("write could not complete without blocking"),
space.wrap(written))
- return OperationError(w_type, w_value)
+ return OperationError(space.w_BlockingIOError, w_value)
class TryLock(object):
@@ -734,11 +732,8 @@
try:
self._writer_flush_unlocked(space)
except OperationError, e:
- if not e.match(space, space.gettypeobject(
- W_BlockingIOError.typedef)):
+ if not e.match(space, space.w_BlockingIOError):
raise
- w_exc = e.get_w_value(space)
- assert isinstance(w_exc, W_BlockingIOError)
if self.readable:
self._reader_reset_buf()
# Make some place by shifting the buffer
diff --git a/pypy/module/_io/interp_io.py b/pypy/module/_io/interp_io.py
--- a/pypy/module/_io/interp_io.py
+++ b/pypy/module/_io/interp_io.py
@@ -16,25 +16,6 @@
"io.UnsupportedOperation",
space.newtuple([space.w_ValueError, space.w_IOError]))
-class W_BlockingIOError(W_IOError):
- def __init__(self, space):
- W_IOError.__init__(self, space)
- self.written = 0
-
- @unwrap_spec(written=int)
- def descr_init(self, space, w_errno, w_strerror, written=0):
- W_IOError.descr_init(self, space, [w_errno, w_strerror])
- self.written = written
-
-W_BlockingIOError.typedef = TypeDef(
- 'BlockingIOError', W_IOError.typedef,
- __doc__ = ("Exception raised when I/O would block on a non-blocking "
- "I/O stream"),
- __new__ = generic_new_descr(W_BlockingIOError),
- __init__ = interp2app(W_BlockingIOError.descr_init),
- characters_written = interp_attrproperty('written', W_BlockingIOError),
- )
-
DEFAULT_BUFFER_SIZE = 8 * 1024
@unwrap_spec(mode=str, buffering=int,
diff --git a/pypy/module/_io/test/test_io.py b/pypy/module/_io/test/test_io.py
--- a/pypy/module/_io/test/test_io.py
+++ b/pypy/module/_io/test/test_io.py
@@ -56,7 +56,7 @@
import _io
try:
raise _io.BlockingIOError(42, "test blocking", 123)
- except IOError as e:
+ except OSError as e:
assert isinstance(e, _io.BlockingIOError)
assert e.errno == 42
assert e.strerror == "test blocking"
diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py
--- a/pypy/module/exceptions/interp_exceptions.py
+++ b/pypy/module/exceptions/interp_exceptions.py
@@ -73,9 +73,10 @@
"""
from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.typedef import (TypeDef, GetSetProperty, descr_get_dict,
- descr_set_dict, descr_del_dict)
-from pypy.interpreter.gateway import interp2app
+from pypy.interpreter.typedef import (
+ TypeDef, GetSetProperty, interp_attrproperty,
+ descr_get_dict, descr_set_dict, descr_del_dict)
+from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.interpreter.error import OperationError, setup_context
from pypy.interpreter.pytraceback import PyTraceback, check_traceback
from rpython.rlib import rwin32
@@ -552,8 +553,45 @@
)
# Various OSError subclasses added in Python 3.3
-W_BlockingIOError = _new_exception(
- "BlockingIOError", W_OSError, "I/O operation would block.")
+class W_BlockingIOError(W_OSError):
+ "I/O operation would block."
+
+ def __init__(self, space):
+ W_OSError.__init__(self, space)
+ self.written = -1
+
+ def descr_init(self, space, args_w):
+ W_OSError.descr_init(self, space, args_w)
+ # BlockingIOError's 3rd argument can be the number of
+ # characters written.
+ if len(args_w) >= 3:
+ try:
+ written = space.int_w(args_w[2])
+ except OperationError:
+ pass
+ else:
+ self.written = written
+
+ def descr_get_written(self, space):
+ if self.written == -1:
+ raise OperationError(space.w_AttributeError,
+ space.wrap("characters_written"))
+ return space.wrap(self.written)
+
+ def descr_set_written(self, space, w_written):
+ self.written = space.int_w(w_written)
+
+
+W_BlockingIOError.typedef = TypeDef(
+ 'BlockingIOError', W_OSError.typedef,
+ __doc__ = ("Exception raised when I/O would block on a non-blocking "
+ "I/O stream"),
+ __new__ = _new(W_BlockingIOError),
+ __init__ = interp2app(W_BlockingIOError.descr_init),
+ characters_written = GetSetProperty(W_BlockingIOError.descr_get_written,
+ W_BlockingIOError.descr_set_written),
+ )
+
W_ConnectionError = _new_exception(
"ConnectionError", W_OSError, "Connection error.")
W_ChildProcessError = _new_exception(
diff --git a/pypy/module/exceptions/test/test_exc.py b/pypy/module/exceptions/test/test_exc.py
--- a/pypy/module/exceptions/test/test_exc.py
+++ b/pypy/module/exceptions/test/test_exc.py
@@ -292,3 +292,14 @@
assert ImportError("message", path="y").path == "y"
raises(TypeError, ImportError, invalid="z")
+ def test_blockingioerror(self):
+ args = ("a", "b", "c", "d", "e")
+ for n in range(6):
+ e = BlockingIOError(*args[:n])
+ raises(AttributeError, getattr, e, 'characters_written')
+ e = BlockingIOError("a", "b", 3)
+ assert e.characters_written == 3
+ e.characters_written = 5
+ assert e.characters_written == 5
+
+
From noreply at buildbot.pypy.org Thu Jul 3 23:58:16 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:16 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: PEP3151: OSError is IOError is
EnvironmentError is socket.error is select.error!
Message-ID: <20140703215816.1540F1D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72344:38117d8ea60f
Date: 2014-06-23 09:47 +0200
http://bitbucket.org/pypy/pypy/changeset/38117d8ea60f/
Log: PEP3151: OSError is IOError is EnvironmentError is socket.error is
select.error!
diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py
--- a/pypy/module/_socket/interp_socket.py
+++ b/pypy/module/_socket/interp_socket.py
@@ -565,8 +565,7 @@
class SocketAPI:
def __init__(self, space):
- self.w_error = space.new_exception_class(
- "_socket.error", space.w_IOError)
+ self.w_error = space.w_OSError
self.w_herror = space.new_exception_class(
"_socket.herror", self.w_error)
self.w_gaierror = space.new_exception_class(
diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py
--- a/pypy/module/exceptions/interp_exceptions.py
+++ b/pypy/module/exceptions/interp_exceptions.py
@@ -33,11 +33,11 @@
+-- AssertionError
+-- AttributeError
+-- BufferError
- +-- EnvironmentError
- | +-- IOError
- | +-- OSError
- | +-- WindowsError (Windows)
- | +-- VMSError (VMS)
+ +-- OSError
+ | = EnvironmentError
+ | = IOError
+ | = WindowsError (Windows)
+ | = VMSError (VMS)
+-- EOFError
+-- ImportError
+-- LookupError
@@ -439,8 +439,8 @@
W_Warning,
"""Base class for warnings about features which will be deprecated in the future.""")
-class W_EnvironmentError(W_Exception):
- """Base class for I/O related errors."""
+class W_OSError(W_Exception):
+ """OS system call failed."""
def __init__(self, space):
self.w_errno = space.w_None
@@ -484,21 +484,21 @@
))
return W_BaseException.descr_str(self, space)
-W_EnvironmentError.typedef = TypeDef(
- 'EnvironmentError',
+W_OSError.typedef = TypeDef(
+ 'OSError',
W_Exception.typedef,
- __doc__ = W_EnvironmentError.__doc__,
- __new__ = _new(W_EnvironmentError),
- __reduce__ = interp2app(W_EnvironmentError.descr_reduce),
- __init__ = interp2app(W_EnvironmentError.descr_init),
- __str__ = interp2app(W_EnvironmentError.descr_str),
- errno = readwrite_attrproperty_w('w_errno', W_EnvironmentError),
- strerror = readwrite_attrproperty_w('w_strerror', W_EnvironmentError),
- filename = readwrite_attrproperty_w('w_filename', W_EnvironmentError),
+ __doc__ = W_OSError.__doc__,
+ __new__ = _new(W_OSError),
+ __reduce__ = interp2app(W_OSError.descr_reduce),
+ __init__ = interp2app(W_OSError.descr_init),
+ __str__ = interp2app(W_OSError.descr_str),
+ errno = readwrite_attrproperty_w('w_errno', W_OSError),
+ strerror = readwrite_attrproperty_w('w_strerror', W_OSError),
+ filename = readwrite_attrproperty_w('w_filename', W_OSError),
)
-W_OSError = _new_exception('OSError', W_EnvironmentError,
- """OS system call failed.""")
+W_EnvironmentError = W_OSError
+W_IOError = W_OSError
class W_WindowsError(W_OSError):
"""MS-Windows OS system call failed."""
@@ -643,9 +643,6 @@
W_NameError = _new_exception('NameError', W_Exception,
"""Name not found globally.""")
-W_IOError = _new_exception('IOError', W_EnvironmentError,
- """I/O operation failed.""")
-
class W_SyntaxError(W_Exception):
"""Invalid syntax."""
diff --git a/pypy/module/select/__init__.py b/pypy/module/select/__init__.py
--- a/pypy/module/select/__init__.py
+++ b/pypy/module/select/__init__.py
@@ -11,7 +11,7 @@
interpleveldefs = {
'select': 'interp_select.select',
- 'error' : 'space.fromcache(interp_select.Cache).w_error'
+ 'error' : 'space.w_OSError',
}
if os.name =='posix':
diff --git a/pypy/module/select/interp_select.py b/pypy/module/select/interp_select.py
--- a/pypy/module/select/interp_select.py
+++ b/pypy/module/select/interp_select.py
@@ -7,10 +7,6 @@
defaultevents = rpoll.POLLIN | rpoll.POLLOUT | rpoll.POLLPRI
-class Cache:
- def __init__(self, space):
- self.w_error = space.new_exception_class("select.error")
-
def poll(space):
"""Returns a polling object, which supports registering and
unregistering file descriptors, and then polling them for I/O events."""
@@ -63,9 +59,8 @@
try:
retval = rpoll.poll(self.fddict, timeout)
except rpoll.PollError, e:
- w_errortype = space.fromcache(Cache).w_error
message = e.get_msg()
- raise OperationError(w_errortype,
+ raise OperationError(space.w_OSError,
space.newtuple([space.wrap(e.errno),
space.wrap(message)]))
finally:
@@ -125,8 +120,7 @@
if res < 0:
errno = _c.geterrno()
msg = _c.socket_strerror_str(errno)
- w_errortype = space.fromcache(Cache).w_error
- raise OperationError(w_errortype, space.newtuple([
+ raise OperationError(space.w_OSError, space.newtuple([
space.wrap(errno), space.wrap(msg)]))
resin_w = []
From noreply at buildbot.pypy.org Thu Jul 3 23:58:17 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:17 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Fix some cpyext tests.
Message-ID: <20140703215817.580741D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72345:a53f63e78743
Date: 2014-06-23 14:11 +0200
http://bitbucket.org/pypy/pypy/changeset/a53f63e78743/
Log: Fix some cpyext tests.
diff --git a/pypy/module/_frozen_importlib/__init__.py b/pypy/module/_frozen_importlib/__init__.py
--- a/pypy/module/_frozen_importlib/__init__.py
+++ b/pypy/module/_frozen_importlib/__init__.py
@@ -24,8 +24,9 @@
source = fp.read()
pathname = ""
code_w = ec.compiler.compile(source, pathname, 'exec', 0)
- w_dict = space.newdict()
- space.setitem(w_dict, space.wrap('__name__'), self.w_name)
+ space.setitem(self.w_dict, space.wrap('__name__'), self.w_name)
+ space.setitem(self.w_dict, space.wrap('__builtins__'),
+ space.wrap(space.builtin))
code_w.exec_code(space, self.w_dict, self.w_dict)
def startup(self, space):
diff --git a/pypy/module/cpyext/test/date.c b/pypy/module/cpyext/test/date.c
--- a/pypy/module/cpyext/test/date.c
+++ b/pypy/module/cpyext/test/date.c
@@ -17,6 +17,8 @@
PyObject *module, *othermodule;
module = PyModule_Create(&moduledef);
othermodule = PyImport_ImportModule("apple.banana");
+ if (!othermodule)
+ return NULL;
Py_DECREF(othermodule);
return module;
}
diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py
--- a/pypy/module/cpyext/test/test_cpyext.py
+++ b/pypy/module/cpyext/test/test_cpyext.py
@@ -217,10 +217,12 @@
class AppTestCpythonExtensionBase(LeakCheckingTest):
def setup_class(cls):
- cls.space.getbuiltinmodule("cpyext")
- from pypy.module.imp.importing import importhook
- importhook(cls.space, "os") # warm up reference counts
- state = cls.space.fromcache(RefcountState)
+ space = cls.space
+ space.getbuiltinmodule("cpyext")
+ # 'import os' to warm up reference counts
+ w_import = space.builtin.getdictvalue(space, '__import__')
+ space.call_function(w_import, space.wrap("os"))
+ state = space.fromcache(RefcountState)
state.non_heaptypes_w[:] = []
def setup_method(self, func):
diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py
--- a/pypy/module/imp/importing.py
+++ b/pypy/module/imp/importing.py
@@ -79,10 +79,9 @@
source = fp.read()
pathname = "<frozen %s>" % modulename
code_w = ec.compiler.compile(source, pathname, 'exec', 0)
- w_dict = space.newdict()
w_mod = add_module(space, space.wrap(modulename))
space.setitem(space.sys.get('modules'), w_mod.w_name, w_mod)
- space.setitem(w_dict, space.wrap('__name__'), w_mod.w_name)
+ space.setitem(w_mod.w_dict, space.wrap('__name__'), w_mod.w_name)
code_w.exec_code(space, w_mod.w_dict, w_mod.w_dict)
assert check_sys_modules_w(space, modulename)
return w_mod
From noreply at buildbot.pypy.org Thu Jul 3 23:58:18 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:18 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Another fix
Message-ID: <20140703215818.946001D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72346:ca2e17af825d
Date: 2014-06-23 17:48 +0200
http://bitbucket.org/pypy/pypy/changeset/ca2e17af825d/
Log: Another fix
diff --git a/pypy/module/cpyext/test/test_thread.py b/pypy/module/cpyext/test/test_thread.py
--- a/pypy/module/cpyext/test/test_thread.py
+++ b/pypy/module/cpyext/test/test_thread.py
@@ -18,7 +18,7 @@
results = []
def some_thread():
res = module.get_thread_ident()
- results.append((res, threading._get_ident()))
+ results.append((res, threading.get_ident()))
some_thread()
assert results[0][0] == results[0][1]
From noreply at buildbot.pypy.org Thu Jul 3 23:58:19 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:19 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Apply fix for CPython Issue14857
Message-ID: <20140703215819.D8D7B1D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72347:8f89bba8441c
Date: 2014-07-03 23:23 +0200
http://bitbucket.org/pypy/pypy/changeset/8f89bba8441c/
Log: Apply fix for CPython Issue14857
diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py
--- a/pypy/interpreter/astcompiler/codegen.py
+++ b/pypy/interpreter/astcompiler/codegen.py
@@ -1354,10 +1354,10 @@
# compile the body proper
self._handle_body(cls.body)
# return the (empty) __class__ cell
- scope = self.scope.lookup("@__class__")
+ scope = self.scope.lookup("__class__")
if scope == symtable.SCOPE_CELL:
# Return the cell where to store __class__
- self.emit_op_arg(ops.LOAD_CLOSURE, self.cell_vars["@__class__"])
+ self.emit_op_arg(ops.LOAD_CLOSURE, self.cell_vars["__class__"])
else:
# This happens when nobody references the cell
self.load_const(self.space.w_None)
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -240,7 +240,7 @@
def note_symbol(self, identifier, role):
# Special-case super: it counts as a use of __class__
if role == SYM_USED and identifier == 'super':
- self.note_symbol('@__class__', SYM_USED)
+ self.note_symbol('__class__', SYM_USED)
return Scope.note_symbol(self, identifier, role)
def note_yield(self, yield_node):
@@ -298,12 +298,12 @@
return misc.mangle(name, self.name)
def _pass_special_names(self, local, new_bound):
- assert '@__class__' in local
- new_bound['@__class__'] = None
+ assert '__class__' in local
+ new_bound['__class__'] = None
def _finalize_cells(self, free):
for name, role in self.symbols.iteritems():
- if role == SCOPE_LOCAL and name in free and name == '@__class__':
+ if role == SCOPE_LOCAL and name in free and name == '__class__':
self.symbols[name] = SCOPE_CELL
del free[name]
@@ -392,7 +392,7 @@
clsdef.kwargs.walkabout(self)
self.visit_sequence(clsdef.decorator_list)
self.push_scope(ClassScope(clsdef), clsdef)
- self.note_symbol('@__class__', SYM_ASSIGNED)
+ self.note_symbol('__class__', SYM_ASSIGNED)
self.note_symbol('__locals__', SYM_PARAM)
self.visit_sequence(clsdef.body)
self.pop_scope()
diff --git a/pypy/module/__builtin__/descriptor.py b/pypy/module/__builtin__/descriptor.py
--- a/pypy/module/__builtin__/descriptor.py
+++ b/pypy/module/__builtin__/descriptor.py
@@ -67,7 +67,7 @@
"super(): arg[0] deleted"))
index = 0
for name in code.co_freevars:
- if name == "@__class__":
+ if name == "__class__":
break
index += 1
else:
diff --git a/pypy/module/__builtin__/test/test_descriptor.py b/pypy/module/__builtin__/test/test_descriptor.py
--- a/pypy/module/__builtin__/test/test_descriptor.py
+++ b/pypy/module/__builtin__/test/test_descriptor.py
@@ -400,3 +400,21 @@
assert x.y == 42
del x.x
assert x.z == 42
+
+ def test___class___variable(self):
+ class X:
+ def f(self):
+ return __class__
+ assert X().f() is X
+
+ class X:
+ @classmethod
+ def f(cls):
+ return __class__
+ assert X.f() is X
+
+ class X:
+ @staticmethod
+ def f():
+ return __class__
+ assert X.f() is X
From noreply at buildbot.pypy.org Thu Jul 3 23:58:21 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:21 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Apply fix for CPython Issue15839
Message-ID: <20140703215821.3C8CF1D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72348:7562de248abb
Date: 2014-07-03 23:35 +0200
http://bitbucket.org/pypy/pypy/changeset/7562de248abb/
Log: Apply fix for CPython Issue15839
diff --git a/pypy/module/__builtin__/descriptor.py b/pypy/module/__builtin__/descriptor.py
--- a/pypy/module/__builtin__/descriptor.py
+++ b/pypy/module/__builtin__/descriptor.py
@@ -56,14 +56,14 @@
frame = ec.gettopframe()
code = frame.pycode
if not code:
- raise OperationError(space.w_SystemError, space.wrap(
+ raise OperationError(space.w_RuntimeError, space.wrap(
"super(): no code object"))
if code.co_argcount == 0:
- raise OperationError(space.w_SystemError, space.wrap(
+ raise OperationError(space.w_RuntimeError, space.wrap(
"super(): no arguments"))
w_obj = frame.locals_stack_w[0]
if not w_obj:
- raise OperationError(space.w_SystemError, space.wrap(
+ raise OperationError(space.w_RuntimeError, space.wrap(
"super(): arg[0] deleted"))
index = 0
for name in code.co_freevars:
@@ -71,11 +71,15 @@
break
index += 1
else:
- raise OperationError(space.w_SystemError, space.wrap(
+ raise OperationError(space.w_RuntimeError, space.wrap(
"super(): __class__ cell not found"))
# a kind of LOAD_DEREF
cell = frame.cells[len(code.co_cellvars) + index]
- w_starttype = cell.get()
+ try:
+ w_starttype = cell.get()
+ except ValueError:
+ raise OperationError(space.w_RuntimeError, space.wrap(
+ "super(): empty __class__ cell"))
w_obj_or_type = w_obj
if space.is_none(w_obj_or_type):
diff --git a/pypy/module/__builtin__/test/test_descriptor.py b/pypy/module/__builtin__/test/test_descriptor.py
--- a/pypy/module/__builtin__/test/test_descriptor.py
+++ b/pypy/module/__builtin__/test/test_descriptor.py
@@ -418,3 +418,20 @@
def f():
return __class__
assert X.f() is X
+
+ def test_obscure_super_errors(self):
+ """
+ def f():
+ super()
+ raises(RuntimeError, f)
+ def f(x):
+ del x
+ super()
+ raises(RuntimeError, f, None)
+ class X:
+ def f(x):
+ nonlocal __class__
+ del __class__
+ super()
+ raises(RuntimeError, X().f)
+ """
From noreply at buildbot.pypy.org Thu Jul 3 23:58:22 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:22 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Fix for CPython Issue #17983
Message-ID: <20140703215822.7DDE71D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72349:37095df3c8ad
Date: 2014-07-03 23:51 +0200
http://bitbucket.org/pypy/pypy/changeset/37095df3c8ad/
Log: Fix for CPython Issue #17983
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -437,6 +437,9 @@
def visit_Global(self, glob):
for name in glob.names:
+ if isinstance(self.scope, ClassScope) and name == '__class__':
+ raise SyntaxError("cannot make __class__ global",
+ glob.lineno, glob.col_offset)
old_role = self.scope.lookup_role(name)
if old_role & (SYM_USED | SYM_ASSIGNED):
if old_role & SYM_ASSIGNED:
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -974,6 +974,15 @@
"""
yield self.st, test, "g()", range(3)
+ def test__class__global(self):
+ source = """if 1:
+ class X:
+ global __class__
+ def f(self):
+ super()
+ """
+ py.test.raises(SyntaxError, self.simple_test, source, None, None)
+
class AppTestCompiler:
From noreply at buildbot.pypy.org Thu Jul 3 23:58:23 2014
From: noreply at buildbot.pypy.org (amauryfa)
Date: Thu, 3 Jul 2014 23:58:23 +0200 (CEST)
Subject: [pypy-commit] pypy py3.3: Python3.3 slightly changed the error
message
Message-ID: <20140703215823.AF4181D293B@cobra.cs.uni-duesseldorf.de>
Author: Amaury Forgeot d'Arc
Branch: py3.3
Changeset: r72350:b0c62d972ccf
Date: 2014-07-03 23:54 +0200
http://bitbucket.org/pypy/pypy/changeset/b0c62d972ccf/
Log: Python3.3 slightly changed the error message
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -347,7 +347,7 @@
A().m()
except ImportError as e:
msg = str(e)
- ''', "msg", "No module named __foo__")
+ ''', "msg", "No module named '__foo__'")
def test_if_stmts(self):
yield self.st, "a = 42\nif a > 10: a += 2", "a", 44
From noreply at buildbot.pypy.org Fri Jul 4 00:13:02 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 00:13:02 +0200 (CEST)
Subject: [pypy-commit] pypy default: Copy the checkin message as comment to
the newly introduced function.
Message-ID: <20140703221302.CD3681D2D6D@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72351:2d41724c01ad
Date: 2014-07-04 00:12 +0200
http://bitbucket.org/pypy/pypy/changeset/2d41724c01ad/
Log: Copy the checkin message as comment to the newly introduced
function.
diff --git a/lib-python/2.7/timeit.py b/lib-python/2.7/timeit.py
--- a/lib-python/2.7/timeit.py
+++ b/lib-python/2.7/timeit.py
@@ -131,6 +131,14 @@
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
def make_inner():
+ # PyPy tweak: recompile the source code each time before
+ # calling inner(). There are situations like Issue #1776
+ # where PyPy tries to reuse the JIT code from before,
+ # but that's not going to work: the first thing the
+ # function does is the "-s" statement, which may declare
+ # new classes (here a namedtuple). We end up with
+ # bridges from the inner loop; more and more of them
+ # every time we call inner().
code = compile(src, dummy_src_name, "exec")
exec code in globals(), ns
return ns["inner"]
From noreply at buildbot.pypy.org Fri Jul 4 10:04:53 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 10:04:53 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Use a regular lock as the fall-back
"atomic" object here, as it also
Message-ID: <20140704080453.237CB1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72352:617dab8c0da1
Date: 2014-07-04 10:04 +0200
http://bitbucket.org/pypy/pypy/changeset/617dab8c0da1/
Log: Use a regular lock as the fall-back "atomic" object here, as it also
supports "with" directly.
diff --git a/lib_pypy/transaction.py b/lib_pypy/transaction.py
--- a/lib_pypy/transaction.py
+++ b/lib_pypy/transaction.py
@@ -17,17 +17,11 @@
try:
from __pypy__.thread import atomic
except ImportError:
- # Not a STM-enabled PyPy. We can still provide a version of 'atomic'
- # that is good enough for our purposes. With this limited version,
+ # Not a STM-enabled PyPy. We can use a regular lock for 'atomic',
+ # which is good enough for our purposes. With this limited version,
# an atomic block in thread X will not prevent running thread Y, if
# thread Y is not within an atomic block at all.
- _atomic_global_lock = thread.allocate_lock()
- class _Atomic(object):
- def __enter__(self):
- _atomic_global_lock.acquire()
- def __exit__(self, *args):
- _atomic_global_lock.release()
- atomic = _Atomic()
+ atomic = thread.allocate_lock()
try:
from __pypy__.thread import signals_enabled
From noreply at buildbot.pypy.org Fri Jul 4 13:58:58 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 13:58:58 +0200 (CEST)
Subject: [pypy-commit] pypy default: Add a warning to the tproxy docs
Message-ID: <20140704115858.1F5DA1D3545@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72353:261cce2fec2d
Date: 2014-07-04 13:58 +0200
http://bitbucket.org/pypy/pypy/changeset/261cce2fec2d/
Log: Add a warning to the tproxy docs
diff --git a/pypy/doc/objspace-proxies.rst b/pypy/doc/objspace-proxies.rst
--- a/pypy/doc/objspace-proxies.rst
+++ b/pypy/doc/objspace-proxies.rst
@@ -26,6 +26,16 @@
Transparent Proxies
================================
+.. warning::
+
+ This is a feature that was tried experimentally long ago, and we
+ found no really good use cases. The basic functionality is still
+ there, but we don't recommend using it. Some of the examples below
+ might not work any more (e.g. you can't tproxy a list object any
+ more). The rest can be done by hacking in standard Python. If
+ anyone is interested in working on tproxy again, he is welcome, but
+ we don't regard this as an interesting extension.
+
PyPy's Transparent Proxies allow routing of operations on objects
to a callable. Application level code can customize objects without
interfering with the type system - ``type(proxied_list) is list`` holds true
From noreply at buildbot.pypy.org Fri Jul 4 14:21:06 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 14:21:06 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Cancel 3e144ed1d5b7: it makes
translate.py take 25% longer...
Message-ID: <20140704122106.F20A51D35D7@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72354:3676ac825f07
Date: 2014-07-04 14:20 +0200
http://bitbucket.org/pypy/pypy/changeset/3676ac825f07/
Log: Cancel 3e144ed1d5b7: it makes translate.py take 25% longer...
diff --git a/rpython/translator/stm/src_stm/stmgcintf.c b/rpython/translator/stm/src_stm/stmgcintf.c
--- a/rpython/translator/stm/src_stm/stmgcintf.c
+++ b/rpython/translator/stm/src_stm/stmgcintf.c
@@ -42,8 +42,7 @@
if (((long)pypy_stm_nursery_low_fill_mark_saved) > 0) {
pypy_stm_nursery_low_fill_mark_saved = 0;
}
- } else {
- /* if (((long)pypy_stm_nursery_low_fill_mark) > 0) */
+ } else if (((long)pypy_stm_nursery_low_fill_mark) > 0) {
/* if not set to unlimited by pypy_stm_setup() (s.b.) */
pypy_stm_nursery_low_fill_mark = 0;
}
From noreply at buildbot.pypy.org Fri Jul 4 14:23:48 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 14:23:48 +0200 (CEST)
Subject: [pypy-commit] pypy default: Improve the speed of some non-jitted
parts of the code, by disabling the assert() there when
Message-ID: <20140704122348.073E91D35E1@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72355:7c2871e9cb26
Date: 2014-07-04 14:21 +0200
http://bitbucket.org/pypy/pypy/changeset/7c2871e9cb26/
Log: Improve the speed of some non-jitted parts of the code, by disabling
the assert() there when compiled in non-debug mode.
diff --git a/rpython/translator/tool/cbuild.py b/rpython/translator/tool/cbuild.py
--- a/rpython/translator/tool/cbuild.py
+++ b/rpython/translator/tool/cbuild.py
@@ -361,4 +361,8 @@
typedef unsigned long Unsigned;
# define SIGNED_MIN LONG_MIN
#endif
+
+#if !defined(RPY_ASSERT) && !defined(RPY_LL_ASSERT)
+# define NDEBUG
+#endif
'''
From noreply at buildbot.pypy.org Fri Jul 4 14:23:49 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 14:23:49 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Improve the speed of some non-jitted
parts of the code, by disabling the assert() there when
Message-ID: <20140704122349.8F04F1D35E1@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72356:4f5e243c027d
Date: 2014-07-04 14:21 +0200
http://bitbucket.org/pypy/pypy/changeset/4f5e243c027d/
Log: Improve the speed of some non-jitted parts of the code, by disabling
the assert() there when compiled in non-debug mode.
diff --git a/rpython/translator/tool/cbuild.py b/rpython/translator/tool/cbuild.py
--- a/rpython/translator/tool/cbuild.py
+++ b/rpython/translator/tool/cbuild.py
@@ -366,4 +366,8 @@
#else
typedef unsigned char bool_t;
#endif
+
+#if !defined(RPY_ASSERT) && !defined(RPY_LL_ASSERT)
+# define NDEBUG
+#endif
'''
From noreply at buildbot.pypy.org Fri Jul 4 14:23:50 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 14:23:50 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge heads
Message-ID: <20140704122350.DB14C1D35E1@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72357:b668c9626bce
Date: 2014-07-04 14:23 +0200
http://bitbucket.org/pypy/pypy/changeset/b668c9626bce/
Log: merge heads
diff --git a/pypy/doc/objspace-proxies.rst b/pypy/doc/objspace-proxies.rst
--- a/pypy/doc/objspace-proxies.rst
+++ b/pypy/doc/objspace-proxies.rst
@@ -26,6 +26,16 @@
Transparent Proxies
================================
+.. warning::
+
+ This is a feature that was tried experimentally long ago, and we
+ found no really good use cases. The basic functionality is still
+ there, but we don't recommend using it. Some of the examples below
+ might not work any more (e.g. you can't tproxy a list object any
+ more). The rest can be done by hacking in standard Python. If
+ anyone is interested in working on tproxy again, he is welcome, but
+ we don't regard this as an interesting extension.
+
PyPy's Transparent Proxies allow routing of operations on objects
to a callable. Application level code can customize objects without
interfering with the type system - ``type(proxied_list) is list`` holds true
From noreply at buildbot.pypy.org Fri Jul 4 16:40:03 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 16:40:03 +0200 (CEST)
Subject: [pypy-commit] pypy default: Fix a big leak in our '_ssl' module.
Argh. Thanks bob_grigoryan on irc.
Message-ID: <20140704144003.1BA6B1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72358:0726938dad41
Date: 2014-07-04 16:39 +0200
http://bitbucket.org/pypy/pypy/changeset/0726938dad41/
Log: Fix a big leak in our '_ssl' module. Argh. Thanks bob_grigoryan on
irc.
diff --git a/pypy/module/_ssl/interp_ssl.py b/pypy/module/_ssl/interp_ssl.py
--- a/pypy/module/_ssl/interp_ssl.py
+++ b/pypy/module/_ssl/interp_ssl.py
@@ -233,40 +233,40 @@
return self.space.wrap('')
raise ssl_error(self.space, "Socket closed without SSL shutdown handshake")
- raw_buf, gc_buf = rffi.alloc_buffer(num_bytes)
- while True:
- err = 0
+ with rffi.scoped_alloc_buffer(num_bytes) as buf:
+ while True:
+ err = 0
- count = libssl_SSL_read(self.ssl, raw_buf, num_bytes)
- err = libssl_SSL_get_error(self.ssl, count)
+ count = libssl_SSL_read(self.ssl, buf.raw, num_bytes)
+ err = libssl_SSL_get_error(self.ssl, count)
- if err == SSL_ERROR_WANT_READ:
- sockstate = check_socket_and_wait_for_timeout(self.space,
- self.w_socket, False)
- elif err == SSL_ERROR_WANT_WRITE:
- sockstate = check_socket_and_wait_for_timeout(self.space,
- self.w_socket, True)
- elif (err == SSL_ERROR_ZERO_RETURN and
- libssl_SSL_get_shutdown(self.ssl) == SSL_RECEIVED_SHUTDOWN):
- return self.space.wrap("")
- else:
- sockstate = SOCKET_OPERATION_OK
+ if err == SSL_ERROR_WANT_READ:
+ sockstate = check_socket_and_wait_for_timeout(self.space,
+ self.w_socket, False)
+ elif err == SSL_ERROR_WANT_WRITE:
+ sockstate = check_socket_and_wait_for_timeout(self.space,
+ self.w_socket, True)
+ elif (err == SSL_ERROR_ZERO_RETURN and
+ libssl_SSL_get_shutdown(self.ssl) == SSL_RECEIVED_SHUTDOWN):
+ return self.space.wrap("")
+ else:
+ sockstate = SOCKET_OPERATION_OK
- if sockstate == SOCKET_HAS_TIMED_OUT:
- raise ssl_error(self.space, "The read operation timed out")
- elif sockstate == SOCKET_IS_NONBLOCKING:
- break
+ if sockstate == SOCKET_HAS_TIMED_OUT:
+ raise ssl_error(self.space, "The read operation timed out")
+ elif sockstate == SOCKET_IS_NONBLOCKING:
+ break
- if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
- continue
- else:
- break
+ if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
+ continue
+ else:
+ break
- if count <= 0:
- raise _ssl_seterror(self.space, self, count)
+ if count <= 0:
+ raise _ssl_seterror(self.space, self, count)
- result = rffi.str_from_buffer(raw_buf, gc_buf, num_bytes, count)
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
+ result = buf.str(count)
+
return self.space.wrap(result)
def _refresh_nonblocking(self, space):
From noreply at buildbot.pypy.org Fri Jul 4 16:45:12 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 16:45:12 +0200 (CEST)
Subject: [pypy-commit] pypy default: Replace more usages of
rffi.alloc_buffer() with scoped_alloc_buffer().
Message-ID: <20140704144512.631A81C0EEA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72359:8bc897775808
Date: 2014-07-04 16:44 +0200
http://bitbucket.org/pypy/pypy/changeset/8bc897775808/
Log: Replace more usages of rffi.alloc_buffer() with
scoped_alloc_buffer().
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -442,8 +442,8 @@
def test_copy(self, space, api):
w_x = space.wrap(u"abcd\u0660")
- target_chunk, _ = rffi.alloc_unicodebuffer(space.int_w(space.len(w_x)))
- #lltype.malloc(Py_UNICODE, space.int_w(space.len(w_x)), flavor='raw')
+ count1 = space.int_w(space.len(w_x))
+ target_chunk = lltype.malloc(rffi.CWCHARP.TO, count1, flavor='raw')
x_chunk = api.PyUnicode_AS_UNICODE(w_x)
api.Py_UNICODE_COPY(target_chunk, x_chunk, 4)
diff --git a/rpython/rlib/rfile.py b/rpython/rlib/rfile.py
--- a/rpython/rlib/rfile.py
+++ b/rpython/rlib/rfile.py
@@ -188,16 +188,13 @@
finally:
lltype.free(buf, flavor='raw')
else:
- raw_buf, gc_buf = rffi.alloc_buffer(size)
- try:
- returned_size = c_fread(raw_buf, 1, size, ll_file)
+ with rffi.scoped_alloc_buffer(size) as buf:
+ returned_size = c_fread(buf.raw, 1, size, ll_file)
returned_size = intmask(returned_size) # is between 0 and size
if returned_size == 0:
if not c_feof(ll_file):
raise _error(ll_file)
- s = rffi.str_from_buffer(raw_buf, gc_buf, size, returned_size)
- finally:
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
+ s = buf.str(returned_size)
return s
def seek(self, pos, whence=0):
@@ -270,25 +267,21 @@
def readline(self):
if self.ll_file:
- raw_buf, gc_buf = rffi.alloc_buffer(BASE_LINE_SIZE)
- try:
- c = self._readline1(raw_buf)
+ with rffi.scoped_alloc_buffer(BASE_LINE_SIZE) as buf:
+ c = self._readline1(buf.raw)
if c >= 0:
- return rffi.str_from_buffer(raw_buf, gc_buf,
- BASE_LINE_SIZE, c)
+ return buf.str(c)
#
# this is the rare case: the line is longer than BASE_LINE_SIZE
s = StringBuilder()
while True:
- s.append_charpsize(raw_buf, BASE_LINE_SIZE - 1)
- c = self._readline1(raw_buf)
+ s.append_charpsize(buf.raw, BASE_LINE_SIZE - 1)
+ c = self._readline1(buf.raw)
if c >= 0:
break
#
- s.append_charpsize(raw_buf, c)
+ s.append_charpsize(buf.raw, c)
return s.build()
- finally:
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
raise ValueError("I/O operation on closed file")
diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py
--- a/rpython/rlib/rsocket.py
+++ b/rpython/rlib/rsocket.py
@@ -827,15 +827,12 @@
if timeout == 1:
raise SocketTimeout
elif timeout == 0:
- raw_buf, gc_buf = rffi.alloc_buffer(buffersize)
- try:
+ with rffi.scoped_alloc_buffer(buffersize) as buf:
read_bytes = _c.socketrecv(self.fd,
- rffi.cast(rffi.VOIDP, raw_buf),
+ rffi.cast(rffi.VOIDP, buf.raw),
buffersize, flags)
if read_bytes >= 0:
- return rffi.str_from_buffer(raw_buf, gc_buf, buffersize, read_bytes)
- finally:
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
+ return buf.str(read_bytes)
raise self.error_handler()
def recvinto(self, rwbuffer, nbytes, flags=0):
@@ -852,11 +849,10 @@
if timeout == 1:
raise SocketTimeout
elif timeout == 0:
- raw_buf, gc_buf = rffi.alloc_buffer(buffersize)
- try:
+ with rffi.scoped_alloc_buffer(buffersize) as buf:
address, addr_p, addrlen_p = self._addrbuf()
try:
- read_bytes = _c.recvfrom(self.fd, raw_buf, buffersize, flags,
+ read_bytes = _c.recvfrom(self.fd, buf.raw, buffersize, flags,
addr_p, addrlen_p)
addrlen = rffi.cast(lltype.Signed, addrlen_p[0])
finally:
@@ -867,10 +863,8 @@
address.addrlen = addrlen
else:
address = None
- data = rffi.str_from_buffer(raw_buf, gc_buf, buffersize, read_bytes)
+ data = buf.str(read_bytes)
return (data, address)
- finally:
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
raise self.error_handler()
def recvfrom_into(self, rwbuffer, nbytes, flags=0):
diff --git a/rpython/rtyper/module/ll_os.py b/rpython/rtyper/module/ll_os.py
--- a/rpython/rtyper/module/ll_os.py
+++ b/rpython/rtyper/module/ll_os.py
@@ -1006,15 +1006,12 @@
if count < 0:
raise OSError(errno.EINVAL, None)
rposix.validate_fd(fd)
- raw_buf, gc_buf = rffi.alloc_buffer(count)
- try:
- void_buf = rffi.cast(rffi.VOIDP, raw_buf)
+ with rffi.scoped_alloc_buffer(count) as buf:
+ void_buf = rffi.cast(rffi.VOIDP, buf.raw)
got = rffi.cast(lltype.Signed, os_read(fd, void_buf, count))
if got < 0:
raise OSError(rposix.get_errno(), "os_read failed")
- return rffi.str_from_buffer(raw_buf, gc_buf, count, got)
- finally:
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
+ return buf.str(got)
return extdef([int, int], SomeString(can_be_None=True),
"ll_os.ll_os_read", llimpl=os_read_llimpl)
From noreply at buildbot.pypy.org Fri Jul 4 18:22:09 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 4 Jul 2014 18:22:09 +0200 (CEST)
Subject: [pypy-commit] benchmarks default: Add a mini benchmark.
Message-ID: <20140704162209.37A8E1C1347@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r264:620a3e907582
Date: 2014-07-04 18:21 +0200
http://bitbucket.org/pypy/benchmarks/changeset/620a3e907582/
Log: Add a mini benchmark.
diff --git a/multithread/minibench1.py b/multithread/minibench1.py
new file mode 100644
--- /dev/null
+++ b/multithread/minibench1.py
@@ -0,0 +1,28 @@
+import thread, sys
+
+def f(n, lock):
+ total = 0
+ lst1 = ["foo"]
+ for i in xrange(n):
+ lst1.append(i)
+ total += lst1.pop()
+ sys.stdout.write('%d\n' % total)
+ lock.release()
+
+
+T = 4 # number of threads
+N = 100000000 # number of iterations in each thread
+if len(sys.argv) >= 2:
+ T = int(sys.argv[1])
+ if len(sys.argv) >= 3:
+ N = int(sys.argv[2])
+
+locks = []
+for i in range(T):
+ lock = thread.allocate_lock()
+ lock.acquire()
+ locks.append(lock)
+ thread.start_new_thread(f, (N, lock))
+
+for lock in locks:
+ lock.acquire()
From noreply at buildbot.pypy.org Sat Jul 5 10:48:43 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 10:48:43 +0200 (CEST)
Subject: [pypy-commit] pypy.org extradoc: Mention that the Ubuntu 12.04
binary works well on 14.04 too
Message-ID: <20140705084843.1E52A1C3288@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: extradoc
Changeset: r515:8e87d836c8d9
Date: 2014-07-05 10:48 +0200
http://bitbucket.org/pypy/pypy.org/changeset/8e87d836c8d9/
Log: Mention that the Ubuntu 12.04 binary works well on 14.04 too
diff --git a/download.html b/download.html
--- a/download.html
+++ b/download.html
@@ -90,7 +90,7 @@
portable Linux binaries.
- Linux x86 binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS) (see [1] below)
-- Linux x86 binary (64bit, tar.bz2 built on Ubuntu 12.04.2 LTS) (see [1] below)
+- Linux x86-64 binary (64bit, tar.bz2 built on Ubuntu 12.04 - 14.04) (see [1] below)
- ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Raspbian) (see [1] below)
- ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Ubuntu Raring) (see [1] below)
- ARM Softfloat Linux binary (ARMEL/gnueabi, tar.bz2, Ubuntu Precise) (see [1] below)
@@ -111,8 +111,8 @@
them unless you're ready to hack your system by adding symlinks to the
libraries it tries to open.
-- Linux binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS) (see [1] below)
-- Linux binary (64bit, tar.bz2 built on Ubuntu 12.04.2 LTS) (see [1] below)
+- Linux x86 binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS) (see [1] below)
+- Linux x86-64 binary (64bit, tar.bz2 built on Ubuntu 12.04 - 14.04) (see [1] below)
- ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Raspbian) (see [1] below)
- ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Ubuntu Raring) (see [1] below)
- ARM Softfloat Linux binary (ARMEL/gnueabi, tar.bz2, Ubuntu Precise) (see [1] below)
diff --git a/source/download.txt b/source/download.txt
--- a/source/download.txt
+++ b/source/download.txt
@@ -73,7 +73,7 @@
.. _`portable Linux binaries`: https://github.com/squeaky-pl/portable-pypy
* `Linux x86 binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS)`__ (see ``[1]`` below)
-* `Linux x86 binary (64bit, tar.bz2 built on Ubuntu 12.04.2 LTS)`__ (see ``[1]`` below)
+* `Linux x86-64 binary (64bit, tar.bz2 built on Ubuntu 12.04 - 14.04)`__ (see ``[1]`` below)
* `ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Raspbian)`__ (see ``[1]`` below)
* `ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Ubuntu Raring)`__ (see ``[1]`` below)
* `ARM Softfloat Linux binary (ARMEL/gnueabi, tar.bz2, Ubuntu Precise)`__ (see ``[1]`` below)
@@ -107,8 +107,8 @@
them** unless you're ready to hack your system by adding symlinks to the
libraries it tries to open.
-* `Linux binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS)`__ (see ``[1]`` below)
-* `Linux binary (64bit, tar.bz2 built on Ubuntu 12.04.2 LTS)`__ (see ``[1]`` below)
+* `Linux x86 binary (32bit, tar.bz2 built on Ubuntu 10.04.4 LTS)`__ (see ``[1]`` below)
+* `Linux x86-64 binary (64bit, tar.bz2 built on Ubuntu 12.04 - 14.04)`__ (see ``[1]`` below)
* `ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Raspbian)`__ (see ``[1]`` below)
* `ARM Hardfloat Linux binary (ARMHF/gnueabihf, tar.bz2, Ubuntu Raring)`__ (see ``[1]`` below)
* `ARM Softfloat Linux binary (ARMEL/gnueabi, tar.bz2, Ubuntu Precise)`__ (see ``[1]`` below)
From noreply at buildbot.pypy.org Sat Jul 5 15:56:43 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 15:56:43 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Print "PyPy-STM" instead of "PyPy" in
the banner, to distinguish the two
Message-ID: <20140705135643.6ED3A1C3334@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72360:b2f768401d32
Date: 2014-07-05 15:56 +0200
http://bitbucket.org/pypy/pypy/changeset/b2f768401d32/
Log: Print "PyPy-STM" instead of "PyPy" in the banner, to distinguish the
two more easily.
diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py
--- a/pypy/module/sys/version.py
+++ b/pypy/module/sys/version.py
@@ -61,13 +61,17 @@
ver = "%d.%d.%d" % (PYPY_VERSION[0], PYPY_VERSION[1], PYPY_VERSION[2])
if PYPY_VERSION[3] != "final":
ver = ver + "-%s%d" %(PYPY_VERSION[3], PYPY_VERSION[4])
- return space.wrap("%d.%d.%d (%s, %s, %s)\n[PyPy %s%s]" % (
+ extra = ''
+ if space.config.translation.stm:
+ extra = '-STM'
+ return space.wrap("%d.%d.%d (%s, %s, %s)\n[PyPy%s %s%s]" % (
CPYTHON_VERSION[0],
CPYTHON_VERSION[1],
CPYTHON_VERSION[2],
get_repo_version_info(root=pypyroot)[1],
date,
time,
+ extra,
ver,
compiler_version()))
From noreply at buildbot.pypy.org Sat Jul 5 17:05:35 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 17:05:35 +0200 (CEST)
Subject: [pypy-commit] cffi default: prepare for 0.8.3
Message-ID: <20140705150535.775401C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1534:057515cfe543
Date: 2014-07-05 16:47 +0200
http://bitbucket.org/cffi/cffi/changeset/057515cfe543/
Log: prepare for 0.8.3
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5504,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.2");
+ v = PyText_FromString("0.8.3");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.2"
+ assert __version__ == "0.8.3"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.2"
-__version_info__ = (0, 8, 2)
+__version__ = "0.8.3"
+__version_info__ = (0, 8, 3)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.2'
+release = '0.8.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,7 +88,7 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.2.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.3.tar.gz
- Or grab the most current version by following the instructions below.
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -113,7 +113,7 @@
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
""",
- version='0.8.2',
+ version='0.8.3',
packages=['cffi'],
zip_safe=False,
From noreply at buildbot.pypy.org Sat Jul 5 17:05:37 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 17:05:37 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: hg merge default
Message-ID: <20140705150537.1D4AF1C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1535:1f0fc199c139
Date: 2014-07-05 16:47 +0200
http://bitbucket.org/cffi/cffi/changeset/1f0fc199c139/
Log: hg merge default
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5,7 +5,6 @@
#ifdef MS_WIN32
#include <windows.h>
#include "misc_win32.h"
-#include <malloc.h>   /* for alloca() */
#else
#include
#include
@@ -13,9 +12,32 @@
#include
#include
#include
-#if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+#endif
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+ typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
#  include <alloca.h>
-#endif
+# endif
#endif
#include "malloc_closure.h"
@@ -5482,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.2");
+ v = PyText_FromString("0.8.3");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/check__thread.c b/c/check__thread.c
deleted file mode 100644
--- a/c/check__thread.c
+++ /dev/null
@@ -1,1 +0,0 @@
-__thread int some_threadlocal_variable_42;
diff --git a/c/minibuffer.h b/c/minibuffer.h
--- a/c/minibuffer.h
+++ b/c/minibuffer.h
@@ -105,8 +105,9 @@
static int mb_getbuf(MiniBufferObj *self, Py_buffer *view, int flags)
{
- return PyBuffer_FillInfo(view, NULL, self->mb_data, self->mb_size,
- /*readonly=*/0, PyBUF_CONTIG | PyBUF_FORMAT);
+ return PyBuffer_FillInfo(view, (PyObject *)self,
+ self->mb_data, self->mb_size,
+ /*readonly=*/0, flags);
}
static PySequenceMethods mb_as_sequence = {
diff --git a/c/misc_win32.h b/c/misc_win32.h
--- a/c/misc_win32.h
+++ b/c/misc_win32.h
@@ -1,3 +1,4 @@
+#include <malloc.h>   /* for alloca() */
/************************************************************/
/* errno and GetLastError support */
@@ -192,7 +193,27 @@
static void *dlsym(void *handle, const char *symbol)
{
- return GetProcAddress((HMODULE)handle, symbol);
+ void *address = GetProcAddress((HMODULE)handle, symbol);
+#ifndef MS_WIN64
+ if (!address) {
+ /* If 'symbol' is not found, then try '_symbol at N' for N in
+ (0, 4, 8, 12, ..., 124). Unlike ctypes, we try to do that
+ for any symbol, although in theory it should only be done
+ for __stdcall functions.
+ */
+ int i;
+ char *mangled_name = alloca(1 + strlen(symbol) + 1 + 3 + 1);
+ if (!mangled_name)
+ return NULL;
+ for (i = 0; i < 32; i++) {
+ sprintf(mangled_name, "_%s@%d", symbol, i * 4);
+ address = GetProcAddress((HMODULE)handle, mangled_name);
+ if (address)
+ break;
+ }
+ }
+#endif
+ return address;
}
static void dlclose(void *handle)
@@ -210,21 +231,6 @@
return buf;
}
-
-/************************************************************/
-/* types */
-
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-typedef unsigned char _Bool;
-
-
/************************************************************/
/* obscure */
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -1102,7 +1102,7 @@
def test_read_variable():
## FIXME: this test assumes glibc specific behavior, it's not compliant with C standard
## https://bugs.pypy.org/issue1643
- if sys.platform == 'win32' or sys.platform == 'darwin' or sys.platform.startswith('freebsd'):
+ if not sys.platform.startswith("linux"):
py.test.skip("untested")
BVoidP = new_pointer_type(new_void_type())
ll = find_and_load_library('c')
@@ -1112,7 +1112,7 @@
def test_read_variable_as_unknown_length_array():
## FIXME: this test assumes glibc specific behavior, it's not compliant with C standard
## https://bugs.pypy.org/issue1643
- if sys.platform == 'win32' or sys.platform == 'darwin' or sys.platform.startswith('freebsd'):
+ if not sys.platform.startswith("linux"):
py.test.skip("untested")
BCharP = new_pointer_type(new_primitive_type("char"))
BArray = new_array_type(BCharP, None)
@@ -1124,7 +1124,7 @@
def test_write_variable():
## FIXME: this test assumes glibc specific behavior, it's not compliant with C standard
## https://bugs.pypy.org/issue1643
- if sys.platform == 'win32' or sys.platform == 'darwin' or sys.platform.startswith('freebsd'):
+ if not sys.platform.startswith("linux"):
py.test.skip("untested")
BVoidP = new_pointer_type(new_void_type())
ll = find_and_load_library('c')
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.2"
+ assert __version__ == "0.8.3"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.2"
-__version_info__ = (0, 8, 2)
+__version__ = "0.8.3"
+__version_info__ = (0, 8, 3)
diff --git a/cffi/api.py b/cffi/api.py
--- a/cffi/api.py
+++ b/cffi/api.py
@@ -443,6 +443,10 @@
for enumname, enumval in zip(tp.enumerators, tp.enumvalues):
if enumname not in library.__dict__:
library.__dict__[enumname] = enumval
+ for key, val in ffi._parser._int_constants.items():
+ if key not in library.__dict__:
+ library.__dict__[key] = val
+
copied_enums.append(True)
if name in library.__dict__:
return
diff --git a/cffi/cparser.py b/cffi/cparser.py
--- a/cffi/cparser.py
+++ b/cffi/cparser.py
@@ -24,6 +24,7 @@
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
_r_words = re.compile(r"\w+|\S")
_parser_cache = None
+_r_int_literal = re.compile(r"^0?x?[0-9a-f]+u?l?$", re.IGNORECASE)
def _get_parser():
global _parser_cache
@@ -99,6 +100,7 @@
self._structnode2type = weakref.WeakKeyDictionary()
self._override = False
self._packed = False
+ self._int_constants = {}
def _parse(self, csource):
csource, macros = _preprocess(csource)
@@ -128,9 +130,10 @@
finally:
if lock is not None:
lock.release()
- return ast, macros
+ # csource will be used to find buggy source text
+ return ast, macros, csource
- def convert_pycparser_error(self, e, csource):
+ def _convert_pycparser_error(self, e, csource):
# xxx look for ":NUM:" at the start of str(e) and try to interpret
# it as a line number
line = None
@@ -142,6 +145,12 @@
csourcelines = csource.splitlines()
if 1 <= linenum <= len(csourcelines):
line = csourcelines[linenum-1]
+ return line
+
+ def convert_pycparser_error(self, e, csource):
+ line = self._convert_pycparser_error(e, csource)
+
+ msg = str(e)
if line:
msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
else:
@@ -160,14 +169,9 @@
self._packed = prev_packed
def _internal_parse(self, csource):
- ast, macros = self._parse(csource)
+ ast, macros, csource = self._parse(csource)
# add the macros
- for key, value in macros.items():
- value = value.strip()
- if value != '...':
- raise api.CDefError('only supports the syntax "#define '
- '%s ..." for now (literally)' % key)
- self._declare('macro ' + key, value)
+ self._process_macros(macros)
# find the first "__dotdotdot__" and use that as a separator
# between the repeated typedefs and the real csource
iterator = iter(ast.ext)
@@ -175,27 +179,61 @@
if decl.name == '__dotdotdot__':
break
#
- for decl in iterator:
- if isinstance(decl, pycparser.c_ast.Decl):
- self._parse_decl(decl)
- elif isinstance(decl, pycparser.c_ast.Typedef):
- if not decl.name:
- raise api.CDefError("typedef does not declare any name",
- decl)
- if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
- and decl.type.type.names == ['__dotdotdot__']):
- realtype = model.unknown_type(decl.name)
- elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
- isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
- isinstance(decl.type.type.type,
- pycparser.c_ast.IdentifierType) and
- decl.type.type.type.names == ['__dotdotdot__']):
- realtype = model.unknown_ptr_type(decl.name)
+ try:
+ for decl in iterator:
+ if isinstance(decl, pycparser.c_ast.Decl):
+ self._parse_decl(decl)
+ elif isinstance(decl, pycparser.c_ast.Typedef):
+ if not decl.name:
+ raise api.CDefError("typedef does not declare any name",
+ decl)
+ if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
+ and decl.type.type.names == ['__dotdotdot__']):
+ realtype = model.unknown_type(decl.name)
+ elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
+ isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
+ isinstance(decl.type.type.type,
+ pycparser.c_ast.IdentifierType) and
+ decl.type.type.type.names == ['__dotdotdot__']):
+ realtype = model.unknown_ptr_type(decl.name)
+ else:
+ realtype = self._get_type(decl.type, name=decl.name)
+ self._declare('typedef ' + decl.name, realtype)
else:
- realtype = self._get_type(decl.type, name=decl.name)
- self._declare('typedef ' + decl.name, realtype)
+ raise api.CDefError("unrecognized construct", decl)
+ except api.FFIError as e:
+ msg = self._convert_pycparser_error(e, csource)
+ if msg:
+ e.args = (e.args[0] + "\n *** Err: %s" % msg,)
+ raise
+
+ def _add_constants(self, key, val):
+ if key in self._int_constants:
+ raise api.FFIError(
+ "multiple declarations of constant: %s" % (key,))
+ self._int_constants[key] = val
+
+ def _process_macros(self, macros):
+ for key, value in macros.items():
+ value = value.strip()
+ match = _r_int_literal.search(value)
+ if match is not None:
+ int_str = match.group(0).lower().rstrip("ul")
+
+ # "010" is not valid oct in py3
+ if (int_str.startswith("0") and
+ int_str != "0" and
+ not int_str.startswith("0x")):
+ int_str = "0o" + int_str[1:]
+
+ pyvalue = int(int_str, 0)
+ self._add_constants(key, pyvalue)
+ elif value == '...':
+ self._declare('macro ' + key, value)
else:
- raise api.CDefError("unrecognized construct", decl)
+ raise api.CDefError('only supports the syntax "#define '
+ '%s ..." (literally) or "#define '
+ '%s 0x1FF" for now' % (key, key))
def _parse_decl(self, decl):
node = decl.type
@@ -227,7 +265,7 @@
self._declare('variable ' + decl.name, tp)
def parse_type(self, cdecl):
- ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)
+ ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
assert not macros
exprnode = ast.ext[-1].type.args.params[0]
if isinstance(exprnode, pycparser.c_ast.ID):
@@ -306,7 +344,8 @@
if ident == 'void':
return model.void_type
if ident == '__dotdotdot__':
- raise api.FFIError('bad usage of "..."')
+ raise api.FFIError(':%d: bad usage of "..."' %
+ typenode.coord.line)
return resolve_common_type(ident)
#
if isinstance(type, pycparser.c_ast.Struct):
@@ -333,7 +372,8 @@
return self._get_struct_union_enum_type('union', typenode, name,
nested=True)
#
- raise api.FFIError("bad or unsupported type declaration")
+ raise api.FFIError(":%d: bad or unsupported type declaration" %
+ typenode.coord.line)
def _parse_function_type(self, typenode, funcname=None):
params = list(getattr(typenode.args, 'params', []))
@@ -499,6 +539,10 @@
if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
exprnode.op == '-'):
return -self._parse_constant(exprnode.expr)
+ # load previously defined int constant
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name in self._int_constants):
+ return self._int_constants[exprnode.name]
#
if partial_length_ok:
if (isinstance(exprnode, pycparser.c_ast.ID) and
@@ -506,8 +550,8 @@
self._partial_length = True
return '...'
#
- raise api.FFIError("unsupported expression: expected a "
- "simple numeric constant")
+ raise api.FFIError(":%d: unsupported expression: expected a "
+ "simple numeric constant" % exprnode.coord.line)
def _build_enum_type(self, explicit_name, decls):
if decls is not None:
@@ -522,6 +566,7 @@
if enum.value is not None:
nextenumvalue = self._parse_constant(enum.value)
enumvalues.append(nextenumvalue)
+ self._add_constants(enum.name, nextenumvalue)
nextenumvalue += 1
enumvalues = tuple(enumvalues)
tp = model.EnumType(explicit_name, enumerators, enumvalues)
@@ -535,3 +580,5 @@
kind = name.split(' ', 1)[0]
if kind in ('typedef', 'struct', 'union', 'enum'):
self._declare(name, tp)
+ for k, v in other._int_constants.items():
+ self._add_constants(k, v)
diff --git a/cffi/ffiplatform.py b/cffi/ffiplatform.py
--- a/cffi/ffiplatform.py
+++ b/cffi/ffiplatform.py
@@ -38,6 +38,7 @@
import distutils.errors
#
dist = Distribution({'ext_modules': [ext]})
+ dist.parse_config_files()
options = dist.get_option_dict('build_ext')
options['force'] = ('ffiplatform', True)
options['build_lib'] = ('ffiplatform', tmpdir)
diff --git a/cffi/vengine_cpy.py b/cffi/vengine_cpy.py
--- a/cffi/vengine_cpy.py
+++ b/cffi/vengine_cpy.py
@@ -89,43 +89,54 @@
# by generate_cpy_function_method().
prnt('static PyMethodDef _cffi_methods[] = {')
self._generate("method")
- prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS},')
- prnt(' {NULL, NULL} /* Sentinel */')
+ prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+ prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
prnt('};')
prnt()
#
# standard init.
modname = self.verifier.get_module_name()
- if sys.version_info >= (3,):
- prnt('static struct PyModuleDef _cffi_module_def = {')
- prnt(' PyModuleDef_HEAD_INIT,')
- prnt(' "%s",' % modname)
- prnt(' NULL,')
- prnt(' -1,')
- prnt(' _cffi_methods,')
- prnt(' NULL, NULL, NULL, NULL')
- prnt('};')
- prnt()
- initname = 'PyInit_%s' % modname
- createmod = 'PyModule_Create(&_cffi_module_def)'
- errorcase = 'return NULL'
- finalreturn = 'return lib'
- else:
- initname = 'init%s' % modname
- createmod = 'Py_InitModule("%s", _cffi_methods)' % modname
- errorcase = 'return'
- finalreturn = 'return'
+ constants = self._chained_list_constants[False]
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt()
+ prnt('static struct PyModuleDef _cffi_module_def = {')
+ prnt(' PyModuleDef_HEAD_INIT,')
+ prnt(' "%s",' % modname)
+ prnt(' NULL,')
+ prnt(' -1,')
+ prnt(' _cffi_methods,')
+ prnt(' NULL, NULL, NULL, NULL')
+ prnt('};')
+ prnt()
prnt('PyMODINIT_FUNC')
- prnt('%s(void)' % initname)
+ prnt('PyInit_%s(void)' % modname)
prnt('{')
prnt(' PyObject *lib;')
- prnt(' lib = %s;' % createmod)
- prnt(' if (lib == NULL || %s < 0)' % (
- self._chained_list_constants[False],))
- prnt(' %s;' % errorcase)
- prnt(' _cffi_init();')
- prnt(' %s;' % finalreturn)
+ prnt(' lib = PyModule_Create(&_cffi_module_def);')
+ prnt(' if (lib == NULL)')
+ prnt(' return NULL;')
+ prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+ prnt(' Py_DECREF(lib);')
+ prnt(' return NULL;')
+ prnt(' }')
+ prnt(' return lib;')
prnt('}')
+ prnt()
+ prnt('#else')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
+ prnt(' if (lib == NULL)')
+ prnt(' return;')
+ prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
+ prnt(' return;')
+ prnt(' return;')
+ prnt('}')
+ prnt()
+ prnt('#endif')
def load_library(self):
# XXX review all usages of 'self' here!
@@ -394,7 +405,7 @@
meth = 'METH_O'
else:
meth = 'METH_VARARGS'
- self._prnt(' {"%s", _cffi_f_%s, %s},' % (name, name, meth))
+ self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
_loading_cpy_function = _loaded_noop
@@ -481,8 +492,8 @@
if tp.fldnames is None:
return # nothing to do with opaque structs
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
- self._prnt(' {"%s", %s, METH_NOARGS},' % (layoutfuncname,
- layoutfuncname))
+ self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
+ layoutfuncname))
def _loading_struct_or_union(self, tp, prefix, name, module):
if tp.fldnames is None:
@@ -589,13 +600,7 @@
'variable type'),))
assert delayed
else:
- prnt(' if (LONG_MIN <= (%s) && (%s) <= LONG_MAX)' % (name, name))
- prnt(' o = PyInt_FromLong((long)(%s));' % (name,))
- prnt(' else if ((%s) <= 0)' % (name,))
- prnt(' o = PyLong_FromLongLong((long long)(%s));' % (name,))
- prnt(' else')
- prnt(' o = PyLong_FromUnsignedLongLong('
- '(unsigned long long)(%s));' % (name,))
+ prnt(' o = _cffi_from_c_int_const(%s);' % name)
prnt(' if (o == NULL)')
prnt(' return -1;')
if size_too:
@@ -632,13 +637,18 @@
# ----------
# enums
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
if tp.partial:
for enumerator in tp.enumerators:
self._generate_cpy_const(True, enumerator, delayed=False)
return
#
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
prnt = self._prnt
prnt('static int %s(PyObject *lib)' % funcname)
prnt('{')
@@ -760,17 +770,30 @@
#include <Python.h>
#include <stddef.h>
-#ifdef MS_WIN32
-#include <malloc.h>   /* for alloca() */
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-typedef unsigned char _Bool;
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+ typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# include <alloca.h>
+# endif
#endif
#if PY_MAJOR_VERSION < 3
@@ -795,6 +818,15 @@
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
+#define _cffi_from_c_int_const(x) \
+ (((x) > 0) ? \
+ ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
+ ((long long)(x) >= (long long)LONG_MIN) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromLongLong((long long)(x)))
+
#define _cffi_from_c_int(x, type) \
(((type)-1) > 0 ? /* unsigned */ \
(sizeof(type) < sizeof(long) ? PyInt_FromLong(x) : \
@@ -804,14 +836,14 @@
PyLong_FromLongLong(x)))
#define _cffi_to_c_int(o, type) \
- (sizeof(type) == 1 ? (((type)-1) > 0 ? _cffi_to_c_u8(o) \
- : _cffi_to_c_i8(o)) : \
- sizeof(type) == 2 ? (((type)-1) > 0 ? _cffi_to_c_u16(o) \
- : _cffi_to_c_i16(o)) : \
- sizeof(type) == 4 ? (((type)-1) > 0 ? _cffi_to_c_u32(o) \
- : _cffi_to_c_i32(o)) : \
- sizeof(type) == 8 ? (((type)-1) > 0 ? _cffi_to_c_u64(o) \
- : _cffi_to_c_i64(o)) : \
+ (sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
(Py_FatalError("unsupported size for type " #type), 0))
#define _cffi_to_c_i8 \
@@ -885,25 +917,32 @@
return PyBool_FromLong(was_alive);
}
-static void _cffi_init(void)
+static int _cffi_init(void)
{
- PyObject *module = PyImport_ImportModule("_cffi_backend");
- PyObject *c_api_object;
+ PyObject *module, *c_api_object = NULL;
+ module = PyImport_ImportModule("_cffi_backend");
if (module == NULL)
- return;
+ goto failure;
c_api_object = PyObject_GetAttrString(module, "_C_API");
if (c_api_object == NULL)
- return;
+ goto failure;
if (!PyCapsule_CheckExact(c_api_object)) {
- Py_DECREF(c_api_object);
PyErr_SetNone(PyExc_ImportError);
- return;
+ goto failure;
}
memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
_CFFI_NUM_EXPORTS * sizeof(void *));
+
+ Py_DECREF(module);
Py_DECREF(c_api_object);
+ return 0;
+
+ failure:
+ Py_XDECREF(module);
+ Py_XDECREF(c_api_object);
+ return -1;
}
#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py
--- a/cffi/vengine_gen.py
+++ b/cffi/vengine_gen.py
@@ -249,10 +249,10 @@
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
prnt('}')
self.export_symbols.append(layoutfuncname)
- prnt('ssize_t %s(ssize_t i)' % (layoutfuncname,))
+ prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
prnt('{')
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
- prnt(' static ssize_t nums[] = {')
+ prnt(' static intptr_t nums[] = {')
prnt(' sizeof(%s),' % cname)
prnt(' offsetof(struct _cffi_aligncheck, y),')
for fname, ftype, fbitsize in tp.enumfields():
@@ -276,7 +276,7 @@
return # nothing to do with opaque structs
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
#
- BFunc = self.ffi._typeof_locked("ssize_t(*)(ssize_t)")[0]
+ BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
function = module.load_function(BFunc, layoutfuncname)
layout = []
num = 0
@@ -410,13 +410,18 @@
# ----------
# enums
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
if tp.partial:
for enumerator in tp.enumerators:
self._generate_gen_const(True, enumerator)
return
#
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
self.export_symbols.append(funcname)
prnt = self._prnt
prnt('int %s(char *out_error)' % funcname)
@@ -453,7 +458,7 @@
else:
BType = self.ffi._typeof_locked("char[]")[0]
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
- funcname = '_cffi_e_%s_%s' % (prefix, name)
+ funcname = self._enum_funcname(prefix, name)
function = module.load_function(BFunc, funcname)
p = self.ffi.new(BType, 256)
if function(p) < 0:
@@ -547,20 +552,29 @@
#include <stdio.h>
#include <sys/types.h>   /* XXX for ssize_t on some platforms */
-#ifdef _WIN32
-# include <Windows.h>
-# define snprintf _snprintf
-typedef __int8 int8_t;
-typedef __int16 int16_t;
-typedef __int32 int32_t;
-typedef __int64 int64_t;
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int16 uint16_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-typedef SSIZE_T ssize_t;
-typedef unsigned char _Bool;
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+ typedef unsigned char _Bool;
+# endif
#else
-# include <stdint.h>
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# include <alloca.h>
+# endif
#endif
'''
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.2'
+release = '0.8.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,31 +1,34 @@
CFFI documentation
================================
-Foreign Function Interface for Python calling C code. The aim of this project
-is to provide a convenient and reliable way of calling C code from Python.
-The interface is based on `LuaJIT's FFI`_ and follows a few principles:
+C Foreign Function Interface for Python. The goal is to provide a
+convenient and reliable way to call compiled C code from Python using
+interface declarations written in C.
-* The goal is to call C code from Python. You should be able to do so
- without learning a 3rd language: every alternative requires you to learn
- their own language (Cython_, SWIG_) or API (ctypes_). So we tried to
- assume that you know Python and C and minimize the extra bits of API that
- you need to learn.
+The interface is based on `LuaJIT's FFI`_, and follows a few principles:
+
+* The goal is to call C code from Python without learning a 3rd language:
+ existing alternatives require users to learn domain specific language
+ (Cython_, SWIG_) or API (ctypes_). The CFFI design requires users to know
+ only C and Python, minimizing the extra bits of API that need to be learned.
* Keep all the Python-related logic in Python so that you don't need to
write much C code (unlike `CPython native C extensions`_).
-* Work either at the level of the ABI (Application Binary Interface)
- or the API (Application Programming Interface). Usually, C
- libraries have a specified C API but often not an ABI (e.g. they may
+* The preferred way is to work at the level of the API (Application
+ Programming Interface): the C compiler is called from the declarations
+ you write to validate and link to the C language constructs.
+ Alternatively, it is also possible to work at the ABI level
+ (Application Binary Interface), the way ctypes_ work.
+ However, on non-Windows platforms, C libraries typically
+ have a specified C API but not an ABI (e.g. they may
document a "struct" as having at least these fields, but maybe more).
- (ctypes_ works at the ABI level, whereas Cython_ and `native C extensions`_
- work at the API level.)
-* We try to be complete. For now some C99 constructs are not supported,
+* Try to be complete. For now some C99 constructs are not supported,
but all C89 should be, including macros (and including macro "abuses",
which you can `manually wrap`_ in saner-looking C functions).
-* We attempt to support both PyPy and CPython, with a reasonable path
+* Attempt to support both PyPy and CPython, with a reasonable path
for other Python implementations like IronPython and Jython.
* Note that this project is **not** about embedding executable C code in
@@ -38,7 +41,7 @@
.. _`CPython native C extensions`: http://docs.python.org/extending/extending.html
.. _`native C extensions`: http://docs.python.org/extending/extending.html
.. _`ctypes`: http://docs.python.org/library/ctypes.html
-.. _`Weave`: http://www.scipy.org/Weave
+.. _`Weave`: http://wiki.scipy.org/Weave
.. _`manually wrap`: `The verification step`_
@@ -85,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.2.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.3.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 37fc88c62f40d04e8a18192433f951ec
+ - MD5: ...
- - SHA: 75a6c433664a7a38d4d03cecbdc72cef4c3cceac
+ - SHA: ...
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
@@ -851,7 +854,7 @@
``ffi`` normally caches the string ``"int[]"`` to not need to re-parse
it all the time.
-.. versionadded:: 0.9
+.. versionadded:: 0.8.2
The ``ffi.cdef()`` call takes an optional argument ``packed``: if
True, then all structs declared within this cdef are "packed". This
has a meaning similar to ``__attribute__((packed))`` in GCC. It
@@ -1195,13 +1198,14 @@
owned memory will not be freed as long as the buffer is alive.
Moreover buffer objects now support weakrefs to them.
-.. versionchanged:: 0.9
- Before version 0.9, ``bytes(buf)`` was supported in Python 3 to get
+.. versionchanged:: 0.8.2
+ Before version 0.8.2, ``bytes(buf)`` was supported in Python 3 to get
the content of the buffer, but on Python 2 it would return the repr
``<_cffi_backend.buffer object>``. This has been fixed. But you
should avoid using ``str(buf)``: it now gives inconsistent results
between Python 2 and Python 3 (this is similar to how ``str()``
- gives inconsistent results on regular byte strings).
+ gives inconsistent results on regular byte strings). Use ``buf[:]``
+ instead.
``ffi.typeof("C type" or cdata object)``: return an object of type
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -42,25 +42,14 @@
resultlist[:] = res
def ask_supports_thread():
- if sys.platform == "darwin":
- sys.stderr.write("OS/X: confusion between 'cc' versus 'gcc'")
- sys.stderr.write(" (see issue 123)\n")
- sys.stderr.write("will not use '__thread' in the C code\n")
- return
- import distutils.errors
- from distutils.ccompiler import new_compiler
- compiler = new_compiler(force=1)
- try:
- compiler.compile(['c/check__thread.c'])
- except distutils.errors.CompileError:
- sys.stderr.write("the above error message can be safely ignored;\n")
- sys.stderr.write("will not use '__thread' in the C code\n")
+ from distutils.core import Distribution
+ config = Distribution().get_command_obj('config')
+ ok = config.try_compile('__thread int some_threadlocal_variable_42;')
+ if ok:
+ define_macros.append(('USE__THREAD', None))
else:
- define_macros.append(('USE__THREAD', None))
- try:
- os.unlink('c/check__thread.o')
- except OSError:
- pass
+ sys.stderr.write("Note: will not use '__thread' in the C code\n")
+ sys.stderr.write("The above error message can be safely ignored\n")
def use_pkg_config():
_ask_pkg_config(include_dirs, '--cflags-only-I', '-I', sysroot=True)
@@ -124,7 +113,7 @@
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
""",
- version='0.8.2',
+ version='0.8.3',
packages=['cffi'],
zip_safe=False,
diff --git a/testing/backend_tests.py b/testing/backend_tests.py
--- a/testing/backend_tests.py
+++ b/testing/backend_tests.py
@@ -865,25 +865,25 @@
def test_enum(self):
ffi = FFI(backend=self.Backend())
- ffi.cdef("enum foo { A, B, CC, D };")
- assert ffi.string(ffi.cast("enum foo", 0)) == "A"
- assert ffi.string(ffi.cast("enum foo", 2)) == "CC"
- assert ffi.string(ffi.cast("enum foo", 3)) == "D"
+ ffi.cdef("enum foo { A0, B0, CC0, D0 };")
+ assert ffi.string(ffi.cast("enum foo", 0)) == "A0"
+ assert ffi.string(ffi.cast("enum foo", 2)) == "CC0"
+ assert ffi.string(ffi.cast("enum foo", 3)) == "D0"
assert ffi.string(ffi.cast("enum foo", 4)) == "4"
- ffi.cdef("enum bar { A, B=-2, CC, D, E };")
- assert ffi.string(ffi.cast("enum bar", 0)) == "A"
- assert ffi.string(ffi.cast("enum bar", -2)) == "B"
- assert ffi.string(ffi.cast("enum bar", -1)) == "CC"
- assert ffi.string(ffi.cast("enum bar", 1)) == "E"
+ ffi.cdef("enum bar { A1, B1=-2, CC1, D1, E1 };")
+ assert ffi.string(ffi.cast("enum bar", 0)) == "A1"
+ assert ffi.string(ffi.cast("enum bar", -2)) == "B1"
+ assert ffi.string(ffi.cast("enum bar", -1)) == "CC1"
+ assert ffi.string(ffi.cast("enum bar", 1)) == "E1"
assert ffi.cast("enum bar", -2) != ffi.cast("enum bar", -2)
assert ffi.cast("enum foo", 0) != ffi.cast("enum bar", 0)
assert ffi.cast("enum bar", 0) != ffi.cast("int", 0)
- assert repr(ffi.cast("enum bar", -1)) == ""
+ assert repr(ffi.cast("enum bar", -1)) == ""
assert repr(ffi.cast("enum foo", -1)) == ( # enums are unsigned, if
"") # they contain no neg value
- ffi.cdef("enum baz { A=0x1000, B=0x2000 };")
- assert ffi.string(ffi.cast("enum baz", 0x1000)) == "A"
- assert ffi.string(ffi.cast("enum baz", 0x2000)) == "B"
+ ffi.cdef("enum baz { A2=0x1000, B2=0x2000 };")
+ assert ffi.string(ffi.cast("enum baz", 0x1000)) == "A2"
+ assert ffi.string(ffi.cast("enum baz", 0x2000)) == "B2"
def test_enum_in_struct(self):
ffi = FFI(backend=self.Backend())
@@ -1322,6 +1322,16 @@
e = ffi.cast("enum e", 0)
assert ffi.string(e) == "AA" # pick the first one arbitrarily
+ def test_enum_refer_previous_enum_value(self):
+ ffi = FFI(backend=self.Backend())
+ ffi.cdef("enum e { AA, BB=2, CC=4, DD=BB, EE, FF=CC, GG=FF };")
+ assert ffi.string(ffi.cast("enum e", 2)) == "BB"
+ assert ffi.string(ffi.cast("enum e", 3)) == "EE"
+ assert ffi.sizeof("char[DD]") == 2
+ assert ffi.sizeof("char[EE]") == 3
+ assert ffi.sizeof("char[FF]") == 4
+ assert ffi.sizeof("char[GG]") == 4
+
def test_nested_anonymous_struct(self):
ffi = FFI(backend=self.Backend())
ffi.cdef("""
@@ -1543,6 +1553,7 @@
ffi2.include(ffi1)
p = ffi2.cast("enum foo", 1)
assert ffi2.string(p) == "FB"
+ assert ffi2.sizeof("char[FC]") == 2
def test_include_typedef_2(self):
backend = self.Backend()
@@ -1570,3 +1581,25 @@
assert s[0].a == b'X'
assert s[1].b == -4892220
assert s[1].a == b'Y'
+
+ def test_define_integer_constant(self):
+ ffi = FFI(backend=self.Backend())
+ ffi.cdef("""
+ #define DOT_0 0
+ #define DOT 100
+ #define DOT_OCT 0100l
+ #define DOT_HEX 0x100u
+ #define DOT_HEX2 0X10
+ #define DOT_UL 1000UL
+ enum foo {AA, BB=DOT, CC};
+ """)
+ lib = ffi.dlopen(None)
+ assert ffi.string(ffi.cast("enum foo", 100)) == "BB"
+ assert lib.DOT_0 == 0
+ assert lib.DOT == 100
+ assert lib.DOT_OCT == 0o100
+ assert lib.DOT_HEX == 0x100
+ assert lib.DOT_HEX2 == 0x10
+ assert lib.DOT_UL == 1000
+
+
diff --git a/testing/test_function.py b/testing/test_function.py
--- a/testing/test_function.py
+++ b/testing/test_function.py
@@ -402,3 +402,18 @@
if wr() is not None:
import gc; gc.collect()
assert wr() is None # 'data' does not leak
+
+ def test_windows_stdcall(self):
+ if sys.platform != 'win32':
+ py.test.skip("Windows-only test")
+ if self.Backend is CTypesBackend:
+ py.test.skip("not with the ctypes backend")
+ ffi = FFI(backend=self.Backend())
+ ffi.cdef("""
+ BOOL QueryPerformanceFrequency(LONGLONG *lpFrequency);
+ """)
+ m = ffi.dlopen("Kernel32.dll")
+ p_freq = ffi.new("LONGLONG *")
+ res = m.QueryPerformanceFrequency(p_freq)
+ assert res != 0
+ assert p_freq[0] != 0
diff --git a/testing/test_parsing.py b/testing/test_parsing.py
--- a/testing/test_parsing.py
+++ b/testing/test_parsing.py
@@ -161,9 +161,10 @@
def test_define_not_supported_for_now():
ffi = FFI(backend=FakeBackend())
- e = py.test.raises(CDefError, ffi.cdef, "#define FOO 42")
- assert str(e.value) == \
- 'only supports the syntax "#define FOO ..." for now (literally)'
+ e = py.test.raises(CDefError, ffi.cdef, '#define FOO "blah"')
+ assert str(e.value) == (
+ 'only supports the syntax "#define FOO ..." (literally)'
+ ' or "#define FOO 0x1FF" for now')
def test_unnamed_struct():
ffi = FFI(backend=FakeBackend())
diff --git a/testing/test_verify.py b/testing/test_verify.py
--- a/testing/test_verify.py
+++ b/testing/test_verify.py
@@ -1,4 +1,4 @@
-import py
+import py, re
import sys, os, math, weakref
from cffi import FFI, VerificationError, VerificationMissing, model
from testing.support import *
@@ -29,6 +29,24 @@
def setup_module():
import cffi.verifier
cffi.verifier.cleanup_tmpdir()
+ #
+ # check that no $ sign is produced in the C file; it used to be the
+ # case that anonymous enums would produce '$enum_$1', which was
+ # used as part of a function name. GCC accepts such names, but it's
+ # apparently non-standard.
+ _r_comment = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE)
+ _r_string = re.compile(r'\".*?\"')
+ def _write_source_and_check(self, file=None):
+ base_write_source(self, file)
+ if file is None:
+ f = open(self.sourcefilename)
+ data = f.read()
+ f.close()
+ data = _r_comment.sub(' ', data)
+ data = _r_string.sub('"skipped"', data)
+ assert '$' not in data
+ base_write_source = cffi.verifier.Verifier._write_source
+ cffi.verifier.Verifier._write_source = _write_source_and_check
def test_module_type():
@@ -153,6 +171,9 @@
all_primitive_types = model.PrimitiveType.ALL_PRIMITIVE_TYPES
+if sys.platform == 'win32':
+ all_primitive_types = all_primitive_types.copy()
+ del all_primitive_types['ssize_t']
all_integer_types = sorted(tp for tp in all_primitive_types
if all_primitive_types[tp] == 'i')
all_float_types = sorted(tp for tp in all_primitive_types
@@ -1452,8 +1473,8 @@
assert func() == 42
def test_FILE_stored_in_stdout():
- if sys.platform == 'win32':
- py.test.skip("MSVC: cannot assign to stdout")
+ if not sys.platform.startswith('linux'):
+ py.test.skip("likely, we cannot assign to stdout")
ffi = FFI()
ffi.cdef("int printf(const char *, ...); FILE *setstdout(FILE *);")
lib = ffi.verify("""
@@ -1636,8 +1657,8 @@
ffi = FFI()
ffi.cdef("""
int (*python_callback)(int how_many, int *values);
- void *const c_callback; /* pass this ptr to C routines */
- int some_c_function(void *cb);
+ int (*const c_callback)(int,...); /* pass this ptr to C routines */
+ int some_c_function(int(*cb)(int,...));
""")
lib = ffi.verify("""
#include
@@ -1884,3 +1905,60 @@
p = lib.f2(42)
x = lib.f1(p)
assert x == 42
+
+def _run_in_multiple_threads(test1):
+ test1()
+ import sys
+ try:
+ import thread
+ except ImportError:
+ import _thread as thread
+ errors = []
+ def wrapper(lock):
+ try:
+ test1()
+ except:
+ errors.append(sys.exc_info())
+ lock.release()
+ locks = []
+ for i in range(10):
+ _lock = thread.allocate_lock()
+ _lock.acquire()
+ thread.start_new_thread(wrapper, (_lock,))
+ locks.append(_lock)
+ for _lock in locks:
+ _lock.acquire()
+ if errors:
+ raise errors[0][1]
+
+def test_errno_working_even_with_pypys_jit():
+ ffi = FFI()
+ ffi.cdef("int f(int);")
+ lib = ffi.verify("""
+ #include <errno.h>
+ int f(int x) { return (errno = errno + x); }
+ """)
+ @_run_in_multiple_threads
+ def test1():
+ ffi.errno = 0
+ for i in range(10000):
+ e = lib.f(1)
+ assert e == i + 1
+ assert ffi.errno == e
+ for i in range(10000):
+ ffi.errno = i
+ e = lib.f(42)
+ assert e == i + 42
+
+def test_getlasterror_working_even_with_pypys_jit():
+ if sys.platform != 'win32':
+ py.test.skip("win32-only test")
+ ffi = FFI()
+ ffi.cdef("void SetLastError(DWORD);")
+ lib = ffi.dlopen("Kernel32.dll")
+ @_run_in_multiple_threads
+ def test1():
+ for i in range(10000):
+ n = (1 << 29) + i
+ lib.SetLastError(n)
+ assert ffi.getwinerror()[0] == n
From noreply at buildbot.pypy.org Sat Jul 5 17:05:38 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 17:05:38 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: Update MD5/SHA
Message-ID: <20140705150538.453F21C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1536:91e72d4a59d9
Date: 2014-07-05 17:05 +0200
http://bitbucket.org/cffi/cffi/changeset/91e72d4a59d9/
Log: Update MD5/SHA
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -92,9 +92,9 @@
- Or grab the most current version by following the instructions below.
- - MD5: ...
+ - MD5: 57e140a7d475f58bada8f2ada3f5749e
- - SHA: ...
+ - SHA: 4fd222f3044b9210476255d753c0bb22b8050f99
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
From noreply at buildbot.pypy.org Sat Jul 5 17:32:01 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 17:32:01 +0200 (CEST)
Subject: [pypy-commit] pypy default: One more ignored instruction
Message-ID: <20140705153201.14AE61C3334@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72361:85672cabac67
Date: 2014-07-05 17:31 +0200
http://bitbucket.org/pypy/pypy/changeset/85672cabac67/
Log: One more ignored instruction
diff --git a/rpython/translator/c/gcc/trackgcroot.py b/rpython/translator/c/gcc/trackgcroot.py
--- a/rpython/translator/c/gcc/trackgcroot.py
+++ b/rpython/translator/c/gcc/trackgcroot.py
@@ -523,6 +523,8 @@
'movnt', 'mfence', 'lfence', 'sfence',
# bit manipulations
'andn', 'bextr', 'blsi', 'blsmask', 'blsr', 'tzcnt', 'lzcnt',
+ # uh, this can occur with a 'call' on the following line...
+ 'rex64',
])
# a partial list is hopefully good enough for now; it's all to support
From noreply at buildbot.pypy.org Sat Jul 5 18:40:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 18:40:46 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: A workaround by Alex Gaynor for a
bug in distutils that shows up on OS/X
Message-ID: <20140705164046.AD0001C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1537:c63026a567f1
Date: 2014-07-05 18:40 +0200
http://bitbucket.org/cffi/cffi/changeset/c63026a567f1/
Log: A workaround by Alex Gaynor for a bug in distutils that shows up on
OS/X
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -43,6 +43,8 @@
def ask_supports_thread():
from distutils.core import Distribution
+ from distutils.sysconfig import get_config_vars
+ get_config_vars() # workaround for a bug of distutils, e.g. on OS/X
config = Distribution().get_command_obj('config')
ok = config.try_compile('__thread int some_threadlocal_variable_42;')
if ok:
From noreply at buildbot.pypy.org Sat Jul 5 18:53:24 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 18:53:24 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: Update to 0.8.4
Message-ID: <20140705165324.D6A1E1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1538:39abae73cbd4
Date: 2014-07-05 18:53 +0200
http://bitbucket.org/cffi/cffi/changeset/39abae73cbd4/
Log: Update to 0.8.4
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.3"
-__version_info__ = (0, 8, 3)
+__version__ = "0.8.4"
+__version_info__ = (0, 8, 4)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.3'
+release = '0.8.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.3.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.4.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 57e140a7d475f58bada8f2ada3f5749e
+ - MD5: ...
- - SHA: 4fd222f3044b9210476255d753c0bb22b8050f99
+ - SHA: ...
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.3',
+ version='0.8.4',
packages=['cffi'],
zip_safe=False,
diff --git a/testing/test_version.py b/testing/test_version.py
--- a/testing/test_version.py
+++ b/testing/test_version.py
@@ -10,6 +10,7 @@
'0.7.1': '0.7', # did not change
'0.7.2': '0.7', # did not change
'0.8.1': '0.8', # did not change (essentially)
+ '0.8.4': '0.8.3', # did not change
}
def test_version():
From noreply at buildbot.pypy.org Sat Jul 5 18:55:19 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 18:55:19 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: Add the MD5/SHA
Message-ID: <20140705165519.E2ADA1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1539:19a9c5b072f3
Date: 2014-07-05 18:55 +0200
http://bitbucket.org/cffi/cffi/changeset/19a9c5b072f3/
Log: Add the MD5/SHA
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -92,9 +92,9 @@
- Or grab the most current version by following the instructions below.
- - MD5: ...
+ - MD5: 148894125d3fa696b418dc6559818f7a
- - SHA: ...
+ - SHA: 754ad62d0868bd48f34b2a5818575493e15b5514
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
From noreply at buildbot.pypy.org Sat Jul 5 18:55:21 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 18:55:21 +0200 (CEST)
Subject: [pypy-commit] cffi default: hg merge release-0.8
Message-ID: <20140705165521.1DFE21C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1540:41a3446cfe40
Date: 2014-07-05 18:55 +0200
http://bitbucket.org/cffi/cffi/changeset/41a3446cfe40/
Log: hg merge release-0.8
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.3"
-__version_info__ = (0, 8, 3)
+__version__ = "0.8.4"
+__version_info__ = (0, 8, 4)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.3'
+release = '0.8.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.3.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.4.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: ...
+ - MD5: 148894125d3fa696b418dc6559818f7a
- - SHA: ...
+ - SHA: 754ad62d0868bd48f34b2a5818575493e15b5514
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -43,6 +43,8 @@
def ask_supports_thread():
from distutils.core import Distribution
+ from distutils.sysconfig import get_config_vars
+ get_config_vars() # workaround for a bug of distutils, e.g. on OS/X
config = Distribution().get_command_obj('config')
ok = config.try_compile('__thread int some_threadlocal_variable_42;')
if ok:
@@ -113,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.3',
+ version='0.8.4',
packages=['cffi'],
zip_safe=False,
diff --git a/testing/test_version.py b/testing/test_version.py
--- a/testing/test_version.py
+++ b/testing/test_version.py
@@ -10,6 +10,7 @@
'0.7.1': '0.7', # did not change
'0.7.2': '0.7', # did not change
'0.8.1': '0.8', # did not change (essentially)
+ '0.8.4': '0.8.3', # did not change
}
def test_version():
From noreply at buildbot.pypy.org Sat Jul 5 19:12:24 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 19:12:24 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: Argh. The version 0.8.4 works with
the backend "0.8.4" or "0.8", but
Message-ID: <20140705171224.1C7CB1C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1541:9b86d5f7007f
Date: 2014-07-05 19:12 +0200
http://bitbucket.org/cffi/cffi/changeset/9b86d5f7007f/
Log: Argh. The version 0.8.4 works with the backend "0.8.4" or "0.8",
but not "0.8.3". As a result, the release 0.8.4 is completely
unusable. Get rid of the possibility to have some different version
numbers in the backend, and prepare for 0.8.5...
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5504,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.3");
+ v = PyText_FromString("0.8.5");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.3"
+ assert __version__ == "0.8.5"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.4"
-__version_info__ = (0, 8, 4)
+__version__ = "0.8.5"
+__version_info__ = (0, 8, 5)
diff --git a/cffi/api.py b/cffi/api.py
--- a/cffi/api.py
+++ b/cffi/api.py
@@ -55,8 +55,7 @@
# _cffi_backend.so compiled.
import _cffi_backend as backend
from . import __version__
- assert (backend.__version__ == __version__ or
- backend.__version__ == __version__[:3])
+ assert backend.__version__ == __version__
# (If you insist you can also try to pass the option
# 'backend=backend_ctypes.CTypesBackend()', but don't
# rely on it! It's probably not going to work well.)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.4'
+release = '0.8.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.4.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.5.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 148894125d3fa696b418dc6559818f7a
+ - MD5: ...
- - SHA: 754ad62d0868bd48f34b2a5818575493e15b5514
+ - SHA: ...
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.4',
+ version='0.8.5',
packages=['cffi'],
zip_safe=False,
diff --git a/testing/test_version.py b/testing/test_version.py
--- a/testing/test_version.py
+++ b/testing/test_version.py
@@ -5,19 +5,20 @@
if '_cffi_backend' in sys.builtin_module_names:
py.test.skip("this is embedded version")
-BACKEND_VERSIONS = {
- '0.4.2': '0.4', # did not change
- '0.7.1': '0.7', # did not change
- '0.7.2': '0.7', # did not change
- '0.8.1': '0.8', # did not change (essentially)
- '0.8.4': '0.8.3', # did not change
- }
+#BACKEND_VERSIONS = {
+# '0.4.2': '0.4', # did not change
+# '0.7.1': '0.7', # did not change
+# '0.7.2': '0.7', # did not change
+# '0.8.1': '0.8', # did not change (essentially)
+# '0.8.4': '0.8.3', # did not change
+# }
def test_version():
v = cffi.__version__
version_info = '.'.join(str(i) for i in cffi.__version_info__)
assert v == version_info
- assert BACKEND_VERSIONS.get(v, v) == _cffi_backend.__version__
+ #v = BACKEND_VERSIONS.get(v, v)
+ assert v == _cffi_backend.__version__
def test_doc_version():
parent = os.path.dirname(os.path.dirname(__file__))
@@ -48,5 +49,5 @@
v = cffi.__version__
p = os.path.join(parent, 'c', 'test_c.py')
content = open(p).read()
- assert (('assert __version__ == "%s"' % BACKEND_VERSIONS.get(v, v))
- in content)
+ #v = BACKEND_VERSIONS.get(v, v)
+ assert (('assert __version__ == "%s"' % v) in content)
From noreply at buildbot.pypy.org Sat Jul 5 19:16:27 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 19:16:27 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: MD5/SHA
Message-ID: <20140705171627.8C4871C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1542:9c34ce4ba70e
Date: 2014-07-05 19:16 +0200
http://bitbucket.org/cffi/cffi/changeset/9c34ce4ba70e/
Log: MD5/SHA
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -92,9 +92,9 @@
- Or grab the most current version by following the instructions below.
- - MD5: ...
+ - MD5: 964981f3fada08abbe9a6f8948f3a4c3
- - SHA: ...
+ - SHA: f921b0ad5360c58a87c927b63d5a177ac3e8847d
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
From noreply at buildbot.pypy.org Sat Jul 5 19:16:28 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 19:16:28 +0200 (CEST)
Subject: [pypy-commit] cffi default: hg merge release-0.8
Message-ID: <20140705171628.D5BEC1C0ECA@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1543:76f18e78b377
Date: 2014-07-05 19:16 +0200
http://bitbucket.org/cffi/cffi/changeset/76f18e78b377/
Log: hg merge release-0.8
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5504,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.3");
+ v = PyText_FromString("0.8.5");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.3"
+ assert __version__ == "0.8.5"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.4"
-__version_info__ = (0, 8, 4)
+__version__ = "0.8.5"
+__version_info__ = (0, 8, 5)
diff --git a/cffi/api.py b/cffi/api.py
--- a/cffi/api.py
+++ b/cffi/api.py
@@ -55,8 +55,7 @@
# _cffi_backend.so compiled.
import _cffi_backend as backend
from . import __version__
- assert (backend.__version__ == __version__ or
- backend.__version__ == __version__[:3])
+ assert backend.__version__ == __version__
# (If you insist you can also try to pass the option
# 'backend=backend_ctypes.CTypesBackend()', but don't
# rely on it! It's probably not going to work well.)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.4'
+release = '0.8.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.4.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.5.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 148894125d3fa696b418dc6559818f7a
+ - MD5: 964981f3fada08abbe9a6f8948f3a4c3
- - SHA: 754ad62d0868bd48f34b2a5818575493e15b5514
+ - SHA: f921b0ad5360c58a87c927b63d5a177ac3e8847d
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.4',
+ version='0.8.5',
packages=['cffi'],
zip_safe=False,
diff --git a/testing/test_version.py b/testing/test_version.py
--- a/testing/test_version.py
+++ b/testing/test_version.py
@@ -5,19 +5,20 @@
if '_cffi_backend' in sys.builtin_module_names:
py.test.skip("this is embedded version")
-BACKEND_VERSIONS = {
- '0.4.2': '0.4', # did not change
- '0.7.1': '0.7', # did not change
- '0.7.2': '0.7', # did not change
- '0.8.1': '0.8', # did not change (essentially)
- '0.8.4': '0.8.3', # did not change
- }
+#BACKEND_VERSIONS = {
+# '0.4.2': '0.4', # did not change
+# '0.7.1': '0.7', # did not change
+# '0.7.2': '0.7', # did not change
+# '0.8.1': '0.8', # did not change (essentially)
+# '0.8.4': '0.8.3', # did not change
+# }
def test_version():
v = cffi.__version__
version_info = '.'.join(str(i) for i in cffi.__version_info__)
assert v == version_info
- assert BACKEND_VERSIONS.get(v, v) == _cffi_backend.__version__
+ #v = BACKEND_VERSIONS.get(v, v)
+ assert v == _cffi_backend.__version__
def test_doc_version():
parent = os.path.dirname(os.path.dirname(__file__))
@@ -48,5 +49,5 @@
v = cffi.__version__
p = os.path.join(parent, 'c', 'test_c.py')
content = open(p).read()
- assert (('assert __version__ == "%s"' % BACKEND_VERSIONS.get(v, v))
- in content)
+ #v = BACKEND_VERSIONS.get(v, v)
+ assert (('assert __version__ == "%s"' % v) in content)
From noreply at buildbot.pypy.org Sat Jul 5 19:41:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 19:41:46 +0200 (CEST)
Subject: [pypy-commit] cffi default: Fix for Windows,
which doesn't have a reasonable snprintf()
Message-ID: <20140705174146.3191E1C024A@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1544:079492211215
Date: 2014-07-05 19:41 +0200
http://bitbucket.org/cffi/cffi/changeset/079492211215/
Log: Fix for Windows, which doesn't have a reasonable snprintf()
diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py
--- a/cffi/vengine_gen.py
+++ b/cffi/vengine_gen.py
@@ -435,14 +435,14 @@
enumerator, enumerator, enumvalue))
prnt(' char buf[64];')
prnt(' if ((%s) < 0)' % enumerator)
- prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator)
+ prnt(' sprintf(buf, "%%ld", (long)(%s));' % enumerator)
prnt(' else')
- prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
+ prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
enumerator)
- prnt(' snprintf(out_error, 255,'
+ prnt(' sprintf(out_error,'
' "%s has the real value %s, not %s",')
prnt(' "%s", buf, "%d");' % (
- enumerator, enumvalue))
+ enumerator[:100], enumvalue))
prnt(' return -1;')
prnt(' }')
prnt(' return 0;')
From noreply at buildbot.pypy.org Sat Jul 5 20:08:10 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 20:08:10 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: hg merge default
Message-ID: <20140705180810.3584F1C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1545:4573982bcf68
Date: 2014-07-05 19:49 +0200
http://bitbucket.org/cffi/cffi/changeset/4573982bcf68/
Log: hg merge default
diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py
--- a/cffi/vengine_gen.py
+++ b/cffi/vengine_gen.py
@@ -435,14 +435,14 @@
enumerator, enumerator, enumvalue))
prnt(' char buf[64];')
prnt(' if ((%s) < 0)' % enumerator)
- prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator)
+ prnt(' sprintf(buf, "%%ld", (long)(%s));' % enumerator)
prnt(' else')
- prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
+ prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
enumerator)
- prnt(' snprintf(out_error, 255,'
+ prnt(' sprintf(out_error,'
' "%s has the real value %s, not %s",')
prnt(' "%s", buf, "%d");' % (
- enumerator, enumvalue))
+ enumerator[:100], enumvalue))
prnt(' return -1;')
prnt(' }')
prnt(' return 0;')
From noreply at buildbot.pypy.org Sat Jul 5 20:08:11 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 20:08:11 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: Prepare release 0.8.6
Message-ID: <20140705180811.94FDE1C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1546:452a1c1a5005
Date: 2014-07-05 20:06 +0200
http://bitbucket.org/cffi/cffi/changeset/452a1c1a5005/
Log: Prepare release 0.8.6
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5504,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.5");
+ v = PyText_FromString("0.8.6");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.5"
+ assert __version__ == "0.8.6"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.5"
-__version_info__ = (0, 8, 5)
+__version__ = "0.8.6"
+__version_info__ = (0, 8, 6)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.5'
+release = '0.8.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.5.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.6.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 964981f3fada08abbe9a6f8948f3a4c3
+ - MD5: ...
- - SHA: f921b0ad5360c58a87c927b63d5a177ac3e8847d
+ - SHA: ...
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.5',
+ version='0.8.6',
packages=['cffi'],
zip_safe=False,
From noreply at buildbot.pypy.org Sat Jul 5 20:08:12 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 20:08:12 +0200 (CEST)
Subject: [pypy-commit] cffi release-0.8: update MD5/SHA
Message-ID: <20140705180812.EC1A21C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: release-0.8
Changeset: r1547:ca52363ff6ac
Date: 2014-07-05 20:08 +0200
http://bitbucket.org/cffi/cffi/changeset/ca52363ff6ac/
Log: update MD5/SHA
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -92,9 +92,9 @@
- Or grab the most current version by following the instructions below.
- - MD5: ...
+ - MD5: 474b5a68299a6f05009171de1dc91be6
- - SHA: ...
+ - SHA: 4e82390201e6f30e9df8a91cd176df19b8f2d547
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
From noreply at buildbot.pypy.org Sat Jul 5 20:08:14 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sat, 5 Jul 2014 20:08:14 +0200 (CEST)
Subject: [pypy-commit] cffi default: hg merge release-0.8
Message-ID: <20140705180814.5B9111C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1548:59fd1de71875
Date: 2014-07-05 20:08 +0200
http://bitbucket.org/cffi/cffi/changeset/59fd1de71875/
Log: hg merge release-0.8
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -5504,7 +5504,7 @@
if (v == NULL || PyModule_AddObject(m, "_C_API", v) < 0)
INITERROR;
- v = PyText_FromString("0.8.5");
+ v = PyText_FromString("0.8.6");
if (v == NULL || PyModule_AddObject(m, "__version__", v) < 0)
INITERROR;
diff --git a/c/test_c.py b/c/test_c.py
--- a/c/test_c.py
+++ b/c/test_c.py
@@ -3199,4 +3199,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.5"
+ assert __version__ == "0.8.6"
diff --git a/cffi/__init__.py b/cffi/__init__.py
--- a/cffi/__init__.py
+++ b/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8.5"
-__version_info__ = (0, 8, 5)
+__version__ = "0.8.6"
+__version_info__ = (0, 8, 6)
diff --git a/doc/source/conf.py b/doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
-release = '0.8.5'
+release = '0.8.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -88,13 +88,13 @@
Download and Installation:
-* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.5.tar.gz
+* http://pypi.python.org/packages/source/c/cffi/cffi-0.8.6.tar.gz
- Or grab the most current version by following the instructions below.
- - MD5: 964981f3fada08abbe9a6f8948f3a4c3
+ - MD5: 474b5a68299a6f05009171de1dc91be6
- - SHA: f921b0ad5360c58a87c927b63d5a177ac3e8847d
+ - SHA: 4e82390201e6f30e9df8a91cd176df19b8f2d547
* Or get it from the `Bitbucket page`_:
``hg clone https://bitbucket.org/cffi/cffi``
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
`Mailing list `_
""",
- version='0.8.5',
+ version='0.8.6',
packages=['cffi'],
zip_safe=False,
From noreply at buildbot.pypy.org Sat Jul 5 22:45:51 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:51 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Rename gc_header ->
needs_gc_header.
Message-ID: <20140705204551.96AC91C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72362:5a07adb52ffb
Date: 2014-07-05 22:38 +0200
http://bitbucket.org/pypy/pypy/changeset/5a07adb52ffb/
Log: Rename gc_header -> needs_gc_header.
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -41,7 +41,7 @@
class Type(object):
varsize = False
- gc_header = False
+ needs_gc_header = False
def repr_type(self, extra_len=None):
return self.typestr
@@ -415,11 +415,11 @@
class StructType(Type):
- def setup(self, name, fields, gc_header):
+ def setup(self, name, fields, needs_gc_header):
self.name = name
- self.gc_header = gc_header
+ self.needs_gc_header = needs_gc_header
fields = list(fields)
- if gc_header:
+ if needs_gc_header:
fields = database.genllvm.gcpolicy.get_gc_fields() + fields
elif all(t is LLVMVoid for t, f in fields):
fields.append((LLVMSigned, '_fill'))
@@ -437,8 +437,8 @@
return
fields = ((db.get_type(type._flds[f]), f) for f in type._names)
is_gc = type._gckind == 'gc'
- gc_header = is_gc and type._first_struct() == (None, None)
- self.setup('%' + type._name, fields, gc_header)
+ needs_gc_header = is_gc and type._first_struct() == (None, None)
+ self.setup('%' + type._name, fields, needs_gc_header)
def repr_type(self, extra_len=None):
if extra_len not in self.size_variants:
@@ -463,7 +463,7 @@
return self.name[1:]
def is_zero(self, value):
- if self.gc_header:
+ if self.needs_gc_header:
return False
elif self.fldnames_wo_voids == ['_fill']:
return True
@@ -476,7 +476,7 @@
def repr_value(self, value, extra_len=None):
if self.is_zero(value):
return 'zeroinitializer'
- if self.gc_header:
+ if self.needs_gc_header:
data = database.genllvm.gcpolicy.get_gc_field_values(value)
data.extend(getattr(value, fn) for _, fn in self.fields[1:])
else:
@@ -576,7 +576,7 @@
varsize = True
def setup(self, of, is_gc):
- self.gc_header = is_gc
+ self.needs_gc_header = is_gc
self.bare_array_type = BareArrayType()
self.bare_array_type.setup(of, None)
self.struct_type = StructType()
@@ -605,7 +605,7 @@
return self.struct_type.repr_type_and_value(ArrayHelper(value))
def add_indices(self, gep, index):
- if self.gc_header:
+ if self.needs_gc_header:
gep.add_field_index(2)
else:
gep.add_field_index(1)
@@ -749,7 +749,7 @@
class_ = _LL_TO_LLVM[type.__class__]
self.types[type] = ret = class_()
ret.setup_from_lltype(self, type)
- if ret.gc_header:
+ if ret.needs_gc_header:
_llvm_needs_header[type] = database.genllvm.gcpolicy \
.get_gc_fields_lltype() # hint for ll2ctypes
return ret
@@ -1227,7 +1227,7 @@
self.w('{result.V} = add {result.T} 0, {type.length}'
.format(**locals()))
else:
- if type.gc_header:
+ if type.needs_gc_header:
gep.add_field_index(1)
else:
gep.add_field_index(0)
From noreply at buildbot.pypy.org Sat Jul 5 22:45:52 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:52 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Close head (the parent
of this commit was pushed by accident).
Message-ID: <20140705204552.D93AA1C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72363:640b6e023555
Date: 2014-07-05 22:42 +0200
http://bitbucket.org/pypy/pypy/changeset/640b6e023555/
Log: Close head (the parent of this commit was pushed by accident).
From noreply at buildbot.pypy.org Sat Jul 5 22:45:54 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:54 +0200 (CEST)
Subject: [pypy-commit] pypy closed-branches: Merge closed head 32435d62aa33
on branch gc-pinning
Message-ID: <20140705204554.4689E1C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: closed-branches
Changeset: r72364:a9e0f15c33e3
Date: 2014-07-05 22:44 +0200
http://bitbucket.org/pypy/pypy/changeset/a9e0f15c33e3/
Log: Merge closed head 32435d62aa33 on branch gc-pinning
From noreply at buildbot.pypy.org Sat Jul 5 22:45:55 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:55 +0200 (CEST)
Subject: [pypy-commit] pypy closed-branches: Merge closed head d79aec73fa3c
on branch gc-two-end-nursery
Message-ID: <20140705204555.668D81C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: closed-branches
Changeset: r72365:52d662f367bc
Date: 2014-07-05 22:44 +0200
http://bitbucket.org/pypy/pypy/changeset/52d662f367bc/
Log: Merge closed head d79aec73fa3c on branch gc-two-end-nursery
From noreply at buildbot.pypy.org Sat Jul 5 22:45:56 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:56 +0200 (CEST)
Subject: [pypy-commit] pypy closed-branches: Merge closed head 640b6e023555
on branch llvm-translation-backend
Message-ID: <20140705204556.9AB321C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: closed-branches
Changeset: r72366:b8457b91c09a
Date: 2014-07-05 22:44 +0200
http://bitbucket.org/pypy/pypy/changeset/b8457b91c09a/
Log: Merge closed head 640b6e023555 on branch llvm-translation-backend
From noreply at buildbot.pypy.org Sat Jul 5 22:45:57 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Sat, 5 Jul 2014 22:45:57 +0200 (CEST)
Subject: [pypy-commit] pypy closed-branches: re-close this branch
Message-ID: <20140705204557.BD7E11C024A@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: closed-branches
Changeset: r72367:613891c5da29
Date: 2014-07-05 22:44 +0200
http://bitbucket.org/pypy/pypy/changeset/613891c5da29/
Log: re-close this branch
From noreply at buildbot.pypy.org Sun Jul 6 14:36:08 2014
From: noreply at buildbot.pypy.org (mattip)
Date: Sun, 6 Jul 2014 14:36:08 +0200 (CEST)
Subject: [pypy-commit] pypy default: improve windows build instructions
Message-ID: <20140706123608.082771C0250@cobra.cs.uni-duesseldorf.de>
Author: mattip
Branch:
Changeset: r72368:979f8b8abacb
Date: 2014-07-06 22:35 +1000
http://bitbucket.org/pypy/pypy/changeset/979f8b8abacb/
Log: improve windows build instructions
diff --git a/pypy/doc/windows.rst b/pypy/doc/windows.rst
--- a/pypy/doc/windows.rst
+++ b/pypy/doc/windows.rst
@@ -132,19 +132,23 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Download http://www.gzip.org/zlib/zlib-1.2.3.tar.gz and extract it in
-the base directory. Then compile::
+the base directory. Then compile as a static library::
cd zlib-1.2.3
nmake -f win32\Makefile.msc
- copy zlib1.dll \zlib.dll
+ copy zlib1.lib
+ copy zlib.h zconf.h
The bz2 compression library
~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Get the same version of bz2 used by python and compile as a static library::
svn export http://svn.python.org/projects/external/bzip2-1.0.6
cd bzip2-1.0.6
nmake -f makefile.msc
- copy bzip.dll \bzip.dll
+ copy libbz2.lib
+ copy bzlib.h
+
The sqlite3 database library
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -166,7 +170,8 @@
is actually enough for pypy).
Then, copy the file ``win32\bin\release\libexpat.dll`` somewhere in
-your PATH.
+your PATH, ``win32\bin\release\libexpat.lib`` somewhere in LIB, and
+both ``lib\expat.h`` and ``lib\expat_external.h`` somewhere in INCLUDE.
The OpenSSL library
~~~~~~~~~~~~~~~~~~~
From noreply at buildbot.pypy.org Sun Jul 6 15:41:11 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Sun, 6 Jul 2014 15:41:11 +0200 (CEST)
Subject: [pypy-commit] pypy default: Merged in scalar-operations (pull
request #243)
Message-ID: <20140706134111.323781C0906@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch:
Changeset: r72369:f1bd7e48eb65
Date: 2014-07-06 14:40 +0100
http://bitbucket.org/pypy/pypy/changeset/f1bd7e48eb65/
Log: Merged in scalar-operations (pull request #243)
Fix performance regression on ufunc(<scalar>, <scalar>) in numpy
diff --git a/pypy/module/micronumpy/base.py b/pypy/module/micronumpy/base.py
--- a/pypy/module/micronumpy/base.py
+++ b/pypy/module/micronumpy/base.py
@@ -18,7 +18,12 @@
pass
-class W_NDimArray(W_Root):
+class W_NumpyObject(W_Root):
+ """Base class for ndarrays and scalars (aka boxes)."""
+ _attrs_ = []
+
+
+class W_NDimArray(W_NumpyObject):
__metaclass__ = extendabletype
def __init__(self, implementation):
@@ -85,6 +90,14 @@
w_val = dtype.coerce(space, space.wrap(0))
return convert_to_array(space, w_val)
+ @staticmethod
+ def from_scalar(space, w_scalar):
+ """Convert a scalar into a 0-dim array"""
+ dtype = w_scalar.get_dtype(space)
+ w_arr = W_NDimArray.from_shape(space, [], dtype)
+ w_arr.set_scalar_value(w_scalar)
+ return w_arr
+
def convert_to_array(space, w_obj):
from pypy.module.micronumpy.ctors import array
diff --git a/pypy/module/micronumpy/boxes.py b/pypy/module/micronumpy/boxes.py
--- a/pypy/module/micronumpy/boxes.py
+++ b/pypy/module/micronumpy/boxes.py
@@ -1,4 +1,3 @@
-from pypy.interpreter.baseobjspace import W_Root
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.interpreter.mixedmodule import MixedModule
@@ -14,7 +13,7 @@
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.tool.sourcetools import func_with_new_name
from pypy.module.micronumpy import constants as NPY
-from pypy.module.micronumpy.base import W_NDimArray
+from pypy.module.micronumpy.base import W_NDimArray, W_NumpyObject
from pypy.module.micronumpy.concrete import VoidBoxStorage
from pypy.module.micronumpy.flagsobj import W_FlagsObject
@@ -126,7 +125,7 @@
return ret
-class W_GenericBox(W_Root):
+class W_GenericBox(W_NumpyObject):
_attrs_ = ['w_flags']
def descr__new__(space, w_subtype, __args__):
@@ -136,6 +135,12 @@
def get_dtype(self, space):
return self._get_dtype(space)
+ def is_scalar(self):
+ return True
+
+ def get_scalar_value(self):
+ return self
+
def item(self, space):
return self.get_dtype(space).itemtype.to_builtin_type(space, self)
diff --git a/pypy/module/micronumpy/ctors.py b/pypy/module/micronumpy/ctors.py
--- a/pypy/module/micronumpy/ctors.py
+++ b/pypy/module/micronumpy/ctors.py
@@ -4,7 +4,8 @@
from rpython.rlib.rstring import strip_spaces
from rpython.rtyper.lltypesystem import lltype, rffi
from pypy.module.micronumpy import descriptor, loop
-from pypy.module.micronumpy.base import W_NDimArray, convert_to_array
+from pypy.module.micronumpy.base import (
+ W_NDimArray, convert_to_array, W_NumpyObject)
from pypy.module.micronumpy.converters import shape_converter
@@ -24,24 +25,44 @@
return box
+def try_array_method(space, w_object, w_dtype=None):
+ w___array__ = space.lookup(w_object, "__array__")
+ if w___array__ is None:
+ return None
+ if w_dtype is None:
+ w_dtype = space.w_None
+ w_array = space.get_and_call_function(w___array__, w_object, w_dtype)
+ if isinstance(w_array, W_NDimArray):
+ return w_array
+ else:
+ raise oefmt(space.w_ValueError,
+ "object __array__ method not producing an array")
+
+
@unwrap_spec(ndmin=int, copy=bool, subok=bool)
def array(space, w_object, w_dtype=None, copy=True, w_order=None, subok=False,
ndmin=0):
+ w_res = _array(space, w_object, w_dtype, copy, w_order, subok)
+ shape = w_res.get_shape()
+ if len(shape) < ndmin:
+ shape = [1] * (ndmin - len(shape)) + shape
+ impl = w_res.implementation.set_shape(space, w_res, shape)
+ if w_res is w_object:
+ return W_NDimArray(impl)
+ else:
+ w_res.implementation = impl
+ return w_res
+
+def _array(space, w_object, w_dtype=None, copy=True, w_order=None, subok=False):
from pypy.module.micronumpy import strides
# for anything that isn't already an array, try __array__ method first
if not isinstance(w_object, W_NDimArray):
- w___array__ = space.lookup(w_object, "__array__")
- if w___array__ is not None:
- if space.is_none(w_dtype):
- w_dtype = space.w_None
- w_array = space.get_and_call_function(w___array__, w_object, w_dtype)
- if isinstance(w_array, W_NDimArray):
- # feed w_array back into array() for other properties
- return array(space, w_array, w_dtype, False, w_order, subok, ndmin)
- else:
- raise oefmt(space.w_ValueError,
- "object __array__ method not producing an array")
+ w_array = try_array_method(space, w_object, w_dtype)
+ if w_array is not None:
+ # continue with w_array, but do further operations in place
+ w_object = w_array
+ copy = False
dtype = descriptor.decode_w_dtype(space, w_dtype)
@@ -57,19 +78,10 @@
# arrays with correct dtype
if isinstance(w_object, W_NDimArray) and \
(space.is_none(w_dtype) or w_object.get_dtype() is dtype):
- shape = w_object.get_shape()
if copy:
- w_ret = w_object.descr_copy(space)
+ return w_object.descr_copy(space)
else:
- if ndmin <= len(shape):
- return w_object
- new_impl = w_object.implementation.set_shape(space, w_object, shape)
- w_ret = W_NDimArray(new_impl)
- if ndmin > len(shape):
- shape = [1] * (ndmin - len(shape)) + shape
- w_ret.implementation = w_ret.implementation.set_shape(space,
- w_ret, shape)
- return w_ret
+ return w_object
# not an array or incorrect dtype
shape, elems_w = strides.find_shape_and_elems(space, w_object, dtype)
@@ -81,8 +93,6 @@
# promote S0 -> S1, U0 -> U1
dtype = descriptor.variable_dtype(space, dtype.char + '1')
- if ndmin > len(shape):
- shape = [1] * (ndmin - len(shape)) + shape
w_arr = W_NDimArray.from_shape(space, shape, dtype, order=order)
if len(elems_w) == 1:
w_arr.set_scalar_value(dtype.coerce(space, elems_w[0]))
@@ -91,6 +101,33 @@
return w_arr
+def numpify(space, w_object):
+ """Convert the object to a W_NumpyObject"""
+ # XXX: code duplication with _array()
+ from pypy.module.micronumpy import strides
+ if isinstance(w_object, W_NumpyObject):
+ return w_object
+ # for anything that isn't already an array, try __array__ method first
+ w_array = try_array_method(space, w_object)
+ if w_array is not None:
+ return w_array
+
+ shape, elems_w = strides.find_shape_and_elems(space, w_object, None)
+ dtype = strides.find_dtype_for_seq(space, elems_w, None)
+ if dtype is None:
+ dtype = descriptor.get_dtype_cache(space).w_float64dtype
+ elif dtype.is_str_or_unicode() and dtype.elsize < 1:
+ # promote S0 -> S1, U0 -> U1
+ dtype = descriptor.variable_dtype(space, dtype.char + '1')
+
+ if len(elems_w) == 1:
+ return dtype.coerce(space, elems_w[0])
+ else:
+ w_arr = W_NDimArray.from_shape(space, shape, dtype)
+ loop.assign(space, w_arr, elems_w)
+ return w_arr
+
+
def _zeros_or_empty(space, w_shape, w_dtype, w_order, zero):
dtype = space.interp_w(descriptor.W_Dtype,
space.call_function(space.gettypefor(descriptor.W_Dtype), w_dtype))
diff --git a/pypy/module/micronumpy/ufuncs.py b/pypy/module/micronumpy/ufuncs.py
--- a/pypy/module/micronumpy/ufuncs.py
+++ b/pypy/module/micronumpy/ufuncs.py
@@ -7,6 +7,7 @@
from rpython.tool.sourcetools import func_with_new_name
from pypy.module.micronumpy import boxes, descriptor, loop, constants as NPY
from pypy.module.micronumpy.base import convert_to_array, W_NDimArray
+from pypy.module.micronumpy.ctors import numpify
from pypy.module.micronumpy.strides import shape_agreement
@@ -17,6 +18,13 @@
def done_if_false(dtype, val):
return not dtype.itemtype.bool(val)
+def _get_dtype(space, w_npyobj):
+ if isinstance(w_npyobj, boxes.W_GenericBox):
+ return w_npyobj.get_dtype(space)
+ else:
+ assert isinstance(w_npyobj, W_NDimArray)
+ return w_npyobj.get_dtype()
+
class W_Ufunc(W_Root):
_immutable_fields_ = [
@@ -304,8 +312,8 @@
out = args_w[1]
if space.is_w(out, space.w_None):
out = None
- w_obj = convert_to_array(space, w_obj)
- dtype = w_obj.get_dtype()
+ w_obj = numpify(space, w_obj)
+ dtype = _get_dtype(space, w_obj)
if dtype.is_flexible():
raise OperationError(space.w_TypeError,
space.wrap('Not implemented for this type'))
@@ -315,7 +323,7 @@
raise oefmt(space.w_TypeError,
"ufunc %s not supported for the input type", self.name)
calc_dtype = find_unaryop_result_dtype(space,
- w_obj.get_dtype(),
+ dtype,
promote_to_float=self.promote_to_float,
promote_bools=self.promote_bools)
if out is not None:
@@ -345,6 +353,7 @@
else:
out.fill(space, w_val)
return out
+ assert isinstance(w_obj, W_NDimArray)
shape = shape_agreement(space, w_obj.get_shape(), out,
broadcast_down=False)
return loop.call1(space, shape, self.func, calc_dtype, res_dtype,
@@ -385,10 +394,10 @@
else:
[w_lhs, w_rhs] = args_w
w_out = None
- w_lhs = convert_to_array(space, w_lhs)
- w_rhs = convert_to_array(space, w_rhs)
- w_ldtype = w_lhs.get_dtype()
- w_rdtype = w_rhs.get_dtype()
+ w_lhs = numpify(space, w_lhs)
+ w_rhs = numpify(space, w_rhs)
+ w_ldtype = _get_dtype(space, w_lhs)
+ w_rdtype = _get_dtype(space, w_rhs)
if w_ldtype.is_str() and w_rdtype.is_str() and \
self.comparison_func:
pass
@@ -451,6 +460,12 @@
else:
out = arr
return out
+ if isinstance(w_lhs, boxes.W_GenericBox):
+ w_lhs = W_NDimArray.from_scalar(space, w_lhs)
+ assert isinstance(w_lhs, W_NDimArray)
+ if isinstance(w_rhs, boxes.W_GenericBox):
+ w_rhs = W_NDimArray.from_scalar(space, w_rhs)
+ assert isinstance(w_rhs, W_NDimArray)
new_shape = shape_agreement(space, w_lhs.get_shape(), w_rhs)
new_shape = shape_agreement(space, new_shape, out, broadcast_down=False)
return loop.call2(space, new_shape, self.func, calc_dtype,
diff --git a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
--- a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py
@@ -30,6 +30,7 @@
""")
def test_array_getitem_accumulate(self):
+ """Check that operations/ufuncs on array items are jitted correctly"""
def main():
import _numpypy.multiarray as np
arr = np.zeros((300, 300))
@@ -43,7 +44,6 @@
log = self.run(main, [])
assert log.result == 0
loop, = log.loops_by_filename(self.filepath)
- skip('used to pass on 69421-f3e717c94913')
assert loop.match("""
i81 = int_lt(i76, 300)
guard_true(i81, descr=...)
From noreply at buildbot.pypy.org Sun Jul 6 15:41:23 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Sun, 6 Jul 2014 15:41:23 +0200 (CEST)
Subject: [pypy-commit] pypy scalar-operations: Close branch scalar-operations
Message-ID: <20140706134123.628B41C0906@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch: scalar-operations
Changeset: r72370:82fa12110c8f
Date: 2014-07-06 14:40 +0100
http://bitbucket.org/pypy/pypy/changeset/82fa12110c8f/
Log: Close branch scalar-operations
From noreply at buildbot.pypy.org Sun Jul 6 15:45:38 2014
From: noreply at buildbot.pypy.org (rlamy)
Date: Sun, 6 Jul 2014 15:45:38 +0200 (CEST)
Subject: [pypy-commit] pypy default: update whatsnew-head.rst
Message-ID: <20140706134538.7180A1C0906@cobra.cs.uni-duesseldorf.de>
Author: Ronan Lamy
Branch:
Changeset: r72371:fbd0dadee790
Date: 2014-07-06 14:45 +0100
http://bitbucket.org/pypy/pypy/changeset/fbd0dadee790/
Log: update whatsnew-head.rst
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -47,3 +47,6 @@
.. branch: disable_pythonapi
Remove non-functioning ctypes.pythonapi and ctypes.PyDLL, document this
incompatibility with cpython. Recast sys.dllhandle to an int.
+
+.. branch: scalar-operations
+Fix performance regression on ufunc(<scalar>, <scalar>) in numpy.
From noreply at buildbot.pypy.org Sun Jul 6 19:29:13 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Sun, 6 Jul 2014 19:29:13 +0200 (CEST)
Subject: [pypy-commit] cffi default: Oops. Using memcpy() here can be bogus
because the addresses can overlap.
Message-ID: <20140706172913.3BEE11D2960@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r1549:133eb25752ff
Date: 2014-07-06 19:29 +0200
http://bitbucket.org/cffi/cffi/changeset/133eb25752ff/
Log: Oops. Using memcpy() here can be bogus because the addresses can
overlap.
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -1959,7 +1959,7 @@
if ((ctv->ct_flags & CT_ARRAY) && (ctv->ct_itemdescr == ct) &&
(get_array_length((CDataObject *)v) == length)) {
/* fast path: copying from exactly the correct type */
- memcpy(cdata, ((CDataObject *)v)->c_data, itemsize * length);
+ memmove(cdata, ((CDataObject *)v)->c_data, itemsize * length);
return 0;
}
}
From noreply at buildbot.pypy.org Mon Jul 7 07:38:32 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:32 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Explicitly pass
-fno-rtti when compiling PyPyGC.cpp.
Message-ID: <20140707053832.CE06C1D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72372:ca9f8a00d634
Date: 2014-07-06 12:14 +0200
http://bitbucket.org/pypy/pypy/changeset/ca9f8a00d634/
Log: Explicitly pass -fno-rtti when compiling PyPyGC.cpp.
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -1839,7 +1839,7 @@
gc_cpp = this_file.new(basename='PyPyGC.cpp')
gc_lib = this_file.new(purebasename='PyPyGC',
ext=self.translator.platform.so_ext)
- cflags = cmdexec('llvm-config --cxxflags').strip()
+ cflags = cmdexec('llvm-config --cxxflags').strip() + ' -fno-rtti'
cmdexec('clang {} -shared {} -o {}'.format(cflags, gc_cpp, gc_lib))
return gc_lib
From noreply at buildbot.pypy.org Mon Jul 7 07:38:34 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:34 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: minor style fix
Message-ID: <20140707053834.130671D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72373:405a6af63224
Date: 2014-07-06 12:16 +0200
http://bitbucket.org/pypy/pypy/changeset/405a6af63224/
Log: minor style fix
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -55,7 +55,7 @@
def get_extra_len(self, value):
raise NotImplementedError("Override in subclass.")
- def repr_value(self, obj):
+ def repr_value(self, value):
raise NotImplementedError("Override in subclass.")
def repr_type_and_value(self, value):
@@ -714,14 +714,16 @@
ptr_type.refs[obj] = 'null'
-_LL_TO_LLVM = {lltype.Ptr: PtrType,
- lltype.Struct: StructType, lltype.GcStruct: StructType,
- lltype.Array: ArrayType, lltype.GcArray: ArrayType,
- lltype.FixedSizeArray: BareArrayType,
- lltype.FuncType: FuncType,
- lltype.OpaqueType: OpaqueType, lltype.GcOpaqueType: OpaqueType,
- llgroup.GroupType: GroupType,
- llmemory._WeakRefType: OpaqueType}
+_LL_TO_LLVM = {
+ lltype.Ptr: PtrType,
+ lltype.Struct: StructType, lltype.GcStruct: StructType,
+ lltype.Array: ArrayType, lltype.GcArray: ArrayType,
+ lltype.FixedSizeArray: BareArrayType,
+ lltype.FuncType: FuncType,
+ lltype.OpaqueType: OpaqueType, lltype.GcOpaqueType: OpaqueType,
+ llgroup.GroupType: GroupType,
+ llmemory._WeakRefType: OpaqueType,
+}
class Database(object):
identifier_regex = re.compile('^[%@][a-zA-Z$._][a-zA-Z$._0-9]*$')
From noreply at buildbot.pypy.org Mon Jul 7 07:38:35 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:35 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Rename PtrType.to() ->
PtrType.tmp() to avoid ambiguity with PtrType's `to` attribute.
Message-ID: <20140707053835.5D4BA1D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72374:e752ad534f8f
Date: 2014-07-07 07:16 +0200
http://bitbucket.org/pypy/pypy/changeset/e752ad534f8f/
Log: Rename PtrType.to() -> PtrType.tmp() to avoid ambiguity with
PtrType's `to` attribute.
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -407,7 +407,7 @@
to = parent_type.add_indices(gep, child)
self.refs[obj] = (
'bitcast({} getelementptr inbounds({}, {}) to {})'
- .format(PtrType.to(to).repr_type(), parent_ref,
+ .format(PtrType.tmp(to).repr_type(), parent_ref,
', '.join(gep.indices), self.repr_type()))
else:
self.to.repr_ref(self, obj)
@@ -1185,7 +1185,7 @@
def _get_element(self, result, var, *fields):
if result.type is not LLVMVoid:
- t = self._tmp(PtrType.to(result.type))
+ t = self._tmp(PtrType.tmp(result.type))
self._get_element_ptr(var, fields, t)
self.w('{result.V} = load {t.TV}'.format(**locals()))
op_getfield = op_bare_getfield = _get_element
@@ -1196,7 +1196,7 @@
fields = rest[:-1]
value = rest[-1]
if value.type is not LLVMVoid:
- t = self._tmp(PtrType.to(value.type))
+ t = self._tmp(PtrType.tmp(value.type))
self._get_element_ptr(var, fields, t)
self.w('store {value.TV}, {t.TV}'.format(**locals()))
op_setfield = op_bare_setfield = _set_element
@@ -1204,17 +1204,17 @@
op_setarrayitem = op_bare_setarrayitem = _set_element
def op_direct_fieldptr(self, result, ptr, field):
- t = self._tmp(PtrType.to(result.type.to.of))
+ t = self._tmp(PtrType.tmp(result.type.to.of))
self._get_element_ptr(ptr, [field], t)
self.w('{result.V} = bitcast {t.TV} to {result.T}'.format(**locals()))
def op_direct_arrayitems(self, result, ptr):
- t = self._tmp(PtrType.to(result.type.to.of))
+ t = self._tmp(PtrType.tmp(result.type.to.of))
self._get_element_ptr(ptr, [ConstantRepr(LLVMSigned, 0)], t)
self.w('{result.V} = bitcast {t.TV} to {result.T}'.format(**locals()))
def op_direct_ptradd(self, result, var, val):
- t = self._tmp(PtrType.to(result.type.to.of))
+ t = self._tmp(PtrType.tmp(result.type.to.of))
self.w('{t.V} = getelementptr inbounds {var.TV}, i64 0, {val.TV}'
.format(**locals()))
self.w('{result.V} = bitcast {t.TV} to {result.T}'.format(**locals()))
@@ -1233,7 +1233,7 @@
gep.add_field_index(1)
else:
gep.add_field_index(0)
- t = self._tmp(PtrType.to(LLVMSigned))
+ t = self._tmp(PtrType.tmp(LLVMSigned))
gep.assign(t)
self.w('{result.V} = load {t.TV}'.format(**locals()))
op_getinteriorarraysize = op_getarraysize
@@ -1333,9 +1333,9 @@
self.op_direct_call(result, get_repr(raw_free), ptr)
def _get_addr(self, ptr_to, addr, offset):
- t1 = self._tmp(PtrType.to(LLVMChar))
- t2 = self._tmp(PtrType.to(LLVMChar))
- t3 = self._tmp(PtrType.to(ptr_to))
+ t1 = self._tmp(PtrType.tmp(LLVMChar))
+ t2 = self._tmp(PtrType.tmp(LLVMChar))
+ t3 = self._tmp(PtrType.tmp(ptr_to))
self._cast(t1, addr)
self.w('{t2.V} = getelementptr inbounds {t1.TV}, {offset.TV}'
.format(**locals()))
From noreply at buildbot.pypy.org Mon Jul 7 07:38:36 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:36 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: oops
Message-ID: <20140707053836.A6F3D1D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72375:f878b24a4a5e
Date: 2014-07-07 07:19 +0200
http://bitbucket.org/pypy/pypy/changeset/f878b24a4a5e/
Log: oops
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -365,7 +365,7 @@
self.refs = {None: 'null'}
@classmethod
- def to(cls, to):
+ def tmp(cls, to):
# call __new__ to prevent __init__ from being called
self = cls.__new__(cls)
self.to = to
From noreply at buildbot.pypy.org Mon Jul 7 07:38:37 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:37 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Move weakref and RTTI
functionality out of OpaqueType.
Message-ID: <20140707053837.EAEDF1D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72376:afe803c5e077
Date: 2014-07-07 07:30 +0200
http://bitbucket.org/pypy/pypy/changeset/afe803c5e077/
Log: Move weakref and RTTI functionality out of OpaqueType.
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -13,6 +13,7 @@
from rpython.memory.gctransform.refcounting import RefcountingGCTransformer
from rpython.memory.gctransform.shadowstack import (
ShadowStackFrameworkGCTransformer)
+from rpython.memory.gctypelayout import WEAKREF, convert_weakref_to
from rpython.rlib import exports
from rpython.rlib.jit import _we_are_jitted
from rpython.rlib.objectmodel import (Symbolic, ComputedIntSymbolic,
@@ -43,6 +44,9 @@
varsize = False
needs_gc_header = False
+ def setup_from_lltype(self, db, type):
+ pass
+
def repr_type(self, extra_len=None):
return self.typestr
@@ -94,7 +98,8 @@
else:
ptr_type.refs[obj] = name
hash_ = database.genllvm.gcpolicy.get_prebuilt_hash(obj)
- if (obj._TYPE._hints.get('immutable', False) and
+ if (hasattr(obj._TYPE, '_hints') and
+ obj._TYPE._hints.get('immutable', False) and
obj._TYPE._gckind != 'gc'):
global_attrs += 'constant'
else:
@@ -346,7 +351,7 @@
lltype.Float: FloatType('double', 64),
lltype.SingleFloat: FloatType('float', 32),
lltype.LongFloat: FloatType('x86_fp80', 80),
- llmemory.Address: LLVMAddress
+ llmemory.Address: LLVMAddress,
}
for type in rffi.NUMBER_TYPES + [lltype.Char, lltype.UniChar]:
@@ -692,26 +697,39 @@
class OpaqueType(Type):
- typestr = '{}'
-
- def setup_from_lltype(self, db, type):
- pass
+ typestr = 'i8'
def repr_of_type(self):
return 'opaque'
def is_zero(self, value):
- return True
+ raise ValueError("value is opaque")
def repr_ref(self, ptr_type, obj):
if hasattr(obj, 'container'):
- ptr_type.refs[obj] = 'bitcast({} to {{}}*)'.format(
- get_repr(obj.container._as_ptr()).TV)
- elif isinstance(obj, llmemory._wref):
- ptr_type.refs[obj] = 'bitcast({} to {{}}*)'.format(
- get_repr(obj._converted_weakref).TV)
+ realvalue = get_repr(lltype.cast_opaque_ptr(
+ lltype.Ptr(lltype.typeOf(obj.container)), obj._as_ptr()))
+ ptr_type.refs[obj] = 'bitcast({.TV} to i8*)'.format(realvalue)
else:
- ptr_type.refs[obj] = 'null'
+ raise ValueError("value is opaque")
+
+
+class WeakRefType(Type):
+ def setup_from_lltype(self, db, type):
+ self.struct_type = StructType()
+ self.struct_type.setup_from_lltype(db, WEAKREF)
+
+ def repr_type(self, extra_len=None):
+ return self.struct_type.repr_type(extra_len)
+
+ def repr_of_type(self):
+ return 'weakref'
+
+ def is_zero(self, value):
+ return self.struct_type.is_zero(value._converted_weakref)
+
+ def repr_value(self, value):
+ return self.struct_type.repr_value(value._converted_weakref)
_LL_TO_LLVM = {
@@ -722,7 +740,7 @@
lltype.FuncType: FuncType,
lltype.OpaqueType: OpaqueType, lltype.GcOpaqueType: OpaqueType,
llgroup.GroupType: GroupType,
- llmemory._WeakRefType: OpaqueType,
+ llmemory._WeakRefType: WeakRefType,
}
class Database(object):
@@ -1520,10 +1538,9 @@
elif type is llmemory.GCREF.TO and hasattr(value, 'container'):
self._consider_constant(value.ORIGTYPE.TO, value.container)
elif type is llmemory.WeakRef:
- from rpython.memory.gctypelayout import convert_weakref_to
wrapper = convert_weakref_to(value._dereference())
self._consider_constant(wrapper._TYPE, wrapper)
- value._converted_weakref = wrapper
+ value._converted_weakref = wrapper._obj
self.gctransformer.consider_constant(type, value)
p, c = lltype.parentlink(value)
@@ -1560,7 +1577,14 @@
class FrameworkGCPolicy(GCPolicy):
- RttiType = OpaqueType
+ class RttiType(Type):
+ typestr = '{}'
+
+ def is_zero(self, value):
+ return True
+
+ def repr_ref(self, ptr_type, obj):
+ ptr_type.refs[obj] = 'null'
def __init__(self, genllvm):
GCPolicy.__init__(self, genllvm)
From noreply at buildbot.pypy.org Mon Jul 7 07:38:39 2014
From: noreply at buildbot.pypy.org (Manuel Jacob)
Date: Mon, 7 Jul 2014 07:38:39 +0200 (CEST)
Subject: [pypy-commit] pypy llvm-translation-backend: Minor refactoring:
slightly change the way struct names are handled.
Message-ID: <20140707053839.4453C1D3528@cobra.cs.uni-duesseldorf.de>
Author: Manuel Jacob
Branch: llvm-translation-backend
Changeset: r72377:e3796537464c
Date: 2014-07-07 07:37 +0200
http://bitbucket.org/pypy/pypy/changeset/e3796537464c/
Log: Minor refactoring: slightly change the way struct names are handled.
diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py
--- a/rpython/translator/llvm/genllvm.py
+++ b/rpython/translator/llvm/genllvm.py
@@ -51,7 +51,7 @@
return self.typestr
def repr_of_type(self):
- return self.repr_type()
+ return self.typestr
def is_zero(self, value):
raise NotImplementedError("Override in subclass.")
@@ -438,21 +438,21 @@
def setup_from_lltype(self, db, type):
if (type._hints.get('typeptr', False) and
db.genllvm.translator.config.translation.gcremovetypeptr):
- self.setup('%' + type._name, [], True)
+ self.setup(type._name, [], True)
return
fields = ((db.get_type(type._flds[f]), f) for f in type._names)
is_gc = type._gckind == 'gc'
needs_gc_header = is_gc and type._first_struct() == (None, None)
- self.setup('%' + type._name, fields, needs_gc_header)
+ self.setup(type._name, fields, needs_gc_header)
def repr_type(self, extra_len=None):
if extra_len not in self.size_variants:
if extra_len is not None:
- name = self.name + '_plus_{}'.format(extra_len)
+ name = '%{}_plus_{}'.format(self.name, extra_len)
elif self.varsize:
- name = self.name + '_varsize'
+ name = '%{}_varsize'.format(self.name)
else:
- name = self.name
+ name = '%{}'.format(self.name)
self.size_variants[extra_len] = name = database.unique_name(name)
lastname = self.fldnames_wo_voids[-1]
tmp = (' {semicolon}{fldtype}{comma} ; {fldname}\n'.format(
@@ -465,7 +465,7 @@
return self.size_variants[extra_len]
def repr_of_type(self):
- return self.name[1:]
+ return self.name
def is_zero(self, value):
if self.needs_gc_header:
@@ -586,7 +586,7 @@
self.bare_array_type.setup(of, None)
self.struct_type = StructType()
fields = [(LLVMSigned, 'len'), (self.bare_array_type, 'items')]
- self.struct_type.setup('%array_of_' + of.repr_of_type(), fields, is_gc)
+ self.struct_type.setup('array_of_' + of.repr_of_type(), fields, is_gc)
def setup_from_lltype(self, db, type):
self.setup(db.get_type(type.OF), type._gckind == 'gc')
@@ -648,7 +648,7 @@
'getelementptr inbounds({}* {}, i64 0, i32 {})'
.format(self.typestr, groupname, i))
struct_type = StructType()
- struct_type.setup(self.typestr, fields, False)
+ struct_type.setup('group_' + obj.name, fields, False)
database.f.write('{} = global {}\n'.format(
groupname, struct_type.repr_type_and_value(group)))
From noreply at buildbot.pypy.org Mon Jul 7 12:57:40 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Mon, 7 Jul 2014 12:57:40 +0200 (CEST)
Subject: [pypy-commit] pypy stmgc-c7: Workaround: some Linux systems start
processes with a non-null %gs
Message-ID: <20140707105740.BE99C1D2DDD@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stmgc-c7
Changeset: r72378:dd3c06b77a11
Date: 2014-07-07 12:57 +0200
http://bitbucket.org/pypy/pypy/changeset/dd3c06b77a11/
Log: Workaround: some Linux systems start processes with a non-null %gs
content. It seems that forcing %gs to be 0 here solves problems in
case we have some 'late_initializations'.
diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py
--- a/rpython/translator/c/genc.py
+++ b/rpython/translator/c/genc.py
@@ -795,6 +795,10 @@
print >> f, 'char *RPython_StartupCode(void) {'
print >> f, '\tchar *error = NULL;'
+ if database.with_stm:
+ print >> f, '\t/* XXX temporary workaround for late_initializations */'
+ print >> f, '\tsyscall(SYS_arch_prctl, ARCH_SET_GS, (uint64_t)0);'
+
# put float infinities in global constants, we should not have so many of them for now to make
# a table+loop preferable
for dest, value in database.late_initializations:
@@ -896,6 +900,14 @@
filename = targetdir.join(modulename + '.c')
f = filename.open('w')
+ if database.with_stm:
+ print >> f, '/* XXX temporary, for SYS_arch_prctl below */'
+ print >> f, '#define _GNU_SOURCE'
print >> f, '#include <unistd.h>'
print >> f, '#include <asm/prctl.h>'
print >> f, '#include <sys/prctl.h>'
print >> f, '#include <sys/syscall.h>'
+ print >> f
incfilename = targetdir.join('common_header.h')
fi = incfilename.open('w')
fi.write('#ifndef _PY_COMMON_HEADER_H\n#define _PY_COMMON_HEADER_H\n')
From noreply at buildbot.pypy.org Mon Jul 7 13:16:18 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:18 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Renamed variable/parameter.
Message-ID: <20140707111618.7218B1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r852:35d8fe62d2aa
Date: 2014-05-28 15:08 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/35d8fe62d2aa/
Log: Renamed variable/parameter.
diff --git a/spyvm/storage_statistics.py b/spyvm/storage_statistics.py
--- a/spyvm/storage_statistics.py
+++ b/spyvm/storage_statistics.py
@@ -49,7 +49,7 @@
class StatisticsModule(object):
uses_classname = False
- def storage_operation(self, operation_key, storage_size, element_classname):
+ def storage_operation(self, operation_key, storage_size, container_classname):
raise NotImplementedError("Abstract class")
def print_results(self):
raise NotImplementedError("Abstract class")
@@ -61,12 +61,12 @@
class StatisticsLogger(StatisticsModule):
uses_classname = True
- def storage_operation(self, operation_key, storage_size, element_classname):
- print self.log_string(operation_key, storage_size, element_classname)
+ def storage_operation(self, operation_key, storage_size, container_classname):
+ print self.log_string(operation_key, storage_size, container_classname)
- def log_string(self, operation_key, storage_size, element_classname):
- if element_classname:
- return "%s of %s size %d" % (self.key_string(operation_key), element_classname, storage_size)
+ def log_string(self, operation_key, storage_size, container_classname):
+ if container_classname:
+ return "%s of %s size %d" % (self.key_string(operation_key), container_classname, storage_size)
else:
return "%s size %d" % (self.key_string(operation_key), storage_size)
@@ -79,7 +79,7 @@
def __init__(self):
self.stats = {}
- def storage_operation(self, operation_key, storage_size, element_classname):
+ def storage_operation(self, operation_key, storage_size, container_classname):
if not operation_key in self.stats:
self.stats[operation_key] = self.initial_value()
self.increment_value(self.stats[operation_key], storage_size)
@@ -112,8 +112,8 @@
self.outgoing_operations = {}
self.outgoing_elements = {}
- def storage_operation(self, key, storage_size, element_classname):
- StatisticsCollector.storage_operation(self, key, storage_size, element_classname)
+ def storage_operation(self, key, storage_size, container_classname):
+ StatisticsCollector.storage_operation(self, key, storage_size, container_classname)
source_type = key[1]
target_type = key[2]
self.fill_maps(self.incoming_operations, self.incoming_elements, target_type, storage_size)
From noreply at buildbot.pypy.org Mon Jul 7 13:16:19 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:19 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added histogram output for
storage statistics.
Message-ID: <20140707111619.B87461C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r853:689aa666070e
Date: 2014-06-23 19:14 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/689aa666070e/
Log: Added histogram output for storage statistics.
diff --git a/spyvm/storage_statistics.py b/spyvm/storage_statistics.py
--- a/spyvm/storage_statistics.py
+++ b/spyvm/storage_statistics.py
@@ -32,20 +32,18 @@
old_storage = None
size = w_obj.size()
- key = self.make_key(operation, old_storage, new_storage)
if self.using_classname and log_classname:
classname = w_obj.guess_classname()
else:
classname = None
for module in self.modules:
+ key = module.make_key(operation, old_storage, new_storage)
module.storage_operation(key, size, classname)
- def make_key(self, operation, old_storage, new_storage):
- return (operation, old_storage, new_storage)
-
def print_results(self):
for module in self.modules:
module.print_results()
+ print "\n\n"
class StatisticsModule(object):
uses_classname = False
@@ -53,6 +51,8 @@
raise NotImplementedError("Abstract class")
def print_results(self):
raise NotImplementedError("Abstract class")
+ def make_key(self, operation, old_storage, new_storage):
+ return (operation, old_storage, new_storage)
def key_string(self, key):
if key[1]:
return "%s (%s -> %s)" % (key[0], key[1], key[2])
@@ -82,17 +82,46 @@
def storage_operation(self, operation_key, storage_size, container_classname):
if not operation_key in self.stats:
self.stats[operation_key] = self.initial_value()
- self.increment_value(self.stats[operation_key], storage_size)
+ self.increment_value(self.stats[operation_key], storage_size, container_classname)
def sorted_keys(self):
keys = [ x for x in self.stats ]
StatsSorter(keys).sort()
return keys
+class HistogramStatisticsCollector(AbstractStatisticsCollector):
+ # Stores classnames with sizes
+ # Value: map
+
+ uses_classname = True
+ def initial_value(self): return {}
+ def increment_value(self, value_object, storage_size, container_classname):
+ if not container_classname in value_object:
+ value_object[container_classname] = [0, 0]
+ m = value_object[container_classname]
+ m[0] = m[0] + storage_size
+ m[1] = m[1] + 1
+
+ def make_key(self, operation, old_storage, new_storage):
+ return (new_storage)
+
+ def print_results(self):
+ print "## Histogram statistics:"
+ for key in self.sorted_keys():
+ print "##"
+ print "# %s" % key
+ print "Data Objects Elements"
+ classes = self.stats[key]
+ for cls in classes:
+ tuple = classes[cls]
+ sum = tuple[0]
+ num = tuple[1]
+ print "%s\t%d\t%d" % (cls, num, sum)
+
class StatisticsCollector(AbstractStatisticsCollector):
# Value: [total_size, num_operations]
def initial_value(self): return [0, 0]
- def increment_value(self, value_object, storage_size):
+ def increment_value(self, value_object, storage_size, container_classname):
value_object[0] = value_object[0] + storage_size
value_object[1] = value_object[1] + 1
def print_results(self):
@@ -106,7 +135,7 @@
class DotStatisticsCollector(StatisticsCollector):
def __init__(self):
- AbstractStatisticsCollector.__init__(self)
+ StatisticsCollector.__init__(self)
self.incoming_operations = {}
self.incoming_elements = {}
self.outgoing_operations = {}
@@ -181,7 +210,7 @@
class DetailedStatisticsCollector(AbstractStatisticsCollector):
# Value: list of numbers (sizes)
def initial_value(self): return []
- def increment_value(self, value_object, storage_size):
+ def increment_value(self, value_object, storage_size, container_classname):
value_object.append(storage_size)
def print_results(self):
print "Detailed Storage Statistics:"
@@ -195,8 +224,9 @@
_collector = StatisticsCollector()
_detailedcollector = DetailedStatisticsCollector()
_dotcollector = DotStatisticsCollector()
+_histogramcollector = HistogramStatisticsCollector()
-def activate_statistics(log=False, statistics=False, detailed_statistics=False, dot=False):
+def activate_statistics(log=False, statistics=False, detailed_statistics=False, dot=False, histogram=False):
if log:
_stats.add_module(_logger)
if statistics:
@@ -207,6 +237,8 @@
_stats.add_module(_dotcollector)
# Start a comment in order to make the entire output valid dot code. Hack.
print "/*"
+ if histogram:
+ _stats.add_module(_histogramcollector)
def print_statistics():
_stats.print_results()
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -134,6 +134,7 @@
--strategy-stats
--strategy-stats-dot
--strategy-stats-details
+ --strategy-stats-histogram
[image path, default: Squeak.image]
""" % (argv[0], constants.MAX_LOOP_DEPTH)
@@ -204,6 +205,8 @@
storage_statistics.activate_statistics(statistics=True)
elif arg == "--strategy-stats-dot":
storage_statistics.activate_statistics(dot=True)
+ elif arg == "--strategy-stats-histogram":
+ storage_statistics.activate_statistics(histogram=True)
elif arg == "--strategy-stats-details":
storage_statistics.activate_statistics(statistics=True, detailed_statistics=True)
elif path is None:
From noreply at buildbot.pypy.org Mon Jul 7 13:16:20 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:20 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed histogram statistics
output
Message-ID: <20140707111620.DCA081C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r854:e6b70019cd99
Date: 2014-06-27 13:41 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/e6b70019cd99/
Log: Fixed histogram statistics output
diff --git a/spyvm/storage_statistics.py b/spyvm/storage_statistics.py
--- a/spyvm/storage_statistics.py
+++ b/spyvm/storage_statistics.py
@@ -1,5 +1,6 @@
from rpython.rlib.listsort import TimSort
+from rpython.rlib.objectmodel import import_from_mixin
class StatsSorter(TimSort):
"""Sort a tuple of 3 strings"""
@@ -74,7 +75,7 @@
# Nothing to do, this is just for logging during runtime.
pass
-class AbstractStatisticsCollector(StatisticsModule):
+class StatisticsCollectorMixin(StatisticsModule):
def __init__(self):
self.stats = {}
@@ -89,9 +90,10 @@
StatsSorter(keys).sort()
return keys
-class HistogramStatisticsCollector(AbstractStatisticsCollector):
+class HistogramStatisticsCollector(StatisticsModule):
# Stores classnames with sizes
# Value: map
+ import_from_mixin(StatisticsCollectorMixin)
uses_classname = True
def initial_value(self): return {}
@@ -103,23 +105,27 @@
m[1] = m[1] + 1
def make_key(self, operation, old_storage, new_storage):
- return (new_storage)
+ return (new_storage, "", "")
def print_results(self):
print "## Histogram statistics:"
- for key in self.sorted_keys():
- print "##"
- print "# %s" % key
+ for key_tuple in self.sorted_keys():
+ key = key_tuple[0]
+ if not "Storage" in key:
+ continue
+ print "\n# %s" % key
print "Data Objects Elements"
- classes = self.stats[key]
+ classes = self.stats[key_tuple]
for cls in classes:
tuple = classes[cls]
sum = tuple[0]
num = tuple[1]
- print "%s\t%d\t%d" % (cls, num, sum)
+ print "%d slots in %d objects: %s" % (sum, num, cls)
-class StatisticsCollector(AbstractStatisticsCollector):
+class StatisticsCollector(StatisticsModule):
# Value: [total_size, num_operations]
+ import_from_mixin(StatisticsCollectorMixin)
+
def initial_value(self): return [0, 0]
def increment_value(self, value_object, storage_size, container_classname):
value_object[0] = value_object[0] + storage_size
@@ -207,8 +213,10 @@
result += "}"
return result
-class DetailedStatisticsCollector(AbstractStatisticsCollector):
+class DetailedStatisticsCollector(StatisticsModule):
# Value: list of numbers (sizes)
+ import_from_mixin(StatisticsCollectorMixin)
+
def initial_value(self): return []
def increment_value(self, value_object, storage_size, container_classname):
value_object.append(storage_size)
From noreply at buildbot.pypy.org Mon Jul 7 13:16:22 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:22 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Removed storage_statistics
module, replaced with very simple storage_logger module.
Message-ID: <20140707111622.29E3B1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r855:9468db2cf599
Date: 2014-06-30 17:57 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/9468db2cf599/
Log: Removed storage_statistics module, replaced with very simple
storage_logger module.
diff --git a/spyvm/model.py b/spyvm/model.py
--- a/spyvm/model.py
+++ b/spyvm/model.py
@@ -15,7 +15,7 @@
that create W_PointersObjects of correct size with attached shadows.
"""
import sys, weakref
-from spyvm import constants, error, version, storage_statistics
+from spyvm import constants, error, version, storage_logger
from spyvm.version import elidable_for_version, constant_for_version, constant_for_version_arg
from rpython.rlib import rrandom, objectmodel, jit, signature
@@ -566,7 +566,7 @@
_attrs_ = ['shadow']
shadow = None
repr_classname = "W_PointersObject"
- log_storage = storage_statistics.log
+ log_storage = storage_logger.log
@jit.unroll_safe
def __init__(self, space, w_class, size, weak=False):
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
new file mode 100644
--- /dev/null
+++ b/spyvm/storage_logger.py
@@ -0,0 +1,27 @@
+
+# Put flag in a list to make it modifyable after compile time.
+_active = [False]
+
+def activate():
+ _active[0] = True
+
+def log(w_obj, operation, old_storage_object=None, log_classname=True):
+ if not _active[0]:
+ return
+
+ # Gather information to be logged
+ new_storage = w_obj.shadow.repr_classname
+ if old_storage_object:
+ old_storage = old_storage_object.repr_classname
+ else:
+ old_storage = None
+ size = w_obj.size()
+ if log_classname:
+ classname = w_obj.guess_classname()
+ else:
+ classname = None
+
+ # Construct and print the logstring
+ old_storage_string = "%s -> " % old_storage if old_storage else ""
+ classname_string = " of %s" % classname if classname else ""
+ print "%s (%s%s)%s size %d" % (operation, old_storage_string, new_storage, classname_string, size)
diff --git a/spyvm/storage_statistics.py b/spyvm/storage_statistics.py
deleted file mode 100644
--- a/spyvm/storage_statistics.py
+++ /dev/null
@@ -1,255 +0,0 @@
-
-from rpython.rlib.listsort import TimSort
-from rpython.rlib.objectmodel import import_from_mixin
-
-class StatsSorter(TimSort):
- """Sort a tuple of 3 strings"""
- def lt(self, a, b):
- if a[0] == b[0]:
- if a[1] == b[1]:
- return a[2] < b[2]
- else:
- return a[1] < b[1]
- else:
- return a[0] < b[0]
-
-class StorageStatistics(object):
-
- def __init__(self):
- self.modules = []
- self.using_classname = False
-
- def add_module(self, module):
- if module not in self.modules:
- self.modules.append(module)
- self.using_classname = self.using_classname or module.uses_classname
-
- def log(self, w_obj, operation, old_storage_object, log_classname):
- if len(self.modules) > 0:
- new_storage = w_obj.shadow.repr_classname
- if old_storage_object:
- old_storage = old_storage_object.repr_classname
- else:
- old_storage = None
- size = w_obj.size()
-
- if self.using_classname and log_classname:
- classname = w_obj.guess_classname()
- else:
- classname = None
- for module in self.modules:
- key = module.make_key(operation, old_storage, new_storage)
- module.storage_operation(key, size, classname)
-
- def print_results(self):
- for module in self.modules:
- module.print_results()
- print "\n\n"
-
-class StatisticsModule(object):
- uses_classname = False
- def storage_operation(self, operation_key, storage_size, container_classname):
- raise NotImplementedError("Abstract class")
- def print_results(self):
- raise NotImplementedError("Abstract class")
- def make_key(self, operation, old_storage, new_storage):
- return (operation, old_storage, new_storage)
- def key_string(self, key):
- if key[1]:
- return "%s (%s -> %s)" % (key[0], key[1], key[2])
- else:
- return "%s (%s)" % (key[0], key[2])
-
-class StatisticsLogger(StatisticsModule):
- uses_classname = True
- def storage_operation(self, operation_key, storage_size, container_classname):
- print self.log_string(operation_key, storage_size, container_classname)
-
- def log_string(self, operation_key, storage_size, container_classname):
- if container_classname:
- return "%s of %s size %d" % (self.key_string(operation_key), container_classname, storage_size)
- else:
- return "%s size %d" % (self.key_string(operation_key), storage_size)
-
- def print_results(self):
- # Nothing to do, this is just for logging during runtime.
- pass
-
-class StatisticsCollectorMixin(StatisticsModule):
-
- def __init__(self):
- self.stats = {}
-
- def storage_operation(self, operation_key, storage_size, container_classname):
- if not operation_key in self.stats:
- self.stats[operation_key] = self.initial_value()
- self.increment_value(self.stats[operation_key], storage_size, container_classname)
-
- def sorted_keys(self):
- keys = [ x for x in self.stats ]
- StatsSorter(keys).sort()
- return keys
-
-class HistogramStatisticsCollector(StatisticsModule):
- # Stores classnames with sizes
- # Value: map
- import_from_mixin(StatisticsCollectorMixin)
-
- uses_classname = True
- def initial_value(self): return {}
- def increment_value(self, value_object, storage_size, container_classname):
- if not container_classname in value_object:
- value_object[container_classname] = [0, 0]
- m = value_object[container_classname]
- m[0] = m[0] + storage_size
- m[1] = m[1] + 1
-
- def make_key(self, operation, old_storage, new_storage):
- return (new_storage, "", "")
-
- def print_results(self):
- print "## Histogram statistics:"
- for key_tuple in self.sorted_keys():
- key = key_tuple[0]
- if not "Storage" in key:
- continue
- print "\n# %s" % key
- print "Data Objects Elements"
- classes = self.stats[key_tuple]
- for cls in classes:
- tuple = classes[cls]
- sum = tuple[0]
- num = tuple[1]
- print "%d slots in %d objects: %s" % (sum, num, cls)
-
-class StatisticsCollector(StatisticsModule):
- # Value: [total_size, num_operations]
- import_from_mixin(StatisticsCollectorMixin)
-
- def initial_value(self): return [0, 0]
- def increment_value(self, value_object, storage_size, container_classname):
- value_object[0] = value_object[0] + storage_size
- value_object[1] = value_object[1] + 1
- def print_results(self):
- print "Storage Statistics:"
- for key in self.sorted_keys():
- tuple = self.stats[key]
- sum = tuple[0]
- num = tuple[1]
- print "\t%s: %d times, avg size: %f" % (self.key_string(key), num, float(sum)/num)
-
-class DotStatisticsCollector(StatisticsCollector):
-
- def __init__(self):
- StatisticsCollector.__init__(self)
- self.incoming_operations = {}
- self.incoming_elements = {}
- self.outgoing_operations = {}
- self.outgoing_elements = {}
-
- def storage_operation(self, key, storage_size, container_classname):
- StatisticsCollector.storage_operation(self, key, storage_size, container_classname)
- source_type = key[1]
- target_type = key[2]
- self.fill_maps(self.incoming_operations, self.incoming_elements, target_type, storage_size)
- if source_type:
- self.fill_maps(self.outgoing_operations, self.outgoing_elements, source_type, storage_size)
-
- def fill_maps(self, operations_map, elements_map, key_type, size):
- if key_type not in operations_map:
- operations_map[key_type] = 0
- elements_map[key_type] = 0
- operations_map[key_type] = operations_map[key_type] + 1
- elements_map[key_type] = elements_map[key_type] + size
-
- def print_results(self):
- print "Storage Statistics (dot format):"
- print "================================"
- print "*/" # End the commend started in activate_statistics()
- print self.dot_string()
-
- def dot_string(self):
- # Return a string that is valid dot code and can be parsed by the graphviz dot utility.
- # Unfortunately, this is pretty complicated and messy... Sorry.
-
- result = "digraph G {"
- result += "loading_image [label=\"Image Loading\",shape=box];"
- result += "created_object [label=\"Object Creation\",shape=box];"
- for key in self.stats:
- operation_type = key[0]
- target_node = key[2]
- elements = self.stats[key][0]
- operations = self.stats[key][1]
- label_suffix = ""
- if operation_type == "Switched":
- source_node = key[1]
- percent_ops = float(operations) / float(self.incoming_operations[source_node]) * 100
- percent_elements = float(elements) / float(self.incoming_elements[source_node]) * 100
- label_suffix = "\n%d%% objects\n%d%% elements" % (int(percent_ops), int(percent_elements))
- elif operation_type == "Initialized":
- source_node = "created_object"
- elif operation_type == "Filledin":
- source_node = "loading_image"
- else:
- print "Could not handle storage operation %s" % operation_type
- continue
- result += "%s -> %s [label=\"%d objects\n%d elements per object%s\"];" % (source_node, target_node, operations, elements/operations, label_suffix)
- for type in self.incoming_operations:
- incoming_ops = self.incoming_operations[type]
- incoming_els = self.incoming_elements[type]
- label = "\nIncoming objects: %d" % incoming_ops
- label += "\nIncoming elements: %d" % incoming_els
- if type in self.outgoing_operations:
- remaining_ops = incoming_ops - self.outgoing_operations[type]
- remaining_els = incoming_els - self.outgoing_elements[type]
- else:
- remaining_ops = incoming_ops
- remaining_els = incoming_els
- percent_remaining_ops = float(remaining_ops) / incoming_ops * 100
- percent_remaining_els = float(remaining_els) / incoming_els * 100
- label += "\nRemaining objects: %d (%d%%)" % (remaining_ops, int(percent_remaining_ops))
- label += "\nRemaining elements: %d (%d%%)" % (remaining_els, int(percent_remaining_els))
- result += "%s [label=\"%s%s\"];" % (type, type, label)
- result += "}"
- return result
-
-class DetailedStatisticsCollector(StatisticsModule):
- # Value: list of numbers (sizes)
- import_from_mixin(StatisticsCollectorMixin)
-
- def initial_value(self): return []
- def increment_value(self, value_object, storage_size, container_classname):
- value_object.append(storage_size)
- def print_results(self):
- print "Detailed Storage Statistics:"
- for key in self.sorted_keys():
- print "\t%s: s" % (self.key_string(key), self.stats[key])
-
-# Static & global access to a StorageStatistics instance.
-
-_stats = StorageStatistics()
-_logger = StatisticsLogger()
-_collector = StatisticsCollector()
-_detailedcollector = DetailedStatisticsCollector()
-_dotcollector = DotStatisticsCollector()
-_histogramcollector = HistogramStatisticsCollector()
-
-def activate_statistics(log=False, statistics=False, detailed_statistics=False, dot=False, histogram=False):
- if log:
- _stats.add_module(_logger)
- if statistics:
- _stats.add_module(_collector)
- if detailed_statistics:
- _stats.add_module(_detailedcollector)
- if dot:
- _stats.add_module(_dotcollector)
- # Start a comment in order to make the entire output valid dot code. Hack.
- print "/*"
- if histogram:
- _stats.add_module(_histogramcollector)
-
-def print_statistics():
- _stats.print_results()
-
-def log(w_obj, operation, old_storage=None, log_classname=True):
- _stats.log(w_obj, operation, old_storage, log_classname)
diff --git a/spyvm/test/test_strategies.py b/spyvm/test/test_strategies.py
--- a/spyvm/test/test_strategies.py
+++ b/spyvm/test/test_strategies.py
@@ -1,5 +1,5 @@
import py
-from spyvm import wrapper, model, interpreter, shadow, storage_statistics
+from spyvm import wrapper, model, interpreter, shadow
from spyvm.error import WrapperException, FatalError
from .util import read_image, copy_to_module, cleanup_module
@@ -175,50 +175,3 @@
a.store(space, 1, space.wrap_int(2))
assert isinstance(a.shadow, shadow.ListStorageShadow)
check_arr(a, [1.2, 2, w_nil, w_nil, w_nil])
-
-def test_statistics_stats():
- col = storage_statistics.DetailedStatisticsCollector()
- stats = storage_statistics.StorageStatistics()
- col.storage_operation(stats.make_key("B", "old", "new"), 3, None)
- col.storage_operation(stats.make_key("B", "old", "new"), 4, None)
- col.storage_operation(stats.make_key("B", "old2", "new2"), 20, None)
- col.storage_operation(stats.make_key("B", "old", "new"), 5, None)
- col.storage_operation(stats.make_key("A", "old", "new"), 1, None)
- col.storage_operation(stats.make_key("A", "old", "new"), 2, None)
- col.storage_operation(stats.make_key("C", "old", "new"), 10, None)
- col.storage_operation(stats.make_key("C", "old", "new"), 11, None)
- keys = col.sorted_keys()
- assert keys == [ ("A", "old", "new"), ("B", "old", "new"), ("B", "old2", "new2"), ("C", "old", "new") ]
- assert col.stats[keys[0]] == [1, 2]
- assert col.stats[keys[1]] == [3, 4, 5]
- assert col.stats[keys[2]] == [20]
- assert col.stats[keys[3]] == [10, 11]
-
-def test_statistics_log():
- stats = storage_statistics.StorageStatistics()
- log = storage_statistics.StatisticsLogger()
- s = log.log_string(stats.make_key("Operation", "old_storage", "new_storage"), 22, "classname")
- assert s == "Operation (old_storage -> new_storage) of classname size 22"
- s = log.log_string(stats.make_key("InitialOperation", None, "some_new_storage"), 40, "a_classname")
- assert s == "InitialOperation (some_new_storage) of a_classname size 40"
-
-def test_statistics_stats_dot():
- col = storage_statistics.DotStatisticsCollector()
- stats = storage_statistics.StorageStatistics()
-
- col.storage_operation(stats.make_key("Switched", "old", "new"), 10, None)
- col.storage_operation(stats.make_key("Switched", "old", "new"), 10, None)
- col.storage_operation(stats.make_key("Switched", "new", "new2"), 10, None)
- col.storage_operation(stats.make_key("Switched", "old2", "new"), 5, None)
- col.storage_operation(stats.make_key("Initialized", None, "old"), 13, None)
- col.storage_operation(stats.make_key("Initialized", None, "old"), 10, None)
- col.storage_operation(stats.make_key("Initialized", None, "old"), 10, None)
- col.storage_operation(stats.make_key("Initialized", None, "old2"), 15, None)
- col.storage_operation(stats.make_key("Filledin", None, "old2"), 20, None)
- col.storage_operation(stats.make_key("Filledin", None, "new"), 10, None)
- col.storage_operation(stats.make_key("Filledin", None, "new"), 11, None)
-
- # The dot-code is correct, I checked ;)
- assert col.dot_string() == \
- 'digraph G {loading_image [label="Image Loading",shape=box];created_object [label="Object Creation",shape=box];created_object -> old2 [label="1 objects\n15 elements per object"];loading_image -> new [label="2 objects\n10 elements per object"];old -> new [label="2 objects\n10 elements per object\n66% objects\n60% elements"];loading_image -> old2 [label="1 objects\n20 elements per object"];created_object -> old [label="3 objects\n11 elements per object"];old2 -> new [label="1 objects\n5 elements per object\n50% objects\n14% elements"];new -> new2 [label="1 objects\n10 elements per object\n20% objects\n21% elements"];new2 [label="new2\nIncoming objects: 1\nIncoming elements: 10\nRemaining objects: 1 (100%)\nRemaining elements: 10 (100%)"];new [label="new\nIncoming objects: 5\nIncoming elements: 46\nRemaining objects: 4 (80%)\nRemaining elements: 36 (78%)"];old2 [label="old2\nIncoming objects: 2\nIncoming elements: 35\nRemaining objects: 1 (50%)\nRemaining elements: 30 (85%)"];old [label="old\nIncoming objects: 3\nIncoming elements: 33\nRemaining objects: 1 (33%)\nRemaining elements: 13 (39%)"];}'
-
\ No newline at end of file
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -6,7 +6,7 @@
from rpython.rlib import jit, rpath
from spyvm import model, interpreter, squeakimage, objspace, wrapper,\
- error, shadow, storage_statistics, constants
+ error, shadow, storage_logger, constants
from spyvm.tool.analyseimage import create_image
from spyvm.interpreter_proxy import VirtualMachine
@@ -130,11 +130,7 @@
-p|--poll_events
-ni|--no-interrupts
-d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
- --strategy-log
- --strategy-stats
- --strategy-stats-dot
- --strategy-stats-details
- --strategy-stats-histogram
+ -l|--storage-log
[image path, default: Squeak.image]
""" % (argv[0], constants.MAX_LOOP_DEPTH)
@@ -199,16 +195,8 @@
_arg_missing(argv, idx, arg)
max_stack_depth = int(argv[idx + 1])
idx += 1
- elif arg == "--strategy-log":
- storage_statistics.activate_statistics(log=True)
- elif arg == "--strategy-stats":
- storage_statistics.activate_statistics(statistics=True)
- elif arg == "--strategy-stats-dot":
- storage_statistics.activate_statistics(dot=True)
- elif arg == "--strategy-stats-histogram":
- storage_statistics.activate_statistics(histogram=True)
- elif arg == "--strategy-stats-details":
- storage_statistics.activate_statistics(statistics=True, detailed_statistics=True)
+ elif arg in ["-l", "--storage-log"]:
+ storage_logger.activate()
elif path is None:
path = argv[idx]
else:
@@ -245,7 +233,6 @@
else:
_run_image(interp)
result = 0
- storage_statistics.print_statistics()
return result
From noreply at buildbot.pypy.org Mon Jul 7 13:16:23 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:23 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added script to parse
storage-log and convert to summary or dot-string/graph.
Message-ID: <20140707111623.5E8FB1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r856:f4ae062456ae
Date: 2014-07-01 12:12 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/f4ae062456ae/
Log: Added script to parse storage-log and convert to summary or dot-
string/graph.
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
new file mode 100644
--- /dev/null
+++ b/spyvm/tool/storagelog_parser.py
@@ -0,0 +1,529 @@
+
+import re, sys, operator
+
+OPERATIONS = ["Filledin", "Initialized", "Switched"]
+
+# ====================================================================
+# ======== Basic functions
+# ====================================================================
+
+def parse(filename, flags):
+ entries = []
+ with open(filename, 'r', 1) as file:
+ while True:
+ line = file.readline()
+ if len(line) == 0:
+ break
+ entry = parse_line(line, flags)
+ if entry:
+ entries.append(entry)
+ return entries
+
line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)$")
+
+def parse_line(line, flags):
+ result = line_pattern.match(line)
+ if result is None:
+ if flags.verbose:
+ print "Could not parse line: %s" % line[:-1]
+ return None
+ operation = result.group('operation')
+ old_storage = result.group('old')
+ new_storage = result.group('new')
+ classname = result.group('classname')
+ size = result.group('size')
+ if old_storage is None:
+ if operation == "Filledin":
+ old_storage = " Image Loading Storage" # Space to be sorted to the beginning
+ elif operation == "Initialized":
+ old_storage = " Object Creation Storage"
+ else:
+ assert False, "old_storage has to be available in a Switched operation"
+ entry = LogEntry(operation, old_storage, new_storage, classname, size)
+ #entry.is_special =
+ return entry
+
+class LogEntry(object):
+
+ def __init__(self, operation, old_storage, new_storage, classname, size):
+ self.operation = str(operation)
+ self.old_storage = str(old_storage)
+ self.new_storage = str(new_storage)
+ self.classname = str(classname)
+ self.size = float(size)
+
+ def full_key(self):
+ return (self.operation, self.old_storage, self.new_storage)
+
+ def __str__(self):
+ old_storage_string = "%s -> " % self.old_storage if self.old_storage else ""
+ classname_string = " of %s" % self.classname if self.classname else ""
+ return "%s (%s%s)%s size %d" % (self.operation, old_storage_string, self.new_storage, classname_string, self.size)
+
+# ====================================================================
+# ======== Graph parsing
+# ====================================================================
+
+class Operations(object):
+
+ def __init__(self, objects=0, slots=0):
+ self.objects = objects
+ self.slots = slots
+
+ def __str__(self, total=None):
+ if self.objects == 0:
+ avg_slots = 0
+ else:
+ avg_slots = float(self.slots) / self.objects
+ if total is not None and total.slots != 0:
+ percent_slots = " (%.1f%%)" % (float(self.slots)*100 / total.slots)
+ else:
+ percent_slots = ""
+ if total is not None and total.objects != 0:
+ percent_objects = " (%.1f%%)" % (float(self.objects)*100 / total.objects)
+ else:
+ percent_objects = ""
+ return "%d%s slots in %d%s objects (avg size: %.1f)" % (self.slots, percent_slots, self.objects, percent_objects, avg_slots)
+
+ def __repr__(self):
+ return "%s(%s)" % (self.__str__(), object.__repr__(self))
+
+ def add_log_entry(self, entry):
+ self.slots = self.slots + entry.size
+ self.objects = self.objects + 1
+
+ def __sub__(self, other):
+ return Operations(self.objects - other.objects, self.slots - other.slots)
+
+ def __add__(self, other):
+ return Operations(self.objects + other.objects, self.slots + other.slots)
+
+ def __lt__(self, other):
+ return self.slots < other.slots
+
+ def empty(self):
+ return self.objects == 0 and self.slots == 0
+
+ def prefixprint(self, key="", total=None):
+ if not self.empty():
+ print "%s%s" % (key, self.__str__(total))
+
+class ClassOperations(object):
+
+ def __init__(self):
+ self.classes = {}
+
+ def cls(self, name):
+ if name not in self.classes:
+ self.classes[name] = Operations()
+ return self.classes[name]
+
+ def total(self):
+ return reduce(operator.add, self.classes.values(), Operations())
+
+ def __str__(self):
+ return "ClassOperations(%s)" % self.classes
+
+ def __repr__(self):
+ return "%s(%s)" % (self.__str__(), object.__repr__(self))
+
+ def __add__(self, other):
+ result = ClassOperations()
+ result.classes = dict(self.classes)
+ for classname, other_class in other.classes.items():
+ result.cls(classname) # Make sure exists.
+ result.classes[classname] += other_class
+ return result
+
+ def __sub__(self, other):
+ result = ClassOperations()
+ result.classes = dict(self.classes)
+ for classname, other_class in other.classes.items():
+ result.cls(classname) # Make sure exists.
+ result.classes[classname] -= other_class
+ return result
+
+class StorageEdge(object):
+
+ def __init__(self, operation="None", origin=None, target=None):
+ assert operation == "None" or operation in OPERATIONS, "Unknown operation %s" % operation
+ self.operation = operation
+ self.classes = ClassOperations()
+ self.origin = origin
+ self.target = target
+
+ def full_key(self):
+ return (self.operation, self.origin.name, self.target.name)
+
+ def cls(self, classname):
+ return self.classes.cls(classname)
+
+ def total(self):
+ return self.classes.total()
+
+ def notify_nodes(self):
+ self.origin.note_outgoing(self)
+ self.target.note_incoming(self)
+
+ def add_log_entry(self, entry):
+ self.cls(entry.classname).add_log_entry(entry)
+
+ def __str__(self):
+ return "[%s %s -> %s]" % (self.operation, self.origin, self.target)
+
+ def __repr__(self):
+ return "%s(%s)" % (self.__str__(), object.__repr__(self))
+
+ def __add__(self, other):
+ origin = self.origin if self.origin is not None else other.origin
+ target = self.target if self.target is not None else other.target
+ result = StorageEdge(self.operation, origin, target)
+ result.classes += self.classes + other.classes
+ return result
+
+ def __sub__(self, other):
+ origin = self.origin if self.origin is not None else other.origin
+ target = self.target if self.target is not None else other.target
+ result = StorageEdge(self.operation, origin, target)
+ result.classes += self.classes - other.classes
+ return result
+
class StorageNode(object):
    """A node of the storage graph; tracks its incoming and outgoing edges.

    Nodes compare by name (for stable sorted output) and can be merged with
    ``+``, which combines the edge sets of both operands.
    """

    def __init__(self, name):
        self.name = name
        self.incoming = set()  # edges whose target is this node
        self.outgoing = set()  # edges whose origin is this node

    def note_incoming(self, edge):
        """Record *edge* as incoming; its target must already be this node."""
        assert edge.target is self
        # set.add is a no-op for members, so no membership test is needed.
        self.incoming.add(edge)

    def note_outgoing(self, edge):
        """Record *edge* as outgoing; its origin must already be this node."""
        assert edge.origin is self
        self.outgoing.add(edge)

    def incoming_edges(self, operation):
        """All incoming edges carrying *operation*."""
        return [edge for edge in self.incoming if edge.operation == operation]

    def outgoing_edges(self, operation):
        """All outgoing edges carrying *operation*."""
        return [edge for edge in self.outgoing if edge.operation == operation]

    def sum_incoming(self, operation):
        """Sum of all incoming edges for *operation* (empty sum is a bare edge)."""
        total = StorageEdge(operation)
        for edge in self.incoming_edges(operation):
            total = total + edge
        return total

    def sum_outgoing(self, operation):
        """Sum of all outgoing edges for *operation*."""
        total = StorageEdge(operation)
        for edge in self.outgoing_edges(operation):
            total = total + edge
        return total

    def sum_all_incoming(self):
        """Sum of every incoming edge, regardless of operation."""
        total = StorageEdge()
        for edge in self.incoming:
            total = total + edge
        return total

    def sum_all_outgoing(self):
        """Sum of every outgoing edge, regardless of operation."""
        total = StorageEdge()
        for edge in self.outgoing:
            total = total + edge
        return total

    def __str__(self):
        return self.name

    def __repr__(self):
        return "%s(%s)" % (str(self), object.__repr__(self))

    def merge_edge_sets(self, set1, set2, key_slot):
        """Union two edge sets, combining (with +) edges that share the
        attribute named *key_slot* (either "origin" or "target")."""
        merged = {}
        for edge in set1:
            merged[edge.__dict__[key_slot]] = edge
        for edge in set2:
            key = edge.__dict__[key_slot]
            if key in merged:
                merged[key] = merged[key] + edge
            else:
                merged[key] = edge
        return set(merged.values())

    def __add__(self, other):
        """Merge two nodes into a fresh node holding the combined edge sets."""
        combined = StorageNode("%s_%s" % (self.name, other.name))
        combined.incoming = self.merge_edge_sets(self.incoming, other.incoming, "origin")
        # NOTE(review): this retargets shared edge objects in place (the
        # original marked it "TODO bullshit code"); the incoming pass must
        # run before the outgoing merge, which reads edge.target.
        for edge in combined.incoming:
            edge.target = combined
        combined.outgoing = self.merge_edge_sets(self.outgoing, other.outgoing, "target")
        for edge in combined.outgoing:
            edge.origin = combined
        return combined

    def __lt__(self, other):
        return self.name < other.name
+
class StorageGraph(object):
    """Graph of storage transitions: nodes are storage kinds, edges carry
    per-operation statistics keyed by (operation, origin name, target name)."""

    def __init__(self):
        self.nodes = {}  # name -> StorageNode
        self.edges = {}  # full_key tuple -> StorageEdge

    def node(self, name):
        """Return the node called *name*, creating it on first use."""
        if name not in self.nodes:
            self.nodes[name] = StorageNode(name)
        return self.nodes[name]

    def assert_sanity(self):
        """Verify node/edge cross-references are mutually consistent."""
        visited_edges = set()
        for node in self.nodes.values():
            for edge in node.incoming:
                assert edge in self.edges.values(), "Edge not in graph's edges: %s" % edge
                visited_edges.add(edge)
                if edge.target is not node:
                    print("Wrong edge target: %s\nIncoming edge: %s\nIn node: %s" % (edge.target, edge, node))
                    assert False
                if edge not in edge.origin.outgoing:
                    print("Edge not in origin's outgoing: %s\nIncoming edge: %s\nIn node: %s" % (edge.origin.outgoing, edge, node))
                    assert False
            for edge in node.outgoing:
                assert edge in self.edges.values(), "Edge not in graph's edges: %s" % edge
                visited_edges.add(edge)
                if edge.origin is not node:
                    print("Wrong edge origin: %s\nOutgoing edge: %s\nIn node: %s" % (edge.origin, edge, node))
                    assert False
                if edge not in edge.target.incoming:
                    print("Edge not in origin's incoming: %s\nOutgoing edge: %s\nIn node: %s" % (edge.target.incoming, edge, node))
                    assert False
        assert len(visited_edges) == len(self.edges.values()), "Not all of graph's edges visited."

    def add_log_entry(self, log_entry):
        """Accumulate one parsed log entry, creating nodes/edges on demand."""
        key = log_entry.full_key()
        if key not in self.edges:
            edge = StorageEdge(log_entry.operation, self.node(log_entry.old_storage), self.node(log_entry.new_storage))
            self.edges[key] = edge
            edge.notify_nodes()
        self.edges[key].add_log_entry(log_entry)

    def collapse_nodes(self, collapsed_nodes, new_name=None):
        """Merge *collapsed_nodes* into one summed node and rewire all edges.

        The merged node keeps the concatenated name unless *new_name* is given.
        """
        # Bug fix: reduce() below raises TypeError on an empty sequence, so
        # collapsing nothing must be a no-op.
        if len(collapsed_nodes) == 0:
            return
        for node in collapsed_nodes:
            del self.nodes[node.name]
            for edge in node.incoming:
                del self.edges[edge.full_key()]
            for edge in node.outgoing:
                del self.edges[edge.full_key()]
        new_node = reduce(operator.add, collapsed_nodes)
        if new_name is not None:
            new_node.name = new_name
        self.nodes[new_node.name] = new_node
        # TODO bullshit code
        # Rewire the neighbours of every collapsed node to the merged edges.
        for node in collapsed_nodes:
            for edge in node.incoming:
                edge.origin.outgoing.remove(edge)
                new_edges = [e for e in new_node.incoming if e.origin == edge.origin]
                assert len(new_edges) == 1
                edge.origin.outgoing.add(new_edges[0])
            for edge in node.outgoing:
                edge.target.incoming.remove(edge)
                new_edges = [e for e in new_node.outgoing if e.target == edge.target]
                assert len(new_edges) == 1
                edge.target.incoming.add(new_edges[0])
        for edge in new_node.incoming:
            self.edges[edge.full_key()] = edge
        for edge in new_node.outgoing:
            self.edges[edge.full_key()] = edge
        self.assert_sanity()

    def split_nodes(self, new_name=None):
        """Collapse every node whose name lacks "Storage" into one merged node."""
        nodes = [node for node in self.nodes.values() if "Storage" not in node.name]
        self.collapse_nodes(nodes, new_name)

    def sorted_nodes(self):
        """All nodes, sorted by name (StorageNode.__lt__)."""
        return sorted(self.nodes.values())
+
def make_graph(entries):
    """Build a sanity-checked StorageGraph from parsed log *entries*."""
    graph = StorageGraph()
    for entry in entries:
        graph.add_log_entry(entry)
    graph.assert_sanity()
    return graph
+
+# ====================================================================
+# ======== Command - Summarize log content
+# ====================================================================
+
def command_summarize(entries, flags):
    """Command entry point: print the per-node storage summary."""
    print_summary(entries, flags)
+
def print_summary(entries, flags):
    """Build the graph from *entries* and print a summary of every node.

    Unless flags.allstorage is set, non-Storage nodes are collapsed first.
    """
    graph = make_graph(entries)
    if not flags.allstorage:
        graph.split_nodes()
    for storage_node in graph.sorted_nodes():
        storage_node.print_summary(flags)
+
def StorageNode_print_summary(self, flags):
    """Print incoming/outgoing statistics for this node, per operation.

    With flags.detailed every edge is listed individually; otherwise edges are
    summed per operation. The final line shows incoming minus outgoing.
    """
    print("\n%s:" % self.name)
    # Renamed from `sum` to avoid shadowing the builtin.
    remaining = StorageEdge()
    total_incoming = self.sum_all_incoming().total() if flags.percent else None

    print("\tIncoming:")
    for operation in OPERATIONS:
        if flags.detailed:
            edges = [(edge.origin.name, edge) for edge in self.incoming_edges(operation)]
        else:
            edges = [(operation, self.sum_incoming(operation))]
        for edgename, edge in edges:
            edge.print_with_name("\t\t\t", edgename, total_incoming, flags)
            remaining += edge

    print("\tOutgoing:")
    for operation in OPERATIONS:
        if flags.detailed:
            edges = [(edge.target.name, edge) for edge in self.outgoing_edges(operation)]
        else:
            edges = [(operation, self.sum_outgoing(operation))]
        for edgename, edge in edges:
            edge.print_with_name("\t\t\t", edgename, total_incoming, flags)
            remaining -= edge

    remaining.print_with_name("\t", "Remaining", total_incoming, flags)

StorageNode.print_summary = StorageNode_print_summary
+
def StorageEdge_print_with_name(self, prefix, edgename, total_reference, flags):
    """Print this edge's statistics, optionally broken down per class.

    With flags.classes, one indented line per class is printed, largest
    statistics first; otherwise a single line with the edge's total.
    """
    if flags.classes:
        print("%s%s:" % (prefix, edgename))
        prefix += "\t\t"
        rows = sorted(self.classes.classes.items(),
                      reverse=True, key=operator.itemgetter(1))
    else:
        rows = [(edgename, self.total())]
    for classname, stats in rows:
        stats.prefixprint("%s%s: " % (prefix, classname), total_reference)

StorageEdge.print_with_name = StorageEdge_print_with_name
+
+# ====================================================================
+# ======== Command - DOT output
+# ====================================================================
+
+# Output is valid dot code and can be parsed by the graphviz dot utility.
+def command_print_dot(entries, flags):
+ graph = make_graph(entries)
+ print "/*"
+ print "Storage Statistics (dot format):"
+ print "================================"
+ print "*/"
+ print dot_string(graph, flags)
+
def command_dot(entries, flags):
    """Render the graph to <logfile>.jpg via the external graphviz `dot` tool."""
    import subprocess
    dot_source = dot_string(make_graph(entries), flags)
    command = ["dot", "-Tjpg", "-o%s.jpg" % flags.logfile]
    print("Running:\n%s" % " ".join(command))
    proc = subprocess.Popen(command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # communicate() feeds the dot source on stdin and collects all output.
    print(proc.communicate(input=dot_source)[0])
+
def dot_string(graph, flags):
    """Render *graph* as graphviz dot source and return it as a string.

    Nodes are labelled with incoming/remaining object and slot counts; edges
    with the transferred totals. With flags.percent, values additionally get
    percentages relative to the origin node's incoming totals.
    """
    result = "digraph G {"
    incoming_cache = {}
    if not flags.allstorage:
        graph.split_nodes("Other")

    for node in graph.nodes.values():
        incoming = node.sum_all_incoming().total()
        outgoing = node.sum_all_outgoing().total()
        remaining = incoming - outgoing
        if remaining.objects < 0:
            # TODO This is a special node. Hacky way to find out.
            incoming_cache[node.name] = outgoing
            shape = ",shape=box"
            label = "\nObjects: %d" % outgoing.objects
            label += "\nSlots: %d" % outgoing.slots
        else:
            incoming_cache[node.name] = incoming
            shape = ""
            label = "\nIncoming objects: %d" % incoming.objects
            label += "\nIncoming elements: %d" % incoming.slots
            if flags.percent and incoming.objects != 0:
                # Bug fix: float() avoids integer truncation of the
                # percentages, matching the float-based edge percentages below.
                percent_remaining_objects = " (%.1f%%)" % (float(remaining.objects) * 100 / incoming.objects)
                percent_remaining_slots = " (%.1f%%)" % (float(remaining.slots) * 100 / incoming.slots)
            else:
                percent_remaining_objects = percent_remaining_slots = ""
            label += "\nRemaining objects: %d%s" % (remaining.objects, percent_remaining_objects)
            label += "\nRemaining elements: %d%s" % (remaining.slots, percent_remaining_slots)
        result += "%s [label=\"%s%s\"%s];" % (node.name.replace(" ", "_"), node.name, label, shape)

    for edge in graph.edges.values():
        total = edge.total()
        str_objects = "%d objects" % total.objects
        str_slots = "%d slots" % total.slots
        incoming = incoming_cache[edge.origin.name]
        if flags.percent and incoming.objects != 0:
            str_objects += " (%.1f%%)" % (float(total.objects) * 100 / incoming.objects)
            str_slots += " (%.1f%%)" % (float(total.slots) * 100 / incoming.slots)

        target_node = edge.target.name.replace(" ", "_")
        source_node = edge.origin.name.replace(" ", "_")
        # NOTE(review): divides by total.objects — presumably every logged edge
        # carries at least one object; confirm against the log producer.
        result += "%s -> %s [label=\"%s\n%s\n%d slots per object\"];" % (source_node, target_node, str_objects, str_slots, total.slots / total.objects)

    result += "}"
    return result
+
+# ====================================================================
+# ======== Main
+# ====================================================================
+
def command_print_entries(entries, flags=None):
    """Command entry point: print every parsed log entry.

    Bug fix: main() dispatches every command as func(entries, flags), so the
    missing *flags* parameter made this command raise TypeError. It is
    accepted (and ignored) with a default for backward compatibility.
    """
    for entry in entries:
        print(entry)
+
class Flags(object):
    """Turns short command-line switches into boolean attributes.

    Built from (attribute_name, short_switch) pairs; each attribute starts out
    False and flips to True when its switch is passed to handle().
    """

    def __init__(self, flags):
        self.flags = {}  # short switch -> attribute name
        for name, short in flags:
            setattr(self, name, False)
            self.flags[short] = name

    def handle(self, arg):
        """If *arg* is a known switch, set its attribute and report success."""
        name = self.flags.get(arg)
        if name is None:
            return False
        setattr(self, name, True)
        return True

    def __str__(self):
        pairs = ["%s (%s)" % pair for pair in self.flags.items()]
        return "[%s]" % " | ".join(pairs)
+
def usage(flags, commands):
    """Print usage help and terminate the process with exit status 1."""
    print("Arguments: logfile command %s" % flags)
    print("Available commands: %s" % commands)
    # sys.exit instead of the site-provided exit(): also works under
    # `python -S`, and is the conventional way to end a script.
    sys.exit(1)
+
def main(argv):
    """Entry point: argv is "logfile command [switches...]".

    Commands are discovered by the ``command_`` naming convention in this
    module; unknown commands or switches print usage and exit.
    """
    flags = Flags([
        ('verbose', '-v'),
        ('percent', '-p'),
        ('allstorage', '-a'),
        ('detailed', '-d'),
        ('classes', '-c'),
    ])

    command_prefix = "command_"
    module = sys.modules[__name__].__dict__
    commands = [name[len(command_prefix):] for name in module.keys()
                if name.startswith(command_prefix)]

    if len(argv) < 2:
        usage(flags, commands)
    logfile, command = argv[0], argv[1]
    flags.logfile = logfile
    for switch in argv[2:]:
        if not flags.handle(switch):
            usage(flags, commands)
    if command not in commands:
        usage(flags, commands)

    func = module[command_prefix + command]
    func(parse(logfile, flags), flags)

if __name__ == "__main__":
    main(sys.argv[1:])
From noreply at buildbot.pypy.org Mon Jul 7 13:16:24 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:24 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed storage_logger.py
Message-ID: <20140707111624.889F41C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r857:48cb3f9eda6e
Date: 2014-07-01 12:58 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/48cb3f9eda6e/
Log: Fixed storage_logger.py
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -1,12 +1,16 @@
-# Put flag in a list to make it modifyable after compile time.
-_active = [False]
+# Put flag in an object to make it modifyable after compile time.
+class LoggerActive(object):
+ def __init__(self):
+ self.active = False
+
+_active = LoggerActive()
def activate():
- _active[0] = True
+ _active.active = True
def log(w_obj, operation, old_storage_object=None, log_classname=True):
- if not _active[0]:
+ if not _active.active:
return
# Gather information to be logged
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -297,6 +297,8 @@
self.edges[key].add_log_entry(log_entry)
def collapse_nodes(self, collapsed_nodes, new_name=None):
+ if len(collapsed_nodes) == 0:
+ return
for node in collapsed_nodes:
del self.nodes[node.name]
for edge in node.incoming:
From noreply at buildbot.pypy.org Mon Jul 7 13:16:25 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:25 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added binary storage-log
output & parsing to shrink the logfile.
Message-ID: <20140707111625.BE7E21C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r858:1f520c5d82db
Date: 2014-07-01 16:23 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/1f520c5d82db/
Log: Added binary storage-log output & parsing to shrink the logfile.
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -1,16 +1,20 @@
-# Put flag in an object to make it modifyable after compile time.
-class LoggerActive(object):
+import sys
+
+# Put flags in an object to make it modifyable after compile time.
+class LoggerOptions(object):
def __init__(self):
self.active = False
+ self.binary = False
-_active = LoggerActive()
+_options = LoggerOptions()
-def activate():
- _active.active = True
+def activate(binary = False):
+ _options.active = True
+ _options.binary = binary
def log(w_obj, operation, old_storage_object=None, log_classname=True):
- if not _active.active:
+ if not _options.active:
return
# Gather information to be logged
@@ -25,7 +29,61 @@
else:
classname = None
- # Construct and print the logstring
+ if _options.binary:
+ binary_output(operation, old_storage, new_storage, classname, size)
+ else:
+ output(operation, old_storage, new_storage, classname, size)
+
+def output(operation, old_storage, new_storage, classname, size):
+ # Construct and print a simple logstring
old_storage_string = "%s -> " % old_storage if old_storage else ""
classname_string = " of %s" % classname if classname else ""
print "%s (%s%s)%s size %d" % (operation, old_storage_string, new_storage, classname_string, size)
+
+operation_map = {
+ "Filledin": 1,
+ "Initialized": 2,
+ "Switched": 3,
+}
+
+storage_map = {
+ "AllNilStorageShadow": 1,
+ "SmallIntegerOrNilStorageShadow": 2,
+ "FloatOrNilStorageShadow": 3,
+ "ListStorageShadow": 4,
+ "WeakListStorageShadow": 5,
+ "ClassShadow": 6,
+ "MethodDictionaryShadow": 7,
+ "BlockContextShadow": 8,
+ "MethodContextShadow": 9,
+ "CachedObjectShadow": 10,
+ "ObserveeShadow": 11,
+ None: 12,
+}
+
+def binary_output(operation, old_storage, new_storage, classname, size):
+ # Output a byte-coded log entry
+ bytes = bytearray()
+
+ # First 3 bytes: operation, old_storage, new_storage
+ assert operation in operation_map, "Cannot handle operation %s" % operation
+ bytes.append(operation_map[operation])
+ assert old_storage in storage_map, "Cannot handle old-storage type %s" % old_storage
+ bytes.append(storage_map[old_storage])
+ assert new_storage in storage_map, "Cannot handle new-storage type %s" % new_storage
+ bytes.append(storage_map[new_storage])
+
+ # Next: 2 bytes encoding object size (big endian)
+ assert size < 2**16, "Object of type %s too large (size %d)" % (classname, size)
+ mask = (1<<8)-1
+ bytes.append(size & mask)
+ mask = mask<<8
+ bytes.append((size & mask) >> 8)
+
+ # Next: classname string plus terminating null-character
+ if classname:
+ for c in classname:
+ bytes.append(c)
+ bytes.append(0)
+
+ sys.stdout.write(bytes)
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -1,24 +1,77 @@
import re, sys, operator
+import spyvm.storage_logger
OPERATIONS = ["Filledin", "Initialized", "Switched"]
+# Reverse the two maps used to encode the byte encoded log-output
+storage_map = {v:k for k, v in spyvm.storage_logger.storage_map.items()}
+operation_map = {v:k for k, v in spyvm.storage_logger.operation_map.items()}
+
# ====================================================================
# ======== Basic functions
# ====================================================================
+def filesize(file):
+ import os
+ return os.path.getsize(file.name)
+
def parse(filename, flags):
entries = []
with open(filename, 'r', 1) as file:
- while True:
- line = file.readline()
- if len(line) == 0:
- break
- entry = parse_line(line, flags)
- if entry:
- entries.append(entry)
+ if flags.binary:
+ while True:
+ try:
+ entry = parse_binary(file)
+ if entry == None:
+ if flags.verbose:
+ tell = file.tell()
+ format = (tell, len(entries), filesize(file) - tell)
+ print "Stopped parsing after %d bytes (%d entries). Ignoring leftover %d bytes." % format
+ break
+ else:
+ entries.append(entry)
+ except:
+ print "Exception while parsing file, after %d bytes (%d entries)" % (file.tell(), len(entries))
+ raise
+ else:
+ while True:
+ line = file.readline()
+ if len(line) == 0:
+ break
+ entry = parse_line(line, flags)
+ if entry:
+ entries.append(entry)
return entries
+def parse_binary(file):
+ # First 3 bytes: operation, old storage, new storage
+ header = file.read(3)
+ operation_byte = ord(header[0])
+ old_storage_byte = ord(header[1])
+ new_storage_byte = ord(header[2])
+ # This is the only way to check if we are reading a correct log entry
+ if operation_byte not in operation_map or old_storage_byte not in storage_map or new_storage_byte not in storage_map:
+ return None
+ operation = operation_map[operation_byte]
+ old_storage = storage_map[old_storage_byte]
+ new_storage = storage_map[new_storage_byte]
+
+ # Next 2 bytes: object size (big endian)
+ size_bytes = file.read(2)
+ size = int(ord(size_bytes[0]) + (ord(size_bytes[1])<<8))
+
+ # Last: classname, nul-terminated
+ classname = ""
+ while True:
+ byte = file.read(1)
+ if byte == chr(0):
+ break
+ classname += byte
+ if len(classname) == 0:
+ classname = None
+ return LogEntry(operation, old_storage, new_storage, classname, size)
+
line_pattern = re.compile("^(?P\w+) \(((?P\w+) -> )?(?P\w+)\)( of (?P.+))? size (?P[0-9]+)$")
def parse_line(line, flags):
@@ -32,25 +85,24 @@
new_storage = result.group('new')
classname = result.group('classname')
size = result.group('size')
- if old_storage is None:
- if operation == "Filledin":
- old_storage = " Image Loading Storage" # Space to be sorted to the beginning
- elif operation == "Initialized":
- old_storage = " Object Creation Storage"
- else:
- assert False, "old_storage has to be available in a Switched operation"
- entry = LogEntry(operation, old_storage, new_storage, classname, size)
- #entry.is_special =
- return entry
+ return LogEntry(operation, old_storage, new_storage, classname, size)
class LogEntry(object):
def __init__(self, operation, old_storage, new_storage, classname, size):
self.operation = str(operation)
- self.old_storage = str(old_storage)
self.new_storage = str(new_storage)
self.classname = str(classname)
self.size = float(size)
+
+ if old_storage is None:
+ if operation == "Filledin":
+ old_storage = " Image Loading Storage" # Space to be sorted to the beginning
+ elif operation == "Initialized":
+ old_storage = " Object Creation Storage"
+ else:
+ assert False, "old_storage has to be available in a Switched operation"
+ self.old_storage = str(old_storage)
def full_key(self):
return (self.operation, self.old_storage, self.new_storage)
@@ -471,7 +523,7 @@
# ======== Main
# ====================================================================
-def command_print_entries(entries):
+def command_print_entries(entries, flags):
for e in entries:
print e
@@ -506,6 +558,7 @@
('allstorage', '-a'),
('detailed', '-d'),
('classes', '-c'),
+ ('binary', '-b'),
])
command_prefix = "command_"
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -131,6 +131,7 @@
-ni|--no-interrupts
-d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
-l|--storage-log
+ -lb|--storage-log-binary (output should be redirected to file)
[image path, default: Squeak.image]
""" % (argv[0], constants.MAX_LOOP_DEPTH)
@@ -197,6 +198,8 @@
idx += 1
elif arg in ["-l", "--storage-log"]:
storage_logger.activate()
+ elif arg in ["-lb", "--storage-log-binary"]:
+ storage_logger.activate(binary=True)
elif path is None:
path = argv[idx]
else:
From noreply at buildbot.pypy.org Mon Jul 7 13:16:26 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:26 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed storage_logger.py for
RPython, slightly improved dot-graph.
Message-ID: <20140707111626.E38A51C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r859:519def1169d7
Date: 2014-07-01 16:51 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/519def1169d7/
Log: Fixed storage_logger.py for RPython, slightly improved dot-graph.
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -1,5 +1,3 @@
-
-import sys
# Put flags in an object to make it modifyable after compile time.
class LoggerOptions(object):
@@ -63,7 +61,7 @@
def binary_output(operation, old_storage, new_storage, classname, size):
# Output a byte-coded log entry
- bytes = bytearray()
+ bytes = [] # bytearray()
# First 3 bytes: operation, old_storage, new_storage
assert operation in operation_map, "Cannot handle operation %s" % operation
@@ -81,9 +79,14 @@
bytes.append((size & mask) >> 8)
# Next: classname string plus terminating null-character
+ i = 5
if classname:
for c in classname:
- bytes.append(c)
+ bytes.append(ord(c))
+ i += 1
bytes.append(0)
- sys.stdout.write(bytes)
+ # No simpler way for RPython's sake.
+ import os
+ for b in bytes:
+ os.write(1, chr(b))
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -293,7 +293,7 @@
return set(set_dict.values())
def __add__(self, other):
- result = StorageNode("%s_%s" % (self.name, other.name))
+ result = StorageNode("%s %s" % (self.name, other.name))
result.incoming = self.merge_edge_sets(self.incoming, other.incoming, "origin")
# TODO bullshit code
for edge in result.incoming:
@@ -493,14 +493,17 @@
incoming_cache[node.name] = incoming
shape = ""
label = "\nIncoming objects: %d" % incoming.objects
- label += "\nIncoming elements: %d" % incoming.slots
- if flags.percent and incoming.objects != 0:
- percent_remaining_objects = " (%.1f%%)" % (remaining.objects * 100 / incoming.objects)
- percent_remaining_slots = " (%.1f%%)" % (remaining.slots * 100 / incoming.slots)
+ label += "\nIncoming slots: %d" % incoming.slots
+ if remaining.objects == incoming.objects:
+ label += "\n(All remaining)"
else:
- percent_remaining_objects = percent_remaining_slots = ""
- label += "\nRemaining objects: %d%s" % (remaining.objects, percent_remaining_objects)
- label += "\nRemaining elements: %d%s" % (remaining.slots, percent_remaining_slots)
+ if flags.percent and incoming.objects != 0:
+ percent_remaining_objects = " (%.1f%%)" % (remaining.objects * 100 / incoming.objects)
+ percent_remaining_slots = " (%.1f%%)" % (remaining.slots * 100 / incoming.slots)
+ else:
+ percent_remaining_objects = percent_remaining_slots = ""
+ label += "\nRemaining objects: %d%s" % (remaining.objects, percent_remaining_objects)
+ label += "\nRemaining slots: %d%s" % (remaining.slots, percent_remaining_slots)
result += "%s [label=\"%s%s\"%s];" % (node.name.replace(" ", "_"), node.name, label, shape)
for edge in graph.edges.values():
@@ -514,7 +517,7 @@
target_node = edge.target.name.replace(" ", "_")
source_node = edge.origin.name.replace(" ", "_")
- result += "%s -> %s [label=\"%s\n%s\n%d slots per object\"];" % (source_node, target_node, str_objects, str_slots, total.slots / total.objects)
+ result += "%s -> %s [label=\"%s\n%s\n%.1f slots per object\"];" % (source_node, target_node, str_objects, str_slots, total.slots / total.objects)
result += "}"
return result
From noreply at buildbot.pypy.org Mon Jul 7 13:16:28 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:28 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Using 4 bytes to encode
object size due to large objects in Squeak image.
Message-ID: <20140707111628.213A41C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r860:a5b744a0ec04
Date: 2014-07-01 17:21 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/a5b744a0ec04/
Log: Using 4 bytes to encode object size due to large objects in Squeak
image.
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -71,19 +71,26 @@
assert new_storage in storage_map, "Cannot handle new-storage type %s" % new_storage
bytes.append(storage_map[new_storage])
- # Next: 2 bytes encoding object size (big endian)
- assert size < 2**16, "Object of type %s too large (size %d)" % (classname, size)
+ # Next: 4 bytes encoding object size (big endian)
+ # Assert not compiling in RPython
+ # assert size < 2**32, "Object of type %s too large (size %d)" % (classname, size)
mask = (1<<8)-1
- bytes.append(size & mask)
+ shift = 0
+ bytes.append((size & mask) >> shift)
mask = mask<<8
- bytes.append((size & mask) >> 8)
+ shift += 8
+ bytes.append((size & mask) >> shift)
+ mask = mask<<8
+ shift += 8
+ bytes.append((size & mask) >> shift)
+ mask = mask<<8
+ shift += 8
+ bytes.append((size & mask) >> shift)
# Next: classname string plus terminating null-character
- i = 5
if classname:
for c in classname:
bytes.append(ord(c))
- i += 1
bytes.append(0)
# No simpler way for RPython's sake.
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -57,9 +57,9 @@
old_storage = storage_map[old_storage_byte]
new_storage = storage_map[new_storage_byte]
- # Next 2 bytes: object size (big endian)
- size_bytes = file.read(2)
- size = int(ord(size_bytes[0]) + (ord(size_bytes[1])<<8))
+ # Next 4 bytes: object size (big endian)
+ size_bytes = file.read(4)
+ size = int(ord(size_bytes[0]) + (ord(size_bytes[1])<<8) + (ord(size_bytes[2])<<16) + (ord(size_bytes[3])<<24))
# Last: classname, nul-terminated
classname = ""
From noreply at buildbot.pypy.org Mon Jul 7 13:16:29 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:29 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added "-" as special filename
meaning that stdin will be parsed.
Message-ID: <20140707111629.353C61C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r861:d8200654841f
Date: 2014-07-02 12:41 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/d8200654841f/
Log: Added "-" as special filename meaning that stdin will be parsed.
Aggregating parsed lines directly, instead of collecting them. Added
thousand-separators to output.
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -1,5 +1,5 @@
-import re, sys, operator
+import re, os, sys, operator
import spyvm.storage_logger
OPERATIONS = ["Filledin", "Initialized", "Switched"]
@@ -9,30 +9,35 @@
operation_map = {v:k for k, v in spyvm.storage_logger.operation_map.items()}
# ====================================================================
-# ======== Basic functions
+# ======== Logfile parsing
# ====================================================================
-def filesize(file):
- import os
- return os.path.getsize(file.name)
-
-def parse(filename, flags):
- entries = []
- with open(filename, 'r', 1) as file:
+def parse(filename, flags, callback):
+ parsed_entries = 0
+ if filename == "-":
+ opener = lambda: sys.stdin
+ else:
+ opener = lambda: open(filename, 'r', 1)
+ with opener() as file:
if flags.binary:
while True:
try:
entry = parse_binary(file)
if entry == None:
if flags.verbose:
- tell = file.tell()
- format = (tell, len(entries), filesize(file) - tell)
- print "Stopped parsing after %d bytes (%d entries). Ignoring leftover %d bytes." % format
+ if file is sys.stdin:
+ print "Stopped after parsing %d entries." % parsed_entries
+ else:
+ tell = file.tell()
+ format = (tell, parsed_entries, os.path.getsize(file.name) - tell)
+ print "Stopped parsing after %d bytes (%d entries). Ignoring leftover %d bytes." % format
break
else:
- entries.append(entry)
+ parsed_entries += 1
+ callback(entry)
except:
- print "Exception while parsing file, after %d bytes (%d entries)" % (file.tell(), len(entries))
+ tell = 0 if file is sys.stdin else file.tell()
+ print "Exception while parsing file, after %d bytes (%d entries)" % (tell, len(entries))
raise
else:
while True:
@@ -41,30 +46,48 @@
break
entry = parse_line(line, flags)
if entry:
- entries.append(entry)
- return entries
+ parsed_entries += 1
+ callback(entry)
+ return parsed_entries
+
+def safe_read(file, size):
+ result = file.read(size)
+ retries = 20
+ # Try to work around stdin's unpredictability
+ while len(result) < size:
+ result += file.read(size - len(result))
+ retries -= 1
+ if retries < 0:
+ return None
+ import time
+ time.sleep(0.001)
+ return result
def parse_binary(file):
# First 3 bytes: operation, old storage, new storage
- header = file.read(3)
+ header = safe_read(file, 3)
+ if header is None: return None
operation_byte = ord(header[0])
old_storage_byte = ord(header[1])
new_storage_byte = ord(header[2])
# This is the only way to check if we are reading a correct log entry
if operation_byte not in operation_map or old_storage_byte not in storage_map or new_storage_byte not in storage_map:
+ print "Wrong 3 bytes: %d %d %d" % header
return None
operation = operation_map[operation_byte]
old_storage = storage_map[old_storage_byte]
new_storage = storage_map[new_storage_byte]
# Next 4 bytes: object size (big endian)
- size_bytes = file.read(4)
+ size_bytes = safe_read(file, 4)
+ if size_bytes is None: return None
size = int(ord(size_bytes[0]) + (ord(size_bytes[1])<<8) + (ord(size_bytes[2])<<16) + (ord(size_bytes[3])<<24))
# Last: classname, nul-terminated
classname = ""
while True:
- byte = file.read(1)
+ byte = safe_read(file, 1)
+ if byte is None: return None
if byte == chr(0):
break
classname += byte
@@ -135,7 +158,9 @@
percent_objects = " (%.1f%%)" % (float(self.objects)*100 / total.objects)
else:
percent_objects = ""
- return "%d%s slots in %d%s objects (avg size: %.1f)" % (self.slots, percent_slots, self.objects, percent_objects, avg_slots)
+ slots = format(self.slots, ",.0f")
+ objects = format(self.objects, ",.0f")
+ return "%s%s slots in %s%s objects (avg size: %.1f)" % (slots, percent_slots, objects, percent_objects, avg_slots)
def __repr__(self):
return "%s(%s)" % (self.__str__(), object.__repr__(self))
@@ -388,10 +413,11 @@
nodes.sort()
return nodes
-def make_graph(entries):
+def make_graph(logfile, flags):
graph = StorageGraph()
- for e in entries:
- graph.add_log_entry(e)
+ def callback(entry):
+ graph.add_log_entry(entry)
+ parse(logfile, flags, callback)
graph.assert_sanity()
return graph
@@ -399,11 +425,8 @@
# ======== Command - Summarize log content
# ====================================================================
-def command_summarize(entries, flags):
- print_summary(entries, flags)
-
-def print_summary(entries, flags):
- graph = make_graph(entries)
+def command_summarize(logfile, flags):
+ graph = make_graph(logfile, flags)
if not flags.allstorage:
graph.split_nodes()
for node in graph.sorted_nodes():
@@ -456,17 +479,17 @@
# ====================================================================
# Output is valid dot code and can be parsed by the graphviz dot utility.
-def command_print_dot(entries, flags):
- graph = make_graph(entries)
+def command_print_dot(logfile, flags):
+ graph = make_graph(logfile, flags)
print "/*"
print "Storage Statistics (dot format):"
print "================================"
print "*/"
print dot_string(graph, flags)
-def command_dot(entries, flags):
+def command_dot(logfile, flags):
import subprocess
- dot = dot_string(make_graph(entries), flags)
+ dot = dot_string(make_graph(logfile, flags), flags)
command = ["dot", "-Tjpg", "-o%s.jpg" % flags.logfile]
print "Running:\n%s" % " ".join(command)
p = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
@@ -487,33 +510,31 @@
# TODO This is a special node. Hacky way to find out.
incoming_cache[node.name] = outgoing
shape = ",shape=box"
- label = "\nObjects: %d" % outgoing.objects
- label += "\nSlots: %d" % outgoing.slots
+ label = "\nObjects: %s" % format(outgoing.objects, ",.0f")
+ label += "\nSlots: %s" % format(outgoing.slots, ",.0f")
else:
incoming_cache[node.name] = incoming
shape = ""
- label = "\nIncoming objects: %d" % incoming.objects
- label += "\nIncoming slots: %d" % incoming.slots
- if remaining.objects == incoming.objects:
- label += "\n(All remaining)"
- else:
+ label = "\nIncoming objects: %s" % format(incoming.objects, ",.0f")
+ label += "\nIncoming slots: %s" % format(incoming.slots, ",.0f")
+ if remaining.objects != incoming.objects:
if flags.percent and incoming.objects != 0:
- percent_remaining_objects = " (%.1f%%)" % (remaining.objects * 100 / incoming.objects)
- percent_remaining_slots = " (%.1f%%)" % (remaining.slots * 100 / incoming.slots)
+ percent_remaining_objects = " (%.1f%%)" % (float(remaining.objects)*100 / incoming.objects)
+ percent_remaining_slots = " (%.1f%%)" % (float(remaining.slots)*100 / incoming.slots)
else:
percent_remaining_objects = percent_remaining_slots = ""
- label += "\nRemaining objects: %d%s" % (remaining.objects, percent_remaining_objects)
- label += "\nRemaining slots: %d%s" % (remaining.slots, percent_remaining_slots)
+ label += "\nRemaining objects: %s%s" % (format(remaining.objects, ",.0f"), percent_remaining_objects)
+ label += "\nRemaining slots: %s%s" % (format(remaining.slots, ",.0f"), percent_remaining_slots)
result += "%s [label=\"%s%s\"%s];" % (node.name.replace(" ", "_"), node.name, label, shape)
for edge in graph.edges.values():
total = edge.total()
- str_objects = "%d objects" % total.objects
- str_slots = "%d slots" % total.slots
+ str_objects = "%s objects" % format(total.objects, ",.0f")
+ str_slots = "%s slots" % format(total.slots, ",.0f")
incoming = incoming_cache[edge.origin.name]
if flags.percent and incoming.objects != 0:
- str_objects += " (%.1f%%)" % (float(total.objects) * 100 / incoming.objects)
- str_slots += " (%.1f%%)" % (float(total.slots) * 100 / incoming.slots)
+ str_objects += " (%.1f%%)" % (float(total.objects)*100 / incoming.objects)
+ str_slots += " (%.1f%%)" % (float(total.slots)*100 / incoming.slots)
target_node = edge.target.name.replace(" ", "_")
source_node = edge.origin.name.replace(" ", "_")
@@ -526,9 +547,10 @@
# ======== Main
# ====================================================================
-def command_print_entries(entries, flags):
- for e in entries:
- print e
+def command_print_entries(logfile, flags):
+ def callback(entry):
+ print entry
+ parse(logfile, flags, callback)
class Flags(object):
@@ -580,8 +602,7 @@
usage(flags, commands)
func = module[command_prefix + command]
- entries = parse(logfile, flags)
- func(entries, flags)
+ func(logfile, flags)
if __name__ == "__main__":
main(sys.argv[1:])
From noreply at buildbot.pypy.org Mon Jul 7 13:16:30 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:30 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Removed binary output for
logs, did not help. Added aggregated output, which does the trick.
Message-ID: <20140707111630.595651C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r862:cc227d08b393
Date: 2014-07-02 17:53 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/cc227d08b393/
Log: Removed binary output for logs, did not help. Added aggregated
output, which does the trick.
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -1,18 +1,46 @@
-# Put flags in an object to make it modifyable after compile time.
-class LoggerOptions(object):
+class Logger(object):
def __init__(self):
self.active = False
- self.binary = False
+ self.aggregate = False
+ self.logs = {}
+
+ def log(self, operation, old_storage, new_storage, classname, size):
+ if self.aggregate:
+ key = (operation, old_storage, new_storage, classname)
+ if key not in self.logs:
+ self.logs[key] = [0, 0]
+ tuple = self.logs[key]
+ tuple[0] += size
+ tuple[1] += 1
+ else:
+ self.output(operation, old_storage, new_storage, classname, size, 1)
+
+ def print_aggregated_log(self):
+ if not self.aggregate:
+ return
+ for key, tuple in self.logs.items():
+ operation, old_storage, new_storage, classname = key
+ slots, objects = tuple
+ self.output(operation, old_storage, new_storage, classname, slots, objects)
+
+ def output(self, operation, old_storage, new_storage, classname, slots, objects):
+ old_storage_string = "%s -> " % old_storage if old_storage else ""
+ classname_string = " of %s" % classname if classname else ""
+ format = (operation, old_storage_string, new_storage, classname_string, slots, objects)
+ print "%s (%s%s)%s size %d objects %d" % format
-_options = LoggerOptions()
+_logger = Logger()
-def activate(binary = False):
- _options.active = True
- _options.binary = binary
+def activate(aggregate=False):
+ _logger.active = True
+ _logger.aggregate = aggregate
+
+def print_aggregated_log():
+ _logger.print_aggregated_log()
def log(w_obj, operation, old_storage_object=None, log_classname=True):
- if not _options.active:
+ if not _logger.active:
return
# Gather information to be logged
@@ -27,73 +55,5 @@
else:
classname = None
- if _options.binary:
- binary_output(operation, old_storage, new_storage, classname, size)
- else:
- output(operation, old_storage, new_storage, classname, size)
-
-def output(operation, old_storage, new_storage, classname, size):
- # Construct and print a simple logstring
- old_storage_string = "%s -> " % old_storage if old_storage else ""
- classname_string = " of %s" % classname if classname else ""
- print "%s (%s%s)%s size %d" % (operation, old_storage_string, new_storage, classname_string, size)
-
-operation_map = {
- "Filledin": 1,
- "Initialized": 2,
- "Switched": 3,
-}
-
-storage_map = {
- "AllNilStorageShadow": 1,
- "SmallIntegerOrNilStorageShadow": 2,
- "FloatOrNilStorageShadow": 3,
- "ListStorageShadow": 4,
- "WeakListStorageShadow": 5,
- "ClassShadow": 6,
- "MethodDictionaryShadow": 7,
- "BlockContextShadow": 8,
- "MethodContextShadow": 9,
- "CachedObjectShadow": 10,
- "ObserveeShadow": 11,
- None: 12,
-}
-
-def binary_output(operation, old_storage, new_storage, classname, size):
- # Output a byte-coded log entry
- bytes = [] # bytearray()
-
- # First 3 bytes: operation, old_storage, new_storage
- assert operation in operation_map, "Cannot handle operation %s" % operation
- bytes.append(operation_map[operation])
- assert old_storage in storage_map, "Cannot handle old-storage type %s" % old_storage
- bytes.append(storage_map[old_storage])
- assert new_storage in storage_map, "Cannot handle new-storage type %s" % new_storage
- bytes.append(storage_map[new_storage])
-
- # Next: 4 bytes encoding object size (big endian)
- # Assert not compiling in RPython
- # assert size < 2**32, "Object of type %s too large (size %d)" % (classname, size)
- mask = (1<<8)-1
- shift = 0
- bytes.append((size & mask) >> shift)
- mask = mask<<8
- shift += 8
- bytes.append((size & mask) >> shift)
- mask = mask<<8
- shift += 8
- bytes.append((size & mask) >> shift)
- mask = mask<<8
- shift += 8
- bytes.append((size & mask) >> shift)
-
- # Next: classname string plus terminating null-character
- if classname:
- for c in classname:
- bytes.append(ord(c))
- bytes.append(0)
-
- # No simpler way for RPython's sake.
- import os
- for b in bytes:
- os.write(1, chr(b))
+ _logger.log(operation, old_storage, new_storage, classname, size)
+
\ No newline at end of file
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -131,7 +131,7 @@
-ni|--no-interrupts
-d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
-l|--storage-log
- -lb|--storage-log-binary (output should be redirected to file)
+ -L|--storage-log-aggregate
[image path, default: Squeak.image]
""" % (argv[0], constants.MAX_LOOP_DEPTH)
@@ -198,8 +198,8 @@
idx += 1
elif arg in ["-l", "--storage-log"]:
storage_logger.activate()
- elif arg in ["-lb", "--storage-log-binary"]:
- storage_logger.activate(binary=True)
+ elif arg in ["-L", "--storage-log-aggregate"]:
+ storage_logger.activate(aggregate=True)
elif path is None:
path = argv[idx]
else:
@@ -236,6 +236,7 @@
else:
_run_image(interp)
result = 0
+ storage_logger.print_aggregated_log()
return result
From noreply at buildbot.pypy.org Mon Jul 7 13:16:31 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:31 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Removed parsing of binary
logs. Slightly improved outputs. Added command to aggregate a logfile.
Message-ID: <20140707111631.7ECE11C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r863:0fbe2a4a6a3b
Date: 2014-07-02 17:54 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/0fbe2a4a6a3b/
Log: Removed parsing of binary logs. Slightly improved outputs. Added
command to aggregate a logfile.
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -1,12 +1,10 @@
import re, os, sys, operator
-import spyvm.storage_logger
OPERATIONS = ["Filledin", "Initialized", "Switched"]
-# Reverse the two maps used to encode the byte encoded log-output
-storage_map = {v:k for k, v in spyvm.storage_logger.storage_map.items()}
-operation_map = {v:k for k, v in spyvm.storage_logger.operation_map.items()}
+IMAGE_LOADING_STORAGE = " Image Loading Storage" # Space to be sorted to the beginning
+OBJECT_CREATION_STORAGE = " Object Creation Storage"
# ====================================================================
# ======== Logfile parsing
@@ -19,83 +17,17 @@
else:
opener = lambda: open(filename, 'r', 1)
with opener() as file:
- if flags.binary:
- while True:
- try:
- entry = parse_binary(file)
- if entry == None:
- if flags.verbose:
- if file is sys.stdin:
- print "Stopped after parsing %d entries." % parsed_entries
- else:
- tell = file.tell()
- format = (tell, parsed_entries, os.path.getsize(file.name) - tell)
- print "Stopped parsing after %d bytes (%d entries). Ignoring leftover %d bytes." % format
- break
- else:
- parsed_entries += 1
- callback(entry)
- except:
- tell = 0 if file is sys.stdin else file.tell()
- print "Exception while parsing file, after %d bytes (%d entries)" % (tell, len(entries))
- raise
- else:
- while True:
- line = file.readline()
- if len(line) == 0:
- break
- entry = parse_line(line, flags)
- if entry:
- parsed_entries += 1
- callback(entry)
+ while True:
+ line = file.readline()
+ if len(line) == 0:
+ break
+ entry = parse_line(line, flags)
+ if entry:
+ parsed_entries += 1
+ callback(entry)
return parsed_entries
-def safe_read(file, size):
- result = file.read(size)
- retries = 20
- # Try to work around stdin's unpredictability
- while len(result) < size:
- result += file.read(size - len(result))
- retries -= 1
- if retries < 0:
- return None
- import time
- time.sleep(0.001)
- return result
-
-def parse_binary(file):
- # First 3 bytes: operation, old storage, new storage
- header = safe_read(file, 3)
- if header is None: return None
- operation_byte = ord(header[0])
- old_storage_byte = ord(header[1])
- new_storage_byte = ord(header[2])
- # This is the only way to check if we are reading a correct log entry
- if operation_byte not in operation_map or old_storage_byte not in storage_map or new_storage_byte not in storage_map:
- print "Wrong 3 bytes: %d %d %d" % header
- return None
- operation = operation_map[operation_byte]
- old_storage = storage_map[old_storage_byte]
- new_storage = storage_map[new_storage_byte]
-
- # Next 4 bytes: object size (big endian)
- size_bytes = safe_read(file, 4)
- if size_bytes is None: return None
- size = int(ord(size_bytes[0]) + (ord(size_bytes[1])<<8) + (ord(size_bytes[2])<<16) + (ord(size_bytes[3])<<24))
-
- # Last: classname, nul-terminated
- classname = ""
- while True:
- byte = safe_read(file, 1)
- if byte is None: return None
- if byte == chr(0):
- break
- classname += byte
- if len(classname) == 0:
- classname = None
- return LogEntry(operation, old_storage, new_storage, classname, size)
-
-line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)$")
+line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)( objects (?P<objects>[0-9]+))?$")
def parse_line(line, flags):
result = line_pattern.match(line)
@@ -108,32 +40,42 @@
new_storage = result.group('new')
classname = result.group('classname')
size = result.group('size')
- return LogEntry(operation, old_storage, new_storage, classname, size)
+ objects = result.group('objects')
+ return LogEntry(operation, old_storage, new_storage, classname, size, objects)
class LogEntry(object):
- def __init__(self, operation, old_storage, new_storage, classname, size):
+ def __init__(self, operation, old_storage, new_storage, classname, size, objects):
self.operation = str(operation)
self.new_storage = str(new_storage)
self.classname = str(classname)
- self.size = float(size)
+ self.size = int(size)
+ self.objects = int(objects) if objects else 1
if old_storage is None:
if operation == "Filledin":
- old_storage = " Image Loading Storage" # Space to be sorted to the beginning
+ old_storage = IMAGE_LOADING_STORAGE
elif operation == "Initialized":
- old_storage = " Object Creation Storage"
+ old_storage = OBJECT_CREATION_STORAGE
else:
assert False, "old_storage has to be available in a Switched operation"
self.old_storage = str(old_storage)
+ def clear_old_storage(self):
+ if self.old_storage in (IMAGE_LOADING_STORAGE, OBJECT_CREATION_STORAGE):
+ self.old_storage = None
+
def full_key(self):
return (self.operation, self.old_storage, self.new_storage)
+ def __lt__(self, other):
+ return self.classname < other.classname
+
def __str__(self):
old_storage_string = "%s -> " % self.old_storage if self.old_storage else ""
classname_string = " of %s" % self.classname if self.classname else ""
- return "%s (%s%s)%s size %d" % (self.operation, old_storage_string, self.new_storage, classname_string, self.size)
+ objects_string = " objects %d" % self.objects if self.objects > 1 else ""
+ return "%s (%s%s)%s size %d%s" % (self.operation, old_storage_string, self.new_storage, classname_string, self.size, objects_string)
# ====================================================================
# ======== Graph parsing
@@ -158,8 +100,8 @@
percent_objects = " (%.1f%%)" % (float(self.objects)*100 / total.objects)
else:
percent_objects = ""
- slots = format(self.slots, ",.0f")
- objects = format(self.objects, ",.0f")
+ slots = format(self.slots, ",d")
+ objects = format(self.objects, ",d")
return "%s%s slots in %s%s objects (avg size: %.1f)" % (slots, percent_slots, objects, percent_objects, avg_slots)
def __repr__(self):
@@ -167,7 +109,7 @@
def add_log_entry(self, entry):
self.slots = self.slots + entry.size
- self.objects = self.objects + 1
+ self.objects = self.objects + entry.objects
def __sub__(self, other):
return Operations(self.objects - other.objects, self.slots - other.slots)
@@ -245,6 +187,17 @@
def add_log_entry(self, entry):
self.cls(entry.classname).add_log_entry(entry)
+ def as_log_entries(self):
+ entries = []
+ for classname, ops in self.classes.classes.items():
+ entry = LogEntry(self.operation, self.origin.name, self.target.name, classname, ops.slots, ops.objects)
+ entry.clear_old_storage()
+ entries.append(entry)
+ return entries
+
+ def __lt__(self, other):
+ return self.full_key() < other.full_key()
+
def __str__(self):
return "[%s %s -> %s]" % (self.operation, self.origin, self.target)
@@ -544,14 +497,28 @@
return result
# ====================================================================
-# ======== Main
+# ======== Other commands
# ====================================================================
+def command_aggregate(logfile, flags):
+ graph = make_graph(logfile, flags)
+ edges = graph.edges.values()
+ edges.sort()
+ for edge in edges:
+ logentries = edge.as_log_entries()
+ logentries.sort()
+ for entry in logentries:
+ print entry
+
def command_print_entries(logfile, flags):
def callback(entry):
print entry
parse(logfile, flags, callback)
+# ====================================================================
+# ======== Main
+# ====================================================================
+
class Flags(object):
def __init__(self, flags):
@@ -583,7 +550,6 @@
('allstorage', '-a'),
('detailed', '-d'),
('classes', '-c'),
- ('binary', '-b'),
])
command_prefix = "command_"
From noreply at buildbot.pypy.org Mon Jul 7 13:16:52 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:52 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added new benchmark - matrix
multiplication using arrays full of SmallIntegers or Floats.
Message-ID: <20140707111652.1AC661C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r864:bc84bc8ed49c
Date: 2014-07-03 15:36 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/bc84bc8ed49c/
Log: Added new benchmark - matrix multiplication using arrays full of
SmallIntegers or Floats.
diff too long, truncating to 2000 out of 308107 lines
diff --git a/images/Squeak4.5-12568.changes b/images/Squeak4.5-12568.changes
--- a/images/Squeak4.5-12568.changes
+++ b/images/Squeak4.5-12568.changes
@@ -36,4 +36,4 @@
Workspace allInstances do: [:w | w topView delete].
ReleaseBuilderFor4dot4 prepareNewBuild.
Smalltalk snapshot: true andQuit: true.
-!
----End fileIn of a stream----!
----SNAPSHOT----{31 March 2013 . 3:27:34 pm} Squeak4.5-12327.image priorSource: 7430688!
!Installer methodsFor: 'squeakmap' stamp: 'fbs 1/28/2013 19:25' prior: 57597950!
packageAndVersionFrom: pkg
| p |
p := ReadStream on: pkg .
^{(p upTo: $(). p upTo: $)} collect: [:s | s withBlanksTrimmed].! !
"Installer-Core"!
!Categorizer methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 16:58'!
scanFrom: aStream environment: anEnvironment
^ self scanFrom: aStream! !
!ClassCategoryReader methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 17:21'!
scanFrom: aStream environment: anEnvironment
"File in methods from the stream, aStream."
| methodText |
[methodText := aStream nextChunkText.
methodText size > 0] whileTrue:
[class
compile: methodText
environment: anEnvironment
classified: category
withStamp: changeStamp
notifying: nil]! !
!ClassCommentReader methodsFor: 'as yet unclassified' stamp: 'cwp 6/20/2012 17:22'!
scanFrom: aStream environment: anEnvironment
^ self scanFrom: aStream! !
!Metaclass methodsFor: 'compiling' stamp: 'cwp 6/20/2012 17:29'!
bindingOf: varName environment: anEnvironment
^ thisClass classBindingOf: varName environment: anEnvironment! !
!LargePositiveInteger methodsFor: 'arithmetic' stamp: 'nice 12/30/2012 20:03' prior: 22505876!
\\ aNumber
"Primitive. Take the receiver modulo the argument. The result is the
remainder rounded towards negative infinity, of the receiver divided
by the argument. Fail if the argument is 0. Fail if either the argument
or the result is not a SmallInteger or a LargePositiveInteger less than
2-to-the-30th (1073741824). Optional. See Object documentation whatIsAPrimitive."
aNumber isInteger
ifTrue:
[| neg qr q r |
neg := self negative == aNumber negative == false.
qr := (self digitDiv:
(aNumber class == SmallInteger
ifTrue: [aNumber abs]
ifFalse: [aNumber])
neg: neg).
q := qr first normalize.
r := qr last normalize.
^(q negative
ifTrue: [r isZero not]
ifFalse: [q isZero and: [neg]])
ifTrue: [r + aNumber]
ifFalse: [r]].
^super \\ aNumber
! !
!LargePositiveInteger methodsFor: 'converting' stamp: 'nice 1/27/2012 22:41' prior: 37616324!
asFloat
"Answer a Float that best approximates the value of the receiver.
This algorithm is optimized to process only the significant digits of a LargeInteger.
And it does honour IEEE 754 round to nearest even mode in case of excess precision (see details below)."
"How numbers are rounded in IEEE 754 default rounding mode:
A shift is applied so that the highest 53 bits are placed before the floating point to form a mantissa.
The trailing bits form the fraction part placed after the floating point.
This fractional number must be rounded to the nearest integer.
If fraction part is 2r0.1, exactly between two consecutive integers, there is a tie.
The nearest even integer is chosen in this case.
Examples (First 52bits of mantissa are omitted for brevity):
2r0.00001 is rounded downward to 2r0
2r1.00001 is rounded downward to 2r1
2r0.1 is a tie and rounded to 2r0 (nearest even)
2r1.1 is a tie and rounded to 2r10 (nearest even)
2r0.10001 is rounded upward to 2r1
2r1.10001 is rounded upward to 2r10
Thus, if the next bit after floating point is 0, the mantissa is left unchanged.
If next bit after floating point is 1, an odd mantissa is always rounded upper.
An even mantissa is rounded upper only if the fraction part is not a tie."
"Algorihm details:
The floating point hardware can perform the rounding correctly with several excess bits as long as there is a single inexact operation.
This can be obtained by splitting the mantissa plus excess bits in two part with less bits than Float precision.
Note 1: the inexact flag in floating point hardware must not be trusted because in some cases the operations would be exact but would not take into account some bits that were truncated before the Floating point operations.
Note 2: the floating point hardware is presumed configured in default rounding mode."
| mantissa shift excess result n |
"Check how many bits excess the maximum precision of a Float mantissa."
excess := self highBitOfMagnitude - Float precision.
excess > 7
ifTrue:
["Remove the excess bits but seven."
mantissa := self bitShiftMagnitude: 7 - excess.
shift := excess - 7.
"An even mantissa with a single excess bit immediately following would be truncated.
But this would not be correct if above shift has truncated some extra bits.
Check this case, and round excess bits upper manually."
((mantissa digitAt: 1) = 2r01000000 and: [self anyBitOfMagnitudeFrom: 1 to: shift])
ifTrue: [mantissa := mantissa + 1]]
ifFalse:
[mantissa := self.
shift := 0].
"There will be a single inexact round off at last iteration"
result := (mantissa digitAt: (n := mantissa digitLength)) asFloat.
[(n := n - 1) > 0] whileTrue: [
result := 256.0 * result + (mantissa digitAt: n) asFloat].
^result timesTwoPower: shift.! !
!LargePositiveInteger methodsFor: 'private' stamp: 'nice 12/30/2012 14:25'!
primitiveQuo: anInteger
"Primitive. Divide the receiver by the argument and return the result.
Round the result down towards zero to make it a whole integer. Fail if
the argument is 0. Fail if either the argument or the result is not a
SmallInteger or a LargePositiveInteger less than 2-to-the-30th (1073741824). Optional. See
Object documentation whatIsAPrimitive."
^nil! !
!LargePositiveInteger methodsFor: 'arithmetic' stamp: 'nice 12/30/2012 14:34'!
rem: aNumber
"Remainder defined in terms of quo:. See super rem:.
This is defined only to speed up case of very large integers."
(self primitiveQuo: aNumber)
ifNotNil: [:quo | ^self - (quo * aNumber)].
aNumber isInteger
ifTrue:
[| ng rem |
ng := self negative == aNumber negative == false.
rem := (self digitDiv:
(aNumber class == SmallInteger
ifTrue: [aNumber abs]
ifFalse: [aNumber])
neg: ng) at: 2.
^ rem normalize].
^super rem: aNumber! !
!LargeNegativeInteger methodsFor: 'converting' stamp: 'nice 1/1/2013 15:42' prior: 37616204!
asFloat
^super asFloat negated! !
!UndefinedObject methodsFor: 'class hierarchy' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
^ scannedLiteral! !
!Behavior methodsFor: 'testing method dictionary' stamp: 'cwp 6/20/2012 17:32'!
bindingOf: varName environment: anEnvironment
^superclass bindingOf: varName environment: anEnvironment! !
!Behavior methodsFor: 'testing method dictionary' stamp: 'cwp 6/20/2012 17:30'!
classBindingOf: varName environment: anEnvironment
^self bindingOf: varName environment: anEnvironment! !
!Behavior methodsFor: 'printing' stamp: 'cwp 6/22/2012 15:37'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
"Postprocesses a literal scanned by Scanner scanToken (esp. xLitQuote).
If scannedLiteral is not an association, answer it.
Else, if it is of the form:
nil->#NameOfMetaclass
answer nil->theMetaclass, if any has that name, else report an error.
Else, if it is of the form:
#NameOfGlobalVariable->anything
answer the global, class, or pool association with that name, if any, else
add it to Undeclared and answer the new Association."
| key value |
(scannedLiteral isVariableBinding)
ifFalse: [^ scannedLiteral].
key := scannedLiteral key.
value := scannedLiteral value.
key ifNil: "###"
[(self bindingOf: value environment: anEnvironment) ifNotNil:
[:assoc|
(assoc value isKindOf: Behavior) ifTrue:
[^ nil->assoc value class]].
requestor notify: 'No such metaclass'.
^false].
(key isSymbol) ifTrue: "##"
[(self bindingOf: key environment: anEnvironment) ifNotNil:
[:assoc | ^assoc].
^ anEnvironment undeclared: key].
requestor notify: '## must be followed by a non-local variable name'.
^false
" Form literalScannedAs: 14 notifying: nil 14
Form literalScannedAs: #OneBitForm notifying: nil OneBitForm
Form literalScannedAs: ##OneBitForm notifying: nil OneBitForm->a Form
Form literalScannedAs: ##Form notifying: nil Form->Form
Form literalScannedAs: ###Form notifying: nil nil->Form class
"! !
!Fraction methodsFor: 'converting' stamp: 'nice 11/21/2011 22:34' prior: 37619655!
asFloat
"Answer a Float that closely approximates the value of the receiver.
This implementation will answer the closest floating point number to the receiver.
In case of a tie, it will use the IEEE 754 round to nearest even mode.
In case of overflow, it will answer +/- Float infinity."
| a b mantissa exponent hasTruncatedBits lostBit n ha hb hm |
a := numerator abs.
b := denominator. "denominator is always positive"
ha := a highBitOfMagnitude.
hb := b highBitOfMagnitude.
"Number of bits to keep in mantissa plus one to handle rounding."
n := 1 + Float precision.
"If both numerator and denominator are represented exactly in floating point number,
then fastest thing to do is to use hardwired float division."
(ha < n and: [hb < n]) ifTrue: [^numerator asFloat / denominator asFloat].
"Shift the fraction by a power of two exponent so as to obtain a mantissa with n bits.
First guess is rough, the mantissa might have n+1 bits."
exponent := ha - hb - n.
exponent >= 0
ifTrue: [b := b bitShift: exponent]
ifFalse: [a := a bitShift: exponent negated].
mantissa := a quo: b.
hasTruncatedBits := a > (mantissa * b).
hm := mantissa highBit.
"Check for gradual underflow, in which case the mantissa will loose bits.
Keep at least one bit to let underflow preserve the sign of zero."
lostBit := Float emin - (exponent + hm - 1).
lostBit > 0 ifTrue: [n := n - lostBit max: 1].
"Remove excess bits in the mantissa."
hm > n
ifTrue:
[exponent := exponent + hm - n.
hasTruncatedBits := hasTruncatedBits or: [mantissa anyBitOfMagnitudeFrom: 1 to: hm - n].
mantissa := mantissa bitShift: n - hm].
"Check if mantissa must be rounded upward.
The case of tie (mantissa odd & hasTruncatedBits not)
will be handled by Integer>>asFloat."
(hasTruncatedBits and: [mantissa odd])
ifTrue: [mantissa := mantissa + 1].
^ (self positive
ifTrue: [mantissa asFloat]
ifFalse: [mantissa asFloat negated])
timesTwoPower: exponent! !
!Float methodsFor: 'arithmetic' stamp: 'nice 12/20/2012 23:16' prior: 20878776!
negated
"Answer a Number that is the negation of the receiver.
Implementation note: this version cares of negativeZero."
^-1.0 * self! !
!ClassDescription methodsFor: 'compiling' stamp: 'cwp 6/20/2012 17:21'!
compile: text environment: anEnvironment classified: category withStamp: changeStamp notifying: requestor
^ self
compile: text
environment: anEnvironment
classified: category
withStamp: changeStamp
notifying: requestor
logSource: self acceptsLoggingOfCompilation! !
!ClassDescription methodsFor: 'compiling' stamp: 'cwp 12/27/2012 13:17'!
compile: text environment: anEnvironment classified: category withStamp: changeStamp notifying: requestor logSource: logSource
| methodAndNode context methodNode |
context := CompilationCue
source: text
class: self
environment: anEnvironment
category: category
requestor: requestor.
methodNode := self newCompiler compile: context ifFail: [^ nil].
methodAndNode := CompiledMethodWithNode
generateMethodFromNode: methodNode
trailer: self defaultMethodTrailer.
logSource ifTrue: [
self logMethodSource: text forMethodWithNode: methodAndNode
inCategory: category withStamp: changeStamp notifying: requestor.
].
self addAndClassifySelector: methodAndNode selector withMethod: methodAndNode
method inProtocol: category notifying: requestor.
self instanceSide noteCompilationOf: methodAndNode selector meta: self isClassSide.
^ methodAndNode selector! !
!Class methodsFor: 'compiling' stamp: 'cwp 6/20/2012 09:47'!
bindingOf: varName environment: anEnvironment
"Answer the binding of some variable resolved in the scope of the receiver"
| aSymbol binding |
aSymbol := varName asSymbol.
"First look in classVar dictionary."
binding := self classPool bindingOf: aSymbol.
binding ifNotNil:[^binding].
"Next look in shared pools."
self sharedPools do:[:pool |
binding := pool bindingOf: aSymbol.
binding ifNotNil:[^binding].
].
"Next look in declared environment."
binding := anEnvironment bindingOf: aSymbol.
binding ifNotNil:[^binding].
"Finally look higher up the superclass chain and fail at the end."
superclass == nil
ifTrue: [^ nil]
ifFalse: [^ superclass bindingOf: aSymbol].
! !
"Kernel"!
ParseNode subclass: #Encoder
instanceVariableNames: 'scopeTable nTemps supered requestor class selector literalStream selectorSet litIndSet litSet sourceRanges globalSourceRanges addedSelectorAndMethodClassLiterals optimizedSelectors cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Encoder commentStamp: 'cwp 12/26/2012 23:29' prior: 36323851!
I encode names and literals into tree nodes with byte codes for the compiler. Byte codes for literals are not assigned until the tree-sizing pass of the compiler, because only then is it known which literals are actually needed. I also keep track of sourceCode ranges during parsing and code generation so I can provide an inverse map for the debugger.!
Scanner subclass: #Parser
instanceVariableNames: 'here hereType hereMark hereEnd prevMark prevEnd encoder requestor parseNode failBlock requestorOffset tempsMark doitFlag properties category queriedUnusedTemporaries cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Parser commentStamp: 'cwp 12/26/2012 23:34' prior: 38557958!
I parse Smalltalk syntax and create a MethodNode that is the root of the parse tree. I look one token ahead.!
Object subclass: #CompilationCue
instanceVariableNames: 'source context receiver class environment category requestor'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
Object subclass: #Compiler
instanceVariableNames: 'sourceStream requestor class category context parser cue'
classVariableNames: ''
poolDictionaries: ''
category: 'Compiler-Kernel'!
!Compiler commentStamp: 'cwp 12/26/2012 23:17' prior: 59257505!
The compiler accepts Smalltalk source code and compiles it with respect to a given class. The user of the compiler supplies a context so that temporary variables are accessible during compilation. If there is an error, a requestor (usually a kind of StringHolderController) is sent the message notify:at:in: so that the error message can be displayed. If there is no error, then the result of compilation is a MethodNode, which is the root of a parse tree whose nodes are kinds of ParseNodes. The parse tree can be sent messages to (1) generate code for a CompiledMethod (this is done for compiling methods or evaluating expressions); (2) pretty-print the code (for formatting); or (3) produce a map from object code back to source code (used by debugger program-counter selection). See also Parser, Encoder, ParseNode.!
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/26/2012 23:34'!
init: aCue notifying: anObject
"The use of the variable requestor is a bit confusing here. This is
*not* the original requestor, which is available through the cue.
It's the Parser instance that is using the encoder."
self setCue: aCue.
requestor := anObject.
nTemps := 0.
supered := false.
self initScopeAndLiteralTables.
cue getClass variablesAndOffsetsDo:
[:variable "" :offset "" |
offset isNil
ifTrue: [scopeTable at: variable name put: (FieldNode new fieldDefinition: variable)]
ifFalse: [scopeTable
at: variable
put: (offset >= 0
ifTrue: [InstanceVariableNode new
name: variable index: offset]
ifFalse: [MaybeContextInstanceVariableNode new
name: variable index: offset negated])]].
cue context ~~ nil ifTrue:
[| homeNode |
homeNode := self bindTemp: self doItInContextName.
"0th temp = aContext passed as arg"
cue context tempNames withIndexDo:
[:variable :index|
scopeTable
at: variable
put: (MessageAsTempNode new
receiver: homeNode
selector: #namedTempAt:
arguments: (Array with: (self encodeLiteral: index))
precedence: 3
from: self)]].
sourceRanges := Dictionary new: 32.
globalSourceRanges := OrderedCollection new: 32
! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/26/2012 23:30'!
setCue: aCue
	"Remember the compilation cue, and mirror its target class into the
	legacy 'class' instance variable for methods that predate cues."
	cue := aCue.
	class := aCue getClass.! !
!Dictionary methodsFor: '*Compiler' stamp: 'cwp 6/22/2012 09:17'!
bindingOf: varName ifAbsent: aBlock
	"Answer the association holding varName, or the value of aBlock if none."
	^self associationAt: varName ifAbsent: aBlock! !
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:37'!
init: sourceStream cue: aCue failBlock: aBlock
	"Prepare this parser: adopt the cue (and the legacy variables it
	carries), remember the failure continuation, scan the source stream
	and advance to the first token."
	self setCue: aCue.
	failBlock := aBlock.
	requestorOffset := 0.
	super scan: sourceStream.
	prevMark := hereMark := mark.
	self advance
! !
!Parser methodsFor: 'public access' stamp: 'cwp 12/26/2012 23:41'!
parse: sourceStream cue: aCue noPattern: noPattern ifFail: aBlock
	"Answer a MethodNode for the argument, sourceStream, that is the root of
	a parse tree. Parsing is done with respect to the CompilationCue to
	resolve variables. Errors in parsing are reported to the cue's requestor;
	otherwise aBlock is evaluated. The argument noPattern is a Boolean that is
	true if the sourceStream does not contain a method header (i.e., for DoIts).
	If the requestor edits the source while parsing (ReparseAfterSourceEditing),
	the whole parse is restarted on the updated text with a fresh encoder."
	| methNode repeatNeeded myStream s p subSelection |
	myStream := sourceStream.
	[repeatNeeded := false.
	p := myStream position.
	s := myStream upToEnd.
	myStream position: p.
	"true when the stream corresponds exactly to the requestor's selection"
	subSelection := aCue requestor notNil and: [aCue requestor selectionInterval = (p + 1 to: p + s size)].
	self encoder init: aCue notifying: self.
	self init: myStream cue: aCue failBlock: [^ aBlock value].
	doitFlag := noPattern.
	failBlock:= aBlock.
	[methNode := self method: noPattern context: cue context]
		on: ReparseAfterSourceEditing
		do: [ :ex |
			repeatNeeded := true.
			"re-read either just the (possibly moved) selection or the whole text"
			myStream := subSelection
							ifTrue:
								[ReadStream
									on: cue requestor text string
									from: cue requestor selectionInterval first
									to: cue requestor selectionInterval last]
							ifFalse:
								[ReadStream on: cue requestor text string]].
	repeatNeeded] whileTrue:
		[encoder := self encoder class new].
	methNode sourceText: s.
	^methNode
! !
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:35'!
setCue: aCue
	"Store the compilation cue, then copy its requestor and category into
	the legacy instance variables still read by not-yet-converted methods."
	cue := aCue.
	requestor := aCue requestor.
	category := aCue category.! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
class: aClass
	"Answer a cue for compiling in aClass, with no context and no requestor."
	^ self
		context: nil
		class: aClass
		requestor: nil! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
context: aContext class: aClass requestor: anObject
	"Answer a sourceless cue for aClass in aContext; the environment
	defaults to the class's own environment (when a class is given)."
	^ self
		source: nil
		context: aContext
		receiver: nil
		class: aClass
		environment: (aClass ifNotNil: [aClass environment])
		category: nil
		requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:16'!
source: aTextOrStream class: aClass environment: anEnvironment category: aString requestor: anObject
	"Answer a cue for compiling aTextOrStream into aClass within
	anEnvironment, with no context and no receiver."
	^ self
		source: aTextOrStream
		context: nil
		receiver: nil
		class: aClass
		environment: anEnvironment
		category: aString
		requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:53'!
source: aTextOrStream context: aContext class: aClass category: aString requestor: anObject
	"Answer a cue whose receiver comes from the context (when given) and
	whose environment comes from the class (when given)."
	^ self
		source: aTextOrStream
		context: aContext
		receiver: (aContext ifNotNil: [aContext receiver])
		class: aClass
		environment: (aClass ifNotNil: [aClass environment])
		category: aString
		requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:54'!
source: aTextOrStream context: aContext class: aClass requestor: anObject
	"Same as source:context:class:category:requestor: with a nil category."
	^ self
		source: aTextOrStream
		context: aContext
		class: aClass
		category: nil
		requestor: anObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:55'!
source: aTextOrStream context: aContext receiver: recObject class: aClass environment: anEnvironment category: aString requestor: reqObject
	"Most general instance-creation method; every other variant funnels here."
	^ self basicNew
		initializeWithSource: aTextOrStream
		context: aContext
		receiver: recObject
		class: aClass
		environment: anEnvironment
		category: aString
		requestor: reqObject! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:16'!
source: aString environment: anEnvironment
	"Answer a cue for evaluating aString in anEnvironment: compiled
	against UndefinedObject, with no context, receiver or requestor."
	^ self
		source: aString
		context: nil
		receiver: nil
		class: UndefinedObject
		environment: anEnvironment
		category: nil
		requestor: nil! !
!CompilationCue class methodsFor: 'instance creation' stamp: 'cwp 12/26/2012 23:54'!
source: aTextOrStream requestor: anObject
	"Answer a cue carrying only source and requestor; class and context are nil."
	^ self
		source: aTextOrStream
		context: nil
		class: nil
		requestor: anObject! !
!CompilationCue methodsFor: 'binding' stamp: 'cwp 6/20/2012 09:39'!
bindingOf: aSymbol
	"Resolve aSymbol through the cue's class, scoped to the cue's environment."
	^ class bindingOf: aSymbol environment: environment! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
category
	"Answer the method category this cue was created with (may be nil)."
	^ category! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 12/26/2012 23:19'!
context
	"Answer the context to compile in, or nil when not compiling for a doIt."
	^ context! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
environment
	"Answer the environment used to resolve global bindings."
	^ environment! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:16'!
getClass
	"Answer the class to compile in (named getClass so as not to
	collide with the reflective #class message)."
	^ class! !
!CompilationCue methodsFor: 'initialization' stamp: 'cwp 12/26/2012 23:16'!
initializeWithSource: aTextOrString context: aContext receiver: recObject class: aClass environment: anEnvironment category: aString requestor: reqObject
	"Private - set every part of the cue. A stream argument is flattened
	to its contents so the source can be re-read via #sourceStream."
	self initialize.
	source := aTextOrString isStream ifTrue: [aTextOrString contents] ifFalse: [aTextOrString].
	context := aContext.
	receiver := recObject.
	class := aClass.
	environment := anEnvironment.
	category := aString.
	requestor := reqObject! !
!CompilationCue methodsFor: 'binding' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: anObject notifying: anEncoder
	"Delegate literal interpretation to the cue's class, scoped to its environment."
	^ class literalScannedAs: anObject environment: environment notifying: anEncoder! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
receiver
	"Answer the receiver a doIt will be evaluated for (may be nil)."
	^ receiver! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:16'!
requestor
	"Answer the object to notify of compilation errors (may be nil)."
	^ requestor! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:15'!
source
	"Answer the source text this cue was created with."
	^ source! !
!CompilationCue methodsFor: 'accessing' stamp: 'cwp 6/19/2012 11:44'!
sourceStream
	"Answer a fresh read stream over the stored source."
	^ source readStream! !
!Compiler class methodsFor: 'evaluating' stamp: 'cwp 6/20/2012 17:25'!
evaluate: aString environment: anEnvironment
	"Evaluate aString in anEnvironment without logging; answer the result."
	^ self
		evaluate: aString
		environment: anEnvironment
		logged: false! !
!Compiler class methodsFor: 'evaluating' stamp: 'cwp 12/27/2012 12:36'!
evaluate: aString environment: anEnvironment logged: aBoolean
	"Build a CompilationCue for anEnvironment and evaluate aString with it.
	Answer the result, or nil if compilation fails."
	| cue |
	cue := CompilationCue
		source: aString
		environment: anEnvironment.
	^ self new
		evaluate: aString
		cue: cue
		ifFail: [^ nil]
		logged: aBoolean! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 13:18'!
compile: aCue ifFail: failBlock
	"Answer a MethodNode. If the MethodNode can not be created, notify
	the requestor in the context. If the requestor is nil, evaluate failBlock
	instead. The MethodNode is the root of a parse tree. It can be told
	to generate a CompiledMethod to be installed in the method dictionary
	of the class specified by the context."
	self setCue: aCue.
	self source: cue source.
	^self
		translate: sourceStream
		noPattern: false
		ifFail: failBlock! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 00:06'!
evaluate: textOrStream cue: aCue ifFail: failBlock logged: logFlag
	"Compiles the sourceStream into a parse tree, then generates code into
	a method. Finally, the compiled method is invoked from here via withArgs:executeMethod:, hence the system no longer creates Doit method
	litter on errors. Answer the value of the evaluation. When logFlag is
	true, the evaluated text (preferring the requestor's non-empty current
	selection over the full source) is reported to the change notifier."
	| methodNode method value toLog itsSelection itsSelectionString |
	self setCue: aCue.
	self source: textOrStream.
	methodNode := self translate: sourceStream noPattern: true ifFail: [^failBlock value].
	"interactive compiles keep temp names for the debugger"
	method := self interactive
				ifTrue: [methodNode generateWithTempNames]
				ifFalse: [methodNode generate].
	value := cue receiver
				withArgs: (cue context ifNil: [#()] ifNotNil: [{cue context}])
				executeMethod: method.
	logFlag ifTrue:
		[toLog := ((cue requestor respondsTo: #selection)
			and:[(itsSelection := cue requestor selection) notNil
			and:[(itsSelectionString := itsSelection asString) isEmptyOrNil not]])
				ifTrue:[itsSelectionString]
				ifFalse:[sourceStream contents].
		SystemChangeNotifier uniqueInstance evaluated: toLog context: cue context].
	^ value
! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/26/2012 23:20'!
setCue: aCue
	"Store the compilation cue and mirror its parts into the legacy
	instance variables still read by not-yet-converted methods."
	cue := aCue.
	requestor := aCue requestor.
	class := aCue getClass.
	category := aCue category.
	context := aCue context.! !
!Compiler methodsFor: 'private' stamp: 'cwp 6/19/2012 21:58'!
source: textOrStream
	"Normalize the argument into sourceStream: a positionable stream is
	used as-is, anything else is wrapped in a ReadStream over its string."
	(textOrStream isKindOf: PositionableStream)
		ifTrue: [sourceStream := textOrStream]
		ifFalse: [sourceStream := ReadStream on: textOrStream asString]! !
"Compiler"!
!SmartRefStream class methodsFor: 'i/o' stamp: 'cwp 6/20/2012 17:42'!
scanFrom: aByteStream environment: anEnvironment
	"Compatibility shim: the environment argument is ignored; behave as scanFrom:."
	^ self scanFrom: aByteStream! !
!SmartRefStream methodsFor: 'read write' stamp: 'cwp 6/20/2012 17:41'!
scanFrom: aByteStream environment: anEnvironment
	"Compatibility shim: the environment argument is ignored; behave as scanFrom:."
	^ self scanFrom: aByteStream! !
!ImageSegment methodsFor: 'fileIn/Out' stamp: 'cwp 6/20/2012 17:23'!
scanFrom: aStream environment: anEnvironment
	"Compatibility shim: the environment argument is ignored; behave as scanFrom:."
	^ self scanFrom: aStream! !
!PseudoClass methodsFor: 'printing' stamp: 'cwp 6/22/2012 15:39'!
literalScannedAs: scannedLiteral environment: anEnvironment notifying: requestor
	"PseudoClasses perform no literal translation; answer the literal unchanged."
	^ scannedLiteral! !
!InternalTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:34'!
scanFrom: aStream environment: anEnvironment
	"Read a definition of dictionary.
	Make sure current locale corresponds my locale id.
	Each non-empty chunk evaluates (in anEnvironment) to an association;
	an empty value only registers the phrase, others are collected and
	merged at the end. The caller's locale is restored by the ensure:."
	| aString newTranslations assoc currentPlatform |
	newTranslations := Dictionary new.
	currentPlatform := Locale currentPlatform.
	[Locale
		currentPlatform: (Locale localeID: id).
	[aString := aStream nextChunk withSqueakLineEndings.
	aString size > 0] whileTrue:
		[assoc := Compiler evaluate: aString environment: anEnvironment.
		assoc value = ''
			ifTrue: [self class registerPhrase: assoc key]
			ifFalse: [newTranslations add: assoc]]]
		ensure: [Locale currentPlatform: currentPlatform].
	self mergeTranslations: newTranslations! !
!NaturalLanguageTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:26'!
scanFrom: aStream environment: anEnvironment
	"Read a definition of dictionary.
	Make sure current locale corresponds my locale id.
	Mirrors InternalTranslator>>scanFrom:environment:: each non-empty
	chunk evaluates to an association; empty values register the phrase,
	others are merged. The caller's locale is restored by the ensure:."
	| newTranslations currentPlatform |
	newTranslations := Dictionary new.
	currentPlatform := Locale currentPlatform.
	[| aString assoc |
	Locale currentPlatform: (Locale localeID: id).
	[aString := aStream nextChunk withSqueakLineEndings.
	aString size > 0] whileTrue:
		[assoc := Compiler evaluate: aString environment: anEnvironment.
		assoc value = ''
			ifTrue: [self class registerPhrase: assoc key]
			ifFalse: [newTranslations add: assoc]]]
		ensure: [Locale currentPlatform: currentPlatform].
	self mergeTranslations: newTranslations! !
!ObjectScanner methodsFor: 'scanning' stamp: 'cwp 6/20/2012 17:39'!
scanFrom: aByteStream environment: anEnvironment
	"This should probably be reimplemented using an environment
	for compilation. For now, don't change anything: the environment
	argument is ignored and the plain scanFrom: behavior is used."
	^ self scanFrom: aByteStream! !
!SystemDictionary methodsFor: 'accessing' stamp: 'cwp 6/22/2012 09:16'!
bindingOf: varName ifAbsent: aBlock
	"SystemDictionary includes Symbols only, so coerce varName with
	asSymbol before delegating the lookup."
	^super bindingOf: varName asSymbol ifAbsent: aBlock! !
!SystemDictionary methodsFor: 'accessing' stamp: 'cwp 6/22/2012 15:48'!
undeclared
	"Answer the pool of undeclared variables registered under #Undeclared."
	^ self at: #Undeclared! !
"System"!
!ExceptionTests methodsFor: 'testing-outer' stamp: 'fbs 1/1/2013 22:14' prior: 40840955!
expectedFailures
	"No expected failures in this suite."
	^ #().! !
"Tests"!
ReleaseBuilder subclass: #ReleaseBuilderFor4dot5
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'ReleaseBuilder'!
!ReleaseBuilderFor4dot5 commentStamp: 'fbs 1/1/2013 20:25' prior: 0!
The release builder for Squeak 4.5!
!ReleaseBuilder class methodsFor: 'scripts' stamp: 'fbs 12/31/2012 20:43'!
transferCurrentPackagesAsUser: username password: password
	"Copy the packages currently loaded in the image from the trunk repository to my releaseRepository.
	For each ancestor version of every working copy, store it in the release
	repository unless already present; warn when a version is missing from trunk."
	| trunkRep releaseRep |
	trunkRep := self trunkRepository.
	releaseRep := self releaseRepository
		user: username;
		password: password;
		yourself.
	MCWorkingCopy allManagers do:
		[ : eachWorkingCopy | eachWorkingCopy ancestors do:
			[ : eachVersionInfo | (releaseRep includesVersionNamed: eachVersionInfo versionName) ifFalse:
				[ (trunkRep versionWithInfo: eachVersionInfo)
					"was ', trunkRep': concatenating the repository object itself onto a
					String fails at runtime; use its description string (as done in
					switchToNewRepository) to build the warning text"
					ifNil: [ Warning signal: eachVersionInfo name , ' not found in ', trunkRep description ]
					ifNotNilDo: [ : ver | releaseRep storeVersion: ver ] ] ] ]! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
openWelcomeWorkspaces
	"Open the five standard welcome text windows, staggered diagonally
	down the screen in 40-pixel steps."
	TheWorldMainDockingBar instance
		showWelcomeText: #squeakUserInterface
		label: 'Squeak User Interface'
		in: (40 @ 40 extent: 500 @ 300).
	TheWorldMainDockingBar instance
		showWelcomeText: #workingWithSqueak
		label: 'Working With Squeak'
		in: (80 @ 80 extent: 500 @ 300).
	TheWorldMainDockingBar instance
		showWelcomeText: #licenseInformation
		label: 'License Information'
		in: (120 @ 120 extent: 500 @ 300).
	TheWorldMainDockingBar instance
		showWelcomeText: #welcomeFutureDirections
		label: 'Future Directions'
		in: (160 @ 160 extent: 500 @ 300).
	TheWorldMainDockingBar instance
		showWelcomeText: #welcomeToSqueak
		label: 'Welcome to Squeak 4.5'
		in: (200 @ 200 extent: 500 @ 300)! !
!ReleaseBuilderFor4dot5 class methodsFor: 'scripts' stamp: 'fbs 1/1/2013 20:22'!
prepareNewBuild
	"In addition to the generic build preparation, reinitialize the
	Monticello mock package info."
	super prepareNewBuild.
	MCMockPackageInfo initialize.! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:24'!
releaseRepository
	"Answer the MCHttpRepository release versions are published to.
	At release time, change 'trunk' to 'squeak45'."
	^ MCHttpRepository
		location: 'http://source.squeak.org/trunk'
		user: 'squeak'
		password: 'squeak'! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:22'!
setDisplayExtent: extent
	"Check (and eventually set) the display size for the release image.
	Uncomment next line when the primitives become available in the Squeak VM."
	" DisplayScreen hostWindowSize: extent."
	"was '', extent': a Point cannot be concatenated onto a String, so the
	Warning text could never be built; use the point's printString instead"
	Display extent = extent ifFalse: [ Warning signal: 'Display extent not set to ', extent printString ]! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
setPreferences
	"Apply the standard look-and-feel and network settings for the 4.5 release."
	Preferences
		installBrightWindowColors ;
		setPreference: #scrollBarsWithoutMenuButton toValue: true ;
		setPreference: #swapMouseButtons toValue: true ;
		setPreference: #annotationPanes toValue: true ;
		setPreference: #showSplitterHandles toValue: false ;
		setPreference: #showBoundsInHalo toValue: true ;
		setPreference: #alternateHandlesLook toValue: false ;
		setPreference: #roundedMenuCorners toValue: false ;
		setPreference: #roundedWindowCorners toValue: false.
	PluggableButtonMorph roundedButtonCorners: false.
	FillInTheBlankMorph roundedDialogCorners: false.
	Workspace shouldStyle: false.
	NetNameResolver enableIPv6: true.! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
switchToNewRepository
	"Point the updater at the release repository and swap any leftover
	squeak44 repository in the default group for the release one."
	| old44Repository |
	MCMcmUpdater defaultUpdateURL: self releaseRepository description.
	old44Repository := MCRepositoryGroup default repositories
		detect: [:each | each description includesSubString: 'squeak44'] ifNone: [nil].
	old44Repository
		ifNotNil: [MCRepositoryGroup default removeRepository: old44Repository].
	MCRepositoryGroup default addRepository: self releaseRepository! !
!ReleaseBuilderFor4dot5 class methodsFor: 'private' stamp: 'fbs 1/1/2013 20:23'!
versionString
	"Answer the image version string for this release."
	^ 'Squeak4.5'.! !
ReleaseBuilder class removeSelector: #transferCurrentPackages!
"ReleaseBuilder"!
!Environment class methodsFor: 'as yet unclassified' stamp: 'cwp 1/1/2013 18:52' prior: 40834114!
initialize
	"Install the Environments support into the image on class initialization."
	self install! !
"Environments"!
!Parser methodsFor: 'private' stamp: 'cwp 12/26/2012 23:59' prior: 52081878!
initPattern: aString notifying: req return: aBlock
	"Parse only a method pattern (selector plus arguments) from aString.
	Evaluate aBlock with the resulting pattern and answer its value, or
	nil when parsing fails. The parser serves as its own encoder here."
	| result |
	self
		init: (ReadStream on: aString asString)
		cue: (CompilationCue source: aString requestor: req)
		failBlock: [^nil].
	encoder := self.
	result := aBlock value: (self pattern: false inContext: nil).
	encoder := failBlock := nil. "break cycles"
	^result! !
!Parser methodsFor: 'public access' stamp: 'cwp 12/27/2012 00:01' prior: 34175471!
parse: sourceStream class: class category: aCategory noPattern: noPattern context: aContext notifying: req ifFail: aBlock
	"Legacy entry point: wrap the separate arguments in a CompilationCue
	and delegate to parse:cue:noPattern:ifFail:."
	| c |
	c := CompilationCue
		source: sourceStream
		context: aContext
		class: class
		category: aCategory
		requestor: req.
	^ self
		parse: sourceStream
		cue: c
		noPattern: noPattern
		ifFail: aBlock! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 09:11' prior: 34183963!
evaluate: textOrStream in: aContext to: receiver notifying: aRequestor ifFail: failBlock logged: logFlag
	"Compiles the sourceStream into a parse tree, then generates code into
	a method. If aContext is not nil, the text can refer to temporaries in that
	context (the Debugger uses this). If aRequestor is not nil, then it will receive
	a notify:at: message before the attempt to evaluate is aborted. Finally, the
	compiled method is invoked from here via withArgs:executeMethod:, hence
	the system no longer creates Doit method litter on errors.
	Legacy entry point: builds a CompilationCue and delegates to
	evaluate:cue:ifFail:logged:."
	| theClass |
	"compile against the class of the receiver (or of the context's
	receiver when debugging inside a context)"
	theClass := ((aContext == nil ifTrue: [receiver] ifFalse: [aContext receiver]) class).
	self setCue: (CompilationCue
		source: textOrStream
		context: aContext
		receiver: receiver
		class: theClass
		environment: theClass environment
		category: nil
		requestor: aRequestor).
	^ self evaluate: textOrStream cue: cue ifFail: failBlock logged: logFlag! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 09:17' prior: 34185488!
from: textOrStream class: aClass classified: aCategory context: aContext notifying: req
	"Legacy initialization: record the source and wrap the remaining
	arguments in a CompilationCue."
	self source: textOrStream.
	self setCue:
		(CompilationCue
			source: textOrStream
			context: aContext
			class: aClass
			category: aCategory
			requestor: req)! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/26/2012 23:55' prior: 50781309!
from: textOrStream class: aClass context: aContext notifying: req
	"Legacy initialization without a category: record the source and wrap
	the remaining arguments in a CompilationCue."
	self source: textOrStream.
	self setCue:
		(CompilationCue
			source: textOrStream
			context: aContext
			class: aClass
			requestor: req)
! !
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/27/2012 09:41' prior: 50996506!
init: aClass context: aContext notifying: anObject
	"Legacy entry point: wrap class and context in a CompilationCue and
	delegate to init:notifying:."
	| c |
	c := CompilationCue
		context: aContext
		class: aClass
		requestor: nil.
	self init: c notifying: anObject! !
!Encoder methodsFor: 'initialize-release' stamp: 'cwp 12/26/2012 23:58' prior: 39061698!
temps: tempVars literals: lits class: cl
	"Initialize this encoder for decompilation: the scope table is seeded
	with the given temp nodes and the literal stream is preloaded with the
	method's existing literals."
	self setCue: (CompilationCue class: cl).
	supered := false.
	nTemps := tempVars size.
	tempVars do: [:node | scopeTable at: node name put: node].
	literalStream := WriteStream on: (Array new: lits size).
	literalStream nextPutAll: lits.
	sourceRanges := Dictionary new: 32.
	globalSourceRanges := OrderedCollection new: 32.! !
"Compiler"!
!Class methodsFor: 'class variables' stamp: 'cwp 6/22/2012 15:48' prior: 36026010!
addClassVarName: aString
	"Add the argument, aString, as a class variable of the receiver.
	Signal an error if the first character of aString is not capitalized,
	or if it is already a variable named in the class."
	| symbol oldState |
	oldState := self copy.
	aString first canBeGlobalVarInitial
		ifFalse: [^self error: aString, ' class variable name should be capitalized; proceed to include anyway.'].
	symbol := aString asSymbol.
	"NOTE(review): this duplicate check sends canFindWithoutEnvironment:
	to self on every iteration and never uses the loop variable 'subclass'
	-- confirm whether 'subclass canFindWithoutEnvironment:' was intended."
	self withAllSubclasses do:
		[:subclass |
		(self canFindWithoutEnvironment: symbol) ifTrue: [
			(DuplicateVariableError new)
				superclass: superclass; "fake!!!!!!"
				variable: aString;
				signal: aString, ' is already defined']].
	classPool == nil ifTrue: [classPool := Dictionary new].
	(classPool includesKey: symbol) ifFalse:
		["Pick up any refs in Undeclared"
		classPool declare: symbol from: environment undeclared.
		SystemChangeNotifier uniqueInstance classDefinitionChangedFrom: oldState to: self]! !
!Class methodsFor: 'compiling' stamp: 'cwp 6/20/2012 09:48' prior: 54782024!
bindingOf: varName
	"Look varName up scoped to the receiver's own environment."
	^ self bindingOf: varName environment: self environment! !
!Class methodsFor: 'organization' stamp: 'cwp 6/25/2012 18:25' prior: 54785804!
category
	"Answer the system organization category for the receiver. First check whether the
	category name stored in the ivar is still correct and only if this fails look it up
	(latter is much more expensive). The freshly looked-up value is cached
	back into the ivar before answering."
	category ifNotNil: [ :symbol |
		((self environment organization listAtCategoryNamed: symbol) includes: self name)
			ifTrue: [ ^symbol ] ].
	category := self environment organization categoryOfElement: self name.
	^category! !
!Class methodsFor: 'initialize-release' stamp: 'cwp 6/22/2012 15:49' prior: 36027730!
declare: varString
	"Declare class variables common to all instances. Answer whether
	recompilation is advisable. Existing class variables not named in
	varString are removed; new names are checked for clashes with
	variables visible elsewhere before being declared (picking up any
	references currently sitting in Undeclared)."
	| newVars conflicts |
	newVars :=
		(Scanner new scanFieldNames: varString)
			collect: [:x | x asSymbol].
	newVars do:
		[:var | var first canBeGlobalVarInitial
			ifFalse: [self error: var, ' class variable name should be capitalized; proceed to include anyway.']].
	conflicts := false.
	classPool == nil
		ifFalse: [(classPool keys reject: [:x | newVars includes: x]) do:
			[:var | self removeClassVarName: var]].
	(newVars reject: [:var | self classPool includesKey: var])
		do: [:var | "adding"
			"check if new vars defined elsewhere"
			(self canFindWithoutEnvironment: var) ifTrue: [
					(DuplicateVariableError new)
						superclass: superclass; "fake!!!!!!"
						variable: var;
						signal: var, ' is already defined'.
					conflicts := true]].
	newVars size > 0
		ifTrue:
			[classPool := self classPool.
			"in case it was nil"
			newVars do: [:var | classPool declare: var from: environment undeclared]].
	^conflicts! !
!Class methodsFor: 'class variables' stamp: 'cwp 6/22/2012 15:49' prior: 54802475!
removeClassVarName: aString
	"Remove the class variable whose name is the argument, aString, from
	the names defined in the receiver, a class. Create an error notification if
	aString is not a class variable or if it is still being used in the code of
	the class. A variable still referenced by some method is moved to
	Undeclared (silently during a fileIn, otherwise after confirmation)."
	| aSymbol |
	aSymbol := aString asSymbol.
	(classPool includesKey: aSymbol)
		ifFalse: [^self error: aString, ' is not a class variable'].
	self withAllSubclasses do:[:subclass |
		(Array with: subclass with: subclass class) do:[:classOrMeta |
			(classOrMeta whichSelectorsReferTo: (classPool associationAt: aSymbol))
				isEmpty ifFalse: [
					InMidstOfFileinNotification signal ifTrue: [
						Transcript cr; show: self name, ' (' , aString , ' is Undeclared) '.
						^ environment undeclared declare: aSymbol from: classPool].
					(self confirm: (aString,' is still used in code of class ', classOrMeta name,
						'.\Is it okay to move it to Undeclared?') withCRs)
						ifTrue:[^Undeclared declare: aSymbol from: classPool]
						ifFalse:[^self]]]].
	classPool removeKey: aSymbol.
	classPool isEmpty ifTrue: [classPool := nil].
! !
!Class methodsFor: 'class name' stamp: 'cwp 6/22/2012 15:49' prior: 54796206!
rename: aString
	"The new name of the receiver is the argument, aString.
	Refuse the rename when the name is already taken in the environment;
	merely inform the user when Undeclared holds references to it."
	| oldName newName |
	(newName := aString asSymbol) = (oldName := self name)
		ifTrue: [^ self].
	(self environment includesKey: newName)
		ifTrue: [^ self error: newName , ' already exists'].
	(environment undeclared includesKey: newName)
		ifTrue: [self inform: 'There are references to, ' , aString printString , '
from Undeclared. Check them after this change.'].
	name := newName.
	self environment renameClass: self from: oldName! !
!ClassBuilder methodsFor: 'class definition' stamp: 'cwp 6/22/2012 01:05' prior: 39054430!
name: className inEnvironment: env subclassOf: newSuper type: type instanceVariableNames: instVarString classVariableNames: classVarString poolDictionaries: poolString category: category unsafe: unsafe
	"Define a new class in the given environment.
	If unsafe is true do not run any validation checks.
	This facility is provided to implement important system changes.
	Answer the new or modified class, or nil when any validation fails.
	A copy of the old class is kept (hooked under the old superclass) so
	change notifications can compare old and new definitions; the ensure:
	block unhooks that copy and flushes obsolete subclasses."
	| oldClass instVars classVars copyOfOldClass newClass |
	environ := env.
	instVars := Scanner new scanFieldNames: instVarString.
	classVars := (Scanner new scanFieldNames: classVarString) collect: [:x | x asSymbol].
	"Validate the proposed name"
	unsafe ifFalse:[(self validateClassName: className) ifFalse:[^nil]].
	oldClass := env at: className ifAbsent:[nil].
	oldClass isBehavior
		ifFalse: [oldClass := nil] "Already checked in #validateClassName:"
		ifTrue: [
			copyOfOldClass := oldClass copy.
			copyOfOldClass superclass addSubclass: copyOfOldClass].
	[ | newCategory needNew force organization oldCategory |
	unsafe ifFalse:[
		"Run validation checks so we know that we have a good chance for recompilation"
		(self validateSuperclass: newSuper forSubclass: oldClass) ifFalse:[^nil].
		(self validateInstvars: instVars from: oldClass forSuper: newSuper) ifFalse:[^nil].
		(self validateClassvars: classVars from: oldClass forSuper: newSuper) ifFalse:[^nil].
		(self validateSubclassFormat: type from: oldClass forSuper: newSuper extra: instVars size) ifFalse:[^nil]].
	"See if we need a new subclass"
	needNew := self needsSubclassOf: newSuper type: type instanceVariables: instVars from: oldClass.
	needNew == nil ifTrue:[^nil]. "some error"
	(needNew and:[unsafe not]) ifTrue:[
		"Make sure we don't redefine any dangerous classes"
		(self tooDangerousClasses includes: oldClass name) ifTrue:[
			self error: oldClass name, ' cannot be changed'.
		].
		"Check if the receiver should not be redefined"
		(oldClass ~~ nil and:[oldClass shouldNotBeRedefined]) ifTrue:[
			self notify: oldClass name asText allBold,
						' should not be redefined. \Proceed to store over it.' withCRs]].
	needNew ifTrue:[
		"Create the new class"
		newClass := self
			newSubclassOf: newSuper
			type: type
			instanceVariables: instVars
			from: oldClass.
		newClass == nil ifTrue:[^nil]. "Some error"
		newClass setName: className.
		newClass environment: environ.
	] ifFalse:[
		"Reuse the old class"
		newClass := oldClass.
	].
	"Install the class variables and pool dictionaries... "
	force := (newClass declare: classVarString) | (newClass sharing: poolString).
	"... classify ..."
	newCategory := category asSymbol.
	organization := environ ifNotNil:[environ organization].
	oldClass isNil ifFalse: [oldCategory := (organization categoryOfElement: oldClass name) asSymbol].
	organization classify: newClass name under: newCategory suppressIfDefault: true.
	"... recompile ..."
	newClass := self recompile: force from: oldClass to: newClass mutate: false.
	"... export if not yet done ..."
	(environ at: newClass name ifAbsent:[nil]) == newClass ifFalse:[
		[environ at: newClass name put: newClass]
			on: AttemptToWriteReadOnlyGlobal do:[:ex| ex resume: true].
		environ flushClassNameCache.
	].
	newClass doneCompiling.
	"... notify interested clients ..."
	oldClass isNil ifTrue: [
		SystemChangeNotifier uniqueInstance classAdded: newClass inCategory: newCategory.
		^ newClass].
	newCategory ~= oldCategory
		ifTrue: [SystemChangeNotifier uniqueInstance class: newClass recategorizedFrom: oldCategory to: category]
		ifFalse: [SystemChangeNotifier uniqueInstance classDefinitionChangedFrom: copyOfOldClass to: newClass.].
	] ensure:
		[copyOfOldClass ifNotNil: [copyOfOldClass superclass removeSubclass: copyOfOldClass].
		Behavior flushObsoleteSubclasses.
	].
	^newClass! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 22:57' prior: 18572019!
superclass: newSuper
	subclass: t instanceVariableNames: f
	classVariableNames: d poolDictionaries: s category: cat
	"This is the standard initialization message for creating a new class as a
	subclass of an existing class. The target environment is obtained via an
	EnvironmentRequest, defaulting to the superclass's environment; the
	class type is inherited from the superclass."
	| env |
	env := EnvironmentRequest signal ifNil: [newSuper environment].
	^self
		name: t
		inEnvironment: env
		subclassOf: newSuper
		type: newSuper typeOfClass
		instanceVariableNames: f
		classVariableNames: d
		poolDictionaries: s
		category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:01' prior: 50629912!
superclass: aClass
	variableByteSubclass: t instanceVariableNames: f
	classVariableNames: d poolDictionaries: s category: cat
	"This is the standard initialization message for creating a new class as a
	subclass of an existing class in which the subclass is to
	have indexable byte-sized nonpointer variables. The superclass must have
	no named, word or pointer fields; an existing compiledMethod-typed class
	of the same name keeps its type. The environment comes from an
	EnvironmentRequest, defaulting to the superclass's."
	| oldClassOrNil actualType env |
	(aClass instSize > 0)
		ifTrue: [^self error: 'cannot make a byte subclass of a class with named fields'].
	(aClass isVariable and: [aClass isWords])
		ifTrue: [^self error: 'cannot make a byte subclass of a class with word fields'].
	(aClass isVariable and: [aClass isPointers])
		ifTrue: [^self error: 'cannot make a byte subclass of a class with pointer fields'].
	oldClassOrNil := aClass environment at: t ifAbsent:[nil].
	actualType := (oldClassOrNil notNil
				   and: [oldClassOrNil typeOfClass == #compiledMethod])
		ifTrue: [#compiledMethod]
		ifFalse: [#bytes].
	env := EnvironmentRequest signal ifNil: [aClass environment].
	^self
		name: t
		inEnvironment: env
		subclassOf: aClass
		type: actualType
		instanceVariableNames: f
		classVariableNames: d
		poolDictionaries: s
		category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:03' prior: 18573442!
superclass: aClass
	variableSubclass: t instanceVariableNames: f
	classVariableNames: d poolDictionaries: s category: cat
	"This is the standard initialization message for creating a new class as a
	subclass of an existing class in which the subclass is to
	have indexable pointer variables. The superclass may not have non-pointer
	fields. The environment comes from an EnvironmentRequest, defaulting to
	the superclass's."
	| env |
	aClass isBits ifTrue:
		[^self error: 'cannot make a pointer subclass of a class with non-pointer fields'].
	env := EnvironmentRequest signal ifNil: [aClass environment].
	^self
		name: t
		inEnvironment: env
		subclassOf: aClass
		type: #variable
		instanceVariableNames: f
		classVariableNames: d
		poolDictionaries: s
		category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:04' prior: 18574098!
superclass: aClass
	variableWordSubclass: t instanceVariableNames: f
	classVariableNames: d poolDictionaries: s category: cat
	"This is the standard initialization message for creating a new class as a
	subclass of an existing class in which the subclass is to
	have indexable word-sized nonpointer variables. The superclass must have
	no named, byte or pointer fields. The environment comes from an
	EnvironmentRequest, defaulting to the superclass's."
	| env |
	(aClass instSize > 0)
		ifTrue: [^self error: 'cannot make a word subclass of a class with named fields'].
	(aClass isVariable and: [aClass isBytes])
		ifTrue: [^self error: 'cannot make a word subclass of a class with byte fields'].
	(aClass isVariable and: [aClass isPointers])
		ifTrue: [^self error: 'cannot make a word subclass of a class with pointer fields'].
	env := EnvironmentRequest signal ifNil: [aClass environment].
	^self
		name: t
		inEnvironment: env
		subclassOf: aClass
		type: #words
		instanceVariableNames: f
		classVariableNames: d
		poolDictionaries: s
		category: cat! !
!ClassBuilder methodsFor: 'public' stamp: 'cwp 6/19/2012 23:04' prior: 18575028!
superclass: aClass
	weakSubclass: t instanceVariableNames: f
	classVariableNames: d poolDictionaries: s category: cat
	"This is the standard initialization message for creating a new class as a
	subclass of an existing class (the receiver) in which the subclass is to
	have weak indexable pointer variables. The superclass may not have
	non-pointer fields. The environment comes from an EnvironmentRequest,
	defaulting to the superclass's."
	| env |
	aClass isBits
		ifTrue: [^self error: 'cannot make a pointer subclass of a class with non-pointer fields'].
	env := EnvironmentRequest signal ifNil: [aClass environment].
	^self
		name: t
		inEnvironment: env
		subclassOf: aClass
		type: #weak
		instanceVariableNames: f
		classVariableNames: d
		poolDictionaries: s
		category: cat! !
"Kernel"!
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:21' prior: 59135029!
ambiguousSelector: aString inRange: anInterval
	"Resolve an ambiguous parse around a minus sign (binary selector vs
	negative literal) by asking the user which reading was meant and
	editing the requestor's source accordingly."
	| correctedSelector userSelection offset intervalWithOffset |
	self interactive ifFalse: [
		"In non interactive mode, compile with backward compatibility: $- is part of literal argument"
		Transcript cr; store: encoder classEncoding; nextPutAll:#'>>';store: encoder selector; show: ' would send ' , token , '-'.
		^super ambiguousSelector: aString inRange: anInterval].
	"handle the text selection"
	userSelection := cue requestor selectionInterval.
	intervalWithOffset := anInterval first + requestorOffset to: anInterval last + requestorOffset.
	cue requestor selectFrom: intervalWithOffset first to: intervalWithOffset last.
	cue requestor select.
	"Build the menu with alternatives"
	correctedSelector := AmbiguousSelector
			signalName: aString
			inRange: intervalWithOffset.
	correctedSelector ifNil: [^self fail].
	"Execute the selected action"
	offset := self substituteWord: correctedSelector wordInterval: intervalWithOffset offset: 0.
	cue requestor deselect.
	cue requestor selectInvisiblyFrom: userSelection first to: userSelection last + offset.
	token := (correctedSelector readStream upTo: Character space) asSymbol! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:21' prior: 38558136!
collectTemporaryDeclarationsFrom: methodNode
	"Answer the parse nodes of methodNode (plus, possibly, the parser
	itself) whose tempsMark points at a '|' within the requestor's text,
	sorted by descending mark position."
	| tempsMarks str |
	tempsMarks := OrderedCollection new.
	str := cue requestor text asString.
	methodNode accept: (ParseNodeEnumerator
		ofBlock: [ :aNode |
			| mark |
			(aNode class canUnderstand: #tempsMark)
				ifTrue:
					[mark := aNode tempsMark.
					(mark notNil and: [ mark between: 1 and: str size ] and: [ (str at: mark) = $| ])
						ifTrue: [ tempsMarks addLast: aNode ]]]).
	(tempsMark notNil and: [ tempsMark between: 1 and: str size ] and: [ (str at: tempsMark) = $| ])
		ifTrue: [ tempsMarks addLast: self ].
	^ tempsMarks sorted: [ :a :b | a tempsMark > b tempsMark ]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:20' prior: 52096606!
correctSelector: proposedKeyword wordIntervals: spots exprInterval: expInt ifAbort: abortAction
"Correct the proposedKeyword to some selector symbol, correcting the original text if such action is indicated. abortAction is invoked if the proposedKeyword couldn't be converted into a valid selector. Spots is an ordered collection of intervals within the test stream of the for each of the keyword parts."
| correctSelector userSelection |
"If we can't ask the user, assume that the keyword will be defined later"
self interactive ifFalse: [^proposedKeyword asSymbol].
userSelection := cue requestor selectionInterval.
cue requestor selectFrom: spots first first to: spots last last.
cue requestor select.
correctSelector := UnknownSelector name: proposedKeyword.
correctSelector ifNil: [^abortAction value].
cue requestor deselect.
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last.
self substituteSelector: correctSelector keywords wordIntervals: spots.
^(proposedKeyword last ~~ $:
and: [correctSelector last == $:])
ifTrue: [abortAction value]
ifFalse: [correctSelector]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:20' prior: 33907242!
correctVariable: proposedVariable interval: spot
"Correct the proposedVariable to a known variable, or declare it as a new
variable if such action is requested. We support declaring lowercase
variables as temps or inst-vars, and uppercase variables as Globals or
ClassVars, depending on whether the context is nil (class=UndefinedObject).
Spot is the interval within the test stream of the variable.
rr 3/4/2004 10:26 : adds the option to define a new class. "
"Check if this is an i-var, that has been corrected already (ugly)"
"Display the pop-up menu"
| binding userSelection action |
(encoder classEncoding instVarNames includes: proposedVariable) ifTrue:
[^InstanceVariableNode new
name: proposedVariable
index: (encoder classEncoding allInstVarNames indexOf: proposedVariable)].
"If we can't ask the user for correction, make it undeclared"
self interactive ifFalse: [^encoder undeclared: proposedVariable].
"First check to see if the requestor knows anything about the variable"
(binding := cue requestor bindingOf: proposedVariable)
ifNotNil: [^encoder global: binding name: proposedVariable].
userSelection := cue requestor selectionInterval.
cue requestor selectFrom: spot first to: spot last.
cue requestor select.
"Build the menu with alternatives"
action := UndeclaredVariable
signalFor: self
name: proposedVariable
inRange: spot.
action ifNil: [^self fail].
"Execute the selected action"
cue requestor deselect.
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last.
^action value! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:19' prior: 34172921!
declareUndeclaredTemps: methodNode
"Declare any undeclared temps, declaring them at the smallest enclosing scope."
| undeclared userSelection blocksToVars |
(undeclared := encoder undeclaredTemps) isEmpty ifTrue:
[^self].
userSelection := cue requestor selectionInterval.
blocksToVars := IdentityDictionary new.
undeclared do:
[:var|
(blocksToVars
at: (var tag == #method
ifTrue: [methodNode block]
ifFalse: [methodNode accept: (VariableScopeFinder new ofVariable: var)])
ifAbsentPut: [SortedCollection new]) add: var name].
(blocksToVars removeKey: methodNode block ifAbsent: []) ifNotNil:
[:rootVars|
rootVars do: [:varName| self pasteTempAtMethodLevel: varName]].
(blocksToVars keys sorted: [:a :b| a tempsMark < b tempsMark]) do:
[:block| | decl |
decl := (blocksToVars at: block) reduce: [:a :b| a, ' ', b].
block temporaries isEmpty
ifTrue:
[self substituteWord: ' | ', decl, ' |'
wordInterval: (block tempsMark + 1 to: block tempsMark)
offset: requestorOffset]
ifFalse:
[self substituteWord: decl, ' '
wordInterval: (block tempsMark to: block tempsMark - 1)
offset: requestorOffset]].
cue requestor selectInvisiblyFrom: userSelection first to: userSelection last + requestorOffset.
ReparseAfterSourceEditing signal! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 11:45' prior: 37183770!
defineClass: className
"prompts the user to define a new class,
asks for it's category, and lets the users edit further
the definition"
| sym cat def d2 |
sym := className asSymbol.
cat := UIManager default request: 'Enter class category : ' initialAnswer: self encoder classEncoding theNonMetaClass category.
cat
ifEmpty: [cat := 'Unknown'].
def := 'Object subclass: #' , sym , '
instanceVariableNames: ''''
classVariableNames: ''''
poolDictionaries: ''''
category: ''' , cat , ''''.
d2 := UIManager default request: 'Edit class definition : ' initialAnswer: def.
d2
ifEmpty: [d2 := def].
Compiler evaluate: d2.
^ encoder
global: (cue environment bindingOf: sym)
name: sym! !
!Parser methodsFor: 'primitives' stamp: 'cwp 12/27/2012 11:46' prior: 37184567!
externalFunctionDeclaration
"Parse the function declaration for a call to an external library."
| descriptorClass callType modifier retType externalName args argType module fn |
descriptorClass := cue environment
valueOf: #ExternalFunction
ifAbsent: [^ false].
callType := descriptorClass callingConventionFor: here.
callType == nil ifTrue:[^false].
[modifier := descriptorClass callingConventionModifierFor: token.
modifier notNil] whileTrue:
[self advance.
callType := callType bitOr: modifier].
"Parse return type"
self advance.
retType := self externalType: descriptorClass.
retType == nil ifTrue:[^self expected:'return type'].
"Parse function name or index"
externalName := here.
(self match: #string)
ifTrue:[externalName := externalName asSymbol]
ifFalse:[(self match:#number) ifFalse:[^self expected:'function name or index']].
(self matchToken: #'(') ifFalse:[^self expected:'argument list'].
args := WriteStream on: Array new.
[here == #')'] whileFalse:[
argType := self externalType: descriptorClass.
argType == nil ifTrue:[^self expected:'argument'].
argType isVoid & argType isPointerType not ifFalse:[args nextPut: argType].
].
(self matchToken: #')') ifFalse:[^self expected:')'].
(self matchToken: 'module:') ifTrue:[
module := here.
(self match: #string) ifFalse:[^self expected: 'String'].
module := module asSymbol].
Smalltalk at: #ExternalLibraryFunction ifPresent:[:xfn|
fn := xfn name: externalName
module: module
callType: callType
returnType: retType
argumentTypes: args contents.
self allocateLiteral: fn.
].
(self matchToken: 'error:')
ifTrue:
[| errorCodeVariable |
errorCodeVariable := here.
(hereType == #string
or: [hereType == #word]) ifFalse:[^self expected: 'error code (a variable or string)'].
self advance.
self addPragma: (Pragma keyword: #primitive:error: arguments: (Array with: 120 with: errorCodeVariable)).
fn ifNotNil: [fn setErrorCodeName: errorCodeVariable]]
ifFalse:
[self addPragma: (Pragma keyword: #primitive: arguments: #(120))].
^true
! !
!Parser methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:19' prior: 58306169!
interactive
"Answer true if compilation is interactive"
^ cue requestor notNil! !
!Parser methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:22' prior: 58137223!
notify: string at: location
cue requestor isNil
ifTrue: [(encoder == self or: [encoder isNil]) ifTrue: [^ self fail "failure setting up syntax error"].
SyntaxErrorNotification
inClass: encoder classEncoding
category: cue category
withCode:
(source contents asText
copyReplaceFrom: location
to: location - 1
with: ((string , ' ->') asText allBold
addAttribute: TextColor red; yourself))
doitFlag: doitFlag
errorMessage: string
location: location]
ifFalse: [cue requestor
notify: string , ' ->'
at: location
in: source].
^self fail! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:17' prior: 34177108!
pasteTempAtMethodLevel: name
| insertion delta theTextString characterBeforeMark |
theTextString := cue requestor text string.
characterBeforeMark := theTextString at: tempsMark-1 ifAbsent: [$ ].
(theTextString at: tempsMark) = $| ifTrue: [
"Paste it before the second vertical bar"
insertion := name, ' '.
characterBeforeMark isSeparator ifFalse: [
insertion := ' ', insertion].
delta := 0.
] ifFalse: [
"No bars - insert some with CR, tab"
insertion := '| ' , name , ' |',String cr.
delta := 2. "the bar and CR"
characterBeforeMark = Character tab ifTrue: [
insertion := insertion , String tab.
delta := delta + 1. "the tab"
].
].
tempsMark := tempsMark +
(self substituteWord: insertion
wordInterval: (tempsMark to: tempsMark-1)
offset: 0) - delta! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:16' prior: 52095305!
queryUndefined
| varStart varName |
varName := parseNode key.
varStart := self endOfLastToken + requestorOffset - varName size + 1.
cue requestor selectFrom: varStart to: varStart + varName size - 1; select.
(UndefinedVariable name: varName) ifFalse: [^ self fail]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:15' prior: 38599341!
removeEmptyTempDeclarationsFrom: methodNode
| sourceCode madeChanges tempsMarkHolder |
sourceCode := cue requestor text asString.
tempsMarkHolder := self collectTemporaryDeclarationsFrom: methodNode.
madeChanges := false.
tempsMarkHolder do: [ :currentBlock | | tempsMarkChar0 tempsMarkChar1 tempsMarkChar2 end start |
tempsMarkChar0 := (sourceCode at: currentBlock tempsMark).
tempsMarkChar1 := (sourceCode at: currentBlock tempsMark - 1).
tempsMarkChar2 := (sourceCode at: currentBlock tempsMark - 2).
tempsMarkChar0 = $| & tempsMarkChar1 = $|
ifTrue:
[ end := currentBlock tempsMark.
start := end - 1].
tempsMarkChar0 = $| & tempsMarkChar1 = $ & tempsMarkChar2 = $|
ifTrue:
[ end := currentBlock tempsMark.
start := end - 2].
start notNil & end notNil ifTrue: [
| lineStart lineEnd |
lineStart := 1 + (sourceCode
lastIndexOf: Character cr
startingAt: start - 1
ifAbsent: [ 0 ]).
lineEnd := sourceCode
indexOf: Character cr
startingAt: end + 1
ifAbsent: [ sourceCode size ].
((sourceCode indexOfAnyOf: CharacterSet nonSeparators startingAt: lineStart) >= start
and: [ (sourceCode indexOfAnyOf: CharacterSet nonSeparators startingAt: end + 1) > lineEnd ]) ifTrue: [
start := lineStart.
end := lineEnd ].
cue requestor correctFrom: start to: end with: ''.
madeChanges := true.
currentBlock tempsMark: nil ] ].
madeChanges ifTrue: [ReparseAfterSourceEditing signal]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:15' prior: 38561281!
removeUnusedTemporaryNamed: temp from: str lookingAt: currentBlock movingTempMarksOf: someBlocks
| start end |
end := currentBlock tempsMark - 1.
["Beginning at right temp marker..."
start := end - temp size + 1.
end < temp size or: [ (str at: start) = $| ]
or: [ temp = (str copyFrom: start to: end)
and: [ ((str at: start - 1) = $| | (str at: start - 1) isSeparator)
& ((str at: end + 1) = $| | (str at: end + 1) isSeparator) ] ]]
whileFalse: [
"Search left for the unused temp"
end := cue requestor nextTokenFrom: end direction: -1 ].
(end < temp size or: [ (str at: start) = $| ])
ifFalse:
[(str at: start - 1) = $
ifTrue: [ start := start - 1 ].
cue requestor correctFrom: start to: end with: ''.
someBlocks do: [ :aBlock | aBlock tempsMark: aBlock tempsMark - (end - start + 1)].
^true ].
^false! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:14' prior: 38562194!
removeUnusedTemps: methodNode
"Scan for unused temp names, and prompt the user about the prospect of removing each one found"
| madeChanges tempsMarkHolder unusedTempNames tempMarkHoldersToChange |
madeChanges := false.
tempMarkHoldersToChange := OrderedCollection new.
tempsMarkHolder := self collectTemporaryDeclarationsFrom: methodNode.
unusedTempNames := encoder unusedTempNames select:
[ :temp | (encoder lookupVariable: temp ifAbsent: [ ]) isUndefTemp
and: [ self queriedUnusedTemporaries at: temp ifAbsentPut: [UnusedVariable name: temp] ]].
tempsMarkHolder do: [ :currentBlock |
tempMarkHoldersToChange add: currentBlock.
unusedTempNames do:
[ :temp |
(self
removeUnusedTemporaryNamed: temp
from: cue requestor text asString
lookingAt: currentBlock
movingTempMarksOf: tempMarkHoldersToChange) ifTrue: [ madeChanges := true ]]].
madeChanges
ifTrue: [ self removeEmptyTempDeclarationsFrom: methodNode.
ReparseAfterSourceEditing signal ]! !
!Parser methodsFor: 'error correction' stamp: 'cwp 12/27/2012 10:14' prior: 34179326!
substituteWord: correctWord wordInterval: spot offset: o
"Substitute the correctSelector into the (presumed interactive) receiver.
Update requestorOffset based on the delta size and answer the updated offset."
cue requestor correctFrom: spot first + o to: spot last + o with: correctWord.
requestorOffset := requestorOffset + correctWord size - spot size.
^o + correctWord size - spot size! !
!Parser methodsFor: 'expression types' stamp: 'cwp 12/27/2012 10:14' prior: 34179807!
temporaries
" [ '|' (variable)* '|' ]"
| vars theActualText |
(self match: #verticalBar) ifFalse:
["no temps"
doitFlag ifTrue:
[tempsMark := self interactive
ifTrue: [cue requestor selectionInterval first]
ifFalse: [1].
^ #()].
tempsMark := hereMark "formerly --> prevMark + prevToken".
tempsMark > 0 ifTrue:
[theActualText := source contents.
[tempsMark < theActualText size and: [(theActualText at: tempsMark) isSeparator]]
whileTrue: [tempsMark := tempsMark + 1]].
^ #()].
vars := OrderedCollection new.
[hereType == #word]
whileTrue: [vars addLast: (encoder bindTemp: self advance)].
(self match: #verticalBar) ifTrue:
[tempsMark := prevMark.
^ vars].
^ self expected: 'Vertical bar'
! !
!Parser methodsFor: 'expression types' stamp: 'cwp 12/27/2012 10:14' prior: 34180638!
temporariesIn: methodSelector
" [ '|' (variable)* '|' ]"
| vars theActualText |
(self match: #verticalBar) ifFalse:
["no temps"
doitFlag ifTrue:
[tempsMark := self interactive
ifTrue: [cue requestor selectionInterval first]
ifFalse: [1].
^ #()].
tempsMark := hereMark "formerly --> prevMark + prevToken".
tempsMark > 0 ifTrue:
[theActualText := source contents.
[tempsMark < theActualText size and: [(theActualText at: tempsMark) isSeparator]]
whileTrue: [tempsMark := tempsMark + 1]].
^ #()].
vars := OrderedCollection new.
[hereType == #word]
whileTrue: [vars addLast: (encoder bindTemp: self advance in: methodSelector)].
(self match: #verticalBar) ifTrue:
[tempsMark := prevMark.
^ vars].
^ self expected: 'Vertical bar'! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 10:11' prior: 53971863!
compiledMethodFor: textOrStream in: aContext to: receiver notifying: aRequestor ifFail: failBlock logged: logFlag
"Compiles the sourceStream into a parse tree, then generates code
into a method, and answers it. If receiver is not nil, then the text can
refer to instance variables of that receiver (the Inspector uses this).
If aContext is not nil, the text can refer to temporaries in that context
(the Debugger uses this). If aRequestor is not nil, then it will receive a
notify:at: message before the attempt to evaluate is aborted."
| methodNode method theClass |
theClass := (aContext == nil ifTrue: [receiver] ifFalse: [aContext receiver]) class.
self from: textOrStream class: theClass context: aContext notifying: aRequestor.
methodNode := self translate: sourceStream noPattern: true ifFail: [^failBlock value].
method := self interactive ifTrue: [ methodNode generateWithTempNames ]
ifFalse: [methodNode generate].
logFlag ifTrue:
[SystemChangeNotifier uniqueInstance evaluated: sourceStream contents context: aContext].
^method! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 11:33' prior: 34363593!
format: aStream noPattern: noPattern ifFail: failBlock
^(self parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^failBlock value]) preen! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 10:08' prior: 58306325!
interactive
"Answer true if compilation is interactive"
^ cue requestor notNil! !
!Compiler methodsFor: 'error handling' stamp: 'cwp 12/27/2012 10:10' prior: 50779387!
notify: aString at: location
"Refer to the comment in Object|notify:."
^ cue requestor == nil
ifTrue: [SyntaxErrorNotification
inClass: cue getClass
category: cue category
withCode:
(sourceStream contents
copyReplaceFrom: location
to: location - 1
with: aString)
doitFlag: false
errorMessage: aString
location: location]
ifFalse: [cue requestor
notify: aString
at: location
in: sourceStream]! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 11:34' prior: 50777201!
parse: textOrStream in: aClass notifying: req
"Compile the argument, textOrStream, with respect to the class, aClass, and
answer the MethodNode that is the root of the resulting parse tree. Notify the
argument, req, if an error occurs. The failBlock is defaulted to an empty block."
self from: textOrStream class: aClass context: nil notifying: req.
^self parser
parse: sourceStream
cue: cue
noPattern: false
ifFail: []! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 10:09' prior: 36332471!
parser
parser ifNil: [parser := (cue getClass ifNil: [self class]) newParser].
^parser! !
!Compiler methodsFor: 'private' stamp: 'cwp 12/27/2012 11:37' prior: 50780779!
translate: aStream noPattern: noPattern ifFail: failBlock
^self parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^failBlock value]! !
!Compiler methodsFor: 'public access' stamp: 'cwp 12/27/2012 11:37' prior: 19124095!
translate: aStream noPattern: noPattern ifFail: failBlock parser: parser
| tree |
tree := parser
parse: aStream
cue: cue
noPattern: noPattern
ifFail: [^ failBlock value].
^ tree! !
!Encoder methodsFor: 'results' stamp: 'cwp 12/27/2012 10:26' prior: 50999892!
associationForClass
| assoc |
assoc := self environment associationAt: cue getClass name ifAbsent: [nil].
^assoc value == cue getClass
ifTrue: [assoc]
ifFalse: [Association new value: cue getClass]! !
!Encoder methodsFor: 'temps' stamp: 'cwp 12/27/2012 10:25' prior: 20148386!
bindTemp: name in: methodSelector
"Declare a temporary; error not if a field or class variable."
scopeTable at: name ifPresent:[:node|
"When non-interactive raise the error only if its a duplicate"
(node isTemp or:[requestor interactive])
ifTrue:[^self notify:'Name is already defined']
ifFalse:[Transcript
show: '(', name, ' is shadowed in "' , cue getClass printString , '>>' , methodSelector printString , '")']].
^self reallyBind: name! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:25' prior: 20149084!
classEncoding
"This is a hack so that the parser may findout what class it was parsing for when it wants to create a syntax error view."
^ cue getClass! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:39' prior: 20138819!
encodeLiteral: object
^self
name: object
key: (cue literalScannedAs: object notifying: self)
class: LiteralNode
type: LdLitType
set: litSet! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:40' prior: 20139010!
encodeSelector: aSelector
^self
name: aSelector
key: aSelector
class: SelectorNode
type: SendType
set: selectorSet! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:40' prior: 58545123!
environment
"Answer the environment of the current compilation context,
be it in a class or global (e.g. a workspace)"
^cue environment! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 11:41' prior: 50994497!
lookupInPools: varName ifFound: assocBlock
^Symbol
hasInterned: varName
ifTrue:
[:sym|
(cue bindingOf: sym)
ifNil: [^false]
ifNotNil: [:assoc| assocBlock value: assoc]]! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:24' prior: 51004306!
possibleNamesFor: proposedName
| results |
results := cue getClass
possibleVariablesFor: proposedName
continuedFrom: nil.
^ proposedName correctAgainst: nil continuedFrom: results.
! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:24' prior: 50995012!
possibleVariablesFor: proposedVariable
| results |
results := proposedVariable correctAgainstDictionary: scopeTable
continuedFrom: nil.
proposedVariable first canBeGlobalVarInitial ifTrue:
[ results := cue getClass possibleVariablesFor: proposedVariable
continuedFrom: results ].
^ proposedVariable correctAgainst: nil continuedFrom: results.
! !
!Encoder methodsFor: 'encoding' stamp: 'cwp 12/27/2012 11:42' prior: 51002830!
undeclared: name
| sym |
requestor interactive ifTrue:
[requestor requestor == #error: ifTrue:
[requestor error: 'Undeclared'].
^self notify: 'Undeclared'].
"Allow knowlegeable clients to squash the undeclared warning if they want (e.g.
Diffing pretty printers that are simply formatting text). As this breaks
compilation it should only be used by clients that want to discard the result
of the compilation. To squash the warning use e.g.
[Compiler format: code in: class notifying: nil decorated: false]
on: UndeclaredVariableWarning
do: [:ex| ex resume: false]"
sym := name asSymbol.
^(UndeclaredVariableWarning new name: name selector: selector class: cue getClass) signal
ifTrue:
[| undeclared |
undeclared := cue environment undeclared.
undeclared at: sym put: nil.
self global: (undeclared associationAt: sym) name: sym]
ifFalse:
[self global: (Association key: sym) name: sym]! !
!Encoder methodsFor: 'private' stamp: 'cwp 12/27/2012 10:23' prior: 51006007!
warnAboutShadowed: name
requestor addWarning: name,' is shadowed'.
selector ifNotNil:
[Transcript cr; show: cue getClass name,'>>', selector, '(', name,' is shadowed)']! !
"Compiler"!
!SmalltalkImage methodsFor: 'housekeeping' stamp: 'cwp 6/22/2012 15:56' prior: 58497062!
cleanOutUndeclared
globals undeclared removeUnreferencedKeys! !
!SmalltalkImage methodsFor: 'special objects' stamp: 'cwp 6/22/2012 09:01' prior: 40515090!
recreateSpecialObjectsArray
"Smalltalk recreateSpecialObjectsArray"
"To external package developers:
**** DO NOT OVERRIDE THIS METHOD. *****
If you are writing a plugin and need additional special object(s) for your own use,
use addGCRoot() function and use own, separate special objects registry "
"The Special Objects Array is an array of objects used by the Squeak virtual machine.
Its contents are critical and accesses to it by the VM are unchecked, so don't even
think of playing here unless you know what you are doing."
| newArray |
newArray := Array new: 56.
"Nil false and true get used throughout the interpreter"
newArray at: 1 put: nil.
newArray at: 2 put: false.
newArray at: 3 put: true.
"This association holds the active process (a ProcessScheduler)"
newArray at: 4 put: (self bindingOf: #Processor).
"Numerous classes below used for type checking and instantiation"
newArray at: 5 put: Bitmap.
newArray at: 6 put: SmallInteger.
newArray at: 7 put: ByteString.
newArray at: 8 put: Array.
newArray at: 9 put: Smalltalk.
newArray at: 10 put: Float.
newArray at: 11 put: MethodContext.
newArray at: 12 put: BlockContext.
newArray at: 13 put: Point.
newArray at: 14 put: LargePositiveInteger.
newArray at: 15 put: Display.
newArray at: 16 put: Message.
newArray at: 17 put: CompiledMethod.
newArray at: 18 put: (self specialObjectsArray at: 18).
"(low space Semaphore)"
newArray at: 19 put: Semaphore.
newArray at: 20 put: Character.
newArray at: 21 put: #doesNotUnderstand:.
newArray at: 22 put: #cannotReturn:.
newArray at: 23 put: nil. "This is the process signalling low space."
"An array of the 32 selectors that are compiled as special bytecodes,
paired alternately with the number of arguments each takes."
newArray at: 24 put: #( #+ 1 #- 1 #< 1 #> 1 #<= 1 #>= 1 #= 1 #~= 1
#* 1 #/ 1 #\\ 1 #@ 1 #bitShift: 1 #// 1 #bitAnd: 1 #bitOr: 1
#at: 1 #at:put: 2 #size 0 #next 0 #nextPut: 1 #atEnd 0 #== 1 #class 0
#blockCopy: 1 #value 0 #value: 1 #do: 1 #new 0 #new: 1 #x 0 #y 0 ).
"An array of the 255 Characters in ascii order.
Cog inlines table into machine code at: prim so do not regenerate it."
newArray at: 25 put: (self specialObjectsArray at: 25).
newArray at: 26 put: #mustBeBoolean.
newArray at: 27 put: ByteArray.
newArray at: 28 put: Process.
"An array of up to 31 classes whose instances will have compact headers"
newArray at: 29 put: self compactClassesArray.
newArray at: 30 put: (self specialObjectsArray at: 30). "(delay Semaphore)"
newArray at: 31 put: (self specialObjectsArray at: 31). "(user interrupt Semaphore)"
"Entries 32 - 34 unreferenced. Previously these contained prototype instances to be copied for fast initialization"
newArray at: 32 put: nil. "was (Float new: 2)"
newArray at: 33 put: nil. "was (LargePositiveInteger new: 4)"
newArray at: 34 put: nil. "was Point new"
newArray at: 35 put: #cannotInterpret:.
"Note: This must be fixed once we start using context prototypes (yeah, right)"
"(MethodContext new: CompiledMethod fullFrameSize)."
newArray at: 36 put: (self specialObjectsArray at: 36). "Is the prototype MethodContext (unused by the VM)"
newArray at: 37 put: BlockClosure.
"(BlockContext new: CompiledMethod fullFrameSize)."
newArray at: 38 put: (self specialObjectsArray at: 38). "Is the prototype BlockContext (unused by the VM)"
"array of objects referred to by external code"
newArray at: 39 put: (self specialObjectsArray at: 39). "preserve external semaphores"
newArray at: 40 put: nil. "Reserved for Mutex in Cog VMs"
newArray at: 41 put: nil. "Reserved for a LinkedList instance for overlapped calls in CogMT"
"finalization Semaphore"
newArray at: 42 put: ((self specialObjectsArray at: 42) ifNil: [Semaphore new]).
newArray at: 43 put: LargeNegativeInteger.
"External objects for callout.
Note: Written so that one can actually completely remove the FFI."
newArray at: 44 put: (self at: #ExternalAddress ifAbsent: []).
newArray at: 45 put: (self at: #ExternalStructure ifAbsent: []).
newArray at: 46 put: (self at: #ExternalData ifAbsent: []).
newArray at: 47 put: (self at: #ExternalFunction ifAbsent: []).
newArray at: 48 put: (self at: #ExternalLibrary ifAbsent: []).
newArray at: 49 put: #aboutToReturn:through:.
newArray at: 50 put: #run:with:in:.
"51 reserved for immutability message"
"newArray at: 51 put: #attemptToAssign:withIndex:."
newArray at: 52 put: #(nil "nil => generic error" #'bad receiver'
#'bad argument' #'bad index'
#'bad number of arguments'
#'inappropriate operation' #'unsupported operation'
#'no modification' #'insufficient object memory'
#'insufficient C memory' #'not found' #'bad method'
#'internal error in named primitive machinery'
#'object may move').
"53 to 55 are for Alien"
newArray at: 53 put: (self at: #Alien ifAbsent: []).
newArray at: 54 put: #invokeCallback:stack:registers:jmpbuf:.
newArray at: 55 put: (self at: #UnsafeAlien ifAbsent: []).
"Weak reference finalization"
newArray at: 56 put: (self at: #WeakFinalizationList ifAbsent: []).
"Now replace the interpreter's reference in one atomic operation"
self specialObjectsArray becomeForward: newArray
! !
!SmalltalkImage methodsFor: 'shrinking' stamp: 'cwp 6/22/2012 15:57' prior: 37288071!
unloadAllKnownPackages
"Unload all packages we know how to unload and reload"
"Prepare unloading"
Smalltalk zapMVCprojects.
Flaps disableGlobalFlaps: false.
StandardScriptingSystem removeUnreferencedPlayers.
Project removeAllButCurrent.
#('Morphic-UserObjects' 'EToy-UserObjects' 'Morphic-Imported' )
do: [:each | SystemOrganization removeSystemCategory: each].
Smalltalk at: #ServiceRegistry ifPresent:[:aClass|
SystemChangeNotifier uniqueInstance
noMoreNotificationsFor: aClass.
].
World removeAllMorphs.
"Go unloading"
#( 'ReleaseBuilder' 'ScriptLoader'
'311Deprecated' '39Deprecated'
'Universes' 'SMLoader' 'SMBase' 'Installer-Core'
'VersionNumberTests' 'VersionNumber'
'Services-Base' 'PreferenceBrowser' 'Nebraska'
'ToolBuilder-MVC' 'ST80'
'CollectionsTests' 'GraphicsTests' 'KernelTests' 'MorphicTests'
'MultilingualTests' 'NetworkTests' 'ToolsTests' 'TraitsTests'
'SystemChangeNotification-Tests' 'FlexibleVocabularies'
'EToys' 'Protocols' 'XML-Parser' 'Tests' 'SUnitGUI'
'Help-Squeak' 'HelpSystem' 'SystemReporter'
) do: [:pkgName|
(MCPackage named: pkgName) unload.
MCMcmUpdater disableUpdatesOfPackage: pkgName.
].
"Traits use custom unload"
Smalltalk at: #Trait ifPresent:[:aClass| aClass unloadTraits].
"Post-unload cleanup"
MCWorkingCopy flushObsoletePackageInfos.
SystemOrganization removeSystemCategory: 'UserObjects'.
Presenter defaultPresenterClass: nil.
World dumpPresenter.
ScheduledControllers := nil.
Preferences removePreference: #allowEtoyUserCustomEvents.
SystemOrganization removeEmptyCategories.
ChangeSet removeChangeSetsNamedSuchThat:[:cs | (cs == ChangeSet current) not].
globals undeclared removeUnreferencedKeys.
StandardScriptingSystem initialize.
MCFileBasedRepository flushAllCaches.
MCDefinition clearInstances.
Behavior flushObsoleteSubclasses.
ChangeSet current clear.
ChangeSet current name: 'Unnamed1'.
Smalltalk flushClassNameCache.
Smalltalk at: #Browser ifPresent:[:br| br initialize].
DebuggerMethodMap voidMapCache.
DataStream initialize.
AppRegistry removeObsolete.
FileServices removeObsolete.
Preferences removeObsolete.
TheWorldMenu removeObsolete.
Smalltalk garbageCollect.
Symbol compactSymbolTable.
TheWorldMainDockingBar updateInstances.
MorphicProject defaultFill: (Color gray: 0.9).
World color: (Color gray: 0.9).
! !
!InternalTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:34' prior: 40472775!
scanFrom: aStream
^ self scanFrom: aStream environment: Environment default! !
!NaturalLanguageTranslator methodsFor: 'fileIn/fileOut' stamp: 'cwp 6/20/2012 17:27' prior: 40496770!
scanFrom: aStream
^ self scanFrom: aStream environment: Environment default! !
!SystemDictionary methodsFor: 'dictionary access' stamp: 'cwp 6/22/2012 15:58' prior: 30574136!
at: aKey put: anObject
"Override from Dictionary to check Undeclared and fix up
references to undeclared variables."
| index element |
(self includesKey: aKey) ifFalse:
[self declare: aKey from: (self at: #Undeclared).
self flushClassNameCache].
super at: aKey put: anObject.
^ anObject! !
"System"!
CodeHolder subclass: #Browser
instanceVariableNames: 'environment systemOrganizer classOrganizer metaClassOrganizer editSelection metaClassIndicated selectedSystemCategory selectedClassName selectedMessageName selectedMessageCategoryName'
classVariableNames: 'ListClassesHierarchically RecentClasses'
poolDictionaries: ''
category: 'Tools-Browser'!
!Browser commentStamp: 'cwp 12/27/2012 11:09' prior: 36419432!
I represent a query path into the class descriptions, the software of the system.!
!Browser methodsFor: 'accessing' stamp: 'cwp 6/24/2012 23:20'!
selectEnvironment: anEnvironment
environment := anEnvironment.
systemOrganizer := environment organization! !
!Browser methodsFor: 'system category list' stamp: 'cwp 6/24/2012 23:06' prior: 36467357!
From noreply at buildbot.pypy.org Mon Jul 7 13:16:53 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:53 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added an option to also
aggregate and log the classes of elements that cause an object to switch to
another storage strategy.
Message-ID: <20140707111653.45F2D1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r865:13350a81184e
Date: 2014-07-03 15:37 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/13350a81184e/
Log: Added an option to also aggregate and log the classes of elements
that cause an object to switch to another storage strategy.
diff --git a/spyvm/model.py b/spyvm/model.py
--- a/spyvm/model.py
+++ b/spyvm/model.py
@@ -612,16 +612,16 @@
assert shadow, "The shadow has not been initialized yet!"
return shadow
- def switch_shadow(self, new_shadow):
+ def switch_shadow(self, new_shadow, w_element=None):
old_shadow = self.assert_shadow()
new_shadow.copy_from(old_shadow)
self.store_shadow(new_shadow)
new_shadow.attach_shadow()
- self.log_storage("Switched", old_shadow)
+ self.log_storage("Switched", old_shadow, w_element=w_element)
def store_with_new_storage(self, new_storage, n0, w_val):
space = self.space()
- self.switch_shadow(new_storage(space, self, self.size()))
+ self.switch_shadow(new_storage(space, self, self.size()), w_element=w_val)
self.store(space, n0, w_val)
def space(self):
diff --git a/spyvm/storage_logger.py b/spyvm/storage_logger.py
--- a/spyvm/storage_logger.py
+++ b/spyvm/storage_logger.py
@@ -1,45 +1,65 @@
+
+class LogEntry(object):
+ def __init__(self):
+ self.slots = 0
+ self.objects = 0
+ self.element_classnames = {}
+
+ def add(self, size, element_classname):
+ self.slots += size
+ self.objects += 1
+ if element_classname:
+ self.element_classnames[element_classname] = None
+
+ def classnames(self):
+ if len(self.element_classnames) > 0:
+ return self.element_classnames.keys()
+ return None
class Logger(object):
def __init__(self):
self.active = False
self.aggregate = False
+ self.elements = False
self.logs = {}
- def log(self, operation, old_storage, new_storage, classname, size):
+ def log(self, operation, old_storage, new_storage, classname, size, element_classname):
if self.aggregate:
key = (operation, old_storage, new_storage, classname)
if key not in self.logs:
- self.logs[key] = [0, 0]
- tuple = self.logs[key]
- tuple[0] += size
- tuple[1] += 1
+ self.logs[key] = LogEntry()
+ entry = self.logs[key]
+ entry.add(size, element_classname)
else:
- self.output(operation, old_storage, new_storage, classname, size, 1)
+ element_classnames = [ element_classname ] if element_classname else None
+ self.output(operation, old_storage, new_storage, classname, size, 1, element_classnames)
def print_aggregated_log(self):
if not self.aggregate:
return
- for key, tuple in self.logs.items():
+ for key, entry in self.logs.items():
operation, old_storage, new_storage, classname = key
- slots, objects = tuple
- self.output(operation, old_storage, new_storage, classname, slots, objects)
+ slots, objects, element_classnames = entry.slots, entry.objects, entry.classnames()
+ self.output(operation, old_storage, new_storage, classname, slots, objects, element_classnames)
- def output(self, operation, old_storage, new_storage, classname, slots, objects):
+ def output(self, operation, old_storage, new_storage, classname, slots, objects, element_classnames):
old_storage_string = "%s -> " % old_storage if old_storage else ""
classname_string = " of %s" % classname if classname else ""
- format = (operation, old_storage_string, new_storage, classname_string, slots, objects)
- print "%s (%s%s)%s size %d objects %d" % format
+ element_string = (" elements: " + " ".join(element_classnames)) if element_classnames else ""
+ format = (operation, old_storage_string, new_storage, classname_string, slots, objects, element_string)
+ print "%s (%s%s)%s size %d objects %d%s" % format
_logger = Logger()
-def activate(aggregate=False):
+def activate(aggregate=False, elements=False):
_logger.active = True
- _logger.aggregate = aggregate
+ _logger.aggregate = _logger.aggregate or aggregate
+ _logger.elements = _logger.elements or elements
def print_aggregated_log():
_logger.print_aggregated_log()
-def log(w_obj, operation, old_storage_object=None, log_classname=True):
+def log(w_obj, operation, old_storage_object=None, log_classname=True, w_element=None):
if not _logger.active:
return
@@ -54,6 +74,10 @@
classname = w_obj.guess_classname()
else:
classname = None
+ if _logger.elements and w_element and log_classname:
+ element_classname = w_element.guess_classname()
+ else:
+ element_classname = None
- _logger.log(operation, old_storage, new_storage, classname, size)
+ _logger.log(operation, old_storage, new_storage, classname, size, element_classname)
\ No newline at end of file
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -27,7 +27,7 @@
callback(entry)
return parsed_entries
-line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)( objects (?P<objects>[0-9]+))?$")
+line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)( objects (?P<objects>[0-9]+))?( elements: (?P<classnames>.+( .+)+))?$")
def parse_line(line, flags):
result = line_pattern.match(line)
@@ -41,16 +41,20 @@
classname = result.group('classname')
size = result.group('size')
objects = result.group('objects')
- return LogEntry(operation, old_storage, new_storage, classname, size, objects)
+ classnames = result.group('classnames')
+ if classnames is not None:
+ classnames = classnames.split(' ')
+ return LogEntry(operation, old_storage, new_storage, classname, size, objects, classnames)
class LogEntry(object):
- def __init__(self, operation, old_storage, new_storage, classname, size, objects):
+ def __init__(self, operation, old_storage, new_storage, classname, size, objects, classnames):
self.operation = str(operation)
self.new_storage = str(new_storage)
self.classname = str(classname)
self.size = int(size)
self.objects = int(objects) if objects else 1
+ self.classnames = set(classnames) if classnames else set()
if old_storage is None:
if operation == "Filledin":
@@ -83,9 +87,10 @@
class Operations(object):
- def __init__(self, objects=0, slots=0):
+ def __init__(self, objects=0, slots=0, element_classnames=[]):
self.objects = objects
self.slots = slots
+ self.element_classnames = set(element_classnames)
def __str__(self, total=None):
if self.objects == 0:
@@ -102,7 +107,9 @@
percent_objects = ""
slots = format(self.slots, ",d")
objects = format(self.objects, ",d")
- return "%s%s slots in %s%s objects (avg size: %.1f)" % (slots, percent_slots, objects, percent_objects, avg_slots)
+ classnames = (" [ elements: %s ]" % ' '.join([str(x) for x in self.element_classnames])) \
+ if len(self.element_classnames) else ""
+ return "%s%s slots in %s%s objects (avg size: %.1f)%s" % (slots, percent_slots, objects, percent_objects, avg_slots, classnames)
def __repr__(self):
return "%s(%s)" % (self.__str__(), object.__repr__(self))
@@ -110,6 +117,7 @@
def add_log_entry(self, entry):
self.slots = self.slots + entry.size
self.objects = self.objects + entry.objects
+ self.element_classnames |= entry.classnames
def __sub__(self, other):
return Operations(self.objects - other.objects, self.slots - other.slots)
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -132,6 +132,7 @@
-d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
-l|--storage-log
-L|--storage-log-aggregate
+ -E|--storage-log-elements
[image path, default: Squeak.image]
""" % (argv[0], constants.MAX_LOOP_DEPTH)
@@ -200,6 +201,8 @@
storage_logger.activate()
elif arg in ["-L", "--storage-log-aggregate"]:
storage_logger.activate(aggregate=True)
+ elif arg in ["-E", "--storage-log-elements"]:
+ storage_logger.activate(elements=True)
elif path is None:
path = argv[idx]
else:
From noreply at buildbot.pypy.org Mon Jul 7 13:16:54 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:16:54 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed log parsing regex
Message-ID: <20140707111654.5DD4E1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r866:d39d1b72d99e
Date: 2014-07-03 21:12 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/d39d1b72d99e/
Log: Fixed log parsing regex
diff --git a/spyvm/tool/storagelog_parser.py b/spyvm/tool/storagelog_parser.py
--- a/spyvm/tool/storagelog_parser.py
+++ b/spyvm/tool/storagelog_parser.py
@@ -27,7 +27,7 @@
callback(entry)
return parsed_entries
-line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)( objects (?P<objects>[0-9]+))?( elements: (?P<classnames>.+( .+)+))?$")
+line_pattern = re.compile("^(?P<operation>\w+) \(((?P<old>\w+) -> )?(?P<new>\w+)\)( of (?P<classname>.+))? size (?P<size>[0-9]+)( objects (?P<objects>[0-9]+))?( elements: (?P<classnames>.+( .+)*))?$")
def parse_line(line, flags):
result = line_pattern.match(line)
From noreply at buildbot.pypy.org Mon Jul 7 13:17:08 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:17:08 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added some code to the Matrix
benchmark.
Message-ID: <20140707111708.BF55B1C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r867:d1dfa8569637
Date: 2014-07-04 10:14 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/d1dfa8569637/
Log: Added some code to the Matrix benchmark.
diff --git a/images/Squeak4.5-noBitBlt.changes b/images/Squeak4.5-noBitBlt.changes
--- a/images/Squeak4.5-noBitBlt.changes
+++ b/images/Squeak4.5-noBitBlt.changes
@@ -12208,4 +12208,4 @@
].
"self footer."
- ^ self! !
----QUIT----{2 April 2014 . 11:59:41 am} Squeak4.5-noBitBlt.image priorSource: 15812182!
----STARTUP----{3 July 2014 . 11:14:14 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
SystemOrganization addCategory: #Anton!
Object subclass: #AntonMatrix
instanceVariableNames: 'fields columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Anton'!
Object subclass: #AntonMatrix
instanceVariableNames: 'fields columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Anton'!
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
at: point
^ self x: point x y: point y! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:16'!
at: point put: number
^ self x: point x y: point y put: number! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:12'!
columns
^ columns! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:55'!
fieldsDo: block
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
block value: row value: column ] ].! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:56'!
fill: block
self fieldsDo: [ :x :y |
self x: x y: y put: (block value: x value: y) ].! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:12'!
rows
^ rows! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
x: x y: y
^ fields at: (self offsetX: x y: y)! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
x: x y: y put: number
fields at: (self offsetX: x y: y) put: number! !
!AntonMatrix methodsFor: 'private' stamp: 'ag 7/3/2014 10:44'!
offsetX: x y: y
^ (y-1) * columns + x! !
!AntonMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 10:43'!
initializeFields: f rows: r
rows := r.
(f size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
columns := f size / r.
fields := f.! !
!AntonMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 10:18'!
initializeRows: r columns: c
rows := r.
columns := c.
fields := Array new: rows * columns.! !
!AntonMatrix methodsFor: 'math' stamp: 'ag 7/3/2014 10:30'!
* other
| result |
(self columns = other rows and: [ self rows = other columns ])
ifFalse: [ ^ self error: 'Cannot multiply, wrong dimensions.' ].
result := AntonMatrix rows: self rows columns: other columns.
(1 to: self rows) do: [ :row |
(1 to: other columns) do: [ :column | | value |
value := 0.
(1 to: self columns) do: [ :i |
value := value + ((self x: i y: row) * (other x: column y: i)) ].
result x: column y: row put: value ] ].
^ result! !
!AntonMatrix methodsFor: 'printing' stamp: 'ag 7/3/2014 10:47'!
printOn: s
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
s nextPutAll: (self x: column y: row) asString.
s nextPutAll: ' ' ].
s nextPutAll: String cr ].! !
!AntonMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:01'!
fillRandomFloats: generator
| max |
max := SmallInteger maxVal sqrt asInteger.
self fill: [ :x :y | max atRandom: generator ].! !
!AntonMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:02'!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
self fill: [ :x :y | generator next * 100 ].! !
"-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- "!
AntonMatrix class
instanceVariableNames: ''!
!AntonMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:13'!
benchFloats: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := AntonMatrix rows: r columns: c.
b := AntonMatrix rows: r columns: c.
a fillRandomFloats: generator.
b fillRandomFloats: generator.
mults timesRepeat: [ a * b ] ].! !
!AntonMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:03'!
benchInts: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := AntonMatrix rows: r columns: c.
b := AntonMatrix rows: r columns: c.
a fillRandomInts: generator.
b fillRandomInts: generator.
mults timesRepeat: [ a * b ] ].! !
!AntonMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 10:35'!
fields: fields rows: r
^ self basicNew
initializeFields: fields rows: r! !
!AntonMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 10:19'!
rows: r columns: c
^ self basicNew
initializeRows: r columns: c;
yourself! !
Object subclass: #AntonMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Anton'!
Object subclass: #AntonMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Anton'!
!AntonMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:19'!
benchFloats
AntonMatrix benchFloats: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
!AntonMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:18'!
benchInts
AntonMatrix benchInts: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
"-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- "!
AntonMatrixBenchmark class
instanceVariableNames: ''!
!AntonMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:24'!
config: spec
| tokens nextInt |
tokens := spec findTokens: ' '.
nextInt := [ :default |
(tokens ifEmpty: [ nil ] ifNotEmptyDo: #removeFirst) asInteger ifNil: [ default ] ].
NumOfRuns := nextInt value: 10.
Mults := nextInt value: 100.
Rows := nextInt value: 100.
Cols := nextInt value: 100.! !
!AntonMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:20'!
initialize
super initialize.
NumOfRuns := 10.
Mults := 100.
Cols := 100.
Rows := 100.! !
AntonMatrixBenchmark initialize!
----End fileIn of C:\Dev\lang-smalltalk\Anton.st----!
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:27'!
benchMatrixInt: spec
AntonMatrixBenchmark config: spec.
! !
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:28' prior: 49374034!
benchMatrixInt: spec
AntonMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'AntonMatrix' iterations: self benchmarkIterations! !
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:28'!
benchMatrix: spec
AntonMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'AntonMatrix' iterations: self benchmarkIterations! !
SystemOrganization renameCategory: #Anton toBe: #'Matrix-Benchmarks'!
Smalltalk renameClassNamed: #AntonMatrix as: #BenchMatrix!
Object subclass: #SimpleMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Smalltalk removeClassNamed: #SimpleMatrixBenchmark!
Smalltalk renameClassNamed: #AntonMatrixBenchmark as: #SimpleMatrixBenchmark!
SmallInteger removeSelector: #benchMatrixInt:!
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:30' prior: 49374406!
benchMatrix: spec
SimpleMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'SimpleMatrixBenchmark' iterations: self benchmarkIterations! !
!Benchmarks class methodsFor: 'benchmarks' stamp: 'ag 7/3/2014 11:31' prior: 49367383!
allBenchmarks
^ {
CPBAStarBenchmark.
CPBBinaryTreeBenchmark.
CPBBlowfishSuite.
CPBChameneosBenchmark.
CPBDeltaBlueBenchmark.
CPBMandelbrotBenchmarkSuite.
CPBNBodyBenchmark.
"CPBPolymorphyBenchmark." "Commented out because it compiled code in setup."
CPBRichardsBenchmark.
CPBSplayTreeBenchmark.
SimpleMatrixBenchmark.
}! !
----QUIT----{3 July 2014 . 11:32:10 am} Squeak4.5-noBitBlt.image priorSource: 15813551!
----STARTUP----{3 July 2014 . 11:34:49 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
SMarkSuite subclass: #SimpleMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
!SimpleMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:37' prior: 49372902!
benchFloats
BenchMatrix benchFloats: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
!SimpleMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:37' prior: 49373080!
benchInts
BenchMatrix benchInts: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
SimpleMatrixBenchmark config: '5 5 5 5'!
Benchmarks runMatching: 'SimpleMatrixBenchmark' iterations: 1!
!SimpleMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:38' prior: 49373773!
initialize
super initialize.
NumOfRuns := 10.
Mults := 10.
Cols := 10.
Rows := 10.! !
self initialize!
!SimpleMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:39' prior: 49376651!
initialize
"self initialize"
super initialize.
NumOfRuns := 10.
Mults := 10.
Cols := 10.
Rows := 10.! !
----QUIT----{3 July 2014 . 11:39:08 am} Squeak4.5-noBitBlt.image priorSource: 15821257!
----STARTUP----{3 July 2014 . 11:48:06 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
!BenchMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:49' prior: 49371447!
benchFloats: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := BenchMatrix rows: r columns: c.
b := BenchMatrix rows: c columns: r.
a fillRandomFloats: generator.
b fillRandomFloats: generator.
mults timesRepeat: [ a * b ] ].! !
!BenchMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:49' prior: 49371861!
benchInts: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := BenchMatrix rows: r columns: c.
b := BenchMatrix rows: c columns: r.
a fillRandomInts: generator.
b fillRandomInts: generator.
mults timesRepeat: [ a * b ] ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 11:51' prior: 49368902!
fieldsDo: block
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
block value: column value: row ] ].! !
1 benchMatrix: '1 10 100 10'!
1 benchMatrix: '1 10 100 10'!
1 benchMatrix: '1 10 10 100'!
1 benchMatrix: '1 10 10 1000'!
----QUIT----{3 July 2014 . 11:51:44 am} Squeak4.5-noBitBlt.image priorSource: 15822543!
----STARTUP----{3 July 2014 . 12:30:20 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
BenchMatrix removeSelector: #at:!
BenchMatrix removeSelector: #at:put:!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:31' prior: 49368813!
columns
^ self size / rows! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:31' prior: 49379008!
columns
^ self size / rows! !
11/2!
11//2!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:33' prior: 49378103!
fieldsDo: block
(1 to: self size) do: [ :i |
block value: i \\ rows value: i // rows ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49379251!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows value: i // rows ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49369340!
x: x y: y
^ self at: (self offsetX: x y: y)! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49369457!
x: x y: y put: number
self at: (self offsetX: x y: y) put: number! !
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2!
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3!
a!
a!
a rows!
a columns!
BenchMatrix removeSelector: #initializeRows:columns:!
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:36'!
initializeRows: r
rows := r.! !
BenchMatrix removeSelector: #initializeFields:rows:!
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:37' prior: 49372274!
fields: fields rows: r
| columns f rows |
rows := r.
(f size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
columns := f size / r.
" fields := f."
^ self basicNew
initializeFields: fields rows: r! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:37'!
rows: r
rows := r.! !
BenchMatrix removeSelector: #initializeRows:!
Array withAll: #(1 2 3)!
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:39' prior: 49380248!
fields: fields rows: r
(fields size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
^ (self withAll: fields)
rows: r;
yourself! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:39' prior: 49379122!
columns
^ self size // rows! !
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:40' prior: 49372433!
rows: r columns: c
^ (self new: r * c)
rows: r;
fillZeros;
yourself! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:40'!
fillZeros
self fill: [ :x :y | 0 ].! !
i!
i \\ rows!
i //rows!
rows!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:42' prior: 49379428!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:43' prior: 49381404!
fieldsDo: block
0 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
x := BenchMatrix rows: 4 columns: 3.!
x!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:44' prior: 49381705!
fieldsDo: block
0 to: self size + 1 do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
o size!
x size!
o size!
o asSet size!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:46' prior: 49382006!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
o size!
o !
1 \\ 4!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:46' prior: 49382353!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o size!
o !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:50' prior: 49382634!
fieldsDo: block
| columns |
columns := self columns.
1 to: self size do: [ :i |
block value: i \\ columns value: i // columns + 1 ].! !
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows columns'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows columns'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:51' prior: 49382898!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ columns value: i // columns + 1 ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:51' prior: 49380969!
columns
^ columns! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:51' prior: 49380543!
rows: r
rows := r.
columns := self size // r.! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.
x fieldsDo: [ :x :y | o add: x -> y ].
!
ox!
o!
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:52' prior: 49381247!
fillZeros
self atAllPut: 0.! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:53' prior: 49383432!
fieldsDo: block
0 to: self size - 1 do: [ :i |
block value: i \\ columns + 1 value: i // columns + 1 ].! !
o := OrderedCollection new.
x fieldsDo: [ :x :y | o add: x -> y ].!
o size!
o!
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2!
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3!
!BenchMatrix methodsFor: 'math' stamp: 'ag 7/3/2014 12:55' prior: 49370092!
* other
| result |
(self columns = other rows and: [ self rows = other columns ])
ifFalse: [ ^ self error: 'Cannot multiply, wrong dimensions.' ].
result := BenchMatrix rows: self rows columns: other columns.
(1 to: self rows) do: [ :row |
(1 to: other columns) do: [ :column | | value |
value := 0.
(1 to: self columns) do: [ :i |
value := value + ((self x: i y: row) * (other x: column y: i)) ].
result x: column y: row put: value ] ].
^ result! !
a * b!
self assert: (Array withAll: (a * b)) = #(7 8 9 2)!
BenchMatrix class organization addCategory: #test!
!BenchMatrix class methodsFor: 'test' stamp: 'ag 7/3/2014 12:57'!
tinyTest
"self tinyTest"
| a b |
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2.
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3.
self assert: (Array withAll: (a * b)) = #(7 8 9 2).! !
self tinyTest!
1 benchMatrix: '1 3 5 5'!
1 benchMatrix: '1 10 5 5'!
----QUIT----{3 July 2014 . 12:58:52 pm} Squeak4.5-noBitBlt.image priorSource: 15823926!
----STARTUP----{3 July 2014 . 1:05:04 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.!
(a collect: #class) asSet!
(b collect: #class) asSet!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.!
(b collect: #class) asSet!
(a collect: #class) asSet!
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:08' prior: 49371100!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
| max |
max := SmallInteger maxVal sqrt asInteger.
self fill: [ :x :y | max atRandom: generator ].
! !
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:08' prior: 49370897!
fillRandomFloats: generator
self fill: [ :x :y | generator next * 100 ].! !
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.
(a collect: #class) asSet
!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.
(b collect: #class) asSet
!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.
!
c := a * b!
(c collect: #class) asSet!
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:09' prior: 49386143!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
| max |
max := 1000.
self fill: [ :x :y | max atRandom: generator ].
! !
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.
c := a * b.
(c collect: #class) asSet!
----QUIT----{3 July 2014 . 1:09:37 pm} Squeak4.5-noBitBlt.image priorSource: 15830973!
\ No newline at end of file
+ ^ self! !
----QUIT----{2 April 2014 . 11:59:41 am} Squeak4.5-noBitBlt.image priorSource: 15812182!
----STARTUP----{3 July 2014 . 11:14:14 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
SystemOrganization addCategory: #Anton!
Object subclass: #AntonMatrix
instanceVariableNames: 'fields columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Anton'!
Object subclass: #AntonMatrix
instanceVariableNames: 'fields columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Anton'!
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
at: point
^ self x: point x y: point y! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:16'!
at: point put: number
^ self x: point x y: point y put: number! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:12'!
columns
^ columns! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:55'!
fieldsDo: block
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
block value: row value: column ] ].! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:56'!
fill: block
self fieldsDo: [ :x :y |
self x: x y: y put: (block value: x value: y) ].! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:12'!
rows
^ rows! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
x: x y: y
^ fields at: (self offsetX: x y: y)! !
!AntonMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 10:17'!
x: x y: y put: number
fields at: (self offsetX: x y: y) put: number! !
!AntonMatrix methodsFor: 'private' stamp: 'ag 7/3/2014 10:44'!
offsetX: x y: y
^ (y-1) * columns + x! !
!AntonMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 10:43'!
initializeFields: f rows: r
rows := r.
(f size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
columns := f size / r.
fields := f.! !
!AntonMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 10:18'!
initializeRows: r columns: c
rows := r.
columns := c.
fields := Array new: rows * columns.! !
!AntonMatrix methodsFor: 'math' stamp: 'ag 7/3/2014 10:30'!
* other
| result |
(self columns = other rows and: [ self rows = other columns ])
ifFalse: [ ^ self error: 'Cannot multiply, wrong dimensions.' ].
result := AntonMatrix rows: self rows columns: other columns.
(1 to: self rows) do: [ :row |
(1 to: other columns) do: [ :column | | value |
value := 0.
(1 to: self columns) do: [ :i |
value := value + ((self x: i y: row) * (other x: column y: i)) ].
result x: column y: row put: value ] ].
^ result! !
!AntonMatrix methodsFor: 'printing' stamp: 'ag 7/3/2014 10:47'!
printOn: s
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
s nextPutAll: (self x: column y: row) asString.
s nextPutAll: ' ' ].
s nextPutAll: String cr ].! !
!AntonMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:01'!
fillRandomFloats: generator
| max |
max := SmallInteger maxVal sqrt asInteger.
self fill: [ :x :y | max atRandom: generator ].! !
!AntonMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:02'!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
self fill: [ :x :y | generator next * 100 ].! !
"-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- "!
AntonMatrix class
instanceVariableNames: ''!
!AntonMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:13'!
benchFloats: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := AntonMatrix rows: r columns: c.
b := AntonMatrix rows: r columns: c.
a fillRandomFloats: generator.
b fillRandomFloats: generator.
mults timesRepeat: [ a * b ] ].! !
!AntonMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:03'!
benchInts: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := AntonMatrix rows: r columns: c.
b := AntonMatrix rows: r columns: c.
a fillRandomInts: generator.
b fillRandomInts: generator.
mults timesRepeat: [ a * b ] ].! !
!AntonMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 10:35'!
fields: fields rows: r
^ self basicNew
initializeFields: fields rows: r! !
!AntonMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 10:19'!
rows: r columns: c
^ self basicNew
initializeRows: r columns: c;
yourself! !
Object subclass: #AntonMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Anton'!
Object subclass: #AntonMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Anton'!
!AntonMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:19'!
benchFloats
AntonMatrix benchFloats: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
!AntonMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:18'!
benchInts
AntonMatrix benchInts: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
"-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- "!
AntonMatrixBenchmark class
instanceVariableNames: ''!
!AntonMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:24'!
config: spec
| tokens nextInt |
tokens := spec findTokens: ' '.
nextInt := [ :default |
(tokens ifEmpty: [ nil ] ifNotEmptyDo: #removeFirst) asInteger ifNil: [ default ] ].
NumOfRuns := nextInt value: 10.
Mults := nextInt value: 100.
Rows := nextInt value: 100.
Cols := nextInt value: 100.! !
!AntonMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:20'!
initialize
super initialize.
NumOfRuns := 10.
Mults := 100.
Cols := 100.
Rows := 100.! !
AntonMatrixBenchmark initialize!
----End fileIn of C:\Dev\lang-smalltalk\Anton.st----!
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:27'!
benchMatrixInt: spec
AntonMatrixBenchmark config: spec.
! !
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:28' prior: 49374034!
benchMatrixInt: spec
AntonMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'AntonMatrix' iterations: self benchmarkIterations! !
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:28'!
benchMatrix: spec
AntonMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'AntonMatrix' iterations: self benchmarkIterations! !
SystemOrganization renameCategory: #Anton toBe: #'Matrix-Benchmarks'!
Smalltalk renameClassNamed: #AntonMatrix as: #BenchMatrix!
Object subclass: #SimpleMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Smalltalk removeClassNamed: #SimpleMatrixBenchmark!
Smalltalk renameClassNamed: #AntonMatrixBenchmark as: #SimpleMatrixBenchmark!
SmallInteger removeSelector: #benchMatrixInt:!
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 11:30' prior: 49374406!
benchMatrix: spec
SimpleMatrixBenchmark config: spec.
^ Benchmarks runMatching: 'SimpleMatrixBenchmark' iterations: self benchmarkIterations! !
!Benchmarks class methodsFor: 'benchmarks' stamp: 'ag 7/3/2014 11:31' prior: 49367383!
allBenchmarks
^ {
CPBAStarBenchmark.
CPBBinaryTreeBenchmark.
CPBBlowfishSuite.
CPBChameneosBenchmark.
CPBDeltaBlueBenchmark.
CPBMandelbrotBenchmarkSuite.
CPBNBodyBenchmark.
"CPBPolymorphyBenchmark." "Commented out because it compiled code in setup."
CPBRichardsBenchmark.
CPBSplayTreeBenchmark.
SimpleMatrixBenchmark.
}! !
----QUIT----{3 July 2014 . 11:32:10 am} Squeak4.5-noBitBlt.image priorSource: 15813551!
----STARTUP----{3 July 2014 . 11:34:49 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
SMarkSuite subclass: #SimpleMatrixBenchmark
instanceVariableNames: ''
classVariableNames: 'Cols Mults NumOfRuns Rows'
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
!SimpleMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:37' prior: 49372902!
benchFloats
BenchMatrix benchFloats: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
!SimpleMatrixBenchmark methodsFor: 'bench' stamp: 'ag 7/3/2014 11:37' prior: 49373080!
benchInts
BenchMatrix benchInts: NumOfRuns multiplicationsPerRun: Mults rows: Rows columns: Cols.! !
SimpleMatrixBenchmark config: '5 5 5 5'!
Benchmarks runMatching: 'SimpleMatrixBenchmark' iterations: 1!
!SimpleMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:38' prior: 49373773!
initialize
super initialize.
NumOfRuns := 10.
Mults := 10.
Cols := 10.
Rows := 10.! !
self initialize!
!SimpleMatrixBenchmark class methodsFor: 'initialization' stamp: 'ag 7/3/2014 11:39' prior: 49376651!
initialize
"self initialize"
super initialize.
NumOfRuns := 10.
Mults := 10.
Cols := 10.
Rows := 10.! !
----QUIT----{3 July 2014 . 11:39:08 am} Squeak4.5-noBitBlt.image priorSource: 15821257!
----STARTUP----{3 July 2014 . 11:48:06 am} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
!BenchMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:49' prior: 49371447!
benchFloats: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := BenchMatrix rows: r columns: c.
b := BenchMatrix rows: c columns: r.
a fillRandomFloats: generator.
b fillRandomFloats: generator.
mults timesRepeat: [ a * b ] ].! !
!BenchMatrix class methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 11:49' prior: 49371861!
benchInts: numOfRuns multiplicationsPerRun: mults rows: r columns: c
| generator |
generator := Random seed: 23456432.
numOfRuns timesRepeat: [ | a b |
a := BenchMatrix rows: r columns: c.
b := BenchMatrix rows: c columns: r.
a fillRandomInts: generator.
b fillRandomInts: generator.
mults timesRepeat: [ a * b ] ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 11:51' prior: 49368902!
fieldsDo: block
(1 to: self rows) do: [ :row |
(1 to: self columns) do: [ :column |
block value: column value: row ] ].! !
1 benchMatrix: '1 10 100 10'!
1 benchMatrix: '1 10 100 10'!
1 benchMatrix: '1 10 10 100'!
1 benchMatrix: '1 10 10 1000'!
----QUIT----{3 July 2014 . 11:51:44 am} Squeak4.5-noBitBlt.image priorSource: 15822543!
----STARTUP----{3 July 2014 . 12:30:20 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'columns rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
BenchMatrix removeSelector: #at:!
BenchMatrix removeSelector: #at:put:!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:31' prior: 49368813!
columns
^ self size / rows! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:31' prior: 49379008!
columns
^ self size / rows! !
11/2!
11//2!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:33' prior: 49378103!
fieldsDo: block
(1 to: self size) do: [ :i |
block value: i \\ rows value: i // rows ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49379251!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows value: i // rows ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49369340!
x: x y: y
^ self at: (self offsetX: x y: y)! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:34' prior: 49369457!
x: x y: y put: number
self at: (self offsetX: x y: y) put: number! !
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2!
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3!
a!
a!
a rows!
a columns!
BenchMatrix removeSelector: #initializeRows:columns:!
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:36'!
initializeRows: r
rows := r.! !
BenchMatrix removeSelector: #initializeFields:rows:!
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:37' prior: 49372274!
fields: fields rows: r
| columns f rows |
rows := r.
(f size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
columns := f size / r.
" fields := f."
^ self basicNew
initializeFields: fields rows: r! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:37'!
rows: r
rows := r.! !
BenchMatrix removeSelector: #initializeRows:!
Array withAll: #(1 2 3)!
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:39' prior: 49380248!
fields: fields rows: r
(fields size \\ r) = 0 ifFalse: [ self error: 'Illegal initialization.' ].
^ (self withAll: fields)
rows: r;
yourself! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:39' prior: 49379122!
columns
^ self size // rows! !
!BenchMatrix class methodsFor: 'instance creation' stamp: 'ag 7/3/2014 12:40' prior: 49372433!
rows: r columns: c
^ (self new: r * c)
rows: r;
fillZeros;
yourself! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:40'!
fillZeros
self fill: [ :x :y | 0 ].! !
i!
i \\ rows!
i //rows!
rows!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:42' prior: 49379428!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:43' prior: 49381404!
fieldsDo: block
0 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
x := BenchMatrix rows: 4 columns: 3.!
x!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:44' prior: 49381705!
fieldsDo: block
0 to: self size + 1 do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
o size!
x size!
o size!
o asSet size!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:46' prior: 49382006!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows + 1 value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o!
o size!
o !
1 \\ 4!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:46' prior: 49382353!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ rows value: i // rows + 1 ].! !
o := OrderedCollection new.!
x fieldsDo: [ :x :y | o add: x -> y ].!
o size!
o !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:50' prior: 49382634!
fieldsDo: block
| columns |
columns := self columns.
1 to: self size do: [ :i |
block value: i \\ columns value: i // columns + 1 ].! !
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows columns'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
Array variableSubclass: #BenchMatrix
instanceVariableNames: 'rows columns'
classVariableNames: ''
poolDictionaries: ''
category: 'Matrix-Benchmarks'!
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:51' prior: 49382898!
fieldsDo: block
1 to: self size do: [ :i |
block value: i \\ columns value: i // columns + 1 ].! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:51' prior: 49380969!
columns
^ columns! !
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:51' prior: 49380543!
rows: r
rows := r.
columns := self size // r.! !
x := BenchMatrix rows: 4 columns: 3.!
x!
o := OrderedCollection new.
x fieldsDo: [ :x :y | o add: x -> y ].
!
ox!
o!
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 12:52' prior: 49381247!
fillZeros
self atAllPut: 0.! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 12:53' prior: 49383432!
fieldsDo: block
0 to: self size - 1 do: [ :i |
block value: i \\ columns + 1 value: i // columns + 1 ].! !
o := OrderedCollection new.
x fieldsDo: [ :x :y | o add: x -> y ].!
o size!
o!
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2!
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3!
!BenchMatrix methodsFor: 'math' stamp: 'ag 7/3/2014 12:55' prior: 49370092!
* other
| result |
(self columns = other rows and: [ self rows = other columns ])
ifFalse: [ ^ self error: 'Cannot multiply, wrong dimensions.' ].
result := BenchMatrix rows: self rows columns: other columns.
(1 to: self rows) do: [ :row |
(1 to: other columns) do: [ :column | | value |
value := 0.
(1 to: self columns) do: [ :i |
value := value + ((self x: i y: row) * (other x: column y: i)) ].
result x: column y: row put: value ] ].
^ result! !
a * b!
self assert: (Array withAll: (a * b)) = #(7 8 9 2)!
BenchMatrix class organization addCategory: #test!
!BenchMatrix class methodsFor: 'test' stamp: 'ag 7/3/2014 12:57'!
tinyTest
"self tinyTest"
| a b |
a := BenchMatrix fields: #( 3 2 1 1 0 2 ) rows: 2.
b := BenchMatrix fields: #( 1 2 0 1 4 0 ) rows: 3.
self assert: (Array withAll: (a * b)) = #(7 8 9 2).! !
self tinyTest!
1 benchMatrix: '1 3 5 5'!
1 benchMatrix: '1 10 5 5'!
----QUIT----{3 July 2014 . 12:58:52 pm} Squeak4.5-noBitBlt.image priorSource: 15823926!
----STARTUP----{3 July 2014 . 1:05:04 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.!
(a collect: #class) asSet!
(b collect: #class) asSet!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.!
(b collect: #class) asSet!
(a collect: #class) asSet!
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:08' prior: 49371100!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
| max |
max := SmallInteger maxVal sqrt asInteger.
self fill: [ :x :y | max atRandom: generator ].
! !
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:08' prior: 49370897!
fillRandomFloats: generator
self fill: [ :x :y | generator next * 100 ].! !
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.
(a collect: #class) asSet
!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomFloats: generator.
b fillRandomInts: generator.
(b collect: #class) asSet
!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.
!
c := a * b!
(c collect: #class) asSet!
!BenchMatrix methodsFor: 'benchmarking' stamp: 'ag 7/3/2014 13:09' prior: 49386143!
fillRandomInts: generator
"Fill with SmallInteger values small enough to stay SmallIntegers after multiplication."
| max |
max := 1000.
self fill: [ :x :y | max atRandom: generator ].
! !
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.
c := a * b.
(c collect: #class) asSet!
----QUIT----{3 July 2014 . 1:09:37 pm} Squeak4.5-noBitBlt.image priorSource: 15830973!
----STARTUP----{3 July 2014 . 8:26:43 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.!
c := a * b.!
(c collect: #class) asSet!
c!
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 20:27'!
testMatrix
! !
!SmallInteger methodsFor: 'as yet unclassified' stamp: 'ag 7/3/2014 20:28' prior: 49388134!
testMatrix
| a b c generator |
a := BenchMatrix rows: 20 columns: 20.
b := BenchMatrix rows: 20 columns: 20.
generator := Random seed: 13243456.
a fillRandomInts: generator.
b fillRandomInts: generator.
c := a * b.
^ (c collect: #class) asSet asString! !
5 testMatrix!
----SNAPSHOT----{3 July 2014 . 8:28:40 pm} Squeak4.5-noBitBlt.1.image priorSource: 15833215!
----QUIT----{3 July 2014 . 8:28:49 pm} Squeak4.5-noBitBlt.1.image priorSource: 15834093!
----STARTUP----{3 July 2014 . 9:02:43 pm} as C:\Dev\lang-smalltalk\images\Squeak4.5-noBitBlt.image!
!BenchMatrix methodsFor: 'initialization' stamp: 'ag 7/3/2014 21:03' prior: 49383727!
rows: r
rows := r asFloat.
columns := (self size // r) asFloat.! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 21:03' prior: 49369257!
rows
^ rows asInteger! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 21:03' prior: 49383617!
columns
^ columns asInteger! !
!BenchMatrix methodsFor: 'accessing' stamp: 'ag 7/3/2014 21:04' prior: 49384103!
fieldsDo: block
0 to: self size - 1 do: [ :i |
block value: i \\ self columns + 1 value: i // self columns + 1 ].! !
----QUIT----{3 July 2014 . 9:04:33 pm} Squeak4.5-noBitBlt.image priorSource: 15834187!
\ No newline at end of file
diff --git a/images/Squeak4.5-noBitBlt.image b/images/Squeak4.5-noBitBlt.image
index 901620a8a8d4d194528f72a369392610813925a4..46e8d064f5b9b3ded5cfa05ee2ff1651286c82e7
GIT binary patch
[cut]
From noreply at buildbot.pypy.org Mon Jul 7 13:17:09 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Mon, 7 Jul 2014 13:17:09 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: WORK IN PROGRESS. This
compiles, but segfaults.
Message-ID: <20140707111709.EB7791C024A@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r868:3fe7264ab317
Date: 2014-07-07 13:15 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/3fe7264ab317/
Log: WORK IN PROGRESS. This compiles, but segfaults.
- Refactoring: maintaining 2 backreferences to the sender in
ContextPartObjects (direct_sender and virtual_sender). As many
contexts as possible should use virtual_sender; when it becomes
necessary, direct_sender will be set instead (should break
performance). Context objects are now always created WITHOUT a
sender reference; it is set and deleted by the interpreter. Required
small changes in lots of places.
- Made ProcessWrapper more consistent and removed 2 duplicated
methods.
- Fixed tests.
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -68,7 +68,9 @@
while True:
assert self.current_stack_depth == 0
# Need to save s_sender, loop_bytecodes will nil this on return
- s_sender = s_new_context.s_sender()
+ # Virtual references are not allowed here, and neither are "fresh" contexts (except for the toplevel one).
+ assert s_new_context.virtual_sender is jit.vref_None
+ s_sender = s_new_context.direct_sender
try:
self.loop_bytecodes(s_new_context)
raise Exception("loop_bytecodes left without raising...")
@@ -79,7 +81,7 @@
except Return, nlr:
s_new_context = s_sender
while s_new_context is not nlr.s_target_context:
- s_sender = s_new_context.s_sender()
+ s_sender = s_new_context.direct_sender
s_new_context._activate_unwind_context(self)
s_new_context = s_sender
s_new_context.push(nlr.value)
@@ -88,7 +90,7 @@
print "====== Switched process from: %s" % s_new_context.short_str()
print "====== to: %s " % p.s_new_context.short_str()
s_new_context = p.s_new_context
-
+
def loop_bytecodes(self, s_context, may_context_switch=True):
old_pc = 0
if not jit.we_are_jitted() and may_context_switch:
@@ -98,6 +100,7 @@
pc = s_context.pc()
if pc < old_pc:
if jit.we_are_jitted():
+ # Do the interrupt-check at the end of a loop, don't interrupt loops midway.
self.jitted_check_for_interrupt(s_context)
self.jit_driver.can_enter_jit(
pc=pc, self=self, method=method,
@@ -115,17 +118,30 @@
else:
s_context.push(nlr.value)
- # This is just a wrapper around loop_bytecodes that handles the stack overflow protection mechanism
- def stack_frame(self, s_new_frame, may_context_switch=True):
- if self.max_stack_depth > 0:
- if self.current_stack_depth >= self.max_stack_depth:
- raise StackOverflow(s_new_frame)
-
- self.current_stack_depth += 1
+ # This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
+ # and handles the stack overflow protection mechanism.
+ def stack_frame(self, s_frame, s_sender, may_context_switch=True):
+ assert s_frame.virtual_sender is jit.vref_None
try:
- self.loop_bytecodes(s_new_frame, may_context_switch)
+ # Enter the context - store a virtual reference back to the sender
+ # Non-fresh contexts can happen, e.g. when activating a stored BlockContext.
+ # The same frame object must not pass through here recursively!
+ if s_frame.is_fresh():
+ s_frame.virtual_sender = jit.virtual_ref(s_sender)
+
+ self.current_stack_depth += 1
+ if self.max_stack_depth > 0:
+ if self.current_stack_depth >= self.max_stack_depth:
+ raise StackOverflow(s_frame)
+
+ # Now (continue to) execute the context bytecodes
+ self.loop_bytecodes(s_frame, may_context_switch)
finally:
self.current_stack_depth -= 1
+ # Cleanly leave the context. This will finish the virtual sender-reference, if
+ # it is still there, which can happen in case of ProcessSwitch or StackOverflow;
+ # in case of a Return, this will already be handled while unwinding the stack.
+ s_frame.finish_virtual_sender()
def step(self, context):
bytecode = context.fetch_next_bytecode()
@@ -177,7 +193,7 @@
self.next_wakeup_tick = 0
semaphore = self.space.objtable["w_timerSemaphore"]
if not semaphore.is_nil(self.space):
- wrapper.SemaphoreWrapper(self.space, semaphore).signal(s_frame.w_self())
+ wrapper.SemaphoreWrapper(self.space, semaphore).signal(s_frame)
# We have no finalization process, so far.
# We do not support external semaphores.
# In cog, the method to add such a semaphore is only called in GC.
@@ -195,7 +211,12 @@
except ReturnFromTopLevel, e:
return e.object
- def perform(self, w_receiver, selector, *arguments_w):
+ def perform(self, w_receiver, selector, *w_arguments):
+ s_frame = self.create_toplevel_context(w_receiver, selector, *w_arguments)
+ self.interrupt_check_counter = self.interrupt_counter_size
+ return self.interpret_toplevel(s_frame.w_self())
+
+ def create_toplevel_context(self, w_receiver, selector, *w_arguments):
if isinstance(selector, str):
if selector == "asSymbol":
w_selector = self.image.w_asSymbol
@@ -207,15 +228,13 @@
w_method = model.W_CompiledMethod(self.space, header=512)
w_method.literalatput0(self.space, 1, w_selector)
- assert len(arguments_w) <= 7
- w_method.setbytes([chr(131), chr(len(arguments_w) << 5 + 0), chr(124)]) #returnTopFromMethodBytecode
+ assert len(w_arguments) <= 7
+ w_method.setbytes([chr(131), chr(len(w_arguments) << 5 + 0), chr(124)]) #returnTopFromMethodBytecode
w_method.set_lookup_class_and_name(w_receiver.getclass(self.space), "Interpreter.perform")
- s_frame = MethodContextShadow(self.space, None, w_method, w_receiver, [])
+ s_frame = MethodContextShadow(self.space, w_method=w_method, w_receiver=w_receiver)
s_frame.push(w_receiver)
- s_frame.push_all(list(arguments_w))
-
- self.interrupt_check_counter = self.interrupt_counter_size
- return self.interpret_toplevel(s_frame.w_self())
+ s_frame.push_all(list(w_arguments))
+ return s_frame
def padding(self, symbol=' '):
return symbol * self.current_stack_depth
@@ -233,8 +252,7 @@
class ContextSwitchException(Exception):
"""General Exception that causes the interpreter to leave
- the current context. The current pc is required in order to update
- the context object that we are leaving."""
+ the current context."""
_attrs_ = ["s_new_context"]
def __init__(self, s_new_context):
self.s_new_context = s_new_context
@@ -528,7 +546,7 @@
s_compiledin.s_superclass())
def _sendSelector(self, w_selector, argcount, interp,
- receiver, receiverclassshadow):
+ receiver, receiverclassshadow, w_arguments=None):
assert argcount >= 0
try:
w_method = receiverclassshadow.lookup(w_selector)
@@ -537,19 +555,22 @@
code = w_method.primitive()
if code:
+ if w_arguments:
+ self.push_all(w_arguments)
try:
return self._call_primitive(code, interp, argcount, w_method, w_selector)
except primitives.PrimitiveFailedError:
pass # ignore this error and fall back to the Smalltalk version
- arguments = self.pop_and_return_n(argcount)
- s_frame = w_method.create_frame(interp.space, receiver, arguments, self)
+ if not w_arguments:
+ w_arguments = self.pop_and_return_n(argcount)
+ s_frame = w_method.create_frame(interp.space, receiver, w_arguments)
self.pop() # receiver
# ######################################################################
if interp.trace:
print interp.padding() + s_frame.short_str()
- return interp.stack_frame(s_frame)
+ return interp.stack_frame(s_frame, self)
@objectmodel.specialize.arg(1)
def _sendSelfSelectorSpecial(self, selector, numargs, interp):
@@ -560,7 +581,7 @@
w_special_selector = self.space.objtable["w_" + special_selector]
s_class = receiver.class_shadow(self.space)
w_method = s_class.lookup(w_special_selector)
- s_frame = w_method.create_frame(interp.space, receiver, w_args, self)
+ s_frame = w_method.create_frame(interp.space, receiver, w_args)
# ######################################################################
if interp.trace:
@@ -568,7 +589,7 @@
if not objectmodel.we_are_translated():
import pdb; pdb.set_trace()
- return interp.stack_frame(s_frame)
+ return interp.stack_frame(s_frame, self)
def _doesNotUnderstand(self, w_selector, argcount, interp, receiver):
arguments = self.pop_and_return_n(argcount)
diff --git a/spyvm/model.py b/spyvm/model.py
--- a/spyvm/model.py
+++ b/spyvm/model.py
@@ -1411,15 +1411,15 @@
def is_array_object(self):
return True
-
- def create_frame(self, space, receiver, arguments, sender = None):
+
+ def create_frame(self, space, receiver, arguments=[]):
from spyvm.shadow import MethodContextShadow
assert len(arguments) == self.argsize
- return MethodContextShadow(space, None, self, receiver, arguments, sender)
+ return MethodContextShadow(space, w_method=self, w_receiver=receiver, arguments=arguments)
# === Printing ===
- def guess_classname (self):
+ def guess_classname(self):
return "CompiledMethod"
def str_content(self):
diff --git a/spyvm/objspace.py b/spyvm/objspace.py
--- a/spyvm/objspace.py
+++ b/spyvm/objspace.py
@@ -197,11 +197,6 @@
elif isinstance(w_v, model.W_SmallInteger): return float(w_v.value)
raise UnwrappingError()
- def unwrap_pointersobject(self, w_v):
- if not isinstance(w_v, model.W_PointersObject):
- raise UnwrappingError()
- return w_v
-
@jit.look_inside_iff(lambda self, w_array: jit.isconstant(w_array.size()))
def unwrap_array(self, w_array):
# Check that our argument has pointers format and the class:
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -10,6 +10,10 @@
from rpython.rlib import rarithmetic, rfloat, unroll, jit
+def assert_class(interp, w_obj, w_class):
+ if not w_obj.getclass(interp.space).is_same_object(w_class):
+ raise PrimitiveFailedError()
+
def assert_bounds(n0, minimum, maximum):
if not minimum <= n0 < maximum:
raise PrimitiveFailedError()
@@ -100,17 +104,17 @@
if unwrap_spec is None:
def wrapped(interp, s_frame, argument_count_m1, w_method=None):
if compiled_method:
- w_result = func(interp, s_frame, argument_count_m1, w_method)
+ result = func(interp, s_frame, argument_count_m1, w_method)
else:
- w_result = func(interp, s_frame, argument_count_m1)
+ result = func(interp, s_frame, argument_count_m1)
if result_is_new_frame:
- return interp.stack_frame(w_result, may_context_switch)
+ return interp.stack_frame(result, s_frame, may_context_switch)
if not no_result:
- assert w_result is not None
- s_frame.push(w_result)
+ assert result is not None
+ s_frame.push(result)
else:
len_unwrap_spec = len(unwrap_spec)
- assert (len_unwrap_spec + 2 == len(inspect.getargspec(func)[0])), "wrong number of arguments"
+ assert len_unwrap_spec + 2 == len(inspect.getargspec(func)[0]), "wrong number of arguments"
unrolling_unwrap_spec = unrolling_iterable(enumerate(unwrap_spec))
def wrapped(interp, s_frame, argument_count_m1, w_method=None):
argument_count = argument_count_m1 + 1 # to account for the rcvr
@@ -153,7 +157,7 @@
if clean_stack:
# happens only if no exception occurs!
s_frame.pop_n(len_unwrap_spec)
- return interp.stack_frame(s_new_frame, may_context_switch)
+ return interp.stack_frame(s_new_frame, s_frame, may_context_switch)
else:
w_result = func(interp, s_frame, *args)
# After calling primitive, reload context-shadow in case it
@@ -379,7 +383,6 @@
print ("%s" % s_frame.peek(1)).replace('\r', '\n')
if isinstance(w_message, model.W_PointersObject):
print ('%s' % w_message.fetch_all(s_frame.space)).replace('\r', '\n')
- # raise Exit('Probably Debugger called...')
raise PrimitiveFailedError()
# ___________________________________________________________________________
@@ -593,8 +596,7 @@
@expose_primitive(NEW_METHOD, unwrap_spec=[object, int, int])
def func(interp, s_frame, w_class, bytecount, header):
# We ignore w_class because W_CompiledMethod is special
- w_method = model.W_CompiledMethod(s_frame.space, bytecount, header)
- return w_method
+ return model.W_CompiledMethod(interp.space, bytecount, header)
# ___________________________________________________________________________
# I/O Primitives
@@ -965,15 +967,14 @@
raise PrimitiveFailedError
@expose_primitive(LOW_SPACE_SEMAPHORE, unwrap_spec=[object, object])
-def func(interp, s_frame, w_reciver, i):
+def func(interp, s_frame, w_receiver, i):
# dont know when the space runs out
- return w_reciver
-
+ return w_receiver
@expose_primitive(SIGNAL_AT_BYTES_LEFT, unwrap_spec=[object, int])
-def func(interp, s_frame, w_reciver, i):
+def func(interp, s_frame, w_receiver, i):
# dont know when the space runs out
- return w_reciver
+ return w_receiver
@expose_primitive(DEFER_UPDATES, unwrap_spec=[object, bool])
def func(interp, s_frame, w_receiver, flag):
@@ -1287,19 +1288,8 @@
# The block bytecodes are stored inline: so we skip past the
# byteodes to invoke this primitive to find them (hence +2)
initialip = s_frame.pc() + 2
- s_new_context = shadow.BlockContextShadow(
- interp.space, None, w_method_context, argcnt, initialip)
+ s_new_context = shadow.BlockContextShadow(interp.space, None, w_method_context, argcnt, initialip)
return s_new_context.w_self()
-
-def finalize_block_ctx(interp, s_block_ctx, s_frame):
- from spyvm.error import SenderChainManipulation
- # Set some fields
- s_block_ctx.store_pc(s_block_ctx.initialip())
- try:
- s_block_ctx.store_s_sender(s_frame)
- except SenderChainManipulation, e:
- assert e.s_context == s_block_ctx
- return s_block_ctx
@expose_primitive(VALUE, result_is_new_frame=True)
def func(interp, s_frame, argument_count):
@@ -1333,7 +1323,8 @@
s_block_ctx.push_all(block_args)
s_frame.pop()
- return finalize_block_ctx(interp, s_block_ctx, s_frame)
+ s_block_ctx.reset_pc()
+ return s_block_ctx
@expose_primitive(VALUE_WITH_ARGS, unwrap_spec=[object, list],
result_is_new_frame=True)
@@ -1352,7 +1343,8 @@
# XXX Check original logic. Image does not test this anyway
# because falls back to value + internal implementation
- return finalize_block_ctx(interp, s_block_ctx, s_frame)
+ s_block_ctx.reset_pc()
+ return s_block_ctx
@expose_primitive(PERFORM)
def func(interp, s_frame, argcount):
@@ -1361,72 +1353,49 @@
@expose_primitive(PERFORM_WITH_ARGS,
unwrap_spec=[object, object, list],
no_result=True, clean_stack=False)
-def func(interp, s_frame, w_rcvr, w_selector, args_w):
+def func(interp, s_frame, w_rcvr, w_selector, w_arguments):
from spyvm.shadow import MethodNotFound
- argcount = len(args_w)
s_frame.pop_n(2) # removing our arguments
+
+ return s_frame._sendSelector(w_selector, len(w_arguments), interp, w_rcvr,
+ w_rcvr.class_shadow(interp.space), w_arguments=w_arguments)
- try:
- w_method = w_rcvr.class_shadow(interp.space).lookup(w_selector)
- except MethodNotFound:
- return s_frame._doesNotUnderstand(w_selector, argcount, interp, w_rcvr)
-
- code = w_method.primitive()
- if code:
- s_frame.push_all(args_w)
- try:
- return s_frame._call_primitive(code, interp, argcount, w_method, w_selector)
- except PrimitiveFailedError:
- pass # ignore this error and fall back to the Smalltalk version
- s_new_frame = w_method.create_frame(interp.space, w_rcvr, args_w, s_frame)
- s_frame.pop()
- return interp.stack_frame(s_new_frame)
-
- at expose_primitive(WITH_ARGS_EXECUTE_METHOD, unwrap_spec=[object, list, object], no_result=True)
+ at expose_primitive(WITH_ARGS_EXECUTE_METHOD,
+ result_is_new_frame=True, unwrap_spec=[object, list, object])
def func(interp, s_frame, w_rcvr, args_w, w_cm):
if not isinstance(w_cm, model.W_CompiledMethod):
raise PrimitiveFailedError()
code = w_cm.primitive()
if code:
raise PrimitiveFailedError("withArgs:executeMethod: not support with primitive method")
- s_new_frame = w_cm.create_frame(interp.space, w_rcvr, args_w, s_frame)
- return interp.stack_frame(s_new_frame)
+ return w_cm.create_frame(interp.space, w_rcvr, args_w)
+
+
+# XXX we might want to disable the assert_class checks in the 4 primitives below
@expose_primitive(SIGNAL, unwrap_spec=[object], clean_stack=False, no_result=True)
def func(interp, s_frame, w_rcvr):
- # XXX we might want to disable this check
- if not w_rcvr.getclass(interp.space).is_same_object(
- interp.space.w_Semaphore):
- raise PrimitiveFailedError()
- wrapper.SemaphoreWrapper(interp.space, w_rcvr).signal(s_frame.w_self())
+ assert_class(interp, w_rcvr, interp.space.w_Semaphore)
+ wrapper.SemaphoreWrapper(interp.space, w_rcvr).signal(s_frame)
@expose_primitive(WAIT, unwrap_spec=[object], clean_stack=False, no_result=True)
def func(interp, s_frame, w_rcvr):
- # XXX we might want to disable this check
- if not w_rcvr.getclass(interp.space).is_same_object(
- interp.space.w_Semaphore):
- raise PrimitiveFailedError()
- wrapper.SemaphoreWrapper(interp.space, w_rcvr).wait(s_frame.w_self())
+ assert_class(interp, w_rcvr, interp.space.w_Semaphore)
+ wrapper.SemaphoreWrapper(interp.space, w_rcvr).wait(s_frame)
- at expose_primitive(RESUME, unwrap_spec=[object], result_is_new_frame=True, clean_stack=False)
+ at expose_primitive(RESUME, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- # XXX we might want to disable this check
- if not w_rcvr.getclass(interp.space).is_same_object(
- interp.space.w_Process):
- raise PrimitiveFailedError()
- w_frame = wrapper.ProcessWrapper(interp.space, w_rcvr).resume(s_frame.w_self())
- w_frame = interp.space.unwrap_pointersobject(w_frame)
- return w_frame.as_context_get_shadow(interp.space)
+ import pdb; pdb.set_trace()
+ assert_class(interp, w_rcvr, interp.space.w_Process)
+ wrapper.ProcessWrapper(interp.space, w_rcvr).resume(s_frame)
- at expose_primitive(SUSPEND, unwrap_spec=[object], result_is_new_frame=True, clean_stack=False)
+ at expose_primitive(SUSPEND, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- # XXX we might want to disable this check
- if not w_rcvr.getclass(interp.space).is_same_object(
- interp.space.w_Process):
- raise PrimitiveFailedError()
- w_frame = wrapper.ProcessWrapper(interp.space, w_rcvr).suspend(s_frame.w_self())
- w_frame = interp.space.unwrap_pointersobject(w_frame)
- return w_frame.as_context_get_shadow(interp.space)
+ import pdb; pdb.set_trace()
+ assert_class(interp, w_rcvr, interp.space.w_Process)
+ wrapper.ProcessWrapper(interp.space, w_rcvr).suspend(s_frame)
+
+
@expose_primitive(FLUSH_CACHE, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
@@ -1455,11 +1424,9 @@
return w_context
-def activateClosure(interp, s_frame, w_block, args_w):
+def activateClosure(interp, w_block, args_w):
space = interp.space
- if not w_block.getclass(space).is_same_object(
- space.w_BlockClosure):
- raise PrimitiveFailedError()
+ assert_class(interp, w_block, space.w_BlockClosure)
block = wrapper.BlockClosureWrapper(space, w_block)
if not block.numArgs() == len(args_w):
raise PrimitiveFailedError()
@@ -1470,7 +1437,7 @@
# additionally to the smalltalk implementation, this also pushes
# args and copiedValues
- s_new_frame = block.asContextWithSender(s_frame.w_self(), args_w)
+ s_new_frame = block.create_frame(args_w)
w_closureMethod = s_new_frame.w_method()
assert isinstance(w_closureMethod, model.W_CompiledMethod)
@@ -1481,35 +1448,35 @@
@expose_primitive(CLOSURE_VALUE, unwrap_spec=[object], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure):
- return activateClosure(interp, s_frame, w_block_closure, [])
+ return activateClosure(interp, w_block_closure, [])
@expose_primitive(CLOSURE_VALUE_, unwrap_spec=[object, object], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure, w_a0):
- return activateClosure(interp, s_frame, w_block_closure, [w_a0])
+ return activateClosure(interp, w_block_closure, [w_a0])
@expose_primitive(CLOSURE_VALUE_VALUE, unwrap_spec=[object, object, object], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure, w_a0, w_a1):
- return activateClosure(interp, s_frame, w_block_closure, [w_a0, w_a1])
+ return activateClosure(interp, w_block_closure, [w_a0, w_a1])
@expose_primitive(CLOSURE_VALUE_VALUE_VALUE, unwrap_spec=[object, object, object, object], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure, w_a0, w_a1, w_a2):
- return activateClosure(interp, s_frame, w_block_closure, [w_a0, w_a1, w_a2])
+ return activateClosure(interp, w_block_closure, [w_a0, w_a1, w_a2])
@expose_primitive(CLOSURE_VALUE_VALUE_VALUE_VALUE, unwrap_spec=[object, object, object, object, object], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure, w_a0, w_a1, w_a2, w_a3):
- return activateClosure(interp, s_frame, w_block_closure, [w_a0, w_a1, w_a2, w_a3])
+ return activateClosure(interp, w_block_closure, [w_a0, w_a1, w_a2, w_a3])
@expose_primitive(CLOSURE_VALUE_WITH_ARGS, unwrap_spec=[object, list], result_is_new_frame=True)
def func(interp, s_frame, w_block_closure, args_w):
- return activateClosure(interp, s_frame, w_block_closure, args_w)
+ return activateClosure(interp, w_block_closure, args_w)
@expose_primitive(CLOSURE_VALUE_NO_CONTEXT_SWITCH, unwrap_spec=[object], result_is_new_frame=True, may_context_switch=False)
def func(interp, s_frame, w_block_closure):
- return activateClosure(interp, s_frame, w_block_closure, [])
+ return activateClosure(interp, w_block_closure, [])
@expose_primitive(CLOSURE_VALUE_NO_CONTEXT_SWITCH_, unwrap_spec=[object, object], result_is_new_frame=True, may_context_switch=False)
def func(interp, s_frame, w_block_closure, w_a0):
- return activateClosure(interp, s_frame, w_block_closure, [w_a0])
+ return activateClosure(interp, w_block_closure, [w_a0])
# ___________________________________________________________________________
# Override the default primitive to give latitude to the VM in context management.
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -607,13 +607,14 @@
class ContextPartShadow(AbstractRedirectingShadow):
__metaclass__ = extendabletype
- _attrs_ = ['_s_sender', '_pc', '_temps_and_stack',
+ _attrs_ = ['direct_sender', 'virtual_sender',
+ '_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
repr_classname = "ContextPartShadow"
_virtualizable_ = [
- "_s_sender", "_pc",
- "_temps_and_stack[*]", "_stack_ptr",
+ 'direct_sender', 'virtual_sender',
+ "_pc", "_temps_and_stack[*]", "_stack_ptr",
"_w_self", "_w_self_size"
]
@@ -621,7 +622,8 @@
# Initialization
def __init__(self, space, w_self):
- self._s_sender = None
+ self.direct_sender = None
+ self.virtual_sender = jit.vref_None
AbstractRedirectingShadow.__init__(self, space, w_self)
self.instances_w = {}
@@ -671,7 +673,7 @@
if n0 == constants.CTXPART_SENDER_INDEX:
assert isinstance(w_value, model.W_PointersObject)
if w_value.is_nil(self.space):
- self._s_sender = None
+ self.store_s_sender(None, raise_error=False)
else:
self.store_s_sender(w_value.as_context_get_shadow(self.space))
return
@@ -690,19 +692,40 @@
raise error.WrapperException("Index in context out of bounds")
# === Sender ===
+ # There are two fields for the sender (virtual and direct). Only one of them is can be set at a time.
+ # As long as the frame object is virtualized, using the virtual reference should increase performance.
+ # As soon as a frame object is forced to the heap, the direct reference must be used.
- def store_s_sender(self, s_sender):
- assert s_sender is None or isinstance(s_sender, ContextPartShadow)
- self._s_sender = s_sender
- raise error.SenderChainManipulation(self)
+ def is_fresh(self):
+ return self.direct_sender is None and self.virtual_sender is jit.vref_None
+
+ def finish_virtual_sender(self, save_direct_sender=True):
+ if self.virtual_sender is not jit.vref_None:
+ sender = self.virtual_sender()
+ jit.virtual_ref_finish(self.virtual_sender, sender)
+ self.virtual_sender = jit.vref_None
+ if save_direct_sender:
+ self.direct_sender = sender
+
+ def store_s_sender(self, s_sender, raise_error=True):
+ # If we have a virtual back reference, we must finish it before storing the direct reference.
+ self.finish_virtual_sender(save_direct_sender=False)
+ self.direct_sender = s_sender
+ if raise_error:
+ raise error.SenderChainManipulation(self)
def w_sender(self):
- if self._s_sender is None:
+ sender = self.s_sender()
+ if sender is None:
return self.space.w_nil
- return self._s_sender.w_self()
+ return sender.w_self()
def s_sender(self):
- return self._s_sender
+ if self.direct_sender:
+ return self.direct_sender
+ else:
+ result = self.virtual_sender()
+ return result
# === Stack Pointer ===
@@ -779,10 +802,7 @@
def mark_returned(self):
self.store_pc(-1)
- try:
- self.store_s_sender(None)
- except error.SenderChainManipulation, e:
- assert self == e.s_context
+ self.store_s_sender(None, raise_error=False)
def is_returned(self):
return self.pc() == -1 and self.w_sender().is_nil(self.space)
@@ -1042,7 +1062,10 @@
initialip = self.initialip()
initialip += 1 + self.w_method().literalsize
return self.space.wrap_int(initialip)
-
+
+ def reset_pc(self):
+ self.store_pc(self.initialip())
+
def initialip(self):
return self._initialip
@@ -1079,7 +1102,7 @@
@jit.unroll_safe
def __init__(self, space, w_self=None, w_method=None, w_receiver=None,
- arguments=None, s_sender=None, closure=None, pc=0):
+ arguments=[], closure=None, pc=0):
self = jit.hint(self, access_directly=True, fresh_virtualizable=True)
ContextPartShadow.__init__(self, space, w_self)
self.store_w_receiver(w_receiver)
@@ -1095,18 +1118,9 @@
else:
self._w_method = None
- if s_sender:
- try:
- self.store_s_sender(s_sender)
- except error.SenderChainManipulation, e:
- assert self == e.s_context
-
- if arguments:
- argc = len(arguments)
- for i0 in range(argc):
- self.settemp(i0, arguments[i0])
- else:
- argc = 0
+ argc = len(arguments)
+ for i0 in range(argc):
+ self.settemp(i0, arguments[i0])
if closure:
for i0 in range(closure.size()):
diff --git a/spyvm/test/jit.py b/spyvm/test/jit.py
--- a/spyvm/test/jit.py
+++ b/spyvm/test/jit.py
@@ -49,7 +49,7 @@
w_method.literals = literals
w_method.setbytes(bytes)
w_receiver = stack[0]
- s_frame = shadow.MethodContextShadow(space, None, w_method, w_receiver, [])
+ s_frame = shadow.MethodContextShadow(space, w_method=w_method, w_receiver=w_receiver)
w_frame = s_frame.w_self()
def interp_execute_frame():
return interp.interpret_toplevel(w_frame)
diff --git a/spyvm/test/test_interpreter.py b/spyvm/test/test_interpreter.py
--- a/spyvm/test/test_interpreter.py
+++ b/spyvm/test/test_interpreter.py
@@ -1013,12 +1013,12 @@
assert False
class StackTestInterpreter(TestInterpreter):
- def stack_frame(self, w_frame, may_interrupt=True):
+ def stack_frame(self, s_frame, s_sender, may_interrupt=True):
stack_depth = self.current_stack_depth
for i in range(stack_depth + 1):
assert sys._getframe(5 + i * 7).f_code.co_name == 'loop_bytecodes'
assert sys._getframe(6 + stack_depth * 7).f_code.co_name == 'loop'
- return interpreter.Interpreter.stack_frame(self, w_frame)
+ return interpreter.Interpreter.stack_frame(self, s_frame, s_sender, may_interrupt)
def test_actual_stackdepth():
# | testBlock |
diff --git a/spyvm/test/test_shadow.py b/spyvm/test/test_shadow.py
--- a/spyvm/test/test_shadow.py
+++ b/spyvm/test/test_shadow.py
@@ -281,9 +281,7 @@
def test_methodcontext_s_home():
w_context = methodcontext()
s_context = w_context.as_methodcontext_get_shadow(space)
- w_middle_context = methodcontext(w_sender=w_context)
- s_middle_context = w_middle_context.as_methodcontext_get_shadow(space)
w_closure = space.newClosure(w_context, 3, 0, [])
- s_closure_context = wrapper.BlockClosureWrapper(space, w_closure).asContextWithSender(w_middle_context, [])
+ s_closure_context = wrapper.BlockClosureWrapper(space, w_closure).create_frame()
assert s_closure_context.s_home() is s_context
diff --git a/spyvm/test/test_wrapper.py b/spyvm/test/test_wrapper.py
--- a/spyvm/test/test_wrapper.py
+++ b/spyvm/test/test_wrapper.py
@@ -12,7 +12,7 @@
cleanup_module(__name__)
def new_frame():
- return _new_frame(space, "")[0]
+ return _new_frame(space, "")[0].as_context_get_shadow(space)
def test_simpleread():
w_o = model.W_PointersObject(space, None, 2)
@@ -152,7 +152,7 @@
def test_suspend_asleep(self):
process, old_process = self.make_processes(4, 2, space.w_false)
- w_frame = process.suspend(space.w_true)
+ process.suspend(space.w_true)
process_list = wrapper.scheduler(space).get_process_list(process.priority())
assert process_list.first_link() is process_list.last_link()
assert process_list.first_link().is_nil(space)
@@ -168,7 +168,7 @@
assert process_list.first_link() is process_list.last_link()
assert process_list.first_link().is_nil(space)
assert old_process.my_list().is_nil(space)
- assert old_process.suspended_context() is current_context
+ assert old_process.suspended_context() is current_context.w_self()
assert wrapper.scheduler(space).active_process() is process._w_self
def new_process_consistency(self, process, old_process, w_active_context):
@@ -181,15 +181,16 @@
assert priority_list.first_link() is process._w_self
def old_process_consistency(self, old_process, old_process_context):
- assert old_process.suspended_context() is old_process_context
+ assert old_process.suspended_context() is old_process_context.w_self()
priority_list = wrapper.scheduler(space).get_process_list(old_process.priority())
assert priority_list.first_link() is old_process._w_self
def make_processes(self, sleepingpriority, runningpriority,
sleepingcontext):
+ if not isinstance(sleepingcontext, model.W_Object):
+ sleepingcontext = sleepingcontext.w_self()
scheduler = wrapper.scheduler(space)
- sleeping = new_process(priority=sleepingpriority,
- w_suspended_context=sleepingcontext)
+ sleeping = new_process(priority=sleepingpriority, w_suspended_context=sleepingcontext)
sleeping.put_to_sleep()
running = new_process(priority=runningpriority)
scheduler.store_active_process(running._w_self)
diff --git a/spyvm/test/test_zin_squeak_4_5_image.py b/spyvm/test/test_zin_squeak_4_5_image.py
--- a/spyvm/test/test_zin_squeak_4_5_image.py
+++ b/spyvm/test/test_zin_squeak_4_5_image.py
@@ -42,10 +42,11 @@
# create a frame for our newly crafted method with a valid sender (to avoid raising returnFromTop to early)
s_initial_frame = create_method(chr(0x7c)).create_frame(space, w(0), [])
- w_frame = w_method.create_frame(space, w(0), [], sender=s_initial_frame).w_self()
-
+ s_frame = w_method.create_frame(space, w(0))
+ s_frame.store_s_sender(s_initial_frame, raise_error=False)
+
try:
- interp.loop(w_frame)
+ interp.loop(s_frame.w_self())
except interpreter.ReturnFromTopLevel, e:
assert e.object.as_string() == 'b2'
except interpreter.StackOverflow, e:
@@ -67,11 +68,12 @@
w('ensure'), space.w_BlockClosure])
# create a frame for our newly crafted method with a valid sender (to avoid raising returnFromTop to early)
- s_initial_frame = create_method(chr(0x7c)).create_frame(space, w(0), [])
- w_frame = w_method.create_frame(space, w(0), [], sender=s_initial_frame).w_self()
-
+ s_initial_frame = create_method(chr(0x7c)).create_frame(space, w(0))
+ s_frame = w_method.create_frame(space, w(0))
+ s_frame.store_s_sender(s_initial_frame, raise_error=False)
+
try:
- interp.loop(w_frame)
+ interp.loop(s_frame.w_self())
except interpreter.ReturnFromTopLevel, e:
assert e.object.as_string() == 'b1'
except interpreter.StackOverflow, e:
diff --git a/spyvm/test/util.py b/spyvm/test/util.py
--- a/spyvm/test/util.py
+++ b/spyvm/test/util.py
@@ -81,11 +81,13 @@
self._loop = True
return interpreter.Interpreter.loop(self, w_active_context)
- def stack_frame(self, s_new_frame, may_context_switch=True):
+ def stack_frame(self, s_new_frame, s_sender, may_context_switch=True):
if not self._loop:
- return s_new_frame # this test is done to not loop in test,
- # but rather step just once where wanted
- return interpreter.Interpreter.stack_frame(self, s_new_frame, may_context_switch)
+ # this test is done to not loop in test, but rather step just once where wanted
+ # Unfortunately, we have to mimick some of the original behaviour.
+ s_new_frame.store_s_sender(s_sender, raise_error=False)
+ return s_new_frame
+ return interpreter.Interpreter.stack_frame(self, s_new_frame, s_sender, may_context_switch)
class BootstrappedObjSpace(objspace.ObjSpace):
diff --git a/spyvm/tool/analyseimage.py b/spyvm/tool/analyseimage.py
--- a/spyvm/tool/analyseimage.py
+++ b/spyvm/tool/analyseimage.py
@@ -56,7 +56,7 @@
w_method = s_class.lookup("tinyBenchmarks")
assert w_method
- w_frame = w_method.create_frame(interp.space, w_object, [])
+ w_frame = w_method.create_frame(interp.space, w_object)
interp.store_w_active_context(w_frame)
from spyvm.interpreter import BYTECODE_TABLE
diff --git a/spyvm/wrapper.py b/spyvm/wrapper.py
--- a/spyvm/wrapper.py
+++ b/spyvm/wrapper.py
@@ -82,37 +82,36 @@
assert isinstance(w_frame, model.W_PointersObject)
raise ProcessSwitch(w_frame.as_context_get_shadow(self.space))
- def deactivate(self, w_current_frame):
- self.put_to_sleep()
- self.store_suspended_context(w_current_frame)
+ def deactivate(self, s_current_frame, put_to_sleep=True):
+ if put_to_sleep:
+ self.put_to_sleep()
+ self.store_suspended_context(s_current_frame.w_self())
- def resume(self, w_current_frame):
+ def resume(self, s_current_frame):
sched = scheduler(self.space)
active_process = ProcessWrapper(self.space, sched.active_process())
active_priority = active_process.priority()
priority = self.priority()
if priority > active_priority:
- active_process.deactivate(w_current_frame)
- return self.activate()
+ active_process.deactivate(s_current_frame)
+ self.activate()
else:
self.put_to_sleep()
- return w_current_frame
def is_active_process(self):
return self._w_self.is_same_object(scheduler(self.space).active_process())
- def suspend(self, w_current_frame):
+ def suspend(self, s_current_frame):
if self.is_active_process():
assert self.my_list().is_nil(self.space)
w_process = scheduler(self.space).pop_highest_priority_process()
- self.store_suspended_context(w_current_frame)
- return ProcessWrapper(self.space, w_process).activate()
+ self.deactivate(s_current_frame, put_to_sleep=False)
+ ProcessWrapper(self.space, w_process).activate()
else:
if not self.my_list().is_nil(self.space):
process_list = ProcessListWrapper(self.space, self.my_list())
process_list.remove(self._w_self)
self.store_my_list(self.space.w_nil)
- return w_current_frame
class LinkedListWrapper(Wrapper):
first_link, store_first_link = make_getter_setter(0)
@@ -212,24 +211,22 @@
excess_signals, store_excess_signals = make_int_getter_setter(2)
- def signal(self, w_current_frame):
+ def signal(self, s_current_frame):
if self.is_empty_list():
value = self.excess_signals()
self.store_excess_signals(value + 1)
- return w_current_frame
else:
process = self.remove_first_link_of_list()
- return ProcessWrapper(self.space, process).resume(w_current_frame)
+ ProcessWrapper(self.space, process).resume(s_current_frame)
- def wait(self, w_current_frame):
+ def wait(self, s_current_frame):
excess = self.excess_signals()
w_process = scheduler(self.space).active_process()
if excess > 0:
self.store_excess_signals(excess - 1)
- return w_current_frame
else:
self.add_last_link(w_process)
- return ProcessWrapper(self.space, w_process).suspend(w_current_frame)
+ ProcessWrapper(self.space, w_process).suspend(s_current_frame)
class PointWrapper(Wrapper):
x, store_x = make_int_getter_setter(0)
@@ -241,7 +238,7 @@
startpc, store_startpc = make_int_getter_setter(constants.BLKCLSR_STARTPC)
numArgs, store_numArgs = make_int_getter_setter(constants.BLKCLSR_NUMARGS)
- def asContextWithSender(self, w_context, arguments):
+ def create_frame(self, arguments=[]):
from spyvm import shadow
w_outerContext = self.outerContext()
if not isinstance(w_outerContext, model.W_PointersObject):
@@ -250,10 +247,8 @@
w_method = s_outerContext.w_method()
w_receiver = s_outerContext.w_receiver()
pc = self.startpc() - w_method.bytecodeoffset() - 1
- s_new_frame = shadow.MethodContextShadow(self.space, None, w_method, w_receiver,
- arguments, s_sender=w_context.get_shadow(self.space),
- closure=self, pc=pc)
- return s_new_frame
+ return shadow.MethodContextShadow(self.space, w_method=w_method, w_receiver=w_receiver,
+ arguments=arguments, closure=self, pc=pc)
def tempsize(self):
# We ignore the number of temps a block has, because the first
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -104,19 +104,11 @@
return _run_benchmark(interp, 0, selector, "")
def context_for(interp, number, benchmark, stringarg):
- # XXX: Copied from interpreter >> perform
- space = interp.space
- argcount = 0 if stringarg == "" else 1
- w_receiver = space.wrap_int(number)
- w_selector = interp.perform(space.wrap_string(benchmark), "asSymbol")
- w_method = model.W_CompiledMethod(space, header=512)
- w_method.literalatput0(space, 1, w_selector)
- w_method.setbytes([chr(131), chr(argcount << 5), chr(124)]) #returnTopFromMethodBytecodeBytecode
- s_frame = shadow.MethodContextShadow(space, None, w_method, w_receiver, [])
- s_frame.push(w_receiver)
- if not stringarg == "":
- s_frame.push(space.wrap_string(stringarg))
- return s_frame
+ w_receiver = interp.space.wrap_int(number)
+ if stringarg:
+ return interp.create_toplevel_context(w_receiver, benchmark, interp.space.wrap_string(stringarg))
+ else:
+ return interp.create_toplevel_context(w_receiver, benchmark)
def _usage(argv):
print """
diff --git a/targettinybenchsmalltalk.py b/targettinybenchsmalltalk.py
--- a/targettinybenchsmalltalk.py
+++ b/targettinybenchsmalltalk.py
@@ -25,7 +25,7 @@
w_object = model.W_SmallInteger(0)
s_class = w_object.class_shadow(space)
w_method = s_class.lookup(w_selector)
- s_frame = w_method.create_frame(space, w_object, [])
+ s_frame = w_method.create_frame(space, w_object)
return interp, s_frame
interp, s_frame = setup()
From noreply at buildbot.pypy.org Mon Jul 7 22:13:29 2014
From: noreply at buildbot.pypy.org (pjenvey)
Date: Mon, 7 Jul 2014 22:13:29 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: update #13 (forgot to commit this
before)
Message-ID: <20140707201329.A11B01C024A@cobra.cs.uni-duesseldorf.de>
Author: Philip Jenvey
Branch: extradoc
Changeset: r5353:36da5bb08996
Date: 2014-07-07 13:13 -0700
http://bitbucket.org/pypy/extradoc/changeset/36da5bb08996/
Log: update #13 (forgot to commit this before)
diff --git a/blog/draft/py3k-status-update-13.rst b/blog/draft/py3k-status-update-13.rst
new file mode 100644
--- /dev/null
+++ b/blog/draft/py3k-status-update-13.rst
@@ -0,0 +1,51 @@
+Py3k status update #13
+----------------------
+
+This is the 13th status update about our work on the `py3k branch`_, which we
+can work on thanks to all of the people who donated_ to the `py3k proposal`_.
+
+We're just finishing up a cleanup of int/long types. This work helps the py3k
+branch unify these types into the Python 3 int and restore `JIT compilation of
+machine sized integers`_.
+
+This cleanup also removes `multimethods`_ from these types. PyPy has
+historically used a clever implementation of multimethod dispatch for declaring
+methods of the __builtin__ types in RPython.
+
+This multimethod scheme provides some convenient features for doing this,
+however we've come to the conclusion that it may be more trouble than it's
+worth. A major problem of multimethods is that they generate a large amount of
+stub methods which burden the already lengthy and memory hungry RPython
+translation process. Also, their implementation and behavior can be somewhat
+complicated/obscure.
+
+The alternative to multimethods involves doing the work of the type checking
+and dispatching rules in a more verbose, manual way. It's a little more work in
+the end but less magical.
+
+Recently, Manuel Jacob finished a large cleanup effort of the
+unicode/string/bytearray types that also removed their multimethods. This work
+also benefits the py3k branch: it'll help with future `PEP 393`_ (or `PEP 393
+alternative`_) work. This effort was partly sponsored by Google's Summer of
+Code: thanks Manuel and Google!
+
+Now there's only a couple major pieces left in the multimethod removal (the
+float/complex types and special marshaling code) and a few minor pieces that
+should be relatively easy.
+
+In conclusion, there's been some good progress made on py3k and multimethod
+removal this winter, albeit a bit slower than we would have liked.
+
+cheers,
+Phil
+
+.. _donated: http://morepypy.blogspot.com/2012/01/py3k-and-numpy-first-stage-thanks-to.html
+.. _`py3k proposal`: http://pypy.org/py3donate.html
+.. _`py3k branch`: https://bitbucket.org/pypy/pypy/commits/all/tip/branch%28%22py3k%22%29
+
+.. _`JIT compilation of machine sized integers`:
+ http://morepypy.blogspot.com/2013/11/py3k-status-update-12.html
+.. _`multimethods`: http://doc.pypy.org/en/latest/objspace.html#multimethods
+
+.. _`PEP 393`: http://www.python.org/dev/peps/pep-0393/
+.. _`PEP 393 alternative`: http://lucumr.pocoo.org/2014/1/9/ucs-vs-utf8/
From noreply at buildbot.pypy.org Tue Jul 8 01:26:34 2014
From: noreply at buildbot.pypy.org (mattip)
Date: Tue, 8 Jul 2014 01:26:34 +0200 (CEST)
Subject: [pypy-commit] pypy default: fix unsafe FormatMessage call (windows)
Message-ID: <20140707232634.B41191D353E@cobra.cs.uni-duesseldorf.de>
Author: mattip
Branch:
Changeset: r72379:f3d274416c97
Date: 2014-07-08 09:22 +1000
http://bitbucket.org/pypy/pypy/changeset/f3d274416c97/
Log: fix unsafe FormatMessage call (windows)
diff --git a/rpython/rlib/rwin32.py b/rpython/rlib/rwin32.py
--- a/rpython/rlib/rwin32.py
+++ b/rpython/rlib/rwin32.py
@@ -79,7 +79,7 @@
"MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT)")
defines = """FORMAT_MESSAGE_ALLOCATE_BUFFER FORMAT_MESSAGE_FROM_SYSTEM
- MAX_PATH _MAX_ENV
+ MAX_PATH _MAX_ENV FORMAT_MESSAGE_IGNORE_INSERTS
WAIT_OBJECT_0 WAIT_TIMEOUT INFINITE
ERROR_INVALID_HANDLE
DELETE READ_CONTROL SYNCHRONIZE WRITE_DAC
@@ -226,7 +226,8 @@
buf[0] = lltype.nullptr(rffi.CCHARP.TO)
try:
msglen = FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER |
- FORMAT_MESSAGE_FROM_SYSTEM,
+ FORMAT_MESSAGE_FROM_SYSTEM |
+ FORMAT_MESSAGE_IGNORE_INSERTS,
None,
rffi.cast(DWORD, code),
DEFAULT_LANGUAGE,
diff --git a/rpython/rlib/test/test_rwin32.py b/rpython/rlib/test/test_rwin32.py
--- a/rpython/rlib/test/test_rwin32.py
+++ b/rpython/rlib/test/test_rwin32.py
@@ -58,3 +58,9 @@
for key, value in env.iteritems():
assert type(key) is unicode
assert type(value) is unicode
+
+def test_formaterror():
+ # choose one with formatting characters and newlines
+ msg = rwin32.FormatError(34)
+ assert '%2' in msg
+
From noreply at buildbot.pypy.org Tue Jul 8 05:16:42 2014
From: noreply at buildbot.pypy.org (mattip)
Date: Tue, 8 Jul 2014 05:16:42 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge dtype record hash,
based on pr#242 (yuyichao)
Message-ID: <20140708031642.918591C024A@cobra.cs.uni-duesseldorf.de>
Author: mattip
Branch:
Changeset: r72380:f20ac16753b6
Date: 2014-07-08 13:15 +1000
http://bitbucket.org/pypy/pypy/changeset/f20ac16753b6/
Log: merge dtype record hash, based on pr#242 (yuyichao)
diff --git a/pypy/module/micronumpy/descriptor.py b/pypy/module/micronumpy/descriptor.py
--- a/pypy/module/micronumpy/descriptor.py
+++ b/pypy/module/micronumpy/descriptor.py
@@ -6,7 +6,7 @@
from pypy.interpreter.typedef import (TypeDef, GetSetProperty,
interp_attrproperty, interp_attrproperty_w)
from rpython.rlib import jit
-from rpython.rlib.objectmodel import specialize
+from rpython.rlib.objectmodel import specialize, compute_hash
from rpython.rlib.rarithmetic import r_longlong, r_ulonglong
from pypy.module.micronumpy import types, boxes, base, support, constants as NPY
from pypy.module.micronumpy.appbridge import get_appbridge_cache
@@ -254,8 +254,38 @@
def descr_ne(self, space, w_other):
return space.wrap(not self.eq(space, w_other))
+ def _compute_hash(self, space, x):
+ from rpython.rlib.rarithmetic import intmask
+ if self.fields is None and self.subdtype is None:
+ endian = self.byteorder
+ if endian == NPY.NATIVE:
+ endian = NPY.NATBYTE
+ flags = 0
+ y = 0x345678
+ y = intmask((1000003 * y) ^ ord(self.kind[0]))
+ y = intmask((1000003 * y) ^ ord(endian[0]))
+ y = intmask((1000003 * y) ^ flags)
+ y = intmask((1000003 * y) ^ self.elsize)
+ if self.is_flexible():
+ y = intmask((1000003 * y) ^ self.alignment)
+ return intmask((1000003 * x) ^ y)
+ if self.fields is not None:
+ for name, (offset, subdtype) in self.fields.iteritems():
+ assert isinstance(subdtype, W_Dtype)
+ y = intmask(1000003 * (0x345678 ^ compute_hash(name)))
+ y = intmask(1000003 * (y ^ compute_hash(offset)))
+ y = intmask(1000003 * (y ^ subdtype._compute_hash(space,
+ 0x345678)))
+ x = intmask(x ^ y)
+ if self.subdtype is not None:
+ for s in self.shape:
+ x = intmask((1000003 * x) ^ compute_hash(s))
+ x = self.base._compute_hash(space, x)
+ return x
+
def descr_hash(self, space):
- return space.hash(self.descr_reduce(space))
+ return space.wrap(self._compute_hash(space, 0x345678))
+
def descr_str(self, space):
if self.fields:
diff --git a/pypy/module/micronumpy/test/test_dtypes.py b/pypy/module/micronumpy/test/test_dtypes.py
--- a/pypy/module/micronumpy/test/test_dtypes.py
+++ b/pypy/module/micronumpy/test/test_dtypes.py
@@ -368,15 +368,30 @@
d5 = numpy.dtype([('f0', 'i4'), ('f1', d2)])
d6 = numpy.dtype([('f0', 'i4'), ('f1', d3)])
import sys
- if '__pypy__' not in sys.builtin_module_names:
- assert hash(d1) == hash(d2)
- assert hash(d1) != hash(d3)
- assert hash(d4) == hash(d5)
- assert hash(d4) != hash(d6)
- else:
- for d in [d1, d2, d3, d4, d5, d6]:
- raises(TypeError, hash, d)
+ assert hash(d1) == hash(d2)
+ assert hash(d1) != hash(d3)
+ assert hash(d4) == hash(d5)
+ assert hash(d4) != hash(d6)
+ def test_record_hash(self):
+ from numpy import dtype
+ # make sure the fields hash return different value
+ # for different order of field in a structure
+
+ # swap names
+ t1 = dtype([('x', '
Author: Yichao Yu
Branch:
Changeset: r72381:2aabeb712f61
Date: 2014-07-04 22:11 +0800
http://bitbucket.org/pypy/pypy/changeset/2aabeb712f61/
Log: make numpy scalar non-iterable
diff --git a/pypy/module/micronumpy/boxes.py b/pypy/module/micronumpy/boxes.py
--- a/pypy/module/micronumpy/boxes.py
+++ b/pypy/module/micronumpy/boxes.py
@@ -153,6 +153,11 @@
raise OperationError(space.w_IndexError, space.wrap(
"invalid index to scalar variable"))
+ def descr_iter(self, space):
+ # Making numpy scalar non-iterable with a valid __getitem__ method
+ raise oefmt(space.w_TypeError,
+ "'%T' object is not iterable", self)
+
def descr_str(self, space):
return space.wrap(self.get_dtype(space).itemtype.str_format(self))
@@ -555,6 +560,7 @@
__new__ = interp2app(W_GenericBox.descr__new__.im_func),
__getitem__ = interp2app(W_GenericBox.descr_getitem),
+ __iter__ = interp2app(W_GenericBox.descr_iter),
__str__ = interp2app(W_GenericBox.descr_str),
__repr__ = interp2app(W_GenericBox.descr_str),
__format__ = interp2app(W_GenericBox.descr_format),
diff --git a/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py b/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
--- a/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
+++ b/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
@@ -249,4 +249,12 @@
assert d.dtype == dtype('int32')
assert (d == [[1, 0, 0], [0, 1, 0], [0, 0, 1]]).all()
-
+ def test_scalar_iter(self):
+ from numpypy import int8, int16, int32, int64, float32, float64
+ for t in int8, int16, int32, int64, float32, float64:
+ try:
+ iter(t(17))
+ except TypeError:
+ pass
+ else:
+ assert False, "%s object should not be iterable." % t
From noreply at buildbot.pypy.org Tue Jul 8 05:53:43 2014
From: noreply at buildbot.pypy.org (mattip)
Date: Tue, 8 Jul 2014 05:53:43 +0200 (CEST)
Subject: [pypy-commit] pypy default: move test to untranslated
Message-ID: <20140708035343.A5D751C0906@cobra.cs.uni-duesseldorf.de>
Author: mattip
Branch:
Changeset: r72382:30e15663c576
Date: 2014-07-08 13:46 +1000
http://bitbucket.org/pypy/pypy/changeset/30e15663c576/
Log: move test to untranslated
diff --git a/pypy/module/micronumpy/test/test_scalar.py b/pypy/module/micronumpy/test/test_scalar.py
--- a/pypy/module/micronumpy/test/test_scalar.py
+++ b/pypy/module/micronumpy/test/test_scalar.py
@@ -290,3 +290,13 @@
assert np.isnan(b/a)
b = t(0.)
assert np.isnan(b/a)
+
+ def test_scalar_iter(self):
+ from numpypy import int8, int16, int32, int64, float32, float64
+ for t in int8, int16, int32, int64, float32, float64:
+ try:
+ iter(t(17))
+ except TypeError:
+ pass
+ else:
+ assert False, "%s object should not be iterable." % t
diff --git a/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py b/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
--- a/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
+++ b/pypy/module/test_lib_pypy/numpypy/core/test_numeric.py
@@ -248,13 +248,3 @@
assert d.shape == (3, 3)
assert d.dtype == dtype('int32')
assert (d == [[1, 0, 0], [0, 1, 0], [0, 0, 1]]).all()
-
- def test_scalar_iter(self):
- from numpypy import int8, int16, int32, int64, float32, float64
- for t in int8, int16, int32, int64, float32, float64:
- try:
- iter(t(17))
- except TypeError:
- pass
- else:
- assert False, "%s object should not be iterable." % t
From noreply at buildbot.pypy.org Tue Jul 8 09:43:29 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Tue, 8 Jul 2014 09:43:29 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: pobjspace and interpreter tests
now pass
Message-ID: <20140708074329.B74EA1C1068@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72383:104602bd7dd9
Date: 2014-07-08 02:37 -0500
http://bitbucket.org/pypy/pypy/changeset/104602bd7dd9/
Log: pobjspace and interpreter tests now pass
diff too long, truncating to 2000 out of 2118 lines
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -8,6 +8,7 @@
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.astcompiler.astbuilder import ast_from_node
from pypy.interpreter.astcompiler import ast, consts
+from pypy.interpreter.utf8 import Utf8Str
class TestAstBuilder:
@@ -1103,7 +1104,7 @@
assert info.encoding == "utf-7"
s = ast_from_node(space, tree, info).body[0].value
assert isinstance(s, ast.Str)
- assert space.eq_w(s.s, space.wrap(sentence))
+ assert space.eq_w(s.s, space.wrap(Utf8Str.from_unicode(sentence)))
def test_string_bug(self):
space = self.space
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -919,11 +919,7 @@
import sys
d = {}
exec '# -*- coding: utf-8 -*-\n\nu = u"\xf0\x9f\x92\x8b"' in d
- if sys.maxunicode > 65535 and self.maxunicode > 65535:
- expected_length = 1
- else:
- expected_length = 2
- assert len(d['u']) == expected_length
+ assert len(d['u']) == 1
class TestOptimizations:
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -153,6 +153,10 @@
const = code_hook(space, const, hidden_applevel, code_hook)
if isinstance(const, unicode):
const = Utf8Str.from_unicode(const)
+ if isinstance(const, tuple):
+ const = tuple(x if not isinstance(x, unicode)
+ else Utf8Str.from_unicode(x)
+ for x in const)
newconsts_w[num] = space.wrap(const)
num += 1
# stick the underlying CPython magic value, if the code object
diff --git a/pypy/interpreter/pyparser/test/test_parsestring.py b/pypy/interpreter/pyparser/test/test_parsestring.py
--- a/pypy/interpreter/pyparser/test/test_parsestring.py
+++ b/pypy/interpreter/pyparser/test/test_parsestring.py
@@ -102,7 +102,4 @@
def test_decode_unicode_utf8(self):
buf = parsestring.decode_unicode_utf8(self.space,
'u"\xf0\x9f\x92\x8b"', 2, 6)
- if sys.maxunicode == 65535:
- assert buf == r"\U0000d83d\U0000dc8b"
- else:
- assert buf == r"\U0001f48b"
+ assert buf == r"\U0001f48b"
diff --git a/pypy/interpreter/test/test_gateway.py b/pypy/interpreter/test/test_gateway.py
--- a/pypy/interpreter/test/test_gateway.py
+++ b/pypy/interpreter/test/test_gateway.py
@@ -4,6 +4,7 @@
from pypy.interpreter import gateway, argument
from pypy.interpreter.gateway import ObjSpace, W_Root, WrappedDefault
from pypy.interpreter.signature import Signature
+from pypy.interpreter.utf8 import Utf8Str
import py
import sys
@@ -519,7 +520,7 @@
unicode])
w_app_g3_u = space.wrap(app_g3_u)
assert self.space.eq_w(
- space.call_function(w_app_g3_u, w(u"foo")),
+ space.call_function(w_app_g3_u, w(Utf8Str("foo"))),
w(3))
assert self.space.eq_w(
space.call_function(w_app_g3_u, w("baz")),
diff --git a/pypy/interpreter/test/test_objspace.py b/pypy/interpreter/test/test_objspace.py
--- a/pypy/interpreter/test/test_objspace.py
+++ b/pypy/interpreter/test/test_objspace.py
@@ -2,6 +2,7 @@
from pypy.interpreter.error import OperationError
from pypy.interpreter.function import Function
from pypy.interpreter.pycode import PyCode
+from pypy.interpreter.utf8 import Utf8Str
from rpython.rlib.rarithmetic import r_longlong, r_ulonglong
import sys
@@ -217,8 +218,9 @@
w = space.wrap
assert space.str0_w(w("123")) == "123"
exc = space.raises_w(space.w_TypeError, space.str0_w, w("123\x004"))
- assert space.unicode0_w(w(u"123")) == u"123"
- exc = space.raises_w(space.w_TypeError, space.unicode0_w, w(u"123\x004"))
+ assert space.unicode0_w(w(Utf8Str("123"))) == u"123"
+ exc = space.raises_w(space.w_TypeError, space.unicode0_w,
+ w(Utf8Str.from_unicode(u"123\x004")))
def test_getindex_w(self):
w_instance1 = self.space.appexec([], """():
diff --git a/pypy/interpreter/test/test_utf8.py b/pypy/interpreter/test/test_utf8.py
--- a/pypy/interpreter/test/test_utf8.py
+++ b/pypy/interpreter/test/test_utf8.py
@@ -35,13 +35,15 @@
iter.move(i)
if i != 4:
assert iter.peek_next() == [0x41, 0x10F, 0x20AC, 0x1F63D][i]
- assert list(iter) == [0x41, 0x10F, 0x20AC, 0x1F63D][i:]
+ l = list(iter)
+ assert l == [0x41, 0x10F, 0x20AC, 0x1F63D][i:]
for i in range(1, 5):
iter = s.codepoint_iter()
list(iter) # move the iterator to the end
iter.move(-i)
- assert list(iter) == [0x41, 0x10F, 0x20AC, 0x1F63D][4-i:]
+ l = list(iter)
+ assert l == [0x41, 0x10F, 0x20AC, 0x1F63D][4-i:]
iter = s.char_iter()
l = [s.bytes.decode('utf8') for s in list(iter)]
@@ -50,6 +52,27 @@
else:
assert l == [u'A', u'\u010F', u'\u20AC', u'\U00001F63D']
+def test_reverse_iterator():
+ s = build_utf8str()
+ iter = s.reverse_codepoint_iter()
+ assert iter.peek_next() == 0x1F63D
+ assert list(iter) == [0x1F63D, 0x20AC, 0x10F, 0x41]
+
+ for i in range(1, 5):
+ iter = s.reverse_codepoint_iter()
+ iter.move(i)
+ if i != 4:
+ assert iter.peek_next() == [0x1F63D, 0x20AC, 0x10F, 0x41][i]
+ l = list(iter)
+ assert l == [0x1F63D, 0x20AC, 0x10F, 0x41][i:]
+
+ for i in range(1, 5):
+ iter = s.reverse_codepoint_iter()
+ list(iter) # move the iterator to the end
+ iter.move(-i)
+ l = list(iter)
+ assert l == [0x1F63D, 0x20AC, 0x10F, 0x41][4-i:]
+
def test_builder_append_slice():
builder = Utf8Builder()
builder.append_slice(Utf8Str.from_unicode(u"0ê0"), 1, 2)
@@ -57,6 +80,10 @@
assert builder.build() == u"êes"
+def test_eq():
+ assert Utf8Str('test') == Utf8Str('test')
+ assert Utf8Str('test') != Utf8Str('test1')
+
def test_unicode_literal_comparison():
builder = Utf8Builder()
builder.append(0x10F)
@@ -152,5 +179,17 @@
assert s.split() == u.split()
assert s.split(' ') == u.split(' ')
- assert s.split(maxsplit=1) == u.split(None, 1)
+ assert s.split(maxsplit=2) == u.split(None, 2)
+ assert s.split(' ', 2) == u.split(' ', 2)
assert s.split('\n') == [s]
+
+def test_rsplit():
+ # U+00A0 is a non-breaking space
+ u = u"one two three\xA0four"
+ s = Utf8Str.from_unicode(u)
+
+ assert s.rsplit() == u.rsplit()
+ assert s.rsplit(' ') == u.rsplit(' ')
+ assert s.rsplit(maxsplit=2) == u.rsplit(None, 2)
+ assert s.rsplit(' ', 2) == u.rsplit(' ', 2)
+ assert s.rsplit('\n') == [s]
diff --git a/pypy/interpreter/utf8.py b/pypy/interpreter/utf8.py
--- a/pypy/interpreter/utf8.py
+++ b/pypy/interpreter/utf8.py
@@ -104,6 +104,9 @@
return Utf8Str('')
# TODO: If start > _len or stop >= _len, then raise exception
+ if stop > len(self):
+ stop = len(self)
+
if self._is_ascii:
return Utf8Str(self.bytes[start:stop], True)
@@ -124,6 +127,12 @@
return Utf8Str(self.bytes[start_byte:stop_byte], is_ascii,
stop - start)
+ def byte_slice(self, start, end):
+ return Utf8Str(self.bytes[start:end], self._is_ascii)
+
+ def __repr__(self):
+ return "" % unicode(self)
+
def __add__(self, other):
return Utf8Str(self.bytes + other.bytes,
self._is_ascii and other._is_ascii)
@@ -134,6 +143,9 @@
def __len__(self):
return self._len
+ def __hash__(self):
+ return hash(self.bytes)
+
def __eq__(self, other):
"""NOT_RPYTHON"""
if isinstance(other, Utf8Str):
@@ -143,6 +155,27 @@
return False
+ def __ne__(self, other):
+ """NOT_RPYTHON"""
+ if isinstance(other, Utf8Str):
+ return self.bytes != other.bytes
+ if isinstance(other, unicode):
+ return unicode(self.bytes, 'utf8') != other
+
+ return True
+
+ def __lt__(self, other):
+ return self.bytes < other.bytes
+
+ def __le__(self, other):
+ return self.bytes <= other.bytes
+
+ def __gt__(self, other):
+ return self.bytes > other.bytes
+
+ def __ge__(self, other):
+ return self.bytes >= other.bytes
+
@specialize.argtype(1)
def __contains__(self, other):
if isinstance(other, Utf8Str):
@@ -158,11 +191,20 @@
def __iter__(self):
return self.char_iter()
+ def __unicode__(self):
+ return unicode(self.bytes, 'utf8')
+
def char_iter(self):
- return Utf8StrCharIterator(self)
+ return Utf8CharacterIter(self)
+
+ def reverse_char_iter(self):
+ return Utf8ReverseCharacterIter(self)
def codepoint_iter(self):
- return Utf8StrCodePointIterator(self)
+ return Utf8CodePointIter(self)
+
+ def reverse_codepoint_iter(self):
+ return Utf8ReverseCodePointIter(self)
@specialize.argtype(1, 2)
def _bound_check(self, start, end):
@@ -270,12 +312,11 @@
else:
break
- iter.prev_count(1)
start_byte = iter.byte_pos
- iter.next_count(1)
if maxsplit == 0:
- res.append(Utf8Str(self.bytes[start_byte:len(self.bytes)]))
+ res.append(Utf8Str(self.bytes[start_byte:len(self.bytes)],
+ self._is_ascii))
break
for cd in iter:
@@ -283,12 +324,12 @@
break
else:
# Hit the end of the string
- res.append(Utf8Str(self.bytes[start_byte:len(self.bytes)]))
+ res.append(Utf8Str(self.bytes[start_byte:len(self.bytes)],
+ self._is_ascii))
break
- iter.prev_count(1)
- res.append(Utf8Str(self.bytes[start_byte:iter.byte_pos]))
- iter.next_count(1)
+ res.append(Utf8Str(self.bytes[start_byte:iter.byte_pos],
+ self._is_ascii))
maxsplit -= 1
return res
@@ -302,15 +343,54 @@
other_bytes = other.bytes
return [Utf8Str(s) for s in self.bytes.rsplit(other_bytes, maxsplit)]
- # TODO: I need to make a reverse_codepoint_iter first
+ res = []
+ iter = self.reverse_codepoint_iter()
+ while True:
+ # Find the start of the next word
+ for cd in iter:
+ if not unicodedb.isspace(cd):
+ break
+ else:
+ break
+ start_byte = self.next_char(iter.byte_pos)
+
+ if maxsplit == 0:
+ res.append(Utf8Str(self.bytes[0:start_byte], self._is_ascii))
+ break
+
+ # Find the end of the word
+ for cd in iter:
+ if unicodedb.isspace(cd):
+ break
+ else:
+ # We hit the end of the string
+ res.append(Utf8Str(self.bytes[0:start_byte], self._is_ascii))
+ break
+
+ end_byte = self.next_char(iter.byte_pos)
+ res.append(Utf8Str(self.bytes[end_byte:start_byte],
+ self._is_ascii))
+ maxsplit -= 1
+
+ res.reverse()
+ return res
+
+ @specialize.argtype(1)
def join(self, other):
if len(other) == 0:
return Utf8Str('')
- assert isinstance(other[0], Utf8Str)
- return Utf8Str(self.bytes.join([s.bytes for s in other]),
- self._is_ascii and all(s._is_ascii for s in other))
+ if isinstance(other[0], Utf8Str):
+ return Utf8Str(
+ self.bytes.join([s.bytes for s in other]),
+ self._is_ascii and all(s._is_ascii for s in other)
+ )
+ else:
+ return Utf8Str(
+ self.bytes.join([s for s in other]),
+ self._is_ascii and all(s._is_ascii for s in other)
+ )
def as_unicode(self):
"""NOT_RPYTHON"""
@@ -321,83 +401,18 @@
"""NOT_RPYTHON"""
return Utf8Str(u.encode('utf-8'))
-class Utf8StrCodePointIterator(object):
- def __init__(self, ustr):
- self.ustr = ustr
- self.pos = 0
- self.byte_pos = 0
+ def next_char(self, byte_pos):
+ return byte_pos + utf8_code_length[ord(self.bytes[byte_pos])]
- if len(ustr) != 0:
- self.current = utf8ord_bytes(ustr.bytes, 0)
- else:
- self.current = -1
+ def prev_char(self, byte_pos):
+ if byte_pos == 0:
+ return -1
+ byte_pos -= 1
+ while utf8_code_length[ord(self.bytes[byte_pos])] == 0:
+ byte_pos -= 1
+ return byte_pos
- def __iter__(self):
- return self
- def next(self):
- if self.pos == len(self.ustr):
- raise StopIteration()
- self.current = utf8ord_bytes(self.ustr.bytes, self.byte_pos)
-
- self.byte_pos += utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
- self.pos += 1
-
- return self.current
-
- def next_count(self, count=1):
- self.pos += count
- while count > 1:
- self.byte_pos += utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
- count -= 1
- self.current = utf8ord_bytes(self.ustr.bytes, self.byte_pos)
- self.byte_pos += utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
-
- def prev_count(self, count=1):
- self.pos -= count
- while count > 0:
- self.byte_pos -= 1
- while utf8_code_length[ord(self.ustr.bytes[self.byte_pos])] == 0:
- self.byte_pos -= 1
- count -= 1
-
- self.current = utf8ord_bytes(self.ustr.bytes, self.byte_pos)
-
- def move(self, count):
- if count > 0:
- self.next_count(count)
- elif count < 0:
- self.prev_count(-count)
-
- def peek_next(self):
- return utf8ord_bytes(self.ustr.bytes, self.byte_pos)
-
-class Utf8StrCharIterator(object):
- def __init__(self, ustr):
- self.ustr = ustr
- self.byte_pos = 0
- self.current = self._get_current()
-
- def __iter__(self):
- return self
-
- def _get_current(self):
- if self.byte_pos == len(self.ustr.bytes):
- return None
- length = utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
- return Utf8Str(''.join([self.ustr.bytes[i]
- for i in range(self.byte_pos, self.byte_pos + length)]),
- length == 1)
-
- def next(self):
- #import pdb; pdb.set_trace()
- ret = self.current
- if ret is None:
- raise StopIteration()
-
- self.byte_pos += utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
- self.current = self._get_current()
- return ret
class Utf8Builder(object):
@specialize.argtype(1)
@@ -452,9 +467,168 @@
raise TypeError("Invalid type '%s' for Utf8Str.append_slice" %
type(s))
+ @specialize.argtype(1)
def append_multiple_char(self, c, count):
- self._builder.append_multiple_char(c, count)
+ # TODO: What do I do when I have an int? Is it fine to just loop over
+ # .append(c) then? Should (can) I force a resize first?
+ if isinstance(c, int):
+ self._builder.append_multiple_char(chr(c), count)
+ return
+
+ if len(c) > 1:
+ import pdb; pdb.set_trace()
+ if isinstance(c, str):
+ self._builder.append_multiple_char(c, count)
+ else:
+ self._builder.append_multiple_char(c.bytes, count)
def build(self):
return Utf8Str(self._builder.build(), self._is_ascii)
+# _______________________________________________
+
+# iter.current is the current (ie the last returned) element
+# iter.pos is the position of the current element
+# iter.byte_pos is the byte position of the current element
+# In the before-the-start state, for forward iterators iter.pos and
+# iter.byte_pos are -1. For reverse iterators, they are len(ustr) and
+# len(ustr.bytes) respectively.
+
+class ForwardIterBase(object):
+ def __init__(self, ustr):
+ self.ustr = ustr
+ self.pos = -1
+
+ self._byte_pos = 0
+ self.byte_pos = -1
+ self.current = self._default
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ if self.pos + 1 == len(self.ustr):
+ raise StopIteration()
+
+ self.pos += 1
+ self.byte_pos = self._byte_pos
+
+ self.current = self._value(self.byte_pos)
+
+ self._byte_pos = self.ustr.next_char(self._byte_pos)
+ return self.current
+
+ def peek_next(self):
+ return self._value(self._byte_pos)
+
+ def peek_prev(self):
+ return self._value(self._move_backward(self.byte_pos))
+
+ def move(self, count):
+ if count > 0:
+ self.pos += count
+
+ while count != 1:
+ self._byte_pos = self.ustr.next_char(self._byte_pos)
+ count -= 1
+ self.byte_pos = self._byte_pos
+ self._byte_pos = self.ustr.next_char(self._byte_pos)
+ self.current = self._value(self.byte_pos)
+
+ elif count < 0:
+ self.pos += count
+ while count < -1:
+ self.byte_pos = self.ustr.prev_char(self.byte_pos)
+ count += 1
+ self._byte_pos = self.byte_pos
+ self.byte_pos = self.ustr.prev_char(self.byte_pos)
+ self.current = self._value(self.byte_pos)
+
+ def copy(self):
+ iter = self.__class__(self.ustr)
+ iter.pos = self.pos
+ iter.byte_pos = self.byte_pos
+ iter._byte_pos = self._byte_pos
+ iter.current = self.current
+ return iter
+
+class ReverseIterBase(object):
+ def __init__(self, ustr):
+ self.ustr = ustr
+ self.pos = len(ustr)
+ self.byte_pos = len(ustr.bytes)
+ self.current = self._default
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ if self.pos == 0:
+ raise StopIteration()
+
+ self.pos -= 1
+ self.byte_pos = self.ustr.prev_char(self.byte_pos)
+ self.current = self._value(self.byte_pos)
+ return self.current
+
+ def peek_next(self):
+ return self._value(self.ustr.prev_char(self.byte_pos))
+
+ def peek_prev(self):
+ return self._value(self.ustr.next_char(self.byte_pos))
+
+ def move(self, count):
+ if count > 0:
+ self.pos -= count
+ while count != 0:
+ self.byte_pos = self.ustr.prev_char(self.byte_pos)
+ count -= 1
+ self.current = self._value(self.byte_pos)
+ elif count < 0:
+ self.pos -= count
+ while count != 0:
+ self.byte_pos = self.ustr.next_char(self.byte_pos)
+ count += 1
+ self.current = self._value(self.byte_pos)
+
+ def copy(self):
+ iter = self.__class__(self.ustr)
+ iter.pos = self.pos
+ iter.byte_pos = self.byte_pos
+ iter.current = self.current
+ return iter
+
+def make_iterator(name, base, calc_value, default):
+ class C(base):
+ _default = default
+ _value = calc_value
+ C.__name__ = name
+ return C
+
+def codepoint_calc_value(self, byte_pos):
+ if byte_pos == -1 or byte_pos == len(self.ustr.bytes):
+ return -1
+ return utf8ord_bytes(self.ustr.bytes, byte_pos)
+
+def character_calc_value(self, byte_pos):
+ if byte_pos == -1 or byte_pos == len(self.ustr.bytes):
+ return None
+ length = utf8_code_length[ord(self.ustr.bytes[self.byte_pos])]
+ return Utf8Str(''.join([self.ustr.bytes[i]
+ for i in range(self.byte_pos, self.byte_pos + length)]),
+ length == 1)
+
+Utf8CodePointIter = make_iterator("Utf8CodePointIter", ForwardIterBase,
+ codepoint_calc_value, -1)
+Utf8CharacterIter = make_iterator("Utf8CharacterIter", ForwardIterBase,
+ character_calc_value, None)
+Utf8ReverseCodePointIter = make_iterator(
+ "Utf8ReverseCodePointIter", ReverseIterBase, codepoint_calc_value, -1)
+Utf8ReverseCharacterIter = make_iterator(
+ "Utf8ReverseCharacterIter", ReverseIterBase, character_calc_value, None)
+
+del make_iterator
+del codepoint_calc_value
+del character_calc_value
+del ForwardIterBase
+del ReverseIterBase
diff --git a/pypy/interpreter/utf8_codecs.py b/pypy/interpreter/utf8_codecs.py
--- a/pypy/interpreter/utf8_codecs.py
+++ b/pypy/interpreter/utf8_codecs.py
@@ -208,7 +208,6 @@
pos = 0
while pos < size:
- #oc = ORD(s, pos)
oc = utf8ord(s, pos)
# Escape quotes
@@ -460,10 +459,10 @@
else:
return s.bytes
- iter.move(-1)
result = Utf8Builder(len(s.bytes))
result.append_slice(s.bytes, 0, iter.byte_pos)
+ iter.move(-1)
for oc in iter:
if oc >= 0xD800 and oc <= 0xDFFF:
# Check the next character to see if this is a surrogate pair
@@ -741,7 +740,6 @@
result = Utf8Builder(size // 2)
- #XXX I think the errors are not correctly handled here
while pos < size:
# remaining bytes at the end? (size should be even)
if len(s) - pos < 2:
@@ -869,7 +867,8 @@
def str_decode_utf_32_helper(s, size, errors, final=True,
errorhandler=None,
- byteorder="native"):
+ byteorder="native",
+ encodingname='utf32'):
if errorhandler is None:
errorhandler = default_unicode_error_decode
bo = 0
@@ -924,7 +923,7 @@
if len(s) - pos < 4:
if not final:
break
- r, pos = errorhandler(errors, 'utf32', "truncated data",
+ r, pos = errorhandler(errors, encodingname, "truncated data",
s, pos, len(s))
result.append(r)
if len(s) - pos < 4:
@@ -933,7 +932,8 @@
ch = ((ord(s[pos + iorder[3]]) << 24) | (ord(s[pos + iorder[2]]) << 16) |
(ord(s[pos + iorder[1]]) << 8) | ord(s[pos + iorder[0]]))
if ch >= 0x110000:
- r, pos = errorhandler(errors, 'utf32', "codepoint not in range(0x110000)",
+ r, pos = errorhandler(errors, encodingname,
+ "codepoint not in range(0x110000)",
s, pos, len(s))
result.append(r)
continue
@@ -1097,7 +1097,7 @@
if errorhandler is None:
errorhandler = default_unicode_error_decode
if size == 0:
- return u'', 0
+ return Utf8Str(''), 0
inShift = False
base64bits = 0
@@ -1345,9 +1345,12 @@
def str_decode_unicode_internal(s, size, errors, final=False,
errorhandler=None):
if BYTEORDER == 'little':
- return str_decode_utf_32_le(s, size, errors, errorhandler)
+ result, length, byteorder = str_decode_utf_32_helper(
+ s, size, errors, final, errorhandler, "little", "unicode_internal")
else:
- return str_decode_utf_32_be(s, size, errors, errorhandler)
+ result, length, byteorder = str_decode_utf_32_helper(
+ s, size, errors, final, errorhandler, "internal", "unicode_internal")
+ return result, length
def unicode_encode_unicode_internal(s, size, errors, errorhandler=None):
if BYTEORDER == 'little':
@@ -1561,6 +1564,7 @@
def default_unicode_error_decode(errors, encoding, msg, s,
startingpos, endingpos):
+ """NOT_RPYTHON"""
if errors == 'replace':
return _unicode_error_replacement, endingpos
if errors == 'ignore':
@@ -1570,9 +1574,10 @@
def default_unicode_error_encode(errors, encoding, msg, u,
startingpos, endingpos):
+ """NOT_RPYTHON"""
if errors == 'replace':
return '?', None, endingpos
if errors == 'ignore':
return '', None, endingpos
- raise UnicodeEncodeError(encoding, u, startingpos, endingpos, msg)
+ raise UnicodeEncodeError(encoding, unicode(u), startingpos, endingpos, msg)
diff --git a/pypy/objspace/std/bytearrayobject.py b/pypy/objspace/std/bytearrayobject.py
--- a/pypy/objspace/std/bytearrayobject.py
+++ b/pypy/objspace/std/bytearrayobject.py
@@ -9,6 +9,7 @@
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import WrappedDefault, interp2app, unwrap_spec
from pypy.interpreter.signature import Signature
+from pypy.interpreter.utf8_codecs import str_decode_latin_1
from pypy.objspace.std.sliceobject import W_SliceObject
from pypy.objspace.std.stdtypedef import StdTypeDef
from pypy.objspace.std.stringmethods import StringMethods, _get_buffer
@@ -154,9 +155,11 @@
w_dict = self.getdict(space)
if w_dict is None:
w_dict = space.w_None
+ ustr = str_decode_latin_1(''.join(self.data), len(self.data),
+ 'strict')[0]
return space.newtuple([
space.type(self), space.newtuple([
- space.wrap(''.join(self.data).decode('latin-1')),
+ space.wrap(ustr),
space.wrap('latin-1')]),
w_dict])
diff --git a/pypy/objspace/std/newformat.py b/pypy/objspace/std/newformat.py
--- a/pypy/objspace/std/newformat.py
+++ b/pypy/objspace/std/newformat.py
@@ -4,7 +4,9 @@
import string
from pypy.interpreter.error import OperationError, oefmt
-from pypy.interpreter.utf8 import Utf8Str, Utf8Builder, ORD
+from pypy.interpreter.utf8 import Utf8Str, Utf8Builder, ORD, utf8chr
+from pypy.interpreter.utf8_codecs import (
+ unicode_encode_latin_1, unicode_encode_ascii, str_decode_ascii)
from rpython.rlib import rstring, runicode, rlocale, rfloat, jit
from rpython.rlib.objectmodel import specialize
from rpython.rlib.rfloat import copysign, formatd
@@ -20,7 +22,7 @@
result = 0
i = start
while i < end:
- digit = ord(s[i]) - ord('0')
+ digit = ORD(s, i) - ord('0')
if 0 <= digit <= 9:
if result > (sys.maxint - digit) / 10:
raise oefmt(space.w_ValueError,
@@ -63,22 +65,24 @@
out = Utf8Builder()
else:
out = rstring.StringBuilder()
+
if not level:
raise OperationError(space.w_ValueError,
space.wrap("Recursion depth exceeded"))
level -= 1
- s = self.template
- return self._do_build_string(start, end, level, out, s)
+ return self._do_build_string(start, end, level, out, self.template)
@jit.look_inside_iff(lambda self, start, end, level, out, s: jit.isconstant(s))
def _do_build_string(self, start, end, level, out, s):
space = self.space
last_literal = i = start
+
while i < end:
c = ORD(s, i)
i += 1
if c == ord("{") or c == ord("}"):
at_end = i == end
+
# Find escaped "{" and "}"
markup_follows = True
if c == ord("}"):
@@ -87,6 +91,7 @@
space.wrap("Single '}'"))
i += 1
markup_follows = False
+
if c == ord("{"):
if at_end:
raise OperationError(space.w_ValueError,
@@ -94,6 +99,7 @@
if ORD(s, i) == ord("{"):
i += 1
markup_follows = False
+
# Attach literal data, ending with { or }
out.append_slice(s, last_literal, i - 1)
if not markup_follows:
@@ -101,6 +107,7 @@
end_literal = i - 1
assert end_literal > last_literal
literal = self.template[last_literal:end_literal]
+
w_entry = space.newtuple([
space.wrap(literal),
space.w_None, space.w_None, space.w_None])
@@ -108,6 +115,7 @@
self.last_end = i
last_literal = i
continue
+
nested = 1
field_start = i
recursive = False
@@ -121,6 +129,7 @@
if not nested:
break
i += 1
+
if nested:
raise OperationError(space.w_ValueError,
space.wrap("Unmatched '{'"))
@@ -139,41 +148,43 @@
# Find ":" or "!"
i = start
while i < end:
- c = s[i]
+ c = ORD(s, i)
if c == ord(":") or c == ord("!"):
end_name = i
+
if c == ord("!"):
i += 1
if i == end:
w_msg = self.space.wrap("expected conversion")
raise OperationError(self.space.w_ValueError, w_msg)
- conversion = s[i]
+
+ conversion = ORD(s, i)
i += 1
if i < end:
- if s[i] != ':':
+ if ORD(s, i) != ord(':'):
w_msg = self.space.wrap("expected ':' after"
" format specifier")
raise OperationError(self.space.w_ValueError,
w_msg)
i += 1
else:
- conversion = None
+ conversion = -1
i += 1
return s[start:end_name], conversion, i
i += 1
- return s[start:end], None, end
+ return s[start:end], -1, end
@jit.unroll_safe
def _get_argument(self, name):
# First, find the argument.
space = self.space
i = 0
- end = len(name)
- while i < end:
- c = name[i]
+ while i < len(name):
+ c = ORD(name, i)
if c == ord("[") or c == ord("."):
break
i += 1
+
empty = not i
if empty:
index = -1
@@ -181,12 +192,14 @@
index, stop = _parse_int(self.space, name, 0, i)
if stop != i:
index = -1
+
use_numeric = empty or index != -1
if self.auto_numbering_state == ANS_INIT and use_numeric:
if empty:
self.auto_numbering_state = ANS_AUTO
else:
self.auto_numbering_state = ANS_MANUAL
+
if use_numeric:
if self.auto_numbering_state == ANS_MANUAL:
if empty:
@@ -204,7 +217,8 @@
kwarg = name[:i]
if self.is_unicode:
try:
- arg_key = kwarg.encode("latin-1")
+ arg_key = unicode_encode_latin_1(kwarg, len(kwarg),
+ 'strict')
except UnicodeEncodeError:
# Not going to be found in a dict of strings.
raise OperationError(space.w_KeyError, space.wrap(kwarg))
@@ -220,7 +234,7 @@
except IndexError:
w_msg = space.wrap("index out of range")
raise OperationError(space.w_IndexError, w_msg)
- return self._resolve_lookups(w_arg, name, i, end)
+ return self._resolve_lookups(w_arg, name, i, len(name))
@jit.unroll_safe
def _resolve_lookups(self, w_obj, name, start, end):
@@ -228,15 +242,16 @@
space = self.space
i = start
while i < end:
- c = name[i]
+ c = ORD(name, i)
if c == ord("."):
i += 1
start = i
while i < end:
- c = name[i]
+ c = ORD(name, i)
if c == ord("[") or c == ord("."):
break
i += 1
+
if start == i:
w_msg = space.wrap("Empty attribute in format string")
raise OperationError(space.w_ValueError, w_msg)
@@ -247,18 +262,17 @@
self.parser_list_w.append(space.newtuple([
space.w_True, w_attr]))
elif c == ord("["):
- got_bracket = False
i += 1
start = i
while i < end:
- c = name[i]
+ c = ORD(name, i)
if c == ord("]"):
- got_bracket = True
break
i += 1
- if not got_bracket:
+ else:
raise OperationError(space.w_ValueError,
space.wrap("Missing ']'"))
+
index, reached = _parse_int(self.space, name, start, i)
if index != -1 and reached == i:
w_item = space.wrap(index)
@@ -285,29 +299,30 @@
if c == ord("[") or c == ord("."):
break
i += 1
+
if i == 0:
index = -1
else:
index, stop = _parse_int(self.space, name, 0, i)
if stop != i:
index = -1
+
if index >= 0:
w_first = space.wrap(index)
else:
w_first = space.wrap(name[:i])
- #
+
self.parser_list_w = []
self._resolve_lookups(None, name, i, end)
- #
+
return space.newtuple([w_first,
space.iter(space.newlist(self.parser_list_w))])
def _convert(self, w_obj, conversion):
space = self.space
- conv = ORD(conversion, 0)
- if conv == ord("r"):
+ if conversion == ord("r"):
return space.repr(w_obj)
- elif conv == ord("s"):
+ elif conversion == ord("s"):
if self.is_unicode:
return space.call_function(space.w_unicode, w_obj)
return space.str(w_obj)
@@ -318,7 +333,7 @@
def _render_field(self, start, end, recursive, level):
name, conversion, spec_start = self._parse_field(start, end)
spec = self.template[spec_start:end]
- #
+
if self.parser_list_w is not None:
# used from formatter_parser()
if level == 1: # ignore recursive calls
@@ -333,12 +348,13 @@
self.parser_list_w.append(w_entry)
self.last_end = end + 1
return self.empty
- #
+
w_obj = self._get_argument(name)
- if conversion is not None:
+ if conversion != -1:
w_obj = self._convert(w_obj, conversion)
if recursive:
spec = self._build_string(spec_start, end, level)
+
w_rendered = self.space.format(w_obj, self.space.wrap(spec))
unwrapper = "unicode_w" if self.is_unicode else "str_w"
to_interp = getattr(self.space, unwrapper)
@@ -348,7 +364,7 @@
self.parser_list_w = []
self.last_end = 0
self._build_string(0, len(self.template), 2)
- #
+
space = self.space
if self.last_end < len(self.template):
w_lastentry = space.newtuple([
@@ -413,7 +429,7 @@
def __init__(self, space, is_unicode, spec):
self.space = space
self.is_unicode = is_unicode
- self.empty = u"" if is_unicode else ""
+ self.empty = Utf8Str("") if is_unicode else ""
self.spec = spec
def _is_alignment(self, c):
@@ -429,78 +445,76 @@
def _parse_spec(self, default_type, default_align):
space = self.space
- self._fill_char = self._lit("\0")[0]
- self._align = default_align
+ self._fill_char = ord("\0")
+
+ self._align = ord(default_align)
self._alternate = False
- self._sign = "\0"
+ self._sign = ord("\0")
self._thousands_sep = False
self._precision = -1
- the_type = default_type
+
spec = self.spec
if not spec:
return True
+
length = len(spec)
i = 0
got_align = True
- if length - i >= 2 and self._is_alignment(spec[i + 1]):
- self._align = spec[i + 1]
- self._fill_char = spec[i]
+
+ if length - i >= 2 and self._is_alignment(ORD(spec, i + 1)):
+ self._align = ORD(spec, i + 1)
+ self._fill_char = ORD(spec, i)
i += 2
- elif length - i >= 1 and self._is_alignment(spec[i]):
- self._align = spec[i]
+ elif length - i >= 1 and self._is_alignment(ORD(spec, i)):
+ self._align = ORD(spec, i)
i += 1
else:
got_align = False
- if length - i >= 1 and self._is_sign(spec[i]):
- self._sign = spec[i]
+
+ if length - i >= 1 and self._is_sign(ORD(spec, i)):
+ self._sign = ORD(spec, i)
i += 1
- if length - i >= 1 and spec[i] == "#":
+ if length - i >= 1 and ORD(spec, i) == ord("#"):
self._alternate = True
i += 1
- if self._fill_char == "\0" and length - i >= 1 and spec[i] == "0":
- self._fill_char = self._lit("0")[0]
+
+ if (self._fill_char == ord("\0") and length - i >= 1 and
+ ORD(spec, i) == ord("0")):
+ self._fill_char = ord("0")
if not got_align:
- self._align = "="
+ self._align = ord("=")
i += 1
+
self._width, i = _parse_int(self.space, spec, i, length)
- if length != i and spec[i] == ",":
+ if length != i and ORD(spec, i) == ord(","):
self._thousands_sep = True
i += 1
- if length != i and spec[i] == ".":
+ if length != i and ORD(spec, i) == ord("."):
i += 1
self._precision, i = _parse_int(self.space, spec, i, length)
if self._precision == -1:
raise OperationError(space.w_ValueError,
space.wrap("no precision given"))
+
if length - i > 1:
raise OperationError(space.w_ValueError,
space.wrap("invalid format spec"))
if length - i == 1:
- presentation_type = spec[i]
if self.is_unicode:
try:
- the_type = spec[i].encode("ascii")[0]
+ self._type = unicode_encode_ascii(spec[i], 1, 'strict')[0]
except UnicodeEncodeError:
raise OperationError(space.w_ValueError,
space.wrap("invalid presentation type"))
else:
- the_type = presentation_type
+ self._type = spec[i]
i += 1
- self._type = the_type
+ else:
+ self._type = default_type
+
if self._thousands_sep:
- tp = self._type
- if (tp == "d" or
- tp == "e" or
- tp == "f" or
- tp == "g" or
- tp == "E" or
- tp == "G" or
- tp == "%" or
- tp == "F" or
- tp == "\0"):
- # ok
- pass
- else:
+ if self._type not in ('d', 'e', 'f', 'g', 'E', 'G', '%', 'F',
+ '\0'):
raise OperationError(space.w_ValueError,
space.wrap("invalid type with ','"))
return False
@@ -511,12 +525,13 @@
total = self._width
else:
total = length
+
align = self._align
- if align == ">":
+ if align == ord(">"):
left = total - length
- elif align == "^":
+ elif align == ord("^"):
left = (total - length) / 2
- elif align == "<" or align == "=":
+ elif align == ord("<") or align == ord("="):
left = 0
else:
raise AssertionError("shouldn't be here")
@@ -525,22 +540,16 @@
self._right_pad = right
return total
- def _lit(self, s):
- if self.is_unicode:
- return s.decode("ascii")
- else:
- return s
-
def _pad(self, string):
builder = self._builder()
- builder.append_multiple_char(self._fill_char, self._left_pad)
+ builder.append_multiple_char(chr(self._fill_char), self._left_pad)
builder.append(string)
- builder.append_multiple_char(self._fill_char, self._right_pad)
+ builder.append_multiple_char(chr(self._fill_char), self._right_pad)
return builder.build()
def _builder(self):
if self.is_unicode:
- return rstring.UnicodeBuilder()
+ return Utf8Builder()
else:
return rstring.StringBuilder()
@@ -555,23 +564,25 @@
return space.wrap(string)
if self._type != "s":
self._unknown_presentation("string")
- if self._sign != "\0":
+ if self._sign != ord("\0"):
msg = "Sign not allowed in string format specifier"
raise OperationError(space.w_ValueError, space.wrap(msg))
if self._alternate:
msg = "Alternate form not allowed in string format specifier"
raise OperationError(space.w_ValueError, space.wrap(msg))
- if self._align == "=":
+ if self._align == ord("="):
msg = "'=' alignment not allowed in string format specifier"
raise OperationError(space.w_ValueError, space.wrap(msg))
+
length = len(string)
precision = self._precision
if precision != -1 and length >= precision:
assert precision >= 0
length = precision
string = string[:precision]
- if self._fill_char == "\0":
- self._fill_char = self._lit(" ")[0]
+
+ if self._fill_char == ord("\0"):
+ self._fill_char = ord(" ")
self._calc_padding(string, length)
return space.wrap(self._pad(string))
@@ -586,9 +597,11 @@
dec = "."
thousands = ""
grouping = "\256"
+
if self.is_unicode:
- self._loc_dec = dec.decode("ascii")
- self._loc_thousands = thousands.decode("ascii")
+ self._loc_dec = str_decode_ascii(dec, len(dec), 'strict')[0]
+ self._loc_thousands = str_decode_ascii(
+ thousands, len(thousands), 'strict')[0]
else:
self._loc_dec = dec
self._loc_thousands = thousands
@@ -617,41 +630,45 @@
spec.n_rpadding = 0
spec.n_min_width = 0
spec.n_total = 0
- spec.sign = "\0"
+ spec.sign = ord("\0")
spec.n_sign = 0
+
sign = self._sign
- if sign == "+":
+ if sign == ord("+"):
spec.n_sign = 1
- spec.sign = "-" if sign_char == "-" else "+"
- elif sign == " ":
+ spec.sign = ord("-") if sign_char == "-" else ord("+")
+ elif sign == ord(" "):
spec.n_sign = 1
- spec.sign = "-" if sign_char == "-" else " "
+ spec.sign = ord("-") if sign_char == "-" else ord(" ")
elif sign_char == "-":
spec.n_sign = 1
- spec.sign = "-"
+ spec.sign = ord("-")
extra_length = (spec.n_sign + spec.n_prefix + spec.n_decimal +
spec.n_remainder) # Not padding or digits
- if self._fill_char == "0" and self._align == "=":
+
+ if self._fill_char == ord("0") and self._align == ord("="):
spec.n_min_width = self._width - extra_length
if self._loc_thousands:
self._group_digits(spec, digits[to_number:])
n_grouped_digits = len(self._grouped_digits)
else:
n_grouped_digits = spec.n_digits
+
n_padding = self._width - (extra_length + n_grouped_digits)
if n_padding > 0:
align = self._align
- if align == "<":
+ if align == ord("<"):
spec.n_rpadding = n_padding
- elif align == ">":
+ elif align == ord(">"):
spec.n_lpadding = n_padding
- elif align == "^":
+ elif align == ord("^"):
spec.n_lpadding = n_padding // 2
spec.n_rpadding = n_padding - spec.n_lpadding
- elif align == "=":
+ elif align == ord("="):
spec.n_spadding = n_padding
else:
raise AssertionError("shouldn't reach")
+
spec.n_total = spec.n_lpadding + spec.n_sign + spec.n_prefix + \
spec.n_spadding + n_grouped_digits + \
spec.n_decimal + spec.n_remainder + spec.n_rpadding
@@ -720,21 +737,28 @@
def _fill_number(self, spec, num, to_digits, to_prefix, fill_char,
to_remainder, upper, grouped_digits=None):
out = self._builder()
+
if spec.n_lpadding:
- out.append_multiple_char(fill_char[0], spec.n_lpadding)
+ out.append_multiple_char(chr(fill_char), spec.n_lpadding)
+
if spec.n_sign:
if self.is_unicode:
- sign = spec.sign.decode("ascii")
+ # TODO: A better way to do this might be to check if
+ # spec.sign < 127 ...
+ sign = str_decode_ascii(chr(spec.sign), 1, 'strict')[0]
else:
- sign = spec.sign
+ sign = chr(spec.sign)
out.append(sign)
+
if spec.n_prefix:
pref = num[to_prefix:to_prefix + spec.n_prefix]
if upper:
pref = self._upcase_string(pref)
out.append(pref)
+
if spec.n_spadding:
- out.append_multiple_char(fill_char[0], spec.n_spadding)
+ out.append_multiple_char(chr(fill_char), spec.n_spadding)
+
if spec.n_digits != 0:
if self._loc_thousands:
if grouped_digits is not None:
@@ -749,12 +773,13 @@
if upper:
digits = self._upcase_string(digits)
out.append(digits)
+
if spec.n_decimal:
- out.append(self._lit(".")[0])
+ out.append(".")
if spec.n_remainder:
out.append(num[to_remainder:])
if spec.n_rpadding:
- out.append_multiple_char(fill_char[0], spec.n_rpadding)
+ out.append_multiple_char(chr(fill_char), spec.n_rpadding)
    #if complex, need to call twice - just return the buffer
return out.build()
@@ -764,14 +789,14 @@
msg = "precision not allowed in integer type"
raise OperationError(space.w_ValueError, space.wrap(msg))
sign_char = "\0"
- tp = self._type
- if tp == "c":
- if self._sign != "\0":
+
+ if self._type == "c":
+ if self._sign != ord("\0"):
msg = "sign not allowed with 'c' presentation type"
raise OperationError(space.w_ValueError, space.wrap(msg))
value = space.int_w(w_num)
if self.is_unicode:
- result = runicode.UNICHR(value)
+ result = utf8chr(value)
else:
result = chr(value)
n_digits = 1
@@ -781,16 +806,16 @@
to_prefix = 0
to_numeric = 0
else:
- if tp == "b":
+ if self._type == "b":
base = 2
skip_leading = 2
- elif tp == "o":
+ elif self._type == "o":
base = 8
skip_leading = 2
- elif tp == "x" or tp == "X":
+ elif self._type == "x" or self._type == "X":
base = 16
skip_leading = 2
- elif tp == "n" or tp == "d":
+ elif self._type == "n" or self._type == "d":
base = 10
skip_leading = 0
else:
@@ -801,7 +826,7 @@
result = self._long_to_base(base, space.bigint_w(w_num))
n_prefix = skip_leading if self._alternate else 0
to_prefix = 0
- if result[0] == "-":
+ if ORD(result, 0) == ord("-"):
sign_char = "-"
skip_leading += 1
to_prefix += 1
@@ -809,10 +834,10 @@
n_remainder = 0
to_remainder = 0
to_numeric = skip_leading
- self._get_locale(tp)
+ self._get_locale(self._type)
spec = self._calc_num_width(n_prefix, sign_char, to_numeric, n_digits,
n_remainder, False, result)
- fill = self._lit(" ") if self._fill_char == "\0" else self._fill_char
+ fill = ord(" ") if self._fill_char == ord("\0") else self._fill_char
upper = self._type == "X"
return self.space.wrap(self._fill_number(spec, result, to_numeric,
to_prefix, fill, to_remainder, upper))
@@ -827,14 +852,14 @@
prefix = "0x"
as_str = value.format(LONG_DIGITS[:base], prefix)
if self.is_unicode:
- return as_str.decode("ascii")
+ return str_decode_ascii(as_str, len(as_str), 'strict')[0]
return as_str
def _int_to_base(self, base, value):
if base == 10:
s = str(value)
if self.is_unicode:
- return s.decode("ascii")
+ return str_decode_ascii(s, len(s), 'strict')[0]
return s
# This part is slow.
negative = value < 0
@@ -879,22 +904,10 @@
if self.is_unicode:
return space.call_function(space.w_unicode, w_num)
return self.space.str(w_num)
- tp = self._type
- if (tp == "b" or
- tp == "c" or
- tp == "d" or
- tp == "o" or
- tp == "x" or
- tp == "X" or
- tp == "n"):
+
+ if self._type in ("b", "c", "d", "o", "x", "X", "n"):
return self._format_int_or_long(w_num, kind)
- elif (tp == "e" or
- tp == "E" or
- tp == "f" or
- tp == "F" or
- tp == "g" or
- tp == "G" or
- tp == "%"):
+ elif self._type in ("e", "E", "f", "F", "g", "G", "%"):
w_float = space.float(w_num)
return self._format_float(w_float)
else:
@@ -921,6 +934,7 @@
if self._alternate:
msg = "alternate form not allowed in float formats"
raise OperationError(space.w_ValueError, space.wrap(msg))
+
tp = self._type
self._get_locale(tp)
if tp == "\0":
@@ -929,6 +943,7 @@
flags |= rfloat.DTSF_ADD_DOT_0
elif tp == "n":
tp = "g"
+
value = space.float_w(w_float)
if tp == "%":
tp = "f"
@@ -936,6 +951,7 @@
add_pct = True
else:
add_pct = False
+
if self._precision == -1:
self._precision = default_precision
result, special = rfloat.double_to_string(value, tp,
@@ -943,22 +959,26 @@
if add_pct:
result += "%"
n_digits = len(result)
- if result[0] == "-":
+
+ if ORD(result, 0) == ord("-"):
sign = "-"
to_number = 1
n_digits -= 1
else:
sign = "\0"
to_number = 0
+
have_dec_point, to_remainder = self._parse_number(result, to_number)
n_remainder = len(result) - to_remainder
+
if self.is_unicode:
- digits = result.decode("ascii")
+ digits = str_decode_ascii(result , len(result), 'strict')[0]
else:
digits = result
+
spec = self._calc_num_width(0, sign, to_number, n_digits,
n_remainder, have_dec_point, digits)
- fill = self._lit(" ") if self._fill_char == "\0" else self._fill_char
+ fill = ord(" ") if self._fill_char == ord("\0") else self._fill_char
return self.space.wrap(self._fill_number(spec, digits, to_number, 0,
fill, to_remainder, False))
@@ -968,30 +988,23 @@
if self.is_unicode:
return space.call_function(space.w_unicode, w_float)
return space.str(w_float)
- tp = self._type
- if (tp == "\0" or
- tp == "e" or
- tp == "E" or
- tp == "f" or
- tp == "F" or
- tp == "g" or
- tp == "G" or
- tp == "n" or
- tp == "%"):
+
+ if self._type in ("\0", "e", "E", "f", "F", "g", "G", "n", "%"):
return self._format_float(w_float)
self._unknown_presentation("float")
def _format_complex(self, w_complex):
space = self.space
+
tp = self._type
self._get_locale(tp)
default_precision = 6
- if self._align == "=":
+ if self._align == ord("="):
# '=' alignment is invalid
msg = ("'=' alignment flag is not allowed in"
" complex format specifier")
raise OperationError(space.w_ValueError, space.wrap(msg))
- if self._fill_char == "0":
+ if self._fill_char == ord("0"):
#zero padding is invalid
msg = "Zero padding is not allowed in complex format specifier"
raise OperationError(space.w_ValueError, space.wrap(msg))
@@ -1047,7 +1060,7 @@
tmp_fill_char = self._fill_char
tmp_align = self._align
tmp_width = self._width
- self._fill_char = "\0"
+ self._fill_char = ord("\0")
self._align = "<"
self._width = -1
@@ -1058,8 +1071,8 @@
to_imag_number)
if self.is_unicode:
- re_num = re_num.decode("ascii")
- im_num = im_num.decode("ascii")
+ re_num = str_decode_ascii(re_num, len(re_num), 'strict')[0]
+ im_num = str_decode_ascii(im_num, len(im_num), 'strict')[0]
#set remainder, in CPython _parse_number sets this
#using n_re_digits causes tests to fail
@@ -1073,7 +1086,7 @@
#self._grouped_digits will get overwritten in imaginary calc_num_width
re_grouped_digits = self._grouped_digits
if not skip_re:
- self._sign = "+"
+ self._sign = ord("+")
im_spec = self._calc_num_width(0, im_sign, to_imag_number, n_im_digits,
im_n_remainder, im_have_dec,
im_num)
@@ -1093,14 +1106,14 @@
out = self._builder()
fill = self._fill_char
- if fill == "\0":
- fill = self._lit(" ")[0]
+ if fill == ord("\0"):
+ fill = ord(" ")
#compose the string
#add left padding
- out.append_multiple_char(fill, self._left_pad)
+ out.append_multiple_char(chr(fill), self._left_pad)
if add_parens:
- out.append(self._lit('(')[0])
+ out.append('(')
#if the no. has a real component, add it
if not skip_re:
@@ -1114,13 +1127,13 @@
im_grouped_digits))
#add 'j' character
- out.append(self._lit('j')[0])
+ out.append('j')
if add_parens:
- out.append(self._lit(')')[0])
+ out.append(')')
#add right padding
- out.append_multiple_char(fill, self._right_pad)
+ out.append_multiple_char(chr(fill), self._right_pad)
return self.space.wrap(out.build())
@@ -1131,15 +1144,8 @@
#parse format specification, set associated variables
if self._parse_spec("\0", ">"):
return space.str(w_complex)
- tp = self._type
- if (tp == "\0" or
- tp == "e" or
- tp == "E" or
- tp == "f" or
- tp == "F" or
- tp == "g" or
- tp == "G" or
- tp == "n"):
+
+ if self._type in ('\0', 'e', 'E', 'f', 'F', 'g', 'G', 'n'):
return self._format_complex(w_complex)
self._unknown_presentation("complex")
return Formatter
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -162,9 +162,6 @@
if isinstance(x, Utf8Str):
return wrapunicode(self, x)
- if isinstance(x, unicode):
- import pdb; pdb.set_trace()
-
if isinstance(x, float):
return W_FloatObject(x)
if isinstance(x, W_Root):
diff --git a/pypy/objspace/std/stringmethods.py b/pypy/objspace/std/stringmethods.py
--- a/pypy/objspace/std/stringmethods.py
+++ b/pypy/objspace/std/stringmethods.py
@@ -585,13 +585,13 @@
by = self._op_val(space, w_sep)
if len(by) == 0:
raise oefmt(space.w_ValueError, "empty separator")
- res = self._split(value, by, maxsplit)
+ res = self._rsplit(value, by, maxsplit)
return self._newlist_unwrapped(space, res)
@staticmethod
def _rsplit(value, sep=None, maxsplit=-1):
- return value.split(sep, maxsplit)
+ return rsplit(value, sep, maxsplit)
@unwrap_spec(keepends=bool)
def descr_splitlines(self, space, keepends=False):
@@ -606,7 +606,8 @@
eol = pos
pos += 1
# read CRLF as one line break
- if pos < length and value[eol] == '\r' and value[pos] == '\n':
+ if (pos < length and ORD(value, eol) == ord('\r') and
+ ORD(value, pos) == ord('\n')):
pos += 1
if keepends:
eol = pos
diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py
--- a/pypy/objspace/std/test/test_dictmultiobject.py
+++ b/pypy/objspace/std/test/test_dictmultiobject.py
@@ -3,6 +3,7 @@
from pypy.objspace.std.dictmultiobject import (W_DictMultiObject,
BytesDictStrategy, ObjectDictStrategy)
+from pypy.interpreter.utf8 import Utf8Str
class TestW_DictObject(object):
@@ -142,8 +143,9 @@
def test_listview_unicode_dict(self):
w = self.space.wrap
+ w_u = lambda x: w(Utf8Str.from_unicode(x))
w_d = self.space.newdict()
- w_d.initialize_content([(w(u"a"), w(1)), (w(u"b"), w(2))])
+ w_d.initialize_content([(w_u(u"a"), w(1)), (w_u(u"b"), w(2))])
assert self.space.listview_unicode(w_d) == [u"a", u"b"]
def test_listview_int_dict(self):
@@ -154,7 +156,8 @@
def test_keys_on_string_unicode_int_dict(self, monkeypatch):
w = self.space.wrap
-
+ w_u = lambda x: w(Utf8Str.from_unicode(x))
+
w_d = self.space.newdict()
w_d.initialize_content([(w(1), w("a")), (w(2), w("b"))])
w_l = self.space.call_method(w_d, "keys")
@@ -174,7 +177,7 @@
# but we need space.newlist_unicode for it
monkeypatch.undo()
w_d = self.space.newdict()
- w_d.initialize_content([(w(u"a"), w(1)), (w(u"b"), w(6))])
+ w_d.initialize_content([(w_u(u"a"), w(1)), (w_u(u"b"), w(6))])
w_l = self.space.call_method(w_d, "keys")
assert sorted(self.space.listview_unicode(w_l)) == [u"a", u"b"]
diff --git a/pypy/objspace/std/test/test_index.py b/pypy/objspace/std/test/test_index.py
--- a/pypy/objspace/std/test/test_index.py
+++ b/pypy/objspace/std/test/test_index.py
@@ -1,5 +1,7 @@
from py.test import raises
+from pypy.interpreter.utf8 import Utf8Str
+
class AppTest_IndexProtocol:
def setup_class(self):
w_oldstyle = self.space.appexec([], """():
@@ -263,7 +265,7 @@
class AppTest_UnicodeTestCase(SeqTestCase, StringTestCase):
def setup_method(self, method):
SeqTestCase.setup_method(self, method)
- self.w_seq = self.space.wrap(u"this is a test")
+ self.w_seq = self.space.wrap(Utf8Str.from_unicode(u"this is a test"))
self.w_const = self.space.appexec([], """(): return unicode""")
diff --git a/pypy/objspace/std/test/test_lengthhint.py b/pypy/objspace/std/test/test_lengthhint.py
--- a/pypy/objspace/std/test/test_lengthhint.py
+++ b/pypy/objspace/std/test/test_lengthhint.py
@@ -1,3 +1,4 @@
+from pypy.interpreter.utf8 import Utf8Str
from pypy.module._collections.interp_deque import W_Deque
from pypy.module.itertools.interp_itertools import W_Repeat
@@ -71,7 +72,7 @@
self._test_length_hint(self.space.wrap('P' * self.SIZE))
def test_unicode(self):
- self._test_length_hint(self.space.wrap(u'Y' * self.SIZE))
+ self._test_length_hint(self.space.wrap(Utf8Str('Y' * self.SIZE)))
def test_tuple(self):
self._test_length_hint(self.space.wrap(tuple(self.ITEMS)))
diff --git a/pypy/objspace/std/test/test_liststrategies.py b/pypy/objspace/std/test/test_liststrategies.py
--- a/pypy/objspace/std/test/test_liststrategies.py
+++ b/pypy/objspace/std/test/test_liststrategies.py
@@ -1,4 +1,5 @@
import sys
+from pypy.interpreter.utf8 import Utf8Str
from pypy.objspace.std.listobject import (
W_ListObject, EmptyListStrategy, ObjectListStrategy, IntegerListStrategy,
FloatListStrategy, BytesListStrategy, RangeListStrategy,
@@ -11,20 +12,22 @@
def test_check_strategy(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
assert isinstance(W_ListObject(space, []).strategy, EmptyListStrategy)
assert isinstance(W_ListObject(space, [w(1),w('a')]).strategy, ObjectListStrategy)
assert isinstance(W_ListObject(space, [w(1),w(2),w(3)]).strategy,
IntegerListStrategy)
assert isinstance(W_ListObject(space, [w('a'), w('b')]).strategy,
BytesListStrategy)
- assert isinstance(W_ListObject(space, [w(u'a'), w(u'b')]).strategy,
+ assert isinstance(W_ListObject(space, [w_u('a'), w_u('b')]).strategy,
UnicodeListStrategy)
- assert isinstance(W_ListObject(space, [w(u'a'), w('b')]).strategy,
+ assert isinstance(W_ListObject(space, [w_u('a'), w('b')]).strategy,
ObjectListStrategy) # mixed unicode and bytes
def test_empty_to_any(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
l = W_ListObject(space, [])
assert isinstance(l.strategy, EmptyListStrategy)
l.append(w((1,3)))
@@ -42,7 +45,7 @@
l = W_ListObject(space, [])
assert isinstance(l.strategy, EmptyListStrategy)
- l.append(w(u'a'))
+ l.append(w_u('a'))
assert isinstance(l.strategy, UnicodeListStrategy)
l = W_ListObject(space, [])
@@ -70,9 +73,10 @@
def test_unicode_to_any(self):
space = self.space
- l = W_ListObject(space, [space.wrap(u'a'), space.wrap(u'b'), space.wrap(u'c')])
+ w_u = lambda x: space.wrap(Utf8Str(x))
+ l = W_ListObject(space, [w_u('a'), w_u('b'), w_u('c')])
assert isinstance(l.strategy, UnicodeListStrategy)
- l.append(space.wrap(u'd'))
+ l.append(w_u('d'))
assert isinstance(l.strategy, UnicodeListStrategy)
l.append(space.wrap(3))
assert isinstance(l.strategy, ObjectListStrategy)
@@ -89,6 +93,7 @@
def test_setitem(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
# This should work if test_listobject.py passes
l = W_ListObject(space, [w('a'),w('b'),w('c')])
assert space.eq_w(l.getitem(0), w('a'))
@@ -110,7 +115,7 @@
assert isinstance(l.strategy, ObjectListStrategy)
# UnicodeStrategy to ObjectStrategy
- l = W_ListObject(space, [w(u'a'),w(u'b'),w(u'c')])
+ l = W_ListObject(space, [w_u('a'),w_u('b'),w_u('c')])
assert isinstance(l.strategy, UnicodeListStrategy)
l.setitem(0, w(2))
assert isinstance(l.strategy, ObjectListStrategy)
@@ -124,6 +129,7 @@
def test_insert(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
# no change
l = W_ListObject(space, [w(1),w(2),w(3)])
assert isinstance(l.strategy, IntegerListStrategy)
@@ -137,7 +143,7 @@
assert isinstance(l.strategy, ObjectListStrategy)
# UnicodeStrategy
- l = W_ListObject(space, [w(u'a'),w(u'b'),w(u'c')])
+ l = W_ListObject(space, [w_u('a'),w_u('b'),w_u('c')])
assert isinstance(l.strategy, UnicodeListStrategy)
l.insert(3, w(2))
assert isinstance(l.strategy, ObjectListStrategy)
@@ -186,6 +192,7 @@
def test_setslice(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
l = W_ListObject(space, [])
assert isinstance(l.strategy, EmptyListStrategy)
@@ -217,7 +224,7 @@
assert isinstance(l.strategy, ObjectListStrategy)
# UnicodeStrategy to ObjectStrategy
- l = W_ListObject(space, [w(u'a'), w(u'b'), w(u'c')])
+ l = W_ListObject(space, [w_u('a'), w_u('b'), w_u('c')])
assert isinstance(l.strategy, UnicodeListStrategy)
l.setslice(0, 1, 2, W_ListObject(space, [w(1), w(2), w(3)]))
assert isinstance(l.strategy, ObjectListStrategy)
@@ -242,6 +249,8 @@
def wrapitems(items):
items_w = []
for i in items:
+ if isinstance(i, unicode):
+ i = Utf8Str.from_unicode(i)
items_w.append(space.wrap(i))
return items_w
@@ -323,6 +332,7 @@
def test_empty_extend_with_any(self):
space = self.space
w = space.wrap
+ w_u = lambda x: w(Utf8Str(x))
empty = W_ListObject(space, [])
assert isinstance(empty.strategy, EmptyListStrategy)
@@ -336,7 +346,7 @@
empty = W_ListObject(space, [])
assert isinstance(empty.strategy, EmptyListStrategy)
- empty.extend(W_ListObject(space, [w(u"a"), w(u"b"), w(u"c")]))
+ empty.extend(W_ListObject(space, [w_u("a"), w_u("b"), w_u("c")]))
assert isinstance(empty.strategy, UnicodeListStrategy)
empty = W_ListObject(space, [])
@@ -588,11 +598,12 @@
assert self.space.eq_w(l3, W_ListObject(self.space, [self.space.wrap(1), self.space.wrap(2), self.space.wrap(3), self.space.wrap(4), self.space.wrap(5)]))
def test_unicode(self):
+ wrap_u = lambda x: self.space.wrap(Utf8Str(x))
l1 = W_ListObject(self.space, [self.space.wrap("eins"), self.space.wrap("zwei")])
assert isinstance(l1.strategy, BytesListStrategy)
- l2 = W_ListObject(self.space, [self.space.wrap(u"eins"), self.space.wrap(u"zwei")])
+ l2 = W_ListObject(self.space, [wrap_u("eins"), wrap_u("zwei")])
assert isinstance(l2.strategy, UnicodeListStrategy)
- l3 = W_ListObject(self.space, [self.space.wrap("eins"), self.space.wrap(u"zwei")])
+ l3 = W_ListObject(self.space, [self.space.wrap("eins"), wrap_u(u"zwei")])
assert isinstance(l3.strategy, ObjectListStrategy)
def test_listview_bytes(self):
@@ -603,20 +614,22 @@
def test_listview_unicode(self):
space = self.space
+ wrap_u = lambda x: self.space.wrap(Utf8Str(x))
assert space.listview_unicode(space.wrap(1)) == None
- w_l = self.space.newlist([self.space.wrap(u'a'), self.space.wrap(u'b')])
+ w_l = self.space.newlist([wrap_u('a'), wrap_u('b')])
assert space.listview_unicode(w_l) == [u"a", u"b"]
def test_string_join_uses_listview_bytes(self):
space = self.space
+ wrap_u = lambda x: self.space.wrap(Utf8Str(x))
w_l = self.space.newlist([self.space.wrap('a'), self.space.wrap('b')])
w_l.getitems = None
assert space.str_w(space.call_method(space.wrap("c"), "join", w_l)) == "acb"
#
# the same for unicode
- w_l = self.space.newlist([self.space.wrap(u'a'), self.space.wrap(u'b')])
+ w_l = self.space.newlist([wrap_u('a'), wrap_u('b')])
w_l.getitems = None
- assert space.unicode_w(space.call_method(space.wrap(u"c"), "join", w_l)) == u"acb"
+ assert space.unicode_w(space.call_method(wrap_u("c"), "join", w_l)) == u"acb"
def test_string_join_returns_same_instance(self):
space = self.space
@@ -626,10 +639,11 @@
assert space.is_w(space.call_method(space.wrap(" -- "), "join", w_l), w_text)
#
# the same for unicode
- w_text = space.wrap(u"text")
+ w_base = space.wrap(Utf8Str(" -- "))
+ w_text = space.wrap(Utf8Str("text"))
w_l = self.space.newlist([w_text])
w_l.getitems = None
- assert space.is_w(space.call_method(space.wrap(u" -- "), "join", w_l), w_text)
+ assert space.is_w(space.call_method(w_base, "join", w_l), w_text)
def test_newlist_bytes(self):
space = self.space
@@ -656,7 +670,7 @@
def test_unicode_uses_newlist_unicode(self):
space = self.space
- w_u = space.wrap(u"a b c")
+ w_u = space.wrap(Utf8Str("a b c"))
space.newlist = None
try:
w_l = space.call_method(w_u, "split")
@@ -711,7 +725,8 @@
def test_listview_unicode_list(self):
space = self.space
- w_l = W_ListObject(space, [space.wrap(u"a"), space.wrap(u"b")])
+ wrap_u = lambda x: self.space.wrap(Utf8Str(x))
+ w_l = W_ListObject(space, [wrap_u("a"), wrap_u("b")])
assert self.space.listview_unicode(w_l) == [u"a", u"b"]
def test_listview_int_list(self):
diff --git a/pypy/objspace/std/test/test_newformat.py b/pypy/objspace/std/test/test_newformat.py
--- a/pypy/objspace/std/test/test_newformat.py
+++ b/pypy/objspace/std/test/test_newformat.py
@@ -382,30 +382,30 @@
assert l == [('abcd', None, None, None)]
#
l = list('ab{0}cd'._formatter_parser())
- assert l == [('ab', '0', '', None), ('cd', None, None, None)]
+ assert l == [('ab', '0', '', -1), ('cd', None, None, None)]
#
l = list('{0}cd'._formatter_parser())
- assert l == [('', '0', '', None), ('cd', None, None, None)]
+ assert l == [('', '0', '', -1), ('cd', None, None, None)]
#
l = list('ab{0}'._formatter_parser())
- assert l == [('ab', '0', '', None)]
+ assert l == [('ab', '0', '', -1)]
#
l = list(''._formatter_parser())
assert l == []
#
l = list('{0:123}'._formatter_parser())
- assert l == [('', '0', '123', None)]
+ assert l == [('', '0', '123', -1)]
#
l = list('{0!x:123}'._formatter_parser())
- assert l == [('', '0', '123', 'x')]
+ assert l == [('', '0', '123', ord('x'))]
#
l = list('{0!x:12{sdd}3}'._formatter_parser())
- assert l == [('', '0', '12{sdd}3', 'x')]
+ assert l == [('', '0', '12{sdd}3', ord('x'))]
def test_u_formatter_parser(self):
l = list(u'{0!x:12{sdd}3}'._formatter_parser())
- assert l == [(u'', u'0', u'12{sdd}3', u'x')]
- for x in l[0]:
+ assert l == [(u'', u'0', u'12{sdd}3', ord(u'x'))]
+ for x in l[0][:-1]:
assert isinstance(x, unicode)
def test_formatter_parser_escape(self):
From noreply at buildbot.pypy.org Tue Jul 8 09:49:01 2014
From: noreply at buildbot.pypy.org (fijal)
Date: Tue, 8 Jul 2014 09:49:01 +0200 (CEST)
Subject: [pypy-commit] pypy default: accept unicode keys
Message-ID: <20140708074901.D73BE1C0F1D@cobra.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch:
Changeset: r72384:c8d30edc0498
Date: 2014-07-08 09:48 +0200
http://bitbucket.org/pypy/pypy/changeset/c8d30edc0498/
Log: accept unicode keys
diff --git a/lib-python/2.7/test/test_gdbm.py b/lib-python/2.7/test/test_gdbm.py
--- a/lib-python/2.7/test/test_gdbm.py
+++ b/lib-python/2.7/test/test_gdbm.py
@@ -98,6 +98,17 @@
self.assertTrue(key in self.g)
self.assertTrue(self.g.has_key(key))
+ def test_unicode_key(self):
+ key = u'ab'
+ value = u'cd'
+ self.g = gdbm.open(filename, 'cf')
+ self.g[key] = value
+ self.g.close()
+ self.g = gdbm.open(filename, 'r')
+ self.assertEquals(self.g[key], value)
+ self.assertTrue(key in self.g)
+ self.assertTrue(self.g.has_key(key))
+
def test_main():
run_unittest(TestGdbm)
diff --git a/lib_pypy/gdbm.py b/lib_pypy/gdbm.py
--- a/lib_pypy/gdbm.py
+++ b/lib_pypy/gdbm.py
@@ -50,6 +50,8 @@
pass
def _fromstr(key):
+ if isinstance(key, unicode):
+ key = key.encode("ascii")
if not isinstance(key, str):
raise TypeError("gdbm mappings have string indices only")
return {'dptr': ffi.new("char[]", key), 'dsize': len(key)}
From noreply at buildbot.pypy.org Tue Jul 8 09:59:40 2014
From: noreply at buildbot.pypy.org (fijal)
Date: Tue, 8 Jul 2014 09:59:40 +0200 (CEST)
Subject: [pypy-commit] pypy default: pass the errno as positional arg too
Message-ID: <20140708075940.D3D741D35AE@cobra.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch:
Changeset: r72385:df9c15f76446
Date: 2014-07-08 09:59 +0200
http://bitbucket.org/pypy/pypy/changeset/df9c15f76446/
Log: pass the errno as positional arg too
diff --git a/lib_pypy/gdbm.py b/lib_pypy/gdbm.py
--- a/lib_pypy/gdbm.py
+++ b/lib_pypy/gdbm.py
@@ -73,8 +73,8 @@
def _raise_from_errno(self):
if ffi.errno:
- raise error(os.strerror(ffi.errno))
- raise error(lib.gdbm_strerror(lib.gdbm_errno))
+ raise error(ffi.errno, os.strerror(ffi.errno))
+ raise error(lib.gdbm_errno, lib.gdbm_strerror(lib.gdbm_errno))
def __len__(self):
if self.size < 0:
@@ -143,7 +143,7 @@
def _check_closed(self):
if not self.ll_dbm:
- raise error("GDBM object has already been closed")
+ raise error(0, "GDBM object has already been closed")
__del__ = close
@@ -161,7 +161,7 @@
elif flags[0] == 'n':
iflags = lib.GDBM_NEWDB
else:
- raise error("First flag must be one of 'r', 'w', 'c' or 'n'")
+ raise error(0, "First flag must be one of 'r', 'w', 'c' or 'n'")
for flag in flags[1:]:
if flag == 'f':
iflags |= lib.GDBM_FAST
@@ -170,7 +170,7 @@
elif flag == 'u':
iflags |= lib.GDBM_NOLOCK
else:
- raise error("Flag '%s' not supported" % flag)
+ raise error(0, "Flag '%s' not supported" % flag)
return gdbm(filename, iflags, mode)
open_flags = "rwcnfsu"
From noreply at buildbot.pypy.org Tue Jul 8 15:26:36 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 15:26:36 +0200 (CEST)
Subject: [pypy-commit] pypy default: Avoid unnecessary wrapping and
unwrapping of the keys in update(). This is not JITted code.
Message-ID: <20140708132636.20BA91C3288@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72386:844c2e1c2673
Date: 2014-07-08 13:13 +0200
http://bitbucket.org/pypy/pypy/changeset/844c2e1c2673/
Log: Avoid unnecessary wrapping and unwrapping of the keys in update().
This is not JITted code.
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -699,9 +699,10 @@
else:
wrapkey = dictimpl.wrapkey.im_func
if not hasattr(dictimpl, 'wrapvalue'):
- wrapvalue = lambda space, key: key
+ wrapvalue = lambda space, value: value
else:
wrapvalue = dictimpl.wrapvalue.im_func
+ setitem_untyped = getattr(dictimpl, 'setitem_untyped', None)
class IterClassKeys(BaseKeyIterator):
def __init__(self, space, strategy, impl):
@@ -770,10 +771,19 @@
w_dict.length() - 1)
else:
spec = _SPEC1
- for key, value in self.getiteritems(w_dict):
- w_key = wrapkey(self.space, key)
- w_value = wrapvalue(self.space, value)
- w_updatedict.setitem(w_key, w_value)
+ iteritems = self.getiteritems(w_dict)
+ for key, value in iteritems:
+ if spec is not _SPEC3:
+ if (setitem_untyped is not None and
+ self is w_updatedict.strategy):
+ dstorage = w_updatedict.dstorage
+ spec = _SPEC3
+ else:
+ w_key = wrapkey(self.space, key)
+ w_value = wrapvalue(self.space, value)
+ w_updatedict.setitem(w_key, w_value)
+ if spec is _SPEC3:
+ setitem_untyped(self, dstorage, key, value)
if spec is _SPEC1:
spec = _SPEC2
w_updatedict.strategy.prepare_update(w_updatedict,
@@ -786,8 +796,9 @@
create_iterator_classes(EmptyDictStrategy)
-_SPEC1 = SpecTag()
-_SPEC2 = SpecTag()
+_SPEC1 = SpecTag() # first iteration
+_SPEC2 = SpecTag() # all other iteration
+_SPEC3 = SpecTag() # same strategy with setitem_untyped()
# concrete subclasses of the above
@@ -907,6 +918,9 @@
objectmodel.prepare_dict_update(self.unerase(w_dict.dstorage),
num_extra)
+ def setitem_untyped(self, dstorage, key, w_value):
+ self.unerase(dstorage)[key] = w_value
+
class ObjectDictStrategy(AbstractTypedStrategy, DictStrategy):
erase, unerase = rerased.new_erasing_pair("object")
From noreply at buildbot.pypy.org Tue Jul 8 15:26:37 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 15:26:37 +0200 (CEST)
Subject: [pypy-commit] pypy default: Found another reason for slowness of
dict.update(): the keys are wrapped
Message-ID: <20140708132637.7CBAC1C3288@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72387:7f9dca73c7b6
Date: 2014-07-08 13:33 +0200
http://bitbucket.org/pypy/pypy/changeset/7f9dca73c7b6/
Log: Found another reason for slowness of dict.update(): the keys are
wrapped and unwrapped. Fixed, and removed the magic SpecTag too.
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -3,7 +3,6 @@
from rpython.rlib import jit, rerased, objectmodel
from rpython.rlib.debug import mark_dict_non_null
from rpython.rlib.objectmodel import newlist_hint, r_dict, specialize
-from rpython.rlib.unroll import SpecTag
from rpython.tool.sourcetools import func_renamer, func_with_new_name
from pypy.interpreter.baseobjspace import W_Root
@@ -759,35 +758,51 @@
# this is very similar to the general version, but the difference
# is that it is specialized to call a specific next_item()
iteritems = IterClassItems(self.space, self, w_dict)
- spec = _SPEC1
+ w_key, w_value = iteritems.next_item()
+ if w_key is None:
+ return
+ w_updatedict.setitem(w_key, w_value)
+ w_updatedict.strategy.prepare_update(w_updatedict,
+ w_dict.length() - 1)
while True:
w_key, w_value = iteritems.next_item()
if w_key is None:
+ return
+ w_updatedict.setitem(w_key, w_value)
+ else:
+ iteritems = self.getiteritems(w_dict)
+ if not same_strategy(self, w_updatedict):
+ # Different strategy. Try to copy one item of w_dict
+ for key, value in iteritems:
+ w_key = wrapkey(self.space, key)
+ w_value = wrapvalue(self.space, value)
+ w_updatedict.setitem(w_key, w_value)
break
- w_updatedict.setitem(w_key, w_value)
- if spec is _SPEC1:
- spec = _SPEC2
- w_updatedict.strategy.prepare_update(w_updatedict,
- w_dict.length() - 1)
- else:
- spec = _SPEC1
- iteritems = self.getiteritems(w_dict)
- for key, value in iteritems:
- if spec is not _SPEC3:
- if (setitem_untyped is not None and
- self is w_updatedict.strategy):
- dstorage = w_updatedict.dstorage
- spec = _SPEC3
- else:
+ else:
+ return # w_dict is completely empty, nothing to do
+ count = w_dict.length() - 1
+ w_updatedict.strategy.prepare_update(w_updatedict, count)
+ # If the strategy is still different, continue the slow way
+ if not same_strategy(self, w_updatedict):
+ for key, value in iteritems:
w_key = wrapkey(self.space, key)
w_value = wrapvalue(self.space, value)
w_updatedict.setitem(w_key, w_value)
- if spec is _SPEC3:
- setitem_untyped(self, dstorage, key, value)
- if spec is _SPEC1:
- spec = _SPEC2
- w_updatedict.strategy.prepare_update(w_updatedict,
- w_dict.length() - 1)
+ return # done
+ else:
+ # Same strategy.
+ self.prepare_update(w_updatedict, w_dict.length())
+ #
+ # Use setitem_untyped() to speed up copying without
+ # wrapping/unwrapping the key.
+ assert setitem_untyped is not None
+ dstorage = w_updatedict.dstorage
+ for key, value in iteritems:
+ setitem_untyped(self, dstorage, key, value)
+
+ def same_strategy(self, w_otherdict):
+ return (setitem_untyped is not None and
+ w_otherdict.strategy is self)
dictimpl.iterkeys = iterkeys
dictimpl.itervalues = itervalues
@@ -796,10 +811,6 @@
create_iterator_classes(EmptyDictStrategy)
-_SPEC1 = SpecTag() # first iteration
-_SPEC2 = SpecTag() # all other iteration
-_SPEC3 = SpecTag() # same strategy with setitem_untyped()
-
# concrete subclasses of the above
From noreply at buildbot.pypy.org Tue Jul 8 15:26:38 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 15:26:38 +0200 (CEST)
Subject: [pypy-commit] pypy default: Translation fix
Message-ID: <20140708132638.BCB2F1C3288@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72388:af210d996bf8
Date: 2014-07-08 13:51 +0200
http://bitbucket.org/pypy/pypy/changeset/af210d996bf8/
Log: Translation fix
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -701,7 +701,12 @@
wrapvalue = lambda space, value: value
else:
wrapvalue = dictimpl.wrapvalue.im_func
- setitem_untyped = getattr(dictimpl, 'setitem_untyped', None)
+ if not hasattr(dictimpl, 'setitem_untyped'):
+ setitem_untyped = None
+ else:
+ setitem_untyped = dictimpl.setitem_untyped.im_func
+ setitem_untyped = func_with_new_name(setitem_untyped,
+ 'setitem_untyped_%s' % dictimpl.__name__)
class IterClassKeys(BaseKeyIterator):
def __init__(self, space, strategy, impl):
From noreply at buildbot.pypy.org Tue Jul 8 16:54:56 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Tue, 8 Jul 2014 16:54:56 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: Don't actually skip these tests
Message-ID: <20140708145456.7646F1C1068@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72389:62603ec2b980
Date: 2014-07-08 09:54 -0500
http://bitbucket.org/pypy/pypy/changeset/62603ec2b980/
Log: Don't actually skip these tests
diff --git a/pypy/interpreter/test/test_utf8_codecs.py b/pypy/interpreter/test/test_utf8_codecs.py
--- a/pypy/interpreter/test/test_utf8_codecs.py
+++ b/pypy/interpreter/test/test_utf8_codecs.py
@@ -3,7 +3,7 @@
import py
import sys, random
-from pypy.interpreter.utf8 import Utf8Str
+from pypy.interpreter.utf8 import Utf8Str, utf8chr
from pypy.interpreter import utf8_codecs
@@ -740,12 +740,7 @@
assert encoder(u'u\u1234', 2, 'replace') == 'u?'
-# TODO: Do I need to actually skip these?
class TestTranslation(object):
- def setup_class(cls):
- if utf8_codecs.MAXUNICODE != sys.maxunicode:
- py.test.skip("these tests cannot run on the llinterp")
-
def test_utf8(self):
from rpython.rtyper.test.test_llinterp import interpret
def f(x):
@@ -758,16 +753,10 @@
assert res
def test_encode_surrogate_pair(self):
- u = runicode.UNICHR(0xD800) + runicode.UNICHR(0xDC00)
- if runicode.MAXUNICODE < 65536:
- # Narrow unicode build, consider utf16 surrogate pairs
- assert utf8_codecs.unicode_encode_unicode_escape(
- u, len(u), True) == r'\U00010000'
- assert utf8_codecs.unicode_encode_raw_unicode_escape(
- u, len(u), True) == r'\U00010000'
- else:
- # Wide unicode build, don't merge utf16 surrogate pairs
- assert utf8_codecs.unicode_encode_unicode_escape(
- u, len(u), True) == r'\ud800\udc00'
- assert utf8_codecs.unicode_encode_raw_unicode_escape(
- u, len(u), True) == r'\ud800\udc00'
+ u = utf8chr(0xD800) + utf8chr(0xDC00)
+
+ # Wide unicode build, don't merge utf16 surrogate pairs
+ assert utf8_codecs.unicode_encode_unicode_escape(
+ u, len(u), True) == r'\ud800\udc00'
+ assert utf8_codecs.unicode_encode_raw_unicode_escape(
+ u, len(u), True) == r'\ud800\udc00'
From noreply at buildbot.pypy.org Tue Jul 8 16:54:57 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Tue, 8 Jul 2014 16:54:57 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: Re-enable wrapping unicode objects
Message-ID: <20140708145457.D87D21C1068@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72390:11e3ba8aad74
Date: 2014-07-08 09:54 -0500
http://bitbucket.org/pypy/pypy/changeset/11e3ba8aad74/
Log: Re-enable wrapping unicode objects
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -227,6 +227,9 @@
wrappeditems = [self.wrap(item) for item in x]
return W_FrozensetObject(self, wrappeditems)
+ if isinstance(x, unicode):
+ return W_UnicodeObject(Utf8Str.from_unicode(x))
+
if x is __builtin__.Ellipsis:
# '__builtin__.Ellipsis' avoids confusion with special.Ellipsis
return self.w_Ellipsis
From noreply at buildbot.pypy.org Tue Jul 8 17:38:48 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 17:38:48 +0200 (CEST)
Subject: [pypy-commit] pypy default: Refactor the implementation of the dict
iterators in RPython. The goal
Message-ID: <20140708153848.3FADA1D2317@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72391:711f53c92504
Date: 2014-07-08 16:35 +0200
http://bitbucket.org/pypy/pypy/changeset/711f53c92504/
Log: Refactor the implementation of the dict iterators in RPython. The
goal is to avoid the allocation of a tuple in iteritems().next().
diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py
--- a/rpython/jit/codewriter/support.py
+++ b/rpython/jit/codewriter/support.py
@@ -507,18 +507,7 @@
_ll_1_dict_values.need_result_type = True
_ll_1_dict_items .need_result_type = True
- _dictnext_keys = staticmethod(ll_rdict.ll_dictnext_group['keys'])
- _dictnext_values = staticmethod(ll_rdict.ll_dictnext_group['values'])
- _dictnext_items = staticmethod(ll_rdict.ll_dictnext_group['items'])
-
- def _ll_1_dictiter_nextkeys(iter):
- return LLtypeHelpers._dictnext_keys(None, iter)
- def _ll_1_dictiter_nextvalues(iter):
- return LLtypeHelpers._dictnext_values(None, iter)
- def _ll_1_dictiter_nextitems(RES, iter):
- return LLtypeHelpers._dictnext_items(lltype.Ptr(RES), iter)
- _ll_1_dictiter_nextitems.need_result_type = True
-
+ _ll_1_dictiter_next = ll_rdict._ll_dictnext
_ll_1_dict_resize = ll_rdict.ll_dict_resize
# ---------- ordered dict ----------
@@ -534,18 +523,7 @@
_ll_1_odict_values.need_result_type = True
_ll_1_odict_items .need_result_type = True
- _odictnext_keys = staticmethod(rordereddict.ll_dictnext_group['keys'])
- _odictnext_values = staticmethod(rordereddict.ll_dictnext_group['values'])
- _odictnext_items = staticmethod(rordereddict.ll_dictnext_group['items'])
-
- def _ll_1_odictiter_nextkeys(iter):
- return LLtypeHelpers._odictnext_keys(None, iter)
- def _ll_1_odictiter_nextvalues(iter):
- return LLtypeHelpers._odictnext_values(None, iter)
- def _ll_1_odictiter_nextitems(RES, iter):
- return LLtypeHelpers._odictnext_items(lltype.Ptr(RES), iter)
- _ll_1_odictiter_nextitems.need_result_type = True
-
+ _ll_1_odictiter_next = rordereddict._ll_dictnext
_ll_1_odict_resize = rordereddict.ll_dict_resize
# ---------- strings and unicode ----------
diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py
--- a/rpython/rtyper/lltypesystem/rdict.py
+++ b/rpython/rtyper/lltypesystem/rdict.py
@@ -721,7 +721,7 @@
('dict', r_dict.lowleveltype),
('index', lltype.Signed)))
self.ll_dictiter = ll_dictiter
- self.ll_dictnext = ll_dictnext_group[variant]
+ self._ll_dictnext = _ll_dictnext
def ll_dictiter(ITERPTR, d):
@@ -730,45 +730,26 @@
iter.index = 0
return iter
-def _make_ll_dictnext(kind):
- # make three versions of the following function: keys, values, items
- @jit.look_inside_iff(lambda RETURNTYPE, iter: jit.isvirtual(iter)
- and (iter.dict is None or
- jit.isvirtual(iter.dict)))
- @jit.oopspec("dictiter.next%s(iter)" % kind)
- def ll_dictnext(RETURNTYPE, iter):
- # note that RETURNTYPE is None for keys and values
- dict = iter.dict
- if dict:
- entries = dict.entries
- index = iter.index
- assert index >= 0
- entries_len = len(entries)
- while index < entries_len:
- entry = entries[index]
- is_valid = entries.valid(index)
- index = index + 1
- if is_valid:
- iter.index = index
- if RETURNTYPE is lltype.Void:
- return None
- elif kind == 'items':
- r = lltype.malloc(RETURNTYPE.TO)
- r.item0 = recast(RETURNTYPE.TO.item0, entry.key)
- r.item1 = recast(RETURNTYPE.TO.item1, entry.value)
- return r
- elif kind == 'keys':
- return entry.key
- elif kind == 'values':
- return entry.value
- # clear the reference to the dict and prevent restarts
- iter.dict = lltype.nullptr(lltype.typeOf(iter).TO.dict.TO)
- raise StopIteration
- return ll_dictnext
-
-ll_dictnext_group = {'keys' : _make_ll_dictnext('keys'),
- 'values': _make_ll_dictnext('values'),
- 'items' : _make_ll_dictnext('items')}
+@jit.look_inside_iff(lambda iter: jit.isvirtual(iter)
+ and (iter.dict is None or
+ jit.isvirtual(iter.dict)))
+@jit.oopspec("dictiter.next(iter)")
+def _ll_dictnext(iter):
+ dict = iter.dict
+ if dict:
+ entries = dict.entries
+ index = iter.index
+ assert index >= 0
+ entries_len = len(entries)
+ while index < entries_len:
+ nextindex = index + 1
+ if entries.valid(index):
+ iter.index = nextindex
+ return index
+ index = nextindex
+ # clear the reference to the dict and prevent restarts
+ iter.dict = lltype.nullptr(lltype.typeOf(iter).TO.dict.TO)
+ raise StopIteration
# _____________________________________________________________
# methods
diff --git a/rpython/rtyper/lltypesystem/rordereddict.py b/rpython/rtyper/lltypesystem/rordereddict.py
--- a/rpython/rtyper/lltypesystem/rordereddict.py
+++ b/rpython/rtyper/lltypesystem/rordereddict.py
@@ -886,7 +886,7 @@
self.variant = variant
self.lowleveltype = get_ll_dictiter(r_dict.lowleveltype)
self.ll_dictiter = ll_dictiter
- self.ll_dictnext = ll_dictnext_group[variant]
+ self._ll_dictnext = _ll_dictnext
def ll_dictiter(ITERPTR, d):
@@ -895,49 +895,26 @@
iter.index = 0
return iter
-def _make_ll_dictnext(kind):
- # make three versions of the following function: keys, values, items
- @jit.look_inside_iff(lambda RETURNTYPE, iter: jit.isvirtual(iter)
- and (iter.dict is None or
- jit.isvirtual(iter.dict)))
- @jit.oopspec("odictiter.next%s(iter)" % kind)
- def ll_dictnext(RETURNTYPE, iter):
- # note that RETURNTYPE is None for keys and values
- dict = iter.dict
- if not dict:
- raise StopIteration
-
+@jit.look_inside_iff(lambda iter: jit.isvirtual(iter)
+ and (iter.dict is None or
+ jit.isvirtual(iter.dict)))
+@jit.oopspec("odictiter.next(iter)")
+def _ll_dictnext(iter):
+ dict = iter.dict
+ if dict:
entries = dict.entries
index = iter.index
assert index >= 0
entries_len = dict.num_used_items
while index < entries_len:
- entry = entries[index]
- is_valid = entries.valid(index)
- index = index + 1
- if is_valid:
- iter.index = index
- if RETURNTYPE is lltype.Void:
- return None
- elif kind == 'items':
- r = lltype.malloc(RETURNTYPE.TO)
- r.item0 = recast(RETURNTYPE.TO.item0, entry.key)
- r.item1 = recast(RETURNTYPE.TO.item1, entry.value)
- return r
- elif kind == 'keys':
- return entry.key
- elif kind == 'values':
- return entry.value
-
+ nextindex = index + 1
+ if entries.valid(index):
+ iter.index = nextindex
+ return index
+ index = nextindex
# clear the reference to the dict and prevent restarts
iter.dict = lltype.nullptr(lltype.typeOf(iter).TO.dict.TO)
- raise StopIteration
-
- return ll_dictnext
-
-ll_dictnext_group = {'keys' : _make_ll_dictnext('keys'),
- 'values': _make_ll_dictnext('values'),
- 'items' : _make_ll_dictnext('items')}
+ raise StopIteration
# _____________________________________________________________
# methods
diff --git a/rpython/rtyper/rdict.py b/rpython/rtyper/rdict.py
--- a/rpython/rtyper/rdict.py
+++ b/rpython/rtyper/rdict.py
@@ -75,18 +75,49 @@
def rtype_next(self, hop):
variant = self.variant
v_iter, = hop.inputargs(self)
- if variant in ('keys', 'values'):
- c1 = hop.inputconst(lltype.Void, None)
- else:
- c1 = hop.inputconst(lltype.Void, hop.r_result.lowleveltype)
# record that we know about these two possible exceptions
hop.has_implicit_exception(StopIteration)
hop.has_implicit_exception(RuntimeError)
hop.exception_is_here()
- v = hop.gendirectcall(self.ll_dictnext, c1, v_iter)
+ v_index = hop.gendirectcall(self._ll_dictnext, v_iter)
+ if variant == 'items' and hop.r_result.lowleveltype != lltype.Void:
+ c1 = hop.inputconst(lltype.Void, hop.r_result.lowleveltype.TO)
+ cflags = hop.inputconst(lltype.Void, {'flavor': 'gc'})
+ v_result = hop.genop('malloc', [c1, cflags],
+ resulttype = hop.r_result.lowleveltype)
+ DICT = self.lowleveltype.TO.dict
+ c_dict = hop.inputconst(lltype.Void, 'dict')
+ v_dict = hop.genop('getfield', [v_iter, c_dict], resulttype=DICT)
+ ENTRIES = DICT.TO.entries
+ c_entries = hop.inputconst(lltype.Void, 'entries')
+ v_entries = hop.genop('getfield', [v_dict, c_entries],
+ resulttype=ENTRIES)
+ if variant != 'values':
+ KEY = ENTRIES.TO.OF.key
+ c_key = hop.inputconst(lltype.Void, 'key')
+ v_key = hop.genop('getinteriorfield', [v_entries, v_index, c_key],
+ resulttype=KEY)
+ if variant != 'keys':
+ VALUE = ENTRIES.TO.OF.value
+ c_value = hop.inputconst(lltype.Void, 'value')
+ v_value = hop.genop('getinteriorfield', [v_entries,v_index,c_value],
+ resulttype=VALUE)
if variant == 'keys':
- return self.r_dict.recast_key(hop.llops, v)
+ return self.r_dict.recast_key(hop.llops, v_key)
elif variant == 'values':
- return self.r_dict.recast_value(hop.llops, v)
+ return self.r_dict.recast_value(hop.llops, v_value)
+ elif hop.r_result.lowleveltype == lltype.Void:
+ return hop.inputconst(lltype.Void, None)
else:
- return v
+ assert variant == 'items'
+ ITEM0 = v_result.concretetype.TO.item0
+ ITEM1 = v_result.concretetype.TO.item1
+ if ITEM0 != v_key.concretetype:
+ v_key = hop.genop('cast_pointer', [v_key], resulttype=ITEM0)
+ if ITEM1 != v_value.concretetype:
+ v_value = hop.genop('cast_pointer', [v_value], resulttype=ITEM1)
+ c_item0 = hop.inputconst(lltype.Void, 'item0')
+ c_item1 = hop.inputconst(lltype.Void, 'item1')
+ hop.genop('setfield', [v_result, c_item0, v_key])
+ hop.genop('setfield', [v_result, c_item1, v_value])
+ return v_result
diff --git a/rpython/rtyper/test/test_rordereddict.py b/rpython/rtyper/test/test_rordereddict.py
--- a/rpython/rtyper/test/test_rordereddict.py
+++ b/rpython/rtyper/test/test_rordereddict.py
@@ -138,12 +138,12 @@
rordereddict.ll_dict_setitem(ll_d, llstr("j"), 2)
ITER = rordereddict.get_ll_dictiter(lltype.Ptr(DICT))
ll_iter = rordereddict.ll_dictiter(ITER, ll_d)
- ll_iterkeys = rordereddict.ll_dictnext_group['keys']
- next = ll_iterkeys(lltype.Signed, ll_iter)
- assert hlstr(next) == "k"
- next = ll_iterkeys(lltype.Signed, ll_iter)
- assert hlstr(next) == "j"
- py.test.raises(StopIteration, ll_iterkeys, lltype.Signed, ll_iter)
+ ll_dictnext = rordereddict._ll_dictnext
+ num = ll_dictnext(ll_iter)
+ assert hlstr(ll_d.entries[num].key) == "k"
+ num = ll_dictnext(ll_iter)
+ assert hlstr(ll_d.entries[num].key) == "j"
+ py.test.raises(StopIteration, ll_dictnext, ll_iter)
def test_popitem(self):
DICT = self._get_str_dict()
From noreply at buildbot.pypy.org Tue Jul 8 17:38:49 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 17:38:49 +0200 (CEST)
Subject: [pypy-commit] pypy default: Add comment
Message-ID: <20140708153849.784CC1D2317@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72392:b74596c6094a
Date: 2014-07-08 16:46 +0200
http://bitbucket.org/pypy/pypy/changeset/b74596c6094a/
Log: Add comment
diff --git a/rpython/rtyper/rdict.py b/rpython/rtyper/rdict.py
--- a/rpython/rtyper/rdict.py
+++ b/rpython/rtyper/rdict.py
@@ -81,6 +81,8 @@
hop.exception_is_here()
v_index = hop.gendirectcall(self._ll_dictnext, v_iter)
if variant == 'items' and hop.r_result.lowleveltype != lltype.Void:
+ # this allocates the tuple for the result, directly in the function
+ # where it will be used (likely). This will let it be removed.
c1 = hop.inputconst(lltype.Void, hop.r_result.lowleveltype.TO)
cflags = hop.inputconst(lltype.Void, {'flavor': 'gc'})
v_result = hop.genop('malloc', [c1, cflags],
From noreply at buildbot.pypy.org Tue Jul 8 19:49:08 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Tue, 8 Jul 2014 19:49:08 +0200 (CEST)
Subject: [pypy-commit] pypy default: More tweaks
Message-ID: <20140708174908.ED5831C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72393:8920909d084e
Date: 2014-07-08 19:48 +0200
http://bitbucket.org/pypy/pypy/changeset/8920909d084e/
Log: More tweaks
diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py
--- a/rpython/rtyper/lltypesystem/rdict.py
+++ b/rpython/rtyper/lltypesystem/rdict.py
@@ -805,6 +805,8 @@
ll_clear.oopspec = 'dict.clear(d)'
def ll_update(dic1, dic2):
+ if dic1 == dic2:
+ return
ll_prepare_dict_update(dic1, dic2.num_items)
entries = dic2.entries
d2len = len(entries)
@@ -827,7 +829,13 @@
# (d.resize_counter - 1) // 3 = room left in d
# so, if num_extra == 1, we need d.resize_counter > 3
# if num_extra == 2, we need d.resize_counter > 6 etc.
- jit.conditional_call(d.resize_counter <= num_extra * 3,
+ # Note however a further hack: if num_extra <= d.num_items,
+ # we avoid calling _ll_dict_resize_to here. This is to handle
+ # the case where dict.update() actually has a lot of collisions.
+ # If num_extra is much greater than d.num_items the conditional_call
+ # will trigger anyway, which is really the goal.
+ x = num_extra - d.num_items
+ jit.conditional_call(d.resize_counter <= x * 3,
_ll_dict_resize_to, d, num_extra)
# this is an implementation of keys(), values() and items()
From noreply at buildbot.pypy.org Wed Jul 9 03:02:44 2014
From: noreply at buildbot.pypy.org (yuyichao)
Date: Wed, 9 Jul 2014 03:02:44 +0200 (CEST)
Subject: [pypy-commit] pypy py3k-reset-locale: initialize LC_CTYPE instead
of LC_ALL
Message-ID: <20140709010244.8A8C11D23FA@cobra.cs.uni-duesseldorf.de>
Author: Yichao Yu
Branch: py3k-reset-locale
Changeset: r72394:2bd3b970804c
Date: 2014-07-09 07:45 +0800
http://bitbucket.org/pypy/pypy/changeset/2bd3b970804c/
Log: initialize LC_CTYPE instead of LC_ALL
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -57,7 +57,7 @@
space.call_function(w_run_toplevel, w_call_startup_gateway)
if rlocale.HAVE_LANGINFO:
try:
- rlocale.setlocale(rlocale.LC_ALL, '')
+ rlocale.setlocale(rlocale.LC_CTYPE, '')
except rlocale.LocaleError:
pass
w_executable = space.fsdecode(space.wrapbytes(argv[0]))
From noreply at buildbot.pypy.org Wed Jul 9 03:02:45 2014
From: noreply at buildbot.pypy.org (pjenvey)
Date: Wed, 9 Jul 2014 03:02:45 +0200 (CEST)
Subject: [pypy-commit] pypy py3k: Merged in yuyichao/pypy/py3k-reset-locale
(pull request #246)
Message-ID: <20140709010245.E63301D23FA@cobra.cs.uni-duesseldorf.de>
Author: Philip Jenvey
Branch: py3k
Changeset: r72395:bd92f95fcd54
Date: 2014-07-08 18:02 -0700
http://bitbucket.org/pypy/pypy/changeset/bd92f95fcd54/
Log: Merged in yuyichao/pypy/py3k-reset-locale (pull request #246)
initialize LC_CTYPE instead of LC_ALL
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -57,7 +57,7 @@
space.call_function(w_run_toplevel, w_call_startup_gateway)
if rlocale.HAVE_LANGINFO:
try:
- rlocale.setlocale(rlocale.LC_ALL, '')
+ rlocale.setlocale(rlocale.LC_CTYPE, '')
except rlocale.LocaleError:
pass
w_executable = space.fsdecode(space.wrapbytes(argv[0]))
From noreply at buildbot.pypy.org Wed Jul 9 03:03:00 2014
From: noreply at buildbot.pypy.org (pjenvey)
Date: Wed, 9 Jul 2014 03:03:00 +0200 (CEST)
Subject: [pypy-commit] pypy py3k-reset-locale: Close branch py3k-reset-locale
Message-ID: <20140709010300.B5EF91D23FA@cobra.cs.uni-duesseldorf.de>
Author: Philip Jenvey
Branch: py3k-reset-locale
Changeset: r72396:d577539c2307
Date: 2014-07-08 18:02 -0700
http://bitbucket.org/pypy/pypy/changeset/d577539c2307/
Log: Close branch py3k-reset-locale
From noreply at buildbot.pypy.org Wed Jul 9 16:18:16 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 9 Jul 2014 16:18:16 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage-refactoring-virtual-pc: start
adding vrefs
Message-ID: <20140709141816.A5E601C33F5@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage-refactoring-virtual-pc
Changeset: r869:6fb935c7c9b6
Date: 2014-07-03 16:21 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/6fb935c7c9b6/
Log: start adding vrefs
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -26,7 +26,7 @@
_immutable_fields_ = ["space", "image", "image_name",
"max_stack_depth", "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
-
+
jit_driver = jit.JitDriver(
greens=['pc', 'self', 'method'],
reds=['s_context'],
@@ -38,7 +38,7 @@
trace=False, evented=True, interrupts=True,
max_stack_depth=constants.MAX_LOOP_DEPTH):
import time
-
+
# === Initialize immutable variables
self.space = space
self.image = image
@@ -54,7 +54,7 @@
self.interrupt_counter_size = int(os.environ["SPY_ICS"])
except KeyError:
self.interrupt_counter_size = constants.INTERRUPT_COUNTER_SIZE
-
+
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
self.current_stack_depth = 0
@@ -108,19 +108,25 @@
if jit.we_are_jitted():
self.jitted_check_for_interrupt(s_context)
self.jit_driver.can_enter_jit(pc=pc, self=self, method=method, s_context=s_context)
-
+
# This is just a wrapper around loop_bytecodes that handles the stack overflow protection mechanism
def stack_frame(self, s_new_frame, may_context_switch=True, fresh_context=False):
if self.max_stack_depth > 0:
if self.current_stack_depth >= self.max_stack_depth:
raise StackOverflow(s_new_frame)
-
+
self.current_stack_depth += 1
+ s_sender = s_new_frame.s_sender()
+ assert s_sender
+ s_sender_ref = jit.virtual_ref(s_sender)
+ s_new_frame.store_s_sender(s_sender_ref, raiseError=False)
try:
self.loop_bytecodes(s_new_frame, may_context_switch=may_context_switch, fresh_context=fresh_context)
finally:
+ jit.virtual_ref_finish(s_sender_ref, s_sender)
+ s_new_frame.restore_s_sender(s_sender)
self.current_stack_depth -= 1
-
+
def step(self, context, pc):
bytecode = context.fetch_bytecode(pc)
pc += 1
@@ -134,9 +140,9 @@
if start <= bytecode <= stop:
return getattr(context, methname)(self, bytecode, pc)
assert False, "unreachable"
-
+
# ============== Methods for handling user interrupts ==============
-
+
def jitted_check_for_interrupt(self, s_frame):
if not self.interrupts:
return
@@ -147,7 +153,7 @@
decr_by = int(trace_length // 100)
decr_by = max(decr_by, 1)
self.quick_check_for_interrupt(s_frame, decr_by)
-
+
def quick_check_for_interrupt(self, s_frame, dec=1):
if not self.interrupts:
return
@@ -183,7 +189,7 @@
return intmask(int((time.time() - self.startup_time) * 1000) & constants.TAGGED_MASK)
# ============== Convenience methods for executing code ==============
-
+
def interpret_toplevel(self, w_frame):
try:
self.loop(w_frame)
@@ -199,7 +205,7 @@
"asSymbol")
else:
w_selector = selector
-
+
w_method = model.W_CompiledMethod(self.space, header=512)
w_method.literalatput0(self.space, 1, w_selector)
assert len(arguments_w) <= 7
@@ -208,10 +214,10 @@
s_frame = MethodContextShadow(self.space, None, w_method, w_receiver, [])
s_frame.push(w_receiver)
s_frame.push_all(list(arguments_w))
-
+
self.interrupt_check_counter = self.interrupt_counter_size
return self.interpret_toplevel(s_frame.w_self())
-
+
def padding(self, symbol=' '):
return symbol * self.current_stack_depth
@@ -247,9 +253,20 @@
# jump=True means the pc is changed in an unpredictable way.
# The implementation method must additionally handle the pc.
# needs_pc=True means the bytecode implementation required the pc, but will not change it.
+
+from rpython.rlib.unroll import SpecTag
+class unrolling_int(int, SpecTag):
+ def __add__(self, other):
+ return unrolling_int(int.__add__(self, other))
+ __radd__ = __add__
+ def __sub__(self, other):
+ return unrolling_int(int.__sub__(self, other))
+ def __rsub__(self, other):
+ return unrolling_int(int.__rsub__(self, other))
+unrolling_zero = unrolling_int(0)
+
def bytecode_implementation(parameter_bytes=0, jump=False, needs_pc=False):
def bytecode_implementation_decorator(actual_implementation_method):
- from rpython.rlib.unroll import unrolling_zero
@jit.unroll_safe
def bytecode_implementation_wrapper(self, interp, current_bytecode, pc):
parameters = ()
@@ -351,9 +368,9 @@
# __extend__ adds new methods to the ContextPartShadow class
class __extend__(ContextPartShadow):
-
+
# ====== Push/Pop bytecodes ======
-
+
@bytecode_implementation()
def pushReceiverVariableBytecode(self, interp, current_bytecode):
index = current_bytecode & 15
@@ -432,7 +449,7 @@
@bytecode_implementation()
def popStackBytecode(self, interp, current_bytecode):
self.pop()
-
+
@bytecode_implementation(parameter_bytes=1)
def pushNewArrayBytecode(self, interp, current_bytecode, descriptor):
arraySize, popIntoArray = splitter[7, 1](descriptor)
@@ -442,9 +459,9 @@
else:
newArray = interp.space.w_Array.as_class_get_shadow(interp.space).new(arraySize)
self.push(newArray)
-
+
# ====== Extended Push/Pop bytecodes ======
-
+
def _extendedVariableTypeAndIndex(self, descriptor):
return ((descriptor >> 6) & 3), (descriptor & 63)
@@ -480,16 +497,16 @@
@bytecode_implementation(parameter_bytes=1)
def extendedStoreBytecode(self, interp, current_bytecode, descriptor):
return self._extendedStoreBytecode(interp, current_bytecode, descriptor)
-
+
@bytecode_implementation(parameter_bytes=1)
def extendedStoreAndPopBytecode(self, interp, current_bytecode, descriptor):
self._extendedStoreBytecode(interp, current_bytecode, descriptor)
self.pop()
-
+
def _extract_index_and_temps(self, index_in_array, index_of_array):
w_indirectTemps = self.gettemp(index_of_array)
return index_in_array, w_indirectTemps
-
+
@bytecode_implementation(parameter_bytes=2)
def pushRemoteTempLongBytecode(self, interp, current_bytecode, index_in_array, index_of_array):
index_in_array, w_indirectTemps = self._extract_index_and_temps(index_in_array, index_of_array)
@@ -527,7 +544,7 @@
copiedValues: copiedValues).
self jump: blockSize
"""
-
+
space = self.space
numArgs, numCopied = splitter[4, 4](descriptor)
blockSize = (j << 8) | i
@@ -536,7 +553,7 @@
self.push(w_closure)
assert blockSize >= 0
return self._jump(blockSize, pc)
-
+
# ====== Helpers for send/return bytecodes ======
def _sendSelfSelector(self, w_selector, argcount, interp):
@@ -558,7 +575,7 @@
w_method = receiverclassshadow.lookup(w_selector)
except MethodNotFound:
return self._doesNotUnderstand(w_selector, argcount, interp, receiver)
-
+
code = w_method.primitive()
if code:
try:
@@ -579,21 +596,21 @@
def _sendSelfSelectorSpecial(self, interp, selector, numargs):
w_selector = self.space.get_special_selector(selector)
return self._sendSelfSelector(w_selector, numargs, interp)
-
+
def _sendSpecialSelector(self, interp, receiver, special_selector, w_args=[]):
w_special_selector = self.space.objtable["w_" + special_selector]
s_class = receiver.class_shadow(self.space)
w_method = s_class.lookup(w_special_selector)
s_frame = w_method.create_frame(interp.space, receiver, w_args, self)
-
+
# ######################################################################
if interp.trace:
print '%s %s %s: #%s' % (interp.padding('#'), special_selector, s_frame.short_str(), w_args)
if not objectmodel.we_are_translated():
import pdb; pdb.set_trace()
-
+
return interp.stack_frame(s_frame)
-
+
def _doesNotUnderstand(self, w_selector, argcount, interp, receiver):
arguments = self.pop_and_return_n(argcount)
w_message_class = self.space.classtable["w_Message"]
@@ -603,7 +620,7 @@
w_message.store(self.space, 0, w_selector)
w_message.store(self.space, 1, self.space.wrap_list(arguments))
self.pop() # The receiver, already known.
-
+
try:
return self._sendSpecialSelector(interp, receiver, "doesNotUnderstand", [w_message])
except MethodNotFound:
@@ -612,10 +629,10 @@
assert isinstance(s_class, ClassShadow)
print "Missing doesNotUnderstand in hierarchy of %s" % s_class.getname()
raise
-
+
def _mustBeBoolean(self, interp, receiver):
return self._sendSpecialSelector(interp, receiver, "mustBeBoolean")
-
+
def _call_primitive(self, code, interp, argcount, w_method, w_selector):
# ##################################################################
if interp.trace:
@@ -635,11 +652,11 @@
def _return(self, return_value, interp, s_return_to):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
-
+
# ##################################################################
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
-
+
if s_return_to is None:
# This should never happen while executing a normal image.
raise ReturnFromTopLevel(return_value)
@@ -736,7 +753,7 @@
return self._sendSelfSelector(w_selector, argcount, interp)
# ====== Misc ======
-
+
def _activate_unwind_context(self, interp, current_pc):
# TODO put the constant somewhere else.
# Primitive 198 is used in BlockClosure >> ensure:
@@ -754,11 +771,11 @@
raise nlr
finally:
self.mark_returned()
-
+
@bytecode_implementation()
def unknownBytecode(self, interp, current_bytecode):
raise MissingBytecode("unknownBytecode")
-
+
@bytecode_implementation()
def experimentalBytecode(self, interp, current_bytecode):
raise MissingBytecode("experimentalBytecode")
@@ -775,7 +792,7 @@
else:
w_alternative = interp.space.w_true
w_expected = interp.space.w_false
-
+
# Don't check the class, just compare with only two Boolean instances.
w_bool = self.pop()
if w_expected.is_same_object(w_bool):
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1290,13 +1290,13 @@
s_new_context = shadow.BlockContextShadow(
interp.space, None, w_method_context, argcnt, initialip)
return s_new_context.w_self()
-
+
def finalize_block_ctx(interp, s_block_ctx, s_frame):
from spyvm.error import SenderChainManipulation
# Set some fields
s_block_ctx.store_pc(s_block_ctx.initialip())
try:
- s_block_ctx.store_s_sender(s_frame)
+ s_block_ctx.store_s_sender(jit.non_virtual_ref(s_frame))
except SenderChainManipulation, e:
assert e.s_context == s_block_ctx
return s_block_ctx
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -20,7 +20,7 @@
_immutable_fields_ = ['space']
provides_getname = False
repr_classname = "AbstractShadow"
-
+
def __init__(self, space, w_self):
self.space = space
assert w_self is None or isinstance(w_self, model.W_PointersObject)
@@ -34,19 +34,19 @@
return "<%s %s>" % (self.repr_classname, self.getname())
else:
return "<%s>" % self.repr_classname
-
+
def fetch(self, n0):
raise NotImplementedError("Abstract class")
def store(self, n0, w_value):
raise NotImplementedError("Abstract class")
def size(self):
raise NotImplementedError("Abstract class")
-
+
def attach_shadow(self): pass
-
+
def copy_field_from(self, n0, other_shadow):
self.store(n0, other_shadow.fetch(n0))
-
+
# This can be overwritten to change the order of initialization.
def copy_from(self, other_shadow):
assert self.size() == other_shadow.size()
@@ -98,24 +98,24 @@
# Class must provide: wrap, unwrap, nil_value, is_nil_value, wrapper_class
_attrs_ = ['storage']
_immutable_fields_ = ['storage']
-
+
def __init__(self, space, w_self, size):
AbstractStorageShadow.__init__(self, space, w_self, size)
self.storage = [self.nil_value] * size
-
+
def size(self):
return len(self.storage)
-
+
def generalized_strategy_for(self, w_val):
return ListStorageShadow
-
+
def fetch(self, n0):
val = self.storage[n0]
if self.is_nil_value(val):
return self.space.w_nil
else:
return self.wrap(self.space, val)
-
+
def do_store(self, n0, w_val):
if w_val.is_nil(self.space):
self.storage[n0] = self.nil_value
@@ -134,7 +134,7 @@
nil_value = constants.MAXINT
wrapper_class = model.W_SmallInteger
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(SmallIntegerOrNilStorageShadow, space, w_val)
@@ -153,7 +153,7 @@
nil_value = sys.float_info.max
wrapper_class = model.W_Float
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(FloatOrNilStorageShadow, space, w_val)
@@ -193,17 +193,17 @@
if float_can_handle and not FloatOrNilStorageShadow.static_can_contain(space, w_obj):
float_can_handle = False
specialized_strategies = specialized_strategies - 1
-
+
if specialized_strategies <= 0:
return ListStorageShadow
-
+
if all_nil_can_handle:
return AllNilStorageShadow
if small_int_can_handle:
return SmallIntegerOrNilStorageShadow
if float_can_handle:
return FloatOrNilStorageShadow
-
+
# If this happens, please look for a bug in the code above.
assert False, "No strategy could be found for list..."
@@ -223,7 +223,7 @@
_immutable_fields_ = ['storage']
repr_classname = "ListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [self.space.w_nil] * size
def fetch(self, n0):
@@ -236,7 +236,7 @@
_immutable_fields_ = ['storage']
repr_classname = "WeakListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [weakref.ref(self.space.w_nil)] * size
def fetch(self, n0):
@@ -245,14 +245,14 @@
def store(self, n0, w_value):
assert w_value is not None
self.storage[n0] = weakref.ref(w_value)
-
+
class AbstractCachingShadow(ListStorageShadow):
_immutable_fields_ = ['version?']
_attrs_ = ['version']
repr_classname = "AbstractCachingShadow"
import_from_mixin(version.VersionMixin)
version = None
-
+
def __init__(self, space, w_self):
ListStorageShadow.__init__(self, space, w_self, 0)
self.changed()
@@ -284,7 +284,7 @@
_s_superclass = _s_methoddict = None
provides_getname = True
repr_classname = "ClassShadow"
-
+
def __init__(self, space, w_self):
self.subclass_s = {}
AbstractCachingShadow.__init__(self, space, w_self)
@@ -305,7 +305,7 @@
# In Slang the value is read directly as a boxed integer, so that
# the code gets a "pointer" whose bits are set as above, but
# shifted one bit to the left and with the lowest bit set to 1.
-
+
# Compute the instance size (really the size, not the number of bytes)
instsize_lo = (classformat >> 1) & 0x3F
instsize_hi = (classformat >> (9 + 1)) & 0xC0
@@ -313,10 +313,10 @@
# decode the instSpec
format = (classformat >> 7) & 15
self.instance_varsized = format >= 2
-
+
# In case of raised exception below.
self.changed()
-
+
if format < 4:
self.instance_kind = POINTERS
elif format == 4:
@@ -356,7 +356,7 @@
return
# Some of the special info has changed -> Switch version.
self.changed()
-
+
def store_w_superclass(self, w_class):
superclass = self._s_superclass
if w_class is None or w_class.is_nil(self.space):
@@ -383,24 +383,24 @@
return
if methoddict: methoddict.s_class = None
self.store_s_methoddict(s_new_methoddict)
-
+
def store_s_methoddict(self, s_methoddict):
s_methoddict.s_class = self
s_methoddict.sync_method_cache()
self._s_methoddict = s_methoddict
-
+
def attach_s_class(self, s_other):
self.subclass_s[s_other] = None
def detach_s_class(self, s_other):
del self.subclass_s[s_other]
-
+
def store_w_name(self, w_name):
if isinstance(w_name, model.W_BytesObject):
self.name = w_name.as_string()
else:
self.name = None
-
+
@jit.unroll_safe
def flush_method_caches(self):
look_in_shadow = self
@@ -497,7 +497,7 @@
self.version = version
for s_class in self.subclass_s:
s_class.superclass_changed(version)
-
+
# _______________________________________________________________
# Methods used only in testing
@@ -532,7 +532,7 @@
_immutable_fields_ = ['invalid?', 's_class']
_attrs_ = ['methoddict', 'invalid', 's_class']
repr_classname = "MethodDictionaryShadow"
-
+
def __init__(self, space, w_self):
self.invalid = True
self.s_class = None
@@ -541,7 +541,7 @@
def update(self):
self.sync_method_cache()
-
+
def find_selector(self, w_selector):
if self.invalid:
return None # we may be invalid if Smalltalk code did not call flushCache
@@ -593,7 +593,7 @@
class AbstractRedirectingShadow(AbstractShadow):
_attrs_ = ['_w_self_size']
repr_classname = "AbstractRedirectingShadow"
-
+
def __init__(self, space, w_self):
AbstractShadow.__init__(self, space, w_self)
if w_self is not None:
@@ -610,7 +610,7 @@
_attrs_ = ['_s_sender', '_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
repr_classname = "ContextPartShadow"
-
+
_virtualizable_ = [
"_s_sender", "_pc",
"_temps_and_stack[*]", "_stack_ptr",
@@ -619,9 +619,9 @@
# ______________________________________________________________________
# Initialization
-
+
def __init__(self, space, w_self):
- self._s_sender = None
+ self._s_sender = jit.vref_None
AbstractRedirectingShadow.__init__(self, space, w_self)
self.instances_w = {}
@@ -630,26 +630,26 @@
AbstractRedirectingShadow.copy_field_from(self, n0, other_shadow)
except error.SenderChainManipulation, e:
assert e.s_context == self
-
+
def copy_from(self, other_shadow):
# Some fields have to be initialized before the rest, to ensure correct initialization.
privileged_fields = self.fields_to_copy_first()
for n0 in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
# Now the temp size will be known.
self.init_stack_and_temps()
-
+
for n0 in range(self.size()):
if n0 not in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
def fields_to_copy_first(self):
return []
-
+
# ______________________________________________________________________
# Accessing object fields
-
+
def fetch(self, n0):
if n0 == constants.CTXPART_SENDER_INDEX:
return self.w_sender()
@@ -671,9 +671,9 @@
if n0 == constants.CTXPART_SENDER_INDEX:
assert isinstance(w_value, model.W_PointersObject)
if w_value.is_nil(self.space):
- self._s_sender = None
+ self._s_sender = jit.vref_None
else:
- self.store_s_sender(w_value.as_context_get_shadow(self.space))
+ self.store_s_sender(jit.non_virtual_ref(w_value.as_context_get_shadow(self.space)))
return
if n0 == constants.CTXPART_PC_INDEX:
return self.store_unwrap_pc(w_value)
@@ -688,24 +688,30 @@
else:
# XXX later should store tail out of known context part as well
raise error.WrapperException("Index in context out of bounds")
-
+
# === Sender ===
-
- def store_s_sender(self, s_sender):
- assert s_sender is None or isinstance(s_sender, ContextPartShadow)
+
+ def store_s_sender(self, s_sender, raiseError=True):
+ assert s_sender is jit.vref_None or isinstance(s_sender, jit.DirectVRef)
self._s_sender = s_sender
- raise error.SenderChainManipulation(self)
-
+ if raiseError:
+ raise error.SenderChainManipulation(self)
+
+ def restore_s_sender(self, s_direct):
+ if self._s_sender is not jit.vref_None:
+ # virtual sender wasn't already cleared by e.g. mark_returned
+ self._s_sender = s_direct
+
def w_sender(self):
if self._s_sender is None:
return self.space.w_nil
return self._s_sender.w_self()
-
+
def s_sender(self):
- return self._s_sender
-
+ return self._s_sender()
+
# === Stack Pointer ===
-
+
def unwrap_store_stackpointer(self, w_sp1):
# the stackpointer in the W_PointersObject starts counting at the
# tempframe start
@@ -724,12 +730,12 @@
def stackdepth(self):
return rarithmetic.intmask(self._stack_ptr)
-
+
def wrap_stackpointer(self):
return self.space.wrap_int(self.stackdepth())
# === Program Counter ===
-
+
def store_unwrap_pc(self, w_pc):
if w_pc.is_nil(self.space):
self.store_pc(-1)
@@ -754,9 +760,9 @@
def store_pc(self, newpc):
assert newpc >= -1
self._pc = newpc
-
+
# === Subclassed accessors ===
-
+
def s_home(self):
raise NotImplementedError()
@@ -765,22 +771,22 @@
def w_receiver(self):
raise NotImplementedError()
-
+
def w_method(self):
raise NotImplementedError()
-
+
def tempsize(self):
raise NotImplementedError()
-
+
def is_closure_context(self):
raise NotImplementedError()
-
+
# === Other properties of Contexts ===
-
+
def mark_returned(self):
self.store_pc(-1)
try:
- self.store_s_sender(None)
+ self.store_s_sender(jit.vref_None)
except error.SenderChainManipulation, e:
assert self == e.s_context
@@ -789,25 +795,25 @@
def external_stackpointer(self):
return self.stackdepth() + self.stackstart()
-
+
def stackend(self):
# XXX this is incorrect when there is subclassing
return self._w_self_size
-
+
def fetch_next_bytecode(self):
pc = jit.promote(self._pc)
assert pc >= 0
self._pc += 1
return self.fetch_bytecode(pc)
-
+
def fetch_bytecode(self, pc):
bytecode = self.w_method().fetch_bytecode(pc)
return ord(bytecode)
-
+
# ______________________________________________________________________
# Temporary Variables
#
- # Every context has it's own stack. BlockContexts share their temps with
+ # Every context has it's own stack. BlockContexts share their temps with
# their home contexts. MethodContexts created from a BlockClosure get their
# temps copied from the closure upon activation. Changes are not propagated back;
# this is handled by the compiler by allocating an extra Array for temps.
@@ -817,7 +823,7 @@
def settemp(self, index, w_value):
raise NotImplementedError()
-
+
# ______________________________________________________________________
# Stack Manipulation
@@ -831,13 +837,13 @@
for i in range(tempsize):
temps_and_stack[i] = self.space.w_nil
self._stack_ptr = rarithmetic.r_uint(tempsize) # we point after the last element
-
+
def stack_get(self, index0):
return self._temps_and_stack[index0]
-
+
def stack_put(self, index0, w_val):
self._temps_and_stack[index0] = w_val
-
+
def stack(self):
"""NOT_RPYTHON""" # purely for testing
return self._temps_and_stack[self.tempsize():self._stack_ptr]
@@ -892,7 +898,7 @@
# ______________________________________________________________________
# Primitive support
-
+
def store_instances_array(self, w_class, match_w):
# used for primitives 77 & 78
self.instances_w[w_class] = match_w
@@ -919,7 +925,7 @@
j += 1
retval += "\n---------------------"
return retval
-
+
def short_str(self):
arg_strings = self.argument_strings()
if len(arg_strings) > 0:
@@ -933,10 +939,10 @@
self.w_receiver().as_repr_string(),
args
)
-
+
def print_stack(self, method=True):
return self.print_padded_stack(method)[1]
-
+
def print_padded_stack(self, method):
padding = ret_str = ''
if self.s_sender() is not None:
@@ -950,9 +956,9 @@
class BlockContextShadow(ContextPartShadow):
_attrs_ = ['_w_home', '_initialip', '_eargc']
repr_classname = "BlockContextShadow"
-
+
# === Initialization ===
-
+
def __init__(self, space, w_self=None, w_home=None, argcnt=0, initialip=0):
self = jit.hint(self, access_directly=True, fresh_virtualizable=True)
creating_w_self = w_self is None
@@ -972,40 +978,40 @@
def fields_to_copy_first(self):
return [ constants.BLKCTX_HOME_INDEX ]
-
+
# === Implemented accessors ===
-
+
def s_home(self):
return self._w_home.as_methodcontext_get_shadow(self.space)
-
+
def stackstart(self):
return constants.BLKCTX_STACK_START
def tempsize(self):
# A blockcontext doesn't have any temps
return 0
-
+
def w_receiver(self):
return self.s_home().w_receiver()
-
+
def w_method(self):
retval = self.s_home().w_method()
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def is_closure_context(self):
return True
-
+
# === Temporary variables ===
-
+
def gettemp(self, index):
return self.s_home().gettemp(index)
def settemp(self, index, w_value):
self.s_home().settemp(index, w_value)
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.BLKCTX_HOME_INDEX:
return self._w_home
@@ -1025,11 +1031,11 @@
return self.unwrap_store_eargc(w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_home(self, w_home):
assert isinstance(w_home, model.W_PointersObject)
self._w_home = w_home
-
+
def unwrap_store_initialip(self, w_value):
initialip = self.space.unwrap_int(w_value)
initialip -= 1 + self.w_method().literalsize
@@ -1037,7 +1043,7 @@
def store_initialip(self, initialip):
self._initialip = initialip
-
+
def wrap_initialip(self):
initialip = self.initialip()
initialip += 1 + self.w_method().literalsize
@@ -1045,7 +1051,7 @@
def initialip(self):
return self._initialip
-
+
def unwrap_store_eargc(self, w_value):
self.store_expected_argument_count(self.space.unwrap_int(w_value))
@@ -1059,24 +1065,24 @@
self._eargc = argc
# === Stack Manipulation ===
-
+
def reset_stack(self):
self.pop_n(self.stackdepth())
# === Printing ===
-
+
def argument_strings(self):
return []
-
+
def method_str(self):
return '[] in %s' % self.w_method().get_identifier_string()
class MethodContextShadow(ContextPartShadow):
_attrs_ = ['closure', '_w_receiver', '_w_method']
repr_classname = "MethodContextShadow"
-
+
# === Initialization ===
-
+
@jit.unroll_safe
def __init__(self, space, w_self=None, w_method=None, w_receiver=None,
arguments=None, s_sender=None, closure=None, pc=0):
@@ -1085,7 +1091,7 @@
self.store_w_receiver(w_receiver)
self.store_pc(pc)
self.closure = closure
-
+
if w_method:
self.store_w_method(w_method)
# The summand is needed, because we calculate i.a. our stackdepth relative of the size of w_self.
@@ -1094,29 +1100,29 @@
self.init_stack_and_temps()
else:
self._w_method = None
-
+
if s_sender:
try:
- self.store_s_sender(s_sender)
+ self.store_s_sender(jit.non_virtual_ref(s_sender))
except error.SenderChainManipulation, e:
assert self == e.s_context
-
+
if arguments:
argc = len(arguments)
for i0 in range(argc):
self.settemp(i0, arguments[i0])
else:
argc = 0
-
+
if closure:
for i0 in range(closure.size()):
self.settemp(i0+argc, closure.at0(i0))
def fields_to_copy_first(self):
return [ constants.MTHDCTX_METHOD, constants.MTHDCTX_CLOSURE_OR_NIL ]
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.MTHDCTX_METHOD:
return self.w_method()
@@ -1150,12 +1156,12 @@
return self.settemp(temp_i, w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_receiver(self, w_receiver):
self._w_receiver = w_receiver
-
+
# === Implemented Accessors ===
-
+
def s_home(self):
if self.is_closure_context():
# this is a context for a blockClosure
@@ -1168,31 +1174,31 @@
return s_outerContext.s_home()
else:
return self
-
+
def stackstart(self):
return constants.MTHDCTX_TEMP_FRAME_START
-
+
def store_w_method(self, w_method):
assert isinstance(w_method, model.W_CompiledMethod)
self._w_method = w_method
def w_receiver(self):
return self._w_receiver
-
+
def w_method(self):
retval = self._w_method
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def tempsize(self):
if not self.is_closure_context():
return self.w_method().tempsize()
else:
return self.closure.tempsize()
-
+
def is_closure_context(self):
return self.closure is not None
-
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
@@ -1209,9 +1215,9 @@
self._w_self = w_self
self._w_self_size = w_self.size()
return w_self
-
+
# === Temporary variables ===
-
+
def gettemp(self, index0):
return self.stack_get(index0)
@@ -1219,7 +1225,7 @@
self.stack_put(index0, w_value)
# === Printing ===
-
+
def argument_strings(self):
argcount = self.w_method().argsize
tempsize = self.w_method().tempsize()
From noreply at buildbot.pypy.org Wed Jul 9 16:18:17 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 9 Jul 2014 16:18:17 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage-refactoring-virtual-pc: graft
changes from 64bit branch onto master
Message-ID: <20140709141817.E906A1C33F5@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage-refactoring-virtual-pc
Changeset: r870:2eead83ddd0a
Date: 2014-02-07 13:12 +0100
http://bitbucket.org/pypy/lang-smalltalk/changeset/2eead83ddd0a/
Log: graft changes from 64bit branch onto master
diff --git a/spyvm/constants.py b/spyvm/constants.py
--- a/spyvm/constants.py
+++ b/spyvm/constants.py
@@ -146,7 +146,7 @@
"timerSemaphore" : SO_TIMER_SEMAPHORE,
}
-LONG_BIT = 32
+from rpython.rlib.rarithmetic import LONG_BIT
TAGGED_MAXINT = 2 ** (LONG_BIT - 2) - 1
TAGGED_MININT = -2 ** (LONG_BIT - 2)
diff --git a/spyvm/display.py b/spyvm/display.py
--- a/spyvm/display.py
+++ b/spyvm/display.py
@@ -1,4 +1,3 @@
-from rpython.rlib.rarithmetic import r_uint
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib.runicode import unicode_encode_utf_8
from rpython.rlib import jit
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -93,6 +93,7 @@
s_new_context = p.s_new_context
def loop_bytecodes(self, s_context, fresh_context=False, may_context_switch=True):
+ assert isinstance(s_context, ContextPartShadow)
if not jit.we_are_jitted() and may_context_switch:
self.quick_check_for_interrupt(s_context)
method = s_context.w_method()
diff --git a/spyvm/model.py b/spyvm/model.py
--- a/spyvm/model.py
+++ b/spyvm/model.py
@@ -15,17 +15,33 @@
that create W_PointersObjects of correct size with attached shadows.
"""
import sys, weakref
+<<<<<<< local
from spyvm import constants, error, version, storage_statistics
from spyvm.version import elidable_for_version, constant_for_version, constant_for_version_arg
+=======
+from spyvm import constants, error, system
+>>>>>>> other
from rpython.rlib import rrandom, objectmodel, jit, signature
+<<<<<<< local
from rpython.rlib.rarithmetic import intmask, r_uint, r_int
from rpython.rlib.debug import make_sure_not_resized
+=======
+from rpython.rlib.rarithmetic import intmask, r_uint32, r_uint, r_int
+>>>>>>> other
from rpython.tool.pairtype import extendabletype
from rpython.rlib.objectmodel import instantiate, compute_hash, import_from_mixin, we_are_translated
from rpython.rtyper.lltypesystem import lltype, rffi
from rsdl import RSDL, RSDL_helper
+
+if system.IS_64BIT:
+ from rpython.rlib.rarithmetic import widen
+else:
+ def widen(x):
+ return x
+
+
class W_Object(object):
"""Root of Squeak model, abstract."""
_attrs_ = [] # no RPython-level instance variables allowed in W_Object
@@ -216,7 +232,7 @@
return isinstance(self.value, int) and self.value < 0x8000
def lshift(self, space, shift):
- from rpython.rlib.rarithmetic import ovfcheck, intmask, r_uint
+ from rpython.rlib.rarithmetic import ovfcheck, intmask
# shift > 0, therefore the highest bit of upperbound is not set,
# i.e. upperbound is positive
upperbound = intmask(r_uint(-1) >> shift)
@@ -359,7 +375,6 @@
return space.wrap_int((self.value >> shift) & mask)
def unwrap_uint(self, space):
- from rpython.rlib.rarithmetic import r_uint
return r_uint(self.value)
def clone(self, space):
@@ -469,11 +484,11 @@
from rpython.rlib.rstruct.ieee import float_pack
r = float_pack(self.value, 8) # C double
if n0 == 0:
- return space.wrap_uint(r_uint(intmask(r >> 32)))
+ return space.wrap_uint(r_uint32(intmask(r >> 32)))
else:
# bounds-check for primitive access is done in the primitive
assert n0 == 1
- return space.wrap_uint(r_uint(intmask(r)))
+ return space.wrap_uint(r_uint32(intmask(r)))
def store(self, space, n0, w_obj):
from rpython.rlib.rstruct.ieee import float_unpack, float_pack
@@ -799,14 +814,19 @@
byte0 = ord(self.getchar(byte_index0))
byte1 = ord(self.getchar(byte_index0 + 1)) << 8
if byte1 & 0x8000 != 0:
- byte1 = intmask(r_uint(0xffff0000) | r_uint(byte1))
+ byte1 = intmask(widen(r_uint32(0xffff0000)) | widen(r_uint32(byte1)))
return space.wrap_int(byte1 | byte0)
def short_atput0(self, space, index0, w_value):
from rpython.rlib.rarithmetic import int_between
i_value = space.unwrap_int(w_value)
- if not int_between(-0x8000, i_value, 0x8000):
- raise error.PrimitiveFailedError
+ if constants.LONG_BIT == 64:
+ if (not int_between(0, i_value, 0x8000) and
+ not int_between(0, i_value ^ (0xffffffff), 0x8000)):
+ raise error.PrimitiveFailedError
+ else:
+ if not int_between(-0x8000, i_value, 0x8000):
+ raise error.PrimitiveFailedError
byte_index0 = index0 * 2
byte0 = i_value & 0xff
byte1 = (i_value & 0xff00) >> 8
@@ -938,20 +958,25 @@
else:
short = (word >> 16) & 0xffff
if short & 0x8000 != 0:
- short = r_uint(0xffff0000) | r_uint(short)
+ short = widen(r_uint32(0xffff0000)) | short
return space.wrap_int(intmask(short))
def short_atput0(self, space, index0, w_value):
from rpython.rlib.rarithmetic import int_between
i_value = space.unwrap_int(w_value)
- if not int_between(-0x8000, i_value, 0x8000):
- raise error.PrimitiveFailedError
+ if constants.LONG_BIT == 64:
+ if (not int_between(0, i_value, 0x8000) and
+ not int_between(0, i_value ^ (0xffffffff), 0x8000)):
+ raise error.PrimitiveFailedError
+ else:
+ if not int_between(-0x8000, i_value, 0x8000):
+ raise error.PrimitiveFailedError
word_index0 = index0 / 2
- word = intmask(self.getword(word_index0))
+ word = intmask(r_uint32(self.getword(word_index0)))
if index0 % 2 == 0:
- word = intmask(r_uint(word) & r_uint(0xffff0000)) | (i_value & 0xffff)
+ word = intmask(widen(r_uint32(word)) & widen(r_uint32(0xffff0000))) | (i_value & 0xffff)
else:
- word = (i_value << 16) | (word & 0xffff)
+ word = intmask(r_uint32((i_value << 16) | (word & 0xffff)))
value = r_uint(word)
self.setword(word_index0, value)
@@ -1020,11 +1045,16 @@
class W_DisplayBitmap(W_AbstractObjectWithClassReference):
_attrs_ = ['pixelbuffer', '_realsize', '_real_depth_buffer', 'display', '_depth']
+<<<<<<< local
_immutable_fields_ = ['_realsize', 'display', '_depth']
repr_classname = "W_DisplayBitmap"
pixelbuffer = None
+=======
+ _immutable_fields_ = ['_realsize', 'display', '_depth', '_real_depth_buffer']
+
+>>>>>>> other
@staticmethod
def create(space, w_class, size, depth, display):
if depth < 8:
@@ -1041,7 +1071,7 @@
def __init__(self, space, w_class, size, depth, display):
W_AbstractObjectWithClassReference.__init__(self, space, w_class)
- self._real_depth_buffer = lltype.malloc(rffi.CArray(rffi.UINT), size, flavor='raw')
+ self._real_depth_buffer = [r_uint(0)] * size
self._realsize = size
self.display = display
self._depth = depth
@@ -1052,7 +1082,7 @@
def atput0(self, space, index0, w_value):
word = space.unwrap_uint(w_value)
- self.setword(index0, word)
+ self.setword(index0, r_uint(word))
def flush_to_screen(self):
self.display.flip()
@@ -1077,7 +1107,7 @@
def setword(self, n, word):
self._real_depth_buffer[n] = word
- self.display.get_pixelbuffer()[n] = word
+ self.display.get_pixelbuffer()[n] = r_uint32(word)
def is_array_object(self):
return True
@@ -1116,14 +1146,14 @@
((msb & mask) << 11)
)
- self.display.get_pixelbuffer()[n] = r_uint(lsb | (msb << 16))
+ self.display.get_pixelbuffer()[n] = r_uint32(lsb | (msb << 16))
class W_8BitDisplayBitmap(W_DisplayBitmap):
repr_classname = "W_8BitDisplayBitmap"
def setword(self, n, word):
self._real_depth_buffer[n] = word
- self.display.get_pixelbuffer()[n] = r_uint(
+ self.display.get_pixelbuffer()[n] = r_uint32(
(word >> 24) |
((word >> 8) & 0x0000ff00) |
((word << 8) & 0x00ff0000) |
@@ -1136,7 +1166,7 @@
@jit.unroll_safe
def setword(self, n, word):
self._real_depth_buffer[n] = word
- word = r_uint(word)
+ nWord = r_uint(word)
pos = self.compute_pos(n)
assert self._depth <= 4
rshift = 32 - self._depth
@@ -1145,10 +1175,10 @@
return
mapword = r_uint(0)
for i in xrange(4):
- pixel = r_uint(word) >> rshift
+ pixel = r_uint(nWord) >> rshift
mapword |= (r_uint(pixel) << (i * 8))
- word <<= self._depth
- self.display.get_pixelbuffer()[pos] = mapword
+ nWord <<= self._depth
+ self.display.get_pixelbuffer()[pos] = r_uint32(mapword)
pos += 1
def compute_pos(self, n):
diff --git a/spyvm/objspace.py b/spyvm/objspace.py
--- a/spyvm/objspace.py
+++ b/spyvm/objspace.py
@@ -1,6 +1,10 @@
import os
+<<<<<<< local
from spyvm import constants, model, shadow, wrapper, version
+=======
+from spyvm import constants, model, shadow, wrapper, system
+>>>>>>> other
from spyvm.error import UnwrappingError, WrappingError, PrimitiveFailedError
from rpython.rlib import jit, rpath
from rpython.rlib.objectmodel import instantiate, specialize
@@ -23,7 +27,7 @@
self.make_bootstrap_objects()
def find_executable(self, executable):
- if os.sep in executable or (os.name == "nt" and ":" in executable):
+ if os.sep in executable or (system.IS_WINDOWS and ":" in executable):
return executable
path = os.environ.get("PATH")
if path:
@@ -107,9 +111,8 @@
# methods for wrapping and unwrapping stuff
def wrap_int(self, val):
- from spyvm import constants
- assert isinstance(val, int)
- # we don't do tagging
+ if not isinstance(val, int):
+ raise WrappingError
return model.W_SmallInteger(val)
def wrap_uint(self, val):
diff --git a/spyvm/plugins/bitblt.py b/spyvm/plugins/bitblt.py
--- a/spyvm/plugins/bitblt.py
+++ b/spyvm/plugins/bitblt.py
@@ -17,7 +17,7 @@
raise PrimitiveFailedError("BitBlt primitive not called in BitBlt object!")
# only allow combinationRules 0-41
- combinationRule = interp.space.unwrap_positive_32bit_int(w_rcvr.fetch(interp.space, 3))
+ combinationRule = interp.space.unwrap_int(w_rcvr.fetch(interp.space, 3))
if combinationRule > 41:
raise PrimitiveFailedError("Missing combinationRule %d" % combinationRule)
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -5,7 +5,7 @@
from spyvm import model, shadow
from spyvm import constants, display
from spyvm.error import PrimitiveFailedError, \
- PrimitiveNotYetWrittenError
+ PrimitiveNotYetWrittenError, WrappingError
from spyvm import wrapper
from rpython.rlib import rarithmetic, rfloat, unroll, jit
@@ -300,9 +300,13 @@
@expose_primitive(FLOAT_TRUNCATED, unwrap_spec=[float])
def func(interp, s_frame, f):
try:
- return interp.space.wrap_int(rarithmetic.ovfcheck_float_to_int(f))
+ integer = rarithmetic.ovfcheck_float_to_int(f)
except OverflowError:
raise PrimitiveFailedError
+ try:
+ return interp.space.wrap_int(integer) # in 64bit VMs, this may fail
+ except WrappingError:
+ raise PrimitiveFailedError
@expose_primitive(FLOAT_TIMES_TWO_POWER, unwrap_spec=[float, int])
def func(interp, s_frame, rcvr, arg):
@@ -647,17 +651,22 @@
def func(interp, s_frame, argcount, w_method):
from spyvm.interpreter import Return
w_rcvr = s_frame.peek(0)
- try:
- s_frame._sendSelfSelector(interp.image.w_simulateCopyBits, 0, interp)
- except Return:
- w_dest_form = w_rcvr.fetch(interp.space, 0)
- w_display = interp.space.objtable['w_display']
- if w_dest_form.is_same_object(w_display):
- w_bitmap = w_display.fetch(interp.space, 0)
- assert isinstance(w_bitmap, model.W_DisplayBitmap)
- w_bitmap.flush_to_screen()
- return w_rcvr
- except shadow.MethodNotFound:
+ w_display = interp.space.objtable['w_display']
+ if interp.space.unwrap_int(w_display.fetch(interp.space, 3)) == 1:
+ try:
+ s_frame._sendSelfSelector(interp.image.w_simulateCopyBits, 0, interp)
+ except Return:
+ w_dest_form = w_rcvr.fetch(interp.space, 0)
+ if w_dest_form.is_same_object(w_display):
+ w_bitmap = w_display.fetch(interp.space, 0)
+ assert isinstance(w_bitmap, model.W_DisplayBitmap)
+ w_bitmap.flush_to_screen()
+ return w_rcvr
+ except shadow.MethodNotFound:
+ from spyvm.plugins.bitblt import BitBltPlugin
+ BitBltPlugin.call("primitiveCopyBits", interp, s_frame, argcount, s_method)
+ return w_rcvr
+ else:
from spyvm.plugins.bitblt import BitBltPlugin
BitBltPlugin.call("primitiveCopyBits", interp, s_frame, argcount, w_method)
return w_rcvr
@@ -872,6 +881,15 @@
w_rcvr.w_class = w_arg_class
+
+if constants.LONG_BIT == 32:
+ def callIProxy(signature, interp, s_frame, argcount, s_method):
+ from spyvm.interpreter_proxy import IProxy
+ return IProxy.call(signature, interp, s_frame, argcount, s_method)
+else:
+ def callIProxy(signature, interp, s_frame, argcount, s_method):
+ raise PrimitiveFailedError
+
@expose_primitive(EXTERNAL_CALL, clean_stack=False, no_result=True, compiled_method=True)
def func(interp, s_frame, argcount, w_method):
space = interp.space
@@ -898,8 +916,12 @@
from spyvm.plugins.vmdebugging import DebuggingPlugin
return DebuggingPlugin.call(signature[1], interp, s_frame, argcount, w_method)
else:
+<<<<<<< local
from spyvm.interpreter_proxy import IProxy
return IProxy.call(signature, interp, s_frame, argcount, w_method)
+=======
+ return callIProxy(signature, interp, s_frame, argcount, s_method)
+>>>>>>> other
raise PrimitiveFailedError
@expose_primitive(COMPILED_METHOD_FLUSH_CACHE, unwrap_spec=[object])
@@ -1073,7 +1095,7 @@
sec_since_epoch = rarithmetic.r_uint(time.time())
# XXX: overflow check necessary?
sec_since_1901 = sec_since_epoch + secs_between_1901_and_1970
- return interp.space.wrap_uint(sec_since_1901)
+ return interp.space.wrap_uint(rarithmetic.r_uint(sec_since_1901))
#____________________________________________________________________________
@@ -1117,7 +1139,7 @@
w_arg.setchar(i, chr(new_value))
elif isinstance(w_arg, model.W_WordsObject) or isinstance(w_arg, model.W_DisplayBitmap):
for i in xrange(w_arg.size()):
- w_arg.setword(i, new_value)
+ w_arg.setword(i, rarithmetic.r_uint(new_value))
else:
raise PrimitiveFailedError
return w_arg
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -692,7 +692,7 @@
# === Sender ===
def store_s_sender(self, s_sender, raiseError=True):
- assert s_sender is jit.vref_None or isinstance(s_sender, jit.DirectVRef)
+ # assert s_sender is jit.vref_None or isinstance(s_sender, jit.DirectVRef)
self._s_sender = s_sender
if raiseError:
raise error.SenderChainManipulation(self)
@@ -700,12 +700,12 @@
def restore_s_sender(self, s_direct):
if self._s_sender is not jit.vref_None:
# virtual sender wasn't already cleared by e.g. mark_returned
- self._s_sender = s_direct
+ self._s_sender = jit.non_virtual_ref(s_direct)
def w_sender(self):
- if self._s_sender is None:
+ if self._s_sender is jit.vref_None:
return self.space.w_nil
- return self._s_sender.w_self()
+ return self.s_sender().w_self()
def s_sender(self):
return self._s_sender()
diff --git a/spyvm/squeakimage.py b/spyvm/squeakimage.py
--- a/spyvm/squeakimage.py
+++ b/spyvm/squeakimage.py
@@ -386,12 +386,20 @@
self.startup_time = time.time()
def run_spy_hacks(self, space):
+<<<<<<< local
pass
# w_display = space.objtable["w_display"]
# if w_display is not None and not w_display.is_nil(space):
# if space.unwrap_int(w_display.fetch(space, 3)) < 8:
# # non-native indexed color depth not well supported
# w_display.store(space, 3, space.wrap_int(8))
+=======
+ if constants.LONG_BIT == 64:
+ w_display = space.objtable["w_display"]
+ if w_display is not None and w_display is not space.w_nil:
+ if space.unwrap_int(w_display.fetch(space, 3)) < 32:
+ w_display.store(space, 3, space.wrap_int(32))
+>>>>>>> other
def find_symbol(self, space, reader, symbol):
w_dnu = self.special(constants.SO_DOES_NOT_UNDERSTAND)
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -6,9 +6,12 @@
from rpython.rlib import jit, rpath
from spyvm import model, interpreter, squeakimage, objspace, wrapper,\
+<<<<<<< local
error, shadow, storage_statistics, constants
+=======
+ error, shadow, system
+>>>>>>> other
from spyvm.tool.analyseimage import create_image
-from spyvm.interpreter_proxy import VirtualMachine
def print_result(w_result):
# This will also print contents of strings/symbols/numbers
@@ -253,6 +256,9 @@
# driver.config.translation.gc = "stmgc"
# driver.config.translation.gcrootfinder = "stm"
from rpython.rlib import rgc
+ driver.exe_name = "rsqueakvm"
+ if system.IS_64BIT:
+ driver.exe_name += "-64"
if hasattr(rgc, "stm_is_enabled"):
driver.config.translation.stm = True
driver.config.translation.thread = True
From noreply at buildbot.pypy.org Wed Jul 9 16:18:19 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 9 Jul 2014 16:18:19 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: first version to run
Squeak4.5 again
Message-ID: <20140709141819.284861C33F5@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r871:ed6b708b05a3
Date: 2014-07-09 15:37 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/ed6b708b05a3/
Log: first version to run Squeak4.5 again
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -26,7 +26,7 @@
_immutable_fields_ = ["space", "image", "image_name",
"max_stack_depth", "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
-
+
jit_driver = jit.JitDriver(
greens=['pc', 'self', 'method'],
reds=['s_context'],
@@ -38,7 +38,7 @@
trace=False, evented=True, interrupts=True,
max_stack_depth=constants.MAX_LOOP_DEPTH):
import time
-
+
# === Initialize immutable variables
self.space = space
self.image = image
@@ -54,7 +54,7 @@
self.interrupt_counter_size = int(os.environ["SPY_ICS"])
except KeyError:
self.interrupt_counter_size = constants.INTERRUPT_COUNTER_SIZE
-
+
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
self.current_stack_depth = 0
@@ -90,7 +90,7 @@
print "====== Switched process from: %s" % s_new_context.short_str()
print "====== to: %s " % p.s_new_context.short_str()
s_new_context = p.s_new_context
-
+
def loop_bytecodes(self, s_context, may_context_switch=True):
old_pc = 0
if not jit.we_are_jitted() and may_context_switch:
@@ -117,7 +117,7 @@
raise nlr
else:
s_context.push(nlr.value)
-
+
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
def stack_frame(self, s_frame, s_sender, may_context_switch=True):
@@ -126,14 +126,14 @@
# Enter the context - store a virtual reference back to the sender
# Non-fresh contexts can happen, e.g. when activating a stored BlockContext.
# The same frame object must not pass through here recursively!
- if s_frame.is_fresh():
+ if s_frame.is_fresh() and s_sender is not None:
s_frame.virtual_sender = jit.virtual_ref(s_sender)
-
+
self.current_stack_depth += 1
if self.max_stack_depth > 0:
if self.current_stack_depth >= self.max_stack_depth:
raise StackOverflow(s_frame)
-
+
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
finally:
@@ -142,7 +142,7 @@
# it is still there, which can happen in case of ProcessSwitch or StackOverflow;
# in case of a Return, this will already be handled while unwinding the stack.
s_frame.finish_virtual_sender()
-
+
def step(self, context):
bytecode = context.fetch_next_bytecode()
for entry in UNROLLING_BYTECODE_RANGES:
@@ -155,9 +155,9 @@
if start <= bytecode <= stop:
return getattr(context, methname)(self, bytecode)
assert 0, "unreachable"
-
+
# ============== Methods for handling user interrupts ==============
-
+
def jitted_check_for_interrupt(self, s_frame):
if not self.interrupts:
return
@@ -168,7 +168,7 @@
decr_by = int(trace_length // 100)
decr_by = max(decr_by, 1)
self.quick_check_for_interrupt(s_frame, decr_by)
-
+
def quick_check_for_interrupt(self, s_frame, dec=1):
if not self.interrupts:
return
@@ -204,7 +204,7 @@
return intmask(int((time.time() - self.startup_time) * 1000) & constants.TAGGED_MASK)
# ============== Convenience methods for executing code ==============
-
+
def interpret_toplevel(self, w_frame):
try:
self.loop(w_frame)
@@ -215,7 +215,7 @@
s_frame = self.create_toplevel_context(w_receiver, selector, *w_arguments)
self.interrupt_check_counter = self.interrupt_counter_size
return self.interpret_toplevel(s_frame.w_self())
-
+
def create_toplevel_context(self, w_receiver, selector, *w_arguments):
if isinstance(selector, str):
if selector == "asSymbol":
@@ -225,7 +225,7 @@
"asSymbol")
else:
w_selector = selector
-
+
w_method = model.W_CompiledMethod(self.space, header=512)
w_method.literalatput0(self.space, 1, w_selector)
assert len(w_arguments) <= 7
@@ -235,7 +235,7 @@
s_frame.push(w_receiver)
s_frame.push_all(list(w_arguments))
return s_frame
-
+
def padding(self, symbol=' '):
return symbol * self.current_stack_depth
@@ -265,11 +265,26 @@
class ProcessSwitch(ContextSwitchException):
"""This causes the interpreter to switch the executed context."""
+
+import rpython.rlib.unroll
+if hasattr(unroll, "unrolling_zero"):
+ unrolling_zero = unroll.unrolling_zero
+else:
+ class unrolling_int(int, unroll.SpecTag):
+ def __add__(self, other):
+ return unrolling_int(int.__add__(self, other))
+ __radd__ = __add__
+ def __sub__(self, other):
+ return unrolling_int(int.__sub__(self, other))
+ def __rsub__(self, other):
+ return unrolling_int(int.__rsub__(self, other))
+ unrolling_zero = unrolling_int(0)
+
+
# This is a decorator for bytecode implementation methods.
# parameter_bytes=N means N additional bytes are fetched as parameters.
def bytecode_implementation(parameter_bytes=0):
def bytecode_implementation_decorator(actual_implementation_method):
- from rpython.rlib.unroll import unrolling_zero
@jit.unroll_safe
def bytecode_implementation_wrapper(self, interp, current_bytecode):
parameters = ()
@@ -345,9 +360,9 @@
# __extend__ adds new methods to the ContextPartShadow class
class __extend__(ContextPartShadow):
-
+
# ====== Push/Pop bytecodes ======
-
+
@bytecode_implementation()
def pushReceiverVariableBytecode(self, interp, current_bytecode):
index = current_bytecode & 15
@@ -426,7 +441,7 @@
@bytecode_implementation()
def popStackBytecode(self, interp, current_bytecode):
self.pop()
-
+
@bytecode_implementation(parameter_bytes=1)
def pushNewArrayBytecode(self, interp, current_bytecode, descriptor):
arraySize, popIntoArray = splitter[7, 1](descriptor)
@@ -436,9 +451,9 @@
else:
newArray = interp.space.w_Array.as_class_get_shadow(interp.space).new(arraySize)
self.push(newArray)
-
+
# ====== Extended Push/Pop bytecodes ======
-
+
def _extendedVariableTypeAndIndex(self, descriptor):
return ((descriptor >> 6) & 3), (descriptor & 63)
@@ -474,16 +489,16 @@
@bytecode_implementation(parameter_bytes=1)
def extendedStoreBytecode(self, interp, current_bytecode, descriptor):
return self._extendedStoreBytecode(interp, current_bytecode, descriptor)
-
+
@bytecode_implementation(parameter_bytes=1)
def extendedStoreAndPopBytecode(self, interp, current_bytecode, descriptor):
self._extendedStoreBytecode(interp, current_bytecode, descriptor)
self.pop()
-
+
def _extract_index_and_temps(self, index_in_array, index_of_array):
w_indirectTemps = self.gettemp(index_of_array)
return index_in_array, w_indirectTemps
-
+
@bytecode_implementation(parameter_bytes=2)
def pushRemoteTempLongBytecode(self, interp, current_bytecode, index_in_array, index_of_array):
index_in_array, w_indirectTemps = self._extract_index_and_temps(index_in_array, index_of_array)
@@ -521,7 +536,7 @@
copiedValues: copiedValues).
self jump: blockSize
"""
-
+
space = self.space
numArgs, numCopied = splitter[4, 4](descriptor)
blockSize = (j << 8) | i
@@ -530,7 +545,7 @@
self.pop_and_return_n(numCopied))
self.push(w_closure)
self._jump(blockSize)
-
+
# ====== Helpers for send/return bytecodes ======
def _sendSelfSelector(self, w_selector, argcount, interp):
@@ -552,7 +567,7 @@
w_method = receiverclassshadow.lookup(w_selector)
except MethodNotFound:
return self._doesNotUnderstand(w_selector, argcount, interp, receiver)
-
+
code = w_method.primitive()
if code:
if w_arguments:
@@ -576,21 +591,21 @@
def _sendSelfSelectorSpecial(self, selector, numargs, interp):
w_selector = self.space.get_special_selector(selector)
return self._sendSelfSelector(w_selector, numargs, interp)
-
+
def _sendSpecialSelector(self, interp, receiver, special_selector, w_args=[]):
w_special_selector = self.space.objtable["w_" + special_selector]
s_class = receiver.class_shadow(self.space)
w_method = s_class.lookup(w_special_selector)
s_frame = w_method.create_frame(interp.space, receiver, w_args)
-
+
# ######################################################################
if interp.trace:
print '%s %s %s: #%s' % (interp.padding('#'), special_selector, s_frame.short_str(), w_args)
if not objectmodel.we_are_translated():
import pdb; pdb.set_trace()
-
+
return interp.stack_frame(s_frame, self)
-
+
def _doesNotUnderstand(self, w_selector, argcount, interp, receiver):
arguments = self.pop_and_return_n(argcount)
w_message_class = self.space.classtable["w_Message"]
@@ -600,7 +615,7 @@
w_message.store(self.space, 0, w_selector)
w_message.store(self.space, 1, self.space.wrap_list(arguments))
self.pop() # The receiver, already known.
-
+
try:
return self._sendSpecialSelector(interp, receiver, "doesNotUnderstand", [w_message])
except MethodNotFound:
@@ -609,10 +624,10 @@
assert isinstance(s_class, ClassShadow)
print "Missing doesNotUnderstand in hierarchy of %s" % s_class.getname()
raise
-
+
def _mustBeBoolean(self, interp, receiver):
return self._sendSpecialSelector(interp, receiver, "mustBeBoolean")
-
+
def _call_primitive(self, code, interp, argcount, w_method, w_selector):
# ##################################################################
if interp.trace:
@@ -632,11 +647,11 @@
def _return(self, return_value, interp, s_return_to):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
-
+
# ##################################################################
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
-
+
if s_return_to is None:
# This should never happen while executing a normal image.
raise ReturnFromTopLevel(return_value)
@@ -733,7 +748,7 @@
return self._sendSelfSelector(w_selector, argcount, interp)
# ====== Misc ======
-
+
def _activate_unwind_context(self, interp):
# TODO put the constant somewhere else.
# Primitive 198 is used in BlockClosure >> ensure:
@@ -751,11 +766,11 @@
raise nlr
finally:
self.mark_returned()
-
+
@bytecode_implementation()
def unknownBytecode(self, interp, current_bytecode):
raise MissingBytecode("unknownBytecode")
-
+
@bytecode_implementation()
def experimentalBytecode(self, interp, current_bytecode):
raise MissingBytecode("experimentalBytecode")
@@ -772,7 +787,7 @@
else:
w_alternative = interp.space.w_true
w_expected = interp.space.w_false
-
+
# Don't check the class, just compare with only two Boolean instances.
w_bool = self.pop()
if w_expected.is_same_object(w_bool):
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1356,7 +1356,7 @@
def func(interp, s_frame, w_rcvr, w_selector, w_arguments):
from spyvm.shadow import MethodNotFound
s_frame.pop_n(2) # removing our arguments
-
+
return s_frame._sendSelector(w_selector, len(w_arguments), interp, w_rcvr,
w_rcvr.class_shadow(interp.space), w_arguments=w_arguments)
@@ -1385,17 +1385,15 @@
@expose_primitive(RESUME, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- import pdb; pdb.set_trace()
assert_class(interp, w_rcvr, interp.space.w_Process)
wrapper.ProcessWrapper(interp.space, w_rcvr).resume(s_frame)
@expose_primitive(SUSPEND, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- import pdb; pdb.set_trace()
assert_class(interp, w_rcvr, interp.space.w_Process)
wrapper.ProcessWrapper(interp.space, w_rcvr).suspend(s_frame)
-
-
+
+
@expose_primitive(FLUSH_CACHE, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -20,7 +20,7 @@
_immutable_fields_ = ['space']
provides_getname = False
repr_classname = "AbstractShadow"
-
+
def __init__(self, space, w_self):
self.space = space
assert w_self is None or isinstance(w_self, model.W_PointersObject)
@@ -34,19 +34,19 @@
return "<%s %s>" % (self.repr_classname, self.getname())
else:
return "<%s>" % self.repr_classname
-
+
def fetch(self, n0):
raise NotImplementedError("Abstract class")
def store(self, n0, w_value):
raise NotImplementedError("Abstract class")
def size(self):
raise NotImplementedError("Abstract class")
-
+
def attach_shadow(self): pass
-
+
def copy_field_from(self, n0, other_shadow):
self.store(n0, other_shadow.fetch(n0))
-
+
# This can be overwritten to change the order of initialization.
def copy_from(self, other_shadow):
assert self.size() == other_shadow.size()
@@ -98,24 +98,24 @@
# Class must provide: wrap, unwrap, nil_value, is_nil_value, wrapper_class
_attrs_ = ['storage']
_immutable_fields_ = ['storage']
-
+
def __init__(self, space, w_self, size):
AbstractStorageShadow.__init__(self, space, w_self, size)
self.storage = [self.nil_value] * size
-
+
def size(self):
return len(self.storage)
-
+
def generalized_strategy_for(self, w_val):
return ListStorageShadow
-
+
def fetch(self, n0):
val = self.storage[n0]
if self.is_nil_value(val):
return self.space.w_nil
else:
return self.wrap(self.space, val)
-
+
def do_store(self, n0, w_val):
if w_val.is_nil(self.space):
self.storage[n0] = self.nil_value
@@ -134,7 +134,7 @@
nil_value = constants.MAXINT
wrapper_class = model.W_SmallInteger
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(SmallIntegerOrNilStorageShadow, space, w_val)
@@ -153,7 +153,7 @@
nil_value = sys.float_info.max
wrapper_class = model.W_Float
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(FloatOrNilStorageShadow, space, w_val)
@@ -193,17 +193,17 @@
if float_can_handle and not FloatOrNilStorageShadow.static_can_contain(space, w_obj):
float_can_handle = False
specialized_strategies = specialized_strategies - 1
-
+
if specialized_strategies <= 0:
return ListStorageShadow
-
+
if all_nil_can_handle:
return AllNilStorageShadow
if small_int_can_handle:
return SmallIntegerOrNilStorageShadow
if float_can_handle:
return FloatOrNilStorageShadow
-
+
# If this happens, please look for a bug in the code above.
assert False, "No strategy could be found for list..."
@@ -223,7 +223,7 @@
_immutable_fields_ = ['storage']
repr_classname = "ListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [self.space.w_nil] * size
def fetch(self, n0):
@@ -236,7 +236,7 @@
_immutable_fields_ = ['storage']
repr_classname = "WeakListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [weakref.ref(self.space.w_nil)] * size
def fetch(self, n0):
@@ -245,14 +245,14 @@
def store(self, n0, w_value):
assert w_value is not None
self.storage[n0] = weakref.ref(w_value)
-
+
class AbstractCachingShadow(ListStorageShadow):
_immutable_fields_ = ['version?']
_attrs_ = ['version']
repr_classname = "AbstractCachingShadow"
import_from_mixin(version.VersionMixin)
version = None
-
+
def __init__(self, space, w_self):
ListStorageShadow.__init__(self, space, w_self, 0)
self.changed()
@@ -284,7 +284,7 @@
_s_superclass = _s_methoddict = None
provides_getname = True
repr_classname = "ClassShadow"
-
+
def __init__(self, space, w_self):
self.subclass_s = {}
AbstractCachingShadow.__init__(self, space, w_self)
@@ -305,7 +305,7 @@
# In Slang the value is read directly as a boxed integer, so that
# the code gets a "pointer" whose bits are set as above, but
# shifted one bit to the left and with the lowest bit set to 1.
-
+
# Compute the instance size (really the size, not the number of bytes)
instsize_lo = (classformat >> 1) & 0x3F
instsize_hi = (classformat >> (9 + 1)) & 0xC0
@@ -313,10 +313,10 @@
# decode the instSpec
format = (classformat >> 7) & 15
self.instance_varsized = format >= 2
-
+
# In case of raised exception below.
self.changed()
-
+
if format < 4:
self.instance_kind = POINTERS
elif format == 4:
@@ -356,7 +356,7 @@
return
# Some of the special info has changed -> Switch version.
self.changed()
-
+
def store_w_superclass(self, w_class):
superclass = self._s_superclass
if w_class is None or w_class.is_nil(self.space):
@@ -383,24 +383,24 @@
return
if methoddict: methoddict.s_class = None
self.store_s_methoddict(s_new_methoddict)
-
+
def store_s_methoddict(self, s_methoddict):
s_methoddict.s_class = self
s_methoddict.sync_method_cache()
self._s_methoddict = s_methoddict
-
+
def attach_s_class(self, s_other):
self.subclass_s[s_other] = None
def detach_s_class(self, s_other):
del self.subclass_s[s_other]
-
+
def store_w_name(self, w_name):
if isinstance(w_name, model.W_BytesObject):
self.name = w_name.as_string()
else:
self.name = None
-
+
@jit.unroll_safe
def flush_method_caches(self):
look_in_shadow = self
@@ -497,7 +497,7 @@
self.version = version
for s_class in self.subclass_s:
s_class.superclass_changed(version)
-
+
# _______________________________________________________________
# Methods used only in testing
@@ -532,7 +532,7 @@
_immutable_fields_ = ['invalid?', 's_class']
_attrs_ = ['methoddict', 'invalid', 's_class']
repr_classname = "MethodDictionaryShadow"
-
+
def __init__(self, space, w_self):
self.invalid = True
self.s_class = None
@@ -541,7 +541,7 @@
def update(self):
self.sync_method_cache()
-
+
def find_selector(self, w_selector):
if self.invalid:
return None # we may be invalid if Smalltalk code did not call flushCache
@@ -593,7 +593,7 @@
class AbstractRedirectingShadow(AbstractShadow):
_attrs_ = ['_w_self_size']
repr_classname = "AbstractRedirectingShadow"
-
+
def __init__(self, space, w_self):
AbstractShadow.__init__(self, space, w_self)
if w_self is not None:
@@ -611,7 +611,7 @@
'_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
repr_classname = "ContextPartShadow"
-
+
_virtualizable_ = [
'direct_sender', 'virtual_sender',
"_pc", "_temps_and_stack[*]", "_stack_ptr",
@@ -620,7 +620,7 @@
# ______________________________________________________________________
# Initialization
-
+
def __init__(self, space, w_self):
self.direct_sender = None
self.virtual_sender = jit.vref_None
@@ -632,26 +632,26 @@
AbstractRedirectingShadow.copy_field_from(self, n0, other_shadow)
except error.SenderChainManipulation, e:
assert e.s_context == self
-
+
def copy_from(self, other_shadow):
# Some fields have to be initialized before the rest, to ensure correct initialization.
privileged_fields = self.fields_to_copy_first()
for n0 in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
# Now the temp size will be known.
self.init_stack_and_temps()
-
+
for n0 in range(self.size()):
if n0 not in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
def fields_to_copy_first(self):
return []
-
+
# ______________________________________________________________________
# Accessing object fields
-
+
def fetch(self, n0):
if n0 == constants.CTXPART_SENDER_INDEX:
return self.w_sender()
@@ -690,45 +690,45 @@
else:
# XXX later should store tail out of known context part as well
raise error.WrapperException("Index in context out of bounds")
-
+
# === Sender ===
# There are two fields for the sender (virtual and direct). Only one of them is can be set at a time.
# As long as the frame object is virtualized, using the virtual reference should increase performance.
# As soon as a frame object is forced to the heap, the direct reference must be used.
-
+
def is_fresh(self):
return self.direct_sender is None and self.virtual_sender is jit.vref_None
-
+
def finish_virtual_sender(self, save_direct_sender=True):
if self.virtual_sender is not jit.vref_None:
- sender = self.virtual_sender()
+ sender = self.virtual_sender() # xxx: check if we can move this down
jit.virtual_ref_finish(self.virtual_sender, sender)
self.virtual_sender = jit.vref_None
if save_direct_sender:
self.direct_sender = sender
-
+
def store_s_sender(self, s_sender, raise_error=True):
# If we have a virtual back reference, we must finish it before storing the direct reference.
- self.finish_virtual_sender(save_direct_sender=False)
+ # self.finish_virtual_sender(save_direct_sender=False)
self.direct_sender = s_sender
if raise_error:
raise error.SenderChainManipulation(self)
-
+
def w_sender(self):
sender = self.s_sender()
if sender is None:
return self.space.w_nil
return sender.w_self()
-
+
def s_sender(self):
if self.direct_sender:
return self.direct_sender
else:
result = self.virtual_sender()
return result
-
+
# === Stack Pointer ===
-
+
def unwrap_store_stackpointer(self, w_sp1):
# the stackpointer in the W_PointersObject starts counting at the
# tempframe start
@@ -747,12 +747,12 @@
def stackdepth(self):
return rarithmetic.intmask(self._stack_ptr)
-
+
def wrap_stackpointer(self):
return self.space.wrap_int(self.stackdepth())
# === Program Counter ===
-
+
def store_unwrap_pc(self, w_pc):
if w_pc.is_nil(self.space):
self.store_pc(-1)
@@ -777,9 +777,9 @@
def store_pc(self, newpc):
assert newpc >= -1
self._pc = newpc
-
+
# === Subclassed accessors ===
-
+
def s_home(self):
raise NotImplementedError()
@@ -788,18 +788,18 @@
def w_receiver(self):
raise NotImplementedError()
-
+
def w_method(self):
raise NotImplementedError()
-
+
def tempsize(self):
raise NotImplementedError()
-
+
def is_closure_context(self):
raise NotImplementedError()
-
+
# === Other properties of Contexts ===
-
+
def mark_returned(self):
self.store_pc(-1)
self.store_s_sender(None, raise_error=False)
@@ -809,25 +809,25 @@
def external_stackpointer(self):
return self.stackdepth() + self.stackstart()
-
+
def stackend(self):
# XXX this is incorrect when there is subclassing
return self._w_self_size
-
+
def fetch_next_bytecode(self):
pc = jit.promote(self._pc)
assert pc >= 0
self._pc += 1
return self.fetch_bytecode(pc)
-
+
def fetch_bytecode(self, pc):
bytecode = self.w_method().fetch_bytecode(pc)
return ord(bytecode)
-
+
# ______________________________________________________________________
# Temporary Variables
#
- # Every context has it's own stack. BlockContexts share their temps with
+ # Every context has it's own stack. BlockContexts share their temps with
# their home contexts. MethodContexts created from a BlockClosure get their
# temps copied from the closure upon activation. Changes are not propagated back;
# this is handled by the compiler by allocating an extra Array for temps.
@@ -837,7 +837,7 @@
def settemp(self, index, w_value):
raise NotImplementedError()
-
+
# ______________________________________________________________________
# Stack Manipulation
@@ -851,13 +851,13 @@
for i in range(tempsize):
temps_and_stack[i] = self.space.w_nil
self._stack_ptr = rarithmetic.r_uint(tempsize) # we point after the last element
-
+
def stack_get(self, index0):
return self._temps_and_stack[index0]
-
+
def stack_put(self, index0, w_val):
self._temps_and_stack[index0] = w_val
-
+
def stack(self):
"""NOT_RPYTHON""" # purely for testing
return self._temps_and_stack[self.tempsize():self._stack_ptr]
@@ -912,7 +912,7 @@
# ______________________________________________________________________
# Primitive support
-
+
def store_instances_array(self, w_class, match_w):
# used for primitives 77 & 78
self.instances_w[w_class] = match_w
@@ -939,7 +939,7 @@
j += 1
retval += "\n---------------------"
return retval
-
+
def short_str(self):
arg_strings = self.argument_strings()
if len(arg_strings) > 0:
@@ -953,10 +953,10 @@
self.w_receiver().as_repr_string(),
args
)
-
+
def print_stack(self, method=True):
return self.print_padded_stack(method)[1]
-
+
def print_padded_stack(self, method):
padding = ret_str = ''
if self.s_sender() is not None:
@@ -970,9 +970,9 @@
class BlockContextShadow(ContextPartShadow):
_attrs_ = ['_w_home', '_initialip', '_eargc']
repr_classname = "BlockContextShadow"
-
+
# === Initialization ===
-
+
def __init__(self, space, w_self=None, w_home=None, argcnt=0, initialip=0):
self = jit.hint(self, access_directly=True, fresh_virtualizable=True)
creating_w_self = w_self is None
@@ -992,40 +992,40 @@
def fields_to_copy_first(self):
return [ constants.BLKCTX_HOME_INDEX ]
-
+
# === Implemented accessors ===
-
+
def s_home(self):
return self._w_home.as_methodcontext_get_shadow(self.space)
-
+
def stackstart(self):
return constants.BLKCTX_STACK_START
def tempsize(self):
# A blockcontext doesn't have any temps
return 0
-
+
def w_receiver(self):
return self.s_home().w_receiver()
-
+
def w_method(self):
retval = self.s_home().w_method()
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def is_closure_context(self):
return True
-
+
# === Temporary variables ===
-
+
def gettemp(self, index):
return self.s_home().gettemp(index)
def settemp(self, index, w_value):
self.s_home().settemp(index, w_value)
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.BLKCTX_HOME_INDEX:
return self._w_home
@@ -1045,11 +1045,11 @@
return self.unwrap_store_eargc(w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_home(self, w_home):
assert isinstance(w_home, model.W_PointersObject)
self._w_home = w_home
-
+
def unwrap_store_initialip(self, w_value):
initialip = self.space.unwrap_int(w_value)
initialip -= 1 + self.w_method().literalsize
@@ -1057,18 +1057,18 @@
def store_initialip(self, initialip):
self._initialip = initialip
-
+
def wrap_initialip(self):
initialip = self.initialip()
initialip += 1 + self.w_method().literalsize
return self.space.wrap_int(initialip)
-
+
def reset_pc(self):
self.store_pc(self.initialip())
-
+
def initialip(self):
return self._initialip
-
+
def unwrap_store_eargc(self, w_value):
self.store_expected_argument_count(self.space.unwrap_int(w_value))
@@ -1082,24 +1082,24 @@
self._eargc = argc
# === Stack Manipulation ===
-
+
def reset_stack(self):
self.pop_n(self.stackdepth())
# === Printing ===
-
+
def argument_strings(self):
return []
-
+
def method_str(self):
return '[] in %s' % self.w_method().get_identifier_string()
class MethodContextShadow(ContextPartShadow):
_attrs_ = ['closure', '_w_receiver', '_w_method']
repr_classname = "MethodContextShadow"
-
+
# === Initialization ===
-
+
@jit.unroll_safe
def __init__(self, space, w_self=None, w_method=None, w_receiver=None,
arguments=[], closure=None, pc=0):
@@ -1108,7 +1108,7 @@
self.store_w_receiver(w_receiver)
self.store_pc(pc)
self.closure = closure
-
+
if w_method:
self.store_w_method(w_method)
# The summand is needed, because we calculate i.a. our stackdepth relative of the size of w_self.
@@ -1117,20 +1117,20 @@
self.init_stack_and_temps()
else:
self._w_method = None
-
+
argc = len(arguments)
for i0 in range(argc):
self.settemp(i0, arguments[i0])
-
+
if closure:
for i0 in range(closure.size()):
self.settemp(i0+argc, closure.at0(i0))
def fields_to_copy_first(self):
return [ constants.MTHDCTX_METHOD, constants.MTHDCTX_CLOSURE_OR_NIL ]
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.MTHDCTX_METHOD:
return self.w_method()
@@ -1164,12 +1164,12 @@
return self.settemp(temp_i, w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_receiver(self, w_receiver):
self._w_receiver = w_receiver
-
+
# === Implemented Accessors ===
-
+
def s_home(self):
if self.is_closure_context():
# this is a context for a blockClosure
@@ -1182,31 +1182,31 @@
return s_outerContext.s_home()
else:
return self
-
+
def stackstart(self):
return constants.MTHDCTX_TEMP_FRAME_START
-
+
def store_w_method(self, w_method):
assert isinstance(w_method, model.W_CompiledMethod)
self._w_method = w_method
def w_receiver(self):
return self._w_receiver
-
+
def w_method(self):
retval = self._w_method
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def tempsize(self):
if not self.is_closure_context():
return self.w_method().tempsize()
else:
return self.closure.tempsize()
-
+
def is_closure_context(self):
return self.closure is not None
-
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
@@ -1223,9 +1223,9 @@
self._w_self = w_self
self._w_self_size = w_self.size()
return w_self
-
+
# === Temporary variables ===
-
+
def gettemp(self, index0):
return self.stack_get(index0)
@@ -1233,7 +1233,7 @@
self.stack_put(index0, w_value)
# === Printing ===
-
+
def argument_strings(self):
argcount = self.w_method().argsize
tempsize = self.w_method().tempsize()
From noreply at buildbot.pypy.org Wed Jul 9 16:18:20 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Wed, 9 Jul 2014 16:18:20 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: only force and store the
sender if the context wasn't returned properly (it had an exception)
Message-ID: <20140709141820.50E171C33F5@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r872:04a55ec5b4d2
Date: 2014-07-09 16:11 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/04a55ec5b4d2/
Log: only force and store the sender if the context wasn't returned
properly (it had an exception)
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -141,7 +141,7 @@
# Cleanly leave the context. This will finish the virtual sender-reference, if
# it is still there, which can happen in case of ProcessSwitch or StackOverflow;
# in case of a Return, this will already be handled while unwinding the stack.
- s_frame.finish_virtual_sender()
+ s_frame.finish_virtual_sender(s_sender)
def step(self, context):
bytecode = context.fetch_next_bytecode()
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -699,13 +699,15 @@
def is_fresh(self):
return self.direct_sender is None and self.virtual_sender is jit.vref_None
- def finish_virtual_sender(self, save_direct_sender=True):
+ def finish_virtual_sender(self, s_sender):
if self.virtual_sender is not jit.vref_None:
- sender = self.virtual_sender() # xxx: check if we can move this down
- jit.virtual_ref_finish(self.virtual_sender, sender)
+ if self.pc() != -1:
+ # stack is unrolling, but this frame was not
+ # marked_returned: it is an escaped frame
+ sender = self.virtual_sender()
+ self.direct_sender = sender
+ jit.virtual_ref_finish(self.virtual_sender, s_sender)
self.virtual_sender = jit.vref_None
- if save_direct_sender:
- self.direct_sender = sender
def store_s_sender(self, s_sender, raise_error=True):
# If we have a virtual back reference, we must finish it before storing the direct reference.
From noreply at buildbot.pypy.org Wed Jul 9 17:18:32 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 17:18:32 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge heads
Message-ID: <20140709151832.7C6571C021D@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72398:3dec80f8412a
Date: 2014-07-09 17:18 +0200
http://bitbucket.org/pypy/pypy/changeset/3dec80f8412a/
Log: merge heads
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py
--- a/pypy/module/__pypy__/__init__.py
+++ b/pypy/module/__pypy__/__init__.py
@@ -73,13 +73,12 @@
'builtinify' : 'interp_magic.builtinify',
'lookup_special' : 'interp_magic.lookup_special',
'do_what_I_mean' : 'interp_magic.do_what_I_mean',
- 'list_strategy' : 'interp_magic.list_strategy',
'validate_fd' : 'interp_magic.validate_fd',
'resizelist_hint' : 'interp_magic.resizelist_hint',
'newlist_hint' : 'interp_magic.newlist_hint',
'add_memory_pressure' : 'interp_magic.add_memory_pressure',
'newdict' : 'interp_dict.newdict',
- 'dictstrategy' : 'interp_dict.dictstrategy',
+ 'strategy' : 'interp_magic.strategy', # dict,set,list
'set_debug' : 'interp_magic.set_debug',
'locals_to_fast' : 'interp_magic.locals_to_fast',
}
diff --git a/pypy/module/__pypy__/interp_dict.py b/pypy/module/__pypy__/interp_dict.py
--- a/pypy/module/__pypy__/interp_dict.py
+++ b/pypy/module/__pypy__/interp_dict.py
@@ -1,7 +1,6 @@
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import unwrap_spec
-from pypy.objspace.std.dictmultiobject import W_DictMultiObject
@unwrap_spec(type=str)
def newdict(space, type):
@@ -31,13 +30,3 @@
return space.newdict(strdict=True)
else:
raise oefmt(space.w_TypeError, "unknown type of dict %s", type)
-
-def dictstrategy(space, w_obj):
- """ dictstrategy(dict)
-
- show the underlaying strategy used by a dict object
- """
- if not isinstance(w_obj, W_DictMultiObject):
- raise OperationError(space.w_TypeError,
- space.wrap("expecting dict object"))
- return space.wrap('%r' % (w_obj.strategy,))
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -2,7 +2,9 @@
from pypy.interpreter.gateway import unwrap_spec
from pypy.interpreter.pyframe import PyFrame
from rpython.rlib.objectmodel import we_are_translated
+from pypy.objspace.std.dictmultiobject import W_DictMultiObject
from pypy.objspace.std.listobject import W_ListObject
+from pypy.objspace.std.setobject import W_BaseSetObject
from pypy.objspace.std.typeobject import MethodCache
from pypy.objspace.std.mapdict import MapAttrCache
from rpython.rlib import rposix, rgc
@@ -70,12 +72,27 @@
def do_what_I_mean(space):
return space.wrap(42)
-def list_strategy(space, w_list):
- if isinstance(w_list, W_ListObject):
- return space.wrap(w_list.strategy._applevel_repr)
+
+def _nameof(cls):
+ return cls.__name__
+_nameof._annspecialcase_ = 'specialize:memo'
+
+def strategy(space, w_obj):
+ """ strategy(dict or list or set)
+
+ Return the underlying strategy currently used by a dict, list or set object
+ """
+ if isinstance(w_obj, W_DictMultiObject):
+ name = _nameof(w_obj.strategy.__class__)
+ elif isinstance(w_obj, W_ListObject):
+ name = _nameof(w_obj.strategy.__class__)
+ elif isinstance(w_obj, W_BaseSetObject):
+ name = _nameof(w_obj.strategy.__class__)
else:
- w_msg = space.wrap("Can only get the list strategy of a list")
- raise OperationError(space.w_TypeError, w_msg)
+ raise OperationError(space.w_TypeError,
+ space.wrap("expecting dict or list or set object"))
+ return space.wrap(name)
+
@unwrap_spec(fd='c_int')
def validate_fd(space, fd):
diff --git a/pypy/module/__pypy__/test/test_special.py b/pypy/module/__pypy__/test/test_special.py
--- a/pypy/module/__pypy__/test/test_special.py
+++ b/pypy/module/__pypy__/test/test_special.py
@@ -46,26 +46,42 @@
assert x == 42
def test_list_strategy(self):
- from __pypy__ import list_strategy
+ from __pypy__ import strategy
l = [1, 2, 3]
- assert list_strategy(l) == "int"
+ assert strategy(l) == "IntegerListStrategy"
l = ["a", "b", "c"]
- assert list_strategy(l) == "bytes"
+ assert strategy(l) == "BytesListStrategy"
l = [u"a", u"b", u"c"]
- assert list_strategy(l) == "unicode"
+ assert strategy(l) == "UnicodeListStrategy"
l = [1.1, 2.2, 3.3]
- assert list_strategy(l) == "float"
+ assert strategy(l) == "FloatListStrategy"
l = range(3)
- assert list_strategy(l) == "simple_range"
+ assert strategy(l) == "SimpleRangeListStrategy"
l = range(1, 2)
- assert list_strategy(l) == "range"
+ assert strategy(l) == "RangeListStrategy"
l = [1, "b", 3]
- assert list_strategy(l) == "object"
+ assert strategy(l) == "ObjectListStrategy"
l = []
- assert list_strategy(l) == "empty"
+ assert strategy(l) == "EmptyListStrategy"
o = 5
- raises(TypeError, list_strategy, 5)
+ raises(TypeError, strategy, 5)
+
+ def test_dict_strategy(self):
+ from __pypy__ import strategy
+
+ d = {}
+ assert strategy(d) == "EmptyDictStrategy"
+ d = {1: None, 5: None}
+ assert strategy(d) == "IntDictStrategy"
+
+ def test_set_strategy(self):
+ from __pypy__ import strategy
+
+ s = set()
+ assert strategy(s) == "EmptySetStrategy"
+ s = set([2, 3, 4])
+ assert strategy(s) == "IntegerSetStrategy"
class AppTestJitFeatures(object):
diff --git a/pypy/objspace/std/listobject.py b/pypy/objspace/std/listobject.py
--- a/pypy/objspace/std/listobject.py
+++ b/pypy/objspace/std/listobject.py
@@ -842,8 +842,6 @@
W_Lists do not switch back to EmptyListStrategy when becoming empty again.
"""
- _applevel_repr = "empty"
-
def __init__(self, space):
ListStrategy.__init__(self, space)
@@ -1102,8 +1100,6 @@
method providing only positive length. The storage is a one element tuple
with positive integer storing length."""
- _applevel_repr = "simple_range"
-
erase, unerase = rerased.new_erasing_pair("simple_range")
erase = staticmethod(erase)
unerase = staticmethod(unerase)
@@ -1176,8 +1172,6 @@
destroying the range (inserting, appending non-ints) the strategy is
switched to IntegerListStrategy."""
- _applevel_repr = "range"
-
erase, unerase = rerased.new_erasing_pair("range")
erase = staticmethod(erase)
unerase = staticmethod(unerase)
@@ -1555,7 +1549,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "object"
def unwrap(self, w_obj):
return w_obj
@@ -1590,7 +1583,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = 0
- _applevel_repr = "int"
def wrap(self, intval):
return self.space.wrap(intval)
@@ -1644,7 +1636,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = 0.0
- _applevel_repr = "float"
def wrap(self, floatval):
return self.space.wrap(floatval)
@@ -1677,7 +1668,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "bytes"
def wrap(self, stringval):
return self.space.wrap(stringval)
@@ -1710,7 +1700,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "unicode"
def wrap(self, stringval):
return self.space.wrap(stringval)
diff --git a/rpython/rtyper/test/test_rclass.py b/rpython/rtyper/test/test_rclass.py
--- a/rpython/rtyper/test/test_rclass.py
+++ b/rpython/rtyper/test/test_rclass.py
@@ -440,6 +440,25 @@
res = self.interpret(f, [3])
assert res == ~0x0200 & 0x3ff
+ def test_class___name__(self):
+ class ACLS(object): pass
+ class Bcls(ACLS): pass
+ class CCls(ACLS): pass
+ def nameof(cls):
+ return cls.__name__
+ nameof._annspecialcase_ = "specialize:memo"
+ def f(i):
+ if i == 1: x = ACLS()
+ elif i == 2: x = Bcls()
+ else: x = CCls()
+ return nameof(x.__class__)
+ res = self.interpret(f, [1])
+ assert ''.join(res.chars) == 'ACLS'
+ res = self.interpret(f, [2])
+ assert ''.join(res.chars) == 'Bcls'
+ res = self.interpret(f, [3])
+ assert ''.join(res.chars) == 'CCls'
+
def test_hash_preservation(self):
from rpython.rlib.objectmodel import current_object_addr_as_int
from rpython.rlib.objectmodel import compute_identity_hash
From noreply at buildbot.pypy.org Wed Jul 9 17:20:21 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 17:20:21 +0200 (CEST)
Subject: [pypy-commit] pypy default: Fix a set-strategy bug:
set-of-ints.update(empty-set) would devolve the
Message-ID: <20140709152021.0FEF71C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72399:cf24a62874df
Date: 2014-07-09 17:19 +0200
http://bitbucket.org/pypy/pypy/changeset/cf24a62874df/
Log: Fix a set-strategy bug: set-of-ints.update(empty-set) would devolve
the set to set-of-objects...
diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py
--- a/pypy/objspace/std/setobject.py
+++ b/pypy/objspace/std/setobject.py
@@ -1181,7 +1181,8 @@
d_other = self.unerase(w_other.sstorage)
d_set.update(d_other)
return
-
+ if w_other.length() == 0:
+ return
w_set.switch_to_object_strategy(self.space)
w_set.update(w_other)
diff --git a/pypy/objspace/std/test/test_setobject.py b/pypy/objspace/std/test/test_setobject.py
--- a/pypy/objspace/std/test/test_setobject.py
+++ b/pypy/objspace/std/test/test_setobject.py
@@ -960,3 +960,10 @@
# did not work before because of an optimization that swaps both
# operands when the first set is larger than the second
assert type(frozenset([1, 2]) & set([2])) is frozenset
+
+ def test_update_bug_strategy(self):
+ from __pypy__ import strategy
+ s = set([1, 2, 3])
+ assert strategy(s) == "IntegerSetStrategy"
+ s.update(set())
+ assert strategy(s) == "IntegerSetStrategy"
From noreply at buildbot.pypy.org Wed Jul 9 17:18:31 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 17:18:31 +0200 (CEST)
Subject: [pypy-commit] pypy default: Unify __pypy__.list_strategy() with
__pypy__.dictstrategy() and make it
Message-ID: <20140709151831.114A41C021D@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72397:65c577d414ca
Date: 2014-07-09 17:08 +0200
http://bitbucket.org/pypy/pypy/changeset/65c577d414ca/
Log: Unify __pypy__.list_strategy() with __pypy__.dictstrategy() and make
it work for sets too.
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py
--- a/pypy/module/__pypy__/__init__.py
+++ b/pypy/module/__pypy__/__init__.py
@@ -73,13 +73,12 @@
'builtinify' : 'interp_magic.builtinify',
'lookup_special' : 'interp_magic.lookup_special',
'do_what_I_mean' : 'interp_magic.do_what_I_mean',
- 'list_strategy' : 'interp_magic.list_strategy',
'validate_fd' : 'interp_magic.validate_fd',
'resizelist_hint' : 'interp_magic.resizelist_hint',
'newlist_hint' : 'interp_magic.newlist_hint',
'add_memory_pressure' : 'interp_magic.add_memory_pressure',
'newdict' : 'interp_dict.newdict',
- 'dictstrategy' : 'interp_dict.dictstrategy',
+ 'strategy' : 'interp_magic.strategy', # dict,set,list
'set_debug' : 'interp_magic.set_debug',
'locals_to_fast' : 'interp_magic.locals_to_fast',
}
diff --git a/pypy/module/__pypy__/interp_dict.py b/pypy/module/__pypy__/interp_dict.py
--- a/pypy/module/__pypy__/interp_dict.py
+++ b/pypy/module/__pypy__/interp_dict.py
@@ -1,7 +1,6 @@
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import unwrap_spec
-from pypy.objspace.std.dictmultiobject import W_DictMultiObject
@unwrap_spec(type=str)
def newdict(space, type):
@@ -31,13 +30,3 @@
return space.newdict(strdict=True)
else:
raise oefmt(space.w_TypeError, "unknown type of dict %s", type)
-
-def dictstrategy(space, w_obj):
- """ dictstrategy(dict)
-
- show the underlaying strategy used by a dict object
- """
- if not isinstance(w_obj, W_DictMultiObject):
- raise OperationError(space.w_TypeError,
- space.wrap("expecting dict object"))
- return space.wrap('%r' % (w_obj.strategy,))
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -2,7 +2,9 @@
from pypy.interpreter.gateway import unwrap_spec
from pypy.interpreter.pyframe import PyFrame
from rpython.rlib.objectmodel import we_are_translated
+from pypy.objspace.std.dictmultiobject import W_DictMultiObject
from pypy.objspace.std.listobject import W_ListObject
+from pypy.objspace.std.setobject import W_BaseSetObject
from pypy.objspace.std.typeobject import MethodCache
from pypy.objspace.std.mapdict import MapAttrCache
from rpython.rlib import rposix, rgc
@@ -70,12 +72,27 @@
def do_what_I_mean(space):
return space.wrap(42)
-def list_strategy(space, w_list):
- if isinstance(w_list, W_ListObject):
- return space.wrap(w_list.strategy._applevel_repr)
+
+def _nameof(cls):
+ return cls.__name__
+_nameof._annspecialcase_ = 'specialize:memo'
+
+def strategy(space, w_obj):
+ """ strategy(dict or list or set)
+
+ Return the underlying strategy currently used by a dict, list or set object
+ """
+ if isinstance(w_obj, W_DictMultiObject):
+ name = _nameof(w_obj.strategy.__class__)
+ elif isinstance(w_obj, W_ListObject):
+ name = _nameof(w_obj.strategy.__class__)
+ elif isinstance(w_obj, W_BaseSetObject):
+ name = _nameof(w_obj.strategy.__class__)
else:
- w_msg = space.wrap("Can only get the list strategy of a list")
- raise OperationError(space.w_TypeError, w_msg)
+ raise OperationError(space.w_TypeError,
+ space.wrap("expecting dict or list or set object"))
+ return space.wrap(name)
+
@unwrap_spec(fd='c_int')
def validate_fd(space, fd):
diff --git a/pypy/module/__pypy__/test/test_special.py b/pypy/module/__pypy__/test/test_special.py
--- a/pypy/module/__pypy__/test/test_special.py
+++ b/pypy/module/__pypy__/test/test_special.py
@@ -46,26 +46,42 @@
assert x == 42
def test_list_strategy(self):
- from __pypy__ import list_strategy
+ from __pypy__ import strategy
l = [1, 2, 3]
- assert list_strategy(l) == "int"
+ assert strategy(l) == "IntegerListStrategy"
l = ["a", "b", "c"]
- assert list_strategy(l) == "bytes"
+ assert strategy(l) == "BytesListStrategy"
l = [u"a", u"b", u"c"]
- assert list_strategy(l) == "unicode"
+ assert strategy(l) == "UnicodeListStrategy"
l = [1.1, 2.2, 3.3]
- assert list_strategy(l) == "float"
+ assert strategy(l) == "FloatListStrategy"
l = range(3)
- assert list_strategy(l) == "simple_range"
+ assert strategy(l) == "SimpleRangeListStrategy"
l = range(1, 2)
- assert list_strategy(l) == "range"
+ assert strategy(l) == "RangeListStrategy"
l = [1, "b", 3]
- assert list_strategy(l) == "object"
+ assert strategy(l) == "ObjectListStrategy"
l = []
- assert list_strategy(l) == "empty"
+ assert strategy(l) == "EmptyListStrategy"
o = 5
- raises(TypeError, list_strategy, 5)
+ raises(TypeError, strategy, 5)
+
+ def test_dict_strategy(self):
+ from __pypy__ import strategy
+
+ d = {}
+ assert strategy(d) == "EmptyDictStrategy"
+ d = {1: None, 5: None}
+ assert strategy(d) == "IntDictStrategy"
+
+ def test_set_strategy(self):
+ from __pypy__ import strategy
+
+ s = set()
+ assert strategy(s) == "EmptySetStrategy"
+ s = set([2, 3, 4])
+ assert strategy(s) == "IntegerSetStrategy"
class AppTestJitFeatures(object):
diff --git a/pypy/objspace/std/listobject.py b/pypy/objspace/std/listobject.py
--- a/pypy/objspace/std/listobject.py
+++ b/pypy/objspace/std/listobject.py
@@ -842,8 +842,6 @@
W_Lists do not switch back to EmptyListStrategy when becoming empty again.
"""
- _applevel_repr = "empty"
-
def __init__(self, space):
ListStrategy.__init__(self, space)
@@ -1102,8 +1100,6 @@
method providing only positive length. The storage is a one element tuple
with positive integer storing length."""
- _applevel_repr = "simple_range"
-
erase, unerase = rerased.new_erasing_pair("simple_range")
erase = staticmethod(erase)
unerase = staticmethod(unerase)
@@ -1176,8 +1172,6 @@
destroying the range (inserting, appending non-ints) the strategy is
switched to IntegerListStrategy."""
- _applevel_repr = "range"
-
erase, unerase = rerased.new_erasing_pair("range")
erase = staticmethod(erase)
unerase = staticmethod(unerase)
@@ -1555,7 +1549,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "object"
def unwrap(self, w_obj):
return w_obj
@@ -1590,7 +1583,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = 0
- _applevel_repr = "int"
def wrap(self, intval):
return self.space.wrap(intval)
@@ -1644,7 +1636,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = 0.0
- _applevel_repr = "float"
def wrap(self, floatval):
return self.space.wrap(floatval)
@@ -1677,7 +1668,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "bytes"
def wrap(self, stringval):
return self.space.wrap(stringval)
@@ -1710,7 +1700,6 @@
import_from_mixin(AbstractUnwrappedStrategy)
_none_value = None
- _applevel_repr = "unicode"
def wrap(self, stringval):
return self.space.wrap(stringval)
diff --git a/rpython/rtyper/test/test_rclass.py b/rpython/rtyper/test/test_rclass.py
--- a/rpython/rtyper/test/test_rclass.py
+++ b/rpython/rtyper/test/test_rclass.py
@@ -440,6 +440,25 @@
res = self.interpret(f, [3])
assert res == ~0x0200 & 0x3ff
+ def test_class___name__(self):
+ class ACLS(object): pass
+ class Bcls(ACLS): pass
+ class CCls(ACLS): pass
+ def nameof(cls):
+ return cls.__name__
+ nameof._annspecialcase_ = "specialize:memo"
+ def f(i):
+ if i == 1: x = ACLS()
+ elif i == 2: x = Bcls()
+ else: x = CCls()
+ return nameof(x.__class__)
+ res = self.interpret(f, [1])
+ assert ''.join(res.chars) == 'ACLS'
+ res = self.interpret(f, [2])
+ assert ''.join(res.chars) == 'Bcls'
+ res = self.interpret(f, [3])
+ assert ''.join(res.chars) == 'CCls'
+
def test_hash_preservation(self):
from rpython.rlib.objectmodel import current_object_addr_as_int
from rpython.rlib.objectmodel import compute_identity_hash
From noreply at buildbot.pypy.org Wed Jul 9 17:48:05 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 17:48:05 +0200 (CEST)
Subject: [pypy-commit] pypy default: Retype the field "name" on the base
object RPython class to be a regular
Message-ID: <20140709154805.8209D1D2335@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72400:0d95b46fffc3
Date: 2014-07-09 17:46 +0200
http://bitbucket.org/pypy/pypy/changeset/0d95b46fffc3/
Log: Retype the field "name" on the base object RPython class to be a
regular rstr.STR instead of some null-terminated Array(Char).
diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py
--- a/rpython/rtyper/llinterp.py
+++ b/rpython/rtyper/llinterp.py
@@ -42,7 +42,7 @@
return ': '.join([str(x) for x in self.args])
def type_name(etype):
- return ''.join(etype.name).rstrip('\x00')
+ return ''.join(etype.name.chars)
class LLInterpreter(object):
""" low level interpreter working with concrete values. """
@@ -145,7 +145,7 @@
assert isinstance(exc, LLException)
klass, inst = exc.args[0], exc.args[1]
for cls in enumerate_exceptions_top_down():
- if "".join(klass.name).rstrip("\0") == cls.__name__:
+ if "".join(klass.name.chars) == cls.__name__:
return cls
raise ValueError("couldn't match exception, maybe it"
" has RPython attributes like OSError?")
diff --git a/rpython/rtyper/lltypesystem/rclass.py b/rpython/rtyper/lltypesystem/rclass.py
--- a/rpython/rtyper/lltypesystem/rclass.py
+++ b/rpython/rtyper/lltypesystem/rclass.py
@@ -22,6 +22,7 @@
from rpython.rlib import objectmodel
from rpython.tool.identity_dict import identity_dict
from rpython.rtyper.lltypesystem.lloperation import llop
+from rpython.rtyper.lltypesystem import rstr
#
# There is one "vtable" per user class, with the following structure:
@@ -32,7 +33,7 @@
# RuntimeTypeInfo * rtti;
# Signed subclassrange_min; //this is also the id of the class itself
# Signed subclassrange_max;
-# array { char } * name;
+# RPyString * name;
# struct object * instantiate();
# }
#
@@ -68,7 +69,7 @@
('subclassrange_min', Signed),
('subclassrange_max', Signed),
('rtti', Ptr(RuntimeTypeInfo)),
- ('name', Ptr(Array(Char))),
+ ('name', Ptr(rstr.STR)),
('hash', Signed),
('instantiate', Ptr(FuncType([], OBJECTPTR))),
hints = {'immutable': True}))
@@ -89,13 +90,6 @@
vtable = vtable.super
return vtable
-def alloc_array_name(name):
- p = malloc(Array(Char), len(name)+1, immortal=True)
- for i in range(len(name)):
- p[i] = name[i]
- p[len(name)] = '\x00'
- return p
-
class ClassRepr(AbstractClassRepr):
def __init__(self, rtyper, classdef):
@@ -203,7 +197,7 @@
name = 'object'
else:
name = rsubcls.classdef.shortname
- vtable.name = alloc_array_name(name)
+ vtable.name = rstr.string_repr.convert_const(name)
if hasattr(rsubcls.classdef, 'my_instantiate_graph'):
graph = rsubcls.classdef.my_instantiate_graph
vtable.instantiate = self.rtyper.getcallable(graph)
@@ -579,7 +573,6 @@
return hop.genop('ptr_nonzero', [vinst], resulttype=Bool)
def ll_str(self, i): # doesn't work for non-gc classes!
- from rpython.rtyper.lltypesystem import rstr
from rpython.rtyper.lltypesystem.ll_str import ll_int2hex
from rpython.rlib.rarithmetic import r_uint
if not i:
@@ -590,14 +583,8 @@
#uid = r_uint(cast_ptr_to_int(i))
uid = r_uint(llop.gc_id(lltype.Signed, i))
#
- nameLen = len(instance.typeptr.name)
- nameString = rstr.mallocstr(nameLen-1)
- i = 0
- while i < nameLen - 1:
- nameString.chars[i] = instance.typeptr.name[i]
- i += 1
res = rstr.instance_str_prefix
- res = rstr.ll_strconcat(res, nameString)
+ res = rstr.ll_strconcat(res, instance.typeptr.name)
res = rstr.ll_strconcat(res, rstr.instance_str_infix)
res = rstr.ll_strconcat(res, ll_int2hex(uid, False))
res = rstr.ll_strconcat(res, rstr.instance_str_suffix)
diff --git a/rpython/rtyper/test/tool.py b/rpython/rtyper/test/tool.py
--- a/rpython/rtyper/test/tool.py
+++ b/rpython/rtyper/test/tool.py
@@ -68,7 +68,7 @@
return fnptr._obj._callable
def class_name(self, value):
- return "".join(value.super.typeptr.name)[:-1]
+ return ''.join(value.super.typeptr.name.chars)
def read_attr(self, value, attr_name):
value = value._obj
diff --git a/rpython/translator/c/src/debug_traceback.c b/rpython/translator/c/src/debug_traceback.c
--- a/rpython/translator/c/src/debug_traceback.c
+++ b/rpython/translator/c/src/debug_traceback.c
@@ -66,7 +66,8 @@
void pypy_debug_catch_fatal_exception(void)
{
pypy_debug_traceback_print();
- fprintf(stderr, "Fatal RPython error: %s\n",
- RPyFetchExceptionType()->ov_name->items);
+ fprintf(stderr, "Fatal RPython error: %.*s\n",
+ (int)(RPyFetchExceptionType()->ov_name->rs_chars.length),
+ RPyFetchExceptionType()->ov_name->rs_chars.items);
abort();
}
diff --git a/rpython/translator/c/src/exception.c b/rpython/translator/c/src/exception.c
--- a/rpython/translator/c/src/exception.c
+++ b/rpython/translator/c/src/exception.c
@@ -16,8 +16,9 @@
long lineno, const char *functionname)
{
#ifdef DO_LOG_EXC
- fprintf(stderr, "%s %s: %s:%ld %s\n", msg,
- RPyFetchExceptionType()->ov_name->items,
+ fprintf(stderr, "%s %.*s: %s:%ld %s\n", msg,
+ (int)(RPyFetchExceptionType()->ov_name->rs_chars.length),
+ RPyFetchExceptionType()->ov_name->rs_chars.items,
filename, lineno, functionname);
#endif
}
From noreply at buildbot.pypy.org Wed Jul 9 18:33:59 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 18:33:59 +0200 (CEST)
Subject: [pypy-commit] pypy default: Support in RPython fetching the
__name__ of a class.
Message-ID: <20140709163359.D47231C33F5@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72401:4bf29ad76462
Date: 2014-07-09 18:01 +0200
http://bitbucket.org/pypy/pypy/changeset/4bf29ad76462/
Log: Support in RPython fetching the __name__ of a class.
diff --git a/rpython/annotator/test/test_annrpython.py b/rpython/annotator/test/test_annrpython.py
--- a/rpython/annotator/test/test_annrpython.py
+++ b/rpython/annotator/test/test_annrpython.py
@@ -4276,6 +4276,15 @@
py.test.raises(annmodel.AnnotatorError,
a.build_types, f, [annmodel.s_None])
+ def test_class___name__(self):
+ class Abc(object):
+ pass
+ def f():
+ return Abc().__class__.__name__
+ a = self.RPythonAnnotator()
+ s = a.build_types(f, [])
+ assert isinstance(s, annmodel.SomeString)
+
def g(n):
return [0, 1, 2, n]
diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py
--- a/rpython/annotator/unaryop.py
+++ b/rpython/annotator/unaryop.py
@@ -745,6 +745,11 @@
class __extend__(SomePBC):
def getattr(self, s_attr):
+ assert s_attr.is_constant()
+ if s_attr.const == '__name__':
+ from rpython.annotator.description import ClassDesc
+ if self.getKind() is ClassDesc:
+ return SomeString()
bookkeeper = getbookkeeper()
return bookkeeper.pbc_getattr(self, s_attr)
getattr.can_only_throw = []
diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py
--- a/rpython/rtyper/rpbc.py
+++ b/rpython/rtyper/rpbc.py
@@ -5,7 +5,7 @@
from rpython.annotator.argument import simple_args
from rpython.rtyper import rclass, callparse
from rpython.rtyper.error import TyperError
-from rpython.rtyper.lltypesystem.lltype import typeOf, Void
+from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.rmodel import (Repr, inputconst, CanBeNull, mangle,
warning, impossible_repr)
from rpython.tool.pairtype import pair, pairtype
@@ -113,7 +113,7 @@
llfn = rtyper.getcallable(graph)
concreterow[funcdesc] = llfn
assert len(concreterow) > 0
- concreterow.fntype = typeOf(llfn) # 'llfn' from the loop above
+ concreterow.fntype = lltype.typeOf(llfn)# 'llfn' from the loop above
# (they should all have the same type)
concreterows[shape, index] = concreterow
@@ -161,7 +161,7 @@
self.callfamily = s_pbc.any_description().getcallfamily()
if len(s_pbc.descriptions) == 1 and not s_pbc.can_be_None:
# a single function
- self.lowleveltype = Void
+ self.lowleveltype = lltype.Void
else:
concretetable, uniquerows = get_concrete_calltable(self.rtyper,
self.callfamily)
@@ -193,7 +193,7 @@
return self.funccache[funcdesc]
except KeyError:
pass
- if self.lowleveltype is Void:
+ if self.lowleveltype is lltype.Void:
result = None
else:
llfns = {}
@@ -225,7 +225,7 @@
value = value.im_func # unbound method -> bare function
elif isinstance(value, staticmethod):
value = value.__get__(42) # hackish, get the function wrapped by staticmethod
- if self.lowleveltype is Void:
+ if self.lowleveltype is lltype.Void:
return None
if value is None:
null = self.rtyper.type_system.null_callable(self.lowleveltype)
@@ -239,27 +239,27 @@
'index' and 'shape' tells which of its items we are interested in.
"""
assert v.concretetype == self.lowleveltype
- if self.lowleveltype is Void:
+ if self.lowleveltype is lltype.Void:
assert len(self.s_pbc.descriptions) == 1
# lowleveltype wouldn't be Void otherwise
funcdesc, = self.s_pbc.descriptions
row_of_one_graph = self.callfamily.calltables[shape][index]
graph = row_of_one_graph[funcdesc]
llfn = self.rtyper.getcallable(graph)
- return inputconst(typeOf(llfn), llfn)
+ return inputconst(lltype.typeOf(llfn), llfn)
elif len(self.uniquerows) == 1:
return v
else:
# 'v' is a Struct pointer, read the corresponding field
row = self.concretetable[shape, index]
- cname = inputconst(Void, row.attrname)
+ cname = inputconst(lltype.Void, row.attrname)
return self.get_specfunc_row(llop, v, cname, row.fntype)
def get_unique_llfn(self):
# try to build a unique low-level function. Avoid to use
# whenever possible! Doesn't work with specialization, multiple
# different call sites, etc.
- if self.lowleveltype is not Void:
+ if self.lowleveltype is not lltype.Void:
raise TyperError("cannot pass multiple functions here")
assert len(self.s_pbc.descriptions) == 1
# lowleveltype wouldn't be Void otherwise
@@ -281,7 +281,7 @@
if graphs != [graph]*len(graphs):
raise TyperError("cannot pass a specialized function here")
llfn = self.rtyper.getcallable(graph)
- return inputconst(typeOf(llfn), llfn)
+ return inputconst(lltype.typeOf(llfn), llfn)
def get_concrete_llfn(self, s_pbc, args_s, op):
bk = self.rtyper.annotator.bookkeeper
@@ -293,7 +293,7 @@
row_of_one_graph = self.callfamily.calltables[shape][index]
graph = row_of_one_graph[funcdesc]
llfn = self.rtyper.getcallable(graph)
- return inputconst(typeOf(llfn), llfn)
+ return inputconst(lltype.typeOf(llfn), llfn)
def rtype_simple_call(self, hop):
return self.call(hop)
@@ -319,7 +319,7 @@
if isinstance(vlist[0], Constant):
v = hop.genop('direct_call', vlist, resulttype = rresult)
else:
- vlist.append(hop.inputconst(Void, row_of_graphs.values()))
+ vlist.append(hop.inputconst(lltype.Void, row_of_graphs.values()))
v = hop.genop('indirect_call', vlist, resulttype = rresult)
if hop.r_result is impossible_repr:
return None # see test_always_raising_methods
@@ -331,10 +331,10 @@
# this check makes sense because both source and dest repr are FunctionsPBCRepr
if r_fpbc1.lowleveltype == r_fpbc2.lowleveltype:
return v
- if r_fpbc1.lowleveltype is Void:
+ if r_fpbc1.lowleveltype is lltype.Void:
return inputconst(r_fpbc2, r_fpbc1.s_pbc.const)
- if r_fpbc2.lowleveltype is Void:
- return inputconst(Void, None)
+ if r_fpbc2.lowleveltype is lltype.Void:
+ return inputconst(lltype.Void, None)
return NotImplemented
class OverriddenFunctionPBCRepr(Repr):
@@ -342,7 +342,7 @@
self.rtyper = rtyper
self.s_pbc = s_pbc
assert len(s_pbc.descriptions) == 1
- self.lowleveltype = Void
+ self.lowleveltype = lltype.Void
def rtype_simple_call(self, hop):
from rpython.rtyper.rspecialcase import rtype_call_specialcase
@@ -377,7 +377,7 @@
class SingleFrozenPBCRepr(Repr):
"""Representation selected for a single non-callable pre-built constant."""
- lowleveltype = Void
+ lowleveltype = lltype.Void
def __init__(self, frozendesc):
self.frozendesc = frozendesc
@@ -412,7 +412,7 @@
return self.converted_pbc_cache[frozendesc]
except KeyError:
r = self.rtyper.getrepr(annmodel.SomePBC([frozendesc]))
- if r.lowleveltype is Void:
+ if r.lowleveltype is lltype.Void:
# must create a new empty structure, as a placeholder
pbc = self.create_instance()
else:
@@ -462,7 +462,7 @@
result = self.create_instance()
self.pbc_cache[frozendesc] = result
for attr, (mangled_name, r_value) in self.fieldmap.items():
- if r_value.lowleveltype is Void:
+ if r_value.lowleveltype is lltype.Void:
continue
try:
thisattrvalue = frozendesc.attrcache[attr]
@@ -479,7 +479,7 @@
return hop.inputconst(hop.r_result, hop.s_result.const)
attr = hop.args_s[1].const
- vpbc, vattr = hop.inputargs(self, Void)
+ vpbc, vattr = hop.inputargs(self, lltype.Void)
v_res = self.getfield(vpbc, attr, hop.llops)
mangled_name, r_res = self.fieldmap[attr]
return hop.llops.convertvar(v_res, r_res, hop.r_result)
@@ -503,7 +503,7 @@
class __extend__(pairtype(AbstractMultipleUnrelatedFrozenPBCRepr,
SingleFrozenPBCRepr)):
def convert_from_to((r_pbc1, r_pbc2), v, llops):
- return inputconst(Void, r_pbc2.frozendesc)
+ return inputconst(lltype.Void, r_pbc2.frozendesc)
class MethodOfFrozenPBCRepr(Repr):
@@ -594,7 +594,7 @@
# raise TyperError("unsupported: variable of type "
# "class-pointer or None")
if s_pbc.is_constant():
- self.lowleveltype = Void
+ self.lowleveltype = lltype.Void
else:
self.lowleveltype = self.getlowleveltype()
@@ -617,7 +617,7 @@
def convert_desc(self, desc):
if desc not in self.s_pbc.descriptions:
raise TyperError("%r not in %r" % (desc, self))
- if self.lowleveltype is Void:
+ if self.lowleveltype is lltype.Void:
return None
subclassdef = desc.getuniqueclassdef()
r_subclass = rclass.getclassrepr(self.rtyper, subclassdef)
@@ -625,7 +625,7 @@
def convert_const(self, cls):
if cls is None:
- if self.lowleveltype is Void:
+ if self.lowleveltype is lltype.Void:
return None
else:
T = self.lowleveltype
@@ -639,8 +639,15 @@
return hop.inputconst(hop.r_result, hop.s_result.const)
else:
attr = hop.args_s[1].const
+ if attr == '__name__':
+ from rpython.rtyper.lltypesystem import rstr
+ class_repr = rclass.getclassrepr(self.rtyper, None)
+ vcls, vattr = hop.inputargs(class_repr, lltype.Void)
+ cname = inputconst(lltype.Void, 'name')
+ return hop.genop('getfield', [vcls, cname],
+ resulttype = lltype.Ptr(rstr.STR))
access_set, class_repr = self.get_access_set(attr)
- vcls, vattr = hop.inputargs(class_repr, Void)
+ vcls, vattr = hop.inputargs(class_repr, lltype.Void)
v_res = class_repr.getpbcfield(vcls, access_set, attr, hop.llops)
s_res = access_set.s_value
r_res = self.rtyper.getrepr(s_res)
@@ -669,7 +676,7 @@
if len(self.s_pbc.descriptions) == 1:
# instantiating a single class
- if self.lowleveltype is not Void:
+ if self.lowleveltype is not lltype.Void:
assert 0, "XXX None-or-1-class instantation not implemented"
assert isinstance(s_instance, annmodel.SomeInstance)
classdef = s_instance.classdef
@@ -726,7 +733,7 @@
# turn a PBC of classes to a standard pointer-to-vtable class repr
if r_clspbc.lowleveltype == r_cls.lowleveltype:
return v
- if r_clspbc.lowleveltype is Void:
+ if r_clspbc.lowleveltype is lltype.Void:
return inputconst(r_cls, r_clspbc.s_pbc.const)
# convert from ptr-to-object-vtable to ptr-to-more-precise-vtable
return r_cls.fromclasstype(v, llops)
@@ -736,10 +743,10 @@
# this check makes sense because both source and dest repr are ClassesPBCRepr
if r_clspbc1.lowleveltype == r_clspbc2.lowleveltype:
return v
- if r_clspbc1.lowleveltype is Void:
+ if r_clspbc1.lowleveltype is lltype.Void:
return inputconst(r_clspbc2, r_clspbc1.s_pbc.const)
- if r_clspbc2.lowleveltype is Void:
- return inputconst(Void, r_clspbc2.s_pbc.const)
+ if r_clspbc2.lowleveltype is lltype.Void:
+ return inputconst(lltype.Void, r_clspbc2.s_pbc.const)
return NotImplemented
def adjust_shape(hop2, s_shape):
diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py
--- a/rpython/rtyper/test/test_rpbc.py
+++ b/rpython/rtyper/test/test_rpbc.py
@@ -1642,6 +1642,20 @@
res = self.interpret(g, [])
assert res == False
+ def test_class___name__(self):
+ class Base(object): pass
+ class ASub(Base): pass
+ def g(n):
+ if n == 1:
+ x = Base()
+ else:
+ x = ASub()
+ return x.__class__.__name__
+ res = self.interpret(g, [1])
+ assert self.ll_to_string(res) == "Base"
+ res = self.interpret(g, [2])
+ assert self.ll_to_string(res) == "ASub"
+
# ____________________________________________________________
class TestRPBCExtra(BaseRtypingTest):
From noreply at buildbot.pypy.org Wed Jul 9 18:34:01 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 18:34:01 +0200 (CEST)
Subject: [pypy-commit] pypy default: Fix
Message-ID: <20140709163401.2EA711C33F5@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72402:a5013f2a905e
Date: 2014-07-09 18:32 +0200
http://bitbucket.org/pypy/pypy/changeset/a5013f2a905e/
Log: Fix
diff --git a/rpython/jit/metainterp/virtualref.py b/rpython/jit/metainterp/virtualref.py
--- a/rpython/jit/metainterp/virtualref.py
+++ b/rpython/jit/metainterp/virtualref.py
@@ -1,5 +1,5 @@
from rpython.rtyper.rmodel import inputconst, log
-from rpython.rtyper.lltypesystem import lltype, llmemory, rclass
+from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rstr
from rpython.jit.metainterp import history
from rpython.jit.metainterp.virtualizable import TOKEN_NONE
from rpython.jit.metainterp.virtualizable import TOKEN_TRACING_RESCALL
@@ -19,7 +19,7 @@
self.jit_virtual_ref_vtable = lltype.malloc(rclass.OBJECT_VTABLE,
zero=True, flavor='raw',
immortal=True)
- self.jit_virtual_ref_vtable.name = rclass.alloc_array_name(
+ self.jit_virtual_ref_vtable.name = rstr.string_repr.convert_const(
'jit_virtual_ref')
# build some constants
adr = llmemory.cast_ptr_to_adr(self.jit_virtual_ref_vtable)
From noreply at buildbot.pypy.org Wed Jul 9 18:37:51 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 18:37:51 +0200 (CEST)
Subject: [pypy-commit] pypy default: Use __class__.__name__ directly here.
Message-ID: <20140709163751.66CE91C33F5@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72403:dab3b81bdd77
Date: 2014-07-09 18:36 +0200
http://bitbucket.org/pypy/pypy/changeset/dab3b81bdd77/
Log: Use __class__.__name__ directly here.
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -73,21 +73,17 @@
return space.wrap(42)
-def _nameof(cls):
- return cls.__name__
-_nameof._annspecialcase_ = 'specialize:memo'
-
def strategy(space, w_obj):
""" strategy(dict or list or set)
Return the underlying strategy currently used by a dict, list or set object
"""
if isinstance(w_obj, W_DictMultiObject):
- name = _nameof(w_obj.strategy.__class__)
+ name = w_obj.strategy.__class__.__name__
elif isinstance(w_obj, W_ListObject):
- name = _nameof(w_obj.strategy.__class__)
+ name = w_obj.strategy.__class__.__name__
elif isinstance(w_obj, W_BaseSetObject):
- name = _nameof(w_obj.strategy.__class__)
+ name = w_obj.strategy.__class__.__name__
else:
raise OperationError(space.w_TypeError,
space.wrap("expecting dict or list or set object"))
From noreply at buildbot.pypy.org Wed Jul 9 19:08:05 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 19:08:05 +0200 (CEST)
Subject: [pypy-commit] pypy default: Update to 0.8.6. No other changes in
_cffi_backend from 0.8.2.
Message-ID: <20140709170805.3BAB41C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72404:44f8bf48500e
Date: 2014-07-09 19:07 +0200
http://bitbucket.org/pypy/pypy/changeset/44f8bf48500e/
Log: Update to 0.8.6. No other changes in _cffi_backend from 0.8.2.
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py
--- a/pypy/module/_cffi_backend/__init__.py
+++ b/pypy/module/_cffi_backend/__init__.py
@@ -8,7 +8,7 @@
appleveldefs = {
}
interpleveldefs = {
- '__version__': 'space.wrap("0.8.2")',
+ '__version__': 'space.wrap("0.8.6")',
'load_library': 'libraryobj.load_library',
diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py
--- a/pypy/module/_cffi_backend/test/_backend_test_c.py
+++ b/pypy/module/_cffi_backend/test/_backend_test_c.py
@@ -3188,4 +3188,4 @@
def test_version():
# this test is here mostly for PyPy
- assert __version__ == "0.8.2"
+ assert __version__ == "0.8.6"
From noreply at buildbot.pypy.org Wed Jul 9 19:08:06 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 19:08:06 +0200 (CEST)
Subject: [pypy-commit] pypy default: More places that need to be fixed for
the new 'name' field type.
Message-ID: <20140709170806.8D5811C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72405:2982c4350071
Date: 2014-07-09 19:07 +0200
http://bitbucket.org/pypy/pypy/changeset/2982c4350071/
Log: More places that need to be fixed for the new 'name' field type.
diff --git a/rpython/jit/codewriter/assembler.py b/rpython/jit/codewriter/assembler.py
--- a/rpython/jit/codewriter/assembler.py
+++ b/rpython/jit/codewriter/assembler.py
@@ -248,7 +248,7 @@
if isinstance(TYPE, lltype.FuncType):
name = value._obj._name
elif TYPE == rclass.OBJECT_VTABLE:
- name = ''.join(value.name).rstrip('\x00')
+ name = ''.join(value.name.chars)
else:
return
addr = llmemory.cast_ptr_to_adr(value)
diff --git a/rpython/jit/codewriter/heaptracker.py b/rpython/jit/codewriter/heaptracker.py
--- a/rpython/jit/codewriter/heaptracker.py
+++ b/rpython/jit/codewriter/heaptracker.py
@@ -1,4 +1,4 @@
-from rpython.rtyper.lltypesystem import lltype, llmemory, rclass
+from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rstr
from rpython.rlib.objectmodel import we_are_translated
@@ -66,11 +66,7 @@
def set_testing_vtable_for_gcstruct(GCSTRUCT, vtable, name):
# only for tests that need to register the vtable of their malloc'ed
# structures in case they are GcStruct inheriting from OBJECT.
- namez = name + '\x00'
- vtable.name = lltype.malloc(rclass.OBJECT_VTABLE.name.TO, len(namez),
- immortal=True)
- for i in range(len(namez)):
- vtable.name[i] = namez[i]
+ vtable.name = rstr.string_repr.convert_const(name)
testing_gcstruct2vtable[GCSTRUCT] = vtable
testing_gcstruct2vtable = {}
From noreply at buildbot.pypy.org Wed Jul 9 19:47:51 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 19:47:51 +0200 (CEST)
Subject: [pypy-commit] pypy default: Fixes
Message-ID: <20140709174751.029ED1D26B6@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72406:3ae2e0814de1
Date: 2014-07-09 19:47 +0200
http://bitbucket.org/pypy/pypy/changeset/3ae2e0814de1/
Log: Fixes
diff --git a/rpython/jit/backend/test/test_ll_random.py b/rpython/jit/backend/test/test_ll_random.py
--- a/rpython/jit/backend/test/test_ll_random.py
+++ b/rpython/jit/backend/test/test_ll_random.py
@@ -112,11 +112,7 @@
self.vtable_counter += 1
S = self.get_random_structure_type(r, with_vtable=vtable, cache=False)
name = S._name
- vtable.name = lltype.malloc(lltype.Array(lltype.Char), len(name)+1,
- immortal=True)
- for i in range(len(name)):
- vtable.name[i] = name[i]
- vtable.name[len(name)] = '\x00'
+ vtable.name = rstr.string_repr.convert_const(name)
self.structure_types_and_vtables.append((S, vtable))
#
heaptracker.register_known_gctype(self.cpu, vtable, S)
diff --git a/rpython/jit/metainterp/optimizeopt/test/test_util.py b/rpython/jit/metainterp/optimizeopt/test/test_util.py
--- a/rpython/jit/metainterp/optimizeopt/test/test_util.py
+++ b/rpython/jit/metainterp/optimizeopt/test/test_util.py
@@ -81,10 +81,10 @@
return box.getref(rclass.OBJECTPTR).typeptr
node_vtable = lltype.malloc(OBJECT_VTABLE, immortal=True)
- node_vtable.name = rclass.alloc_array_name('node')
+ node_vtable.name = rstr.string_repr.convert_const('node')
node_vtable_adr = llmemory.cast_ptr_to_adr(node_vtable)
node_vtable2 = lltype.malloc(OBJECT_VTABLE, immortal=True)
- node_vtable2.name = rclass.alloc_array_name('node2')
+ node_vtable2.name = rstr.string_repr.convert_const('node2')
node_vtable_adr2 = llmemory.cast_ptr_to_adr(node_vtable2)
cpu = runner.LLGraphCPU(None)
@@ -331,7 +331,7 @@
def get_name_from_address(self, addr):
# hack
try:
- return "".join(addr.ptr.name)[:-1] # remove \x00
+ return "".join(addr.ptr.name.chars)
except AttributeError:
return ""
From noreply at buildbot.pypy.org Wed Jul 9 19:56:21 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 19:56:21 +0200 (CEST)
Subject: [pypy-commit] pypy default: Reintroduce rclass.alloc_array_name()
and revert some changes to use it again.
Message-ID: <20140709175621.498901D26B7@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72407:1f0c59905c74
Date: 2014-07-09 19:55 +0200
http://bitbucket.org/pypy/pypy/changeset/1f0c59905c74/
Log: Reintroduce rclass.alloc_array_name() and revert some changes to use
it again.
diff --git a/rpython/jit/backend/test/test_ll_random.py b/rpython/jit/backend/test/test_ll_random.py
--- a/rpython/jit/backend/test/test_ll_random.py
+++ b/rpython/jit/backend/test/test_ll_random.py
@@ -112,7 +112,7 @@
self.vtable_counter += 1
S = self.get_random_structure_type(r, with_vtable=vtable, cache=False)
name = S._name
- vtable.name = rstr.string_repr.convert_const(name)
+ vtable.name = rclass.alloc_array_name(name)
self.structure_types_and_vtables.append((S, vtable))
#
heaptracker.register_known_gctype(self.cpu, vtable, S)
diff --git a/rpython/jit/codewriter/heaptracker.py b/rpython/jit/codewriter/heaptracker.py
--- a/rpython/jit/codewriter/heaptracker.py
+++ b/rpython/jit/codewriter/heaptracker.py
@@ -1,4 +1,4 @@
-from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rstr
+from rpython.rtyper.lltypesystem import lltype, llmemory, rclass
from rpython.rlib.objectmodel import we_are_translated
@@ -66,7 +66,7 @@
def set_testing_vtable_for_gcstruct(GCSTRUCT, vtable, name):
# only for tests that need to register the vtable of their malloc'ed
# structures in case they are GcStruct inheriting from OBJECT.
- vtable.name = rstr.string_repr.convert_const(name)
+ vtable.name = rclass.alloc_array_name(name)
testing_gcstruct2vtable[GCSTRUCT] = vtable
testing_gcstruct2vtable = {}
diff --git a/rpython/jit/metainterp/optimizeopt/test/test_util.py b/rpython/jit/metainterp/optimizeopt/test/test_util.py
--- a/rpython/jit/metainterp/optimizeopt/test/test_util.py
+++ b/rpython/jit/metainterp/optimizeopt/test/test_util.py
@@ -1,6 +1,6 @@
import py, random
-from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rstr, rffi
+from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rffi
from rpython.rtyper.lltypesystem.rclass import OBJECT, OBJECT_VTABLE
from rpython.rtyper.rclass import FieldListAccessor, IR_QUASIIMMUTABLE
@@ -81,10 +81,10 @@
return box.getref(rclass.OBJECTPTR).typeptr
node_vtable = lltype.malloc(OBJECT_VTABLE, immortal=True)
- node_vtable.name = rstr.string_repr.convert_const('node')
+ node_vtable.name = rclass.alloc_array_name('node')
node_vtable_adr = llmemory.cast_ptr_to_adr(node_vtable)
node_vtable2 = lltype.malloc(OBJECT_VTABLE, immortal=True)
- node_vtable2.name = rstr.string_repr.convert_const('node2')
+ node_vtable2.name = rclass.alloc_array_name('node2')
node_vtable_adr2 = llmemory.cast_ptr_to_adr(node_vtable2)
cpu = runner.LLGraphCPU(None)
diff --git a/rpython/jit/metainterp/virtualref.py b/rpython/jit/metainterp/virtualref.py
--- a/rpython/jit/metainterp/virtualref.py
+++ b/rpython/jit/metainterp/virtualref.py
@@ -1,5 +1,5 @@
from rpython.rtyper.rmodel import inputconst, log
-from rpython.rtyper.lltypesystem import lltype, llmemory, rclass, rstr
+from rpython.rtyper.lltypesystem import lltype, llmemory, rclass
from rpython.jit.metainterp import history
from rpython.jit.metainterp.virtualizable import TOKEN_NONE
from rpython.jit.metainterp.virtualizable import TOKEN_TRACING_RESCALL
@@ -19,7 +19,7 @@
self.jit_virtual_ref_vtable = lltype.malloc(rclass.OBJECT_VTABLE,
zero=True, flavor='raw',
immortal=True)
- self.jit_virtual_ref_vtable.name = rstr.string_repr.convert_const(
+ self.jit_virtual_ref_vtable.name = rclass.alloc_array_name(
'jit_virtual_ref')
# build some constants
adr = llmemory.cast_ptr_to_adr(self.jit_virtual_ref_vtable)
diff --git a/rpython/rtyper/lltypesystem/rclass.py b/rpython/rtyper/lltypesystem/rclass.py
--- a/rpython/rtyper/lltypesystem/rclass.py
+++ b/rpython/rtyper/lltypesystem/rclass.py
@@ -90,6 +90,9 @@
vtable = vtable.super
return vtable
+def alloc_array_name(name):
+ return rstr.string_repr.convert_const(name)
+
class ClassRepr(AbstractClassRepr):
def __init__(self, rtyper, classdef):
@@ -197,7 +200,7 @@
name = 'object'
else:
name = rsubcls.classdef.shortname
- vtable.name = rstr.string_repr.convert_const(name)
+ vtable.name = alloc_array_name(name)
if hasattr(rsubcls.classdef, 'my_instantiate_graph'):
graph = rsubcls.classdef.my_instantiate_graph
vtable.instantiate = self.rtyper.getcallable(graph)
From noreply at buildbot.pypy.org Wed Jul 9 21:26:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Wed, 9 Jul 2014 21:26:46 +0200 (CEST)
Subject: [pypy-commit] pypy default: Tests for the same situation with other
set operations. Fix for
Message-ID: <20140709192646.02CF11C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72408:e27d1225e4ca
Date: 2014-07-09 21:26 +0200
http://bitbucket.org/pypy/pypy/changeset/e27d1225e4ca/
Log: Tests for the same situation with other set operations. Fix for
symmetric_difference.
diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py
--- a/pypy/objspace/std/setobject.py
+++ b/pypy/objspace/std/setobject.py
@@ -1060,10 +1060,14 @@
return storage, strategy
def symmetric_difference(self, w_set, w_other):
+ if w_other.length() == 0:
+ return w_set.copy_real()
storage, strategy = self._symmetric_difference_base(w_set, w_other)
return w_set.from_storage_and_strategy(storage, strategy)
def symmetric_difference_update(self, w_set, w_other):
+ if w_other.length() == 0:
+ return
storage, strategy = self._symmetric_difference_base(w_set, w_other)
w_set.strategy = strategy
w_set.sstorage = storage
diff --git a/pypy/objspace/std/test/test_setobject.py b/pypy/objspace/std/test/test_setobject.py
--- a/pypy/objspace/std/test/test_setobject.py
+++ b/pypy/objspace/std/test/test_setobject.py
@@ -967,3 +967,28 @@
assert strategy(s) == "IntegerSetStrategy"
s.update(set())
assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3])
+ s |= set()
+ assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3]).difference(set())
+ assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3])
+ s.difference_update(set())
+ assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3]).symmetric_difference(set())
+ assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3])
+ s.symmetric_difference_update(set())
+ assert strategy(s) == "IntegerSetStrategy"
+ #
+ s = set([1, 2, 3]).intersection(set())
+ assert strategy(s) == "EmptySetStrategy"
+ #
+ s = set([1, 2, 3])
+ s.intersection_update(set())
+ assert strategy(s) == "EmptySetStrategy"
From noreply at buildbot.pypy.org Thu Jul 10 10:16:31 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 10 Jul 2014 10:16:31 +0200 (CEST)
Subject: [pypy-commit] pypy default: Add a Python 3 section.
Message-ID: <20140710081631.620D51C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72409:7e3e9f6ace6f
Date: 2014-07-10 10:16 +0200
http://bitbucket.org/pypy/pypy/changeset/7e3e9f6ace6f/
Log: Add a Python 3 section.
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -28,7 +28,8 @@
Introduction
============
-``pypy-stm`` is a variant of the regular PyPy interpreter. With caveats_
+``pypy-stm`` is a variant of the regular PyPy interpreter. (This
+version supports Python 2.7; see below for `Python 3`_.) With caveats_
listed below, it should be in theory within 20%-50% slower than a
regular PyPy, comparing the JIT version in both cases (but see below!).
It is called
@@ -137,6 +138,25 @@
+Python 3
+========
+
+In this document I describe "pypy-stm", which is based on PyPy's Python
+2.7 interpreter. Supporting Python 3 should take about half an
+afternoon of work. Obviously, what I *don't* mean is that by tomorrow
+you can have a finished and polished "pypy3-stm" product. General py3k
+work is still missing; and general stm work is also still missing. But
+they are rather independent from each other, as usual in PyPy. The
+required afternoon of work will certainly be done one of these days now
+that the internal interfaces seem to stabilize.
+
+The same is true for other languages implemented in the RPython
+framework, although the amount of work to put there might vary, because
+the STM framework within RPython is currently targeting the PyPy
+interpreter and other ones might have slightly different needs.
+
+
+
User Guide
==========
From noreply at buildbot.pypy.org Thu Jul 10 10:21:46 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 10 Jul 2014 10:21:46 +0200 (CEST)
Subject: [pypy-commit] pypy default: Update
Message-ID: <20140710082146.B75BB1C34C8@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72410:ccadece8737d
Date: 2014-07-10 10:21 +0200
http://bitbucket.org/pypy/pypy/changeset/ccadece8737d/
Log: Update
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -509,8 +509,6 @@
The last two lines are special; they are an internal marker read by
``transactional_memory.print_abort_info()``.
-These statistics are not printed out for the main thread, for now.
-
Reference to implementation details
-----------------------------------
From noreply at buildbot.pypy.org Thu Jul 10 12:56:49 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:49 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Removed forgotten pdb
breakpoints.
Message-ID: <20140710105649.D08B11C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r873:d033d87d9e19
Date: 2014-07-07 17:33 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/d033d87d9e19/
Log: Removed forgotten pdb breakpoints.
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1385,13 +1385,11 @@
@expose_primitive(RESUME, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- import pdb; pdb.set_trace()
assert_class(interp, w_rcvr, interp.space.w_Process)
wrapper.ProcessWrapper(interp.space, w_rcvr).resume(s_frame)
@expose_primitive(SUSPEND, unwrap_spec=[object], no_result=True, clean_stack=False)
def func(interp, s_frame, w_rcvr):
- import pdb; pdb.set_trace()
assert_class(interp, w_rcvr, interp.space.w_Process)
wrapper.ProcessWrapper(interp.space, w_rcvr).suspend(s_frame)
From noreply at buildbot.pypy.org Thu Jul 10 12:56:51 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:51 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Changed
perform/create_toplevel_context methods of interpreter to work better with
RPython.
Message-ID: <20140710105651.21EDE1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r874:01a714785a05
Date: 2014-07-07 17:35 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/01a714785a05/
Log: Changed perform/create_toplevel_context methods of interpreter to
work better with RPython.
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -211,20 +211,18 @@
except ReturnFromTopLevel, e:
return e.object
- def perform(self, w_receiver, selector, *w_arguments):
- s_frame = self.create_toplevel_context(w_receiver, selector, *w_arguments)
+ def perform(self, w_receiver, selector="", w_selector=None, w_arguments=[]):
+ s_frame = self.create_toplevel_context(w_receiver, selector, w_selector, w_arguments)
self.interrupt_check_counter = self.interrupt_counter_size
return self.interpret_toplevel(s_frame.w_self())
- def create_toplevel_context(self, w_receiver, selector, *w_arguments):
- if isinstance(selector, str):
+ def create_toplevel_context(self, w_receiver, selector="", w_selector=None, w_arguments=[]):
+ if w_selector is None:
+ assert selector, "Need either string or W_Object selector"
if selector == "asSymbol":
w_selector = self.image.w_asSymbol
else:
- w_selector = self.perform(self.space.wrap_string(selector),
- "asSymbol")
- else:
- w_selector = selector
+ w_selector = self.perform(self.space.wrap_string(selector), "asSymbol")
w_method = model.W_CompiledMethod(self.space, header=512)
w_method.literalatput0(self.space, 1, w_selector)
diff --git a/spyvm/test/jit.py b/spyvm/test/jit.py
--- a/spyvm/test/jit.py
+++ b/spyvm/test/jit.py
@@ -36,7 +36,7 @@
def preload_perform(imagename, receiver, selector, *args):
interp = load(imagename)
def interp_miniloop():
- return interp.perform(receiver, selector, *args)
+ return interp.perform(receiver, selector, w_arguments=list(args))
return interp_miniloop
# This will build a jit executing a synthetic method composed of the given bytecodes and literals,
diff --git a/spyvm/test/test_largeinteger.py b/spyvm/test/test_largeinteger.py
--- a/spyvm/test/test_largeinteger.py
+++ b/spyvm/test/test_largeinteger.py
@@ -7,7 +7,6 @@
def setup_module():
space, interp, _, _ = read_image('bootstrapped.image')
w = space.w
- perform = interp.perform
copy_to_module(locals(), __name__)
interp.trace = False
space.initialize_class(space.w_String, interp)
diff --git a/spyvm/test/test_miniimage.py b/spyvm/test/test_miniimage.py
--- a/spyvm/test/test_miniimage.py
+++ b/spyvm/test/test_miniimage.py
@@ -5,7 +5,10 @@
def setup_module():
space, interp, image, reader = read_image("mini.image")
w = space.w
- perform = interp.perform
+ def perform_wrapper(receiver, selector, *args):
+ w_selector = None if isinstance(selector, str) else selector
+ return interp.perform(receiver, selector, w_selector, list(args))
+ perform = perform_wrapper
copy_to_module(locals(), __name__)
def teardown_module():
@@ -191,7 +194,7 @@
w_abs = interp.perform(interp.space.w("abs"), "asSymbol")
for value in [10, -3, 0]:
w_object = model.W_SmallInteger(value)
- w_res = interp.perform(w_object, w_abs)
+ w_res = interp.perform(w_object, w_selector=w_abs)
assert w_res.value == abs(value)
def test_lookup_abs_in_integer():
diff --git a/spyvm/test/util.py b/spyvm/test/util.py
--- a/spyvm/test/util.py
+++ b/spyvm/test/util.py
@@ -266,5 +266,5 @@
def initialize_class(self, w_class, interp):
initialize_symbol = find_symbol_in_methoddict_of("initialize",
w_class.class_shadow(self))
- interp.perform(w_class, initialize_symbol)
+ interp.perform(w_class, w_selector=initialize_symbol)
\ No newline at end of file
From noreply at buildbot.pypy.org Thu Jul 10 12:56:52 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:52 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Refactored command line flags
a little to be more precise and give more controll over what happens.
Message-ID: <20140710105652.5516C1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r875:f9f21debba52
Date: 2014-07-07 17:36 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/f9f21debba52/
Log: Refactored command line flags a little to be more precise and give
more control over what happens. Added descriptions to the usage-
string.
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -10,201 +10,114 @@
from spyvm.tool.analyseimage import create_image
from spyvm.interpreter_proxy import VirtualMachine
-def print_result(w_result):
- # This will also print contents of strings/symbols/numbers
- print w_result.as_repr_string().replace('\r', '\n')
-
-def _run_benchmark(interp, number, benchmark, arg):
- from spyvm.plugins.vmdebugging import stop_ui_process
- stop_ui_process()
-
- space = interp.space
- scheduler = wrapper.scheduler(space)
- w_hpp = scheduler.active_process()
- if space.unwrap_int(scheduler.active_process().fetch(space, 2)) > space.unwrap_int(w_hpp.fetch(space, 2)):
- w_hpp = scheduler.active_process()
- assert isinstance(w_hpp, model.W_PointersObject)
- w_benchmark_proc = model.W_PointersObject(
- space,
- w_hpp.getclass(space),
- w_hpp.size()
- )
-
- s_frame = context_for(interp, number, benchmark, arg)
- # second variable is suspended context
- w_benchmark_proc.store(space, 1, s_frame.w_self())
-
- # third variable is priority
- priority = space.unwrap_int(w_hpp.fetch(space, 2)) / 2 + 1
- # Priorities below 10 are not allowed in newer versions of Squeak.
- if interp.image.version.has_closures:
- priority = max(11, priority)
- else:
- priority = 7
- w_benchmark_proc.store(space, 2, space.wrap_int(priority))
-
- # make process eligible for scheduling
- wrapper.ProcessWrapper(space, w_benchmark_proc).put_to_sleep()
-
- t1 = time.time()
- w_result = _run_image(interp)
- t2 = time.time()
- if w_result:
- print_result(w_result)
- print "took %s seconds" % (t2 - t1)
- return 0
- return -1
-
-def _run_image(interp):
- space = interp.space
- ap = wrapper.ProcessWrapper(space, wrapper.scheduler(space).active_process())
- w_ctx = ap.suspended_context()
- assert isinstance(w_ctx, model.W_PointersObject)
- ap.store_suspended_context(space.w_nil)
- try:
- return interp.interpret_toplevel(w_ctx)
- except error.Exit, e:
- print e.msg
-
-def _run_code(interp, code, as_benchmark=False):
- import time
- selector = "DoIt%d" % int(time.time())
- space = interp.space
- w_receiver = space.w_nil
- w_receiver_class = w_receiver.getclass(space)
- try:
- w_result = interp.perform(
- w_receiver_class,
- "compile:classified:notifying:",
- space.wrap_string("%s\r\n%s" % (selector, code)),
- space.wrap_string("spy-run-code"),
- space.w_nil
- )
- w_receiver_class.as_class_get_shadow(space).s_methoddict().sync_method_cache()
- except interpreter.ReturnFromTopLevel, e:
- print e.object
- return 1
- except error.Exit, e:
- print e.msg
- return 1
-
- if not as_benchmark:
- try:
- w_result = interp.perform(w_receiver, selector)
- except interpreter.ReturnFromTopLevel, e:
- print e.object
- return 1
- except error.Exit, e:
- print e.msg
- return 1
- if w_result:
- print_result(w_result)
- return 0
- else:
- return _run_benchmark(interp, 0, selector, "")
-
-def context_for(interp, number, benchmark, stringarg):
- w_receiver = interp.space.wrap_int(number)
- if stringarg:
- return interp.create_toplevel_context(w_receiver, benchmark, interp.space.wrap_string(stringarg))
- else:
- return interp.create_toplevel_context(w_receiver, benchmark)
-
def _usage(argv):
print """
- Usage: %s
- -j|--jit [jitargs]
- -n|--number [smallint, default: 0]
- -m|--method [benchmark on smallint]
- -a|--arg [string argument to #method]
- -r|--run [code string]
- -b|--benchmark [code string]
- -p|--poll_events
- -ni|--no-interrupts
- -d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
- -l|--storage-log
- -L|--storage-log-aggregate
- -E|--storage-log-elements
- [image path, default: Squeak.image]
+ Usage: %s [-r|-m] [-naH] [-jpis] [-tlLE]
+ - image path (default: Squeak.image)
+
+ Execution mode:
+ (no flags) - Image will be normally opened.
+ -r|--run - Code will be compiled and executed, result printed.
+ -m|--method - Selector will be sent to a SmallInteger, result printed.
+ -h|--help - Output this and exit.
+
+ Execution parameters:
+ -n|--num - Only with -m or -r, SmallInteger to be used as receiver (default: nil).
+ -a|--arg - Only with -m, will be used as single String argument.
+ -H|--headless - Only with -m or -r, run in headless mode.
+ Execute the context directly, ignoring the active context in the image.
+ The execution will 'hijack' the active process.
+ Image window will probably not open. Good for benchmarking.
+ By default, a high-priority process will be created for the context, then the image
+ will be started normally.
+ -u - Only with -m or -r, try to stop UI-process at startup. Can help with -H.
+
+ Other parameters:
+ -j|--jit - jitargs will be passed to the jit configuration.
+ -p|--poll - Actively poll for events. Try this if the image is not responding well.
+ -i|--no-interrupts - Disable timer interrupt. Disables non-cooperative scheduling.
+ -s - After num stack frames, the entire stack will be dumped to the heap.
+ This breaks performance, but protects against stack overflow.
+ num <= 0 disables stack protection (default: %d)
+
+ Logging parameters:
+ -t|--trace - Output a trace of each message, primitive, return value and process switch.
+ -l|--storage-log - Output a log of storage operations.
+ -L|--storage-log-aggregate - Output an aggregated storage log at the end of execution.
+ -E|--storage-log-elements - Include classnames of elements into the storage log.
+
""" % (argv[0], constants.MAX_LOOP_DEPTH)
-def _arg_missing(argv, idx, arg):
- if len(argv) == idx + 1:
+def get_parameter(argv, idx, arg):
+ if len(argv) < idx + 1:
raise RuntimeError("Error: missing argument after %s" % arg)
-
+ return argv[idx], idx + 1
+
prebuilt_space = objspace.ObjSpace()
def entry_point(argv):
- idx = 1
- path = None
+ # == Main execution parameters
+ selector = None
+ code = ""
number = 0
- benchmark = None
+ have_number = False
+ stringarg = None
+ headless = False
+ # == Other parameters
+ poll = False
+ interrupts = True
+ max_stack_depth = constants.MAX_LOOP_DEPTH
trace = False
- evented = True
- stringarg = ""
- code = None
- as_benchmark = False
- max_stack_depth = constants.MAX_LOOP_DEPTH
- interrupts = True
+
+ path = argv[1] if len(argv) > 1 else "Squeak.image"
+ idx = 2
while idx < len(argv):
arg = argv[idx]
+ idx += 1
if arg in ["-h", "--help"]:
_usage(argv)
return 0
elif arg in ["-j", "--jit"]:
- _arg_missing(argv, idx, arg)
- jitarg = argv[idx + 1]
- idx += 1
+ jitarg, idx = get_parameter(argv, idx, arg)
jit.set_user_param(interpreter.Interpreter.jit_driver, jitarg)
elif arg in ["-n", "--number"]:
- _arg_missing(argv, idx, arg)
- number = int(argv[idx + 1])
- idx += 1
+ numarg, idx = get_parameter(argv, idx, arg)
+ number = int(numarg)
+ have_number = True
elif arg in ["-m", "--method"]:
- _arg_missing(argv, idx, arg)
- benchmark = argv[idx + 1]
- idx += 1
+ selector, idx = get_parameter(argv, idx, arg)
elif arg in ["-t", "--trace"]:
trace = True
- elif arg in ["-p", "--poll_events"]:
- evented = False
+ elif arg in ["-p", "--poll"]:
+ poll = True
elif arg in ["-a", "--arg"]:
- _arg_missing(argv, idx, arg)
- stringarg = argv[idx + 1]
- idx += 1
+ stringarg, idx = get_parameter(argv, idx, arg)
elif arg in ["-r", "--run"]:
- _arg_missing(argv, idx, arg)
- code = argv[idx + 1]
- as_benchmark = False
- idx += 1
- elif arg in ["-b", "--benchmark"]:
- _arg_missing(argv, idx, arg)
- code = argv[idx + 1]
- as_benchmark = True
- idx += 1
- elif arg in ["-ni", "--no-interrupts"]:
+ code, idx = get_parameter(argv, idx, arg)
+ elif arg in ["-i", "--no-interrupts"]:
interrupts = False
- elif arg in ["-d", "--max-stack-depth"]:
- _arg_missing(argv, idx, arg)
- max_stack_depth = int(argv[idx + 1])
- idx += 1
+ elif arg in ["-s"]:
+ arg, idx = get_parameter(argv, idx, arg)
+ max_stack_depth = int(arg)
+ elif arg in ["-H", "--headless"]:
+ headless = True
+ elif arg in ["-u"]:
+ from spyvm.plugins.vmdebugging import stop_ui_process
+ stop_ui_process()
elif arg in ["-l", "--storage-log"]:
storage_logger.activate()
elif arg in ["-L", "--storage-log-aggregate"]:
storage_logger.activate(aggregate=True)
elif arg in ["-E", "--storage-log-elements"]:
storage_logger.activate(elements=True)
- elif path is None:
- path = argv[idx]
else:
_usage(argv)
return -1
- idx += 1
-
- if path is None:
- path = "Squeak.image"
-
+
+ if code and selector:
+ raise RuntimeError("Cannot handle both -r and -m.")
+
path = rpath.rabspath(path)
try:
f = open_file_as_stream(path, mode="rb", buffering=0)
@@ -216,27 +129,108 @@
os.write(2, "%s -- %s (LoadError)\n" % (os.strerror(e.errno), path))
return 1
+ # Load & prepare image and environment
space = prebuilt_space
image_reader = squeakimage.reader_for_image(space, squeakimage.Stream(data=imagedata))
image = create_image(space, image_reader)
interp = interpreter.Interpreter(space, image, image_name=path,
- trace=trace, evented=evented,
+ trace=trace, evented=not poll,
interrupts=interrupts, max_stack_depth=max_stack_depth)
space.runtime_setup(argv[0])
- result = 0
- if benchmark is not None:
- result = _run_benchmark(interp, number, benchmark, stringarg)
- elif code is not None:
- result = _run_code(interp, code, as_benchmark=as_benchmark)
+
+ # Create context to be executed
+ if code or selector:
+ if not have_number:
+ w_receiver = interp.space.w_nil
+ else:
+ w_receiver = interp.space.wrap_int(number)
+ if code:
+ selector = compile_code(interp, w_receiver, code)
+ if selector is None:
+ return -1 # Compilation failed, message is printed.
+ s_frame = create_context(interp, w_receiver, selector, stringarg)
+ if headless:
+ context = s_frame
+ else:
+ create_process(interp, s_frame)
+ context = active_context(interp.space)
else:
- _run_image(interp)
- result = 0
+ context = active_context(interp.space)
+
+ w_result = execute_context(interp, context)
+ print result_string(w_result)
storage_logger.print_aggregated_log()
- return result
+ return 0
+def result_string(w_result):
+ # This will also print contents of strings/symbols/numbers
+ return w_result.as_repr_string().replace('\r', '\n')
-# _____ Define and setup target ___
+def compile_code(interp, w_receiver, code):
+ import time
+ selector = "DoIt%d" % int(time.time())
+ space = interp.space
+ w_receiver_class = w_receiver.getclass(space)
+ try:
+ w_result = interp.perform(
+ w_receiver_class,
+ "compile:classified:notifying:",
+ w_arguments = [space.wrap_string("%s\r\n%s" % (selector, code)),
+ space.wrap_string("spy-run-code"),
+ space.w_nil]
+ )
+ # TODO - is this expected in every image?
+ if not isinstance(w_result, model.W_BytesObject) or w_result.as_string() != selector:
+ print "Compilation failed, unexpected result: %s" % result_string(w_result)
+ return None
+ except error.Exit, e:
+ print "Exited while compiling code: %s" % e.msg
+ return None
+ w_receiver_class.as_class_get_shadow(space).s_methoddict().sync_method_cache()
+ return selector
+
+def create_context(interp, w_receiver, selector, stringarg):
+ args = []
+ if stringarg:
+ args.append(interp.space.wrap_string(stringarg))
+ return interp.create_toplevel_context(w_receiver, selector, w_arguments = args)
+
+def create_process(interp, s_frame):
+ space = interp.space
+ w_active_process = wrapper.scheduler(space).active_process()
+ assert isinstance(w_active_process, model.W_PointersObject)
+ w_benchmark_proc = model.W_PointersObject(
+ space, w_active_process.getclass(space), w_active_process.size()
+ )
+ if interp.image.version.has_closures:
+ # Priorities below 10 are not allowed in newer versions of Squeak.
+ active_priority = space.unwrap_int(w_active_process.fetch(space, 2))
+ priority = active_priority / 2 + 1
+ priority = max(11, priority)
+ else:
+ priority = 7
+ w_benchmark_proc.store(space, 1, s_frame.w_self())
+ w_benchmark_proc.store(space, 2, space.wrap_int(priority))
+
+ # Make process eligible for scheduling
+ wrapper.ProcessWrapper(space, w_benchmark_proc).put_to_sleep()
+
+def active_context(space):
+ w_active_process = wrapper.scheduler(space).active_process()
+ active_process = wrapper.ProcessWrapper(space, w_active_process)
+ w_active_context = active_process.suspended_context()
+ assert isinstance(w_active_context, model.W_PointersObject)
+ active_process.store_suspended_context(space.w_nil)
+ return w_active_context.as_context_get_shadow(space)
+def execute_context(interp, s_frame, measure=False):
+ try:
+ return interp.interpret_toplevel(s_frame.w_self())
+ except error.Exit, e:
+ print "Exited: %s" % e.msg
+ return None
+
+# _____ Target and Main _____
def target(driver, *args):
# driver.config.translation.gc = "stmgc"
@@ -247,11 +241,9 @@
driver.config.translation.thread = True
return entry_point, None
-
def jitpolicy(self):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
-
if __name__ == "__main__":
entry_point(sys.argv)
From noreply at buildbot.pypy.org Thu Jul 10 12:56:53 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:53 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Added a
suppress_process_switch flag as a hack to enable the -r flag in the Squeak
image.
Message-ID: <20140710105653.7134A1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r876:f77d391e1255
Date: 2014-07-07 18:46 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/f77d391e1255/
Log: Added a suppress_process_switch flag as a hack to enable the -r flag
in the Squeak image.
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -86,6 +86,7 @@
s_new_context = s_sender
s_new_context.push(nlr.value)
except ProcessSwitch, p:
+ assert not self.space.suppress_process_switch, "ProcessSwitch should be disabled..."
if self.trace:
print "====== Switched process from: %s" % s_new_context.short_str()
print "====== to: %s " % p.s_new_context.short_str()
diff --git a/spyvm/objspace.py b/spyvm/objspace.py
--- a/spyvm/objspace.py
+++ b/spyvm/objspace.py
@@ -21,6 +21,9 @@
self.make_bootstrap_classes()
self.make_bootstrap_objects()
+
+ # This is a hack; see compile_code() in targetimageloadingsmalltalk.py
+ self.suppress_process_switch = False
def find_executable(self, executable):
if os.sep in executable or (os.name == "nt" and ":" in executable):
diff --git a/spyvm/wrapper.py b/spyvm/wrapper.py
--- a/spyvm/wrapper.py
+++ b/spyvm/wrapper.py
@@ -93,8 +93,9 @@
active_priority = active_process.priority()
priority = self.priority()
if priority > active_priority:
- active_process.deactivate(s_current_frame)
- self.activate()
+ if not self.space.suppress_process_switch:
+ active_process.deactivate(s_current_frame)
+ self.activate()
else:
self.put_to_sleep()
@@ -103,10 +104,11 @@
def suspend(self, s_current_frame):
if self.is_active_process():
- assert self.my_list().is_nil(self.space)
- w_process = scheduler(self.space).pop_highest_priority_process()
- self.deactivate(s_current_frame, put_to_sleep=False)
- ProcessWrapper(self.space, w_process).activate()
+ if not self.space.suppress_process_switch:
+ assert self.my_list().is_nil(self.space)
+ w_process = scheduler(self.space).pop_highest_priority_process()
+ self.deactivate(s_current_frame, put_to_sleep=False)
+ ProcessWrapper(self.space, w_process).activate()
else:
if not self.my_list().is_nil(self.space):
process_list = ProcessListWrapper(self.space, self.my_list())
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -172,13 +172,22 @@
space = interp.space
w_receiver_class = w_receiver.getclass(space)
try:
- w_result = interp.perform(
- w_receiver_class,
- "compile:classified:notifying:",
- w_arguments = [space.wrap_string("%s\r\n%s" % (selector, code)),
- space.wrap_string("spy-run-code"),
- space.w_nil]
- )
+ try:
+ # The suppress_process_switch flag is a hack/workaround to enable compiling code
+ # before having initialized the image cleanly. The problem is that the TimingSemaphore is not yet
+ # registered (primitive 136 not called), so the idle process will never be left once it is entered.
+ # TODO - Find a way to cleanly initialize the image, without executing the active_context of the image.
+ # Instead, we want to execute our own context. Then remove this flag (and all references to it)
+ interp.space.suppress_process_switch = True
+ w_result = interp.perform(
+ w_receiver_class,
+ "compile:classified:notifying:",
+ w_arguments = [space.wrap_string("%s\r\n%s" % (selector, code)),
+ space.wrap_string("spy-run-code"),
+ space.w_nil]
+ )
+ finally:
+ interp.space.suppress_process_switch = False
# TODO - is this expected in every image?
if not isinstance(w_result, model.W_BytesObject) or w_result.as_string() != selector:
print "Compilation failed, unexpected result: %s" % result_string(w_result)
From noreply at buildbot.pypy.org Thu Jul 10 12:56:54 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:54 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed the
suppress_process_switch hack, using an array to store the flag,
making it modifiable.
Message-ID: <20140710105654.88F551C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r877:307b424d7195
Date: 2014-07-09 13:59 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/307b424d7195/
Log: Fixed the suppress_process_switch hack, using an array to store the
flag, making it modifiable.
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -86,7 +86,7 @@
s_new_context = s_sender
s_new_context.push(nlr.value)
except ProcessSwitch, p:
- assert not self.space.suppress_process_switch, "ProcessSwitch should be disabled..."
+ assert not self.space.suppress_process_switch[0], "ProcessSwitch should be disabled..."
if self.trace:
print "====== Switched process from: %s" % s_new_context.short_str()
print "====== to: %s " % p.s_new_context.short_str()
diff --git a/spyvm/objspace.py b/spyvm/objspace.py
--- a/spyvm/objspace.py
+++ b/spyvm/objspace.py
@@ -23,7 +23,7 @@
self.make_bootstrap_objects()
# This is a hack; see compile_code() in targetimageloadingsmalltalk.py
- self.suppress_process_switch = False
+ self.suppress_process_switch = [False]
def find_executable(self, executable):
if os.sep in executable or (os.name == "nt" and ":" in executable):
diff --git a/spyvm/wrapper.py b/spyvm/wrapper.py
--- a/spyvm/wrapper.py
+++ b/spyvm/wrapper.py
@@ -93,7 +93,7 @@
active_priority = active_process.priority()
priority = self.priority()
if priority > active_priority:
- if not self.space.suppress_process_switch:
+ if not self.space.suppress_process_switch[0]:
active_process.deactivate(s_current_frame)
self.activate()
else:
@@ -104,7 +104,7 @@
def suspend(self, s_current_frame):
if self.is_active_process():
- if not self.space.suppress_process_switch:
+ if not self.space.suppress_process_switch[0]:
assert self.my_list().is_nil(self.space)
w_process = scheduler(self.space).pop_highest_priority_process()
self.deactivate(s_current_frame, put_to_sleep=False)
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -178,7 +178,7 @@
# registered (primitive 136 not called), so the idle process will never be left once it is entered.
# TODO - Find a way to cleanly initialize the image, without executing the active_context of the image.
# Instead, we want to execute our own context. Then remove this flag (and all references to it)
- interp.space.suppress_process_switch = True
+ interp.space.suppress_process_switch[0] = True
w_result = interp.perform(
w_receiver_class,
"compile:classified:notifying:",
@@ -187,7 +187,7 @@
space.w_nil]
)
finally:
- interp.space.suppress_process_switch = False
+ interp.space.suppress_process_switch[0] = False
# TODO - is this expected in every image?
if not isinstance(w_result, model.W_BytesObject) or w_result.as_string() != selector:
print "Compilation failed, unexpected result: %s" % result_string(w_result)
From noreply at buildbot.pypy.org Thu Jul 10 12:56:55 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:55 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Merged
Message-ID: <20140710105655.C64EB1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r878:656c751d8f58
Date: 2014-07-09 17:13 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/656c751d8f58/
Log: Merged
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -26,7 +26,7 @@
_immutable_fields_ = ["space", "image", "image_name",
"max_stack_depth", "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
-
+
jit_driver = jit.JitDriver(
greens=['pc', 'self', 'method'],
reds=['s_context'],
@@ -38,7 +38,7 @@
trace=False, evented=True, interrupts=True,
max_stack_depth=constants.MAX_LOOP_DEPTH):
import time
-
+
# === Initialize immutable variables
self.space = space
self.image = image
@@ -54,7 +54,7 @@
self.interrupt_counter_size = int(os.environ["SPY_ICS"])
except KeyError:
self.interrupt_counter_size = constants.INTERRUPT_COUNTER_SIZE
-
+
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
self.current_stack_depth = 0
@@ -91,7 +91,7 @@
print "====== Switched process from: %s" % s_new_context.short_str()
print "====== to: %s " % p.s_new_context.short_str()
s_new_context = p.s_new_context
-
+
def loop_bytecodes(self, s_context, may_context_switch=True):
old_pc = 0
if not jit.we_are_jitted() and may_context_switch:
@@ -118,7 +118,7 @@
raise nlr
else:
s_context.push(nlr.value)
-
+
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
def stack_frame(self, s_frame, s_sender, may_context_switch=True):
@@ -127,14 +127,14 @@
# Enter the context - store a virtual reference back to the sender
# Non-fresh contexts can happen, e.g. when activating a stored BlockContext.
# The same frame object must not pass through here recursively!
- if s_frame.is_fresh():
+ if s_frame.is_fresh() and s_sender is not None:
s_frame.virtual_sender = jit.virtual_ref(s_sender)
-
+
self.current_stack_depth += 1
if self.max_stack_depth > 0:
if self.current_stack_depth >= self.max_stack_depth:
raise StackOverflow(s_frame)
-
+
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
finally:
@@ -142,8 +142,8 @@
# Cleanly leave the context. This will finish the virtual sender-reference, if
# it is still there, which can happen in case of ProcessSwitch or StackOverflow;
# in case of a Return, this will already be handled while unwinding the stack.
- s_frame.finish_virtual_sender()
-
+ s_frame.finish_virtual_sender(s_sender)
+
def step(self, context):
bytecode = context.fetch_next_bytecode()
for entry in UNROLLING_BYTECODE_RANGES:
@@ -156,9 +156,9 @@
if start <= bytecode <= stop:
return getattr(context, methname)(self, bytecode)
assert 0, "unreachable"
-
+
# ============== Methods for handling user interrupts ==============
-
+
def jitted_check_for_interrupt(self, s_frame):
if not self.interrupts:
return
@@ -169,7 +169,7 @@
decr_by = int(trace_length // 100)
decr_by = max(decr_by, 1)
self.quick_check_for_interrupt(s_frame, decr_by)
-
+
def quick_check_for_interrupt(self, s_frame, dec=1):
if not self.interrupts:
return
@@ -205,7 +205,7 @@
return intmask(int((time.time() - self.startup_time) * 1000) & constants.TAGGED_MASK)
# ============== Convenience methods for executing code ==============
-
+
def interpret_toplevel(self, w_frame):
try:
self.loop(w_frame)
@@ -234,7 +234,7 @@
s_frame.push(w_receiver)
s_frame.push_all(list(w_arguments))
return s_frame
-
+
def padding(self, symbol=' '):
return symbol * self.current_stack_depth
@@ -264,11 +264,26 @@
class ProcessSwitch(ContextSwitchException):
"""This causes the interpreter to switch the executed context."""
+
+import rpython.rlib.unroll
+if hasattr(unroll, "unrolling_zero"):
+ unrolling_zero = unroll.unrolling_zero
+else:
+ class unrolling_int(int, unroll.SpecTag):
+ def __add__(self, other):
+ return unrolling_int(int.__add__(self, other))
+ __radd__ = __add__
+ def __sub__(self, other):
+ return unrolling_int(int.__sub__(self, other))
+ def __rsub__(self, other):
+ return unrolling_int(int.__rsub__(self, other))
+ unrolling_zero = unrolling_int(0)
+
+
# This is a decorator for bytecode implementation methods.
# parameter_bytes=N means N additional bytes are fetched as parameters.
def bytecode_implementation(parameter_bytes=0):
def bytecode_implementation_decorator(actual_implementation_method):
- from rpython.rlib.unroll import unrolling_zero
@jit.unroll_safe
def bytecode_implementation_wrapper(self, interp, current_bytecode):
parameters = ()
@@ -344,9 +359,9 @@
# __extend__ adds new methods to the ContextPartShadow class
class __extend__(ContextPartShadow):
-
+
# ====== Push/Pop bytecodes ======
-
+
@bytecode_implementation()
def pushReceiverVariableBytecode(self, interp, current_bytecode):
index = current_bytecode & 15
@@ -425,7 +440,7 @@
@bytecode_implementation()
def popStackBytecode(self, interp, current_bytecode):
self.pop()
-
+
@bytecode_implementation(parameter_bytes=1)
def pushNewArrayBytecode(self, interp, current_bytecode, descriptor):
arraySize, popIntoArray = splitter[7, 1](descriptor)
@@ -435,9 +450,9 @@
else:
newArray = interp.space.w_Array.as_class_get_shadow(interp.space).new(arraySize)
self.push(newArray)
-
+
# ====== Extended Push/Pop bytecodes ======
-
+
def _extendedVariableTypeAndIndex(self, descriptor):
return ((descriptor >> 6) & 3), (descriptor & 63)
@@ -473,16 +488,16 @@
@bytecode_implementation(parameter_bytes=1)
def extendedStoreBytecode(self, interp, current_bytecode, descriptor):
return self._extendedStoreBytecode(interp, current_bytecode, descriptor)
-
+
@bytecode_implementation(parameter_bytes=1)
def extendedStoreAndPopBytecode(self, interp, current_bytecode, descriptor):
self._extendedStoreBytecode(interp, current_bytecode, descriptor)
self.pop()
-
+
def _extract_index_and_temps(self, index_in_array, index_of_array):
w_indirectTemps = self.gettemp(index_of_array)
return index_in_array, w_indirectTemps
-
+
@bytecode_implementation(parameter_bytes=2)
def pushRemoteTempLongBytecode(self, interp, current_bytecode, index_in_array, index_of_array):
index_in_array, w_indirectTemps = self._extract_index_and_temps(index_in_array, index_of_array)
@@ -520,7 +535,7 @@
copiedValues: copiedValues).
self jump: blockSize
"""
-
+
space = self.space
numArgs, numCopied = splitter[4, 4](descriptor)
blockSize = (j << 8) | i
@@ -529,7 +544,7 @@
self.pop_and_return_n(numCopied))
self.push(w_closure)
self._jump(blockSize)
-
+
# ====== Helpers for send/return bytecodes ======
def _sendSelfSelector(self, w_selector, argcount, interp):
@@ -551,7 +566,7 @@
w_method = receiverclassshadow.lookup(w_selector)
except MethodNotFound:
return self._doesNotUnderstand(w_selector, argcount, interp, receiver)
-
+
code = w_method.primitive()
if code:
if w_arguments:
@@ -575,21 +590,21 @@
def _sendSelfSelectorSpecial(self, selector, numargs, interp):
w_selector = self.space.get_special_selector(selector)
return self._sendSelfSelector(w_selector, numargs, interp)
-
+
def _sendSpecialSelector(self, interp, receiver, special_selector, w_args=[]):
w_special_selector = self.space.objtable["w_" + special_selector]
s_class = receiver.class_shadow(self.space)
w_method = s_class.lookup(w_special_selector)
s_frame = w_method.create_frame(interp.space, receiver, w_args)
-
+
# ######################################################################
if interp.trace:
print '%s %s %s: #%s' % (interp.padding('#'), special_selector, s_frame.short_str(), w_args)
if not objectmodel.we_are_translated():
import pdb; pdb.set_trace()
-
+
return interp.stack_frame(s_frame, self)
-
+
def _doesNotUnderstand(self, w_selector, argcount, interp, receiver):
arguments = self.pop_and_return_n(argcount)
w_message_class = self.space.classtable["w_Message"]
@@ -599,7 +614,7 @@
w_message.store(self.space, 0, w_selector)
w_message.store(self.space, 1, self.space.wrap_list(arguments))
self.pop() # The receiver, already known.
-
+
try:
return self._sendSpecialSelector(interp, receiver, "doesNotUnderstand", [w_message])
except MethodNotFound:
@@ -608,10 +623,10 @@
assert isinstance(s_class, ClassShadow)
print "Missing doesNotUnderstand in hierarchy of %s" % s_class.getname()
raise
-
+
def _mustBeBoolean(self, interp, receiver):
return self._sendSpecialSelector(interp, receiver, "mustBeBoolean")
-
+
def _call_primitive(self, code, interp, argcount, w_method, w_selector):
# ##################################################################
if interp.trace:
@@ -631,11 +646,11 @@
def _return(self, return_value, interp, s_return_to):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
-
+
# ##################################################################
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
-
+
if s_return_to is None:
# This should never happen while executing a normal image.
raise ReturnFromTopLevel(return_value)
@@ -732,7 +747,7 @@
return self._sendSelfSelector(w_selector, argcount, interp)
# ====== Misc ======
-
+
def _activate_unwind_context(self, interp):
# TODO put the constant somewhere else.
# Primitive 198 is used in BlockClosure >> ensure:
@@ -750,11 +765,11 @@
raise nlr
finally:
self.mark_returned()
-
+
@bytecode_implementation()
def unknownBytecode(self, interp, current_bytecode):
raise MissingBytecode("unknownBytecode")
-
+
@bytecode_implementation()
def experimentalBytecode(self, interp, current_bytecode):
raise MissingBytecode("experimentalBytecode")
@@ -771,7 +786,7 @@
else:
w_alternative = interp.space.w_true
w_expected = interp.space.w_false
-
+
# Don't check the class, just compare with only two Boolean instances.
w_bool = self.pop()
if w_expected.is_same_object(w_bool):
diff --git a/spyvm/primitives.py b/spyvm/primitives.py
--- a/spyvm/primitives.py
+++ b/spyvm/primitives.py
@@ -1356,7 +1356,7 @@
def func(interp, s_frame, w_rcvr, w_selector, w_arguments):
from spyvm.shadow import MethodNotFound
s_frame.pop_n(2) # removing our arguments
-
+
return s_frame._sendSelector(w_selector, len(w_arguments), interp, w_rcvr,
w_rcvr.class_shadow(interp.space), w_arguments=w_arguments)
@@ -1392,8 +1392,8 @@
def func(interp, s_frame, w_rcvr):
assert_class(interp, w_rcvr, interp.space.w_Process)
wrapper.ProcessWrapper(interp.space, w_rcvr).suspend(s_frame)
-
-
+
+
@expose_primitive(FLUSH_CACHE, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -20,7 +20,7 @@
_immutable_fields_ = ['space']
provides_getname = False
repr_classname = "AbstractShadow"
-
+
def __init__(self, space, w_self):
self.space = space
assert w_self is None or isinstance(w_self, model.W_PointersObject)
@@ -34,19 +34,19 @@
return "<%s %s>" % (self.repr_classname, self.getname())
else:
return "<%s>" % self.repr_classname
-
+
def fetch(self, n0):
raise NotImplementedError("Abstract class")
def store(self, n0, w_value):
raise NotImplementedError("Abstract class")
def size(self):
raise NotImplementedError("Abstract class")
-
+
def attach_shadow(self): pass
-
+
def copy_field_from(self, n0, other_shadow):
self.store(n0, other_shadow.fetch(n0))
-
+
# This can be overwritten to change the order of initialization.
def copy_from(self, other_shadow):
assert self.size() == other_shadow.size()
@@ -98,24 +98,24 @@
# Class must provide: wrap, unwrap, nil_value, is_nil_value, wrapper_class
_attrs_ = ['storage']
_immutable_fields_ = ['storage']
-
+
def __init__(self, space, w_self, size):
AbstractStorageShadow.__init__(self, space, w_self, size)
self.storage = [self.nil_value] * size
-
+
def size(self):
return len(self.storage)
-
+
def generalized_strategy_for(self, w_val):
return ListStorageShadow
-
+
def fetch(self, n0):
val = self.storage[n0]
if self.is_nil_value(val):
return self.space.w_nil
else:
return self.wrap(self.space, val)
-
+
def do_store(self, n0, w_val):
if w_val.is_nil(self.space):
self.storage[n0] = self.nil_value
@@ -134,7 +134,7 @@
nil_value = constants.MAXINT
wrapper_class = model.W_SmallInteger
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(SmallIntegerOrNilStorageShadow, space, w_val)
@@ -153,7 +153,7 @@
nil_value = sys.float_info.max
wrapper_class = model.W_Float
import_from_mixin(AbstractValueOrNilStorageMixin)
-
+
@staticmethod
def static_can_contain(space, w_val):
return _value_or_nil_can_handle(FloatOrNilStorageShadow, space, w_val)
@@ -193,17 +193,17 @@
if float_can_handle and not FloatOrNilStorageShadow.static_can_contain(space, w_obj):
float_can_handle = False
specialized_strategies = specialized_strategies - 1
-
+
if specialized_strategies <= 0:
return ListStorageShadow
-
+
if all_nil_can_handle:
return AllNilStorageShadow
if small_int_can_handle:
return SmallIntegerOrNilStorageShadow
if float_can_handle:
return FloatOrNilStorageShadow
-
+
# If this happens, please look for a bug in the code above.
assert False, "No strategy could be found for list..."
@@ -223,7 +223,7 @@
_immutable_fields_ = ['storage']
repr_classname = "ListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [self.space.w_nil] * size
def fetch(self, n0):
@@ -236,7 +236,7 @@
_immutable_fields_ = ['storage']
repr_classname = "WeakListStorageShadow"
import_from_mixin(ListStorageMixin)
-
+
def initialize_storage(self, size):
self.storage = [weakref.ref(self.space.w_nil)] * size
def fetch(self, n0):
@@ -245,14 +245,14 @@
def store(self, n0, w_value):
assert w_value is not None
self.storage[n0] = weakref.ref(w_value)
-
+
class AbstractCachingShadow(ListStorageShadow):
_immutable_fields_ = ['version?']
_attrs_ = ['version']
repr_classname = "AbstractCachingShadow"
import_from_mixin(version.VersionMixin)
version = None
-
+
def __init__(self, space, w_self):
ListStorageShadow.__init__(self, space, w_self, 0)
self.changed()
@@ -284,7 +284,7 @@
_s_superclass = _s_methoddict = None
provides_getname = True
repr_classname = "ClassShadow"
-
+
def __init__(self, space, w_self):
self.subclass_s = {}
AbstractCachingShadow.__init__(self, space, w_self)
@@ -305,7 +305,7 @@
# In Slang the value is read directly as a boxed integer, so that
# the code gets a "pointer" whose bits are set as above, but
# shifted one bit to the left and with the lowest bit set to 1.
-
+
# Compute the instance size (really the size, not the number of bytes)
instsize_lo = (classformat >> 1) & 0x3F
instsize_hi = (classformat >> (9 + 1)) & 0xC0
@@ -313,10 +313,10 @@
# decode the instSpec
format = (classformat >> 7) & 15
self.instance_varsized = format >= 2
-
+
# In case of raised exception below.
self.changed()
-
+
if format < 4:
self.instance_kind = POINTERS
elif format == 4:
@@ -356,7 +356,7 @@
return
# Some of the special info has changed -> Switch version.
self.changed()
-
+
def store_w_superclass(self, w_class):
superclass = self._s_superclass
if w_class is None or w_class.is_nil(self.space):
@@ -383,24 +383,24 @@
return
if methoddict: methoddict.s_class = None
self.store_s_methoddict(s_new_methoddict)
-
+
def store_s_methoddict(self, s_methoddict):
s_methoddict.s_class = self
s_methoddict.sync_method_cache()
self._s_methoddict = s_methoddict
-
+
def attach_s_class(self, s_other):
self.subclass_s[s_other] = None
def detach_s_class(self, s_other):
del self.subclass_s[s_other]
-
+
def store_w_name(self, w_name):
if isinstance(w_name, model.W_BytesObject):
self.name = w_name.as_string()
else:
self.name = None
-
+
@jit.unroll_safe
def flush_method_caches(self):
look_in_shadow = self
@@ -497,7 +497,7 @@
self.version = version
for s_class in self.subclass_s:
s_class.superclass_changed(version)
-
+
# _______________________________________________________________
# Methods used only in testing
@@ -532,7 +532,7 @@
_immutable_fields_ = ['invalid?', 's_class']
_attrs_ = ['methoddict', 'invalid', 's_class']
repr_classname = "MethodDictionaryShadow"
-
+
def __init__(self, space, w_self):
self.invalid = True
self.s_class = None
@@ -541,7 +541,7 @@
def update(self):
self.sync_method_cache()
-
+
def find_selector(self, w_selector):
if self.invalid:
return None # we may be invalid if Smalltalk code did not call flushCache
@@ -593,7 +593,7 @@
class AbstractRedirectingShadow(AbstractShadow):
_attrs_ = ['_w_self_size']
repr_classname = "AbstractRedirectingShadow"
-
+
def __init__(self, space, w_self):
AbstractShadow.__init__(self, space, w_self)
if w_self is not None:
@@ -611,7 +611,7 @@
'_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
repr_classname = "ContextPartShadow"
-
+
_virtualizable_ = [
'direct_sender', 'virtual_sender',
"_pc", "_temps_and_stack[*]", "_stack_ptr",
@@ -620,7 +620,7 @@
# ______________________________________________________________________
# Initialization
-
+
def __init__(self, space, w_self):
self.direct_sender = None
self.virtual_sender = jit.vref_None
@@ -632,26 +632,26 @@
AbstractRedirectingShadow.copy_field_from(self, n0, other_shadow)
except error.SenderChainManipulation, e:
assert e.s_context == self
-
+
def copy_from(self, other_shadow):
# Some fields have to be initialized before the rest, to ensure correct initialization.
privileged_fields = self.fields_to_copy_first()
for n0 in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
# Now the temp size will be known.
self.init_stack_and_temps()
-
+
for n0 in range(self.size()):
if n0 not in privileged_fields:
self.copy_field_from(n0, other_shadow)
-
+
def fields_to_copy_first(self):
return []
-
+
# ______________________________________________________________________
# Accessing object fields
-
+
def fetch(self, n0):
if n0 == constants.CTXPART_SENDER_INDEX:
return self.w_sender()
@@ -690,45 +690,47 @@
else:
# XXX later should store tail out of known context part as well
raise error.WrapperException("Index in context out of bounds")
-
+
# === Sender ===
    # There are two fields for the sender (virtual and direct). Only one of them can be set at a time.
# As long as the frame object is virtualized, using the virtual reference should increase performance.
# As soon as a frame object is forced to the heap, the direct reference must be used.
-
+
def is_fresh(self):
return self.direct_sender is None and self.virtual_sender is jit.vref_None
-
- def finish_virtual_sender(self, save_direct_sender=True):
+
+ def finish_virtual_sender(self, s_sender):
if self.virtual_sender is not jit.vref_None:
- sender = self.virtual_sender()
- jit.virtual_ref_finish(self.virtual_sender, sender)
+ if self.pc() != -1:
+ # stack is unrolling, but this frame was not
+ # marked_returned: it is an escaped frame
+ sender = self.virtual_sender()
+ self.direct_sender = sender
+ jit.virtual_ref_finish(self.virtual_sender, s_sender)
self.virtual_sender = jit.vref_None
- if save_direct_sender:
- self.direct_sender = sender
-
+
def store_s_sender(self, s_sender, raise_error=True):
# If we have a virtual back reference, we must finish it before storing the direct reference.
- self.finish_virtual_sender(save_direct_sender=False)
+ # self.finish_virtual_sender(save_direct_sender=False)
self.direct_sender = s_sender
if raise_error:
raise error.SenderChainManipulation(self)
-
+
def w_sender(self):
sender = self.s_sender()
if sender is None:
return self.space.w_nil
return sender.w_self()
-
+
def s_sender(self):
if self.direct_sender:
return self.direct_sender
else:
result = self.virtual_sender()
return result
-
+
# === Stack Pointer ===
-
+
def unwrap_store_stackpointer(self, w_sp1):
# the stackpointer in the W_PointersObject starts counting at the
# tempframe start
@@ -747,12 +749,12 @@
def stackdepth(self):
return rarithmetic.intmask(self._stack_ptr)
-
+
def wrap_stackpointer(self):
return self.space.wrap_int(self.stackdepth())
# === Program Counter ===
-
+
def store_unwrap_pc(self, w_pc):
if w_pc.is_nil(self.space):
self.store_pc(-1)
@@ -777,9 +779,9 @@
def store_pc(self, newpc):
assert newpc >= -1
self._pc = newpc
-
+
# === Subclassed accessors ===
-
+
def s_home(self):
raise NotImplementedError()
@@ -788,18 +790,18 @@
def w_receiver(self):
raise NotImplementedError()
-
+
def w_method(self):
raise NotImplementedError()
-
+
def tempsize(self):
raise NotImplementedError()
-
+
def is_closure_context(self):
raise NotImplementedError()
-
+
# === Other properties of Contexts ===
-
+
def mark_returned(self):
self.store_pc(-1)
self.store_s_sender(None, raise_error=False)
@@ -809,25 +811,25 @@
def external_stackpointer(self):
return self.stackdepth() + self.stackstart()
-
+
def stackend(self):
# XXX this is incorrect when there is subclassing
return self._w_self_size
-
+
def fetch_next_bytecode(self):
pc = jit.promote(self._pc)
assert pc >= 0
self._pc += 1
return self.fetch_bytecode(pc)
-
+
def fetch_bytecode(self, pc):
bytecode = self.w_method().fetch_bytecode(pc)
return ord(bytecode)
-
+
# ______________________________________________________________________
# Temporary Variables
#
- # Every context has it's own stack. BlockContexts share their temps with
+ # Every context has it's own stack. BlockContexts share their temps with
# their home contexts. MethodContexts created from a BlockClosure get their
# temps copied from the closure upon activation. Changes are not propagated back;
# this is handled by the compiler by allocating an extra Array for temps.
@@ -837,7 +839,7 @@
def settemp(self, index, w_value):
raise NotImplementedError()
-
+
# ______________________________________________________________________
# Stack Manipulation
@@ -851,13 +853,13 @@
for i in range(tempsize):
temps_and_stack[i] = self.space.w_nil
self._stack_ptr = rarithmetic.r_uint(tempsize) # we point after the last element
-
+
def stack_get(self, index0):
return self._temps_and_stack[index0]
-
+
def stack_put(self, index0, w_val):
self._temps_and_stack[index0] = w_val
-
+
def stack(self):
"""NOT_RPYTHON""" # purely for testing
return self._temps_and_stack[self.tempsize():self._stack_ptr]
@@ -912,7 +914,7 @@
# ______________________________________________________________________
# Primitive support
-
+
def store_instances_array(self, w_class, match_w):
# used for primitives 77 & 78
self.instances_w[w_class] = match_w
@@ -939,7 +941,7 @@
j += 1
retval += "\n---------------------"
return retval
-
+
def short_str(self):
arg_strings = self.argument_strings()
if len(arg_strings) > 0:
@@ -953,10 +955,10 @@
self.w_receiver().as_repr_string(),
args
)
-
+
def print_stack(self, method=True):
return self.print_padded_stack(method)[1]
-
+
def print_padded_stack(self, method):
padding = ret_str = ''
if self.s_sender() is not None:
@@ -970,9 +972,9 @@
class BlockContextShadow(ContextPartShadow):
_attrs_ = ['_w_home', '_initialip', '_eargc']
repr_classname = "BlockContextShadow"
-
+
# === Initialization ===
-
+
def __init__(self, space, w_self=None, w_home=None, argcnt=0, initialip=0):
self = jit.hint(self, access_directly=True, fresh_virtualizable=True)
creating_w_self = w_self is None
@@ -992,40 +994,40 @@
def fields_to_copy_first(self):
return [ constants.BLKCTX_HOME_INDEX ]
-
+
# === Implemented accessors ===
-
+
def s_home(self):
return self._w_home.as_methodcontext_get_shadow(self.space)
-
+
def stackstart(self):
return constants.BLKCTX_STACK_START
def tempsize(self):
# A blockcontext doesn't have any temps
return 0
-
+
def w_receiver(self):
return self.s_home().w_receiver()
-
+
def w_method(self):
retval = self.s_home().w_method()
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def is_closure_context(self):
return True
-
+
# === Temporary variables ===
-
+
def gettemp(self, index):
return self.s_home().gettemp(index)
def settemp(self, index, w_value):
self.s_home().settemp(index, w_value)
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.BLKCTX_HOME_INDEX:
return self._w_home
@@ -1045,11 +1047,11 @@
return self.unwrap_store_eargc(w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_home(self, w_home):
assert isinstance(w_home, model.W_PointersObject)
self._w_home = w_home
-
+
def unwrap_store_initialip(self, w_value):
initialip = self.space.unwrap_int(w_value)
initialip -= 1 + self.w_method().literalsize
@@ -1057,18 +1059,18 @@
def store_initialip(self, initialip):
self._initialip = initialip
-
+
def wrap_initialip(self):
initialip = self.initialip()
initialip += 1 + self.w_method().literalsize
return self.space.wrap_int(initialip)
-
+
def reset_pc(self):
self.store_pc(self.initialip())
-
+
def initialip(self):
return self._initialip
-
+
def unwrap_store_eargc(self, w_value):
self.store_expected_argument_count(self.space.unwrap_int(w_value))
@@ -1082,24 +1084,24 @@
self._eargc = argc
# === Stack Manipulation ===
-
+
def reset_stack(self):
self.pop_n(self.stackdepth())
# === Printing ===
-
+
def argument_strings(self):
return []
-
+
def method_str(self):
return '[] in %s' % self.w_method().get_identifier_string()
class MethodContextShadow(ContextPartShadow):
_attrs_ = ['closure', '_w_receiver', '_w_method']
repr_classname = "MethodContextShadow"
-
+
# === Initialization ===
-
+
@jit.unroll_safe
def __init__(self, space, w_self=None, w_method=None, w_receiver=None,
arguments=[], closure=None, pc=0):
@@ -1108,7 +1110,7 @@
self.store_w_receiver(w_receiver)
self.store_pc(pc)
self.closure = closure
-
+
if w_method:
self.store_w_method(w_method)
# The summand is needed, because we calculate i.a. our stackdepth relative of the size of w_self.
@@ -1117,20 +1119,20 @@
self.init_stack_and_temps()
else:
self._w_method = None
-
+
argc = len(arguments)
for i0 in range(argc):
self.settemp(i0, arguments[i0])
-
+
if closure:
for i0 in range(closure.size()):
self.settemp(i0+argc, closure.at0(i0))
def fields_to_copy_first(self):
return [ constants.MTHDCTX_METHOD, constants.MTHDCTX_CLOSURE_OR_NIL ]
-
+
# === Accessing object fields ===
-
+
def fetch(self, n0):
if n0 == constants.MTHDCTX_METHOD:
return self.w_method()
@@ -1164,12 +1166,12 @@
return self.settemp(temp_i, w_value)
else:
return ContextPartShadow.store(self, n0, w_value)
-
+
def store_w_receiver(self, w_receiver):
self._w_receiver = w_receiver
-
+
# === Implemented Accessors ===
-
+
def s_home(self):
if self.is_closure_context():
# this is a context for a blockClosure
@@ -1182,31 +1184,31 @@
return s_outerContext.s_home()
else:
return self
-
+
def stackstart(self):
return constants.MTHDCTX_TEMP_FRAME_START
-
+
def store_w_method(self, w_method):
assert isinstance(w_method, model.W_CompiledMethod)
self._w_method = w_method
def w_receiver(self):
return self._w_receiver
-
+
def w_method(self):
retval = self._w_method
assert isinstance(retval, model.W_CompiledMethod)
return retval
-
+
def tempsize(self):
if not self.is_closure_context():
return self.w_method().tempsize()
else:
return self.closure.tempsize()
-
+
def is_closure_context(self):
return self.closure is not None
-
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
@@ -1223,9 +1225,9 @@
self._w_self = w_self
self._w_self_size = w_self.size()
return w_self
-
+
# === Temporary variables ===
-
+
def gettemp(self, index0):
return self.stack_get(index0)
@@ -1233,7 +1235,7 @@
self.stack_put(index0, w_value)
# === Printing ===
-
+
def argument_strings(self):
argcount = self.w_method().argsize
tempsize = self.w_method().tempsize()
From noreply at buildbot.pypy.org Thu Jul 10 12:56:56 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:56 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Enabled headless mode by
default, to behave like the original flags.
Message-ID: <20140710105656.E628C1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r879:cf03ec7872eb
Date: 2014-07-10 11:46 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/cf03ec7872eb/
Log: Enabled headless mode by default, to behave like the original flags.
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -12,7 +12,7 @@
def _usage(argv):
print """
- Usage: %s [-r|-m] [-naH] [-jpis] [-tlLE]
+ Usage: %s [-r|-m] [-naHu] [-jpis] [-tlLE]
- image path (default: Squeak.image)
Execution mode:
@@ -24,13 +24,12 @@
Execution parameters:
-n|--num - Only with -m or -r, SmallInteger to be used as receiver (default: nil).
-a|--arg - Only with -m, will be used as single String argument.
- -H|--headless - Only with -m or -r, run in headless mode.
- Execute the context directly, ignoring the active context in the image.
- The execution will 'hijack' the active process.
- Image window will probably not open. Good for benchmarking.
- By default, a high-priority process will be created for the context, then the image
- will be started normally.
- -u - Only with -m or -r, try to stop UI-process at startup. Can help with -H.
+ -P|--process - Only with -m or -r, create a high-priority Process for the context.
+ The image's last active Process will be started first.
+ By default, run in headless mode. This will ignore the active process
+ in the image and execute the context directly. The image window will
+ probably not open. Good for benchmarking.
+ -u - Only with -m or -r, try to stop UI-process at startup. Can help benchmarking.
Other parameters:
-j|--jit - jitargs will be passed to the jit configuration.
@@ -62,7 +61,7 @@
number = 0
have_number = False
stringarg = None
- headless = False
+ headless = True
# == Other parameters
poll = False
interrupts = True
@@ -100,8 +99,8 @@
elif arg in ["-s"]:
arg, idx = get_parameter(argv, idx, arg)
max_stack_depth = int(arg)
- elif arg in ["-H", "--headless"]:
- headless = True
+ elif arg in ["-P", "--process"]:
+ headless = False
elif arg in ["-u"]:
from spyvm.plugins.vmdebugging import stop_ui_process
stop_ui_process()
From noreply at buildbot.pypy.org Thu Jul 10 12:56:58 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:58 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Removed virtual reference of
sender. Was causing performance problems.
Message-ID: <20140710105658.07B3F1C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r880:4934d77ae183
Date: 2014-07-10 12:38 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/4934d77ae183/
Log: Removed virtual reference of sender. Was causing performance
problems. Going to add this refactoring to the vref branch.
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -67,10 +67,7 @@
s_new_context = w_active_context.as_context_get_shadow(self.space)
while True:
assert self.current_stack_depth == 0
- # Need to save s_sender, loop_bytecodes will nil this on return
- # Virtual references are not allowed here, and neither are "fresh" contexts (except for the toplevel one).
- assert s_new_context.virtual_sender is jit.vref_None
- s_sender = s_new_context.direct_sender
+ s_sender = s_new_context.s_sender()
try:
self.loop_bytecodes(s_new_context)
raise Exception("loop_bytecodes left without raising...")
@@ -81,7 +78,7 @@
except Return, nlr:
s_new_context = s_sender
while s_new_context is not nlr.s_target_context:
- s_sender = s_new_context.direct_sender
+ s_sender = s_new_context.s_sender()
s_new_context._activate_unwind_context(self)
s_new_context = s_sender
s_new_context.push(nlr.value)
@@ -122,27 +119,19 @@
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
def stack_frame(self, s_frame, s_sender, may_context_switch=True):
- assert s_frame.virtual_sender is jit.vref_None
try:
- # Enter the context - store a virtual reference back to the sender
- # Non-fresh contexts can happen, e.g. when activating a stored BlockContext.
- # The same frame object must not pass through here recursively!
- if s_frame.is_fresh() and s_sender is not None:
- s_frame.virtual_sender = jit.virtual_ref(s_sender)
-
+ if s_frame._s_sender is None and s_sender is not None:
+ s_frame.store_s_sender(s_sender, raise_error=False)
+
self.current_stack_depth += 1
if self.max_stack_depth > 0:
if self.current_stack_depth >= self.max_stack_depth:
raise StackOverflow(s_frame)
-
+
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
finally:
self.current_stack_depth -= 1
- # Cleanly leave the context. This will finish the virtual sender-reference, if
- # it is still there, which can happen in case of ProcessSwitch or StackOverflow;
- # in case of a Return, this will already be handled while unwinding the stack.
- s_frame.finish_virtual_sender(s_sender)
def step(self, context):
bytecode = context.fetch_next_bytecode()
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -607,13 +607,13 @@
class ContextPartShadow(AbstractRedirectingShadow):
__metaclass__ = extendabletype
- _attrs_ = ['direct_sender', 'virtual_sender',
+ _attrs_ = ['_s_sender',
'_pc', '_temps_and_stack',
'_stack_ptr', 'instances_w']
repr_classname = "ContextPartShadow"
_virtualizable_ = [
- 'direct_sender', 'virtual_sender',
+ '_s_sender',
"_pc", "_temps_and_stack[*]", "_stack_ptr",
"_w_self", "_w_self_size"
]
@@ -622,8 +622,7 @@
# Initialization
def __init__(self, space, w_self):
- self.direct_sender = None
- self.virtual_sender = jit.vref_None
+ self._s_sender = None
AbstractRedirectingShadow.__init__(self, space, w_self)
self.instances_w = {}
@@ -692,27 +691,9 @@
raise error.WrapperException("Index in context out of bounds")
# === Sender ===
- # There are two fields for the sender (virtual and direct). Only one of them is can be set at a time.
- # As long as the frame object is virtualized, using the virtual reference should increase performance.
- # As soon as a frame object is forced to the heap, the direct reference must be used.
-
- def is_fresh(self):
- return self.direct_sender is None and self.virtual_sender is jit.vref_None
-
- def finish_virtual_sender(self, s_sender):
- if self.virtual_sender is not jit.vref_None:
- if self.pc() != -1:
- # stack is unrolling, but this frame was not
- # marked_returned: it is an escaped frame
- sender = self.virtual_sender()
- self.direct_sender = sender
- jit.virtual_ref_finish(self.virtual_sender, s_sender)
- self.virtual_sender = jit.vref_None
def store_s_sender(self, s_sender, raise_error=True):
- # If we have a virtual back reference, we must finish it before storing the direct reference.
- # self.finish_virtual_sender(save_direct_sender=False)
- self.direct_sender = s_sender
+ self._s_sender = s_sender
if raise_error:
raise error.SenderChainManipulation(self)
@@ -723,11 +704,7 @@
return sender.w_self()
def s_sender(self):
- if self.direct_sender:
- return self.direct_sender
- else:
- result = self.virtual_sender()
- return result
+ return self._s_sender
# === Stack Pointer ===
From noreply at buildbot.pypy.org Thu Jul 10 12:56:59 2014
From: noreply at buildbot.pypy.org (anton_gulenko)
Date: Thu, 10 Jul 2014 12:56:59 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: Fixed console output when
executing an entire image.
Message-ID: <20140710105659.190181C0906@cobra.cs.uni-duesseldorf.de>
Author: Anton Gulenko
Branch: storage
Changeset: r881:54246f8c1106
Date: 2014-07-10 12:41 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/54246f8c1106/
Log: Fixed console output when executing an entire image.
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -163,6 +163,8 @@
def result_string(w_result):
# This will also print contents of strings/symbols/numbers
+ if not w_result:
+ return ""
return w_result.as_repr_string().replace('\r', '\n')
def compile_code(interp, w_receiver, code):
@@ -232,6 +234,7 @@
return w_active_context.as_context_get_shadow(space)
def execute_context(interp, s_frame, measure=False):
+ print "" # Line break after image-loading-indicator characters
try:
return interp.interpret_toplevel(s_frame.w_self())
except error.Exit, e:
From noreply at buildbot.pypy.org Thu Jul 10 14:28:02 2014
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 10 Jul 2014 14:28:02 +0200 (CEST)
Subject: [pypy-commit] pypy default: "Your tests are not a benchmark": add
link from the FAQ
Message-ID: <20140710122802.890E41C1068@cobra.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r72411:f57183ddd4d0
Date: 2014-07-10 14:27 +0200
http://bitbucket.org/pypy/pypy/changeset/f57183ddd4d0/
Log: "Your tests are not a benchmark": add link from the FAQ
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -171,16 +171,21 @@
You might be interested in our `benchmarking site`_ and our
`jit documentation`_.
-Note that the JIT has a very high warm-up cost, meaning that the
-programs are slow at the beginning. If you want to compare the timings
-with CPython, even relatively simple programs need to run *at least* one
-second, preferrably at least a few seconds. Large, complicated programs
-need even more time to warm-up the JIT.
+`Your tests are not a benchmark`_: tests tend to be slow under PyPy
+because they run exactly once; if they are good tests, they exercise
+various corner cases in your code. This is a bad case for JIT
+compilers. Note also that our JIT has a very high warm-up cost, meaning
+that any program is slow at the beginning. If you want to compare the
+timings with CPython, even relatively simple programs need to run *at
+least* one second, preferably at least a few seconds. Large,
+complicated programs need even more time to warm-up the JIT.
.. _`benchmarking site`: http://speed.pypy.org
.. _`jit documentation`: jit/index.html
+.. _`your tests are not a benchmark`: http://alexgaynor.net/2013/jul/15/your-tests-are-not-benchmark/
+
---------------------------------------------------------------
Couldn't the JIT dump and reload already-compiled machine code?
---------------------------------------------------------------
From noreply at buildbot.pypy.org Thu Jul 10 15:29:22 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:22 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: use StackOverflow protection
from rlib
Message-ID: <20140710132922.73BC51C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r882:ca8d309213f3
Date: 2014-07-10 12:00 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/ca8d309213f3/
Log: use StackOverflow protection from rlib
diff --git a/spyvm/constants.py b/spyvm/constants.py
--- a/spyvm/constants.py
+++ b/spyvm/constants.py
@@ -190,6 +190,5 @@
# Interpreter constants
#
-MAX_LOOP_DEPTH = 100
INTERRUPT_COUNTER_SIZE = 10000
CompileTime = time.time()
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -4,7 +4,7 @@
from spyvm import model, constants, primitives, conftest, wrapper
from spyvm.tool.bitmanipulation import splitter
-from rpython.rlib import jit
+from rpython.rlib import jit, rstackovf
from rpython.rlib import objectmodel, unroll
class MissingBytecode(Exception):
@@ -24,7 +24,7 @@
class Interpreter(object):
_immutable_fields_ = ["space", "image", "image_name",
- "max_stack_depth", "interrupt_counter_size",
+ "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
jit_driver = jit.JitDriver(
@@ -35,8 +35,7 @@
)
def __init__(self, space, image=None, image_name="",
- trace=False, evented=True, interrupts=True,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ trace=False, evented=True, interrupts=True):
import time
# === Initialize immutable variables
@@ -47,7 +46,6 @@
self.startup_time = image.startup_time
else:
self.startup_time = constants.CompileTime
- self.max_stack_depth = max_stack_depth
self.evented = evented
self.interrupts = interrupts
try:
@@ -57,7 +55,6 @@
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
- self.current_stack_depth = 0
self.next_wakeup_tick = 0
self.trace = trace
self.trace_proxy = False
@@ -66,7 +63,6 @@
# This is the top-level loop and is not invoked recursively.
s_new_context = w_active_context.as_context_get_shadow(self.space)
while True:
- assert self.current_stack_depth == 0
# Need to save s_sender, loop_bytecodes will nil this on return
# Virtual references are not allowed here, and neither are "fresh" contexts (except for the toplevel one).
assert s_new_context.virtual_sender is jit.vref_None
@@ -128,16 +124,12 @@
# The same frame object must not pass through here recursively!
if s_frame.is_fresh() and s_sender is not None:
s_frame.virtual_sender = jit.virtual_ref(s_sender)
-
- self.current_stack_depth += 1
- if self.max_stack_depth > 0:
- if self.current_stack_depth >= self.max_stack_depth:
- raise StackOverflow(s_frame)
-
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
+ except rstackovf.StackOverflow:
+ rstackovf.check_stack_overflow()
+ raise StackOverflow(s_frame)
finally:
- self.current_stack_depth -= 1
# Cleanly leave the context. This will finish the virtual sender-reference, if
# it is still there, which can happen in case of ProcessSwitch or StackOverflow;
# in case of a Return, this will already be handled while unwinding the stack.
@@ -237,7 +229,7 @@
return s_frame
def padding(self, symbol=' '):
- return symbol * self.current_stack_depth
+ return symbol
class ReturnFromTopLevel(Exception):
_attrs_ = ["object"]
@@ -976,11 +968,9 @@
# in order to enable tracing/jumping for message sends etc.
def debugging():
def stepping_debugger_init(original):
- def meth(self, space, image=None, image_name="", trace=False,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ def meth(self, space, image=None, image_name="", trace=False):
return_value = original(self, space, image=image,
- image_name=image_name, trace=trace,
- max_stack_depth=max_stack_depth)
+ image_name=image_name, trace=trace)
# ##############################################################
self.message_stepping = False
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -87,7 +87,7 @@
except error.Exit, e:
print e.msg
return 1
-
+
if not as_benchmark:
try:
w_result = interp.perform(w_receiver, selector)
@@ -121,12 +121,11 @@
-b|--benchmark [code string]
-p|--poll_events
-ni|--no-interrupts
- -d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
-l|--storage-log
-L|--storage-log-aggregate
-E|--storage-log-elements
[image path, default: Squeak.image]
- """ % (argv[0], constants.MAX_LOOP_DEPTH)
+ """ % argv[0]
def _arg_missing(argv, idx, arg):
if len(argv) == idx + 1:
@@ -144,9 +143,8 @@
stringarg = ""
code = None
as_benchmark = False
- max_stack_depth = constants.MAX_LOOP_DEPTH
interrupts = True
-
+
while idx < len(argv):
arg = argv[idx]
if arg in ["-h", "--help"]:
@@ -185,10 +183,6 @@
idx += 1
elif arg in ["-ni", "--no-interrupts"]:
interrupts = False
- elif arg in ["-d", "--max-stack-depth"]:
- _arg_missing(argv, idx, arg)
- max_stack_depth = int(argv[idx + 1])
- idx += 1
elif arg in ["-l", "--storage-log"]:
storage_logger.activate()
elif arg in ["-L", "--storage-log-aggregate"]:
@@ -215,13 +209,13 @@
except OSError as e:
os.write(2, "%s -- %s (LoadError)\n" % (os.strerror(e.errno), path))
return 1
-
+
space = prebuilt_space
image_reader = squeakimage.reader_for_image(space, squeakimage.Stream(data=imagedata))
image = create_image(space, image_reader)
interp = interpreter.Interpreter(space, image, image_name=path,
trace=trace, evented=evented,
- interrupts=interrupts, max_stack_depth=max_stack_depth)
+ interrupts=interrupts)
space.runtime_setup(argv[0])
result = 0
if benchmark is not None:
From noreply at buildbot.pypy.org Thu Jul 10 15:29:23 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:23 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: avoid forcing s_sender for
local returns (that is, all returns from methods and return top from block)
Message-ID: <20140710132923.9E83A1C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r883:cb252f497113
Date: 2014-07-10 14:08 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/cb252f497113/
Log: avoid forcing s_sender for local returns (that is, all returns from
methods and return top from block)
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -75,11 +75,13 @@
print "====== StackOverflow, contexts forced to heap at: %s" % e.s_new_context.short_str()
s_new_context = e.s_new_context
except Return, nlr:
+ assert nlr.s_target_context or nlr.is_local
s_new_context = s_sender
- while s_new_context is not nlr.s_target_context:
- s_sender = s_new_context.direct_sender
- s_new_context._activate_unwind_context(self)
- s_new_context = s_sender
+ if not nlr.is_local:
+ while s_new_context is not nlr.s_target_context:
+ s_sender = s_new_context.direct_sender
+ s_new_context._activate_unwind_context(self)
+ s_new_context = s_sender
s_new_context.push(nlr.value)
except ProcessSwitch, p:
if self.trace:
@@ -108,11 +110,16 @@
try:
self.step(s_context)
except Return, nlr:
- if nlr.s_target_context is not s_context:
+ if nlr.s_target_context is s_context or nlr.is_local:
+ s_context.push(nlr.value)
+ else:
+ if nlr.s_target_context is None:
+ # This is the case where we are returning to our sender.
+ # Mark the return as local, so our sender will take it
+ nlr.is_local = True
s_context._activate_unwind_context(self)
raise nlr
- else:
- s_context.push(nlr.value)
+
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
@@ -237,10 +244,11 @@
self.object = object
class Return(Exception):
- _attrs_ = ["value", "s_target_context"]
+ _attrs_ = ["value", "s_target_context", "is_local"]
def __init__(self, s_target_context, w_result):
self.value = w_result
self.s_target_context = s_target_context
+ self.is_local = False
class ContextSwitchException(Exception):
"""General Exception that causes the interpreter to leave
@@ -636,7 +644,7 @@
interp.padding(), code, w_method.safe_identifier_string(), w_selector.str_content())
raise e
- def _return(self, return_value, interp, s_return_to):
+ def _return(self, return_value, interp, local_return=False):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
@@ -644,36 +652,47 @@
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
- if s_return_to is None:
- # This should never happen while executing a normal image.
- raise ReturnFromTopLevel(return_value)
+ if self.home_is_self() or local_return:
+ # a local return just needs to go up the stack once. there
+ # it will find the sender as a local, and we don't have to
+ # force the reference
+ s_return_to = None
+ if self.direct_sender is None and self.virtual_sender is jit.vref_None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+ else:
+ s_return_to = self.s_home().s_sender()
+ if s_return_to is None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+
raise Return(s_return_to, return_value)
# ====== Send/Return bytecodes ======
@bytecode_implementation()
def returnReceiverBytecode(self, interp, current_bytecode):
- return self._return(self.w_receiver(), interp, self.s_home().s_sender())
+ return self._return(self.w_receiver(), interp)
@bytecode_implementation()
def returnTrueBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_true, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_true, interp)
@bytecode_implementation()
def returnFalseBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_false, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_false, interp)
@bytecode_implementation()
def returnNilBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_nil, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_nil, interp)
@bytecode_implementation()
def returnTopFromMethodBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_home().s_sender())
+ return self._return(self.pop(), interp)
@bytecode_implementation()
def returnTopFromBlockBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_sender())
+ return self._return(self.pop(), interp, local_return=True)
@bytecode_implementation()
def sendLiteralSelectorBytecode(self, interp, current_bytecode):
@@ -754,7 +773,8 @@
try:
self.bytecodePrimValue(interp, 0)
except Return, nlr:
- if self is not nlr.s_target_context:
+ assert nlr.s_target_context or nlr.is_local
+ if self is not nlr.s_target_context and not nlr.is_local:
raise nlr
finally:
self.mark_returned()
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -800,6 +800,9 @@
def is_closure_context(self):
raise NotImplementedError()
+ def home_is_self(self):
+ raise NotImplementedError()
+
# === Other properties of Contexts ===
def mark_returned(self):
@@ -1018,6 +1021,9 @@
def is_closure_context(self):
return True
+ def home_is_self(self):
+ return False
+
# === Temporary variables ===
def gettemp(self, index):
@@ -1209,6 +1215,9 @@
def is_closure_context(self):
return self.closure is not None
+ def home_is_self(self):
+ return not self.is_closure_context()
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
From noreply at buildbot.pypy.org Thu Jul 10 15:29:24 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:24 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage-vrefs-rstackovf-localreturn:
separate branch
Message-ID: <20140710132924.B4D2A1C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage-vrefs-rstackovf-localreturn
Changeset: r884:03b440963bdb
Date: 2014-07-10 14:25 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/03b440963bdb/
Log: separate branch
From noreply at buildbot.pypy.org Thu Jul 10 15:29:25 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:25 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage-vrefs-rstackovf-localreturn:
use StackOverflow protection from rlib
Message-ID: <20140710132925.DD5CB1C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage-vrefs-rstackovf-localreturn
Changeset: r885:6704ab7a2008
Date: 2014-07-10 12:00 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/6704ab7a2008/
Log: use StackOverflow protection from rlib
diff --git a/spyvm/constants.py b/spyvm/constants.py
--- a/spyvm/constants.py
+++ b/spyvm/constants.py
@@ -190,6 +190,5 @@
# Interpreter constants
#
-MAX_LOOP_DEPTH = 100
INTERRUPT_COUNTER_SIZE = 10000
CompileTime = time.time()
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -4,7 +4,7 @@
from spyvm import model, constants, primitives, conftest, wrapper
from spyvm.tool.bitmanipulation import splitter
-from rpython.rlib import jit
+from rpython.rlib import jit, rstackovf
from rpython.rlib import objectmodel, unroll
class MissingBytecode(Exception):
@@ -24,7 +24,7 @@
class Interpreter(object):
_immutable_fields_ = ["space", "image", "image_name",
- "max_stack_depth", "interrupt_counter_size",
+ "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
jit_driver = jit.JitDriver(
@@ -35,8 +35,7 @@
)
def __init__(self, space, image=None, image_name="",
- trace=False, evented=True, interrupts=True,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ trace=False, evented=True, interrupts=True):
import time
# === Initialize immutable variables
@@ -47,7 +46,6 @@
self.startup_time = image.startup_time
else:
self.startup_time = constants.CompileTime
- self.max_stack_depth = max_stack_depth
self.evented = evented
self.interrupts = interrupts
try:
@@ -57,7 +55,6 @@
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
- self.current_stack_depth = 0
self.next_wakeup_tick = 0
self.trace = trace
self.trace_proxy = False
@@ -66,7 +63,6 @@
# This is the top-level loop and is not invoked recursively.
s_new_context = w_active_context.as_context_get_shadow(self.space)
while True:
- assert self.current_stack_depth == 0
# Need to save s_sender, loop_bytecodes will nil this on return
# Virtual references are not allowed here, and neither are "fresh" contexts (except for the toplevel one).
assert s_new_context.virtual_sender is jit.vref_None
@@ -128,16 +124,12 @@
# The same frame object must not pass through here recursively!
if s_frame.is_fresh() and s_sender is not None:
s_frame.virtual_sender = jit.virtual_ref(s_sender)
-
- self.current_stack_depth += 1
- if self.max_stack_depth > 0:
- if self.current_stack_depth >= self.max_stack_depth:
- raise StackOverflow(s_frame)
-
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
+ except rstackovf.StackOverflow:
+ rstackovf.check_stack_overflow()
+ raise StackOverflow(s_frame)
finally:
- self.current_stack_depth -= 1
# Cleanly leave the context. This will finish the virtual sender-reference, if
# it is still there, which can happen in case of ProcessSwitch or StackOverflow;
# in case of a Return, this will already be handled while unwinding the stack.
@@ -237,7 +229,7 @@
return s_frame
def padding(self, symbol=' '):
- return symbol * self.current_stack_depth
+ return symbol
class ReturnFromTopLevel(Exception):
_attrs_ = ["object"]
@@ -976,11 +968,9 @@
# in order to enable tracing/jumping for message sends etc.
def debugging():
def stepping_debugger_init(original):
- def meth(self, space, image=None, image_name="", trace=False,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ def meth(self, space, image=None, image_name="", trace=False):
return_value = original(self, space, image=image,
- image_name=image_name, trace=trace,
- max_stack_depth=max_stack_depth)
+ image_name=image_name, trace=trace)
# ##############################################################
self.message_stepping = False
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -87,7 +87,7 @@
except error.Exit, e:
print e.msg
return 1
-
+
if not as_benchmark:
try:
w_result = interp.perform(w_receiver, selector)
@@ -121,12 +121,11 @@
-b|--benchmark [code string]
-p|--poll_events
-ni|--no-interrupts
- -d|--max-stack-depth [number, default %d, <= 0 disables stack protection]
-l|--storage-log
-L|--storage-log-aggregate
-E|--storage-log-elements
[image path, default: Squeak.image]
- """ % (argv[0], constants.MAX_LOOP_DEPTH)
+ """ % argv[0]
def _arg_missing(argv, idx, arg):
if len(argv) == idx + 1:
@@ -144,9 +143,8 @@
stringarg = ""
code = None
as_benchmark = False
- max_stack_depth = constants.MAX_LOOP_DEPTH
interrupts = True
-
+
while idx < len(argv):
arg = argv[idx]
if arg in ["-h", "--help"]:
@@ -185,10 +183,6 @@
idx += 1
elif arg in ["-ni", "--no-interrupts"]:
interrupts = False
- elif arg in ["-d", "--max-stack-depth"]:
- _arg_missing(argv, idx, arg)
- max_stack_depth = int(argv[idx + 1])
- idx += 1
elif arg in ["-l", "--storage-log"]:
storage_logger.activate()
elif arg in ["-L", "--storage-log-aggregate"]:
@@ -215,13 +209,13 @@
except OSError as e:
os.write(2, "%s -- %s (LoadError)\n" % (os.strerror(e.errno), path))
return 1
-
+
space = prebuilt_space
image_reader = squeakimage.reader_for_image(space, squeakimage.Stream(data=imagedata))
image = create_image(space, image_reader)
interp = interpreter.Interpreter(space, image, image_name=path,
trace=trace, evented=evented,
- interrupts=interrupts, max_stack_depth=max_stack_depth)
+ interrupts=interrupts)
space.runtime_setup(argv[0])
result = 0
if benchmark is not None:
From noreply at buildbot.pypy.org Thu Jul 10 15:29:26 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:26 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage-vrefs-rstackovf-localreturn:
avoid forcing s_sender for local returns (that is,
all returns from methods and return top from block)
Message-ID: <20140710132926.EF9371C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage-vrefs-rstackovf-localreturn
Changeset: r886:71d4742bcc58
Date: 2014-07-10 14:08 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/71d4742bcc58/
Log: avoid forcing s_sender for local returns (that is, all returns from
methods and return top from block)
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -75,11 +75,13 @@
print "====== StackOverflow, contexts forced to heap at: %s" % e.s_new_context.short_str()
s_new_context = e.s_new_context
except Return, nlr:
+ assert nlr.s_target_context or nlr.is_local
s_new_context = s_sender
- while s_new_context is not nlr.s_target_context:
- s_sender = s_new_context.direct_sender
- s_new_context._activate_unwind_context(self)
- s_new_context = s_sender
+ if not nlr.is_local:
+ while s_new_context is not nlr.s_target_context:
+ s_sender = s_new_context.direct_sender
+ s_new_context._activate_unwind_context(self)
+ s_new_context = s_sender
s_new_context.push(nlr.value)
except ProcessSwitch, p:
if self.trace:
@@ -108,11 +110,16 @@
try:
self.step(s_context)
except Return, nlr:
- if nlr.s_target_context is not s_context:
+ if nlr.s_target_context is s_context or nlr.is_local:
+ s_context.push(nlr.value)
+ else:
+ if nlr.s_target_context is None:
+ # This is the case where we are returning to our sender.
+ # Mark the return as local, so our sender will take it
+ nlr.is_local = True
s_context._activate_unwind_context(self)
raise nlr
- else:
- s_context.push(nlr.value)
+
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
@@ -237,10 +244,11 @@
self.object = object
class Return(Exception):
- _attrs_ = ["value", "s_target_context"]
+ _attrs_ = ["value", "s_target_context", "is_local"]
def __init__(self, s_target_context, w_result):
self.value = w_result
self.s_target_context = s_target_context
+ self.is_local = False
class ContextSwitchException(Exception):
"""General Exception that causes the interpreter to leave
@@ -636,7 +644,7 @@
interp.padding(), code, w_method.safe_identifier_string(), w_selector.str_content())
raise e
- def _return(self, return_value, interp, s_return_to):
+ def _return(self, return_value, interp, local_return=False):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
@@ -644,36 +652,47 @@
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
- if s_return_to is None:
- # This should never happen while executing a normal image.
- raise ReturnFromTopLevel(return_value)
+ if self.home_is_self() or local_return:
+ # a local return just needs to go up the stack once. there
+ # it will find the sender as a local, and we don't have to
+ # force the reference
+ s_return_to = None
+ if self.direct_sender is None and self.virtual_sender is jit.vref_None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+ else:
+ s_return_to = self.s_home().s_sender()
+ if s_return_to is None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+
raise Return(s_return_to, return_value)
# ====== Send/Return bytecodes ======
@bytecode_implementation()
def returnReceiverBytecode(self, interp, current_bytecode):
- return self._return(self.w_receiver(), interp, self.s_home().s_sender())
+ return self._return(self.w_receiver(), interp)
@bytecode_implementation()
def returnTrueBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_true, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_true, interp)
@bytecode_implementation()
def returnFalseBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_false, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_false, interp)
@bytecode_implementation()
def returnNilBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_nil, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_nil, interp)
@bytecode_implementation()
def returnTopFromMethodBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_home().s_sender())
+ return self._return(self.pop(), interp)
@bytecode_implementation()
def returnTopFromBlockBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_sender())
+ return self._return(self.pop(), interp, local_return=True)
@bytecode_implementation()
def sendLiteralSelectorBytecode(self, interp, current_bytecode):
@@ -754,7 +773,8 @@
try:
self.bytecodePrimValue(interp, 0)
except Return, nlr:
- if self is not nlr.s_target_context:
+ assert nlr.s_target_context or nlr.is_local
+ if self is not nlr.s_target_context and not nlr.is_local:
raise nlr
finally:
self.mark_returned()
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -800,6 +800,9 @@
def is_closure_context(self):
raise NotImplementedError()
+ def home_is_self(self):
+ raise NotImplementedError()
+
# === Other properties of Contexts ===
def mark_returned(self):
@@ -1018,6 +1021,9 @@
def is_closure_context(self):
return True
+ def home_is_self(self):
+ return False
+
# === Temporary variables ===
def gettemp(self, index):
@@ -1209,6 +1215,9 @@
def is_closure_context(self):
return self.closure is not None
+ def home_is_self(self):
+ return not self.is_closure_context()
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
From noreply at buildbot.pypy.org Thu Jul 10 15:29:28 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:28 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: use StackOverflow protection
from rlib
Message-ID: <20140710132928.14E411C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r887:75fb3a75ff6b
Date: 2014-07-10 12:00 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/75fb3a75ff6b/
Log: use StackOverflow protection from rlib
diff --git a/spyvm/constants.py b/spyvm/constants.py
--- a/spyvm/constants.py
+++ b/spyvm/constants.py
@@ -190,6 +190,5 @@
# Interpreter constants
#
-MAX_LOOP_DEPTH = 100
INTERRUPT_COUNTER_SIZE = 10000
CompileTime = time.time()
From noreply at buildbot.pypy.org Thu Jul 10 15:29:29 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:29 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: use StackOverflow protection
from rlib
Message-ID: <20140710132929.318AE1C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r888:afd77668220d
Date: 2014-07-10 12:00 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/afd77668220d/
Log: use StackOverflow protection from rlib
diff --git a/spyvm/constants.py b/spyvm/constants.py
--- a/spyvm/constants.py
+++ b/spyvm/constants.py
@@ -190,6 +190,5 @@
# Interpreter constants
#
-MAX_LOOP_DEPTH = 100
INTERRUPT_COUNTER_SIZE = 10000
CompileTime = time.time()
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -4,7 +4,7 @@
from spyvm import model, constants, primitives, conftest, wrapper
from spyvm.tool.bitmanipulation import splitter
-from rpython.rlib import jit
+from rpython.rlib import jit, rstackovf
from rpython.rlib import objectmodel, unroll
class MissingBytecode(Exception):
@@ -24,7 +24,7 @@
class Interpreter(object):
_immutable_fields_ = ["space", "image", "image_name",
- "max_stack_depth", "interrupt_counter_size",
+ "interrupt_counter_size",
"startup_time", "evented", "interrupts"]
jit_driver = jit.JitDriver(
@@ -35,8 +35,7 @@
)
def __init__(self, space, image=None, image_name="",
- trace=False, evented=True, interrupts=True,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ trace=False, evented=True, interrupts=True):
import time
# === Initialize immutable variables
@@ -47,7 +46,6 @@
self.startup_time = image.startup_time
else:
self.startup_time = constants.CompileTime
- self.max_stack_depth = max_stack_depth
self.evented = evented
self.interrupts = interrupts
try:
@@ -57,7 +55,6 @@
# === Initialize mutable variables
self.interrupt_check_counter = self.interrupt_counter_size
- self.current_stack_depth = 0
self.next_wakeup_tick = 0
self.trace = trace
self.trace_proxy = False
@@ -66,7 +63,6 @@
# This is the top-level loop and is not invoked recursively.
s_new_context = w_active_context.as_context_get_shadow(self.space)
while True:
- assert self.current_stack_depth == 0
s_sender = s_new_context.s_sender()
try:
self.loop_bytecodes(s_new_context)
@@ -122,16 +118,11 @@
try:
if s_frame._s_sender is None and s_sender is not None:
s_frame.store_s_sender(s_sender, raise_error=False)
-
- self.current_stack_depth += 1
- if self.max_stack_depth > 0:
- if self.current_stack_depth >= self.max_stack_depth:
- raise StackOverflow(s_frame)
-
# Now (continue to) execute the context bytecodes
self.loop_bytecodes(s_frame, may_context_switch)
- finally:
- self.current_stack_depth -= 1
+ except rstackovf.StackOverflow:
+ rstackovf.check_stack_overflow()
+ raise StackOverflow(s_frame)
def step(self, context):
bytecode = context.fetch_next_bytecode()
@@ -205,7 +196,7 @@
s_frame = self.create_toplevel_context(w_receiver, selector, w_selector, w_arguments)
self.interrupt_check_counter = self.interrupt_counter_size
return self.interpret_toplevel(s_frame.w_self())
-
+
def create_toplevel_context(self, w_receiver, selector="", w_selector=None, w_arguments=[]):
if w_selector is None:
assert selector, "Need either string or W_Object selector"
@@ -213,7 +204,7 @@
w_selector = self.image.w_asSymbol
else:
w_selector = self.perform(self.space.wrap_string(selector), "asSymbol")
-
+
w_method = model.W_CompiledMethod(self.space, header=512)
w_method.literalatput0(self.space, 1, w_selector)
assert len(w_arguments) <= 7
@@ -225,7 +216,7 @@
return s_frame
def padding(self, symbol=' '):
- return symbol * self.current_stack_depth
+ return symbol
class ReturnFromTopLevel(Exception):
_attrs_ = ["object"]
@@ -964,11 +955,9 @@
# in order to enable tracing/jumping for message sends etc.
def debugging():
def stepping_debugger_init(original):
- def meth(self, space, image=None, image_name="", trace=False,
- max_stack_depth=constants.MAX_LOOP_DEPTH):
+ def meth(self, space, image=None, image_name="", trace=False):
return_value = original(self, space, image=image,
- image_name=image_name, trace=trace,
- max_stack_depth=max_stack_depth)
+ image_name=image_name, trace=trace)
# ##############################################################
self.message_stepping = False
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -14,13 +14,13 @@
print """
Usage: %s [-r|-m] [-naHu] [-jpis] [-tlLE]
- image path (default: Squeak.image)
-
+
Execution mode:
(no flags) - Image will be normally opened.
-r|--run - Code will be compiled and executed, result printed.
-m|--method - Selector will be sent to a SmallInteger, result printed.
-h|--help - Output this and exit.
-
+
Execution parameters:
-n|--num - Only with -m or -r, SmallInteger to be used as receiver (default: nil).
-a|--arg - Only with -m, will be used as single String argument.
@@ -30,28 +30,25 @@
in the image and execute the context directly. The image window will
probably not open. Good for benchmarking.
-u - Only with -m or -r, try to stop UI-process at startup. Can help benchmarking.
-
+
Other parameters:
-j|--jit - jitargs will be passed to the jit configuration.
-p|--poll - Actively poll for events. Try this if the image is not responding well.
-i|--no-interrupts - Disable timer interrupt. Disables non-cooperative scheduling.
- -s - After num stack frames, the entire stack will be dumped to the heap.
- This breaks performance, but protects agains stack overflow.
- num <= 0 disables stack protection (default: %d)
-
+
Logging parameters:
-t|--trace - Output a trace of each message, primitive, return value and process switch.
-l|--storage-log - Output a log of storage operations.
-L|--storage-log-aggregate - Output an aggregated storage log at the end of execution.
-E|--storage-log-elements - Include classnames of elements into the storage log.
-
- """ % (argv[0], constants.MAX_LOOP_DEPTH)
+
+ """ % argv[0]
def get_parameter(argv, idx, arg):
if len(argv) < idx + 1:
raise RuntimeError("Error: missing argument after %s" % arg)
return argv[idx], idx + 1
-
+
prebuilt_space = objspace.ObjSpace()
def entry_point(argv):
@@ -65,12 +62,11 @@
# == Other parameters
poll = False
interrupts = True
- max_stack_depth = constants.MAX_LOOP_DEPTH
trace = False
-
+
path = argv[1] if len(argv) > 1 else "Squeak.image"
idx = 2
-
+
while idx < len(argv):
arg = argv[idx]
idx += 1
@@ -96,9 +92,6 @@
code, idx = get_parameter(argv, idx, arg)
elif arg in ["-i", "--no-interrupts"]:
interrupts = False
- elif arg in ["-s"]:
- arg, idx = get_parameter(argv, idx, arg)
- max_stack_depth = int(arg)
elif arg in ["-P", "--process"]:
headless = False
elif arg in ["-u"]:
@@ -113,10 +106,10 @@
else:
_usage(argv)
return -1
-
+
if code and selector:
raise RuntimeError("Cannot handle both -r and -m.")
-
+
path = rpath.rabspath(path)
try:
f = open_file_as_stream(path, mode="rb", buffering=0)
@@ -127,16 +120,16 @@
except OSError as e:
os.write(2, "%s -- %s (LoadError)\n" % (os.strerror(e.errno), path))
return 1
-
+
# Load & prepare image and environment
space = prebuilt_space
image_reader = squeakimage.reader_for_image(space, squeakimage.Stream(data=imagedata))
image = create_image(space, image_reader)
interp = interpreter.Interpreter(space, image, image_name=path,
trace=trace, evented=not poll,
- interrupts=interrupts, max_stack_depth=max_stack_depth)
+ interrupts=interrupts)
space.runtime_setup(argv[0])
-
+
# Create context to be executed
if code or selector:
if not have_number:
@@ -155,7 +148,7 @@
context = active_context(interp.space)
else:
context = active_context(interp.space)
-
+
w_result = execute_context(interp, context)
print result_string(w_result)
storage_logger.print_aggregated_log()
@@ -198,13 +191,13 @@
return None
w_receiver_class.as_class_get_shadow(space).s_methoddict().sync_method_cache()
return selector
-
+
def create_context(interp, w_receiver, selector, stringarg):
args = []
if stringarg:
args.append(interp.space.wrap_string(stringarg))
return interp.create_toplevel_context(w_receiver, selector, w_arguments = args)
-
+
def create_process(interp, s_frame):
space = interp.space
w_active_process = wrapper.scheduler(space).active_process()
@@ -221,10 +214,10 @@
priority = 7
w_benchmark_proc.store(space, 1, s_frame.w_self())
w_benchmark_proc.store(space, 2, space.wrap_int(priority))
-
+
# Make process eligible for scheduling
wrapper.ProcessWrapper(space, w_benchmark_proc).put_to_sleep()
-
+
def active_context(space):
w_active_process = wrapper.scheduler(space).active_process()
active_process = wrapper.ProcessWrapper(space, w_active_process)
@@ -240,7 +233,7 @@
except error.Exit, e:
print "Exited: %s" % e.msg
return None
-
+
# _____ Target and Main _____
def target(driver, *args):
From noreply at buildbot.pypy.org Thu Jul 10 15:29:30 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 15:29:30 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: avoid forcing s_sender for
local returns (that is, all returns from methods and return top from block)
Message-ID: <20140710132930.6BB661C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r889:50071fb31ad9
Date: 2014-07-10 14:08 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/50071fb31ad9/
Log: avoid forcing s_sender for local returns (that is, all returns from
methods and return top from block)
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -72,11 +72,13 @@
print "====== StackOverflow, contexts forced to heap at: %s" % e.s_new_context.short_str()
s_new_context = e.s_new_context
except Return, nlr:
+ assert nlr.s_target_context or nlr.is_local
s_new_context = s_sender
- while s_new_context is not nlr.s_target_context:
- s_sender = s_new_context.s_sender()
- s_new_context._activate_unwind_context(self)
- s_new_context = s_sender
+ if not nlr.is_local:
+ while s_new_context is not nlr.s_target_context:
+ s_sender = s_new_context.s_sender()
+ s_new_context._activate_unwind_context(self)
+ s_new_context = s_sender
s_new_context.push(nlr.value)
except ProcessSwitch, p:
assert not self.space.suppress_process_switch[0], "ProcessSwitch should be disabled..."
@@ -106,11 +108,16 @@
try:
self.step(s_context)
except Return, nlr:
- if nlr.s_target_context is not s_context:
+ if nlr.s_target_context is s_context or nlr.is_local:
+ s_context.push(nlr.value)
+ else:
+ if nlr.s_target_context is None:
+ # This is the case where we are returning to our sender.
+ # Mark the return as local, so our sender will take it
+ nlr.is_local = True
s_context._activate_unwind_context(self)
raise nlr
- else:
- s_context.push(nlr.value)
+
# This is a wrapper around loop_bytecodes that cleanly enters/leaves the frame
# and handles the stack overflow protection mechanism.
@@ -224,10 +231,11 @@
self.object = object
class Return(Exception):
- _attrs_ = ["value", "s_target_context"]
+ _attrs_ = ["value", "s_target_context", "is_local"]
def __init__(self, s_target_context, w_result):
self.value = w_result
self.s_target_context = s_target_context
+ self.is_local = False
class ContextSwitchException(Exception):
"""General Exception that causes the interpreter to leave
@@ -623,7 +631,7 @@
interp.padding(), code, w_method.safe_identifier_string(), w_selector.str_content())
raise e
- def _return(self, return_value, interp, s_return_to):
+ def _return(self, return_value, interp, local_return=False):
# unfortunately, this assert is not true for some tests. TODO fix this.
# assert self._stack_ptr == self.tempsize()
@@ -631,36 +639,47 @@
if interp.trace:
print '%s<- %s' % (interp.padding(), return_value.as_repr_string())
- if s_return_to is None:
- # This should never happen while executing a normal image.
- raise ReturnFromTopLevel(return_value)
+ if self.home_is_self() or local_return:
+ # a local return just needs to go up the stack once. there
+ # it will find the sender as a local, and we don't have to
+ # force the reference
+ s_return_to = None
+ if self.s_sender() is None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+ else:
+ s_return_to = self.s_home().s_sender()
+ if s_return_to is None:
+ # This should never happen while executing a normal image.
+ raise ReturnFromTopLevel(return_value)
+
raise Return(s_return_to, return_value)
# ====== Send/Return bytecodes ======
@bytecode_implementation()
def returnReceiverBytecode(self, interp, current_bytecode):
- return self._return(self.w_receiver(), interp, self.s_home().s_sender())
+ return self._return(self.w_receiver(), interp)
@bytecode_implementation()
def returnTrueBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_true, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_true, interp)
@bytecode_implementation()
def returnFalseBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_false, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_false, interp)
@bytecode_implementation()
def returnNilBytecode(self, interp, current_bytecode):
- return self._return(interp.space.w_nil, interp, self.s_home().s_sender())
+ return self._return(interp.space.w_nil, interp)
@bytecode_implementation()
def returnTopFromMethodBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_home().s_sender())
+ return self._return(self.pop(), interp)
@bytecode_implementation()
def returnTopFromBlockBytecode(self, interp, current_bytecode):
- return self._return(self.pop(), interp, self.s_sender())
+ return self._return(self.pop(), interp, local_return=True)
@bytecode_implementation()
def sendLiteralSelectorBytecode(self, interp, current_bytecode):
@@ -741,7 +760,8 @@
try:
self.bytecodePrimValue(interp, 0)
except Return, nlr:
- if self is not nlr.s_target_context:
+ assert nlr.s_target_context or nlr.is_local
+ if self is not nlr.s_target_context and not nlr.is_local:
raise nlr
finally:
self.mark_returned()
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -777,6 +777,9 @@
def is_closure_context(self):
raise NotImplementedError()
+ def home_is_self(self):
+ raise NotImplementedError()
+
# === Other properties of Contexts ===
def mark_returned(self):
@@ -995,6 +998,9 @@
def is_closure_context(self):
return True
+ def home_is_self(self):
+ return False
+
# === Temporary variables ===
def gettemp(self, index):
@@ -1186,6 +1192,9 @@
def is_closure_context(self):
return self.closure is not None
+ def home_is_self(self):
+ return not self.is_closure_context()
+
# ______________________________________________________________________
# Marriage of MethodContextShadows with PointerObjects only when required
From noreply at buildbot.pypy.org Thu Jul 10 16:58:32 2014
From: noreply at buildbot.pypy.org (timfel)
Date: Thu, 10 Jul 2014 16:58:32 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk storage: pull checking for
BlockContext>>ensure: primitive into the shadows
Message-ID: <20140710145832.583281C1068@cobra.cs.uni-duesseldorf.de>
Author: Tim Felgentreff
Branch: storage
Changeset: r890:6998efebafd6
Date: 2014-07-10 16:58 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/6998efebafd6/
Log: pull checking for BlockContext>>ensure: primitive into the shadows
diff --git a/spyvm/interpreter.py b/spyvm/interpreter.py
--- a/spyvm/interpreter.py
+++ b/spyvm/interpreter.py
@@ -748,9 +748,7 @@
# ====== Misc ======
def _activate_unwind_context(self, interp):
- # TODO put the constant somewhere else.
- # Primitive 198 is used in BlockClosure >> ensure:
- if self.is_closure_context() or self.w_method().primitive() != 198:
+ if self.is_closure_context() or not self.is_BlockClosure_ensure():
self.mark_returned()
return
# The first temp is executed flag for both #ensure: and #ifCurtailed:
diff --git a/spyvm/shadow.py b/spyvm/shadow.py
--- a/spyvm/shadow.py
+++ b/spyvm/shadow.py
@@ -777,6 +777,9 @@
def is_closure_context(self):
raise NotImplementedError()
+ def is_BlockClosure_ensure(self):
+ raise NotImplementedError()
+
def home_is_self(self):
raise NotImplementedError()
@@ -998,6 +1001,9 @@
def is_closure_context(self):
return True
+ def is_BlockClosure_ensure(self):
+ return False
+
def home_is_self(self):
return False
@@ -1080,7 +1086,7 @@
return '[] in %s' % self.w_method().get_identifier_string()
class MethodContextShadow(ContextPartShadow):
- _attrs_ = ['closure', '_w_receiver', '_w_method']
+ _attrs_ = ['closure', '_w_receiver', '_w_method', '_is_BlockClosure_ensure']
repr_classname = "MethodContextShadow"
# === Initialization ===
@@ -1102,6 +1108,7 @@
self.init_stack_and_temps()
else:
self._w_method = None
+ self._is_BlockClosure_ensure = False
argc = len(arguments)
for i0 in range(argc):
@@ -1174,6 +1181,9 @@
def store_w_method(self, w_method):
assert isinstance(w_method, model.W_CompiledMethod)
self._w_method = w_method
+ if w_method:
+ # Primitive 198 is used in BlockClosure >> ensure:
+ self._is_BlockClosure_ensure = (w_method.primitive() == 198)
def w_receiver(self):
return self._w_receiver
@@ -1192,6 +1202,9 @@
def is_closure_context(self):
return self.closure is not None
+ def is_BlockClosure_ensure(self):
+ return self._is_BlockClosure_ensure
+
def home_is_self(self):
return not self.is_closure_context()
diff --git a/spyvm/test/jittest/base.py b/spyvm/test/jittest/base.py
--- a/spyvm/test/jittest/base.py
+++ b/spyvm/test/jittest/base.py
@@ -18,7 +18,7 @@
def run(self, spy, tmpdir, code):
logfile = str(tmpdir.join("x.pypylog"))
proc = subprocess.Popen(
- [str(spy), "-r", code.replace("\n", "\r\n"), BenchmarkImage],
+ [str(spy), BenchmarkImage, "-r", code.replace("\n", "\r\n")],
cwd=str(tmpdir),
env={"PYPYLOG": "jit-log-opt:%s" % logfile,
"SDL_VIDEODRIVER": "dummy"}
From noreply at buildbot.pypy.org Thu Jul 10 17:17:26 2014
From: noreply at buildbot.pypy.org (Conrad Calmez)
Date: Thu, 10 Jul 2014 17:17:26 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk stmgc-c7: passed arguments to image
via own flag
Message-ID: <20140710151726.797A61C0F86@cobra.cs.uni-duesseldorf.de>
Author: Conrad Calmez
Branch: stmgc-c7
Changeset: r891:ada1ec7e18d8
Date: 2014-07-10 17:17 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/ada1ec7e18d8/
Log: passed arguments to image via own flag also fixes BSD-like argument
parsing aka. you can put them in arbitrary order again
diff --git a/targetimageloadingsmalltalk.py b/targetimageloadingsmalltalk.py
--- a/targetimageloadingsmalltalk.py
+++ b/targetimageloadingsmalltalk.py
@@ -125,6 +125,7 @@
-r|--run [code string]
-b|--benchmark [code string]
-p|--poll_events
+ -s|--smalltalk-args [argument to pass]
[image path, default: Squeak.image]
""" % argv[0]
@@ -148,9 +149,7 @@
while idx < len(argv):
arg = argv[idx]
- if path is not None: # smalltalk args
- smalltalk_args.append(arg)
- elif arg in ["-h", "--help"]:
+ if arg in ["-h", "--help"]:
_usage(argv)
return 0
elif arg in ["-j", "--jit"]:
@@ -184,6 +183,9 @@
code = argv[idx + 1]
as_benchmark = True
idx += 1
+ elif arg in ["-s", "--smalltalk-args"]:
+ smalltalk_args.append(argv[idx + 1])
+ idx += 1
elif path is None:
path = argv[idx]
else:
From noreply at buildbot.pypy.org Thu Jul 10 17:23:23 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Thu, 10 Jul 2014 17:23:23 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: Fix byte-index / char-index mixup
Message-ID: <20140710152323.39FF81C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72412:b286a841b645
Date: 2014-07-08 22:24 -0500
http://bitbucket.org/pypy/pypy/changeset/b286a841b645/
Log: Fix byte-index / char-index mixup
diff --git a/pypy/objspace/std/test/test_unicodeobject.py b/pypy/objspace/std/test/test_unicodeobject.py
--- a/pypy/objspace/std/test/test_unicodeobject.py
+++ b/pypy/objspace/std/test/test_unicodeobject.py
@@ -380,7 +380,9 @@
assert u'ab'.startswith(u'a', 1) is False
assert u'ab'.startswith(u'b', 1) is True
assert u'abc'.startswith(u'bc', 1, 2) is False
- assert u'abc'.startswith(u'c', -1, 4) is True
+
+ assert u'\xE4bc'.startswith(u'\xE4') is True
+ assert u'\xE4\xE4bc'.startswith(u'\xE4', 1) is True
def test_startswith_tuples(self):
assert u'hello'.startswith((u'he', u'ha'))
diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
--- a/pypy/objspace/std/unicodeobject.py
+++ b/pypy/objspace/std/unicodeobject.py
@@ -324,11 +324,11 @@
def _startswith(self, space, value, w_prefix, start, end):
return startswith(value.bytes, self._op_val(space, w_prefix).bytes,
- start, end)
+ value.index_of_char(start), value.index_of_char(end))
def _endswith(self, space, value, w_prefix, start, end):
return endswith(value.bytes, self._op_val(space, w_prefix).bytes,
- start, end)
+ value.index_of_char(start), value.index_of_char(end))
@staticmethod
def _split(value, sep=None, maxsplit=-1):
From noreply at buildbot.pypy.org Thu Jul 10 17:23:24 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Thu, 10 Jul 2014 17:23:24 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: Handle UnicodeEncodeError better;
make default error handlers RPython
Message-ID: <20140710152324.B46441C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72413:02e451d4a78b
Date: 2014-07-09 00:42 -0500
http://bitbucket.org/pypy/pypy/changeset/02e451d4a78b/
Log: Handle UnicodeEncodeError better; make default error handlers
RPython
diff --git a/pypy/interpreter/utf8_codecs.py b/pypy/interpreter/utf8_codecs.py
--- a/pypy/interpreter/utf8_codecs.py
+++ b/pypy/interpreter/utf8_codecs.py
@@ -1,7 +1,7 @@
import sys
from rpython.rlib.rstring import StringBuilder
-from rpython.rlib.objectmodel import specialize
+from rpython.rlib.objectmodel import we_are_translated, specialize
from rpython.rlib.rarithmetic import r_uint, intmask
from rpython.rlib.unicodedata import unicodedb
from rpython.rlib.runicode import utf8_code_length
@@ -1564,7 +1564,6 @@
def default_unicode_error_decode(errors, encoding, msg, s,
startingpos, endingpos):
- """NOT_RPYTHON"""
if errors == 'replace':
return _unicode_error_replacement, endingpos
if errors == 'ignore':
@@ -1574,10 +1573,17 @@
def default_unicode_error_encode(errors, encoding, msg, u,
startingpos, endingpos):
- """NOT_RPYTHON"""
if errors == 'replace':
return '?', None, endingpos
if errors == 'ignore':
return '', None, endingpos
+
+ if we_are_translated():
+ # The constructor for UnicodeEncodeError requires an actual unicode
+ # object; a Utf8Str isn't good enough. Converting a Utf8Str to a
+ # unicode is (somewhat arbitrarily) not RPython. Since, translated
+ # built-in exceptions don't care about their arguments, only do the
+ # conversion when not translated.
+ raise UnicodeEncodeError()
raise UnicodeEncodeError(encoding, unicode(u), startingpos, endingpos, msg)
From noreply at buildbot.pypy.org Thu Jul 10 17:23:26 2014
From: noreply at buildbot.pypy.org (waedt)
Date: Thu, 10 Jul 2014 17:23:26 +0200 (CEST)
Subject: [pypy-commit] pypy utf8-unicode2: Fix cpyext
Message-ID: <20140710152326.5C48F1C0F86@cobra.cs.uni-duesseldorf.de>
Author: Tyler Wade
Branch: utf8-unicode2
Changeset: r72414:e6b1c681e8ec
Date: 2014-07-09 03:30 -0500
http://bitbucket.org/pypy/pypy/changeset/e6b1c681e8ec/
Log: Fix cpyext
diff --git a/pypy/interpreter/test/test_utf8.py b/pypy/interpreter/test/test_utf8.py
--- a/pypy/interpreter/test/test_utf8.py
+++ b/pypy/interpreter/test/test_utf8.py
@@ -4,6 +4,7 @@
import sys
from pypy.interpreter.utf8 import (
Utf8Str, Utf8Builder, utf8chr, utf8ord)
+from rpython.rtyper.lltypesystem import rffi
def build_utf8str():
builder = Utf8Builder()
@@ -193,3 +194,15 @@
assert s.rsplit(maxsplit=2) == u.rsplit(None, 2)
assert s.rsplit(' ', 2) == u.rsplit(' ', 2)
assert s.rsplit('\n') == [s]
+
+def test_copy_to_wcharp():
+ s = build_utf8str()
+ if sys.maxunicode < 0x10000:
+ # The last character requires a surrogate pair on narrow builds and
+ # so won't be converted correctly by rffi.wcharp2unicode
+ s = s[:-1]
+
+ wcharp = s.copy_to_wcharp()
+ u = rffi.wcharp2unicode(wcharp)
+ rffi.free_wcharp(wcharp)
+ assert s == u
diff --git a/pypy/interpreter/utf8.py b/pypy/interpreter/utf8.py
--- a/pypy/interpreter/utf8.py
+++ b/pypy/interpreter/utf8.py
@@ -3,6 +3,7 @@
from rpython.rlib.runicode import utf8_code_length
from rpython.rlib.unicodedata import unicodedb_5_2_0 as unicodedb
from rpython.rlib.rarithmetic import r_uint
+from rpython.rtyper.lltypesystem import rffi
def utf8chr(value):
# Like unichr, but returns a Utf8Str object
@@ -73,6 +74,8 @@
self._len = length
def index_of_char(self, char):
+ if char >= len(self):
+ return len(self.bytes)
byte = 0
pos = 0
while pos < char:
@@ -412,6 +415,14 @@
byte_pos -= 1
return byte_pos
+ def copy_to_wcharp(self):
+        # XXX Temporary solution. This won't work correctly on systems
+ # where sizeof(wchar_t) == 2. Also, it copies twice.
+ from pypy.interpreter.utf8_codecs import unicode_encode_unicode_internal
+ from rpython.rlib.runicode import MAXUNICODE
+ bytes = unicode_encode_unicode_internal(self, len(self), 'strict')
+ return rffi.cast(rffi.CWCHARP, rffi.str2charp(bytes))
+
class Utf8Builder(object):
diff --git a/pypy/interpreter/utf8_codecs.py b/pypy/interpreter/utf8_codecs.py
--- a/pypy/interpreter/utf8_codecs.py
+++ b/pypy/interpreter/utf8_codecs.py
@@ -1538,8 +1538,8 @@
if rs is not None:
# py3k only
errorhandler('strict', 'decimal', msg, s, collstart, collend)
- for char in ru:
- ch = ord(char)
+ for i in range(len(ru)):
+ ch = ORD(ru, i)
if unicodedb.isspace(ch):
result.append(' ')
continue
diff --git a/pypy/module/_codecs/interp_codecs.py b/pypy/module/_codecs/interp_codecs.py
--- a/pypy/module/_codecs/interp_codecs.py
+++ b/pypy/module/_codecs/interp_codecs.py
@@ -229,7 +229,7 @@
builder = UnicodeBuilder()
pos = start
while pos < end:
- code = ord(obj[pos])
+ code = utf8ord(obj, pos)
if (MAXUNICODE == 0xffff and 0xD800 <= code <= 0xDBFF and
pos + 1 < end and 0xDC00 <= ord(obj[pos+1]) <= 0xDFFF):
code = (code & 0x03FF) << 10
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -188,7 +188,7 @@
w_u = api.PyUnicode_DecodeUTF8(u, 2, None)
assert space.type(w_u) is space.w_unicode
- assert space.unwrap(w_u) == 'sp'
+ assert space.unwrap(w_u) == u'sp'
rffi.free_charp(u)
def test_encode_utf8(self, space, api):
@@ -296,7 +296,7 @@
w_u = space.wrap(u'a')
assert api.PyUnicode_FromObject(w_u) is w_u
assert space.unwrap(
- api.PyUnicode_FromObject(space.wrap('test'))) == 'test'
+ api.PyUnicode_FromObject(space.wrap('test'))) == u'test'
def test_decode(self, space, api):
b_text = rffi.str2charp('caf\x82xx')
@@ -306,7 +306,7 @@
w_text = api.PyUnicode_FromEncodedObject(space.wrap("test"), b_encoding, None)
assert space.isinstance_w(w_text, space.w_unicode)
- assert space.unwrap(w_text) == "test"
+ assert space.unwrap(w_text) == u"test"
assert api.PyUnicode_FromEncodedObject(space.wrap(u"test"), b_encoding, None) is None
assert api.PyErr_Occurred() is space.w_TypeError
diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py
--- a/pypy/module/cpyext/unicodeobject.py
+++ b/pypy/module/cpyext/unicodeobject.py
@@ -1,4 +1,5 @@
from pypy.interpreter.error import OperationError
+from pypy.interpreter import utf8_codecs
from rpython.rtyper.lltypesystem import rffi, lltype
from pypy.module.unicodedata import unicodedb
from pypy.module.cpyext.api import (
@@ -208,7 +209,7 @@
# Copy unicode buffer
w_unicode = from_ref(space, ref)
u = space.unicode_w(w_unicode)
- ref_unicode.c_buffer = rffi.unicode2wcharp(u)
+ ref_unicode.c_buffer = u.copy_to_wcharp()
return ref_unicode.c_buffer
@cpython_api([PyObject], rffi.CWCHARP)
@@ -552,7 +553,7 @@
else:
errors = None
- result, length, byteorder = runicode.str_decode_utf_16_helper(
+ result, length, byteorder = utf8_codecs.str_decode_utf_16_helper(
string, size, errors,
True, # final ? false for multiple passes?
None, # errorhandler
@@ -608,7 +609,7 @@
else:
errors = None
- result, length, byteorder = runicode.str_decode_utf_32_helper(
+ result, length, byteorder = utf8_codecs.str_decode_utf_32_helper(
string, size, errors,
True, # final ? false for multiple passes?
None, # errorhandler
@@ -640,7 +641,7 @@
else:
errors = None
state = space.fromcache(CodecState)
- result = runicode.unicode_encode_decimal(u, length, errors,
+ result = utf8_codecs.unicode_encode_decimal(u, length, errors,
state.encode_error_handler)
i = len(result)
output[i] = '\0'
@@ -691,10 +692,12 @@
suffix match), 0 otherwise. Return -1 if an error occurred."""
str = space.unicode_w(w_str)
substr = space.unicode_w(w_substr)
+ start = str.index_of_char(start)
+ end = str.index_of_char(end)
if rffi.cast(lltype.Signed, direction) <= 0:
- return rstring.startswith(str, substr, start, end)
+ return rstring.startswith(str.bytes, substr.bytes, start, end)
else:
- return rstring.endswith(str, substr, start, end)
+ return rstring.endswith(str.bytes, substr.bytes, start, end)
@cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t], Py_ssize_t, error=-1)
def PyUnicode_Count(space, w_str, w_substr, start, end):
From noreply at buildbot.pypy.org Thu Jul 10 17:46:37 2014
From: noreply at buildbot.pypy.org (Hubert Hesse)
Date: Thu, 10 Jul 2014 17:46:37 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk stmgc-c7: Integrated Benchmarks into
Image
Message-ID: <20140710154637.186FE1C021D@cobra.cs.uni-duesseldorf.de>
Author: Hubert Hesse
Branch: stmgc-c7
Changeset: r892:2152a5634111
Date: 2014-07-10 17:39 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/2152a5634111/
Log: Integrated Benchmarks into Image
diff too long, truncating to 2000 out of 32523 lines
diff --git a/images/Squeak4.5-12568.changes b/images/Squeak4.5-12568.changes
--- a/images/Squeak4.5-12568.changes
+++ b/images/Squeak4.5-12568.changes
@@ -758,4 +758,14340 @@
self fieldNew: swapField.
].
- ^ self field! !
----QUIT----{22 May 2014 . 3:33:07 pm} Squeak4.5-12568.image priorSource: 93437!
----STARTUP----{22 May 2014 . 3:33:13 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:33'!
gameLifeOfLife
STMSimulation benchmark.! !
----QUIT----{22 May 2014 . 3:34:03 pm} Squeak4.5-12568.image priorSource: 110218!
----STARTUP----{22 May 2014 . 3:34:57 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:35'!
gameOfLife
STMSimulation benchmark.! !
Integer removeSelector: #gameLifeOfLife!
----QUIT----{22 May 2014 . 3:35:14 pm} Squeak4.5-12568.image priorSource: 110526!
----STARTUP----{22 May 2014 . 3:36:22 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:36' prior: 33665224!
gameOfLife
STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:36:45 pm} Squeak4.5-12568.image priorSource: 110873!
----STARTUP----{22 May 2014 . 3:36:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:36:53 pm} Squeak4.5-12568.image priorSource: 111195!
----STARTUP----{22 May 2014 . 3:36:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:37' prior: 33665587!
gameOfLife
SPyVM print: STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:37:32 pm} Squeak4.5-12568.image priorSource: 111392!
----STARTUP----{22 May 2014 . 3:38:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:38:35 pm} Squeak4.5-12568.image priorSource: 111727!
----STARTUP----{2 June 2014 . 12:57:18 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 12:58'!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 12:58:21 pm} Squeak4.5-12568.image priorSource: 111924!
----STARTUP----{26 June 2014 . 2:47:09 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:54 pm'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:25'!
lock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:26'!
release
! !
----End fileIn of a stream----!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33647508!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----QUIT----{26 June 2014 . 2:47:49 pm} Squeak4.5-12568.image priorSource: 112268!
----STARTUP----{26 June 2014 . 2:49:11 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33667646!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----SNAPSHOT----{26 June 2014 . 2:49:57 pm} Squeak4.5-12568.image priorSource: 113431!
----QUIT----{26 June 2014 . 2:50 pm} Squeak4.5-12568.image priorSource: 114022!
----STARTUP----{26 June 2014 . 2:52:02 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:51:52 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 14:43'!
osLockTest
| lock process1 process2 process2lock |
lock := OSLock new.
lock lock.
process2lock := OSLock new.
process2lock lock.
process1 := [ SPyVM print: 'First process start'.
lock lock.
process2lock release.
SPyVM print: 'First process after lock'.
lock release. ] parallelFork .
process2 := [ SPyVM print: 'Second process start'.
process2lock lock.
lock lock.
SPyVM print: 'Second process after lock'.
process2lock release.
lock release. ] parallelFork .
SPyVM print: 'Processes initialized.'.
lock release.
process1 wait.
process2 wait.! !
----End fileIn of a stream----!
----QUIT----{26 June 2014 . 2:53:18 pm} Squeak4.5-12568.image priorSource: 114110!
----STARTUP----{26 June 2014 . 3:02:03 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:02'!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:02:52 pm} Squeak4.5-12568.image priorSource: 115096!
----STARTUP----{26 June 2014 . 3:03:40 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667310!
lock
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667409!
release
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670408!
release
SPyVM print: '* OS Lock could not be released *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670196!
lock
SPyVM print: '* OS Lock could not be locked *' , Character cr.
self primitiveFailed.
self resume! !
----QUIT----{26 June 2014 . 3:04:45 pm} Squeak4.5-12568.image priorSource: 115476!
----STARTUP----{26 June 2014 . 3:08:07 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:08' prior: 33669797!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:08:24 pm} Squeak4.5-12568.image priorSource: 116537!
----STARTUP----{26 June 2014 . 3:09:05 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:09' prior: 33671254!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:09:17 pm} Squeak4.5-12568.image priorSource: 116916!
----STARTUP----{26 June 2014 . 3:57:38 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:57' prior: 33671633!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'
lock release.
SPyVM print: 'Survived lock.'! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:58' prior: 33672027!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'.
lock release.
SPyVM print: 'Survived lock.'.
^ self! !
----QUIT----{26 June 2014 . 3:58:58 pm} Squeak4.5-12568.image priorSource: 117310!
----STARTUP----{26 June 2014 . 3:59:21 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672253!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived locking.'.
lock release.
SPyVM print: 'Survived releasing.'.
^ self! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672690!
osLockEasyTest
| lock1 |
lock1 := OSLock new.
lock1 lock.
SPyVM print: 'Survived locking.'.
lock1 release.
SPyVM print: 'Survived releasing.'.
^ self! !
----QUIT----{26 June 2014 . 4:00 pm} Squeak4.5-12568.image priorSource: 117973!
----STARTUP----{26 June 2014 . 4:07:56 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09' prior: 33670843!
lock
self internalLock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09'!
internalLock
SPyVM print: '* OS Lock could not be locked *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09'!
internalRelease
SPyVM print: '* OS Lock could not be released *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09' prior: 33670623!
release
self internalRelease! !
----QUIT----{26 June 2014 . 4:09:34 pm} Squeak4.5-12568.image priorSource: 118668!
\ No newline at end of file
+ ^ self field! !
----QUIT----{22 May 2014 . 3:33:07 pm} Squeak4.5-12568.image priorSource: 93437!
----STARTUP----{22 May 2014 . 3:33:13 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:33'!
gameLifeOfLife
STMSimulation benchmark.! !
----QUIT----{22 May 2014 . 3:34:03 pm} Squeak4.5-12568.image priorSource: 110218!
----STARTUP----{22 May 2014 . 3:34:57 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:35'!
gameOfLife
STMSimulation benchmark.! !
Integer removeSelector: #gameLifeOfLife!
----QUIT----{22 May 2014 . 3:35:14 pm} Squeak4.5-12568.image priorSource: 110526!
----STARTUP----{22 May 2014 . 3:36:22 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:36' prior: 33665224!
gameOfLife
STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:36:45 pm} Squeak4.5-12568.image priorSource: 110873!
----STARTUP----{22 May 2014 . 3:36:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:36:53 pm} Squeak4.5-12568.image priorSource: 111195!
----STARTUP----{22 May 2014 . 3:36:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:37' prior: 33665587!
gameOfLife
SPyVM print: STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:37:32 pm} Squeak4.5-12568.image priorSource: 111392!
----STARTUP----{22 May 2014 . 3:38:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:38:35 pm} Squeak4.5-12568.image priorSource: 111727!
----STARTUP----{22 May 2014 . 3:48:40 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT/NOSAVE----{22 May 2014 . 3:48:43 pm} Squeak4.5-12568.image priorSource: 111924!
----STARTUP----{22 May 2014 . 4:20:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:21' prior: 33603722!
benchStmParallel
| sum num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
sum := 0.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ]
] parallelFork
].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:25' prior: 33666837!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum.
] parallelFork
].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:26' prior: 33667525!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum.
] parallelFork
].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:26' prior: 33668262!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum.
] parallelFork
].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:26' prior: 33669000!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:26:33 pm} Squeak4.5-12568.image priorSource: 111924!
----STARTUP----{22 May 2014 . 4:28:08 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:28' prior: 33669737!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:28:57 pm} Squeak4.5-12568.image priorSource: 115957!
----STARTUP----{22 May 2014 . 4:30:39 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:31' prior: 33670666!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[((i * max) to: ((i + 1) * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:31:47 pm} Squeak4.5-12568.image priorSource: 116913!
----STARTUP----{22 May 2014 . 4:37:03 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 4:37:21 pm} Squeak4.5-12568.image priorSource: 117878!
----STARTUP----{22 May 2014 . 4:37:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:38' prior: 33671622!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * max - 1)) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:38' prior: 33672784!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:38:37 pm} Squeak4.5-12568.image priorSource: 118075!
----STARTUP----{22 May 2014 . 4:39:37 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
#('1' '2' '3')!
#('1' '2' '3') at: 2!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:49' prior: 33673552!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads add: [
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
]parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:49' prior: 33674559!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads add: [
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:49:24 pm} Squeak4.5-12568.image priorSource: 119810!
----STARTUP----{22 May 2014 . 4:49:54 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:50' prior: 33675489!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads add: [
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
] parallelFork.
SPyVM print: 'bar'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:50:32 pm} Squeak4.5-12568.image priorSource: 121907!
----STARTUP----{22 May 2014 . 4:50:54 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:52' prior: 33676616!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'bar'.
threads add: [
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:52' prior: 33677764!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'bar'.
threads add: [
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
] parallelFork.
SPyVM print: 'bar2'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:52:16 pm} Squeak4.5-12568.image priorSource: 123055!
----STARTUP----{22 May 2014 . 4:52:32 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:52' prior: 33678717!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads add: ([
| sum |
sum := 0.
[((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ]].
localSums at: num put: sum.
] parallelFork).
SPyVM print: 'bar2'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:52:57 pm} Squeak4.5-12568.image priorSource: 125181!
----STARTUP----{22 May 2014 . 4:53:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 4:54:05 pm} Squeak4.5-12568.image priorSource: 126335!
----STARTUP----{22 May 2014 . 4:54:23 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:56' prior: 33679890!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: self-num-max.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:56' prior: 33681241!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: self-num-max.
SPyVM print: self-num.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:56:21 pm} Squeak4.5-12568.image priorSource: 126532!
----STARTUP----{22 May 2014 . 4:56:41 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:58' prior: 33682201!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:58' prior: 33683383!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ] asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 16:58' prior: 33684335!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 4:58:37 pm} Squeak4.5-12568.image priorSource: 128674!
----STARTUP----{22 May 2014 . 5:20:50 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:21:16 pm} Squeak4.5-12568.image priorSource: 131728!
----STARTUP----{22 May 2014 . 5:21:29 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:21' prior: 33685288!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: '1'.
SPyVM print: localSums asString.
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:22' prior: 33686634!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: '1:'.
SPyVM print: localSums asString.
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '2:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:22:18 pm} Squeak4.5-12568.image priorSource: 131925!
----STARTUP----{22 May 2014 . 5:22:42 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:23' prior: 33687638!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: '1:'.
SPyVM print: threads asString.
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '2:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:23:22 pm} Squeak4.5-12568.image priorSource: 134149!
----STARTUP----{22 May 2014 . 5:23:33 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:25' prior: 33688858!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '2:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----SNAPSHOT----{22 May 2014 . 5:25:10 pm} Squeak4.5-12568.image priorSource: 135367!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:25' prior: 33690076!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '2:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:25:30 pm} Squeak4.5-12568.image priorSource: 136577!
----STARTUP----{22 May 2014 . 5:25:55 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:26:05 pm} Squeak4.5-12568.image priorSource: 137699!
----STARTUP----{22 May 2014 . 5:26:27 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:28:45 pm} Squeak4.5-12568.image priorSource: 137896!
----STARTUP----{22 May 2014 . 5:29:05 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT/NOSAVE----{22 May 2014 . 5:29:16 pm} Squeak4.5-12568.image priorSource: 138093!
----STARTUP----{22 May 2014 . 5:29:48 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:30:10 pm} Squeak4.5-12568.image priorSource: 138093!
----STARTUP----{22 May 2014 . 5:30:41 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:31 pm} Squeak4.5-12568.image priorSource: 138494!
----STARTUP----{22 May 2014 . 5:31:19 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT/NOSAVE----{22 May 2014 . 5:31:35 pm} Squeak4.5-12568.image priorSource: 138691!
----STARTUP----{22 May 2014 . 5:32:08 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:32' prior: 33691176!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'yy'.
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
SPyVM print: 'sss'.
SPyVM print: threads.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside methodaaaaaaaaaaaaaaaaaa:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:32:31 pm} Squeak4.5-12568.image priorSource: 138691!
----STARTUP----{22 May 2014 . 5:32:45 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:33' prior: 33693601!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'yy'.
threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:34:02 pm} Squeak4.5-12568.image priorSource: 140184!
----STARTUP----{22 May 2014 . 5:34:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:35' prior: 33694893!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'yy'.
threads = threads asOrderedCollection add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:35' prior: 33696165!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k. ].
localSums at: i put: sum. ] parallelFork ].
SPyVM print: 'yy'.
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
SPyVM print: 'xx'.
SPyVM print: sum.
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:35:23 pm} Squeak4.5-12568.image priorSource: 141456!
----STARTUP----{22 May 2014 . 5:40:42 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:42' prior: 33697250!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k.
SPyVM print: 'from', ( (i-1) * max) asString, 'to', (i * (max - 1)) asString.
].
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:42:11 pm} Squeak4.5-12568.image priorSource: 143834!
----STARTUP----{22 May 2014 . 5:42:33 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:42' prior: 33698543!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * (max - 1))) do: [ :k |
sum := sum + k.
].
SPyVM print: 'from', ( (i-1) * max) asString, 'to', (i * (max - 1)) asString.
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:42:55 pm} Squeak4.5-12568.image priorSource: 145152!
----STARTUP----{22 May 2014 . 5:43:27 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 5:43:55 pm} Squeak4.5-12568.image priorSource: 146468!
----STARTUP----{22 May 2014 . 5:44:27 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:45' prior: 33699861!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: (i * max - 1)) do: [ :k |
sum := sum + k.
].
SPyVM print: 'from', ( (i-1) * max) asString, 'to', ((i * max) - 1) asString.
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:45' prior: 33701374!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: ((i * max) - 1)) do: [ :k |
sum := sum + k.
].
SPyVM print: 'from', ( (i-1) * max) asString, 'to', ((i * max) - 1) asString.
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
SPyVM print: threads asString, ' threads'.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
SPyVM print: '23:'.
SPyVM print: localSums asString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:45:21 pm} Squeak4.5-12568.image priorSource: 146665!
----STARTUP----{22 May 2014 . 5:46:08 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 5/22/2014 17:46' prior: 33702491!
benchStmParallel
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: ((i * max) - 1)) do: [ :k |
sum := sum + k.
].
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
----QUIT----{22 May 2014 . 5:46:37 pm} Squeak4.5-12568.image priorSource: 149098!
----STARTUP----{2 June 2014 . 11:04:06 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:06'!
benchStmParallelWarmed
| num threads max start localSums |
num := self \\ 100.
max := (self - num) // num.
SPyVM print: ('Threads:', (num printString)).
SPyVM print: ('Items/Thread:', (max printString)).
localSums := Array new: num.
start := Time now asNanoSeconds.
threads := (1 to: num-1) collect: [ :i |
| sum |
sum := 0.
[( ( (i-1) * max) to: ((i * max) - 1)) do: [ :k |
sum := sum + k.
].
localSums at: i put: sum. ] parallelFork ].
threads := threads asOrderedCollection.
threads add: [
| sum |
sum := 0.
((self-num-max) to: self-num) do: [ :k |
sum := sum + k. ].
localSums at: num put: sum.
] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.
^ localSums sum printString! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:06' prior: 33704928!
benchStmParallelWarmed
! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:09' prior: 33705873!
benchStmParallelWarmed
3 timesRepeat: [1 benchStmParallel].! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:09' prior: 33705981!
benchStmParallelWarmed
3 timesRepeat: [SPyVM print: 1 benchStmParallel].! !
----SNAPSHOT----{2 June 2014 . 11:09:59 am} Squeak4.5-12568.image priorSource: 150235!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:10' prior: 33706125!
benchStmParallelWarmed
3 timesRepeat: [1 benchStmParallel].! !
----SNAPSHOT----{2 June 2014 . 11:10:29 am} Squeak4.5-12568.image priorSource: 151771!
----QUIT----{2 June 2014 . 11:10:32 am} Squeak4.5-12568.image priorSource: 152003!
----STARTUP----{2 June 2014 . 11:20:35 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:20' prior: 33706370!
benchStmParallelWarmed
3 timesRepeat: [SPyVM print: (1 benchStmParallel)].! !
----QUIT----{2 June 2014 . 11:21:01 am} Squeak4.5-12568.image priorSource: 152091!
----STARTUP----{2 June 2014 . 11:21:35 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{2 June 2014 . 11:22:41 am} Squeak4.5-12568.image priorSource: 152449!
----STARTUP----{2 June 2014 . 11:23:17 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:23' prior: 33706801!
benchStmParallelWarmed
3 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 11:23:36 am} Squeak4.5-12568.image priorSource: 152648!
----STARTUP----{2 June 2014 . 11:25:39 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:25' prior: 33707358!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 11:25:46 am} Squeak4.5-12568.image priorSource: 153009!
----STARTUP----{2 June 2014 . 11:25:48 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{2 June 2014 . 11:25:56 am} Squeak4.5-12568.image priorSource: 153371!
----STARTUP----{2 June 2014 . 11:26:31 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:26' prior: 33707719!
benchStmParallelWarmed
100 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 11:26:36 am} Squeak4.5-12568.image priorSource: 153570!
----STARTUP----{2 June 2014 . 11:26:38 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{2 June 2014 . 11:26:42 am} Squeak4.5-12568.image priorSource: 153933!
----STARTUP----{2 June 2014 . 11:27:14 am} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 11:27' prior: 33708280!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 11:27:19 am} Squeak4.5-12568.image priorSource: 154132!
----STARTUP----{5 June 2014 . 1:39:24 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:45'!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self)
do: [:t1 | SPyVM print: 'Thread 1 reporting!!']] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:45' prior: 33709187!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self)
do: [:t1 | SPyVM print: 'Thread 1 reporting!!']] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:45' prior: 33709650!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self)
do: [:t1 | SPyVM print: 'Thread reporting!!']] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:50' prior: 33710111!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self)
do: []] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:50:23 pm} Squeak4.5-12568.image priorSource: 154494!
----STARTUP----{5 June 2014 . 1:51:28 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:51' prior: 33710570!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self)
do: [1+1]] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:51:37 pm} Squeak4.5-12568.image priorSource: 156479!
----STARTUP----{5 June 2014 . 1:52:09 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:53' prior: 33711187!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self) do: [1+1] parallelFork.]
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:53:31 pm} Squeak4.5-12568.image priorSource: 157099!
----STARTUP----{5 June 2014 . 1:53:59 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:54' prior: 33711807!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := [(1 to: self) do: [1+1]] parallelFork.
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:55' prior: 33712425!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := (1 to: self) do: [[1+1] parallelFork].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:55:56 pm} Squeak4.5-12568.image priorSource: 157717!
----STARTUP----{5 June 2014 . 1:55:58 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{5 June 2014 . 1:56:05 pm} Squeak4.5-12568.image priorSource: 158756!
----STARTUP----{5 June 2014 . 1:56:36 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:56' prior: 33712846!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := (1 to: self) collect: [[1+1] parallelFork].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:56:46 pm} Squeak4.5-12568.image priorSource: 158953!
----STARTUP----{5 June 2014 . 1:57:51 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:58' prior: 33713661!
benchStmThreadCreation
| threads start |
SPyVM print: ('Threads:', (self printString)).
start := Time now asNanoSeconds.
threads := (1 to: self) collect: [[SPyVM print: 'hello'.] parallelFork].
threads do: [:t | t wait].
SPyVM print: '[squeak] milliseconds inside method:'.
SPyVM print: (((Time now asNanoSeconds) - start) // 1000000) printString.! !
----QUIT----{5 June 2014 . 1:58:24 pm} Squeak4.5-12568.image priorSource: 159576!
----STARTUP----{5 June 2014 . 1:58:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/5/2014 13:59' prior: 33714284!
benchStmThreadCreation
| threads start |
From noreply at buildbot.pypy.org Thu Jul 10 17:46:46 2014
From: noreply at buildbot.pypy.org (Hubert Hesse)
Date: Thu, 10 Jul 2014 17:46:46 +0200 (CEST)
Subject: [pypy-commit] lang-smalltalk stmgc-c7: merge
Message-ID: <20140710154646.879F41C021D@cobra.cs.uni-duesseldorf.de>
Author: Hubert Hesse
Branch: stmgc-c7
Changeset: r893:63cabb3a874b
Date: 2014-07-10 17:45 +0200
http://bitbucket.org/pypy/lang-smalltalk/changeset/63cabb3a874b/
Log: merge
diff too long, truncating to 2000 out of 32528 lines
diff --git a/images/Squeak4.5-12568.changes b/images/Squeak4.5-12568.changes
--- a/images/Squeak4.5-12568.changes
+++ b/images/Squeak4.5-12568.changes
@@ -758,4 +758,14340 @@
self fieldNew: swapField.
].
- ^ self field! !
----QUIT----{22 May 2014 . 3:33:07 pm} Squeak4.5-12568.image priorSource: 93437!
----STARTUP----{22 May 2014 . 3:33:13 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:33'!
gameLifeOfLife
STMSimulation benchmark.! !
----QUIT----{22 May 2014 . 3:34:03 pm} Squeak4.5-12568.image priorSource: 110218!
----STARTUP----{22 May 2014 . 3:34:57 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:35'!
gameOfLife
STMSimulation benchmark.! !
Integer removeSelector: #gameLifeOfLife!
----QUIT----{22 May 2014 . 3:35:14 pm} Squeak4.5-12568.image priorSource: 110526!
----STARTUP----{22 May 2014 . 3:36:22 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:36' prior: 33665224!
gameOfLife
STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:36:45 pm} Squeak4.5-12568.image priorSource: 110873!
----STARTUP----{22 May 2014 . 3:36:49 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:36:53 pm} Squeak4.5-12568.image priorSource: 111195!
----STARTUP----{22 May 2014 . 3:36:56 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 15:37' prior: 33665587!
gameOfLife
SPyVM print: STMSimulation benchmark2.! !
----QUIT----{22 May 2014 . 3:37:32 pm} Squeak4.5-12568.image priorSource: 111392!
----STARTUP----{22 May 2014 . 3:38:15 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
----QUIT----{22 May 2014 . 3:38:35 pm} Squeak4.5-12568.image priorSource: 111727!
----STARTUP----{2 June 2014 . 12:57:18 pm} as /home/hub/hpi/stm/src/lang-smalltalk/images/Squeak4.5-12568.image!
!Integer methodsFor: 'benchmarks' stamp: 'hh 6/2/2014 12:58'!
benchStmParallelWarmed
10 timesRepeat: [SPyVM print: (self benchStmParallel)].! !
----QUIT----{2 June 2014 . 12:58:21 pm} Squeak4.5-12568.image priorSource: 111924!
----STARTUP----{26 June 2014 . 2:47:09 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:54 pm'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
Object subclass: #OSLock
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Kernel-STM'!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:25'!
lock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 14:26'!
release
! !
----End fileIn of a stream----!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33647508!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----QUIT----{26 June 2014 . 2:47:49 pm} Squeak4.5-12568.image priorSource: 112268!
----STARTUP----{26 June 2014 . 2:49:11 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:46:45 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'hh 5/22/2014 14:48' prior: 33667646!
parallelForkTest
| p |
FileStream stdout nextPutAll: 'starting stm process.'.
p := [ 1 + 1. ] parallelFork.
p wait! !
----End fileIn of D:\code\python\spy-vm\lang-smalltalk\images\Integer-parallelForkTest.st----!
----SNAPSHOT----{26 June 2014 . 2:49:57 pm} Squeak4.5-12568.image priorSource: 113431!
----QUIT----{26 June 2014 . 2:50 pm} Squeak4.5-12568.image priorSource: 114022!
----STARTUP----{26 June 2014 . 2:52:02 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
'From Squeak4.4 of 28 April 2013 [latest update: #12627] on 26 June 2014 at 2:51:52 pm'!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 14:43'!
osLockTest
| lock process1 process2 process2lock |
lock := OSLock new.
lock lock.
process2lock := OSLock new.
process2lock lock.
process1 := [ SPyVM print: 'First process start'.
lock lock.
process2lock release.
SPyVM print: 'First process after lock'.
lock release. ] parallelFork .
process2 := [ SPyVM print: 'Second process start'.
process2lock lock.
lock lock.
SPyVM print: 'Second process after lock'.
process2lock release.
lock release. ] parallelFork .
SPyVM print: 'Processes initialized.'.
lock release.
process1 wait.
process2 wait.! !
----End fileIn of a stream----!
----QUIT----{26 June 2014 . 2:53:18 pm} Squeak4.5-12568.image priorSource: 114110!
----STARTUP----{26 June 2014 . 3:02:03 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:02'!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:02:52 pm} Squeak4.5-12568.image priorSource: 115096!
----STARTUP----{26 June 2014 . 3:03:40 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667310!
lock
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33667409!
release
SPyVM print: '* STM Process did not fork *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670408!
release
SPyVM print: '* OS Lock could not be released *' , Character cr.
self primitiveFailed.
self resume! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 15:04' prior: 33670196!
lock
SPyVM print: '* OS Lock could not be locked *' , Character cr.
self primitiveFailed.
self resume! !
----QUIT----{26 June 2014 . 3:04:45 pm} Squeak4.5-12568.image priorSource: 115476!
----STARTUP----{26 June 2014 . 3:08:07 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:08' prior: 33669797!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:08:24 pm} Squeak4.5-12568.image priorSource: 116537!
----STARTUP----{26 June 2014 . 3:09:05 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:09' prior: 33671254!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
lock release.
SPyVM print: 'Survived lock.'! !
----QUIT----{26 June 2014 . 3:09:17 pm} Squeak4.5-12568.image priorSource: 116916!
----STARTUP----{26 June 2014 . 3:57:38 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:57' prior: 33671633!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'
lock release.
SPyVM print: 'Survived lock.'! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:58' prior: 33672027!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived lock.'.
lock release.
SPyVM print: 'Survived lock.'.
^ self! !
----QUIT----{26 June 2014 . 3:58:58 pm} Squeak4.5-12568.image priorSource: 117310!
----STARTUP----{26 June 2014 . 3:59:21 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672253!
osLockEasyTest
| lock |
lock := OSLock new.
lock lock.
SPyVM print: 'Survived locking.'.
lock release.
SPyVM print: 'Survived releasing.'.
^ self! !
!Integer methodsFor: '*SPy-Benchmarks' stamp: 'pre 6/26/2014 15:59' prior: 33672690!
osLockEasyTest
| lock1 |
lock1 := OSLock new.
lock1 lock.
SPyVM print: 'Survived locking.'.
lock1 release.
SPyVM print: 'Survived releasing.'.
^ self! !
----QUIT----{26 June 2014 . 4:00 pm} Squeak4.5-12568.image priorSource: 117973!
----STARTUP----{26 June 2014 . 4:07:56 pm} as D:\code\python\spy-vm\lang-smalltalk\images\Squeak4.5-12568.image!
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09' prior: 33670843!
lock
self internalLock
! !
!OSLock methodsFor: 'as yet unclassified' stamp: 'pre 6/26/2014 16:09'!
internalLock