[pypy-commit] stmgc c8-gil-like: Tweaks: remove atomic_exchange() again, and use the regular __sync_bool_compare_and_swap() instead

arigo noreply at buildbot.pypy.org
Thu Jun 11 22:32:42 CEST 2015


Author: Armin Rigo <arigo at tunes.org>
Branch: c8-gil-like
Changeset: r1807:a30818fb55ed
Date: 2015-06-11 17:59 +0200
http://bitbucket.org/pypy/stmgc/changeset/a30818fb55ed/

Log:	Tweaks: remove atomic_exchange() again, and use the regular
	__sync_bool_compare_and_swap() instead. This avoids an issue with
	the value 0 changing to -1 and back to 0, which could mistakenly
	overwrite a concurrent _stm_detach_inevitable_transaction().
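
For readers skimming the diff: the old atomic_exchange() unconditionally wrote
-1 into _stm_detached_inevitable_from_thread even when it found 0 there, and
the caller then stored 0 back; a _stm_detach_inevitable_transaction() landing
in that window was silently overwritten. Below is a minimal standalone sketch
of the two patterns (the names slot, detach, peek_with_exchange and
peek_with_cas are mine, for illustration only; this is not the stmgc code):

    #include <stdint.h>
    #include <stdio.h>

    /* "slot" plays the role of _stm_detached_inevitable_from_thread;
       a detached thread is reduced to an arbitrary non-zero integer. */
    static intptr_t slot = 0;

    /* Old pattern: the unconditional exchange turns 0 into -1 even when
       there is nothing to grab, and the caller then writes 0 back. */
    static intptr_t peek_with_exchange(void)
    {
        intptr_t old = __sync_lock_test_and_set(&slot, -1);
        if (old == 0)
            slot = 0;     /* restore 0 -- but this may clobber a detach()
                             that happened in between, losing it */
        return old;
    }

    /* cf. _stm_detach_inevitable_transaction(): a plain store of the
       thread pointer, done while the slot is expected to be 0. */
    static void detach(intptr_t tl)
    {
        slot = tl;
    }

    /* New pattern: the slot is only claimed with a compare-and-swap when
       it really holds a thread pointer; a slot containing 0 is never
       written to, so a concurrent detach() cannot be overwritten. */
    static intptr_t peek_with_cas(void)
    {
        intptr_t cur = slot;
        if (cur == 0 || cur == -1)
            return 0;               /* nothing to grab, slot untouched */
        if (!__sync_bool_compare_and_swap(&slot, cur, -1))
            return 0;               /* lost a race; caller would retry */
        return cur;                 /* we now own the detached value */
    }

    int main(void)
    {
        printf("exchange sees %ld\n", (long)peek_with_exchange());
        detach(0x1234);             /* pretend some thread detached */
        printf("cas grabs %#lx\n", (long)peek_with_cas());
        return 0;
    }

With the CAS-based version, the word is only written when it currently holds
a real thread pointer, so the transient 0 -> -1 -> 0 sequence never happens.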

diff --git a/c8/demo/demo_random.c b/c8/demo/demo_random.c
--- a/c8/demo/demo_random.c
+++ b/c8/demo/demo_random.c
@@ -8,6 +8,7 @@
 #include <sys/wait.h>
 
 #include "stmgc.h"
+#include "stm/fprintcolor.h"
 
 #define NUMTHREADS 2
 #define STEPS_PER_THREAD 500
@@ -145,10 +146,10 @@
         }
     }
 
-    fprintf(stderr, "stm_is_inevitable() = %d\n", (int)stm_is_inevitable());
+    dprintf(("stm_is_inevitable() = %d\n", (int)stm_is_inevitable()));
     for (i = 0; i < td.num_roots_at_transaction_start; i++) {
         if (td.roots[i]) {
-            fprintf(stderr, "root %d: %p\n", i, td.roots[i]);
+            dprintf(("root %d: %p\n", i, td.roots[i]));
             STM_PUSH_ROOT(stm_thread_local, td.roots[i]);
         }
     }
@@ -390,9 +391,9 @@
                     /* Nothing here; it's unlikely that a different thread
                        manages to steal the detached inev transaction.
                        Give them a little chance with a usleep(). */
-                    fprintf(stderr, "sleep...\n");
+                    dprintf(("sleep...\n"));
                     usleep(1);
-                    fprintf(stderr, "sleep done\n");
+                    dprintf(("sleep done\n"));
                     td.num_roots_at_transaction_start = td.num_roots;
                     stm_enter_transactional_zone(&stm_thread_local);
                 }
diff --git a/c8/stm/atomic.h b/c8/stm/atomic.h
--- a/c8/stm/atomic.h
+++ b/c8/stm/atomic.h
@@ -26,18 +26,18 @@
 
   static inline void spin_loop(void) { asm("pause" : : : "memory"); }
   static inline void write_fence(void) { asm("" : : : "memory"); }
-# define atomic_exchange(ptr, old, new)  do {           \
-        (old) = __sync_lock_test_and_set(ptr, new);     \
-    } while (0)
+/*# define atomic_exchange(ptr, old, new)  do {         \
+          (old) = __sync_lock_test_and_set(ptr, new);   \
+      } while (0)*/
 
 #else
 
   static inline void spin_loop(void) { asm("" : : : "memory"); }
   static inline void write_fence(void) { __sync_synchronize(); }
 
-# define atomic_exchange(ptr, old, new)  do {           \
-        (old) = *(ptr);                                 \
-    } while (UNLIKELY(!__sync_bool_compare_and_swap(ptr, old, new)));
+/*# define atomic_exchange(ptr, old, new)  do {           \
+          (old) = *(ptr);                                 \
+      } while (UNLIKELY(!__sync_bool_compare_and_swap(ptr, old, new))); */
 
 #endif
 
diff --git a/c8/stm/detach.c b/c8/stm/detach.c
--- a/c8/stm/detach.c
+++ b/c8/stm/detach.c
@@ -53,9 +53,11 @@
     _core_commit_transaction(/*external=*/ true);
 }
 
-void _stm_reattach_transaction(intptr_t old, stm_thread_local_t *tl)
+void _stm_reattach_transaction(stm_thread_local_t *tl)
 {
+    intptr_t old;
  restart:
+    old = _stm_detached_inevitable_from_thread;
     if (old != 0) {
         if (old == -1) {
             /* busy-loop: wait until _stm_detached_inevitable_from_thread
@@ -65,10 +67,13 @@
                 spin_loop();
 
             /* then retry */
-            atomic_exchange(&_stm_detached_inevitable_from_thread, old, -1);
             goto restart;
         }
 
+        if (!__sync_bool_compare_and_swap(&_stm_detached_inevitable_from_thread,
+                                          old, -1))
+            goto restart;
+
         stm_thread_local_t *old_tl = (stm_thread_local_t *)old;
         int remote_seg_num = old_tl->last_associated_segment_num;
         dprintf(("reattach_transaction: commit detached from seg %d\n",
@@ -78,10 +83,6 @@
         ensure_gs_register(remote_seg_num);
         commit_external_inevitable_transaction();
     }
-    else {
-        assert(_stm_detached_inevitable_from_thread == -1);
-        _stm_detached_inevitable_from_thread = 0;
-    }
     dprintf(("reattach_transaction: start a new transaction\n"));
     _stm_start_transaction(tl);
 }
@@ -102,14 +103,6 @@
     if (cur == 0) {    /* fast-path */
         return 0;   /* _stm_detached_inevitable_from_thread not changed */
     }
-    if (cur != -1) {
-        atomic_exchange(&_stm_detached_inevitable_from_thread, cur, -1);
-        if (cur == 0) {
-            /* found 0, so change from -1 to 0 again and return */
-            _stm_detached_inevitable_from_thread = 0;
-            return 0;
-        }
-    }
     if (cur == -1) {
         /* busy-loop: wait until _stm_detached_inevitable_from_thread
            is reset to a value different from -1 */
@@ -117,6 +110,10 @@
             spin_loop();
         goto restart;
     }
+    if (!__sync_bool_compare_and_swap(&_stm_detached_inevitable_from_thread,
+                                      cur, -1))
+        goto restart;
+
     /* this is the only case where we grabbed a detached transaction.
        _stm_detached_inevitable_from_thread is still -1, until
        commit_fetched_detached_transaction() is called. */
diff --git a/c8/stm/fprintcolor.h b/c8/stm/fprintcolor.h
--- a/c8/stm/fprintcolor.h
+++ b/c8/stm/fprintcolor.h
@@ -37,5 +37,6 @@
 /* ------------------------------------------------------------ */
 
 
+__attribute__((unused))
 static void stm_fatalerror(const char *format, ...)
      __attribute__((format (printf, 1, 2), noreturn));
diff --git a/c8/stmgc.h b/c8/stmgc.h
--- a/c8/stmgc.h
+++ b/c8/stmgc.h
@@ -94,7 +94,7 @@
     assert(_stm_detached_inevitable_from_thread == 0);          \
     _stm_detached_inevitable_from_thread = (intptr_t)(tl);      \
 } while (0)
-void _stm_reattach_transaction(intptr_t old, stm_thread_local_t *tl);
+void _stm_reattach_transaction(stm_thread_local_t *tl);
 void _stm_become_inevitable(const char*);
 void _stm_collectable_safe_point(void);
 
@@ -421,24 +421,29 @@
    far more efficient than constantly starting and committing
    transactions.
 */
+#ifdef STM_DEBUGPRINT
 #include <stdio.h>
+#endif
 static inline void stm_enter_transactional_zone(stm_thread_local_t *tl) {
-    intptr_t old;
-    atomic_exchange(&_stm_detached_inevitable_from_thread, old, -1);
-    if (old == (intptr_t)tl) {
+    if (__sync_bool_compare_and_swap(&_stm_detached_inevitable_from_thread,
+                                     (intptr_t)tl, 0)) {
+#ifdef STM_DEBUGPRINT
         fprintf(stderr, "stm_enter_transactional_zone fast path\n");
-        _stm_detached_inevitable_from_thread = 0;
+#endif
     }
     else {
-        _stm_reattach_transaction(old, tl);
+        _stm_reattach_transaction(tl);
         /* _stm_detached_inevitable_from_thread should be 0 here, but
-           it can already have been changed from a parallel thread */
+           it can already have been changed from a parallel thread
+           (assuming we're not inevitable ourselves) */
     }
 }
 static inline void stm_leave_transactional_zone(stm_thread_local_t *tl) {
     assert(STM_SEGMENT->running_thread == tl);
     if (stm_is_inevitable()) {
+#ifdef STM_DEBUGPRINT
         fprintf(stderr, "stm_leave_transactional_zone fast path\n");
+#endif
         _stm_detach_inevitable_transaction(tl);
     }
     else {
@@ -467,9 +472,8 @@
     assert(STM_SEGMENT->running_thread == tl);
     if (!stm_is_inevitable())
         _stm_become_inevitable(msg);
-    /* now, we're running the inevitable transaction, so this var should
-       be 0 (but can occasionally be -1 for a tiny amount of time) */
-    assert(((_stm_detached_inevitable_from_thread + 1) & ~1) == 0);
+    /* now, we're running the inevitable transaction, so this var should be 0 */
+    assert(_stm_detached_inevitable_from_thread == 0);
 }
 
 /* Forces a safe-point if needed.  Normally not needed: this is
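
Taken together, the detach.c and stmgc.h hunks above implement a small
protocol around the single word _stm_detached_inevitable_from_thread: 0 means
nothing is detached, -1 means some thread has temporarily claimed the
detached transaction, and any other value is the thread_local of the thread
whose inevitable transaction is detached. The following condensed sketch
shows the shape of that protocol after this change; it is a simplification
(the *_stub helpers are placeholders, and segment switching, safe points and
the real commit/start machinery are left out entirely), not the library
itself:

    #include <stdint.h>

    typedef struct { int dummy; } tl_t;

    /* the single shared word, cf. _stm_detached_inevitable_from_thread */
    static intptr_t detached_from_thread = 0;

    static void spin(void) { __asm__("" ::: "memory"); }

    /* placeholders for the real machinery */
    static void commit_detached_stub(intptr_t old)
    {
        (void)old;
        detached_from_thread = 0;   /* in stmgc, resetting the word is part
                                       of committing the detached transaction */
    }
    static void start_transaction_stub(tl_t *tl) { (void)tl; }

    static void leave_zone_inevitable(tl_t *tl)
    {
        /* cf. _stm_detach_inevitable_transaction(): publish ourselves */
        detached_from_thread = (intptr_t)tl;
    }

    static void enter_zone(tl_t *tl)
    {
        /* fast path: re-attach our own detached inevitable transaction */
        if (__sync_bool_compare_and_swap(&detached_from_thread,
                                         (intptr_t)tl, 0))
            return;

        /* slow path, cf. _stm_reattach_transaction() */
        intptr_t old;
     restart:
        old = detached_from_thread;
        if (old != 0) {
            if (old == -1) {              /* claimed by someone else: wait */
                while (detached_from_thread == -1)
                    spin();
                goto restart;
            }
            if (!__sync_bool_compare_and_swap(&detached_from_thread,
                                              old, -1))
                goto restart;
            commit_detached_stub(old);    /* commit that thread's detached
                                             inevitable transaction */
        }
        start_transaction_stub(tl);
    }

    int main(void)
    {
        tl_t me;
        leave_zone_inevitable(&me);   /* leave with an inevitable transaction */
        enter_zone(&me);              /* fast path: CAS (intptr_t)&me -> 0 */
        return 0;
    }

The property that matters is that the word now only ever becomes -1 through a
compare-and-swap from a real thread pointer, so a concurrently stored pointer
can never be lost behind a transient -1.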

