	(__sso_string_base<>::_M_assign): Likewise.
	* include/bits/atomic_2.h (_ITp<>::store): Use __sync_swap_full.
	(_ITp<>::store volatile): Same.
	(_PTp<>::store): Same.
	(_PTp<>::store volatile): Same.

Index: include/bits/atomic_2.h
===================================================================
--- include/bits/atomic_2.h	(revision 173831)
+++ include/bits/atomic_2.h	(working copy)
@@ -249,14 +249,12 @@ namespace __atomic2
       __glibcxx_assert(__m != memory_order_acq_rel);
       __glibcxx_assert(__m != memory_order_consume);
 
-      if (__m == memory_order_relaxed)
-	_M_i = __i;
+      if (__m == memory_order_seq_cst)
+	(void)__sync_swap_full (&_M_i, __i);
       else
 	{
 	  // write_mem_barrier();
 	  _M_i = __i;
-	  if (__m == memory_order_seq_cst)
-	    __sync_synchronize();
 	}
     }
 
@@ -267,14 +265,12 @@ namespace __atomic2
       __glibcxx_assert(__m != memory_order_acq_rel);
       __glibcxx_assert(__m != memory_order_consume);
 
-      if (__m == memory_order_relaxed)
-	_M_i = __i;
+      if (__m == memory_order_seq_cst)
+	(void)__sync_swap_full (&_M_i, __i);
       else
 	{
 	  // write_mem_barrier();
 	  _M_i = __i;
-	  if (__m == memory_order_seq_cst)
-	    __sync_synchronize();
 	}
     }
 
@@ -540,14 +536,12 @@ namespace __atomic2
       __glibcxx_assert(__m != memory_order_acq_rel);
       __glibcxx_assert(__m != memory_order_consume);
 
-      if (__m == memory_order_relaxed)
-	_M_p = __p;
+      if (__m == memory_order_seq_cst)
+	__sync_swap_full (&_M_p, __p);
       else
 	{
 	  // write_mem_barrier();
 	  _M_p = __p;
-	  if (__m == memory_order_seq_cst)
-	    __sync_synchronize();
 	}
     }
 
@@ -559,14 +553,12 @@ namespace __atomic2
       __glibcxx_assert(__m != memory_order_acq_rel);
       __glibcxx_assert(__m != memory_order_consume);
 
-      if (__m == memory_order_relaxed)
-	_M_p = __p;
+      if (__m == memory_order_seq_cst)
+	__sync_swap_full (&_M_p, __p);
       else
 	{
 	  // write_mem_barrier();
 	  _M_p = __p;
-	  if (__m == memory_order_seq_cst)
-	    __sync_synchronize();
 	}
     }
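
As background (not part of the patch): __sync_swap_full is assumed here to be an atomic exchange with a full memory barrier, so performing the exchange and discarding its result gives a sequentially consistent store, which is why the separate trailing __sync_synchronize() is no longer needed. A minimal C sketch of the same idea, using the standard __atomic_exchange_n builtin as a stand-in for the assumed __sync_swap_full:

/* Minimal sketch, assuming __sync_swap_full behaves like a full-barrier
   atomic exchange; the standard __atomic_exchange_n builtin is used here
   as a stand-in for illustration only.  */
#include <stdio.h>

static int shared_value;

static void
seq_cst_store (int *addr, int val)
{
  /* An exchange with __ATOMIC_SEQ_CST acts as a store plus a full
     barrier; the previous value is discarded, mirroring the
     (void)__sync_swap_full call in the patch.  */
  (void) __atomic_exchange_n (addr, val, __ATOMIC_SEQ_CST);
}

int
main (void)
{
  seq_cst_store (&shared_value, 42);
  printf ("%d\n", shared_value);
  return 0;
}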