* [cxx-mem-model] Rename __sync_mem to __atomic
@ 2011-10-14 18:33 Andrew MacLeod
From: Andrew MacLeod @ 2011-10-14 18:33 UTC (permalink / raw)
  To: gcc-patches; +Cc: Richard Henderson

[-- Attachment #1: Type: text/plain, Size: 461 bytes --]

To align with the standard naming convention, this patch renames all 
__sync_mem references to __atomic.
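
For example (illustrative only, not part of the patch), a call site 
changes like this; the memory model predefines are renamed the same way:

    /* before */
    old = __sync_mem_exchange (&flag, 1, __SYNC_MEM_SEQ_CST);
    /* after */
    old = __atomic_exchange (&flag, 1, __ATOMIC_SEQ_CST);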

The only real addition is in builtins.c::is_builtin_name, which needs to 
know that __atomic_ is also a builtin prefix.
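
Roughly, that is just one more prefix test next to the existing ones. 
A sketch only, since the builtins.c hunk isn't quoted below:

    /* is_builtin_name: also treat "__atomic_" as a builtin prefix.  */
    if (strncmp (name, "__atomic_", 9) == 0)
      return true;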

I didn't include the testsuite changes since they are HUGE and very 
rote; they also include renaming all the sync-mem-* test files to 
atomic-*.

Bootstrapped on x86_64-unknown-linux-gnu with no new regressions.

Andrew

[-- Attachment #2: rename.diff --]
[-- Type: text/plain, Size: 122909 bytes --]


	libstdc++-v3
	* include/bits/atomic_2.h: Rename __sync_mem to __atomic.

	gcc/c-family
	* c-common.c: Rename __sync_mem to __atomic.

	gcc
	* doc/extend.texi: Rename __sync_mem to __atomic.
	* cppbuiltin.c: Rename __sync_mem to __atomic.
	* optabs.c: Rename __sync_mem to __atomic.
	* optabs.h: Rename __sync_mem to __atomic.
	* genopinit.c: Rename __sync_mem to __atomic.
	* builtins.c: Rename __sync_mem to __atomic.
	(is_builtin_name): Add __atomic_ to the builtin prefix list.
	* sync-builtins.def: Rename __sync_mem to __atomic.
	* expr.h: Rename __sync_mem to __atomic.
	* coretypes.h: Rename __sync_mem to __atomic.
	* config/i386/sync.md: Rename __sync_mem to __atomic.

	gcc/testsuite
	* sync-mem*.c: Rename all files to atomic-*, rename __sync_mem to
	__atomic.
	* simulate-thread/sync*.c: Rename all files to atomic-*, rename
	__sync_mem to __atomic.
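
For reference, the renamed compare-exchange builtin, as documented in the
extend.texi hunk below, is used along these lines (illustrative sketch
only):

    int val = 0, expected = 0;
    /* If val equals expected, store 1 and return true; otherwise the
       current value of val is copied back into expected.  */
    if (__atomic_compare_exchange (&val, &expected, 1,
                                   __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE))
      /* success */;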


Index: libstdc++-v3/include/bits/atomic_2.h
===================================================================
*** libstdc++-v3/include/bits/atomic_2.h	(revision 179922)
--- libstdc++-v3/include/bits/atomic_2.h	(working copy)
*************** namespace __atomic2
*** 60,72 ****
      bool
      test_and_set(memory_order __m = memory_order_seq_cst) noexcept
      {
!       return __sync_mem_exchange (&_M_i, 1, __m);
      }
  
      bool
      test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
      {
!       return __sync_mem_exchange (&_M_i, 1, __m);
      }
  
      void
--- 60,72 ----
      bool
      test_and_set(memory_order __m = memory_order_seq_cst) noexcept
      {
!       return __atomic_exchange (&_M_i, 1, __m);
      }
  
      bool
      test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
      {
!       return __atomic_exchange (&_M_i, 1, __m);
      }
  
      void
*************** namespace __atomic2
*** 76,82 ****
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
  
!       __sync_mem_store (&_M_i, 0, __m);
      }
  
      void
--- 76,82 ----
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
  
!       __atomic_store (&_M_i, 0, __m);
      }
  
      void
*************** namespace __atomic2
*** 86,92 ****
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
  
!       __sync_mem_store (&_M_i, 0, __m);
      }
    };
  
--- 86,92 ----
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
  
!       __atomic_store (&_M_i, 0, __m);
      }
    };
  
*************** namespace __atomic2
*** 170,228 ****
  
        __int_type
        operator++() noexcept
!       { return __sync_mem_add_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator++() volatile noexcept
!       { return __sync_mem_add_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator--() noexcept
!       { return __sync_mem_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator--() volatile noexcept
!       { return __sync_mem_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator+=(__int_type __i) noexcept
!       { return __sync_mem_add_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator+=(__int_type __i) volatile noexcept
!       { return __sync_mem_add_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator-=(__int_type __i) noexcept
!       { return __sync_mem_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator-=(__int_type __i) volatile noexcept
!       { return __sync_mem_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator&=(__int_type __i) noexcept
!       { return __sync_mem_and_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator&=(__int_type __i) volatile noexcept
!       { return __sync_mem_and_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator|=(__int_type __i) noexcept
!       { return __sync_mem_or_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator|=(__int_type __i) volatile noexcept
!       { return __sync_mem_or_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator^=(__int_type __i) noexcept
!       { return __sync_mem_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator^=(__int_type __i) volatile noexcept
!       { return __sync_mem_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        bool
        is_lock_free() const noexcept
--- 170,228 ----
  
        __int_type
        operator++() noexcept
!       { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator++() volatile noexcept
!       { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator--() noexcept
!       { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator--() volatile noexcept
!       { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
  
        __int_type
        operator+=(__int_type __i) noexcept
!       { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator+=(__int_type __i) volatile noexcept
!       { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator-=(__int_type __i) noexcept
!       { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator-=(__int_type __i) volatile noexcept
!       { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator&=(__int_type __i) noexcept
!       { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator&=(__int_type __i) volatile noexcept
!       { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator|=(__int_type __i) noexcept
!       { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator|=(__int_type __i) volatile noexcept
!       { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator^=(__int_type __i) noexcept
!       { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        __int_type
        operator^=(__int_type __i) volatile noexcept
!       { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
  
        bool
        is_lock_free() const noexcept
*************** namespace __atomic2
*** 239,245 ****
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__sync_mem_store (&_M_i, __i, __m);
        }
  
        void
--- 239,245 ----
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__atomic_store (&_M_i, __i, __m);
        }
  
        void
*************** namespace __atomic2
*** 250,256 ****
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__sync_mem_store (&_M_i, __i, __m);
        }
  
        __int_type
--- 250,256 ----
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__atomic_store (&_M_i, __i, __m);
        }
  
        __int_type
*************** namespace __atomic2
*** 259,265 ****
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __sync_mem_load (&_M_i, __m);
        }
  
        __int_type
--- 259,265 ----
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __atomic_load (&_M_i, __m);
        }
  
        __int_type
*************** namespace __atomic2
*** 268,281 ****
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __sync_mem_load (&_M_i, __m);
        }
  
        __int_type
        exchange(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) noexcept
        {
!         return __sync_mem_exchange (&_M_i, __i, __m);
        }
  
  
--- 268,281 ----
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __atomic_load (&_M_i, __m);
        }
  
        __int_type
        exchange(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) noexcept
        {
!         return __atomic_exchange (&_M_i, __i, __m);
        }
  
  
*************** namespace __atomic2
*** 283,289 ****
        exchange(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
        {
!         return __sync_mem_exchange (&_M_i, __i, __m);
        }
  
        bool
--- 283,289 ----
        exchange(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
        {
!         return __atomic_exchange (&_M_i, __i, __m);
        }
  
        bool
*************** namespace __atomic2
*** 367,418 ****
        __int_type
        fetch_add(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_add(&_M_i, __i, __m); }
  
        __int_type
        fetch_add(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_add(&_M_i, __i, __m); }
  
        __int_type
        fetch_sub(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_sub(&_M_i, __i, __m); }
  
        __int_type
        fetch_sub(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_sub(&_M_i, __i, __m); }
  
        __int_type
        fetch_and(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_and(&_M_i, __i, __m); }
  
        __int_type
        fetch_and(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_and(&_M_i, __i, __m); }
  
        __int_type
        fetch_or(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_or(&_M_i, __i, __m); }
  
        __int_type
        fetch_or(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_or(&_M_i, __i, __m); }
  
        __int_type
        fetch_xor(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_xor(&_M_i, __i, __m); }
  
        __int_type
        fetch_xor(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_xor(&_M_i, __i, __m); }
      };
  
  
--- 367,418 ----
        __int_type
        fetch_add(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_add(&_M_i, __i, __m); }
  
        __int_type
        fetch_add(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_add(&_M_i, __i, __m); }
  
        __int_type
        fetch_sub(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_sub(&_M_i, __i, __m); }
  
        __int_type
        fetch_sub(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_sub(&_M_i, __i, __m); }
  
        __int_type
        fetch_and(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_and(&_M_i, __i, __m); }
  
        __int_type
        fetch_and(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_and(&_M_i, __i, __m); }
  
        __int_type
        fetch_or(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_or(&_M_i, __i, __m); }
  
        __int_type
        fetch_or(__int_type __i,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_or(&_M_i, __i, __m); }
  
        __int_type
        fetch_xor(__int_type __i,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_xor(&_M_i, __i, __m); }
  
        __int_type
        fetch_xor(__int_type __i,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_xor(&_M_i, __i, __m); }
      };
  
  
*************** namespace __atomic2
*** 473,507 ****
  
        __pointer_type
        operator++() noexcept
!       { return __sync_mem_add_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator++() volatile noexcept
!       { return __sync_mem_add_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator--() noexcept
!       { return __sync_mem_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator--() volatile noexcept
!       { return __sync_mem_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator+=(ptrdiff_t __d) noexcept
!       { return __sync_mem_add_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator+=(ptrdiff_t __d) volatile noexcept
!       { return __sync_mem_add_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator-=(ptrdiff_t __d) noexcept
!       { return __sync_mem_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator-=(ptrdiff_t __d) volatile noexcept
!       { return __sync_mem_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        bool
        is_lock_free() const noexcept
--- 473,507 ----
  
        __pointer_type
        operator++() noexcept
!       { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator++() volatile noexcept
!       { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator--() noexcept
!       { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator--() volatile noexcept
!       { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
  
        __pointer_type
        operator+=(ptrdiff_t __d) noexcept
!       { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator+=(ptrdiff_t __d) volatile noexcept
!       { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator-=(ptrdiff_t __d) noexcept
!       { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        __pointer_type
        operator-=(ptrdiff_t __d) volatile noexcept
!       { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
  
        bool
        is_lock_free() const noexcept
*************** namespace __atomic2
*** 519,525 ****
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__sync_mem_store (&_M_p, __p, __m);
        }
  
        void
--- 519,525 ----
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__atomic_store (&_M_p, __p, __m);
        }
  
        void
*************** namespace __atomic2
*** 530,536 ****
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__sync_mem_store (&_M_p, __p, __m);
        }
  
        __pointer_type
--- 530,536 ----
  	__glibcxx_assert(__m != memory_order_acq_rel);
  	__glibcxx_assert(__m != memory_order_consume);
  
! 	__atomic_store (&_M_p, __p, __m);
        }
  
        __pointer_type
*************** namespace __atomic2
*** 539,545 ****
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __sync_mem_load (&_M_p, __m);
        }
  
        __pointer_type
--- 539,545 ----
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __atomic_load (&_M_p, __m);
        }
  
        __pointer_type
*************** namespace __atomic2
*** 548,561 ****
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __sync_mem_load (&_M_p, __m);
        }
  
        __pointer_type
        exchange(__pointer_type __p,
  	       memory_order __m = memory_order_seq_cst) noexcept
        {
! 	return __sync_mem_exchange (&_M_p, __p, __m);
        }
  
  
--- 548,561 ----
  	__glibcxx_assert(__m != memory_order_release);
  	__glibcxx_assert(__m != memory_order_acq_rel);
  
! 	return __atomic_load (&_M_p, __m);
        }
  
        __pointer_type
        exchange(__pointer_type __p,
  	       memory_order __m = memory_order_seq_cst) noexcept
        {
! 	return __atomic_exchange (&_M_p, __p, __m);
        }
  
  
*************** namespace __atomic2
*** 563,569 ****
        exchange(__pointer_type __p,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
        {
! 	return __sync_mem_exchange (&_M_p, __p, __m);
        }
  
        bool
--- 563,569 ----
        exchange(__pointer_type __p,
  	       memory_order __m = memory_order_seq_cst) volatile noexcept
        {
! 	return __atomic_exchange (&_M_p, __p, __m);
        }
  
        bool
*************** namespace __atomic2
*** 605,626 ****
        __pointer_type
        fetch_add(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_add(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_add(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_add(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_sub(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __sync_mem_fetch_sub(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_sub(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __sync_mem_fetch_sub(&_M_p, __d, __m); }
      };
  
  } // namespace __atomic2
--- 605,626 ----
        __pointer_type
        fetch_add(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_add(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_add(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_add(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_sub(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) noexcept
!       { return __atomic_fetch_sub(&_M_p, __d, __m); }
  
        __pointer_type
        fetch_sub(ptrdiff_t __d,
  		memory_order __m = memory_order_seq_cst) volatile noexcept
!       { return __atomic_fetch_sub(&_M_p, __d, __m); }
      };
  
  } // namespace __atomic2
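
For illustration (not part of the diff): with the change above, the
std::atomic operators lower to the renamed builtins, e.g.

    std::atomic<int> counter;
    ++counter;  /* calls __atomic_add_fetch (&_M_i, 1, memory_order_seq_cst) */
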
Index: gcc/doc/extend.texi
===================================================================
*** gcc/doc/extend.texi	(revision 179946)
--- gcc/doc/extend.texi	(working copy)
*************** are not prevented from being speculated 
*** 6789,6795 ****
  The following builtins approximately match the requirements for
  the C++11 memory model. Many are similar to the ``__sync'' prefixed builtins, but
  all also have a memory model parameter.  These are all identified by being
! prefixed with ``__sync_mem'', and most are overloaded such that they work
  with multiple types.
  
  GCC will allow any integral scalar or pointer type that is 1, 2, 4, or 8 bytes
--- 6789,6795 ----
  The following builtins approximately match the requirements for
  the C++11 memory model. Many are similar to the ``__sync'' prefixed builtins, but
  all also have a memory model parameter.  These are all identified by being
! prefixed with ``__atomic'', and most are overloaded such that they work
  with multiple types.
  
  GCC will allow any integral scalar or pointer type that is 1, 2, 4, or 8 bytes
*************** code motion as well as synchronization r
*** 6809,6864 ****
  are listed in approximately ascending order of strength.
  
  @table  @code
! @item __SYNC_MEM_RELAXED
  No barriers or synchronization.
! @item __SYNC_MEM_CONSUME
  Data dependency only for both barrier and synchronization with another thread.
! @item __SYNC_MEM_ACQUIRE
  Barrier to hoisting of code and synchronizes with release (or stronger)
  semantic stores from another thread.
! @item __SYNC_MEM_RELEASE
  Barrier to sinking of code and synchronizes with acquire (or stronger) semantic
  loads from another thread.
! @item __SYNC_MEM_ACQ_REL
  Full barrier in both directions and synchronizes with acquire loads and release
  stores in another thread.
! @item __SYNC_MEM_SEQ_CST
  Full barrier in both directions and synchronizes with acquire loads and release
  stores in all threads.
  @end table
  
  When implementing patterns for these builtins, the memory model parameter can
  be ignored as long as the pattern implements the most restrictive 
! __SYNC_MEM_SEQ_CST model.  Any of the other memory models will execute 
  correctly with this memory model but they may not execute as efficiently as 
  they could with a more appropriate implementation of the relaxed requirements.
  
  Note that the C++11 standard allows for the memory model parameter to be
  determined at runtime rather than at compile time.  These builtins will map any 
! runtime value to __SYNC_MEM_SEQ_CST rather than invoke a runtime library 
  call or inline a switch statement.  This is standard compliant, safe, and the 
  simplest approach for now.
  
! @item @var{type} __sync_mem_load (@var{type} *ptr, int memmodel)
! @findex __sync_mem_load
  This builtin implements an atomic load operation.  It returns the contents
  of @code{*@var{ptr}}.
  
  The valid memory model variants are
! __SYNC_MEM_RELAXED, __SYNC_MEM_SEQ_CST, __SYNC_MEM_ACQUIRE, and
! __SYNC_MEM_CONSUME.
  
! @item void __sync_mem_store (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __sync_mem_store
  This builtin implements an atomic store operation.  It writes @code{@var{val}}
  into @code{*@var{ptr}}.  On targets which are limited, 0 may be the only valid
  value. This mimics the behaviour of __sync_lock_release on such hardware.
  
  The valid memory model variants are
! __SYNC_MEM_RELAXED, __SYNC_MEM_SEQ_CST, and __SYNC_MEM_RELEASE.
  
! @item @var{type} __sync_mem_exchange (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __sync_mem_exchange
  This builtin implements an atomic exchange operation.  It writes @var{val}
  into @code{*@var{ptr}}, and returns the previous contents of @code{*@var{ptr}}.
  
--- 6809,6864 ----
  are listed in approximately ascending order of strength.
  
  @table  @code
! @item __ATOMIC_RELAXED
  No barriers or synchronization.
! @item __ATOMIC_CONSUME
  Data dependency only for both barrier and synchronization with another thread.
! @item __ATOMIC_ACQUIRE
  Barrier to hoisting of code and synchronizes with release (or stronger)
  semantic stores from another thread.
! @item __ATOMIC_RELEASE
  Barrier to sinking of code and synchronizes with acquire (or stronger) semantic
  loads from another thread.
! @item __ATOMIC_ACQ_REL
  Full barrier in both directions and synchronizes with acquire loads and release
  stores in another thread.
! @item __ATOMIC_SEQ_CST
  Full barrier in both directions and synchronizes with acquire loads and release
  stores in all threads.
  @end table
  
  When implementing patterns for these builtins, the memory model parameter can
  be ignored as long as the pattern implements the most restrictive 
! __ATOMIC_SEQ_CST model.  Any of the other memory models will execute 
  correctly with this memory model but they may not execute as efficiently as 
  they could with a more appropriate implementation of the relaxed requirements.
  
  Note that the C++11 standard allows for the memory model parameter to be
  determined at runtime rather than at compile time.  These builtins will map any 
! runtime value to __ATOMIC_SEQ_CST rather than invoke a runtime library 
  call or inline a switch statement.  This is standard compliant, safe, and the 
  simplest approach for now.
  
! @item @var{type} __atomic_load (@var{type} *ptr, int memmodel)
! @findex __atomic_load
  This builtin implements an atomic load operation.  It returns the contents
  of @code{*@var{ptr}}.
  
  The valid memory model variants are
! __ATOMIC_RELAXED, __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE, and
! __ATOMIC_CONSUME.
  
! @item void __atomic_store (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __atomic_store
  This builtin implements an atomic store operation.  It writes @code{@var{val}}
  into @code{*@var{ptr}}.  On targets which are limited, 0 may be the only valid
  value. This mimics the behaviour of __sync_lock_release on such hardware.
  
  The valid memory model variants are
! __ATOMIC_RELAXED, __ATOMIC_SEQ_CST, and __ATOMIC_RELEASE.
  
! @item @var{type} __atomic_exchange (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __atomic_exchange
  This builtin implements an atomic exchange operation.  It writes @var{val}
  into @code{*@var{ptr}}, and returns the previous contents of @code{*@var{ptr}}.
  
*************** On targets which are limited, a value of
*** 6866,6876 ****
  This mimics the behaviour of __sync_lock_test_and_set on such hardware.
  
  The valid memory model variants are
! __SYNC_MEM_RELAXED, __SYNC_MEM_SEQ_CST, __SYNC_MEM_ACQUIRE,
! __SYNC_MEM_RELEASE, and __SYNC_MEM_ACQ_REL.
  
! @item bool __sync_mem_compare_exchange (@var{type} *ptr, @var{type} *expected, @var{type} desired, int success_memmodel, int failure_memmodel)
! @findex __sync_mem_compare_exchange
  This builtin implements an atomic compare_exchange operation.  This compares the
  contents of @code{*@var{ptr}} with the contents of @code{*@var{expected}} and if
  equal, writes @var{desired} into @code{*@var{ptr}}.  If they are not equal, the
--- 6866,6876 ----
  This mimics the behaviour of __sync_lock_test_and_set on such hardware.
  
  The valid memory model variants are
! __ATOMIC_RELAXED, __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE,
! __ATOMIC_RELEASE, and __ATOMIC_ACQ_REL.
  
! @item bool __atomic_compare_exchange (@var{type} *ptr, @var{type} *expected, @var{type} desired, int success_memmodel, int failure_memmodel)
! @findex __atomic_compare_exchange
  This builtin implements an atomic compare_exchange operation.  This compares the
  contents of @code{*@var{ptr}} with the contents of @code{*@var{expected}} and if
  equal, writes @var{desired} into @code{*@var{ptr}}.  If they are not equal, the
*************** the execution is considered to conform t
*** 6882,6901 ****
  used here.
  
  False is returned otherwise, and the execution is considered to conform to
! @var{failure_memmodel}. This memory model cannot be __SYNC_MEM_RELEASE nor
! __SYNC_MEM_ACQ_REL.  It also cannot be a stronger model than that specified
  by @var{success_memmodel}.
  
! @item @var{type} __sync_mem_add_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_sub_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_and_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_xor_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_or_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __sync_mem_add_fetch
! @findex __sync_mem_sub_fetch
! @findex __sync_mem_and_fetch
! @findex __sync_mem_xor_fetch
! @findex __sync_mem_or_fetch
  These builtins perform the operation suggested by the name, and return the result 
  of the operation. That is,
  
--- 6882,6901 ----
  used here.
  
  False is returned otherwise, and the execution is considered to conform to
! @var{failure_memmodel}. This memory model cannot be __ATOMIC_RELEASE nor
! __ATOMIC_ACQ_REL.  It also cannot be a stronger model than that specified
  by @var{success_memmodel}.
  
! @item @var{type} __atomic_add_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_sub_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_and_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_xor_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_or_fetch (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __atomic_add_fetch
! @findex __atomic_sub_fetch
! @findex __atomic_and_fetch
! @findex __atomic_xor_fetch
! @findex __atomic_or_fetch
  These builtins perform the operation suggested by the name, and return the result 
  of the operation. That is,
  
*************** of the operation. That is,
*** 6905,6920 ****
  
  All memory models are valid.
  
! @item @var{type} __sync_mem_fetch_add (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_fetch_sub (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_fetch_and (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_fetch_xor (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __sync_mem_fetch_or (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __sync_mem_fetch_add
! @findex __sync_mem_fetch_sub
! @findex __sync_mem_fetch_and
! @findex __sync_mem_fetch_xor
! @findex __sync_mem_fetch_or
  These builtins perform the operation suggested by the name, and return the value
  that had previously been in *ptr.  That is,
  
--- 6905,6920 ----
  
  All memory models are valid.
  
! @item @var{type} __atomic_fetch_add (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_fetch_sub (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_fetch_and (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_fetch_xor (@var{type} *ptr, @var{type} val, int memmodel)
! @itemx @var{type} __atomic_fetch_or (@var{type} *ptr, @var{type} val, int memmodel)
! @findex __atomic_fetch_add
! @findex __atomic_fetch_sub
! @findex __atomic_fetch_and
! @findex __atomic_fetch_xor
! @findex __atomic_fetch_or
  These builtins perform the operation suggested by the name, and return the value
  that had previously been in *ptr.  That is,
  
*************** that had previously been in *ptr .  That
*** 6924,6947 ****
  
  All memory models are valid.
  
! @item void __sync_mem_thread_fence (int memmodel)
! @findex __sync_mem_thread_fence
  
  This builtin acts as a synchronization fence between threads based on the
  specified memory model.
  
  All memory orders are valid.
  
! @item void __sync_mem_signal_fence (int memmodel)
! @findex __sync_mem_signal_fence
  
  This builtin acts as a synchronization fence between a thread and signal
  handlers based in the same thread.
  
  All memory orders are valid.
  
! @item bool __sync_mem_always_lock_free (size_t size)
! @findex __sync_mem_always_lock_free
  
  This builtin returns true if objects of size bytes will always generate lock
  free atomic instructions for the target architecture.  Otherwise false is
--- 6924,6947 ----
  
  All memory models are valid.
  
! @item void __atomic_thread_fence (int memmodel)
! @findex __atomic_thread_fence
  
  This builtin acts as a synchronization fence between threads based on the
  specified memory model.
  
  All memory orders are valid.
  
! @item void __atomic_signal_fence (int memmodel)
! @findex __atomic_signal_fence
  
  This builtin acts as a synchronization fence between a thread and signal
  handlers based in the same thread.
  
  All memory orders are valid.
  
! @item bool __atomic_always_lock_free (size_t size)
! @findex __atomic_always_lock_free
  
  This builtin returns true if objects of size bytes will always generate lock
  free atomic instructions for the target architecture.  Otherwise false is
*************** returned.
*** 6950,6964 ****
  size must resolve to a compile time constant.
  
  @smallexample
! if (_sync_mem_always_lock_free (sizeof (long long)))
  @end smallexample
  
! @item bool __sync_mem_is_lock_free (size_t size)
! @findex __sync_mem_is_lock_free
  
  This builtin returns true if objects of size bytes will always generate lock
  free atomic instructions for the target architecture.  If it is not known to
! be lock free a call is made to a runtime routine named __sync_mem_is_lock_free.
  
  @end table
  
--- 6950,6964 ----
  size must resolve to a compile time constant.
  
  @smallexample
! if (__atomic_always_lock_free (sizeof (long long)))
  @end smallexample
  
! @item bool __atomic_is_lock_free (size_t size)
! @findex __atomic_is_lock_free
  
  This builtin returns true if objects of size bytes will always generate lock
  free atomic instructions for the target architecture.  If it is not known to
! be lock free a call is made to a runtime routine named __atomic_is_lock_free.
  
  @end table
  
Index: gcc/cppbuiltin.c
===================================================================
*** gcc/cppbuiltin.c	(revision 179922)
--- gcc/cppbuiltin.c	(working copy)
*************** define__GNUC__ (cpp_reader *pfile)
*** 66,77 ****
    cpp_define_formatted (pfile, "__GNUC_MINOR__=%d", minor);
    cpp_define_formatted (pfile, "__GNUC_PATCHLEVEL__=%d", patchlevel);
    cpp_define_formatted (pfile, "__VERSION__=\"%s\"", version_string);
!   cpp_define_formatted (pfile, "__SYNC_MEM_RELAXED=%d", MEMMODEL_RELAXED);
!   cpp_define_formatted (pfile, "__SYNC_MEM_SEQ_CST=%d", MEMMODEL_SEQ_CST);
!   cpp_define_formatted (pfile, "__SYNC_MEM_ACQUIRE=%d", MEMMODEL_ACQUIRE);
!   cpp_define_formatted (pfile, "__SYNC_MEM_RELEASE=%d", MEMMODEL_RELEASE);
!   cpp_define_formatted (pfile, "__SYNC_MEM_ACQ_REL=%d", MEMMODEL_ACQ_REL);
!   cpp_define_formatted (pfile, "__SYNC_MEM_CONSUME=%d", MEMMODEL_CONSUME);
  }
  
  
--- 66,77 ----
    cpp_define_formatted (pfile, "__GNUC_MINOR__=%d", minor);
    cpp_define_formatted (pfile, "__GNUC_PATCHLEVEL__=%d", patchlevel);
    cpp_define_formatted (pfile, "__VERSION__=\"%s\"", version_string);
!   cpp_define_formatted (pfile, "__ATOMIC_RELAXED=%d", MEMMODEL_RELAXED);
!   cpp_define_formatted (pfile, "__ATOMIC_SEQ_CST=%d", MEMMODEL_SEQ_CST);
!   cpp_define_formatted (pfile, "__ATOMIC_ACQUIRE=%d", MEMMODEL_ACQUIRE);
!   cpp_define_formatted (pfile, "__ATOMIC_RELEASE=%d", MEMMODEL_RELEASE);
!   cpp_define_formatted (pfile, "__ATOMIC_ACQ_REL=%d", MEMMODEL_ACQ_REL);
!   cpp_define_formatted (pfile, "__ATOMIC_CONSUME=%d", MEMMODEL_CONSUME);
  }
  
  
Index: gcc/c-family/c-common.c
===================================================================
*** gcc/c-family/c-common.c	(revision 179922)
--- gcc/c-family/c-common.c	(working copy)
*************** sync_resolve_params (tree orig_function,
*** 9079,9085 ****
        function_args_iter_next (&iter);
      }
  
!   /* __sync_mem routines are not variadic.  */
    if (!orig_format && VEC_length (tree, params) != parmnum + 1)
      {
        error ("too many arguments to function %qE", orig_function);
--- 9079,9085 ----
        function_args_iter_next (&iter);
      }
  
!   /* __atomic routines are not variadic.  */
    if (!orig_format && VEC_length (tree, params) != parmnum + 1)
      {
        error ("too many arguments to function %qE", orig_function);
*************** resolve_overloaded_builtin (location_t l
*** 9146,9167 ****
    /* Handle BUILT_IN_NORMAL here.  */
    switch (orig_code)
      {
!     case BUILT_IN_SYNC_MEM_EXCHANGE_N:
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_N:
!     case BUILT_IN_SYNC_MEM_LOAD_N:
!     case BUILT_IN_SYNC_MEM_STORE_N:
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_N:
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_N:
!     case BUILT_IN_SYNC_MEM_AND_FETCH_N:
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_N:
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_N:
!     case BUILT_IN_SYNC_MEM_OR_FETCH_N:
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_N:
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_N:
!     case BUILT_IN_SYNC_MEM_FETCH_AND_N:
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_N:
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_N:
!     case BUILT_IN_SYNC_MEM_FETCH_OR_N:
        {
          orig_format = false;
  	/* Fallthru for parameter processing.  */
--- 9146,9167 ----
    /* Handle BUILT_IN_NORMAL here.  */
    switch (orig_code)
      {
!     case BUILT_IN_ATOMIC_EXCHANGE_N:
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
!     case BUILT_IN_ATOMIC_LOAD_N:
!     case BUILT_IN_ATOMIC_STORE_N:
!     case BUILT_IN_ATOMIC_ADD_FETCH_N:
!     case BUILT_IN_ATOMIC_SUB_FETCH_N:
!     case BUILT_IN_ATOMIC_AND_FETCH_N:
!     case BUILT_IN_ATOMIC_NAND_FETCH_N:
!     case BUILT_IN_ATOMIC_XOR_FETCH_N:
!     case BUILT_IN_ATOMIC_OR_FETCH_N:
!     case BUILT_IN_ATOMIC_FETCH_ADD_N:
!     case BUILT_IN_ATOMIC_FETCH_SUB_N:
!     case BUILT_IN_ATOMIC_FETCH_AND_N:
!     case BUILT_IN_ATOMIC_FETCH_NAND_N:
!     case BUILT_IN_ATOMIC_FETCH_XOR_N:
!     case BUILT_IN_ATOMIC_FETCH_OR_N:
        {
          orig_format = false;
  	/* Fallthru for parameter processing.  */
*************** resolve_overloaded_builtin (location_t l
*** 9199,9205 ****
  	result = build_function_call_vec (loc, new_function, params, NULL);
  	if (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
  	    && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N
! 	    && orig_code != BUILT_IN_SYNC_MEM_STORE_N)
  	  result = sync_resolve_return (first_param, result, orig_format);
  
  	return result;
--- 9199,9205 ----
  	result = build_function_call_vec (loc, new_function, params, NULL);
  	if (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
  	    && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N
! 	    && orig_code != BUILT_IN_ATOMIC_STORE_N)
  	  result = sync_resolve_return (first_param, result, orig_format);
  
  	return result;
Index: gcc/optabs.c
===================================================================
*** gcc/optabs.c	(revision 179922)
--- gcc/optabs.c	(working copy)
*************** expand_compare_and_swap_loop (rtx mem, r
*** 6962,6975 ****
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_sync_mem_exchange (rtx target, rtx mem, rtx val, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
    rtx last_insn;
  
    /* If the target supports the exchange directly, great.  */
!   icode = direct_optab_handler (sync_mem_exchange_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[4];
--- 6962,6975 ----
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_atomic_exchange (rtx target, rtx mem, rtx val, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
    rtx last_insn;
  
    /* If the target supports the exchange directly, great.  */
!   icode = direct_optab_handler (atomic_exchange_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[4];
*************** expand_sync_mem_exchange (rtx target, rt
*** 7034,7040 ****
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_sync_mem_compare_exchange (rtx target, rtx mem, rtx expected, 
  				  rtx desired, enum memmodel success, 
  				  enum memmodel failure)
  {
--- 7034,7040 ----
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_atomic_compare_exchange (rtx target, rtx mem, rtx expected, 
  				  rtx desired, enum memmodel success, 
  				  enum memmodel failure)
  {
*************** expand_sync_mem_compare_exchange (rtx ta
*** 7042,7048 ****
    enum insn_code icode;
  
    /* If the target supports the exchange directly, great.  */
!   icode = direct_optab_handler (sync_mem_compare_exchange_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[6];
--- 7042,7048 ----
    enum insn_code icode;
  
    /* If the target supports the exchange directly, great.  */
!   icode = direct_optab_handler (atomic_compare_exchange_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[6];
*************** expand_sync_mem_compare_exchange (rtx ta
*** 7100,7112 ****
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_sync_mem_load (rtx target, rtx mem, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
  
    /* If the target supports the load directly, great.  */
!   icode = direct_optab_handler (sync_mem_load_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[3];
--- 7100,7112 ----
   TARGET is an optional place to stick the return value.  */
  
  rtx
! expand_atomic_load (rtx target, rtx mem, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
  
    /* If the target supports the load directly, great.  */
!   icode = direct_optab_handler (atomic_load_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        struct expand_operand ops[3];
*************** expand_sync_mem_load (rtx target, rtx me
*** 7152,7165 ****
     function returns const0_rtx if a pattern was emitted.  */
  
  rtx
! expand_sync_mem_store (rtx mem, rtx val, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
    struct expand_operand ops[3];
  
    /* If the target supports the store directly, great.  */
!   icode = direct_optab_handler (sync_mem_store_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
  
--- 7152,7165 ----
     function returns const0_rtx if a pattern was emitted.  */
  
  rtx
! expand_atomic_store (rtx mem, rtx val, enum memmodel model)
  {
    enum machine_mode mode = GET_MODE (mem);
    enum insn_code icode;
    struct expand_operand ops[3];
  
    /* If the target supports the store directly, great.  */
!   icode = direct_optab_handler (atomic_store_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
  
*************** expand_sync_mem_store (rtx mem, rtx val,
*** 7193,7199 ****
       the result.  If that doesn't work, don't do anything.  */
    if (GET_MODE_PRECISION(mode) > BITS_PER_WORD)
      {
!       rtx target = expand_sync_mem_exchange (NULL_RTX, mem, val, model);
        if (target)
          return const0_rtx;
        else
--- 7193,7199 ----
       the result.  If that doesn't work, don't do anything.  */
    if (GET_MODE_PRECISION(mode) > BITS_PER_WORD)
      {
!       rtx target = expand_atomic_exchange (NULL_RTX, mem, val, model);
        if (target)
          return const0_rtx;
        else
*************** expand_sync_mem_store (rtx mem, rtx val,
*** 7215,7221 ****
  
  
  /* Structure containing the pointers and values required to process the
!    various forms of the sync_mem_fetch_op and sync_mem_op_fetch builtins.  */
  struct op_functions {
    struct direct_optab_d *mem_fetch_before;
    struct direct_optab_d *mem_fetch_after;
--- 7215,7221 ----
  
  
  /* Structure containing the pointers and values required to process the
!    various forms of the atomic_fetch_op and atomic_op_fetch builtins.  */
  struct op_functions {
    struct direct_optab_d *mem_fetch_before;
    struct direct_optab_d *mem_fetch_after;
*************** struct op_functions {
*** 7227,7280 ****
  };
  
  /* Initialize the fields for each supported opcode.  */
! static const struct op_functions add_op = { sync_mem_fetch_add_optab,
! 				      sync_mem_add_fetch_optab,
! 				      sync_mem_add_optab,
  				      sync_old_add_optab,
  				      sync_new_add_optab,
  				      sync_add_optab,
  				      MINUS
  				    };
  
! static const struct op_functions sub_op = { sync_mem_fetch_sub_optab,
! 					    sync_mem_sub_fetch_optab,
! 					    sync_mem_sub_optab,
  					    sync_old_sub_optab,
  					    sync_new_sub_optab,
  					    sync_sub_optab,
  					    PLUS
  					  };
  
! static const struct op_functions xor_op = { sync_mem_fetch_xor_optab,
! 					    sync_mem_xor_fetch_optab,
! 					    sync_mem_xor_optab,
  					    sync_old_xor_optab,
  					    sync_new_xor_optab,
  					    sync_xor_optab,
  					    UNKNOWN
  					  };
  
! static const struct op_functions and_op = { sync_mem_fetch_and_optab,
! 					    sync_mem_and_fetch_optab,
! 					    sync_mem_and_optab,
  					    sync_old_and_optab,
  					    sync_new_and_optab,
  					    sync_and_optab,
  					    UNKNOWN
  					  };
  
! static const struct op_functions nand_op = { sync_mem_fetch_nand_optab,
! 					     sync_mem_nand_fetch_optab,
! 					     sync_mem_nand_optab,
  					     sync_old_nand_optab,
  					     sync_new_nand_optab,
  					     sync_nand_optab,
  					     UNKNOWN
  					   };
  
! static const struct op_functions or_op = { sync_mem_fetch_or_optab,
! 					   sync_mem_or_fetch_optab,
! 					   sync_mem_or_optab,
  					   sync_old_ior_optab,
  					   sync_new_ior_optab,
  					   sync_ior_optab,
--- 7227,7280 ----
  };
  
  /* Initialize the fields for each supported opcode.  */
! static const struct op_functions add_op = { atomic_fetch_add_optab,
! 				      atomic_add_fetch_optab,
! 				      atomic_add_optab,
  				      sync_old_add_optab,
  				      sync_new_add_optab,
  				      sync_add_optab,
  				      MINUS
  				    };
  
! static const struct op_functions sub_op = { atomic_fetch_sub_optab,
! 					    atomic_sub_fetch_optab,
! 					    atomic_sub_optab,
  					    sync_old_sub_optab,
  					    sync_new_sub_optab,
  					    sync_sub_optab,
  					    PLUS
  					  };
  
! static const struct op_functions xor_op = { atomic_fetch_xor_optab,
! 					    atomic_xor_fetch_optab,
! 					    atomic_xor_optab,
  					    sync_old_xor_optab,
  					    sync_new_xor_optab,
  					    sync_xor_optab,
  					    UNKNOWN
  					  };
  
! static const struct op_functions and_op = { atomic_fetch_and_optab,
! 					    atomic_and_fetch_optab,
! 					    atomic_and_optab,
  					    sync_old_and_optab,
  					    sync_new_and_optab,
  					    sync_and_optab,
  					    UNKNOWN
  					  };
  
! static const struct op_functions nand_op = { atomic_fetch_nand_optab,
! 					     atomic_nand_fetch_optab,
! 					     atomic_nand_optab,
  					     sync_old_nand_optab,
  					     sync_new_nand_optab,
  					     sync_nand_optab,
  					     UNKNOWN
  					   };
  
! static const struct op_functions or_op = { atomic_fetch_or_optab,
! 					   atomic_or_fetch_optab,
! 					   atomic_or_optab,
  					   sync_old_ior_optab,
  					   sync_new_ior_optab,
  					   sync_ior_optab,
*************** maybe_emit_op (const struct op_functions
*** 7357,7363 ****
     AFTER is true to return the result of the operation (OP_fetch).
     AFTER is false to return the value before the operation (fetch_OP).  */
  rtx
! expand_sync_mem_fetch_op (rtx target, rtx mem, rtx val, enum rtx_code code,
  			  enum memmodel model, bool after)
  {
    enum machine_mode mode = GET_MODE (mem);
--- 7357,7363 ----
     AFTER is true to return the result of the operation (OP_fetch).
     AFTER is false to return the value before the operation (fetch_OP).  */
  rtx
! expand_atomic_fetch_op (rtx target, rtx mem, rtx val, enum rtx_code code,
  			  enum memmodel model, bool after)
  {
    enum machine_mode mode = GET_MODE (mem);
*************** expand_sync_mem_fetch_op (rtx target, rt
*** 7406,7412 ****
        target = NULL_RTX;
      }
  
!   /* Try the __sync_mem version.  */
    result = maybe_emit_op (optab, target, mem, val, true, model, after);
    if (result)
      return result;
--- 7406,7412 ----
        target = NULL_RTX;
      }
  
!   /* Try the __atomic version.  */
    result = maybe_emit_op (optab, target, mem, val, true, model, after);
    if (result)
      return result;
*************** expand_sync_mem_fetch_op (rtx target, rt
*** 7420,7426 ****
       try that operation.  */
    if (after || optab->reverse_code != UNKNOWN || target == const0_rtx) 
      {
!       /* Try the __sync_mem version, then the older __sync version.  */
        result = maybe_emit_op (optab, target, mem, val, true, model, !after);
        if (!result)
  	result = maybe_emit_op (optab, target, mem, val, false, model, !after);
--- 7420,7426 ----
       try that operation.  */
    if (after || optab->reverse_code != UNKNOWN || target == const0_rtx) 
      {
!       /* Try the __atomic version, then the older __sync version.  */
        result = maybe_emit_op (optab, target, mem, val, true, model, !after);
        if (!result)
  	result = maybe_emit_op (optab, target, mem, val, false, model, !after);
Index: gcc/optabs.h
===================================================================
*** gcc/optabs.h	(revision 179946)
--- gcc/optabs.h	(working copy)
*************** enum direct_optab_index
*** 689,720 ****
    DOI_sync_lock_release,
  
    /* Atomic operations with memory model parameters. */
!   DOI_sync_mem_exchange,
!   DOI_sync_mem_compare_exchange,
!   DOI_sync_mem_load,
!   DOI_sync_mem_store,
!   DOI_sync_mem_add_fetch,
!   DOI_sync_mem_sub_fetch,
!   DOI_sync_mem_and_fetch,
!   DOI_sync_mem_nand_fetch,
!   DOI_sync_mem_xor_fetch,
!   DOI_sync_mem_or_fetch,
!   DOI_sync_mem_fetch_add,
!   DOI_sync_mem_fetch_sub,
!   DOI_sync_mem_fetch_and,
!   DOI_sync_mem_fetch_nand,
!   DOI_sync_mem_fetch_xor,
!   DOI_sync_mem_fetch_or,
!   DOI_sync_mem_add,
!   DOI_sync_mem_sub,
!   DOI_sync_mem_and,
!   DOI_sync_mem_nand,
!   DOI_sync_mem_xor,
!   DOI_sync_mem_or,
!   DOI_sync_mem_always_lock_free,
!   DOI_sync_mem_is_lock_free,
!   DOI_sync_mem_thread_fence,
!   DOI_sync_mem_signal_fence,
  
    DOI_MAX
  };
--- 689,720 ----
    DOI_sync_lock_release,
  
    /* Atomic operations with memory model parameters. */
!   DOI_atomic_exchange,
!   DOI_atomic_compare_exchange,
!   DOI_atomic_load,
!   DOI_atomic_store,
!   DOI_atomic_add_fetch,
!   DOI_atomic_sub_fetch,
!   DOI_atomic_and_fetch,
!   DOI_atomic_nand_fetch,
!   DOI_atomic_xor_fetch,
!   DOI_atomic_or_fetch,
!   DOI_atomic_fetch_add,
!   DOI_atomic_fetch_sub,
!   DOI_atomic_fetch_and,
!   DOI_atomic_fetch_nand,
!   DOI_atomic_fetch_xor,
!   DOI_atomic_fetch_or,
!   DOI_atomic_add,
!   DOI_atomic_sub,
!   DOI_atomic_and,
!   DOI_atomic_nand,
!   DOI_atomic_xor,
!   DOI_atomic_or,
!   DOI_atomic_always_lock_free,
!   DOI_atomic_is_lock_free,
!   DOI_atomic_thread_fence,
!   DOI_atomic_signal_fence,
  
    DOI_MAX
  };
*************** typedef struct direct_optab_d *direct_op
*** 763,820 ****
  #define sync_lock_release_optab \
    (&direct_optab_table[(int) DOI_sync_lock_release])
  
! #define sync_mem_exchange_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_exchange])
! #define sync_mem_compare_exchange_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_compare_exchange])
! #define sync_mem_load_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_load])
! #define sync_mem_store_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_store])
! #define sync_mem_add_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_add_fetch])
! #define sync_mem_sub_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_sub_fetch])
! #define sync_mem_and_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_and_fetch])
! #define sync_mem_nand_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_nand_fetch])
! #define sync_mem_xor_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_xor_fetch])
! #define sync_mem_or_fetch_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_or_fetch])
! #define sync_mem_fetch_add_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_add])
! #define sync_mem_fetch_sub_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_sub])
! #define sync_mem_fetch_and_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_and])
! #define sync_mem_fetch_nand_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_nand])
! #define sync_mem_fetch_xor_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_xor])
! #define sync_mem_fetch_or_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_fetch_or])
! #define sync_mem_add_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_add])
! #define sync_mem_sub_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_sub])
! #define sync_mem_and_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_and])
! #define sync_mem_nand_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_nand])
! #define sync_mem_xor_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_xor])
! #define sync_mem_or_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_or])
! #define sync_mem_always_lock_free_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_always_lock_free])
! #define sync_mem_is_lock_free_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_is_lock_free])
! #define sync_mem_thread_fence_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_thread_fence])
! #define sync_mem_signal_fence_optab \
!   (&direct_optab_table[(int) DOI_sync_mem_signal_fence])
  \f
  /* Target-dependent globals.  */
  struct target_optabs {
--- 763,820 ----
  #define sync_lock_release_optab \
    (&direct_optab_table[(int) DOI_sync_lock_release])
  
! #define atomic_exchange_optab \
!   (&direct_optab_table[(int) DOI_atomic_exchange])
! #define atomic_compare_exchange_optab \
!   (&direct_optab_table[(int) DOI_atomic_compare_exchange])
! #define atomic_load_optab \
!   (&direct_optab_table[(int) DOI_atomic_load])
! #define atomic_store_optab \
!   (&direct_optab_table[(int) DOI_atomic_store])
! #define atomic_add_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_add_fetch])
! #define atomic_sub_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_sub_fetch])
! #define atomic_and_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_and_fetch])
! #define atomic_nand_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_nand_fetch])
! #define atomic_xor_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_xor_fetch])
! #define atomic_or_fetch_optab \
!   (&direct_optab_table[(int) DOI_atomic_or_fetch])
! #define atomic_fetch_add_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_add])
! #define atomic_fetch_sub_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_sub])
! #define atomic_fetch_and_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_and])
! #define atomic_fetch_nand_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_nand])
! #define atomic_fetch_xor_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_xor])
! #define atomic_fetch_or_optab \
!   (&direct_optab_table[(int) DOI_atomic_fetch_or])
! #define atomic_add_optab \
!   (&direct_optab_table[(int) DOI_atomic_add])
! #define atomic_sub_optab \
!   (&direct_optab_table[(int) DOI_atomic_sub])
! #define atomic_and_optab \
!   (&direct_optab_table[(int) DOI_atomic_and])
! #define atomic_nand_optab \
!   (&direct_optab_table[(int) DOI_atomic_nand])
! #define atomic_xor_optab \
!   (&direct_optab_table[(int) DOI_atomic_xor])
! #define atomic_or_optab \
!   (&direct_optab_table[(int) DOI_atomic_or])
! #define atomic_always_lock_free_optab \
!   (&direct_optab_table[(int) DOI_atomic_always_lock_free])
! #define atomic_is_lock_free_optab \
!   (&direct_optab_table[(int) DOI_atomic_is_lock_free])
! #define atomic_thread_fence_optab \
!   (&direct_optab_table[(int) DOI_atomic_thread_fence])
! #define atomic_signal_fence_optab \
!   (&direct_optab_table[(int) DOI_atomic_signal_fence])
  \f
  /* Target-dependent globals.  */
  struct target_optabs {
Index: gcc/genopinit.c
===================================================================
*** gcc/genopinit.c	(revision 179922)
--- gcc/genopinit.c	(working copy)
*************** static const char * const optabs[] =
*** 243,264 ****
    "set_direct_optab_handler (sync_compare_and_swap_optab, $A, CODE_FOR_$(sync_compare_and_swap$I$a$))",
    "set_direct_optab_handler (sync_lock_test_and_set_optab, $A, CODE_FOR_$(sync_lock_test_and_set$I$a$))",
    "set_direct_optab_handler (sync_lock_release_optab, $A, CODE_FOR_$(sync_lock_release$I$a$))",
!   "set_direct_optab_handler (sync_mem_exchange_optab, $A, CODE_FOR_$(sync_mem_exchange$I$a$))",
!   "set_direct_optab_handler (sync_mem_compare_exchange_optab, $A, CODE_FOR_$(sync_mem_compare_exchange$I$a$))",
!   "set_direct_optab_handler (sync_mem_load_optab, $A, CODE_FOR_$(sync_mem_load$I$a$))",
!   "set_direct_optab_handler (sync_mem_store_optab, $A, CODE_FOR_$(sync_mem_store$I$a$))",
!   "set_direct_optab_handler (sync_mem_add_fetch_optab, $A, CODE_FOR_$(sync_mem_add_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_sub_fetch_optab, $A, CODE_FOR_$(sync_mem_sub_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_and_fetch_optab, $A, CODE_FOR_$(sync_mem_and_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_nand_fetch_optab, $A, CODE_FOR_$(sync_mem_nand_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_xor_fetch_optab, $A, CODE_FOR_$(sync_mem_xor_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_or_fetch_optab, $A, CODE_FOR_$(sync_mem_or_fetch$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_add_optab, $A, CODE_FOR_$(sync_mem_fetch_add$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_sub_optab, $A, CODE_FOR_$(sync_mem_fetch_sub$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_and_optab, $A, CODE_FOR_$(sync_mem_fetch_and$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_nand_optab, $A, CODE_FOR_$(sync_mem_fetch_nand$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_xor_optab, $A, CODE_FOR_$(sync_mem_fetch_xor$I$a$))",
!   "set_direct_optab_handler (sync_mem_fetch_or_optab, $A, CODE_FOR_$(sync_mem_fetch_or$I$a$))",
    "set_optab_handler (vec_set_optab, $A, CODE_FOR_$(vec_set$a$))",
    "set_optab_handler (vec_extract_optab, $A, CODE_FOR_$(vec_extract$a$))",
    "set_optab_handler (vec_extract_even_optab, $A, CODE_FOR_$(vec_extract_even$a$))",
--- 243,264 ----
    "set_direct_optab_handler (sync_compare_and_swap_optab, $A, CODE_FOR_$(sync_compare_and_swap$I$a$))",
    "set_direct_optab_handler (sync_lock_test_and_set_optab, $A, CODE_FOR_$(sync_lock_test_and_set$I$a$))",
    "set_direct_optab_handler (sync_lock_release_optab, $A, CODE_FOR_$(sync_lock_release$I$a$))",
!   "set_direct_optab_handler (atomic_exchange_optab, $A, CODE_FOR_$(atomic_exchange$I$a$))",
!   "set_direct_optab_handler (atomic_compare_exchange_optab, $A, CODE_FOR_$(atomic_compare_exchange$I$a$))",
!   "set_direct_optab_handler (atomic_load_optab, $A, CODE_FOR_$(atomic_load$I$a$))",
!   "set_direct_optab_handler (atomic_store_optab, $A, CODE_FOR_$(atomic_store$I$a$))",
!   "set_direct_optab_handler (atomic_add_fetch_optab, $A, CODE_FOR_$(atomic_add_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_sub_fetch_optab, $A, CODE_FOR_$(atomic_sub_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_and_fetch_optab, $A, CODE_FOR_$(atomic_and_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_nand_fetch_optab, $A, CODE_FOR_$(atomic_nand_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_xor_fetch_optab, $A, CODE_FOR_$(atomic_xor_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_or_fetch_optab, $A, CODE_FOR_$(atomic_or_fetch$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_add_optab, $A, CODE_FOR_$(atomic_fetch_add$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_sub_optab, $A, CODE_FOR_$(atomic_fetch_sub$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_and_optab, $A, CODE_FOR_$(atomic_fetch_and$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_nand_optab, $A, CODE_FOR_$(atomic_fetch_nand$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_xor_optab, $A, CODE_FOR_$(atomic_fetch_xor$I$a$))",
!   "set_direct_optab_handler (atomic_fetch_or_optab, $A, CODE_FOR_$(atomic_fetch_or$I$a$))",
    "set_optab_handler (vec_set_optab, $A, CODE_FOR_$(vec_set$a$))",
    "set_optab_handler (vec_extract_optab, $A, CODE_FOR_$(vec_extract$a$))",
    "set_optab_handler (vec_extract_even_optab, $A, CODE_FOR_$(vec_extract_even$a$))",
Index: gcc/builtins.c
===================================================================
*** gcc/builtins.c	(revision 179946)
--- gcc/builtins.c	(working copy)
*************** is_builtin_name (const char *name)
*** 234,239 ****
--- 234,241 ----
      return true;
    if (strncmp (name, "__sync_", 7) == 0)
      return true;
+   if (strncmp (name, "__atomic_", 9) == 0)
+     return true;
    return false;
  }
  
*************** get_builtin_sync_mode (int fcode_diff)
*** 5070,5076 ****
     for the builtin_sync operations.  */
  
  static rtx
! get_builtin_sync_mem (tree loc, enum machine_mode mode)
  {
    rtx addr, mem;
  
--- 5072,5078 ----
     for the builtin_sync operations.  */
  
  static rtx
! get_builtin_atomic (tree loc, enum machine_mode mode)
  {
    rtx addr, mem;
  
*************** expand_builtin_sync_operation (enum mach
*** 5171,5180 ****
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_sync_mem_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
  				     after);
  }
  
--- 5173,5182 ----
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
  				     after);
  }
  
*************** expand_builtin_compare_and_swap (enum ma
*** 5190,5196 ****
    rtx old_val, new_val, mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
    new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
  
--- 5192,5198 ----
    rtx old_val, new_val, mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
    new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
  
*************** expand_builtin_sync_lock_test_and_set (e
*** 5213,5222 ****
    rtx val, mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_sync_mem_exchange (target, mem, val, MEMMODEL_ACQUIRE);
  }
  
  /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
--- 5215,5224 ----
    rtx val, mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_atomic_exchange (target, mem, val, MEMMODEL_ACQUIRE);
  }
  
  /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
*************** expand_builtin_sync_lock_release (enum m
*** 5227,5235 ****
    rtx mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  
!   expand_sync_mem_store (mem, const0_rtx, MEMMODEL_RELEASE);
  }
  
  /* Given an integer representing an ``enum memmodel'', verify its
--- 5229,5237 ----
    rtx mem;
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
  
!   expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE);
  }
  
  /* Given an integer representing an ``enum memmodel'', verify its
*************** get_memmodel (tree exp)
*** 5254,5266 ****
    return (enum memmodel) INTVAL (op);
  }
  
! /* Expand the __sync_mem_exchange intrinsic:
!    	TYPE __sync_mem_exchange (TYPE *object, TYPE desired, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_sync_mem_exchange (enum machine_mode mode, tree exp, rtx target)
  {
    rtx val, mem;
    enum memmodel model;
--- 5256,5268 ----
    return (enum memmodel) INTVAL (op);
  }
  
! /* Expand the __atomic_exchange intrinsic:
!    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
  {
    rtx val, mem;
    enum memmodel model;
*************** expand_builtin_sync_mem_exchange (enum m
*** 5268,5293 ****
    model = get_memmodel (CALL_EXPR_ARG (exp, 2));
    if (model == MEMMODEL_CONSUME)
      {
!       error ("invalid memory model for %<__sync_mem_exchange%>");
        return NULL_RTX;
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_sync_mem_exchange (target, mem, val, model);
  }
  
! /* Expand the __sync_mem_compare_exchange intrinsic:
!    	bool __sync_mem_compare_exchange (TYPE *object, TYPE *expect, 
  					  TYPE desired, enum memmodel success,
  					  enum memmodel failure)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_sync_mem_compare_exchange (enum machine_mode mode, tree exp, 
  					  rtx target)
  {
    rtx expect, desired, mem;
--- 5270,5295 ----
    model = get_memmodel (CALL_EXPR_ARG (exp, 2));
    if (model == MEMMODEL_CONSUME)
      {
!       error ("invalid memory model for %<__atomic_exchange%>");
        return NULL_RTX;
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_atomic_exchange (target, mem, val, model);
  }
  
! /* Expand the __atomic_compare_exchange intrinsic:
!    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect, 
  					  TYPE desired, enum memmodel success,
  					  enum memmodel failure)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp, 
  					  rtx target)
  {
    rtx expect, desired, mem;
*************** expand_builtin_sync_mem_compare_exchange
*** 5298,5315 ****
  
    if (failure == MEMMODEL_RELEASE || failure == MEMMODEL_ACQ_REL)
      {
!       error ("invalid failure memory model for %<__sync_mem_compare_exchange%>");
        return NULL_RTX;
      }
  
    if (failure > success)
      {
!       error ("failure memory model cannot be stronger than success memory model for %<__sync_mem_compare_exchange%>");
        return NULL_RTX;
      }
    
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  
    expect = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, ptr_mode, 
  			EXPAND_NORMAL);
--- 5300,5317 ----
  
    if (failure == MEMMODEL_RELEASE || failure == MEMMODEL_ACQ_REL)
      {
!       error ("invalid failure memory model for %<__atomic_compare_exchange%>");
        return NULL_RTX;
      }
  
    if (failure > success)
      {
!       error ("failure memory model cannot be stronger than success memory model for %<__atomic_compare_exchange%>");
        return NULL_RTX;
      }
    
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
  
    expect = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, ptr_mode, 
  			EXPAND_NORMAL);
*************** expand_builtin_sync_mem_compare_exchange
*** 5317,5333 ****
  
    desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
  
!   return expand_sync_mem_compare_exchange (target, mem, expect, desired, 
  					   success, failure);
  }
  
! /* Expand the __sync_mem_load intrinsic:
!    	TYPE __sync_mem_load (TYPE *object, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_sync_mem_load (enum machine_mode mode, tree exp, rtx target)
  {
    rtx mem;
    enum memmodel model;
--- 5319,5335 ----
  
    desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
  
!   return expand_atomic_compare_exchange (target, mem, expect, desired, 
  					   success, failure);
  }
  
! /* Expand the __atomic_load intrinsic:
!    	TYPE __atomic_load (TYPE *object, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
  {
    rtx mem;
    enum memmodel model;
*************** expand_builtin_sync_mem_load (enum machi
*** 5336,5359 ****
    if (model == MEMMODEL_RELEASE
        || model == MEMMODEL_ACQ_REL)
      {
!       error ("invalid memory model for %<__sync_mem_load%>");
        return NULL_RTX;
      }
  
    /* Expand the operand.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  
!   return expand_sync_mem_load (target, mem, model);
  }
  
  
! /* Expand the __sync_mem_store intrinsic:
!    	void __sync_mem_store (TYPE *object, TYPE desired, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_sync_mem_store (enum machine_mode mode, tree exp)
  {
    rtx mem, val;
    enum memmodel model;
--- 5338,5361 ----
    if (model == MEMMODEL_RELEASE
        || model == MEMMODEL_ACQ_REL)
      {
!       error ("invalid memory model for %<__atomic_load%>");
        return NULL_RTX;
      }
  
    /* Expand the operand.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
  
!   return expand_atomic_load (target, mem, model);
  }
  
  
! /* Expand the __atomic_store intrinsic:
!    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.  */
  
  static rtx
! expand_builtin_atomic_store (enum machine_mode mode, tree exp)
  {
    rtx mem, val;
    enum memmodel model;
*************** expand_builtin_sync_mem_store (enum mach
*** 5363,5381 ****
        && model != MEMMODEL_SEQ_CST
        && model != MEMMODEL_RELEASE)
      {
!       error ("invalid memory model for %<__sync_mem_store%>");
        return NULL_RTX;
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_sync_mem_store (mem, val, model);
  }
  
! /* Expand the __sync_mem_fetch_XXX intrinsic:
!    	TYPE __sync_mem_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, NOT, XOR, or IOR.
--- 5365,5383 ----
        && model != MEMMODEL_SEQ_CST
        && model != MEMMODEL_RELEASE)
      {
!       error ("invalid memory model for %<__atomic_store%>");
        return NULL_RTX;
      }
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_atomic_store (mem, val, model);
  }
  
! /* Expand the __atomic_fetch_XXX intrinsic:
!    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
     EXP is the CALL_EXPR.
     TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, NOT, XOR, or IOR.
*************** expand_builtin_sync_mem_store (enum mach
*** 5383,5389 ****
     FETCH_AFTER is false if returning the value before the operation.  */
  
  static rtx
! expand_builtin_sync_mem_fetch_op (enum machine_mode mode, tree exp, rtx target,
  				  enum rtx_code code, bool fetch_after)
  {
    rtx val, mem;
--- 5385,5391 ----
     FETCH_AFTER is false if returning the value before the operation.  */
  
  static rtx
! expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
  				  enum rtx_code code, bool fetch_after)
  {
    rtx val, mem;
*************** expand_builtin_sync_mem_fetch_op (enum m
*** 5392,5406 ****
    model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  
    /* Expand the operands.  */
!   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_sync_mem_fetch_op (target, mem, val, code, model, fetch_after);
  }
  
  /* Return true if size ARG is always lock-free on this architecture.  */
  static tree
! fold_builtin_sync_mem_always_lock_free (tree arg)
  {
    int size;
    enum machine_mode mode;
--- 5394,5408 ----
    model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  
    /* Expand the operands.  */
!   mem = get_builtin_atomic (CALL_EXPR_ARG (exp, 0), mode);
    val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  
!   return expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
  }
  
  /* Return true if size ARG is always lock-free on this architecture.  */
  static tree
! fold_builtin_atomic_always_lock_free (tree arg)
  {
    int size;
    enum machine_mode mode;
*************** fold_builtin_sync_mem_always_lock_free (
*** 5426,5443 ****
   object that will always generate lock-free instructions on this target.
     Otherwise return false.  */
  static rtx
! expand_builtin_sync_mem_always_lock_free (tree exp)
  {
    tree size;
    tree arg = CALL_EXPR_ARG (exp, 0);
  
    if (TREE_CODE (arg) != INTEGER_CST)
      {
!       error ("non-constant argument to __sync_mem_always_lock_free");
        return const0_rtx;
      }
  
!   size = fold_builtin_sync_mem_always_lock_free (arg);
    if (size == integer_one_node)
      return const1_rtx;
    return const0_rtx;
--- 5428,5445 ----
   object that will always generate lock-free instructions on this target.
     Otherwise return false.  */
  static rtx
! expand_builtin_atomic_always_lock_free (tree exp)
  {
    tree size;
    tree arg = CALL_EXPR_ARG (exp, 0);
  
    if (TREE_CODE (arg) != INTEGER_CST)
      {
!       error ("non-constant argument to __atomic_always_lock_free");
        return const0_rtx;
      }
  
!   size = fold_builtin_atomic_always_lock_free (arg);
    if (size == integer_one_node)
      return const1_rtx;
    return const0_rtx;
*************** expand_builtin_sync_mem_always_lock_free
*** 5446,5454 ****
  /* Return one or zero if it can be determined that size ARG is lock-free on
     this architecture.  */
  static tree
! fold_builtin_sync_mem_is_lock_free (tree arg)
  {
!   tree always = fold_builtin_sync_mem_always_lock_free (arg);
  
    /* If it isn't always lock-free, don't generate a result.  */
    if (always == integer_one_node)
--- 5448,5456 ----
  /* Return one or zero if it can be determined that size ARG is lock-free on
     this architecture.  */
  static tree
! fold_builtin_atomic_is_lock_free (tree arg)
  {
!   tree always = fold_builtin_atomic_always_lock_free (arg);
  
    /* If it isn't always lock-free, don't generate a result.  */
    if (always == integer_one_node)
*************** fold_builtin_sync_mem_is_lock_free (tree
*** 5460,5478 ****
  /* Return one or zero if the first argument to call EXP represents a size of
     object that can generate lock-free instructions on this target.  */
  static rtx
! expand_builtin_sync_mem_is_lock_free (tree exp)
  {
    tree size;
    tree arg = CALL_EXPR_ARG (exp, 0);
  
    if (!INTEGRAL_TYPE_P (TREE_TYPE (arg)))
      {
!       error ("non-integer argument to __sync_mem_is_lock_free");
        return NULL_RTX;
      }
  
    /* If the value is known at compile time, return the RTX for it.  */
!   size = fold_builtin_sync_mem_is_lock_free (arg);
    if (size == integer_one_node)
      return const1_rtx;
  
--- 5462,5480 ----
  /* Return one or zero if the first argument to call EXP represents a size of
     object that can generate lock-free instructions on this target.  */
  static rtx
! expand_builtin_atomic_is_lock_free (tree exp)
  {
    tree size;
    tree arg = CALL_EXPR_ARG (exp, 0);
  
    if (!INTEGRAL_TYPE_P (TREE_TYPE (arg)))
      {
!       error ("non-integer argument to __atomic_is_lock_free");
        return NULL_RTX;
      }
  
    /* If the value is known at compile time, return the RTX for it.  */
!   size = fold_builtin_atomic_is_lock_free (arg);
    if (size == integer_one_node)
      return const1_rtx;
  
*************** expand_builtin_mem_thread_fence (enum me
*** 5493,5504 ****
  #endif
  }
  
! /* Expand the __sync_mem_thread_fence intrinsic:
!    	void __sync_mem_thread_fence (enum memmodel)
     EXP is the CALL_EXPR.  */
  
  static void
! expand_builtin_sync_mem_thread_fence (tree exp)
  {
    enum memmodel model;
    
--- 5495,5506 ----
  #endif
  }
  
! /* Expand the __atomic_thread_fence intrinsic:
!    	void __atomic_thread_fence (enum memmodel)
     EXP is the CALL_EXPR.  */
  
  static void
! expand_builtin_atomic_thread_fence (tree exp)
  {
    enum memmodel model;
    
*************** expand_builtin_mem_signal_fence (enum me
*** 5520,5531 ****
  #endif
  }
  
! /* Expand the __sync_mem_signal_fence intrinsic:
!    	void __sync_mem_signal_fence (enum memmodel)
     EXP is the CALL_EXPR.  */
  
  static void
! expand_builtin_sync_mem_signal_fence (tree exp)
  {
    enum memmodel model;
  
--- 5522,5533 ----
  #endif
  }
  
! /* Expand the __atomic_signal_fence intrinsic:
!    	void __atomic_signal_fence (enum memmodel)
     EXP is the CALL_EXPR.  */
  
  static void
! expand_builtin_atomic_signal_fence (tree exp)
  {
    enum memmodel model;
  
*************** expand_builtin (tree exp, rtx target, rt
*** 6344,6545 ****
        expand_builtin_sync_synchronize ();
        return const0_rtx;
  
!     case BUILT_IN_SYNC_MEM_EXCHANGE_1:
!     case BUILT_IN_SYNC_MEM_EXCHANGE_2:
!     case BUILT_IN_SYNC_MEM_EXCHANGE_4:
!     case BUILT_IN_SYNC_MEM_EXCHANGE_8:
!     case BUILT_IN_SYNC_MEM_EXCHANGE_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_EXCHANGE_1);
!       target = expand_builtin_sync_mem_exchange (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_1:
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_2:
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_4:
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_8:
!     case BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_16:
        mode = 
! 	  get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_1);
!       target = expand_builtin_sync_mem_compare_exchange (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_LOAD_1:
!     case BUILT_IN_SYNC_MEM_LOAD_2:
!     case BUILT_IN_SYNC_MEM_LOAD_4:
!     case BUILT_IN_SYNC_MEM_LOAD_8:
!     case BUILT_IN_SYNC_MEM_LOAD_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_LOAD_1);
!       target = expand_builtin_sync_mem_load (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_STORE_1:
!     case BUILT_IN_SYNC_MEM_STORE_2:
!     case BUILT_IN_SYNC_MEM_STORE_4:
!     case BUILT_IN_SYNC_MEM_STORE_8:
!     case BUILT_IN_SYNC_MEM_STORE_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_STORE_1);
!       target = expand_builtin_sync_mem_store (mode, exp);
        if (target)
  	return const0_rtx;
        break;
  
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_1:
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_2:
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_4:
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_8:
!     case BUILT_IN_SYNC_MEM_ADD_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_ADD_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, PLUS, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_1:
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_2:
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_4:
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_8:
!     case BUILT_IN_SYNC_MEM_SUB_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_SUB_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, MINUS, 
  						 true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_AND_FETCH_1:
!     case BUILT_IN_SYNC_MEM_AND_FETCH_2:
!     case BUILT_IN_SYNC_MEM_AND_FETCH_4:
!     case BUILT_IN_SYNC_MEM_AND_FETCH_8:
!     case BUILT_IN_SYNC_MEM_AND_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_AND_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, AND, true);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_1:
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_2:
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_4:
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_8:
!     case BUILT_IN_SYNC_MEM_NAND_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_NAND_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, NOT, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_1:
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_2:
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_4:
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_8:
!     case BUILT_IN_SYNC_MEM_XOR_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_XOR_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, XOR, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_OR_FETCH_1:
!     case BUILT_IN_SYNC_MEM_OR_FETCH_2:
!     case BUILT_IN_SYNC_MEM_OR_FETCH_4:
!     case BUILT_IN_SYNC_MEM_OR_FETCH_8:
!     case BUILT_IN_SYNC_MEM_OR_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_OR_FETCH_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, IOR, true);
        if (target)
  	return target;
        break;
   
  
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_1:
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_2:
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_4:
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_8:
!     case BUILT_IN_SYNC_MEM_FETCH_ADD_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_ADD_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, PLUS,
  						 false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_1:
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_2:
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_4:
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_8:
!     case BUILT_IN_SYNC_MEM_FETCH_SUB_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_SUB_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, MINUS,
  						 false);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_FETCH_AND_1:
!     case BUILT_IN_SYNC_MEM_FETCH_AND_2:
!     case BUILT_IN_SYNC_MEM_FETCH_AND_4:
!     case BUILT_IN_SYNC_MEM_FETCH_AND_8:
!     case BUILT_IN_SYNC_MEM_FETCH_AND_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_AND_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, AND, false);
        if (target)
  	return target;
        break;
    
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_1:
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_2:
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_4:
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_8:
!     case BUILT_IN_SYNC_MEM_FETCH_NAND_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_NAND_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, NOT, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_1:
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_2:
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_4:
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_8:
!     case BUILT_IN_SYNC_MEM_FETCH_XOR_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_XOR_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, XOR, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_FETCH_OR_1:
!     case BUILT_IN_SYNC_MEM_FETCH_OR_2:
!     case BUILT_IN_SYNC_MEM_FETCH_OR_4:
!     case BUILT_IN_SYNC_MEM_FETCH_OR_8:
!     case BUILT_IN_SYNC_MEM_FETCH_OR_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_MEM_FETCH_OR_1);
!       target = expand_builtin_sync_mem_fetch_op (mode, exp, target, IOR, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_SYNC_MEM_ALWAYS_LOCK_FREE:
!       return expand_builtin_sync_mem_always_lock_free (exp);
  
!     case BUILT_IN_SYNC_MEM_IS_LOCK_FREE:
!       target = expand_builtin_sync_mem_is_lock_free (exp);
        if (target)
          return target;
        break;
  
!     case BUILT_IN_SYNC_MEM_THREAD_FENCE:
!       expand_builtin_sync_mem_thread_fence (exp);
        return const0_rtx;
  
!     case BUILT_IN_SYNC_MEM_SIGNAL_FENCE:
!       expand_builtin_sync_mem_signal_fence (exp);
        return const0_rtx;
  
      case BUILT_IN_OBJECT_SIZE:
--- 6346,6547 ----
        expand_builtin_sync_synchronize ();
        return const0_rtx;
  
!     case BUILT_IN_ATOMIC_EXCHANGE_1:
!     case BUILT_IN_ATOMIC_EXCHANGE_2:
!     case BUILT_IN_ATOMIC_EXCHANGE_4:
!     case BUILT_IN_ATOMIC_EXCHANGE_8:
!     case BUILT_IN_ATOMIC_EXCHANGE_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
!       target = expand_builtin_atomic_exchange (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
!     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
        mode = 
! 	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
!       target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_ATOMIC_LOAD_1:
!     case BUILT_IN_ATOMIC_LOAD_2:
!     case BUILT_IN_ATOMIC_LOAD_4:
!     case BUILT_IN_ATOMIC_LOAD_8:
!     case BUILT_IN_ATOMIC_LOAD_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
!       target = expand_builtin_atomic_load (mode, exp, target);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_ATOMIC_STORE_1:
!     case BUILT_IN_ATOMIC_STORE_2:
!     case BUILT_IN_ATOMIC_STORE_4:
!     case BUILT_IN_ATOMIC_STORE_8:
!     case BUILT_IN_ATOMIC_STORE_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
!       target = expand_builtin_atomic_store (mode, exp);
        if (target)
  	return const0_rtx;
        break;
  
!     case BUILT_IN_ATOMIC_ADD_FETCH_1:
!     case BUILT_IN_ATOMIC_ADD_FETCH_2:
!     case BUILT_IN_ATOMIC_ADD_FETCH_4:
!     case BUILT_IN_ATOMIC_ADD_FETCH_8:
!     case BUILT_IN_ATOMIC_ADD_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_SUB_FETCH_1:
!     case BUILT_IN_ATOMIC_SUB_FETCH_2:
!     case BUILT_IN_ATOMIC_SUB_FETCH_4:
!     case BUILT_IN_ATOMIC_SUB_FETCH_8:
!     case BUILT_IN_ATOMIC_SUB_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, 
  						 true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_AND_FETCH_1:
!     case BUILT_IN_ATOMIC_AND_FETCH_2:
!     case BUILT_IN_ATOMIC_AND_FETCH_4:
!     case BUILT_IN_ATOMIC_AND_FETCH_8:
!     case BUILT_IN_ATOMIC_AND_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_ATOMIC_NAND_FETCH_1:
!     case BUILT_IN_ATOMIC_NAND_FETCH_2:
!     case BUILT_IN_ATOMIC_NAND_FETCH_4:
!     case BUILT_IN_ATOMIC_NAND_FETCH_8:
!     case BUILT_IN_ATOMIC_NAND_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_XOR_FETCH_1:
!     case BUILT_IN_ATOMIC_XOR_FETCH_2:
!     case BUILT_IN_ATOMIC_XOR_FETCH_4:
!     case BUILT_IN_ATOMIC_XOR_FETCH_8:
!     case BUILT_IN_ATOMIC_XOR_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_OR_FETCH_1:
!     case BUILT_IN_ATOMIC_OR_FETCH_2:
!     case BUILT_IN_ATOMIC_OR_FETCH_4:
!     case BUILT_IN_ATOMIC_OR_FETCH_8:
!     case BUILT_IN_ATOMIC_OR_FETCH_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true);
        if (target)
  	return target;
        break;
   
  
!     case BUILT_IN_ATOMIC_FETCH_ADD_1:
!     case BUILT_IN_ATOMIC_FETCH_ADD_2:
!     case BUILT_IN_ATOMIC_FETCH_ADD_4:
!     case BUILT_IN_ATOMIC_FETCH_ADD_8:
!     case BUILT_IN_ATOMIC_FETCH_ADD_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS,
  						 false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_FETCH_SUB_1:
!     case BUILT_IN_ATOMIC_FETCH_SUB_2:
!     case BUILT_IN_ATOMIC_FETCH_SUB_4:
!     case BUILT_IN_ATOMIC_FETCH_SUB_8:
!     case BUILT_IN_ATOMIC_FETCH_SUB_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS,
  						 false);
        if (target)
  	return target;
        break;
  
!     case BUILT_IN_ATOMIC_FETCH_AND_1:
!     case BUILT_IN_ATOMIC_FETCH_AND_2:
!     case BUILT_IN_ATOMIC_FETCH_AND_4:
!     case BUILT_IN_ATOMIC_FETCH_AND_8:
!     case BUILT_IN_ATOMIC_FETCH_AND_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false);
        if (target)
  	return target;
        break;
    
!     case BUILT_IN_ATOMIC_FETCH_NAND_1:
!     case BUILT_IN_ATOMIC_FETCH_NAND_2:
!     case BUILT_IN_ATOMIC_FETCH_NAND_4:
!     case BUILT_IN_ATOMIC_FETCH_NAND_8:
!     case BUILT_IN_ATOMIC_FETCH_NAND_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_FETCH_XOR_1:
!     case BUILT_IN_ATOMIC_FETCH_XOR_2:
!     case BUILT_IN_ATOMIC_FETCH_XOR_4:
!     case BUILT_IN_ATOMIC_FETCH_XOR_8:
!     case BUILT_IN_ATOMIC_FETCH_XOR_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_FETCH_OR_1:
!     case BUILT_IN_ATOMIC_FETCH_OR_2:
!     case BUILT_IN_ATOMIC_FETCH_OR_4:
!     case BUILT_IN_ATOMIC_FETCH_OR_8:
!     case BUILT_IN_ATOMIC_FETCH_OR_16:
!       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
!       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false);
        if (target)
  	return target;
        break;
   
!     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
!       return expand_builtin_atomic_always_lock_free (exp);
  
!     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
!       target = expand_builtin_atomic_is_lock_free (exp);
        if (target)
          return target;
        break;
  
!     case BUILT_IN_ATOMIC_THREAD_FENCE:
!       expand_builtin_atomic_thread_fence (exp);
        return const0_rtx;
  
!     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
!       expand_builtin_atomic_signal_fence (exp);
        return const0_rtx;
  
      case BUILT_IN_OBJECT_SIZE:
*************** fold_builtin_1 (location_t loc, tree fnd
*** 10378,10388 ****
  	return build_empty_stmt (loc);
        break;
  
!     case BUILT_IN_SYNC_MEM_ALWAYS_LOCK_FREE:
!       return fold_builtin_sync_mem_always_lock_free (arg0);
  
!     case BUILT_IN_SYNC_MEM_IS_LOCK_FREE:
!       return fold_builtin_sync_mem_is_lock_free (arg0);
  
      default:
        break;
--- 10380,10390 ----
  	return build_empty_stmt (loc);
        break;
  
!     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
!       return fold_builtin_atomic_always_lock_free (arg0);
  
!     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
!       return fold_builtin_atomic_is_lock_free (arg0);
  
      default:
        break;
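
For reference, a sketch of the two lock-free queries as renamed above: per
the builtins.c expanders, __atomic_always_lock_free requires a
compile-time-constant size and folds to a constant, while
__atomic_is_lock_free may have to be resolved at run time:

  int
  int_is_lock_free (void)
  {
    if (__atomic_always_lock_free (sizeof (int)))
      return 1;	/* Folded to 1 at compile time when known.  */
    return __atomic_is_lock_free (sizeof (int));
  }
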
Index: gcc/sync-builtins.def
===================================================================
*** gcc/sync-builtins.def	(revision 179946)
--- gcc/sync-builtins.def	(working copy)
*************** DEF_SYNC_BUILTIN (BUILT_IN_SYNC_SYNCHRON
*** 259,583 ****
  
! /* __sync* builtins for the C++ memory model.  */
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_N,
! 		  "__sync_mem_exchange",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_1,
! 		  "__sync_mem_exchange_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_2,
! 		  "__sync_mem_exchange_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_4,
! 		  "__sync_mem_exchange_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_8,
! 		  "__sync_mem_exchange_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_EXCHANGE_16,
! 		  "__sync_mem_exchange_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_N,
! 		  "__sync_mem_load",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_1,
! 		  "__sync_mem_load_1",
  		  BT_FN_I1_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_2,
! 		  "__sync_mem_load_2",
  		  BT_FN_I2_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_4,
! 		  "__sync_mem_load_4",
  		  BT_FN_I4_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_8,
! 		  "__sync_mem_load_8",
  		  BT_FN_I8_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_LOAD_16,
! 		  "__sync_mem_load_16",
  		  BT_FN_I16_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_N,
! 		  "__sync_mem_compare_exchange",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_1,
! 		  "__sync_mem_compare_exchange_1",
  		  BT_FN_BOOL_VPTR_PTR_I1_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_2,
! 		  "__sync_mem_compare_exchange_2",
  		  BT_FN_BOOL_VPTR_PTR_I2_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_4,
! 		  "__sync_mem_compare_exchange_4",
  		  BT_FN_BOOL_VPTR_PTR_I4_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_8,
! 		  "__sync_mem_compare_exchange_8",
  		  BT_FN_BOOL_VPTR_PTR_I8_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_COMPARE_EXCHANGE_16,
! 		  "__sync_mem_compare_exchange_16",
  		  BT_FN_BOOL_VPTR_PTR_I16_INT_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_N,
! 		  "__sync_mem_store",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_1,
! 		  "__sync_mem_store_1",
  		  BT_FN_VOID_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_2,
! 		  "__sync_mem_store_2",
  		  BT_FN_VOID_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_4,
! 		  "__sync_mem_store_4",
  		  BT_FN_VOID_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_8,
! 		  "__sync_mem_store_8",
  		  BT_FN_VOID_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_STORE_16,
! 		  "__sync_mem_store_16",
  		  BT_FN_VOID_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_N,
! 		  "__sync_mem_add_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_1,
! 		  "__sync_mem_add_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_2,
! 		  "__sync_mem_add_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_4,
! 		  "__sync_mem_add_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_8,
! 		  "__sync_mem_add_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ADD_FETCH_16,
! 		  "__sync_mem_add_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_N,
! 		  "__sync_mem_sub_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_1,
! 		  "__sync_mem_sub_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_2,
! 		  "__sync_mem_sub_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_4,
! 		  "__sync_mem_sub_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_8,
! 		  "__sync_mem_sub_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SUB_FETCH_16,
! 		  "__sync_mem_sub_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_N,
! 		  "__sync_mem_and_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_1,
! 		  "__sync_mem_and_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_2,
! 		  "__sync_mem_and_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_4,
! 		  "__sync_mem_and_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_8,
! 		  "__sync_mem_and_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_AND_FETCH_16,
! 		  "__sync_mem_and_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_N,
! 		  "__sync_mem_nand_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_1,
! 		  "__sync_mem_nand_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_2,
! 		  "__sync_mem_nand_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_4,
! 		  "__sync_mem_nand_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_8,
! 		  "__sync_mem_nand_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_NAND_FETCH_16,
! 		  "__sync_mem_nand_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_N,
! 		  "__sync_mem_xor_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_1,
! 		  "__sync_mem_xor_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_2,
! 		  "__sync_mem_xor_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_4,
! 		  "__sync_mem_xor_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_8,
! 		  "__sync_mem_xor_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_XOR_FETCH_16,
! 		  "__sync_mem_xor_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_N,
! 		  "__sync_mem_or_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_1,
! 		  "__sync_mem_or_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_2,
! 		  "__sync_mem_or_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_4,
! 		  "__sync_mem_or_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_8,
! 		  "__sync_mem_or_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_OR_FETCH_16,
! 		  "__sync_mem_or_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_N,
! 		  "__sync_mem_fetch_add",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_1,
! 		  "__sync_mem_fetch_add_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_2,
! 		  "__sync_mem_fetch_add_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_4,
! 		  "__sync_mem_fetch_add_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_8,
! 		  "__sync_mem_fetch_add_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_ADD_16,
! 		  "__sync_mem_fetch_add_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_N,
! 		  "__sync_mem_fetch_sub",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_1,
! 		  "__sync_mem_fetch_sub_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_2,
! 		  "__sync_mem_fetch_sub_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_4,
! 		  "__sync_mem_fetch_sub_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_8,
! 		  "__sync_mem_fetch_sub_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_SUB_16,
! 		  "__sync_mem_fetch_sub_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_N,
! 		  "__sync_mem_fetch_and",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_1,
! 		  "__sync_mem_fetch_and_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_2,
! 		  "__sync_mem_fetch_and_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_4,
! 		  "__sync_mem_fetch_and_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_8,
! 		  "__sync_mem_fetch_and_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_AND_16,
! 		  "__sync_mem_fetch_and_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_N,
! 		  "__sync_mem_fetch_nand",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_1,
! 		  "__sync_mem_fetch_nand_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_2,
! 		  "__sync_mem_fetch_nand_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_4,
! 		  "__sync_mem_fetch_nand_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_8,
! 		  "__sync_mem_fetch_nand_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_NAND_16,
! 		  "__sync_mem_fetch_nand_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_N,
! 		  "__sync_mem_fetch_xor",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_1,
! 		  "__sync_mem_fetch_xor_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_2,
! 		  "__sync_mem_fetch_xor_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_4,
! 		  "__sync_mem_fetch_xor_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_8,
! 		  "__sync_mem_fetch_xor_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_XOR_16,
! 		  "__sync_mem_fetch_xor_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_N,
! 		  "__sync_mem_fetch_or",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_1,
! 		  "__sync_mem_fetch_or_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_2,
! 		  "__sync_mem_fetch_or_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_4,
! 		  "__sync_mem_fetch_or_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_8,
! 		  "__sync_mem_fetch_or_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_FETCH_OR_16,
! 		  "__sync_mem_fetch_or_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_ALWAYS_LOCK_FREE,
! 		  "__sync_mem_always_lock_free",
  		  BT_FN_BOOL_SIZE, ATTR_CONST_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_IS_LOCK_FREE,
! 		  "__sync_mem_is_lock_free",
  		  BT_FN_BOOL_SIZE, ATTR_CONST_NOTHROW_LEAF_LIST)
  
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_THREAD_FENCE,
! 		  "__sync_mem_thread_fence",
  		  BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_SYNC_MEM_SIGNAL_FENCE,
! 		  "__sync_mem_signal_fence",
  		  BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
  
--- 259,583 ----
  
! /* __atomic* builtins for the C++ memory model.  */
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_N,
! 		  "__atomic_exchange",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_1,
! 		  "__atomic_exchange_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_2,
! 		  "__atomic_exchange_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_4,
! 		  "__atomic_exchange_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_8,
! 		  "__atomic_exchange_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_EXCHANGE_16,
! 		  "__atomic_exchange_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_N,
! 		  "__atomic_load",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_1,
! 		  "__atomic_load_1",
  		  BT_FN_I1_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_2,
! 		  "__atomic_load_2",
  		  BT_FN_I2_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_4,
! 		  "__atomic_load_4",
  		  BT_FN_I4_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_8,
! 		  "__atomic_load_8",
  		  BT_FN_I8_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_LOAD_16,
! 		  "__atomic_load_16",
  		  BT_FN_I16_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N,
! 		  "__atomic_compare_exchange",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1,
! 		  "__atomic_compare_exchange_1",
  		  BT_FN_BOOL_VPTR_PTR_I1_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2,
! 		  "__atomic_compare_exchange_2",
  		  BT_FN_BOOL_VPTR_PTR_I2_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4,
! 		  "__atomic_compare_exchange_4",
  		  BT_FN_BOOL_VPTR_PTR_I4_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8,
! 		  "__atomic_compare_exchange_8",
  		  BT_FN_BOOL_VPTR_PTR_I8_INT_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16,
! 		  "__atomic_compare_exchange_16",
  		  BT_FN_BOOL_VPTR_PTR_I16_INT_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_N,
! 		  "__atomic_store",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_1,
! 		  "__atomic_store_1",
  		  BT_FN_VOID_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_2,
! 		  "__atomic_store_2",
  		  BT_FN_VOID_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_4,
! 		  "__atomic_store_4",
  		  BT_FN_VOID_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_8,
! 		  "__atomic_store_8",
  		  BT_FN_VOID_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_STORE_16,
! 		  "__atomic_store_16",
  		  BT_FN_VOID_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_N,
! 		  "__atomic_add_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_1,
! 		  "__atomic_add_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_2,
! 		  "__atomic_add_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_4,
! 		  "__atomic_add_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_8,
! 		  "__atomic_add_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ADD_FETCH_16,
! 		  "__atomic_add_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_N,
! 		  "__atomic_sub_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_1,
! 		  "__atomic_sub_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_2,
! 		  "__atomic_sub_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_4,
! 		  "__atomic_sub_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_8,
! 		  "__atomic_sub_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SUB_FETCH_16,
! 		  "__atomic_sub_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_N,
! 		  "__atomic_and_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_1,
! 		  "__atomic_and_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_2,
! 		  "__atomic_and_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_4,
! 		  "__atomic_and_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_8,
! 		  "__atomic_and_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_AND_FETCH_16,
! 		  "__atomic_and_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_N,
! 		  "__atomic_nand_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_1,
! 		  "__atomic_nand_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_2,
! 		  "__atomic_nand_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_4,
! 		  "__atomic_nand_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_8,
! 		  "__atomic_nand_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_NAND_FETCH_16,
! 		  "__atomic_nand_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_N,
! 		  "__atomic_xor_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_1,
! 		  "__atomic_xor_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_2,
! 		  "__atomic_xor_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_4,
! 		  "__atomic_xor_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_8,
! 		  "__atomic_xor_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_XOR_FETCH_16,
! 		  "__atomic_xor_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_N,
! 		  "__atomic_or_fetch",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_1,
! 		  "__atomic_or_fetch_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_2,
! 		  "__atomic_or_fetch_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_4,
! 		  "__atomic_or_fetch_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_8,
! 		  "__atomic_or_fetch_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_OR_FETCH_16,
! 		  "__atomic_or_fetch_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_N,
! 		  "__atomic_fetch_add",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_1,
! 		  "__atomic_fetch_add_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_2,
! 		  "__atomic_fetch_add_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_4,
! 		  "__atomic_fetch_add_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_8,
! 		  "__atomic_fetch_add_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_ADD_16,
! 		  "__atomic_fetch_add_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_N,
! 		  "__atomic_fetch_sub",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_1,
! 		  "__atomic_fetch_sub_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_2,
! 		  "__atomic_fetch_sub_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_4,
! 		  "__atomic_fetch_sub_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_8,
! 		  "__atomic_fetch_sub_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_SUB_16,
! 		  "__atomic_fetch_sub_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_N,
! 		  "__atomic_fetch_and",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_1,
! 		  "__atomic_fetch_and_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_2,
! 		  "__atomic_fetch_and_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_4,
! 		  "__atomic_fetch_and_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_8,
! 		  "__atomic_fetch_and_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_AND_16,
! 		  "__atomic_fetch_and_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_N,
! 		  "__atomic_fetch_nand",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_1,
! 		  "__atomic_fetch_nand_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_2,
! 		  "__atomic_fetch_nand_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_4,
! 		  "__atomic_fetch_nand_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_8,
! 		  "__atomic_fetch_nand_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_NAND_16,
! 		  "__atomic_fetch_nand_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_N,
! 		  "__atomic_fetch_xor",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_1,
! 		  "__atomic_fetch_xor_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_2,
! 		  "__atomic_fetch_xor_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_4,
! 		  "__atomic_fetch_xor_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_8,
! 		  "__atomic_fetch_xor_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_XOR_16,
! 		  "__atomic_fetch_xor_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_N,
! 		  "__atomic_fetch_or",
  		  BT_FN_VOID_VAR, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_1,
! 		  "__atomic_fetch_or_1",
  		  BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_2,
! 		  "__atomic_fetch_or_2",
  		  BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_4,
! 		  "__atomic_fetch_or_4",
  		  BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_8,
! 		  "__atomic_fetch_or_8",
  		  BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_FETCH_OR_16,
! 		  "__atomic_fetch_or_16",
  		  BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE,
! 		  "__atomic_always_lock_free",
  		  BT_FN_BOOL_SIZE, ATTR_CONST_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_IS_LOCK_FREE,
! 		  "__atomic_is_lock_free",
  		  BT_FN_BOOL_SIZE, ATTR_CONST_NOTHROW_LEAF_LIST)
  
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_THREAD_FENCE,
! 		  "__atomic_thread_fence",
  		  BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
  
! DEF_SYNC_BUILTIN (BUILT_IN_ATOMIC_SIGNAL_FENCE,
! 		  "__atomic_signal_fence",
  		  BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
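
For a sense of how the size-suffixed entries above surface to users, here is
a minimal, hedged usage sketch.  It is written against the __atomic API as it
later shipped in GCC 4.7 (the fetch-op spellings below are unchanged from this
patch), and the variable names are illustrative, not taken from the patch:

#include <inttypes.h>
#include <stdio.h>

int
main (void)
{
  uint32_t counter = 0;
  uint8_t flags = 0xF0;

  /* Resolves to BUILT_IN_ATOMIC_FETCH_ADD_4 for this operand size.  */
  uint32_t old = __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);

  /* Resolves to BUILT_IN_ATOMIC_AND_FETCH_1.  */
  uint8_t masked = __atomic_and_fetch (&flags, 0x0F, __ATOMIC_ACQ_REL);

  printf ("old=%" PRIu32 " counter=%" PRIu32 " masked=0x%x\n",
	  old, counter, masked);
  return 0;
}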
  
Index: gcc/expr.h
===================================================================
*** gcc/expr.h	(revision 179922)
--- gcc/expr.h	(working copy)
*************** rtx expand_bool_compare_and_swap (rtx, r
*** 217,231 ****
  rtx expand_sync_operation (rtx, rtx, enum rtx_code);
  rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
  
! rtx expand_sync_mem_exchange (rtx, rtx, rtx, enum memmodel);
! rtx expand_sync_mem_compare_exchange (rtx, rtx, rtx, rtx, enum memmodel, 
  				      enum memmodel);
! rtx expand_sync_mem_load (rtx, rtx, enum memmodel);
! rtx expand_sync_mem_store (rtx, rtx, enum memmodel);
! rtx expand_sync_mem_fetch_op (rtx, rtx, rtx, enum rtx_code, enum memmodel, 
  			      bool);
! void expand_sync_mem_thread_fence (enum memmodel);
! void expand_sync_mem_signal_fence (enum memmodel);
  
  \f
  /* Functions from expmed.c:  */
--- 217,231 ----
  rtx expand_sync_operation (rtx, rtx, enum rtx_code);
  rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
  
! rtx expand_atomic_exchange (rtx, rtx, rtx, enum memmodel);
! rtx expand_atomic_compare_exchange (rtx, rtx, rtx, rtx, enum memmodel, 
  				      enum memmodel);
! rtx expand_atomic_load (rtx, rtx, enum memmodel);
! rtx expand_atomic_store (rtx, rtx, enum memmodel);
! rtx expand_atomic_fetch_op (rtx, rtx, rtx, enum rtx_code, enum memmodel, 
  			      bool);
! void expand_atomic_thread_fence (enum memmodel);
! void expand_atomic_signal_fence (enum memmodel);
  
  \f
  /* Functions from expmed.c:  */
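
To connect these prototypes to their callers, a hedged sketch of user-level
code whose expansion conceptually lands in expand_atomic_compare_exchange and
expand_atomic_store.  The _n spellings are those of the API as later released
in GCC 4.7; at the time of this patch the unsuffixed __atomic names played
that role, and the toy spinlock below is illustrative, not from the patch:

#include <stdbool.h>

static int lock_word;

bool
try_lock (void)
{
  int expected = 0;
  /* Compare-and-swap 0 -> 1: acquire on success, relaxed on failure;
     conceptually expand_atomic_compare_exchange.  */
  return __atomic_compare_exchange_n (&lock_word, &expected, 1,
				      false, __ATOMIC_ACQUIRE,
				      __ATOMIC_RELAXED);
}

void
unlock (void)
{
  /* Release store; conceptually expand_atomic_store.  */
  __atomic_store_n (&lock_word, 0, __ATOMIC_RELEASE);
}
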
Index: gcc/coretypes.h
===================================================================
*** gcc/coretypes.h	(revision 179922)
--- gcc/coretypes.h	(working copy)
*************** union _dont_use_tree_here_;
*** 181,187 ****
  
  #endif
  
! /* Memory model types for the __sync_mem* builtins. 
     This must match the order in libstdc++-v3/include/bits/atomic_base.h.  */
  enum memmodel
  {
--- 181,187 ----
  
  #endif
  
! /* Memory model types for the __atomic* builtins. 
     This must match the order in libstdc++-v3/include/bits/atomic_base.h.  */
  enum memmodel
  {
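
The enumerators themselves fall outside this hunk; for reference, a hedged
reconstruction of the ordering the comment requires, lined up against C++11's
memory_order as declared in atomic_base.h (reconstructed from GCC sources of
this era, not part of the hunk above):

enum memmodel
{
  MEMMODEL_RELAXED,	/* memory_order_relaxed  */
  MEMMODEL_CONSUME,	/* memory_order_consume  */
  MEMMODEL_ACQUIRE,	/* memory_order_acquire  */
  MEMMODEL_RELEASE,	/* memory_order_release  */
  MEMMODEL_ACQ_REL,	/* memory_order_acq_rel  */
  MEMMODEL_SEQ_CST,	/* memory_order_seq_cst  */
  MEMMODEL_LAST		/* Sentinel; no C++ counterpart.  */
};
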
Index: gcc/config/i386/sync.md
===================================================================
*** gcc/config/i386/sync.md	(revision 179922)
--- gcc/config/i386/sync.md	(working copy)
***************
*** 232,238 ****
    return "lock{%;} add{<imodesuffix>}\t{%1, %0|%0, %1}";
  })
  
! (define_expand "sync_mem_exchange<mode>"
    [(match_operand:SWI 0 "register_operand" "")		;; output
     (match_operand:SWI 1 "memory_operand" "")		;; memory
     (match_operand:SWI 2 "register_operand" "")		;; input
--- 232,238 ----
    return "lock{%;} add{<imodesuffix>}\t{%1, %0|%0, %1}";
  })
  
! (define_expand "atomic_exchange<mode>"
    [(match_operand:SWI 0 "register_operand" "")		;; output
     (match_operand:SWI 1 "memory_operand" "")		;; memory
     (match_operand:SWI 2 "register_operand" "")		;; input
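
As a hedged aside on what the renamed expander ultimately emits: on x86 an
xchg with a memory operand is implicitly locked, so an atomic exchange needs
no explicit lock prefix.  An illustrative correspondence follows (the source
uses the _n spelling as later released; the assembly is typical output, not
copied from the patch):

int
swap_in (int *p, int v)
{
  return __atomic_exchange_n (p, v, __ATOMIC_SEQ_CST);
}

/* Typical x86-64 output (illustrative):
	movl	%esi, %eax
	xchgl	%eax, (%rdi)	# lock is implicit for xchg with memory
	ret
*/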


* Re: [cxx-mem-model] Rename __sync_mem to __atomic
  2011-10-14 18:33 [cxx-mem-model] Rename __sync_mem to __atomic Andrew MacLeod
@ 2011-10-18 10:03 ` Eric Botcazou
  2011-10-18 12:24   ` Andrew MacLeod
  0 siblings, 1 reply; 3+ messages in thread
From: Eric Botcazou @ 2011-10-18 10:03 UTC (permalink / raw)
  To: Andrew MacLeod; +Cc: gcc-patches, Richard Henderson

> In order to align with a standard naming convention, this patch renames
> all the __sync_mem references to __atomic.
>
> The only real addition is in builtins.c::is_builtin_name needed to know
> that __atomic_ is also a builtin prefix.

Are you sure about renaming get_builtin_sync_mem -> get_builtin_atomic?

-- 
Eric Botcazou


* Re: [cxx-mem-model] Rename __sync_mem to __atomic
  2011-10-18 10:03 ` Eric Botcazou
@ 2011-10-18 12:24   ` Andrew MacLeod
  0 siblings, 0 replies; 3+ messages in thread
From: Andrew MacLeod @ 2011-10-18 12:24 UTC (permalink / raw)
  To: Eric Botcazou; +Cc: gcc-patches, Richard Henderson

>> In order to align with a standard naming convention, this patch renames
>> all the __sync_mem references to __atomic.
>>
>> The only real addition is in builtins.c::is_builtin_name needed to know
>> that __atomic_ is also a builtin prefix.
> Are you sure about renaming get_builtin_sync_mem -> get_builtin_atomic?
>
Heh, yeah, you're right.  It should retain the old name, since that helper
is still used by the __sync_* builtins as well.  I'll fix that along with
some minor formatting issues I've come across.

Andrew

