From patchwork Wed May 25 23:04:12 2016 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Patchwork-Submitter: Torvald Riegel X-Patchwork-Id: 12522 X-Patchwork-Delegate: fweimer@redhat.com Received: (qmail 9199 invoked by alias); 25 May 2016 23:04:22 -0000 Mailing-List: contact libc-alpha-help@sourceware.org; run by ezmlm Precedence: bulk List-Id: List-Unsubscribe: List-Subscribe: List-Archive: List-Post: List-Help: , Sender: libc-alpha-owner@sourceware.org Delivered-To: mailing list libc-alpha@sourceware.org Received: (qmail 9122 invoked by uid 89); 25 May 2016 23:04:21 -0000 Authentication-Results: sourceware.org; auth=none X-Virus-Found: No X-Spam-SWARE-Status: No, score=-3.3 required=5.0 tests=BAYES_00, RP_MATCHES_RCVD, SPF_HELO_PASS autolearn=ham version=3.3.2 spammy=2016-05-26, 20160526, Contains, 7516 X-HELO: mx1.redhat.com Message-ID: <1464217452.1779.67.camel@localhost.localdomain> Subject: [PATCH] Add atomic operations required by the new condition variable. From: Torvald Riegel To: GLIBC Devel Date: Thu, 26 May 2016 01:04:12 +0200 Mime-Version: 1.0 This adds just a few atomic operations required by the new condvar. Our policy is to add atomic operations as needed, and this patch does just that. Tested on x86-linux and x86_64-linux using the new condvar. 2016-05-26 Torvald Riegel * include/atomic.h (atomic_exchange_relaxed, atomic_fetch_and_relaxed, atomic_fetch_and_release, atomic_fetch_or_release, atomic_fetch_xor_release): New. commit ce627255c26efddbcc18f090dd4154a6740441f5 Author: Torvald Riegel Date: Thu May 26 00:57:27 2016 +0200 Add atomic operations required by the new condition variable. * include/atomic.h (atomic_exchange_relaxed, atomic_fetch_and_relaxed, atomic_fetch_and_release, atomic_fetch_or_release, atomic_fetch_xor_release): New. 
diff --git a/include/atomic.h b/include/atomic.h index 5e8bfff..4e18ed0 100644 --- a/include/atomic.h +++ b/include/atomic.h @@ -605,6 +605,9 @@ void __atomic_link_error (void); __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \ __ATOMIC_RELEASE, __ATOMIC_RELAXED); }) +# define atomic_exchange_relaxed(mem, desired) \ + ({ __atomic_check_size((mem)); \ + __atomic_exchange_n ((mem), (desired), __ATOMIC_RELAXED); }) # define atomic_exchange_acquire(mem, desired) \ ({ __atomic_check_size((mem)); \ __atomic_exchange_n ((mem), (desired), __ATOMIC_ACQUIRE); }) @@ -625,9 +628,15 @@ void __atomic_link_error (void); ({ __atomic_check_size((mem)); \ __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQ_REL); }) +# define atomic_fetch_and_relaxed(mem, operand) \ + ({ __atomic_check_size((mem)); \ + __atomic_fetch_and ((mem), (operand), __ATOMIC_RELAXED); }) # define atomic_fetch_and_acquire(mem, operand) \ ({ __atomic_check_size((mem)); \ __atomic_fetch_and ((mem), (operand), __ATOMIC_ACQUIRE); }) +# define atomic_fetch_and_release(mem, operand) \ + ({ __atomic_check_size((mem)); \ + __atomic_fetch_and ((mem), (operand), __ATOMIC_RELEASE); }) # define atomic_fetch_or_relaxed(mem, operand) \ ({ __atomic_check_size((mem)); \ @@ -635,6 +644,13 @@ void __atomic_link_error (void); # define atomic_fetch_or_acquire(mem, operand) \ ({ __atomic_check_size((mem)); \ __atomic_fetch_or ((mem), (operand), __ATOMIC_ACQUIRE); }) +# define atomic_fetch_or_release(mem, operand) \ + ({ __atomic_check_size((mem)); \ + __atomic_fetch_or ((mem), (operand), __ATOMIC_RELEASE); }) + +# define atomic_fetch_xor_release(mem, operand) \ + ({ __atomic_check_size((mem)); \ + __atomic_fetch_xor ((mem), (operand), __ATOMIC_RELEASE); }) #else /* !USE_ATOMIC_COMPILER_BUILTINS */ @@ -701,6 +717,11 @@ void __atomic_link_error (void); *(expected) == __atg103_expected; }) # endif +# ifndef atomic_exchange_relaxed +/* XXX This unnecessarily has acquire MO. 
*/ +# define atomic_exchange_relaxed(mem, val) \ + atomic_exchange_acq ((mem), (val)) +# endif # ifndef atomic_exchange_acquire # define atomic_exchange_acquire(mem, val) \ atomic_exchange_acq ((mem), (val)) @@ -732,12 +753,24 @@ void __atomic_link_error (void); atomic_exchange_and_add_acq ((mem), (operand)); }) # endif +/* XXX Fall back to acquire MO because archs do not define a weaker + atomic_and_val. */ +# ifndef atomic_fetch_and_relaxed +# define atomic_fetch_and_relaxed(mem, operand) \ + atomic_fetch_and_acquire ((mem), (operand)) +# endif /* XXX The default for atomic_and_val has acquire semantics, but this is not documented. */ # ifndef atomic_fetch_and_acquire # define atomic_fetch_and_acquire(mem, operand) \ atomic_and_val ((mem), (operand)) # endif +# ifndef atomic_fetch_and_release +/* XXX This unnecessarily has acquire MO. */ +# define atomic_fetch_and_release(mem, operand) \ + ({ atomic_thread_fence_release (); \ + atomic_and_val ((mem), (operand)); }) +# endif /* XXX The default for atomic_or_val has acquire semantics, but this is not documented. */ @@ -751,6 +784,28 @@ void __atomic_link_error (void); # define atomic_fetch_or_relaxed(mem, operand) \ atomic_fetch_or_acquire ((mem), (operand)) # endif +/* XXX Contains an unnecessary acquire MO because archs do not define a weaker + atomic_or_val. */ +# ifndef atomic_fetch_or_release +# define atomic_fetch_or_release(mem, operand) \ + ({ atomic_thread_fence_release (); \ + atomic_fetch_or_acquire ((mem), (operand)); }) +# endif + +# ifndef atomic_fetch_xor_release +# define atomic_fetch_xor_release(mem, operand) \ + ({ __typeof (*(mem)) __atg104_old; \ + __typeof (mem) __atg104_memp = (mem); \ + __typeof (*(mem)) __atg104_op = (operand); \ + \ + do \ + __atg104_old = (*__atg104_memp); \ + while (__builtin_expect \ + (atomic_compare_and_exchange_bool_rel ( \ + __atg104_memp, __atg104_old ^ __atg104_op, __atg104_old), 0));\ + \ + __atg104_old; }) +# endif #endif /* !USE_ATOMIC_COMPILER_BUILTINS */