/* Copyright (C) 2012-2020 Free Software Foundation, Inc.
   Contributed by Richard Henderson <rth@redhat.com>.

   This file is part of the GNU Atomic Library (libatomic).

   Libatomic is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   Libatomic is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
   more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* Included after all more target-specific host-config.h.  */


/* The target may have some OS specific way to implement compare-and-swap.  */
#if !defined(atomic_compare_exchange_n) && SIZE(HAVE_ATOMIC_CAS)
# define atomic_compare_exchange_n __atomic_compare_exchange_n
#endif
#if !defined(atomic_compare_exchange_w) && WSIZE(HAVE_ATOMIC_CAS)
# define atomic_compare_exchange_w __atomic_compare_exchange_n
#endif
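
/* Illustrative sketch only: with the fallback definitions above in place,
   a size-N operation that lacks a native primitive can be built from a
   CAS loop.  The function name and the UTYPE stand-in below are
   hypothetical and not part of this header.  */
#if 0
static UTYPE
example_exchange (UTYPE *mptr, UTYPE newval, int smodel)
{
  UTYPE oldval = *mptr;
  /* Retry until the compare-and-swap installs NEWVAL; on each failure
     OLDVAL is refreshed with the current contents of *MPTR.  */
  while (!atomic_compare_exchange_n (mptr, &oldval, newval, true,
                                     smodel, __ATOMIC_RELAXED))
    continue;
  return oldval;
}
#endif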

/* For some targets, it may be significantly faster to avoid all barriers
   if the user only wants relaxed memory order.  Sometimes we don't want
   the extra code bloat.  In all cases, use the input to avoid warnings.  */
#if defined(WANT_SPECIALCASE_RELAXED) && !defined(__OPTIMIZE_SIZE__)
# define maybe_specialcase_relaxed(x)	((x) == __ATOMIC_RELAXED)
#else
# define maybe_specialcase_relaxed(x)	((x) & 0)
#endif

/* Similar, but for targets for which the seq_cst model is sufficiently
   more expensive than the acq_rel model.  */
#if defined(WANT_SPECIALCASE_ACQREL) && !defined(__OPTIMIZE_SIZE__)
# define maybe_specialcase_acqrel(x)	((x) != __ATOMIC_SEQ_CST)
#else
# define maybe_specialcase_acqrel(x)	((x) & 0)
#endif
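
/* For example, when WANT_SPECIALCASE_RELAXED is defined and we are not
   optimizing for size, maybe_specialcase_relaxed (__ATOMIC_RELAXED)
   folds to 1 and the fences below disappear for relaxed requests; in
   the fallback case both macros fold to 0 while still referencing
   their argument, which avoids unused-argument warnings.  */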


/* The target may have some OS specific way to emit barriers.  */
#ifndef pre_post_barrier
static inline void __attribute__((always_inline, artificial))
pre_barrier(int model)
{
  if (!maybe_specialcase_relaxed(model))
    {
      if (maybe_specialcase_acqrel(model))
        __atomic_thread_fence (__ATOMIC_ACQ_REL);
      else
        __atomic_thread_fence (__ATOMIC_SEQ_CST);
    }
}
static inline void __attribute__((always_inline, artificial))
post_barrier(int model)
{
  pre_barrier(model);
}
#define pre_post_barrier 1
#endif /* pre_post_barrier */
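
/* Illustrative sketch only: the intended pattern is to bracket a weaker
   or target-specific primitive with these helpers, along the lines of

     pre_barrier (smodel);
     ret = do_the_operation_relaxed (mptr, newval);
     post_barrier (smodel);

   where do_the_operation_relaxed is a hypothetical stand-in for whatever
   the target or OS actually provides.  */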

/* Similar, but assume that acq_rel is already handled via locks.  */
#ifndef pre_post_seq_barrier
static inline void __attribute__((always_inline, artificial))
pre_seq_barrier(int model)
{
}
static inline void __attribute__((always_inline, artificial))
post_seq_barrier(int model)
{
}
#define pre_post_seq_barrier 1
#endif
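
/* Illustrative sketch only: these helpers are meant to surround an
   implementation that already takes one of libatomic's locks, where the
   lock itself supplies the acquire/release ordering, e.g.

     pre_seq_barrier (smodel);
     ...take the protecting lock, perform the operation, release it...
     post_seq_barrier (smodel);

   The empty generic definitions above assume the locks are strong
   enough for seq_cst as well; a target that needs extra fences can
   define pre_post_seq_barrier along with its own versions of these
   functions.  */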