Searched refs: memory_order_relaxed (results 1 - 25 of 114), sorted by relevance


/freebsd-current/contrib/llvm-project/compiler-rt/lib/sanitizer_common/
sanitizer_atomic.h
21 memory_order_relaxed = 1 << 0, (enumerator in enum:__sanitizer::memory_order)
76 return atomic_load(a, memory_order_relaxed);
81 atomic_store(a, v, memory_order_relaxed);
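Note: the two wrappers above are the sanitizer's convenience accessors that hard-code relaxed ordering. A minimal sketch of the same idea on top of std::atomic (the helper names here are illustrative, not the sanitizer's API):

    #include <atomic>
    #include <cstdint>

    // Illustrative stand-ins for the sanitizer's relaxed accessors:
    // relaxed ordering gives atomicity (no torn reads or writes) but
    // imposes no inter-thread ordering, which is all these need.
    inline std::uint32_t relaxed_load(const std::atomic<std::uint32_t> &a) {
      return a.load(std::memory_order_relaxed);
    }

    inline void relaxed_store(std::atomic<std::uint32_t> &a, std::uint32_t v) {
      a.store(v, std::memory_order_relaxed);
    }

    int main() {
      std::atomic<std::uint32_t> word{0};
      relaxed_store(word, 1u << 0);   // cf. memory_order_relaxed = 1 << 0 in the enum above
      return relaxed_load(word) == 1u ? 0 : 1;
    }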
sanitizer_atomic_clang_other.h
27 DCHECK(mo & (memory_order_relaxed | memory_order_consume
34 if (mo == memory_order_relaxed) {
61 DCHECK(mo & (memory_order_relaxed | memory_order_release
67 if (mo == memory_order_relaxed) {
sanitizer_atomic_clang_x86.h
29 DCHECK(mo & (memory_order_relaxed | memory_order_consume
36 if (mo == memory_order_relaxed) {
74 DCHECK(mo & (memory_order_relaxed | memory_order_release
80 if (mo == memory_order_relaxed) {
sanitizer_lfstack.h
28 atomic_store(&head_, 0, memory_order_relaxed);
32 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
36 u64 cmp = atomic_load(&head_, memory_order_relaxed);
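Note: the lock-free stack above seeds its compare-and-swap loop with a relaxed load of head_ and, judging by kPtrMask, appears to pack a generation counter next to the pointer to guard against ABA. A minimal Treiber-style sketch of that push/pop shape with std::atomic, omitting the packed counter and any memory reclamation:

    #include <atomic>

    struct Node { int value; Node *next; };

    // Hypothetical singly linked lock-free stack; not the sanitizer's LFStack.
    struct Stack {
      std::atomic<Node *> head{nullptr};

      void push(Node *n) {
        // The initial read only seeds the CAS loop, so relaxed is enough;
        // the release on the successful exchange publishes *n to poppers.
        Node *cur = head.load(std::memory_order_relaxed);
        do {
          n->next = cur;
        } while (!head.compare_exchange_weak(cur, n,
                                             std::memory_order_release,
                                             std::memory_order_relaxed));
      }

      Node *pop() {
        // Acquire loads pair with push's release so reading cur->next is ordered.
        Node *cur = head.load(std::memory_order_acquire);
        while (cur && !head.compare_exchange_weak(cur, cur->next,
                                                  std::memory_order_acquire,
                                                  std::memory_order_acquire)) {
        }
        return cur;  // ABA and reclamation are out of scope for this sketch
      }
    };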
sanitizer_atomic_clang_mips.h
44 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
70 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
93 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
106 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
sanitizer_allocator_stats.h
30 atomic_fetch_add(&stats_[i], v, memory_order_relaxed);
34 atomic_fetch_sub(&stats_[i], v, memory_order_relaxed);
38 atomic_store(&stats_[i], v, memory_order_relaxed);
42 return atomic_load(&stats_[i], memory_order_relaxed);
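Note: the allocator statistics above are pure counters: each update is an atomic read-modify-write and nothing else is published through them, so relaxed ordering is sufficient. A hypothetical reduced version:

    #include <atomic>
    #include <cstdint>

    // Illustrative per-class statistics, loosely modeled on the excerpts above.
    // Relaxed read-modify-writes keep each counter consistent without imposing
    // any ordering on the surrounding allocator code.
    class AllocatorStats {
    public:
      enum Stat { kAllocated, kMapped, kNumStats };

      void Add(Stat i, std::uint64_t v) {
        stats_[i].fetch_add(v, std::memory_order_relaxed);
      }
      void Sub(Stat i, std::uint64_t v) {
        stats_[i].fetch_sub(v, std::memory_order_relaxed);
      }
      void Set(Stat i, std::uint64_t v) {
        stats_[i].store(v, std::memory_order_relaxed);
      }
      std::uint64_t Get(Stat i) const {
        return stats_[i].load(std::memory_order_relaxed);
      }

    private:
      std::atomic<std::uint64_t> stats_[kNumStats]{};
    };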
sanitizer_addrhashmap.h
234 if (atomic_load(&b->add, memory_order_relaxed)) {
236 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
239 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
255 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
267 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
271 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
295 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
310 atomic_store(&b->add, (uptr)add, memory_order_relaxed);
322 atomic_store(&b->add, (uptr)add1, memory_order_relaxed);
328 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed),
[all...]
sanitizer_allocator.cpp
40 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
190 return atomic_load(&allocator_may_return_null, memory_order_relaxed);
195 memory_order_relaxed);
206 return atomic_load(&rss_limit_exceeded, memory_order_relaxed);
210 atomic_store(&rss_limit_exceeded, limit_exceeded, memory_order_relaxed);
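Note: allocator_may_return_null and rss_limit_exceeded above are standalone flags; readers only need the current value, not ordering against other memory, so both the load and the store are relaxed. The pattern in miniature (names illustrative):

    #include <atomic>

    // Hypothetical process-wide flag, polled on hot paths.
    static std::atomic<bool> rss_limit_exceeded_flag{false};

    bool IsRssLimitExceeded() {
      return rss_limit_exceeded_flag.load(std::memory_order_relaxed);
    }

    void SetRssLimitExceeded(bool exceeded) {
      // Only the flag itself is communicated; nothing else is published
      // through it, so relaxed ordering is enough.
      rss_limit_exceeded_flag.store(exceeded, std::memory_order_relaxed);
    }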
sanitizer_tls_get_addr.cpp
43 atomic_fetch_sub(&number_of_live_dtls, 1, memory_order_relaxed);
62 atomic_fetch_add(&number_of_live_dtls, 1, memory_order_relaxed);
127 atomic_load(&number_of_live_dtls, memory_order_relaxed));
163 return atomic_load(&dtls->dtv_block, memory_order_relaxed) ==
sanitizer_mutex.h
26 atomic_store(&state_, 0, memory_order_relaxed);
44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
186 state = atomic_load(&state_, memory_order_relaxed);
206 state = atomic_load(&state_, memory_order_relaxed);
270 state = atomic_load(&state_, memory_order_relaxed);
286 state = atomic_load(&state_, memory_order_relaxed);
317 CHECK(atomic_load(&state_, memory_order_relaxed) & kWriterLock);
323 CHECK(atomic_load(&state_, memory_order_relaxed) & kReaderLockMask);
/freebsd-current/contrib/llvm-project/llvm/include/llvm/ADT/
Statistic.h
68 uint64_t getValue() const { return Value.load(std::memory_order_relaxed); }
74 Value.store(Val, std::memory_order_relaxed);
79 Value.fetch_add(1, std::memory_order_relaxed);
85 return Value.fetch_add(1, std::memory_order_relaxed);
89 Value.fetch_sub(1, std::memory_order_relaxed);
95 return Value.fetch_sub(1, std::memory_order_relaxed);
101 Value.fetch_add(V, std::memory_order_relaxed);
108 Value.fetch_sub(V, std::memory_order_relaxed);
113 uint64_t PrevMax = Value.load(std::memory_order_relaxed);
117 PrevMax, V, std::memory_order_relaxed)) {
[all...]
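Note: Statistic.h keeps every statistic as an independent relaxed counter, and the running maximum is maintained with a relaxed compare-exchange loop (visible at lines 113-117 above). A reduced sketch of that counter, not LLVM's class:

    #include <atomic>
    #include <cstdint>

    // Hypothetical stand-in for an LLVM-style statistic counter.
    class Counter {
      std::atomic<std::uint64_t> Value{0};

    public:
      std::uint64_t get() const { return Value.load(std::memory_order_relaxed); }

      void increment() { Value.fetch_add(1, std::memory_order_relaxed); }

      // Raise Value to at least V. A failed compare_exchange_weak reloads the
      // current value into PrevMax, so the loop retries until V is no longer
      // the larger value or the exchange succeeds; relaxed ordering is fine
      // because the counter does not publish any other data.
      void updateMax(std::uint64_t V) {
        std::uint64_t PrevMax = Value.load(std::memory_order_relaxed);
        while (V > PrevMax &&
               !Value.compare_exchange_weak(PrevMax, V,
                                            std::memory_order_relaxed)) {
        }
      }
    };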
/freebsd-current/contrib/llvm-project/compiler-rt/lib/orc/
debug.cpp
37 DebugTypes.store(&DebugTypesAll, std::memory_order_relaxed);
44 DebugTypes.store(DT, std::memory_order_relaxed);
50 DebugTypes.store(&DebugTypesNone, std::memory_order_relaxed);
debug.h
35 ::__orc_rt::DebugTypes.load(std::memory_order_relaxed); \
/freebsd-current/contrib/llvm-project/compiler-rt/lib/scudo/standalone/
options.h
53 memory_order_relaxed);
57 atomic_fetch_or(&Val, 1U << static_cast<u32>(Opt), memory_order_relaxed);
68 memory_order_relaxed));
atomic_helpers.h
17 memory_order_relaxed = 0, (enumerator in enum:scudo::memory_order)
24 static_assert(memory_order_relaxed == __ATOMIC_RELAXED, "");
127 return atomic_load(A, memory_order_relaxed);
132 atomic_store(A, V, memory_order_relaxed);
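Note: scudo defines its own memory_order enum and static_asserts that its values match the compiler's __ATOMIC_* constants, so they can be fed directly to the __atomic builtins. A trimmed-down sketch of that arrangement, assuming a Clang/GCC-compatible compiler (the struct layout and helper names here are only illustrative):

    // Hypothetical reduced version of scudo-style atomic helpers.
    typedef unsigned int u32;

    enum memory_order {
      memory_order_relaxed = 0,
      memory_order_acquire = 2,
      memory_order_release = 3,
    };
    static_assert(memory_order_relaxed == __ATOMIC_RELAXED, "");
    static_assert(memory_order_acquire == __ATOMIC_ACQUIRE, "");
    static_assert(memory_order_release == __ATOMIC_RELEASE, "");

    struct atomic_u32 {
      typedef u32 Type;
      volatile Type Val;
    };

    inline u32 atomic_load_relaxed(const volatile atomic_u32 *A) {
      return __atomic_load_n(&A->Val, __ATOMIC_RELAXED);
    }

    inline void atomic_store_relaxed(volatile atomic_u32 *A, u32 V) {
      __atomic_store_n(&A->Val, V, __ATOMIC_RELAXED);
    }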
/freebsd-current/tools/test/stress2/misc/
vm_reserv_populate.sh
97 memory_order_relaxed = 1 << 0,
115 assert(mo & (memory_order_relaxed | memory_order_consume
122 if (mo == memory_order_relaxed) {
161 assert(mo & (memory_order_relaxed | memory_order_release
167 if (mo == memory_order_relaxed) {
230 uptr cmp = atomic_load(p, memory_order_relaxed);
245 uptr s = atomic_load(p, memory_order_relaxed);
/freebsd-current/crypto/openssl/include/internal/
tsan_assist.h
57 # define tsan_load(ptr) atomic_load_explicit((ptr), memory_order_relaxed)
58 # define tsan_store(ptr, val) atomic_store_explicit((ptr), (val), memory_order_relaxed)
59 # define tsan_counter(ptr) atomic_fetch_add_explicit((ptr), 1, memory_order_relaxed)
60 # define tsan_decr(ptr) atomic_fetch_add_explicit((ptr), -1, memory_order_relaxed)
84 * memory_order_release for stores, while "iso" - memory_order_relaxed for
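Note: OpenSSL's tsan_assist.h maps simple shared counters onto relaxed atomic operations when the compiler provides C11 atomics, so ThreadSanitizer can see them; per the comment fragment above, "iso" selects plain relaxed ordering. The same idea expressed with std::atomic for consistency with the other sketches (the real header is C and uses the atomic_*_explicit functions):

    #include <atomic>
    #include <cstddef>

    // Hypothetical C++ counterparts of the tsan_load/tsan_store/tsan_counter macros.
    inline std::size_t tsan_load(const std::atomic<std::size_t> &p) {
      return p.load(std::memory_order_relaxed);
    }
    inline void tsan_store(std::atomic<std::size_t> &p, std::size_t v) {
      p.store(v, std::memory_order_relaxed);
    }
    inline void tsan_counter(std::atomic<std::size_t> &p) {
      p.fetch_add(1, std::memory_order_relaxed);   // statistics-style increment
    }
    inline void tsan_decr(std::atomic<std::size_t> &p) {
      p.fetch_sub(1, std::memory_order_relaxed);   // the C macro uses fetch_add(ptr, -1)
    }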
/freebsd-current/contrib/llvm-project/libcxx/include/__stop_token/
atomic_unique_lock.h
97 // Therefore `memory_order_relaxed` is enough.
98 _State __current_state = __state_.load(std::memory_order_relaxed);
106 __state_.wait(__current_state, std::memory_order_relaxed);
110 __current_state = __state_.load(std::memory_order_relaxed);
127 std::memory_order_relaxed // fail to exchange order. We don't need any ordering as we are going back to the
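Note: in atomic_unique_lock the relaxed loads only observe the current state to feed the compare-exchange and the wait; the exchange that actually takes the lock is what must supply acquire ordering. A simplified lock in that general style (not libc++'s class), assuming C++20 atomic wait/notify:

    #include <atomic>
    #include <cstdint>

    // Hypothetical simplified lock; bit 0 of state_ is the lock bit.
    class SpinWaitLock {
      std::atomic<std::uint32_t> state_{0};

    public:
      void lock() {
        std::uint32_t current = state_.load(std::memory_order_relaxed);
        for (;;) {
          if ((current & 1u) == 0) {
            // Acquire on success orders the critical section; a failed
            // exchange just refreshes `current`, so relaxed is enough there.
            if (state_.compare_exchange_weak(current, current | 1u,
                                             std::memory_order_acquire,
                                             std::memory_order_relaxed))
              return;
            continue;  // re-examine the refreshed value
          }
          state_.wait(current, std::memory_order_relaxed);   // block until state_ changes
          current = state_.load(std::memory_order_relaxed);
        }
      }

      void unlock() {
        state_.fetch_and(~1u, std::memory_order_release);
        state_.notify_all();
      }
    };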
stop_state.h
73 __state_.load(std::memory_order_relaxed) <= static_cast<__state_t>(~(1 << __stop_source_counter_shift)),
75 __state_.fetch_add(1 << __stop_source_counter_shift, std::memory_order_relaxed);
82 __state_.load(std::memory_order_relaxed) >= static_cast<__state_t>(1 << __stop_source_counter_shift),
84 __state_.fetch_sub(1 << __stop_source_counter_shift, std::memory_order_relaxed);
97 // Todo: Can this be std::memory_order_relaxed as the standard does not say anything except not to introduce data
/freebsd-current/contrib/llvm-project/libcxx/include/__atomic/
memory_order.h
43 inline constexpr auto memory_order_relaxed = memory_order::relaxed; (member in class:memory_order)
53 memory_order_relaxed = __mo_relaxed,
/freebsd-current/contrib/llvm-project/compiler-rt/lib/tsan/rtl/
tsan_fd.cpp
59 atomic_store(&s->rc, 1, memory_order_relaxed);
64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
65 atomic_fetch_add(&s->rc, 1, memory_order_relaxed);
70 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
112 atomic_load(&d->aux_sync, memory_order_relaxed)));
113 atomic_store(&d->aux_sync, 0, memory_order_relaxed);
140 atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed);
141 atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed);
142 atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed);
150 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
[all...]
tsan_external.cpp
35 if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;
50 uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);
69 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
85 uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);
108 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
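Note: tsan_external.cpp hands out tags with a relaxed fetch_add and bounds-checks them with relaxed loads; uniqueness needs only atomicity of the increment, and a stale count can only under-report. A tiny sketch of that ID-allocation pattern (names illustrative):

    #include <atomic>
    #include <cstdint>

    // Hypothetical tag registry in the spirit of the excerpts above.
    static std::atomic<std::uint64_t> used_tags{0};

    std::uint64_t RegisterTag() {
      // Atomicity alone guarantees every caller gets a distinct tag.
      return used_tags.fetch_add(1, std::memory_order_relaxed);
    }

    bool IsValidTag(std::uint64_t tag) {
      // A stale (smaller) count can only reject a very recent tag,
      // never accept one that was not handed out.
      return tag < used_tags.load(std::memory_order_relaxed);
    }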
/freebsd-current/contrib/llvm-project/llvm/lib/Support/
ManagedStatic.cpp
33 if (!Ptr.load(std::memory_order_relaxed)) {
PrettyStackTrace.cpp
196 GlobalSigInfoGenerationCounter.load(std::memory_order_relaxed);
297 GlobalSigInfoGenerationCounter.fetch_add(1, std::memory_order_relaxed);
305 GlobalSigInfoGenerationCounter.load(std::memory_order_relaxed);
/freebsd-current/contrib/llvm-project/llvm/include/llvm/Support/
ManagedStatic.h
91 return *static_cast<C *>(Ptr.load(std::memory_order_relaxed));
101 return *static_cast<C *>(Ptr.load(std::memory_order_relaxed));
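Note: ManagedStatic does double-checked lazy construction; the relaxed loads above sit either under the registration mutex (ManagedStatic.cpp) or after an earlier acquire load or mutex acquisition has already established the needed ordering. A generic, conventionally portable sketch of the same shape using acquire/release, not LLVM's implementation:

    #include <atomic>
    #include <mutex>

    // Generic double-checked lazy initialization; a simplified analogue,
    // not LLVM's ManagedStatic (the object is intentionally never destroyed here).
    template <class T>
    class Lazy {
      std::atomic<T *> Ptr{nullptr};
      std::mutex M;

    public:
      T &get() {
        // Fast path: acquire pairs with the release store below so the
        // fully constructed object is visible.
        T *P = Ptr.load(std::memory_order_acquire);
        if (!P) {
          std::lock_guard<std::mutex> Lock(M);
          // Re-check under the mutex; relaxed is enough here because the
          // mutex already orders us after any prior initializer.
          P = Ptr.load(std::memory_order_relaxed);
          if (!P) {
            P = new T();
            Ptr.store(P, std::memory_order_release);
          }
        }
        return *P;
      }
    };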
