ETISS 0.8.0
Extendable Translating Instruction Set Simulator (version 0.8.0)
tsan_interface_atomic.h
//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char __tsan_atomic8;
typedef short __tsan_atomic16;
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// https://github.com/llvm/llvm-project/blob/master/libcxx/include/atomic
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif

void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a, __tsan_atomic8 *c,
    __tsan_atomic8 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a, __tsan_atomic16 *c,
    __tsan_atomic16 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a, __tsan_atomic32 *c,
    __tsan_atomic32 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a, __tsan_atomic64 *c,
    __tsan_atomic64 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a, __tsan_atomic8 *c,
    __tsan_atomic8 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a, __tsan_atomic16 *c,
    __tsan_atomic16 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a, __tsan_atomic32 *c,
    __tsan_atomic32 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a, __tsan_atomic64 *c,
    __tsan_atomic64 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(volatile __tsan_atomic8 *a,
    __tsan_atomic8 c, __tsan_atomic8 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(volatile __tsan_atomic16 *a,
    __tsan_atomic16 c, __tsan_atomic16 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(volatile __tsan_atomic32 *a,
    __tsan_atomic32 c, __tsan_atomic32 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(volatile __tsan_atomic64 *a,
    __tsan_atomic64 c, __tsan_atomic64 v, __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);
#ifdef __cplusplus
} // extern "C"
#endif

#endif // TSAN_INTERFACE_ATOMIC_H
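
The functions above are the entry points that ThreadSanitizer-instrumented code uses in place of ordinary atomic operations; their semantics mirror the C11/C++11 explicit atomic operations with the matching __tsan_memory_order value. The sketch below shows how they can be driven directly. It is illustrative only: it assumes the program is compiled and linked with -fsanitize=thread so the TSan runtime provides the __tsan_* symbols, and that the header is installed as sanitizer/tsan_interface_atomic.h (the include path can differ between toolchains); the variable names and the main harness are made up for the example.

// Sketch only: assumes -fsanitize=thread so the ThreadSanitizer runtime
// supplies the __tsan_* symbols; the include path may differ per installation.
#include <sanitizer/tsan_interface_atomic.h>
#include <stdio.h>

static __tsan_atomic32 counter;  // a plain int cell driven through the TSan interface

int main(void) {
  // Store, fetch_add and load with explicit memory orders, mirroring
  // atomic_store_explicit / atomic_fetch_add_explicit / atomic_load_explicit.
  __tsan_atomic32_store(&counter, 0, __tsan_memory_order_seq_cst);
  __tsan_atomic32 before =
      __tsan_atomic32_fetch_add(&counter, 5, __tsan_memory_order_relaxed);

  // Strong compare-and-swap: returns non-zero on success; on failure the value
  // observed in *a is written back through c, as with
  // atomic_compare_exchange_strong_explicit.
  __tsan_atomic32 expected = 5;
  int ok = __tsan_atomic32_compare_exchange_strong(&counter, &expected, 7,
                                                   __tsan_memory_order_acq_rel,
                                                   __tsan_memory_order_acquire);

  printf("before=%d ok=%d now=%d\n", (int)before, ok,
         (int)__tsan_atomic32_load(&counter, __tsan_memory_order_seq_cst));
  return 0;
}

In normal use the compiler emits these calls automatically when instrumenting __atomic builtins and std::atomic operations; calling them by hand is mainly of interest to runtimes or libraries that implement their own atomics and want those operations visible to the race detector.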