//==-------------- atomic.hpp - support of atomic operations ---------------==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#pragma once
#include <cstdint>
#include "device.h"
#ifdef __SPIR__
#define SPIR_GLOBAL __attribute__((opencl_global))
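// Minimal mirrors of the SPIR-V operand encodings used by the built-ins
// below; the enumerator values match the Scope and Memory Semantics
// encodings from the SPIR-V specification.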
namespace __spv {
struct Scope {
enum Flag : uint32_t {
CrossDevice = 0,
Device = 1,
Workgroup = 2,
Subgroup = 3,
Invocation = 4,
};
constexpr Scope(Flag flag) : flag_value(flag) {}
constexpr operator uint32_t() const { return flag_value; }
Flag flag_value;
};
struct MemorySemanticsMask {
enum Flag : uint32_t {
None = 0x0,
Acquire = 0x2,
Release = 0x4,
AcquireRelease = 0x8,
SequentiallyConsistent = 0x10,
UniformMemory = 0x40,
SubgroupMemory = 0x80,
WorkgroupMemory = 0x100,
CrossWorkgroupMemory = 0x200,
AtomicCounterMemory = 0x400,
ImageMemory = 0x800,
};
constexpr MemorySemanticsMask(Flag flag) : flag_value(flag) {}
constexpr operator uint32_t() const { return flag_value; }
Flag flag_value;
};
} // namespace __spv
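// Declarations of the SPIR-V atomic built-ins the wrappers below lower to.
// For the compare-exchange, the two memory-semantics operands apply to the
// equal (success) and unequal (failure) paths respectively, the trailing
// operands are the value to store and the comparator, and the result is the
// original value at the pointer.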
extern DEVICE_EXTERNAL int
__spirv_AtomicCompareExchange(int SPIR_GLOBAL *, __spv::Scope::Flag,
__spv::MemorySemanticsMask::Flag,
__spv::MemorySemanticsMask::Flag, int, int);
extern DEVICE_EXTERNAL int __spirv_AtomicLoad(const int SPIR_GLOBAL *,
__spv::Scope::Flag,
__spv::MemorySemanticsMask::Flag);
extern DEVICE_EXTERNAL void
__spirv_AtomicStore(int SPIR_GLOBAL *, __spv::Scope::Flag,
__spv::MemorySemanticsMask::Flag, int);
/// Atomically sets the value at *Ptr to Desired if and only if it equals
/// Expected. Returns the value that was previously in *Ptr.
static inline int atomicCompareAndSet(SPIR_GLOBAL int *Ptr, int Desired,
int Expected) {
return __spirv_AtomicCompareExchange(
Ptr, __spv::Scope::Device,
__spv::MemorySemanticsMask::SequentiallyConsistent,
__spv::MemorySemanticsMask::SequentiallyConsistent, Desired, Expected);
}
static inline int atomicLoad(SPIR_GLOBAL int *Ptr) {
return __spirv_AtomicLoad(Ptr, __spv::Scope::Device,
__spv::MemorySemanticsMask::SequentiallyConsistent);
}
static inline void atomicStore(SPIR_GLOBAL int *Ptr, int V) {
__spirv_AtomicStore(Ptr, __spv::Scope::Device,
__spv::MemorySemanticsMask::SequentiallyConsistent, V);
}
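/// Example: a minimal spin lock built from the wrappers above. This is an
/// illustrative sketch, not part of the original interface; the example*
/// names are hypothetical, and it assumes a lock word in global memory
/// that is zero-initialized (0 = free, 1 = held).
static inline void exampleSpinLockAcquire(SPIR_GLOBAL int *Lock) {
  // atomicCompareAndSet returns the previous value, so observing 0 means
  // this invocation transitioned the lock from free to held.
  while (atomicCompareAndSet(Lock, /*Desired=*/1, /*Expected=*/0) != 0) {
  }
}
static inline void exampleSpinLockRelease(SPIR_GLOBAL int *Lock) {
  // The sequentially consistent store publishes writes made inside the
  // critical section before handing the lock back.
  atomicStore(Lock, 0);
}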
#endif // __SPIR__