@@ -28,7 +28,8 @@ bool skip_all_tests = true;
 
 #if defined(ENABLE_ATOMICS_TESTS) && \
 	defined(__BPF_FEATURE_ADDR_SPACE_CAST) && \
-	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86))
+	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86) || \
+	 (defined(__TARGET_ARCH_riscv) && __riscv_xlen == 64))
 bool skip_lacq_srel_tests __attribute((__section__(".data"))) = false;
 #else
 bool skip_lacq_srel_tests = true;
@@ -314,7 +315,8 @@ int load_acquire(const void *ctx)
 {
 #if defined(ENABLE_ATOMICS_TESTS) && \
 	defined(__BPF_FEATURE_ADDR_SPACE_CAST) && \
-	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86))
+	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86) || \
+	 (defined(__TARGET_ARCH_riscv) && __riscv_xlen == 64))
 
 #define LOAD_ACQUIRE_ARENA(SIZEOP, SIZE, SRC, DST)	\
 	{ asm volatile (				\
@@ -365,7 +367,8 @@ int store_release(const void *ctx)
 {
 #if defined(ENABLE_ATOMICS_TESTS) && \
 	defined(__BPF_FEATURE_ADDR_SPACE_CAST) && \
-	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86))
+	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86) || \
+	 (defined(__TARGET_ARCH_riscv) && __riscv_xlen == 64))
 
 #define STORE_RELEASE_ARENA(SIZEOP, DST, VAL)	\
 	{ asm volatile (			\
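
The same three-part guard is repeated before each arena load-acquire/store-release test; the new __riscv_xlen == 64 check restricts the RISC-V case to rv64 targets, since __TARGET_ARCH_riscv alone would also match 32-bit builds. A minimal sketch of how the guard could be written once and reused is below; the CAN_TEST_ARENA_LACQ_SREL name is illustrative and not part of this patch.

/* Illustrative sketch only: evaluate the guard once instead of repeating
 * the full #if before every test.  It is true only when the atomics tests
 * are enabled, arena address-space casts are available, and the target is
 * arm64, x86, or 64-bit RISC-V.
 */
#if defined(ENABLE_ATOMICS_TESTS) &&					\
	defined(__BPF_FEATURE_ADDR_SPACE_CAST) &&			\
	(defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86) ||	\
	 (defined(__TARGET_ARCH_riscv) && __riscv_xlen == 64))
#define CAN_TEST_ARENA_LACQ_SREL 1
#else
#define CAN_TEST_ARENA_LACQ_SREL 0
#endif

/* A test body would then guard its inline asm with the single macro: */
#if CAN_TEST_ARENA_LACQ_SREL
	/* emit load-acquire / store-release asm here */
#endif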