From 105d70f7b4ce7706652853ece2e35ea8c774005f Mon Sep 17 00:00:00 2001
From: kbkpbot
Date: Mon, 5 Feb 2024 08:37:41 +0800
Subject: [PATCH] atomic_load/atomic_store: should we add memory barriers here
 to ensure the latest value is returned?

Without these memory barriers, multi-threaded programs can sometimes hit bugs.
---
 lib/stdatomic.c | 102 ++++++++++++++++++++++++------------------------
 1 file changed, 52 insertions(+), 50 deletions(-)

diff --git a/lib/stdatomic.c b/lib/stdatomic.c
index bd8c0ce7..db3c907f 100644
--- a/lib/stdatomic.c
+++ b/lib/stdatomic.c
@@ -15,6 +15,56 @@
 #define __ATOMIC_SEQ_CST 5
 typedef __SIZE_TYPE__ size_t;
 
+/* uses alias to allow building with gcc/clang */
+#ifdef __TINYC__
+#define ATOMIC(x) __atomic_##x
+#else
+#define ATOMIC(x) __tcc_atomic_##x
+#endif
+
+void ATOMIC(signal_fence) (int memorder)
+{
+}
+
+void ATOMIC(thread_fence) (int memorder)
+{
+#if defined __i386__
+    __asm__ volatile("lock orl $0, (%esp)");
+#elif defined __x86_64__
+    __asm__ volatile("lock orq $0, (%rsp)");
+#elif defined __arm__
+    __asm__ volatile(".int 0xee070fba"); // mcr p15, 0, r0, c7, c10, 5
+#elif defined __aarch64__
+    __asm__ volatile(".int 0xd5033bbf"); // dmb ish
+#elif defined __riscv
+    __asm__ volatile(".int 0x0ff0000f"); // fence iorw,iorw
+#endif
+}
+
+bool ATOMIC(is_lock_free) (unsigned long size, const volatile void *ptr)
+{
+    bool ret;
+
+    switch (size) {
+    case 1: ret = true; break;
+    case 2: ret = true; break;
+    case 4: ret = true; break;
+#if defined __x86_64__ || defined __aarch64__ || defined __riscv
+    case 8: ret = true; break;
+#else
+    case 8: ret = false; break;
+#endif
+    default: ret = false; break;
+    }
+    return ret;
+}
+
+#ifndef __TINYC__
+void __atomic_signal_fence(int memorder) __attribute__((alias("__tcc_atomic_signal_fence")));
+void __atomic_thread_fence(int memorder) __attribute__((alias("__tcc_atomic_thread_fence")));
+bool __atomic_is_lock_free(unsigned long size, const volatile void *ptr) __attribute__((alias("__tcc_atomic_is_lock_free")));
+#endif
+
 #if defined __i386__ || defined __x86_64__
 #define ATOMIC_COMPARE_EXCHANGE(TYPE, MODE, SUFFIX) \
     bool __atomic_compare_exchange_##MODE \
@@ -42,6 +92,7 @@ typedef __SIZE_TYPE__ size_t;
 #define ATOMIC_LOAD(TYPE, MODE) \
     TYPE __atomic_load_##MODE(const volatile void *atom, int memorder) \
     { \
+        __atomic_thread_fence(__ATOMIC_ACQUIRE); \
         return *(volatile TYPE *)atom; \
     }
 
@@ -49,6 +100,7 @@ typedef __SIZE_TYPE__ size_t;
     void __atomic_store_##MODE(volatile void *atom, TYPE value, int memorder) \
     { \
         *(volatile TYPE *)atom = value; \
+        __atomic_thread_fence(__ATOMIC_RELEASE); \
     }
 
 #define ATOMIC_GEN_OP(TYPE, MODE, NAME, OP, RET) \
@@ -114,53 +166,3 @@ ATOMIC_GEN(uint32_t, 4, "l")
 #if defined __x86_64__ || defined __aarch64__ || defined __riscv
 ATOMIC_GEN(uint64_t, 8, "q")
 #endif
-
-/* uses alias to allow building with gcc/clang */
-#ifdef __TINYC__
-#define ATOMIC(x) __atomic_##x
-#else
-#define ATOMIC(x) __tcc_atomic_##x
-#endif
-
-void ATOMIC(signal_fence) (int memorder)
-{
-}
-
-void ATOMIC(thread_fence) (int memorder)
-{
-#if defined __i386__
-    __asm__ volatile("lock orl $0, (%esp)");
-#elif defined __x86_64__
-    __asm__ volatile("lock orq $0, (%rsp)");
-#elif defined __arm__
-    __asm__ volatile(".int 0xee070fba"); // mcr p15, 0, r0, c7, c10, 5
-#elif defined __aarch64__
-    __asm__ volatile(".int 0xd5033bbf"); // dmb ish
-#elif defined __riscv
-    __asm__ volatile(".int 0x0ff0000f"); // fence iorw,iorw
-#endif
-}
-
-bool ATOMIC(is_lock_free) (unsigned long size, const volatile void *ptr)
-{
-    bool ret;
-
-    switch (size) {
-    case 1: ret = true; break;
-    case 2: ret = true; break;
-    case 4: ret = true; break;
-#if defined __x86_64__ || defined __aarch64__ || defined __riscv
-    case 8: ret = true; break;
-#else
-    case 8: ret = false; break;
-#endif
-    default: ret = false; break;
-    }
-    return ret;
-}
-
-#ifndef __TINYC__
-void __atomic_signal_fence(int memorder) __attribute__((alias("__tcc_atomic_signal_fence")));
-void __atomic_thread_fence(int memorder) __attribute__((alias("__tcc_atomic_thread_fence")));
-bool __atomic_is_lock_free(unsigned long size, const volatile void *ptr) __attribute__((alias("__tcc_atomic_is_lock_free")));
-#endif
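Not part of the patch, just a minimal sketch of the failure mode the commit
message describes. The file and symbol names below (mp_test.c, writer, reader)
are made up for illustration, and it assumes a POSIX-threads toolchain. On x86
the test tends to pass even without barriers because the hardware is strongly
ordered, so the interesting targets are arm/aarch64/riscv.

/* mp_test.c - hypothetical message-passing reproducer (illustration only).
 * If the __atomic_* helpers in lib/stdatomic.c compile down to plain
 * volatile accesses with no barrier, the reader may observe flag == 1 while
 * still reading a stale value of data on weakly ordered CPUs.
 * Build (names assumed): tcc -pthread mp_test.c -o mp_test */
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

static int data;          /* ordinary payload, written before the flag */
static atomic_int flag;   /* C11 atomic, routed through lib/stdatomic.c in tcc */

static void *writer(void *arg)
{
    (void)arg;
    data = 42;                  /* payload first */
    atomic_store(&flag, 1);     /* publish: needs release ordering */
    return NULL;
}

static void *reader(void *arg)
{
    (void)arg;
    while (atomic_load(&flag) == 0)   /* wait: needs acquire ordering */
        ;
    printf("data = %d\n", data);      /* should always print 42, never 0 */
    return NULL;
}

int main(void)
{
    pthread_t r, w;
    pthread_create(&r, NULL, reader, NULL);
    pthread_create(&w, NULL, writer, NULL);
    pthread_join(w, NULL);
    pthread_join(r, NULL);
    return 0;
}

With the ordering in place, once the reader sees flag == 1 it must also see
data == 42; without it, the stale read is what shows up as the intermittent
bug mentioned in the commit message.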