Mirror of https://github.com/yuzu-mirror/yuzu, synced 2024-11-27 06:20:17 +00:00
atomic_ops: Remove volatile qualifier
Commit 9f91d310c6, parent 6527c0d2fc
2 changed files with 21 additions and 25 deletions
@@ -16,32 +16,31 @@ namespace Common {
 #if _MSC_VER
 
 template <typename T>
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile T* pointer, T value, T expected);
+[[nodiscard]] inline bool AtomicCompareAndSwap(T* pointer, T value, T expected);
 template <typename T>
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile T* pointer, T value, T expected, T& actual);
+[[nodiscard]] inline bool AtomicCompareAndSwap(T* pointer, T value, T expected, T& actual);
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u8>(volatile u8* pointer, u8 value,
-                                                            u8 expected) {
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u8>(u8* pointer, u8 value, u8 expected) {
     const u8 result =
         _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
     return result == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u16>(volatile u16* pointer, u16 value,
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u16>(u16* pointer, u16 value,
                                                              u16 expected) {
     const u16 result =
         _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
     return result == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u32>(volatile u32* pointer, u32 value,
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u32>(u32* pointer, u32 value,
                                                              u32 expected) {
     const u32 result =
         _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
     return result == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(volatile u64* pointer, u64 value,
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(u64* pointer, u64 value,
                                                              u64 expected) {
     const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
                                                      value, expected);
@@ -54,29 +53,29 @@ template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(volatile u64* point
                                          reinterpret_cast<__int64*>(expected.data())) != 0;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u8>(volatile u8* pointer, u8 value,
-                                                            u8 expected, u8& actual) {
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u8>(u8* pointer, u8 value, u8 expected,
+                                                            u8& actual) {
     actual =
         _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
     return actual == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u16>(volatile u16* pointer, u16 value,
-                                                             u16 expected, u16& actual) {
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u16>(u16* pointer, u16 value, u16 expected,
+                                                             u16& actual) {
     actual =
         _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
     return actual == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u32>(volatile u32* pointer, u32 value,
-                                                             u32 expected, u32& actual) {
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u32>(u32* pointer, u32 value, u32 expected,
+                                                             u32& actual) {
     actual =
         _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
     return actual == expected;
 }
 
-template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(volatile u64* pointer, u64 value,
-                                                             u64 expected, u64& actual) {
+template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(u64* pointer, u64 value, u64 expected,
+                                                             u64& actual) {
     actual = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer), value,
                                            expected);
     return actual == expected;
@@ -101,11 +100,11 @@ template [[nodiscard]] inline bool AtomicCompareAndSwap<u64>(volatile u64* point
 #else
 
 template <typename T>
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile T* pointer, T value, T expected) {
+[[nodiscard]] inline bool AtomicCompareAndSwap(T* pointer, T value, T expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+[[nodiscard]] inline bool AtomicCompareAndSwap(u64* pointer, u128 value, u128 expected) {
     unsigned __int128 value_a;
     unsigned __int128 expected_a;
     std::memcpy(&value_a, value.data(), sizeof(u128));
@@ -114,13 +113,12 @@ template <typename T>
 }
 
 template <typename T>
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile T* pointer, T value, T expected,
-                                               T& actual) {
+[[nodiscard]] inline bool AtomicCompareAndSwap(T* pointer, T value, T expected, T& actual) {
    actual = __sync_val_compare_and_swap(pointer, expected, value);
    return actual == expected;
 }
 
-[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected,
+[[nodiscard]] inline bool AtomicCompareAndSwap(u64* pointer, u128 value, u128 expected,
                                                u128& actual) {
     unsigned __int128 value_a;
     unsigned __int128 expected_a;
@@ -132,7 +130,7 @@ template <typename T>
     return actual_a == expected_a;
 }
 
-[[nodiscard]] inline u128 AtomicLoad128(volatile u64* pointer) {
+[[nodiscard]] inline u128 AtomicLoad128(u64* pointer) {
     unsigned __int128 zeros_a = 0;
     unsigned __int128 result_a =
         __sync_val_compare_and_swap((unsigned __int128*)pointer, zeros_a, zeros_a);
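With the volatile qualifier dropped from the declarations above, callers now pass plain pointers; only the MSVC intrinsic call sites keep their internal casts to volatile. Below is a minimal standalone sketch of a compare-and-swap retry loop against the four-argument overload, following the non-MSVC (#else) path; the u32 alias and the AtomicIncrement helper are illustrative stand-ins, not part of this commit.

#include <cstdint>

using u32 = std::uint32_t; // stand-in for the common_types.h alias

// Same shape as the non-MSVC four-argument overload above: 'actual' receives the
// value observed in memory, and the return value reports whether the swap happened.
template <typename T>
[[nodiscard]] inline bool AtomicCompareAndSwap(T* pointer, T value, T expected, T& actual) {
    actual = __sync_val_compare_and_swap(pointer, expected, value);
    return actual == expected;
}

// Hypothetical helper: a lock-free increment built on the CAS primitive. The
// pointer is a plain u32*; no volatile qualifier is required any more.
inline void AtomicIncrement(u32* counter) {
    u32 expected = *counter;
    u32 actual{};
    while (!AtomicCompareAndSwap(counter, expected + 1, expected, actual)) {
        expected = actual; // lost the race; retry with the value that was actually stored
    }
}

int main() {
    u32 counter = 0;
    AtomicIncrement(&counter);
    return counter == 1 ? 0 : 1;
}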
@@ -781,8 +781,7 @@ struct Memory::Impl {
             },
             [&]() { HandleRasterizerWrite(GetInteger(vaddr), sizeof(T)); });
         if (ptr) {
-            const auto volatile_pointer = reinterpret_cast<volatile T*>(ptr);
-            return Common::AtomicCompareAndSwap(volatile_pointer, data, expected);
+            return Common::AtomicCompareAndSwap(reinterpret_cast<T*>(ptr), data, expected);
         }
         return true;
     }
@@ -796,8 +795,7 @@ struct Memory::Impl {
             },
             [&]() { HandleRasterizerWrite(GetInteger(vaddr), sizeof(u128)); });
         if (ptr) {
-            const auto volatile_pointer = reinterpret_cast<volatile u64*>(ptr);
-            return Common::AtomicCompareAndSwap(volatile_pointer, data, expected);
+            return Common::AtomicCompareAndSwap(reinterpret_cast<u64*>(ptr), data, expected);
         }
         return true;
     }
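The second Memory::Impl hunk routes a u128 value through the u64* overload shown earlier. Below is a minimal sketch of that packing pattern on the non-MSVC path, assuming u128 aliases std::array<u64, 2> (as in common_types.h) and a target with native 16-byte compare-and-swap (for example, -mcx16 on x86-64); the main driver is illustrative only.

#include <array>
#include <cstdint>
#include <cstring>

using u64 = std::uint64_t;       // stand-ins for the common_types.h aliases;
using u128 = std::array<u64, 2>; // assumptions for this sketch

// Follows the pattern of the non-MSVC u128 overload (the hunk above cuts off before
// the CAS call): both 64-bit halves are copied into an unsigned __int128 so a single
// 16-byte compare-and-swap can be issued.
[[nodiscard]] inline bool AtomicCompareAndSwap(u64* pointer, u128 value, u128 expected) {
    unsigned __int128 value_a;
    unsigned __int128 expected_a;
    std::memcpy(&value_a, value.data(), sizeof(u128));
    std::memcpy(&expected_a, expected.data(), sizeof(u128));
    return __sync_bool_compare_and_swap(reinterpret_cast<unsigned __int128*>(pointer), expected_a,
                                        value_a);
}

int main() {
    alignas(16) u128 storage{1, 2};
    const u128 desired{3, 4};
    const u128 expected{1, 2};
    // Succeeds because 'storage' still holds the expected halves.
    return AtomicCompareAndSwap(storage.data(), desired, expected) ? 0 : 1;
}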