mirror of
https://github.com/RPCSX/rpcsx.git
synced 2026-04-04 14:08:37 +00:00
vm::atomic update (or, and, xor)
CellSyncMutex, CellSyncBarrier, CellSyncRwm refactoring
This commit is contained in:
parent
169c8c47c0
commit
3ab08e0d7a
5 changed files with 200 additions and 200 deletions
|
|
@ -46,18 +46,21 @@ void strcpy_trunc(char(&dst)[size], const char(&src)[rsize])
|
|||
// GCC/Clang replacements for MSVC alignment and Interlocked intrinsics.
// All macro arguments are parenthesized to avoid precedence surprises when
// callers pass expressions (standard function-like-macro hygiene).
#define _CRT_ALIGN(x) __attribute__((aligned(x)))
// Note the argument order: the Windows API is (dest, exchange, comparand),
// while the GCC builtin is (ptr, expected_old, desired_new).
#define InterlockedCompareExchange(ptr,new_val,old_val) __sync_val_compare_and_swap((ptr),(old_val),(new_val))
// Atomically stores 'value' and returns the previous contents.
#define InterlockedExchange(ptr, value) __sync_lock_test_and_set((ptr), (value))
// Atomic read-modify-write; each returns the value *before* the operation,
// matching the Windows Interlocked{Or,And,Xor} contract.
#define InterlockedOr(ptr, value) __sync_fetch_and_or((ptr), (value))
#define InterlockedAnd(ptr, value) __sync_fetch_and_and((ptr), (value))
#define InterlockedXor(ptr, value) __sync_fetch_and_xor((ptr), (value))
|
||||
|
||||
// Atomically ORs 'val' into *dest and returns the value *dest held before
// the operation (Windows InterlockedOr64 contract).
//
// Bug fix: the previous CAS-loop implementation called
//     __sync_val_compare_and_swap(dest, olderval | val, olderval)
// with the expected/desired arguments swapped — the builtin's signature is
// (ptr, expected_old, desired_new) — so the comparison almost never matched
// and *dest was never actually updated. Rather than fixing the hand-rolled
// loop, delegate to __sync_fetch_and_or, which supports 64-bit operands and
// matches how the 32-bit InterlockedOr macro above is implemented.
inline int64_t InterlockedOr64(volatile int64_t *dest, int64_t val)
{
	return __sync_fetch_and_or(dest, val);
}
|
||||
|
||||
inline uint64_t __umulh(uint64_t a, uint64_t b)
|
||||
{
|
||||
|
|
@ -112,3 +115,36 @@ static __forceinline uint64_t InterlockedExchange(volatile uint64_t* dest, uint6
|
|||
return _InterlockedExchange64((volatile long long*)dest, value);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef InterlockedOr
|
||||
static __forceinline uint32_t InterlockedOr(volatile uint32_t* dest, uint32_t value)
|
||||
{
|
||||
return _InterlockedOr((volatile long*)dest, value);
|
||||
}
|
||||
static __forceinline uint64_t InterlockedOr(volatile uint64_t* dest, uint64_t value)
|
||||
{
|
||||
return _InterlockedOr64((volatile long long*)dest, value);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef InterlockedAnd
|
||||
static __forceinline uint32_t InterlockedAnd(volatile uint32_t* dest, uint32_t value)
|
||||
{
|
||||
return _InterlockedAnd((volatile long*)dest, value);
|
||||
}
|
||||
static __forceinline uint64_t InterlockedAnd(volatile uint64_t* dest, uint64_t value)
|
||||
{
|
||||
return _InterlockedAnd64((volatile long long*)dest, value);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef InterlockedXor
|
||||
static __forceinline uint32_t InterlockedXor(volatile uint32_t* dest, uint32_t value)
|
||||
{
|
||||
return _InterlockedXor((volatile long*)dest, value);
|
||||
}
|
||||
static __forceinline uint64_t InterlockedXor(volatile uint64_t* dest, uint64_t value)
|
||||
{
|
||||
return _InterlockedXor64((volatile long long*)dest, value);
|
||||
}
|
||||
#endif
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue