Compilation fix

This commit is contained in:
Nekotekina 2015-05-27 12:51:25 +03:00
parent 22b78fec71
commit 2823953489
5 changed files with 90 additions and 81 deletions

View file

@@ -388,7 +388,7 @@ union _CRT_ALIGN(16) u128
}
};
static __forceinline u128 __sync_val_compare_and_swap(volatile u128* dest, u128 comp, u128 exch)
static __forceinline u128 sync_val_compare_and_swap(volatile u128* dest, u128 comp, u128 exch)
{
#if !defined(_MSC_VER)
auto res = __sync_val_compare_and_swap((volatile __int128_t*)dest, (__int128_t&)comp, (__int128_t&)exch);
@@ -399,7 +399,7 @@ static __forceinline u128 __sync_val_compare_and_swap(volatile u128* dest, u128
#endif
}
static __forceinline bool __sync_bool_compare_and_swap(volatile u128* dest, u128 comp, u128 exch)
static __forceinline bool sync_bool_compare_and_swap(volatile u128* dest, u128 comp, u128 exch)
{
#if !defined(_MSC_VER)
return __sync_bool_compare_and_swap((volatile __int128_t*)dest, (__int128_t&)comp, (__int128_t&)exch);
@@ -408,39 +408,39 @@ static __forceinline bool __sync_bool_compare_and_swap(volatile u128* dest, u128
#endif
}
static __forceinline u128 __sync_lock_test_and_set(volatile u128* dest, u128 value)
static __forceinline u128 sync_lock_test_and_set(volatile u128* dest, u128 value)
{
while (true)
{
const u128 old = *(u128*)dest;
if (__sync_bool_compare_and_swap(dest, old, value)) return old;
if (sync_bool_compare_and_swap(dest, old, value)) return old;
}
}
static __forceinline u128 __sync_fetch_and_or(volatile u128* dest, u128 value)
static __forceinline u128 sync_fetch_and_or(volatile u128* dest, u128 value)
{
while (true)
{
const u128 old = *(u128*)dest;
if (__sync_bool_compare_and_swap(dest, old, value | old)) return old;
if (sync_bool_compare_and_swap(dest, old, value | old)) return old;
}
}
static __forceinline u128 __sync_fetch_and_and(volatile u128* dest, u128 value)
static __forceinline u128 sync_fetch_and_and(volatile u128* dest, u128 value)
{
while (true)
{
const u128 old = *(u128*)dest;
if (__sync_bool_compare_and_swap(dest, old, value & old)) return old;
if (sync_bool_compare_and_swap(dest, old, value & old)) return old;
}
}
static __forceinline u128 __sync_fetch_and_xor(volatile u128* dest, u128 value)
static __forceinline u128 sync_fetch_and_xor(volatile u128* dest, u128 value)
{
while (true)
{
const u128 old = *(u128*)dest;
if (__sync_bool_compare_and_swap(dest, old, value ^ old)) return old;
if (sync_bool_compare_and_swap(dest, old, value ^ old)) return old;
}
}

View file

@@ -136,8 +136,8 @@ bool fs::stat(const std::string& path, stat_t& info)
info.mtime = to_time_t(attrs.ftLastWriteTime);
info.ctime = to_time_t(attrs.ftCreationTime);
#else
struct stat64 file_info;
if (stat64(path.c_str(), &file_info) < 0)
struct stat file_info;
if (stat(path.c_str(), &file_info) < 0)
{
return false;
}
@@ -174,8 +174,8 @@ bool fs::is_file(const std::string& file)
return (attrs & FILE_ATTRIBUTE_DIRECTORY) == 0;
#else
struct stat64 file_info;
if (stat64(file.c_str(), &file_info) < 0)
struct stat file_info;
if (stat(file.c_str(), &file_info) < 0)
{
return false;
}
@@ -195,8 +195,8 @@ bool fs::is_dir(const std::string& dir)
return (attrs & FILE_ATTRIBUTE_DIRECTORY) != 0;
#else
struct stat64 file_info;
if (stat64(dir.c_str(), &file_info) < 0)
struct stat file_info;
if (stat(dir.c_str(), &file_info) < 0)
{
return false;
}
@@ -364,7 +364,7 @@ bool fs::truncate_file(const std::string& file, u64 length)
#ifdef _WIN32
if (!::truncate_file(file, length))
#else
if (truncate64(file.c_str(), length))
if (::truncate(file.c_str(), length))
#endif
{
LOG_WARNING(GENERAL, "Error resizing file '%s' to 0x%llx: 0x%llx", file, length, GET_API_ERROR);
@@ -480,7 +480,7 @@ bool fs::file::trunc(u64 size) const
return true; // TODO
#else
return !ftruncate64(m_fd, size);
return !::ftruncate(m_fd, size);
#endif
}
@@ -501,8 +501,8 @@ bool fs::file::stat(stat_t& info) const
info.mtime = to_time_t(basic_info.ChangeTime);
info.ctime = to_time_t(basic_info.CreationTime);
#else
struct stat64 file_info;
if (fstat64(m_fd, &file_info) < 0)
struct stat file_info;
if (fstat(m_fd, &file_info) < 0)
{
return false;
}
@@ -580,7 +580,7 @@ u64 fs::file::seek(u64 offset, u32 mode) const
return pos.QuadPart;
#else
return lseek64(m_fd, offset, mode);
return ::lseek(m_fd, offset, mode);
#endif
}
@@ -595,8 +595,8 @@ u64 fs::file::size() const
return size.QuadPart;
#else
struct stat64 file_info;
if (fstat64(m_fd, &file_info) < 0)
struct stat file_info;
if (::fstat(m_fd, &file_info) < 0)
{
return -1;
}
@@ -766,8 +766,8 @@ bool fs::dir::get_next(std::string& name, stat_t& info)
#else
const auto found = ::readdir((DIR*)m_dd);
struct stat64 file_info;
if (!found || fstatat64(::dirfd((DIR*)m_dd), found->d_name, &file_info, 0) < 0)
struct stat file_info;
if (!found || ::fstatat(::dirfd((DIR*)m_dd), found->d_name, &file_info, 0) < 0)
{
return false;
}

View file

@@ -78,180 +78,189 @@ int clock_gettime(int foo, struct timespec *ts);
#endif /* __APPLE__ */
#define sync_val_compare_and_swap __sync_val_compare_and_swap
#define sync_bool_compare_and_swap __sync_bool_compare_and_swap
#define sync_lock_test_and_set __sync_lock_test_and_set
#define sync_fetch_and_add __sync_fetch_and_add
#define sync_fetch_and_sub __sync_fetch_and_sub
#define sync_fetch_and_or __sync_fetch_and_or
#define sync_fetch_and_and __sync_fetch_and_and
#define sync_fetch_and_xor __sync_fetch_and_xor
#endif /* __GNUG__ */
#if defined(_MSC_VER)
// atomic compare and swap functions
static __forceinline uint8_t __sync_val_compare_and_swap(volatile uint8_t* dest, uint8_t comp, uint8_t exch)
static __forceinline uint8_t sync_val_compare_and_swap(volatile uint8_t* dest, uint8_t comp, uint8_t exch)
{
return _InterlockedCompareExchange8((volatile char*)dest, exch, comp);
}
static __forceinline uint16_t __sync_val_compare_and_swap(volatile uint16_t* dest, uint16_t comp, uint16_t exch)
static __forceinline uint16_t sync_val_compare_and_swap(volatile uint16_t* dest, uint16_t comp, uint16_t exch)
{
return _InterlockedCompareExchange16((volatile short*)dest, exch, comp);
}
static __forceinline uint32_t __sync_val_compare_and_swap(volatile uint32_t* dest, uint32_t comp, uint32_t exch)
static __forceinline uint32_t sync_val_compare_and_swap(volatile uint32_t* dest, uint32_t comp, uint32_t exch)
{
return _InterlockedCompareExchange((volatile long*)dest, exch, comp);
}
static __forceinline uint64_t __sync_val_compare_and_swap(volatile uint64_t* dest, uint64_t comp, uint64_t exch)
static __forceinline uint64_t sync_val_compare_and_swap(volatile uint64_t* dest, uint64_t comp, uint64_t exch)
{
return _InterlockedCompareExchange64((volatile long long*)dest, exch, comp);
}
static __forceinline bool __sync_bool_compare_and_swap(volatile uint8_t* dest, uint8_t comp, uint8_t exch)
static __forceinline bool sync_bool_compare_and_swap(volatile uint8_t* dest, uint8_t comp, uint8_t exch)
{
return (uint8_t)_InterlockedCompareExchange8((volatile char*)dest, exch, comp) == comp;
}
static __forceinline bool __sync_bool_compare_and_swap(volatile uint16_t* dest, uint16_t comp, uint16_t exch)
static __forceinline bool sync_bool_compare_and_swap(volatile uint16_t* dest, uint16_t comp, uint16_t exch)
{
return (uint16_t)_InterlockedCompareExchange16((volatile short*)dest, exch, comp) == comp;
}
static __forceinline bool __sync_bool_compare_and_swap(volatile uint32_t* dest, uint32_t comp, uint32_t exch)
static __forceinline bool sync_bool_compare_and_swap(volatile uint32_t* dest, uint32_t comp, uint32_t exch)
{
return (uint32_t)_InterlockedCompareExchange((volatile long*)dest, exch, comp) == comp;
}
static __forceinline bool __sync_bool_compare_and_swap(volatile uint64_t* dest, uint64_t comp, uint64_t exch)
static __forceinline bool sync_bool_compare_and_swap(volatile uint64_t* dest, uint64_t comp, uint64_t exch)
{
return (uint64_t)_InterlockedCompareExchange64((volatile long long*)dest, exch, comp) == comp;
}
// atomic exchange functions
static __forceinline uint8_t __sync_lock_test_and_set(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_lock_test_and_set(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedExchange8((volatile char*)dest, value);
}
static __forceinline uint16_t __sync_lock_test_and_set(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_lock_test_and_set(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedExchange16((volatile short*)dest, value);
}
static __forceinline uint32_t __sync_lock_test_and_set(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_lock_test_and_set(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedExchange((volatile long*)dest, value);
}
static __forceinline uint64_t __sync_lock_test_and_set(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_lock_test_and_set(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedExchange64((volatile long long*)dest, value);
}
// atomic add functions
static __forceinline uint8_t __sync_fetch_and_add(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_fetch_and_add(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedExchangeAdd8((volatile char*)dest, value);
}
static __forceinline uint16_t __sync_fetch_and_add(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_fetch_and_add(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedExchangeAdd16((volatile short*)dest, value);
}
static __forceinline uint32_t __sync_fetch_and_add(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_fetch_and_add(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedExchangeAdd((volatile long*)dest, value);
}
static __forceinline uint64_t __sync_fetch_and_add(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_fetch_and_add(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedExchangeAdd64((volatile long long*)dest, value);
}
// atomic sub functions
static __forceinline uint8_t __sync_fetch_and_sub(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_fetch_and_sub(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedExchangeAdd8((volatile char*)dest, -(char)value);
}
static __forceinline uint16_t __sync_fetch_and_sub(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_fetch_and_sub(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedExchangeAdd16((volatile short*)dest, -(short)value);
}
static __forceinline uint32_t __sync_fetch_and_sub(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_fetch_and_sub(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedExchangeAdd((volatile long*)dest, -(long)value);
}
static __forceinline uint64_t __sync_fetch_and_sub(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_fetch_and_sub(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedExchangeAdd64((volatile long long*)dest, -(long long)value);
}
// atomic bitwise or functions
static __forceinline uint8_t __sync_fetch_and_or(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_fetch_and_or(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedOr8((volatile char*)dest, value);
}
static __forceinline uint16_t __sync_fetch_and_or(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_fetch_and_or(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedOr16((volatile short*)dest, value);
}
static __forceinline uint32_t __sync_fetch_and_or(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_fetch_and_or(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedOr((volatile long*)dest, value);
}
static __forceinline uint64_t __sync_fetch_and_or(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_fetch_and_or(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedOr64((volatile long long*)dest, value);
}
// atomic bitwise and functions
static __forceinline uint8_t __sync_fetch_and_and(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_fetch_and_and(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedAnd8((volatile char*)dest, value);
}
static __forceinline uint16_t __sync_fetch_and_and(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_fetch_and_and(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedAnd16((volatile short*)dest, value);
}
static __forceinline uint32_t __sync_fetch_and_and(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_fetch_and_and(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedAnd((volatile long*)dest, value);
}
static __forceinline uint64_t __sync_fetch_and_and(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_fetch_and_and(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedAnd64((volatile long long*)dest, value);
}
// atomic bitwise xor functions
static __forceinline uint8_t __sync_fetch_and_xor(volatile uint8_t* dest, uint8_t value)
static __forceinline uint8_t sync_fetch_and_xor(volatile uint8_t* dest, uint8_t value)
{
return _InterlockedXor8((volatile char*)dest, value);
}
static __forceinline uint16_t __sync_fetch_and_xor(volatile uint16_t* dest, uint16_t value)
static __forceinline uint16_t sync_fetch_and_xor(volatile uint16_t* dest, uint16_t value)
{
return _InterlockedXor16((volatile short*)dest, value);
}
static __forceinline uint32_t __sync_fetch_and_xor(volatile uint32_t* dest, uint32_t value)
static __forceinline uint32_t sync_fetch_and_xor(volatile uint32_t* dest, uint32_t value)
{
return _InterlockedXor((volatile long*)dest, value);
}
static __forceinline uint64_t __sync_fetch_and_xor(volatile uint64_t* dest, uint64_t value)
static __forceinline uint64_t sync_fetch_and_xor(volatile uint64_t* dest, uint64_t value)
{
return _InterlockedXor64((volatile long long*)dest, value);
}

View file

@@ -64,25 +64,25 @@ public:
// atomically compare data with cmp, replace with exch if equal, return previous data value anyway
__forceinline const type compare_and_swap(const type& cmp, const type& exch) volatile
{
return from_subtype(__sync_val_compare_and_swap(&sub_data, to_subtype(cmp), to_subtype(exch)));
return from_subtype(sync_val_compare_and_swap(&sub_data, to_subtype(cmp), to_subtype(exch)));
}
// atomically compare data with cmp, replace with exch if equal, return true if data was replaced
__forceinline bool compare_and_swap_test(const type& cmp, const type& exch) volatile
{
return __sync_bool_compare_and_swap(&sub_data, to_subtype(cmp), to_subtype(exch));
return sync_bool_compare_and_swap(&sub_data, to_subtype(cmp), to_subtype(exch));
}
// read data with memory barrier
__forceinline const type read_sync() const volatile
{
return from_subtype(__sync_val_compare_and_swap(const_cast<subtype*>(&sub_data), 0, 0));
return from_subtype(sync_val_compare_and_swap(const_cast<subtype*>(&sub_data), 0, 0));
}
// atomically replace data with exch, return previous data value
__forceinline const type exchange(const type& exch) volatile
{
return from_subtype(__sync_lock_test_and_set(&sub_data, to_subtype(exch)));
return from_subtype(sync_lock_test_and_set(&sub_data, to_subtype(exch)));
}
// read data without memory barrier
@@ -106,7 +106,7 @@ public:
const subtype old = const_cast<const subtype&>(sub_data);
subtype _new = old;
atomic_proc(to_type(_new)); // function should accept reference to T type
if (__sync_bool_compare_and_swap(&sub_data, old, _new)) return;
if (sync_bool_compare_and_swap(&sub_data, old, _new)) return;
}
}
@@ -119,19 +119,19 @@ public:
subtype _new = old;
auto res = static_cast<RT>(atomic_proc(to_type(_new))); // function should accept reference to T type and return some value
if (res != proceed_value) return res;
if (__sync_bool_compare_and_swap(&sub_data, old, _new)) return proceed_value;
if (sync_bool_compare_and_swap(&sub_data, old, _new)) return proceed_value;
}
}
// perform atomic operation on data with additional memory barrier
template<typename FT> __forceinline void atomic_op_sync(const FT atomic_proc) volatile
{
subtype old = __sync_val_compare_and_swap(&sub_data, 0, 0);
subtype old = sync_val_compare_and_swap(&sub_data, 0, 0);
while (true)
{
subtype _new = old;
atomic_proc(to_type(_new)); // function should accept reference to T type
const subtype val = __sync_val_compare_and_swap(&sub_data, old, _new);
const subtype val = sync_val_compare_and_swap(&sub_data, old, _new);
if (val == old) return;
old = val;
}
@@ -140,13 +140,13 @@ public:
// perform atomic operation on data with additional memory barrier and special exit condition (if intermediate result != proceed_value)
template<typename RT, typename FT> __forceinline RT atomic_op_sync(const RT proceed_value, const FT atomic_proc) volatile
{
subtype old = __sync_val_compare_and_swap(&sub_data, 0, 0);
subtype old = sync_val_compare_and_swap(&sub_data, 0, 0);
while (true)
{
subtype _new = old;
auto res = static_cast<RT>(atomic_proc(to_type(_new))); // function should accept reference to T type and return some value
if (res != proceed_value) return res;
const subtype val = __sync_val_compare_and_swap(&sub_data, old, _new);
const subtype val = sync_val_compare_and_swap(&sub_data, old, _new);
if (val == old) return proceed_value;
old = val;
}
@@ -155,40 +155,40 @@ public:
// atomic bitwise OR, returns previous data
__forceinline const type _or(const type& right) volatile
{
return from_subtype(__sync_fetch_and_or(&sub_data, to_subtype(right)));
return from_subtype(sync_fetch_and_or(&sub_data, to_subtype(right)));
}
// atomic bitwise AND, returns previous data
__forceinline const type _and(const type& right) volatile
{
return from_subtype(__sync_fetch_and_and(&sub_data, to_subtype(right)));
return from_subtype(sync_fetch_and_and(&sub_data, to_subtype(right)));
}
// atomic bitwise AND NOT (inverts right argument), returns previous data
__forceinline const type _and_not(const type& right) volatile
{
return from_subtype(__sync_fetch_and_and(&sub_data, ~to_subtype(right)));
return from_subtype(sync_fetch_and_and(&sub_data, ~to_subtype(right)));
}
// atomic bitwise XOR, returns previous data
__forceinline const type _xor(const type& right) volatile
{
return from_subtype(__sync_fetch_and_xor(&sub_data, to_subtype(right)));
return from_subtype(sync_fetch_and_xor(&sub_data, to_subtype(right)));
}
__forceinline const type operator |= (const type& right) volatile
{
return from_subtype(__sync_fetch_and_or(&sub_data, to_subtype(right)) | to_subtype(right));
return from_subtype(sync_fetch_and_or(&sub_data, to_subtype(right)) | to_subtype(right));
}
__forceinline const type operator &= (const type& right) volatile
{
return from_subtype(__sync_fetch_and_and(&sub_data, to_subtype(right)) & to_subtype(right));
return from_subtype(sync_fetch_and_and(&sub_data, to_subtype(right)) & to_subtype(right));
}
__forceinline const type operator ^= (const type& right) volatile
{
return from_subtype(__sync_fetch_and_xor(&sub_data, to_subtype(right)) ^ to_subtype(right));
return from_subtype(sync_fetch_and_xor(&sub_data, to_subtype(right)) ^ to_subtype(right));
}
};
@@ -199,32 +199,32 @@ template<typename T, typename T2 = T> using if_arithmetic_be_t = const typename
template<typename T> inline static if_arithmetic_le_t<T> operator ++(_atomic_base<le_t<T>>& left)
{
return left.from_subtype(__sync_fetch_and_add(&left.sub_data, 1) + 1);
return left.from_subtype(sync_fetch_and_add(&left.sub_data, 1) + 1);
}
template<typename T> inline static if_arithmetic_le_t<T> operator --(_atomic_base<le_t<T>>& left)
{
return left.from_subtype(__sync_fetch_and_sub(&left.sub_data, 1) - 1);
return left.from_subtype(sync_fetch_and_sub(&left.sub_data, 1) - 1);
}
template<typename T> inline static if_arithmetic_le_t<T> operator ++(_atomic_base<le_t<T>>& left, int)
{
return left.from_subtype(__sync_fetch_and_add(&left.sub_data, 1));
return left.from_subtype(sync_fetch_and_add(&left.sub_data, 1));
}
template<typename T> inline static if_arithmetic_le_t<T> operator --(_atomic_base<le_t<T>>& left, int)
{
return left.from_subtype(__sync_fetch_and_sub(&left.sub_data, 1));
return left.from_subtype(sync_fetch_and_sub(&left.sub_data, 1));
}
template<typename T, typename T2> inline static if_arithmetic_le_t<T, T2> operator +=(_atomic_base<le_t<T>>& left, T2 right)
{
return left.from_subtype(__sync_fetch_and_add(&left.sub_data, right) + right);
return left.from_subtype(sync_fetch_and_add(&left.sub_data, right) + right);
}
template<typename T, typename T2> inline static if_arithmetic_le_t<T, T2> operator -=(_atomic_base<le_t<T>>& left, T2 right)
{
return left.from_subtype(__sync_fetch_and_sub(&left.sub_data, right) - right);
return left.from_subtype(sync_fetch_and_sub(&left.sub_data, right) - right);
}
template<typename T> inline static if_arithmetic_be_t<T> operator ++(_atomic_base<be_t<T>>& left)

View file

@@ -326,7 +326,7 @@ void Emulator::Pause()
if (!IsRunning()) return;
SendDbgCommand(DID_PAUSE_EMU);
if (__sync_bool_compare_and_swap((volatile u32*)&m_status, Running, Paused))
if (sync_bool_compare_and_swap((volatile u32*)&m_status, Running, Paused))
{
SendDbgCommand(DID_PAUSED_EMU);