MDEV-27667 Fix MDEV-26720 on 64-bit Microsoft Windows

The correct macro to detect the AMD64 ISA with Microsoft Visual C++ is _M_X64, not _M_IX64.

This is the 10.6 version of
commit fb8fea3490 (10.5).
Marko Mäkelä 2022-01-28 16:42:37 +02:00
parent f0f5ce58c7
commit c478a5533e
5 changed files with 9 additions and 9 deletions
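
As background (not part of the commit), the fix hinges on which macros the compilers actually predefine: GCC and clang define __i386__ / __x86_64__, while Microsoft Visual C++ defines _M_IX86 for 32-bit x86 and _M_X64 for x64. _M_IX64 is not a predefined macro, so the old #elif could never match on 64-bit Windows and the generic fallback was compiled instead. A minimal illustrative sketch:

/* Minimal illustration, not MariaDB code: which branch is selected on
   which compiler and target. */
#include <cstdio>

int main()
{
#if defined __GNUC__ && defined __x86_64__
  std::puts("AMD64, GCC or clang");
#elif defined _MSC_VER && defined _M_X64
  std::puts("AMD64, Microsoft Visual C++");   /* now reachable on x64 */
#elif defined _MSC_VER && defined _M_IX86
  std::puts("IA-32, Microsoft Visual C++");
#else
  std::puts("other compiler or target");
#endif
  return 0;
}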


@@ -572,7 +572,7 @@ public:
 #if defined __GNUC__ && (defined __i386__ || defined __x86_64__)
 static_assert(NEEDS_FSYNC == 1U << 29, "compatibility");
 __asm__ __volatile__("lock btrl $29, %0" : "+m" (n_pending));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 static_assert(NEEDS_FSYNC == 1U << 29, "compatibility");
 _interlockedbittestandreset(reinterpret_cast<volatile long*>
 (&n_pending), 29);
@@ -588,7 +588,7 @@ private:
 #if defined __GNUC__ && (defined __i386__ || defined __x86_64__)
 static_assert(CLOSING == 1U << 30, "compatibility");
 __asm__ __volatile__("lock btrl $30, %0" : "+m" (n_pending));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 static_assert(CLOSING == 1U << 30, "compatibility");
 _interlockedbittestandreset(reinterpret_cast<volatile long*>
 (&n_pending), 30);
@@ -1555,7 +1555,7 @@ inline bool fil_space_t::set_stopping_check()
 return true;
 not_stopped:
 return false;
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 static_assert(STOPPING == 1U << 31, "compatibility");
 return _interlockedbittestandset(reinterpret_cast<volatile long*>
 (&n_pending), 31);
@@ -1572,7 +1572,7 @@ inline void fil_space_t::set_stopping()
 #if defined __GNUC__ && (defined __i386__ || defined __x86_64__)
 static_assert(STOPPING == 1U << 31, "compatibility");
 __asm__ __volatile__("lock btsl $31, %0" : "+m" (n_pending));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 static_assert(STOPPING == 1U << 31, "compatibility");
 _interlockedbittestandset(reinterpret_cast<volatile long*>(&n_pending), 31);
 #else
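
As an aside (illustration only, not part of the commit or the surrounding source), the x86-specific branches above reduce each single-bit update of n_pending to one locked instruction. Their semantics correspond to the following portable std::atomic operations; the names and constants are taken from the hunks above, but the memory orders are assumptions:

// Illustrative sketch of the semantics only, not the MariaDB fallback code.
#include <atomic>
#include <cstdint>

static std::atomic<uint32_t> n_pending{0};
static constexpr uint32_t NEEDS_FSYNC = 1U << 29;
static constexpr uint32_t STOPPING = 1U << 31;

// what "lock btrl $29" or _interlockedbittestandreset(..., 29) achieves
static void clear_needs_fsync()
{
  n_pending.fetch_and(~NEEDS_FSYNC, std::memory_order_release);
}

// what "lock btsl $31" or _interlockedbittestandset(..., 31) achieves:
// set the bit and report whether it was already set
static bool set_stopping_check()
{
  return n_pending.fetch_or(STOPPING, std::memory_order_relaxed) & STOPPING;
}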


@@ -56,7 +56,7 @@ protected:
 #if defined __GNUC__ && (defined __i386__ || defined __x86_64__)
 static_assert(WRITER_WAITING == 1U << 30, "compatibility");
 __asm__ __volatile__("lock btsl $30, %0" : "+m" (lock));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 static_assert(WRITER_WAITING == 1U << 30, "compatibility");
 _interlockedbittestandset(reinterpret_cast<volatile long*>(&lock), 30);
 #else


@@ -224,7 +224,7 @@ public:
 void wr_lock()
 {
 writer.wr_lock();
-#if defined __i386__||defined __x86_64__||defined _M_IX86||defined _M_IX64
+#if defined __i386__||defined __x86_64__||defined _M_IX86||defined _M_X64
 /* On IA-32 and AMD64, this type of fetch_or() can only be implemented
 as a loop around LOCK CMPXCHG. In this particular case, setting the
 most significant bit using fetch_add() is equivalent, and is
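
The truncated comment above relies on an equivalence that is worth spelling out (the sketch is illustrative, not MariaDB code): as long as the most significant bit is known to be clear, fetch_add(1U << 31) sets exactly that bit, just like fetch_or(1U << 31) would, and on IA-32/AMD64 it compiles to a single LOCK XADD rather than a LOCK CMPXCHG retry loop. The name lock_word below is a placeholder:

// Illustrative sketch only; lock_word is not the actual member name.
#include <atomic>
#include <cassert>
#include <cstdint>

static std::atomic<uint32_t> lock_word{0};

static void set_most_significant_bit()
{
  // Equivalent to lock_word.fetch_or(1U << 31) provided the bit was clear:
  // adding 1U << 31 flips only that bit and leaves the low 31 bits intact.
  uint32_t old = lock_word.fetch_add(1U << 31, std::memory_order_acquire);
  assert(!(old & 1U << 31));  // the caller must guarantee the bit was clear
}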


@@ -116,7 +116,7 @@ private:
 __asm__ __volatile__("lock btsl $1, %0" : "+m" (ref));
 else
 __asm__ __volatile__("lock btsl $0, %0" : "+m" (ref));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 _interlockedbittestandset(reinterpret_cast<volatile long*>(&ref),
 needs_purge);
 #else
@@ -133,7 +133,7 @@ private:
 __asm__ __volatile__("lock btrl $1, %0" : "+m" (ref));
 else
 __asm__ __volatile__("lock btrl $0, %0" : "+m" (ref));
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 _interlockedbittestandreset(reinterpret_cast<volatile long*>(&ref),
 needs_purge);
 #else


@@ -385,7 +385,7 @@ assembler code or a Microsoft intrinsic function.
 # define IF_NOT_FETCH_OR_GOTO(mem, bit, label) \
 __asm__ goto("lock btsl $" #bit ", %0\n\t" \
 "jnc %l1" : : "m" (mem) : "cc", "memory" : label);
-#elif defined _MSC_VER && (defined _M_IX86 || defined _M_IX64)
+#elif defined _MSC_VER && (defined _M_IX86 || defined _M_X64)
 # define IF_FETCH_OR_GOTO(mem, bit, label) \
 if (_interlockedbittestandset(reinterpret_cast<volatile long*>(&mem), bit)) \
 goto label;
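
For readers unfamiliar with these macros, a hedged sketch of their intended semantics (an assumption based on the visible hunk, not taken from the MariaDB sources): IF_FETCH_OR_GOTO() atomically sets the given bit and branches to the label if the bit had already been set, while IF_NOT_FETCH_OR_GOTO() branches if it had not. A hypothetical portable stand-in and a usage example:

// Hypothetical portable equivalents, for illustration only; the real
// definitions are the compiler-specific ones in the hunk above.
#include <atomic>
#include <cstdint>

#define IF_FETCH_OR_GOTO(mem, bit, label)               \
  if ((mem).fetch_or(1U << (bit)) & 1U << (bit))        \
    goto label;
#define IF_NOT_FETCH_OR_GOTO(mem, bit, label)           \
  if (!((mem).fetch_or(1U << (bit)) & 1U << (bit)))     \
    goto label;

static std::atomic<uint32_t> flags{0};

static bool try_set_bit_30()
{
  IF_FETCH_OR_GOTO(flags, 30, contended);
  return true;    // bit 30 was clear and has now been set
contended:
  return false;   // bit 30 had already been set by another thread
}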