author    H. Peter Anvin <hpa@linux.intel.com>    2012-11-28 11:50:25 -0800
committer H. Peter Anvin <hpa@linux.intel.com>    2012-11-29 13:23:02 -0800
commit    7ac468b1300f35143a9b5b100e3970ca7ae1d9b8 (patch)
tree      1c928b80c897acc8bf7f47de8ef3793c5ba30c2c /arch/x86
parent    d55c5a93db2d5fa95f233ab153f594365d95b777 (diff)
x86, 386 removal: Remove CONFIG_XADD
All 486+ CPUs support XADD, so remove the fallback 386 support code.

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Link: http://lkml.kernel.org/r/1354132230-21854-4-git-send-email-hpa@linux.intel.com
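The xadd() used in the code below wraps the x86 XADD instruction: it atomically adds the source register to the destination memory operand and leaves the previous memory value in the source register, so i + xadd(&v->counter, i) yields the post-add counter value. A minimal user-space sketch of that semantics (illustrative only, not the kernel's actual xadd() macro; the function and variable names here are made up):

    #include <stdio.h>

    /* Illustrative only: mimics what the kernel's xadd()-based
     * atomic_add_return() boils down to on 486+ CPUs. */
    static inline int add_return_sketch(int i, volatile int *counter)
    {
            int old = i;

            /* lock xadd: atomically { old = *counter; *counter += i; } */
            asm volatile("lock; xaddl %0, %1"
                         : "+r" (old), "+m" (*counter)
                         : : "memory");

            return old + i;     /* new value = previous value + increment */
    }

    int main(void)
    {
            volatile int v = 40;

            printf("%d\n", add_return_sketch(2, &v));   /* prints 42 */
            return 0;
    }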
Diffstat (limited to 'arch/x86')
-rw-r--r--   arch/x86/Kconfig                |  5
-rw-r--r--   arch/x86/Kconfig.cpu            |  3
-rw-r--r--   arch/x86/include/asm/atomic.h   | 16
-rw-r--r--   arch/x86/include/asm/local.h    | 18
-rw-r--r--   arch/x86/um/Kconfig             |  2
5 files changed, 2 insertions, 42 deletions
diff --git a/arch/x86/Kconfig b/arch/x86/Kconfig
index a1a662721f8..631b298ac5b 100644
--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -171,13 +171,8 @@ config ARCH_MAY_HAVE_PC_FDC
def_bool y
depends on ISA_DMA_API
-config RWSEM_GENERIC_SPINLOCK
- def_bool y
- depends on !X86_XADD
-
config RWSEM_XCHGADD_ALGORITHM
def_bool y
- depends on X86_XADD
config GENERIC_CALIBRATE_DELAY
def_bool y
diff --git a/arch/x86/Kconfig.cpu b/arch/x86/Kconfig.cpu
index 1290a696349..52955eeeb1e 100644
--- a/arch/x86/Kconfig.cpu
+++ b/arch/x86/Kconfig.cpu
@@ -304,9 +304,6 @@ config X86_L1_CACHE_SHIFT
default "4" if MELAN || M486 || MGEODEGX1
default "5" if MWINCHIP3D || MWINCHIPC6 || MCRUSOE || MEFFICEON || MCYRIXIII || MK6 || MPENTIUMIII || MPENTIUMII || M686 || M586MMX || M586TSC || M586 || MVIAC3_2 || MGEODE_LX
-config X86_XADD
- def_bool y
-
config X86_PPRO_FENCE
bool "PentiumPro memory ordering errata workaround"
depends on M686 || M586MMX || M586TSC || M586 || M486 || MGEODEGX1
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index b6c3b821acf..722aa3b0462 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -172,23 +172,7 @@ static inline int atomic_add_negative(int i, atomic_t *v)
*/
static inline int atomic_add_return(int i, atomic_t *v)
{
-#ifdef CONFIG_M386
- int __i;
- unsigned long flags;
- if (unlikely(boot_cpu_data.x86 <= 3))
- goto no_xadd;
-#endif
- /* Modern 486+ processor */
return i + xadd(&v->counter, i);
-
-#ifdef CONFIG_M386
-no_xadd: /* Legacy 386 processor */
- raw_local_irq_save(flags);
- __i = atomic_read(v);
- atomic_set(v, i + __i);
- raw_local_irq_restore(flags);
- return i + __i;
-#endif
}
/**
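For contrast, the no_xadd path removed above emulated add-return by disabling interrupts around a plain read-modify-write, which was sufficient only because 386 kernels never ran SMP. A rough user-space analogue of that removed fallback, with hypothetical names and a pthread mutex standing in for interrupt disabling, is sketched below:

    #include <pthread.h>

    /* Hypothetical sketch of the removed 386 fallback: serialize the
     * read-modify-write externally instead of using an atomic XADD. */
    static pthread_mutex_t fallback_lock = PTHREAD_MUTEX_INITIALIZER;

    static long add_return_fallback(long i, long *counter)
    {
            long old;

            pthread_mutex_lock(&fallback_lock);    /* stands in for raw_local_irq_save() */
            old = *counter;
            *counter = old + i;
            pthread_mutex_unlock(&fallback_lock);  /* stands in for raw_local_irq_restore() */

            return old + i;
    }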
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index c8bed0da434..2d89e3980cb 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -124,27 +124,11 @@ static inline int local_add_negative(long i, local_t *l)
*/
static inline long local_add_return(long i, local_t *l)
{
- long __i;
-#ifdef CONFIG_M386
- unsigned long flags;
- if (unlikely(boot_cpu_data.x86 <= 3))
- goto no_xadd;
-#endif
- /* Modern 486+ processor */
- __i = i;
+ long __i = i;
asm volatile(_ASM_XADD "%0, %1;"
: "+r" (i), "+m" (l->a.counter)
: : "memory");
return i + __i;
-
-#ifdef CONFIG_M386
-no_xadd: /* Legacy 386 processor */
- local_irq_save(flags);
- __i = local_read(l);
- local_set(l, i + __i);
- local_irq_restore(flags);
- return i + __i;
-#endif
}
static inline long local_sub_return(long i, local_t *l)
diff --git a/arch/x86/um/Kconfig b/arch/x86/um/Kconfig
index 07611759ce3..b0c30dae9f5 100644
--- a/arch/x86/um/Kconfig
+++ b/arch/x86/um/Kconfig
@@ -31,7 +31,7 @@ config X86_64
select MODULES_USE_ELF_RELA
config RWSEM_XCHGADD_ALGORITHM
- def_bool X86_XADD && 64BIT
+ def_bool 64BIT
config RWSEM_GENERIC_SPINLOCK
def_bool !RWSEM_XCHGADD_ALGORITHM