author	Noam Camus <noamc@ezchip.com>	2012-09-10 15:13:19 +0300
committer	Vineet Gupta <vgupta@synopsys.com>	2013-09-05 10:31:11 +0530
commit	7d669a193bc0f44c20054687a3bf9ff82ad001a7 (patch)
tree	889e700ab5e07aa52f4a439f31a707bb28c74232 /arch/arc
parent	947bf103fcd2defa3bc4b7ebc6b05d0427bcde2d (diff)
ARC: Handle un-aligned user space access in BE.
Add endian awareness to un-aligned access exception handling.

Signed-off-by: Noam Camus <noamc@ezchip.com>
Signed-off-by: Vineet Gupta <vgupta@synopsys.com>
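On the load side, the change comes down to picking each byte's shift by endianness: the byte at the lowest address is the least significant byte of the word on little-endian, and the most significant on big-endian. A minimal C sketch of the same shift pattern (hypothetical helper name; plain pointer reads stand in for the kernel's ldb.ab with exception-table fixups):

#include <stdint.h>

/* Assemble a 32-bit value from four successively addressed bytes.
 * "be" selects big-endian byte order, mirroring the (BE) ? 24 : 0
 * shift selection added to get32_unaligned_check() in this patch.
 */
static uint32_t get32_unaligned(const uint8_t *p, int be)
{
	uint32_t val;

	val  = (uint32_t)p[0] << (be ? 24 : 0);
	val |= (uint32_t)p[1] << (be ? 16 : 8);
	val |= (uint32_t)p[2] << (be ? 8 : 16);
	val |= (uint32_t)p[3] << (be ? 0 : 24);
	return val;
}

The 16-bit case follows the same scheme with shifts of 8 and 0 swapped between the two byte orders.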
Diffstat (limited to 'arch/arc')
-rw-r--r--	arch/arc/kernel/unaligned.c	26
1 file changed, 19 insertions, 7 deletions
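On the store side, put16/put32_unaligned_check always emit the least significant byte first and shift right by 8; the new FIRST_BYTE_16/FIRST_BYTE_32 macros byte-swap the value up front on big-endian so that the unchanged store loop lands the bytes in native memory order. A rough C equivalent (hypothetical name; __builtin_bswap32() stands in for the ARC swape instruction, and the exception-table fixups are omitted):

#include <stdint.h>

/* Store a 32-bit value one byte at a time, lowest address first.
 * On big-endian the value is byte-reversed first (mirrors the swape
 * emitted by FIRST_BYTE_32), so the same LSB-first loop writes the
 * bytes in the machine's native order.
 */
static void put32_unaligned(uint8_t *p, uint32_t val, int be)
{
	int i;

	if (be)
		val = __builtin_bswap32(val);	/* mirrors FIRST_BYTE_32 */

	for (i = 0; i < 4; i++) {
		p[i] = val & 0xff;		/* stb.ab %1, [%2, 1] */
		val >>= 8;			/* lsr %1, %1, 8      */
	}
}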
diff --git a/arch/arc/kernel/unaligned.c b/arch/arc/kernel/unaligned.c
index c0f832f595d..28d17006074 100644
--- a/arch/arc/kernel/unaligned.c
+++ b/arch/arc/kernel/unaligned.c
@@ -16,6 +16,16 @@
#include <linux/uaccess.h>
#include <asm/disasm.h>
+#ifdef CONFIG_CPU_BIG_ENDIAN
+#define BE 1
+#define FIRST_BYTE_16 "swap %1, %1\n swape %1, %1\n"
+#define FIRST_BYTE_32 "swape %1, %1\n"
+#else
+#define BE 0
+#define FIRST_BYTE_16
+#define FIRST_BYTE_32
+#endif
+
#define __get8_unaligned_check(val, addr, err) \
__asm__( \
"1: ldb.ab %1, [%2, 1]\n" \
@@ -36,9 +46,9 @@
do { \
unsigned int err = 0, v, a = addr; \
__get8_unaligned_check(v, a, err); \
- val = v ; \
+ val = v << ((BE) ? 8 : 0); \
__get8_unaligned_check(v, a, err); \
- val |= v << 8; \
+ val |= v << ((BE) ? 0 : 8); \
if (err) \
goto fault; \
} while (0)
@@ -47,13 +57,13 @@
do { \
unsigned int err = 0, v, a = addr; \
__get8_unaligned_check(v, a, err); \
- val = v << 0; \
+ val = v << ((BE) ? 24 : 0); \
__get8_unaligned_check(v, a, err); \
- val |= v << 8; \
+ val |= v << ((BE) ? 16 : 8); \
__get8_unaligned_check(v, a, err); \
- val |= v << 16; \
+ val |= v << ((BE) ? 8 : 16); \
__get8_unaligned_check(v, a, err); \
- val |= v << 24; \
+ val |= v << ((BE) ? 0 : 24); \
if (err) \
goto fault; \
} while (0)
@@ -63,6 +73,7 @@
unsigned int err = 0, v = val, a = addr;\
\
__asm__( \
+ FIRST_BYTE_16 \
"1: stb.ab %1, [%2, 1]\n" \
" lsr %1, %1, 8\n" \
"2: stb %1, [%2]\n" \
@@ -87,8 +98,9 @@
#define put32_unaligned_check(val, addr) \
do { \
unsigned int err = 0, v = val, a = addr;\
- __asm__( \
\
+ __asm__( \
+ FIRST_BYTE_32 \
"1: stb.ab %1, [%2, 1]\n" \
" lsr %1, %1, 8\n" \
"2: stb.ab %1, [%2, 1]\n" \