Diffstat (limited to 'arch/powerpc/kernel/align.c')
-rw-r--r--  arch/powerpc/kernel/align.c   75
1 file changed, 52 insertions(+), 23 deletions(-)
diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index a5b632e52fa..b876e989220 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -642,10 +642,14 @@ static int emulate_spe(struct pt_regs *regs, unsigned int reg,
*/
static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
unsigned int areg, struct pt_regs *regs,
- unsigned int flags, unsigned int length)
+ unsigned int flags, unsigned int length,
+ unsigned int elsize)
{
char *ptr;
+ unsigned long *lptr;
int ret = 0;
+ int sw = 0;
+ int i, j;
flush_vsx_to_thread(current);
@@ -654,19 +658,35 @@ static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
else
ptr = (char *) &current->thread.vr[reg - 32];
- if (flags & ST)
- ret = __copy_to_user(addr, ptr, length);
- else {
- if (flags & SPLT){
- ret = __copy_from_user(ptr, addr, length);
- ptr += length;
+ lptr = (unsigned long *) ptr;
+
+ if (flags & SW)
+ sw = elsize-1;
+
+ for (j = 0; j < length; j += elsize) {
+ for (i = 0; i < elsize; ++i) {
+ if (flags & ST)
+ ret |= __put_user(ptr[i^sw], addr + i);
+ else
+ ret |= __get_user(ptr[i^sw], addr + i);
}
- ret |= __copy_from_user(ptr, addr, length);
+ ptr += elsize;
+ addr += elsize;
}
- if (flags & U)
- regs->gpr[areg] = regs->dar;
- if (ret)
+
+ if (!ret) {
+ if (flags & U)
+ regs->gpr[areg] = regs->dar;
+
+ /* Splat load copies the same data to top and bottom 8 bytes */
+ if (flags & SPLT)
+ lptr[1] = lptr[0];
+ /* For 8 byte loads, zero the top 8 bytes */
+ else if (!(flags & ST) && (8 == length))
+ lptr[1] = 0;
+ } else
return -EFAULT;
+
return 1;
}
#endif
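
The rewritten copy loop relies on an index-XOR trick for the byte-swapped case: with sw = elsize - 1, indexing ptr[i ^ sw] walks the bytes of each element in reverse order, while sw = 0 degenerates to plain ptr[i]. This works only because elsize is a power of two (4 or 8 here). A minimal user-space sketch of the same idiom, with ordinary memory in place of __put_user()/__get_user(); the swizzle_copy() helper is hypothetical, for illustration only:

#include <stdio.h>

/* Copy "length" bytes in "elsize"-byte elements, reversing the byte
 * order within each element when "swap" is set. Mirrors the kernel's
 * ptr[i ^ sw] indexing; swizzle_copy() itself is illustrative only.
 */
static void swizzle_copy(unsigned char *dst, const unsigned char *src,
			 unsigned int length, unsigned int elsize, int swap)
{
	unsigned int i, j;
	unsigned int sw = swap ? elsize - 1 : 0;

	for (j = 0; j < length; j += elsize) {
		for (i = 0; i < elsize; ++i)
			dst[i] = src[i ^ sw];	/* i ^ (elsize-1) reverses i */
		src += elsize;
		dst += elsize;
	}
}

int main(void)
{
	unsigned char in[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	unsigned char out[8];
	int k;

	swizzle_copy(out, in, 8, 4, 1);
	for (k = 0; k < 8; k++)
		printf("%d ", out[k]);	/* prints: 4 3 2 1 8 7 6 5 */
	printf("\n");
	return 0;
}
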
@@ -732,7 +752,7 @@ int fix_alignment(struct pt_regs *regs)
#ifdef CONFIG_SPE
if ((instr >> 26) == 0x4) {
- PPC_WARN_EMULATED(spe);
+ PPC_WARN_ALIGNMENT(spe, regs);
return emulate_spe(regs, reg, instr);
}
#endif
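
The PPC_WARN_EMULATED(x) to PPC_WARN_ALIGNMENT(x, regs) conversion, repeated at each call site below, threads the register state through to the accounting macro, so an alignment fault can be attributed to a specific context (for example as a perf software event keyed on the faulting address in regs->dar) instead of only bumping a per-type counter. A rough sketch of what such a macro could look like, assuming the perf_sw_event() interface of that era; this is an illustration, not the actual definition from asm/emulated_ops.h:

/* Assumed shape only -- not copied from the kernel headers. */
#define PPC_WARN_ALIGNMENT(type, regs)					\
	do {								\
		perf_sw_event(PERF_COUNT_SW_ALIGNMENT_FAULTS,		\
			      1, 0, (regs), (regs)->dar);		\
		PPC_WARN_EMULATED(type);				\
	} while (0)
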
@@ -767,16 +787,25 @@ int fix_alignment(struct pt_regs *regs)
#ifdef CONFIG_VSX
if ((instruction & 0xfc00003e) == 0x7c000018) {
- /* Additional register addressing bit (64 VSX vs 32 FPR/GPR */
+ unsigned int elsize;
+
+ /* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
reg |= (instruction & 0x1) << 5;
/* Simple inline decoder instead of a table */
+ /* VSX has only 8 and 16 byte memory accesses */
+ nb = 8;
if (instruction & 0x200)
nb = 16;
- else if (instruction & 0x080)
- nb = 8;
- else
- nb = 4;
+
+ /* Vector stores in little-endian mode swap individual
+ elements, so process them separately */
+ elsize = 4;
+ if (instruction & 0x80)
+ elsize = 8;
+
flags = 0;
+ if (regs->msr & MSR_LE)
+ flags |= SW;
if (instruction & 0x100)
flags |= ST;
if (instruction & 0x040)
@@ -786,15 +815,15 @@ int fix_alignment(struct pt_regs *regs)
flags |= SPLT;
nb = 8;
}
- PPC_WARN_EMULATED(vsx);
- return emulate_vsx(addr, reg, areg, regs, flags, nb);
+ PPC_WARN_ALIGNMENT(vsx, regs);
+ return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
}
#endif
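
The inline decoder above keys off individual opcode bits rather than a lookup table: 0x200 selects a 16-byte access (default 8, since VSX does only 8- and 16-byte accesses), 0x80 selects 8-byte elements (default 4), 0x100 marks a store, and 0x040 the update form that writes the effective address back to areg; MSR_LE additionally sets SW so each element is byte-swapped. A standalone sketch of that decode under assumed names (decode_vsx(), struct vsx_op, and the F_* flag values are hypothetical, not the kernel's definitions):

#define F_ST	0x01	/* store */
#define F_U	0x02	/* update form: write back effective address */
#define F_SW	0x04	/* swap bytes within each element (little-endian) */

struct vsx_op {
	unsigned int nb;	/* total access size: 8 or 16 bytes */
	unsigned int elsize;	/* element size: 4 or 8 bytes */
	unsigned int flags;
};

/* The splat (SPLT) case is decoded separately in fix_alignment()
 * and its opcode test is not visible in this hunk, so it is
 * omitted here.
 */
static struct vsx_op decode_vsx(unsigned int instr, int little_endian)
{
	struct vsx_op op = { .nb = 8, .elsize = 4, .flags = 0 };

	if (instr & 0x200)	/* 16-byte vector access */
		op.nb = 16;
	if (instr & 0x80)	/* 8-byte elements */
		op.elsize = 8;
	if (little_endian)	/* swap bytes element by element */
		op.flags |= F_SW;
	if (instr & 0x100)
		op.flags |= F_ST;
	if (instr & 0x040)
		op.flags |= F_U;
	return op;
}
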
/* A size of 0 indicates an instruction we don't support, with
* the exception of DCBZ which is handled as a special case here
*/
if (instr == DCBZ) {
- PPC_WARN_EMULATED(dcbz);
+ PPC_WARN_ALIGNMENT(dcbz, regs);
return emulate_dcbz(regs, addr);
}
if (unlikely(nb == 0))
@@ -804,7 +833,7 @@ int fix_alignment(struct pt_regs *regs)
* function
*/
if (flags & M) {
- PPC_WARN_EMULATED(multiple);
+ PPC_WARN_ALIGNMENT(multiple, regs);
return emulate_multiple(regs, addr, reg, nb,
flags, instr, swiz);
}
@@ -825,11 +854,11 @@ int fix_alignment(struct pt_regs *regs)
/* Special case for 16-byte FP loads and stores */
if (nb == 16) {
- PPC_WARN_EMULATED(fp_pair);
+ PPC_WARN_ALIGNMENT(fp_pair, regs);
return emulate_fp_pair(addr, reg, flags);
}
- PPC_WARN_EMULATED(unaligned);
+ PPC_WARN_ALIGNMENT(unaligned, regs);
/* If we are loading, get the data from user space, else
* get it from register values