author Anton Blanchard <anton@samba.org> 2015-02-10 09:51:22 +1100
committer Michael Ellerman <mpe@ellerman.id.au> 2015-03-16 18:32:11 +1100
commit c2ce6f9f3dc00daca5714ef070a9a2d4e78eb336 (patch)
tree c008f72ced83ffd950f0920566c378b4809780cf /arch/powerpc/lib/ldstfp.S
parent Linux 4.0-rc4 (diff)
powerpc: Change vrX register defines to vX to match gcc and glibc
As our various loops (copy, string, crypto etc) get more complicated, we want to share implementations between userspace (eg glibc) and the kernel. We also want to write userspace test harnesses to put in tools/testing/selftest.

One gratuitous difference between userspace and the kernel is the VMX register definitions - the kernel uses vrX whereas both gcc and glibc use vX. Change the kernel to match userspace.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
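As a rough illustration of why the rename is safe (a minimal sketch, not part of this patch; the real defines live in the kernel's ppc_asm headers and in the gcc/binutils toolchain), the VMX registers are plain numeric defines, so switching vrN to vN is purely a spelling change that lets the same source assemble in both the kernel and glibc:

    /* Hypothetical defines, for illustration only. */
    #define vr0  0          /* old kernel spelling for VMX register 0 */
    #define v0   0          /* spelling used by gcc and glibc (and the kernel after this series) */

    /* With the shared vN names, a loop body can be dropped unchanged into
     * either a kernel .S file or a userspace test harness: */
        lvx     v0,0,r4     /* load 16 bytes from the address in r4 into v0 */
        stvx    v0,0,r3     /* store v0 to the address in r3 */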
Diffstat (limited to 'arch/powerpc/lib/ldstfp.S')
-rw-r--r-- arch/powerpc/lib/ldstfp.S | 26
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/arch/powerpc/lib/ldstfp.S b/arch/powerpc/lib/ldstfp.S
index 85aec08ab234..659c7ca1f4f2 100644
--- a/arch/powerpc/lib/ldstfp.S
+++ b/arch/powerpc/lib/ldstfp.S
@@ -184,16 +184,16 @@ _GLOBAL(do_stfd)
extab 2b,3b
#ifdef CONFIG_ALTIVEC
-/* Get the contents of vrN into vr0; N is in r3. */
+/* Get the contents of vrN into v0; N is in r3. */
_GLOBAL(get_vr)
mflr r0
rlwinm r3,r3,3,0xf8
bcl 20,31,1f
- blr /* vr0 is already in vr0 */
+ blr /* v0 is already in v0 */
nop
reg = 1
.rept 31
- vor vr0,reg,reg /* assembler doesn't know vmr? */
+ vor v0,reg,reg /* assembler doesn't know vmr? */
blr
reg = reg + 1
.endr
@@ -203,16 +203,16 @@ reg = reg + 1
mtlr r0
bctr
-/* Put the contents of vr0 into vrN; N is in r3. */
+/* Put the contents of v0 into vrN; N is in r3. */
_GLOBAL(put_vr)
mflr r0
rlwinm r3,r3,3,0xf8
bcl 20,31,1f
- blr /* vr0 is already in vr0 */
+ blr /* v0 is already in v0 */
nop
reg = 1
.rept 31
- vor reg,vr0,vr0
+ vor reg,v0,v0
blr
reg = reg + 1
.endr
@@ -234,13 +234,13 @@ _GLOBAL(do_lvx)
MTMSRD(r7)
isync
beq cr7,1f
- stvx vr0,r1,r8
+ stvx v0,r1,r8
1: li r9,-EFAULT
-2: lvx vr0,0,r4
+2: lvx v0,0,r4
li r9,0
3: beq cr7,4f
bl put_vr
- lvx vr0,r1,r8
+ lvx v0,r1,r8
4: PPC_LL r0,STKFRM+PPC_LR_STKOFF(r1)
mtlr r0
MTMSRD(r6)
@@ -262,13 +262,13 @@ _GLOBAL(do_stvx)
MTMSRD(r7)
isync
beq cr7,1f
- stvx vr0,r1,r8
+ stvx v0,r1,r8
bl get_vr
1: li r9,-EFAULT
-2: stvx vr0,0,r4
+2: stvx v0,0,r4
li r9,0
3: beq cr7,4f
- lvx vr0,r1,r8
+ lvx v0,r1,r8
4: PPC_LL r0,STKFRM+PPC_LR_STKOFF(r1)
mtlr r0
MTMSRD(r6)
@@ -304,7 +304,7 @@ _GLOBAL(put_vsr)
mflr r0
rlwinm r3,r3,3,0x1f8
bcl 20,31,1f
- blr /* vr0 is already in vr0 */
+ blr /* v0 is already in v0 */
nop
reg = 1
.rept 63