author		Anton Blanchard <anton@samba.org>	2012-04-18 02:21:52 +0000
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2012-04-30 15:37:26 +1000
commit		694caf0255dcab506d1e174c96a65ab65d96e108 (patch)
tree		b5bb6facfc517062c319c742b54c4fceffa56c9b /arch/powerpc/lib
parent		6cd3209967469f6e89d329deda6bb0b4700e7b62 (diff)
powerpc: Remove CONFIG_POWER4_ONLY
Remove CONFIG_POWER4_ONLY. The option is badly named and only does two
things:
- It wraps the MMU segment table code. With feature fixups there is
little downside to compiling this in.
- It uses the newer mtocrf instruction in various assembly functions.
Instead of making this a compile-time option, just do it at runtime via
a feature fixup (see the sketch below).
Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
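For context, the runtime approach relies on powerpc's feature-fixup
machinery: both instruction forms are assembled into the kernel, and the
boot-time fixup pass patches out whichever alternative does not apply to
the running CPU, so no conditional branch remains in the hot path. The
sketch below is illustrative only, not the exact definition this commit
adds; it assumes the standard BEGIN_FTR_SECTION / FTR_SECTION_ELSE /
ALT_FTR_SECTION_END_IFSET macros from feature-fixups.h, and
CPU_FTR_HAS_MTOCRF is a hypothetical feature bit standing in for whatever
bit the real macro tests:

	/* Emit both forms; the boot-time fixup pass keeps the one that
	 * matches the CPU.  mtocrf encodes a promise that exactly one CR
	 * field is written, letting newer cores avoid serializing on the
	 * whole condition register the way a plain mtcrf can.
	 * CPU_FTR_HAS_MTOCRF is hypothetical. */
	BEGIN_FTR_SECTION
		mtocrf	0x01,r5		/* newer single-field form */
	FTR_SECTION_ELSE
		mtcrf	0x01,r5		/* legacy form for older CPUs */
	ALT_FTR_SECTION_END_IFSET(CPU_FTR_HAS_MTOCRF)

Because the expansion is now a multi-instruction alternative rather than
a bare mnemonic, PPC_MTOCRF must become a function-like macro that takes
its operands as arguments, which is why every call site in the diff below
changes from "PPC_MTOCRF 0x01,r5" to "PPC_MTOCRF(0x01,r5)".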
Diffstat (limited to 'arch/powerpc/lib')
-rw-r--r--	arch/powerpc/lib/copyuser_64.S	| 6 +++---
-rw-r--r--	arch/powerpc/lib/mem_64.S	| 6 +++---
-rw-r--r--	arch/powerpc/lib/memcpy_64.S	| 6 +++---
3 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/arch/powerpc/lib/copyuser_64.S b/arch/powerpc/lib/copyuser_64.S
index 773d38f..d73a590 100644
--- a/arch/powerpc/lib/copyuser_64.S
+++ b/arch/powerpc/lib/copyuser_64.S
@@ -30,7 +30,7 @@ _GLOBAL(__copy_tofrom_user_base)
 	dcbt	0,r4
 	beq	.Lcopy_page_4K
 	andi.	r6,r6,7
-	PPC_MTOCRF	0x01,r5
+	PPC_MTOCRF(0x01,r5)
 	blt	cr1,.Lshort_copy
 /* Below we want to nop out the bne if we're on a CPU that has the
  * CPU_FTR_UNALIGNED_LD_STD bit set and the CPU_FTR_CP_USE_DCBTZ bit
@@ -186,7 +186,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr

 .Ldst_unaligned:
-	PPC_MTOCRF	0x01,r6		/* put #bytes to 8B bdry into cr7 */
+	PPC_MTOCRF(0x01,r6)	/* put #bytes to 8B bdry into cr7 */
 	subf	r5,r6,r5
 	li	r7,0
 	cmpldi	cr1,r5,16
@@ -201,7 +201,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:	bf	cr7*4+1,3f
 37:	lwzx	r0,r7,r4
 83:	stwx	r0,r7,r3
-3:	PPC_MTOCRF	0x01,r5
+3:	PPC_MTOCRF(0x01,r5)
 	add	r4,r6,r4
 	add	r3,r6,r3
 	b	.Ldst_aligned
diff --git a/arch/powerpc/lib/mem_64.S b/arch/powerpc/lib/mem_64.S
index 11ce045..f4fcb0b 100644
--- a/arch/powerpc/lib/mem_64.S
+++ b/arch/powerpc/lib/mem_64.S
@@ -19,7 +19,7 @@ _GLOBAL(memset)
 	rlwimi	r4,r4,16,0,15
 	cmplw	cr1,r5,r0		/* do we get that far? */
 	rldimi	r4,r4,32,0
-	PPC_MTOCRF	1,r0
+	PPC_MTOCRF(1,r0)
 	mr	r6,r3
 	blt	cr1,8f
 	beq+	3f			/* if already 8-byte aligned */
@@ -49,7 +49,7 @@ _GLOBAL(memset)
 	bdnz	4b
 5:	srwi.	r0,r5,3
 	clrlwi	r5,r5,29
-	PPC_MTOCRF	1,r0
+	PPC_MTOCRF(1,r0)
 	beq	8f
 	bf	29,6f
 	std	r4,0(r6)
@@ -65,7 +65,7 @@ _GLOBAL(memset)
 	std	r4,0(r6)
 	addi	r6,r6,8
 8:	cmpwi	r5,0
-	PPC_MTOCRF	1,r5
+	PPC_MTOCRF(1,r5)
 	beqlr+
 	bf	29,9f
 	stw	r4,0(r6)
diff --git a/arch/powerpc/lib/memcpy_64.S b/arch/powerpc/lib/memcpy_64.S
index e178922..82fea39 100644
--- a/arch/powerpc/lib/memcpy_64.S
+++ b/arch/powerpc/lib/memcpy_64.S
@@ -12,7 +12,7 @@
 	.align	7
 _GLOBAL(memcpy)
 	std	r3,48(r1)	/* save destination pointer for return value */
-	PPC_MTOCRF	0x01,r5
+	PPC_MTOCRF(0x01,r5)
 	cmpldi	cr1,r5,16
 	neg	r6,r3		# LS 3 bits = # bytes to 8-byte dest bdry
 	andi.	r6,r6,7
@@ -154,7 +154,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr

 .Ldst_unaligned:
-	PPC_MTOCRF	0x01,r6	# put #bytes to 8B bdry into cr7
+	PPC_MTOCRF(0x01,r6)	# put #bytes to 8B bdry into cr7
 	subf	r5,r6,r5
 	li	r7,0
 	cmpldi	cr1,r5,16
@@ -169,7 +169,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 2:	bf	cr7*4+1,3f
 	lwzx	r0,r7,r4
 	stwx	r0,r7,r3
-3:	PPC_MTOCRF	0x01,r5
+3:	PPC_MTOCRF(0x01,r5)
 	add	r4,r6,r4
 	add	r3,r6,r3
 	b	.Ldst_aligned