-rw-r--r--  arch/powerpc/kernel/cputable.c           | 11
-rw-r--r--  arch/powerpc/kernel/setup_32.c           |  2
-rw-r--r--  arch/powerpc/kernel/setup_64.c           |  4
-rw-r--r--  arch/powerpc/kernel/vdso.c               | 43
-rw-r--r--  arch/powerpc/kernel/vdso32/vdso32.lds.S  | 12
-rw-r--r--  arch/powerpc/kernel/vdso64/vdso64.lds.S  | 10
-rw-r--r--  arch/ppc/kernel/setup.c                  |  2
-rw-r--r--  include/asm-powerpc/asm-compat.h         | 52
-rw-r--r--  include/asm-powerpc/cputable.h           | 31
-rw-r--r--  include/asm-powerpc/firmware.h           | 15
-rw-r--r--  include/asm-powerpc/timex.h              |  8
11 files changed, 141 insertions(+), 49 deletions(-)
diff --git a/arch/powerpc/kernel/cputable.c b/arch/powerpc/kernel/cputable.c
index 6fdfaa4..bfd499e 100644
--- a/arch/powerpc/kernel/cputable.c
+++ b/arch/powerpc/kernel/cputable.c
@@ -1202,14 +1202,13 @@ struct cpu_spec *identify_cpu(unsigned long offset)
return NULL;
}
-void do_feature_fixups(unsigned long offset, unsigned long value,
- void *fixup_start, void *fixup_end)
+void do_feature_fixups(unsigned long value, void *fixup_start, void *fixup_end)
{
struct fixup_entry {
unsigned long mask;
unsigned long value;
- unsigned int *start;
- unsigned int *end;
+ long start_off;
+ long end_off;
} *fcur, *fend;
fcur = fixup_start;
@@ -1224,8 +1223,8 @@ void do_feature_fixups(unsigned long offset, unsigned long value,
/* These PTRRELOCs will disappear once the new scheme for
* modules and vdso is implemented
*/
- pstart = PTRRELOC(fcur->start);
- pend = PTRRELOC(fcur->end);
+ pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
+ pend = ((unsigned int *)fcur) + (fcur->end_off / 4);
for (p = pstart; p < pend; p++) {
*p = 0x60000000u;
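
The point of the cputable.c change above is that each fixup entry now records where its code lives as an offset from the entry itself, rather than as an absolute pointer, so the table keeps working after the vDSO (or the early-boot kernel) is relocated. A minimal standalone sketch of the resulting table walk, with names mirroring the hunk above and the match test taken from the unchanged part of do_feature_fixups() (illustrative only, not the exact kernel function):

struct fixup_entry {
	unsigned long mask;
	unsigned long value;
	long start_off;		/* entry -> first instruction of the section */
	long end_off;		/* entry -> end of the section */
};

static void apply_fixups(unsigned long value, void *fixup_start, void *fixup_end)
{
	struct fixup_entry *fcur = fixup_start, *fend = fixup_end;

	for (; fcur < fend; fcur++) {
		unsigned int *pstart, *pend, *p;

		/* keep the code when the feature condition matches */
		if ((value & fcur->mask) == fcur->value)
			continue;

		/* offsets are relative to the entry itself, in bytes */
		pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
		pend   = ((unsigned int *)fcur) + (fcur->end_off / 4);

		/* overwrite the guarded section with PowerPC nops (ori 0,0,0) */
		for (p = pstart; p < pend; p++)
			*p = 0x60000000u;
	}
}
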
diff --git a/arch/powerpc/kernel/setup_32.c b/arch/powerpc/kernel/setup_32.c
index 769e511..a4c2964 100644
--- a/arch/powerpc/kernel/setup_32.c
+++ b/arch/powerpc/kernel/setup_32.c
@@ -103,7 +103,7 @@ unsigned long __init early_init(unsigned long dt_ptr)
*/
spec = identify_cpu(offset);
- do_feature_fixups(offset, spec->cpu_features,
+ do_feature_fixups(spec->cpu_features,
PTRRELOC(&__start___ftr_fixup),
PTRRELOC(&__stop___ftr_fixup));
diff --git a/arch/powerpc/kernel/setup_64.c b/arch/powerpc/kernel/setup_64.c
index 1969b56..1627896 100644
--- a/arch/powerpc/kernel/setup_64.c
+++ b/arch/powerpc/kernel/setup_64.c
@@ -354,9 +354,9 @@ void __init setup_system(void)
/* Apply the CPU-specific and firmware-specific fixups to kernel
* text (nop out sections not relevant to this CPU or this firmware)
*/
- do_feature_fixups(0, cur_cpu_spec->cpu_features,
+ do_feature_fixups(cur_cpu_spec->cpu_features,
&__start___ftr_fixup, &__stop___ftr_fixup);
- do_feature_fixups(0, powerpc_firmware_features,
+ do_feature_fixups(powerpc_firmware_features,
&__start___fw_ftr_fixup, &__stop___fw_ftr_fixup);
/*
diff --git a/arch/powerpc/kernel/vdso.c b/arch/powerpc/kernel/vdso.c
index 1a7e19c..c913ad5 100644
--- a/arch/powerpc/kernel/vdso.c
+++ b/arch/powerpc/kernel/vdso.c
@@ -36,6 +36,8 @@
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
+#include "setup.h"
+
#undef DEBUG
#ifdef DEBUG
@@ -586,6 +588,43 @@ static __init int vdso_fixup_datapage(struct lib32_elfinfo *v32,
return 0;
}
+
+static __init int vdso_fixup_features(struct lib32_elfinfo *v32,
+ struct lib64_elfinfo *v64)
+{
+ void *start32;
+ unsigned long size32;
+
+#ifdef CONFIG_PPC64
+ void *start64;
+ unsigned long size64;
+
+ start64 = find_section64(v64->hdr, "__ftr_fixup", &size64);
+ if (start64)
+ do_feature_fixups(cur_cpu_spec->cpu_features,
+ start64, start64 + size64);
+
+ start64 = find_section64(v64->hdr, "__fw_ftr_fixup", &size64);
+ if (start64)
+ do_feature_fixups(powerpc_firmware_features,
+ start64, start64 + size64);
+#endif /* CONFIG_PPC64 */
+
+ start32 = find_section32(v32->hdr, "__ftr_fixup", &size32);
+ if (start32)
+ do_feature_fixups(cur_cpu_spec->cpu_features,
+ start32, start32 + size32);
+
+#ifdef CONFIG_PPC64
+ start32 = find_section32(v32->hdr, "__fw_ftr_fixup", &size32);
+ if (start32)
+ do_feature_fixups(powerpc_firmware_features,
+ start32, start32 + size32);
+#endif /* CONFIG_PPC64 */
+
+ return 0;
+}
+
static __init int vdso_fixup_alt_funcs(struct lib32_elfinfo *v32,
struct lib64_elfinfo *v64)
{
@@ -634,6 +673,9 @@ static __init int vdso_setup(void)
if (vdso_fixup_datapage(&v32, &v64))
return -1;
+ if (vdso_fixup_features(&v32, &v64))
+ return -1;
+
if (vdso_fixup_alt_funcs(&v32, &v64))
return -1;
@@ -714,6 +756,7 @@ void __init vdso_init(void)
* Set up the syscall map in the vDSO
*/
vdso_setup_syscall_map();
+
/*
* Initialize the vDSO images in memory, that is do necessary
* fixups of vDSO symbols, locate trampolines, etc...
diff --git a/arch/powerpc/kernel/vdso32/vdso32.lds.S b/arch/powerpc/kernel/vdso32/vdso32.lds.S
index 6187af2..26e138c 100644
--- a/arch/powerpc/kernel/vdso32/vdso32.lds.S
+++ b/arch/powerpc/kernel/vdso32/vdso32.lds.S
@@ -32,6 +32,18 @@ SECTIONS
PROVIDE (_etext = .);
PROVIDE (etext = .);
+ . = ALIGN(8);
+ __ftr_fixup : {
+ *(__ftr_fixup)
+ }
+
+#ifdef CONFIG_PPC64
+ . = ALIGN(8);
+ __fw_ftr_fixup : {
+ *(__fw_ftr_fixup)
+ }
+#endif
+
/* Other stuff is appended to the text segment: */
.rodata : { *(.rodata .rodata.* .gnu.linkonce.r.*) }
.rodata1 : { *(.rodata1) }
diff --git a/arch/powerpc/kernel/vdso64/vdso64.lds.S b/arch/powerpc/kernel/vdso64/vdso64.lds.S
index 4a2b6dc..2d70f35 100644
--- a/arch/powerpc/kernel/vdso64/vdso64.lds.S
+++ b/arch/powerpc/kernel/vdso64/vdso64.lds.S
@@ -31,6 +31,16 @@ SECTIONS
PROVIDE (_etext = .);
PROVIDE (etext = .);
+ . = ALIGN(8);
+ __ftr_fixup : {
+ *(__ftr_fixup)
+ }
+
+ . = ALIGN(8);
+ __fw_ftr_fixup : {
+ *(__fw_ftr_fixup)
+ }
+
/* Other stuff is appended to the text segment: */
.rodata : { *(.rodata .rodata.* .gnu.linkonce.r.*) }
.rodata1 : { *(.rodata1) }
diff --git a/arch/ppc/kernel/setup.c b/arch/ppc/kernel/setup.c
index 41a640f..27faeca 100644
--- a/arch/ppc/kernel/setup.c
+++ b/arch/ppc/kernel/setup.c
@@ -314,7 +314,7 @@ early_init(int r3, int r4, int r5)
* that depend on which cpu we have.
*/
spec = identify_cpu(offset);
- do_feature_fixups(offset, spec->cpu_features,
+ do_feature_fixups(spec->cpu_features,
PTRRELOC(&__start___ftr_fixup),
PTRRELOC(&__stop___ftr_fixup));
diff --git a/include/asm-powerpc/asm-compat.h b/include/asm-powerpc/asm-compat.h
index 8e64be0..c89bd58 100644
--- a/include/asm-powerpc/asm-compat.h
+++ b/include/asm-powerpc/asm-compat.h
@@ -14,6 +14,58 @@
# define ASM_CONST(x) __ASM_CONST(x)
#endif
+
+/*
+ * Feature section common macros
+ *
+ * Note that the entries now contain offsets between the table entry
+ * and the code rather than absolute code pointers in order to be
+ * usable with the vdso shared library. There is also an assumption
+ * that values will be negative, that is, the fixup table has to be
+ * located after the code it fixes up.
+ */
+#ifdef CONFIG_PPC64
+#ifdef __powerpc64__
+/* 64 bits kernel, 64 bits code */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
+99: \
+ .section sect,"a"; \
+ .align 3; \
+98: \
+ .llong msk; \
+ .llong val; \
+ .llong label##b-98b; \
+ .llong 99b-98b; \
+ .previous
+#else /* __powerpc64__ */
+/* 64 bits kernel, 32 bits code (ie. vdso32) */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
+99: \
+ .section sect,"a"; \
+ .align 3; \
+98: \
+ .llong msk; \
+ .llong val; \
+ .long 0xffffffff; \
+ .long label##b-98b; \
+ .long 0xffffffff; \
+ .long 99b-98b; \
+ .previous
+#endif /* !__powerpc64__ */
+#else /* CONFIG_PPC64 */
+/* 32 bits kernel, 32 bits code */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect) \
+99: \
+ .section sect,"a"; \
+ .align 2; \
+98: \
+ .long msk; \
+ .long val; \
+ .long label##b-98b; \
+ .long 99b-98b; \
+ .previous
+#endif /* !CONFIG_PPC64 */
+
#ifdef __powerpc64__
/* operations for longs and pointers */
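
In the 64-bit kernel / 32-bit vDSO variant above, the 32-bit object cannot express the 64-bit offset directly, so each offset field is hand-built as a 0xffffffff high word followed by the 32-bit difference. Because the fixup table is placed after the code it patches, that difference is always negative (as the comment notes), and on big-endian ppc64 the word pair is exactly the sign-extended 64-bit offset that do_feature_fixups() reads as a long. A small standalone check of that encoding (assumes a 64-bit long, as on ppc64; the -64 offset is made up for illustration):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int32_t off32 = -64;	/* stands in for label##b - 98b */
	uint64_t words = ((uint64_t)0xffffffffu << 32) | (uint32_t)off32;

	/* read the two .longs back as one signed 64-bit field */
	long start_off = (long)words;
	printf("start_off = %ld\n", start_off);	/* prints -64 */
	return 0;
}
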
diff --git a/include/asm-powerpc/cputable.h b/include/asm-powerpc/cputable.h
index 65faf32..02e52d6 100644
--- a/include/asm-powerpc/cputable.h
+++ b/include/asm-powerpc/cputable.h
@@ -92,8 +92,8 @@ extern struct cpu_spec *cur_cpu_spec;
extern unsigned int __start___ftr_fixup, __stop___ftr_fixup;
extern struct cpu_spec *identify_cpu(unsigned long offset);
-extern void do_feature_fixups(unsigned long offset, unsigned long value,
- void *fixup_start, void *fixup_end);
+extern void do_feature_fixups(unsigned long value, void *fixup_start,
+ void *fixup_end);
#endif /* __ASSEMBLY__ */
@@ -435,32 +435,11 @@ static inline int cpu_has_feature(unsigned long feature)
#ifdef __ASSEMBLY__
#define BEGIN_FTR_SECTION_NESTED(label) label:
-#define BEGIN_FTR_SECTION BEGIN_FTR_SECTION_NESTED(98)
-
-#ifndef __powerpc64__
-#define END_FTR_SECTION_NESTED(msk, val, label) \
-99: \
- .section __ftr_fixup,"a"; \
- .align 2; \
- .long msk; \
- .long val; \
- .long label##b; \
- .long 99b; \
- .previous
-#else /* __powerpc64__ */
+#define BEGIN_FTR_SECTION BEGIN_FTR_SECTION_NESTED(97)
#define END_FTR_SECTION_NESTED(msk, val, label) \
-99: \
- .section __ftr_fixup,"a"; \
- .align 3; \
- .llong msk; \
- .llong val; \
- .llong label##b; \
- .llong 99b; \
- .previous
-#endif /* __powerpc64__ */
-
+ MAKE_FTR_SECTION_ENTRY(msk, val, label, __ftr_fixup)
#define END_FTR_SECTION(msk, val) \
- END_FTR_SECTION_NESTED(msk, val, 98)
+ END_FTR_SECTION_NESTED(msk, val, 97)
#define END_FTR_SECTION_IFSET(msk) END_FTR_SECTION((msk), (msk))
#define END_FTR_SECTION_IFCLR(msk) END_FTR_SECTION((msk), 0)
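
With these macros, a 32-bit kernel entry in __ftr_fixup has the same shape as the struct that do_feature_fixups() walks, and the IFSET/IFCLR wrappers only choose the (mask, value) pair deciding whether the guarded code survives. A sketch of that correspondence (field names mirror cputable.c; layout follows the .align 2 / .long variant of MAKE_FTR_SECTION_ENTRY above):

/* one __ftr_fixup entry on a 32-bit kernel */
struct fixup_entry32 {
	unsigned long mask;	/* msk: feature bits being tested */
	unsigned long value;	/* val: required value of those bits */
	long start_off;		/* label##b - 98b, start of guarded code */
	long end_off;		/* 99b - 98b, end of guarded code */
};

/*
 * END_FTR_SECTION_IFSET(m) sets mask = m, value = m:
 *	code is kept only when (cpu_features & m) == m.
 * END_FTR_SECTION_IFCLR(m) sets mask = m, value = 0:
 *	code is kept only when (cpu_features & m) == 0.
 * When the test fails, do_feature_fixups() replaces the section with nops.
 */
static int ftr_section_kept(unsigned long cpu_features,
			    const struct fixup_entry32 *e)
{
	return (cpu_features & e->mask) == e->value;
}
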
diff --git a/include/asm-powerpc/firmware.h b/include/asm-powerpc/firmware.h
index c16e0a6..fdf9aff 100644
--- a/include/asm-powerpc/firmware.h
+++ b/include/asm-powerpc/firmware.h
@@ -100,17 +100,12 @@ extern unsigned int __start___fw_ftr_fixup, __stop___fw_ftr_fixup;
#else /* __ASSEMBLY__ */
-#define BEGIN_FW_FTR_SECTION 96:
-
+#define BEGIN_FW_FTR_SECTION_NESTED(label) label:
+#define BEGIN_FW_FTR_SECTION BEGIN_FW_FTR_SECTION_NESTED(97)
+#define END_FW_FTR_SECTION_NESTED(msk, val, label) \
+ MAKE_FTR_SECTION_ENTRY(msk, val, label, __fw_ftr_fixup)
#define END_FW_FTR_SECTION(msk, val) \
-97: \
- .section __fw_ftr_fixup,"a"; \
- .align 3; \
- .llong msk; \
- .llong val; \
- .llong 96b; \
- .llong 97b; \
- .previous
+ END_FW_FTR_SECTION_NESTED(msk, val, 97)
#define END_FW_FTR_SECTION_IFSET(msk) END_FW_FTR_SECTION((msk), (msk))
#define END_FW_FTR_SECTION_IFCLR(msk) END_FW_FTR_SECTION((msk), 0)
diff --git a/include/asm-powerpc/timex.h b/include/asm-powerpc/timex.h
index 3b9a8e7..e3f08cf 100644
--- a/include/asm-powerpc/timex.h
+++ b/include/asm-powerpc/timex.h
@@ -30,13 +30,15 @@ static inline cycles_t get_cycles(void)
ret = 0;
__asm__ __volatile__(
- "98: mftb %0\n"
+ "97: mftb %0\n"
"99:\n"
".section __ftr_fixup,\"a\"\n"
+ ".align 2\n"
+ "98:\n"
" .long %1\n"
" .long 0\n"
- " .long 98b\n"
- " .long 99b\n"
+ " .long 97b-98b\n"
+ " .long 99b-98b\n"
".previous"
: "=r" (ret) : "i" (CPU_FTR_601));
#endif