-rw-r--r--	arch/arm/kernel/setup.c    |  3
-rw-r--r--	arch/arm/mm/proc-arm926.S  |  2
-rw-r--r--	arch/arm/mm/proc-v6.S      |  7
-rw-r--r--	arch/arm/vfp/vfpmodule.c   | 26
-rw-r--r--	include/asm-arm/system.h   | 43
5 files changed, 57 insertions(+), 24 deletions(-)
diff --git a/arch/arm/kernel/setup.c b/arch/arm/kernel/setup.c
index 238dd9b..cf2bd42 100644
--- a/arch/arm/kernel/setup.c
+++ b/arch/arm/kernel/setup.c
@@ -354,9 +354,6 @@ static void __init setup_processor(void)
 #ifndef CONFIG_ARM_THUMB
 	elf_hwcap &= ~HWCAP_THUMB;
 #endif
-#ifndef CONFIG_VFP
-	elf_hwcap &= ~HWCAP_VFP;
-#endif
 
 	cpu_proc_init();
 }
diff --git a/arch/arm/mm/proc-arm926.S b/arch/arm/mm/proc-arm926.S
index 8628ed2..080efac 100644
--- a/arch/arm/mm/proc-arm926.S
+++ b/arch/arm/mm/proc-arm926.S
@@ -480,7 +480,7 @@ __arm926_proc_info:
 		b	__arm926_setup
 		.long	cpu_arch_name
 		.long	cpu_elf_name
-		.long	HWCAP_SWP|HWCAP_HALF|HWCAP_THUMB|HWCAP_FAST_MULT|HWCAP_VFP|HWCAP_EDSP|HWCAP_JAVA
+		.long	HWCAP_SWP|HWCAP_HALF|HWCAP_THUMB|HWCAP_FAST_MULT|HWCAP_EDSP|HWCAP_JAVA
 		.long	cpu_arm926_name
 		.long	arm926_processor_functions
 		.long	v4wbi_tlb_fns
diff --git a/arch/arm/mm/proc-v6.S b/arch/arm/mm/proc-v6.S
index b440c8a..c40baf8 100644
--- a/arch/arm/mm/proc-v6.S
+++ b/arch/arm/mm/proc-v6.S
@@ -207,11 +207,6 @@ __v6_setup:
 #endif
 	mcr	p15, 0, r4, c2, c0, 1		@ load TTB1
 #endif /* CONFIG_MMU */
-#ifdef CONFIG_VFP
-	mrc	p15, 0, r0, c1, c0, 2
-	orr	r0, r0, #(0xf << 20)
-	mcr	p15, 0, r0, c1, c0, 2		@ Enable full access to VFP
-#endif
 	adr	r5, v6_crval
 	ldmia	r5, {r5, r6}
 	mrc	p15, 0, r0, c1, c0, 0		@ read control register
@@ -273,7 +268,7 @@ __v6_proc_info:
 	b	__v6_setup
 	.long	cpu_arch_name
 	.long	cpu_elf_name
-	.long	HWCAP_SWP|HWCAP_HALF|HWCAP_THUMB|HWCAP_FAST_MULT|HWCAP_VFP|HWCAP_EDSP|HWCAP_JAVA
+	.long	HWCAP_SWP|HWCAP_HALF|HWCAP_THUMB|HWCAP_FAST_MULT|HWCAP_EDSP|HWCAP_JAVA
	.long	cpu_v6_name
 	.long	v6_processor_functions
 	.long	v6wbi_tlb_fns
diff --git a/arch/arm/vfp/vfpmodule.c b/arch/arm/vfp/vfpmodule.c
index f08eafb..e26cc1f 100644
--- a/arch/arm/vfp/vfpmodule.c
+++ b/arch/arm/vfp/vfpmodule.c
@@ -263,13 +263,24 @@ void VFP9_bounce(u32 trigger, u32 fpexc, struct pt_regs *regs)
 	if (exceptions)
 		vfp_raise_exceptions(exceptions, trigger, orig_fpscr, regs);
 }
-
+
 /*
  * VFP support code initialisation.
  */
 static int __init vfp_init(void)
 {
 	unsigned int vfpsid;
+	unsigned int cpu_arch = cpu_architecture();
+	u32 access = 0;
+
+	if (cpu_arch >= CPU_ARCH_ARMv6) {
+		access = get_copro_access();
+
+		/*
+		 * Enable full access to VFP (cp10 and cp11)
+		 */
+		set_copro_access(access | CPACC_FULL(10) | CPACC_FULL(11));
+	}
 
 	/*
 	 * First check that there is a VFP that we can use.
@@ -281,6 +292,12 @@ static int __init vfp_init(void)
 	printk(KERN_INFO "VFP support v0.3: ");
 	if (VFP_arch) {
 		printk("not present\n");
+
+		/*
+		 * Restore the copro access register.
+		 */
+		if (cpu_arch >= CPU_ARCH_ARMv6)
+			set_copro_access(access);
 	} else if (vfpsid & FPSID_NODOUBLE) {
 		printk("no double precision support\n");
 	} else {
@@ -291,9 +308,16 @@ static int __init vfp_init(void)
 			(vfpsid & FPSID_PART_MASK) >> FPSID_PART_BIT,
 			(vfpsid & FPSID_VARIANT_MASK) >> FPSID_VARIANT_BIT,
 			(vfpsid & FPSID_REV_MASK) >> FPSID_REV_BIT);
+
 		vfp_vector = vfp_support_entry;
 
 		thread_register_notifier(&vfp_notifier_block);
+
+		/*
+		 * We detected VFP, and the support code is
+		 * in place; report VFP support to userspace.
+		 */
+		elf_hwcap |= HWCAP_VFP;
 	}
 	return 0;
 }
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index f05fbe3..f60facc 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -139,19 +139,36 @@ static inline int cpu_is_xsc3(void)
 #define	cpu_is_xscale()	1
 #endif
 
-#define set_cr(x)					\
-	__asm__ __volatile__(				\
-	"mcr	p15, 0, %0, c1, c0, 0	@ set CR"	\
-	: : "r" (x) : "cc")
-
-#define get_cr()					\
-	({						\
-	unsigned int __val;				\
-	__asm__ __volatile__(				\
-	"mrc	p15, 0, %0, c1, c0, 0	@ get CR"	\
-	: "=r" (__val) : : "cc");			\
-	__val;						\
-	})
+static inline unsigned int get_cr(void)
+{
+	unsigned int val;
+	asm("mrc p15, 0, %0, c1, c0, 0	@ get CR" : "=r" (val) : : "cc");
+	return val;
+}
+
+static inline void set_cr(unsigned int val)
+{
+	asm volatile("mcr p15, 0, %0, c1, c0, 0	@ set CR"
+	  : : "r" (val) : "cc");
+}
+
+#define CPACC_FULL(n)		(3 << (n * 2))
+#define CPACC_SVC(n)		(1 << (n * 2))
+#define CPACC_DISABLE(n)	(0 << (n * 2))
+
+static inline unsigned int get_copro_access(void)
+{
+	unsigned int val;
+	asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access"
+	  : "=r" (val) : : "cc");
+	return val;
+}
+
+static inline void set_copro_access(unsigned int val)
+{
+	asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access"
+	  : : "r" (val) : "cc");
+}
 
 extern unsigned long cr_no_alignment;	/* defined in entry-armv.S */
 extern unsigned long cr_alignment;	/* defined in entry-armv.S */
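
Note on the copro access change: CPACC_FULL(n) from the system.h hunk sets both access-control bits for coprocessor n in the CP15 Coprocessor Access Control register (c1, c0, 2). For VFP, CPACC_FULL(10) | CPACC_FULL(11) = (3 << 20) | (3 << 22) = 0xf << 20, which is exactly the constant the removed __v6_setup assembly OR'd in; the enable simply moves from the per-CPU setup code into vfp_init(), where it can also be undone if no VFP is found. A minimal C sketch of that pattern, built only from the helpers added in this patch (the function name vfp_enable_copro_sketch is hypothetical, purely for illustration):

/*
 * Sketch: grant full (user + kernel) access to the VFP coprocessors
 * cp10 and cp11 using the helpers from include/asm-arm/system.h, and
 * return the previous value so the caller can restore it if VFP turns
 * out to be absent (as vfp_init() does in the hunk above).
 */
static u32 vfp_enable_copro_sketch(void)
{
	u32 access = get_copro_access();

	/* (3 << 20) | (3 << 22) == 0xf << 20, matching the old assembly */
	set_copro_access(access | CPACC_FULL(10) | CPACC_FULL(11));

	return access;
}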