summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--  lib/libc/powerpc64/sys/cerror.S    2
-rw-r--r--  sys/powerpc/aim/locore64.S         4
-rw-r--r--  sys/powerpc/aim/swtch64.S          9
-rw-r--r--  sys/powerpc/aim/trap_subr64.S      2
-rw-r--r--  sys/powerpc/include/asm.h          24
-rw-r--r--  sys/powerpc/include/profile.h      1
-rw-r--r--  sys/powerpc/ofw/ofwcall64.S        4
-rw-r--r--  sys/powerpc/powerpc/atomic.S       16
-rw-r--r--  sys/powerpc/powerpc/setjmp.S       4
9 files changed, 44 insertions(+), 22 deletions(-)
diff --git a/lib/libc/powerpc64/sys/cerror.S b/lib/libc/powerpc64/sys/cerror.S
index 9bf33bf..515d7e5 100644
--- a/lib/libc/powerpc64/sys/cerror.S
+++ b/lib/libc/powerpc64/sys/cerror.S
@@ -38,7 +38,7 @@ __FBSDID("$FreeBSD$");
* programs and the initial threaded in threaded programs,
* it returns a pointer to the global errno variable.
*/
-ENTRY(HIDENAME(cerror))
+ENTRY_NOPROF(HIDENAME(cerror))
mflr %r0
std %r0,16(%r1) /* save lr */
stdu %r1,-64(%r1) /* allocate new stack frame */
diff --git a/sys/powerpc/aim/locore64.S b/sys/powerpc/aim/locore64.S
index 64e4e62..83d9048 100644
--- a/sys/powerpc/aim/locore64.S
+++ b/sys/powerpc/aim/locore64.S
@@ -115,7 +115,7 @@ kernel_text:
* segment!
*/
.text
-ASENTRY(__start)
+ASENTRY_NOPROF(__start)
li 8,0
li 9,0x100
mtctr 9
@@ -202,7 +202,7 @@ tocbase:
* or the (currently used) C code optimized, so it doesn't use any non-volatile
* registers.
*/
-ASENTRY(setfault)
+ASENTRY_NOPROF(setfault)
mflr 0
mfcr 12
mfsprg 4,0
diff --git a/sys/powerpc/aim/swtch64.S b/sys/powerpc/aim/swtch64.S
index c5cdcbc..489ec40 100644
--- a/sys/powerpc/aim/swtch64.S
+++ b/sys/powerpc/aim/swtch64.S
@@ -68,7 +68,7 @@
/*
* void cpu_throw(struct thread *old, struct thread *new)
*/
-ENTRY(cpu_throw)
+ENTRY_NOPROF(cpu_throw)
mr %r13, %r4
b cpu_switchin
@@ -79,7 +79,7 @@ ENTRY(cpu_throw)
*
* Switch to a new thread saving the current state in the old thread.
*/
-ENTRY(cpu_switch)
+ENTRY_NOPROF(cpu_switch)
ld %r6,TD_PCB(%r3) /* Get the old thread's PCB ptr */
std %r12,PCB_CONTEXT(%r6) /* Save the non-volatile GP regs.
These can now be used for scratch */
@@ -237,7 +237,7 @@ blocked_loop:
* savectx(pcb)
* Update pcb, saving current processor state
*/
-ENTRY(savectx)
+ENTRY_NOPROF(savectx)
std %r12,PCB_CONTEXT(%r3) /* Save the non-volatile GP regs. */
std %r13,PCB_CONTEXT+1*8(%r3)
std %r14,PCB_CONTEXT+2*8(%r3)
@@ -268,7 +268,8 @@ ENTRY(savectx)
* fork_trampoline()
* Set up the return from cpu_fork()
*/
-ENTRY(fork_trampoline)
+
+ENTRY_NOPROF(fork_trampoline)
ld %r3,CF_FUNC(%r1)
ld %r4,CF_ARG0(%r1)
ld %r5,CF_ARG1(%r1)
diff --git a/sys/powerpc/aim/trap_subr64.S b/sys/powerpc/aim/trap_subr64.S
index 82935e7..8243dc7 100644
--- a/sys/powerpc/aim/trap_subr64.S
+++ b/sys/powerpc/aim/trap_subr64.S
@@ -703,7 +703,7 @@ CNAME(asttrapexit):
/*
* Deliberate entry to dbtrap
*/
-ASENTRY(breakpoint)
+ASENTRY_NOPROF(breakpoint)
mtsprg1 %r1
mfmsr %r3
mtsrr1 %r3
diff --git a/sys/powerpc/include/asm.h b/sys/powerpc/include/asm.h
index 0c6b6a5..bfc939b 100644
--- a/sys/powerpc/include/asm.h
+++ b/sys/powerpc/include/asm.h
@@ -76,15 +76,35 @@
#endif
#if defined(PROF) || (defined(_KERNEL) && defined(GPROF))
-# define _PROF_PROLOGUE mflr 0; stw 0,4(1); bl _mcount
+# ifdef __powerpc64__
+# define _PROF_PROLOGUE mflr 0; \
+ std 3,48(1); \
+ std 4,56(1); \
+ std 5,64(1); \
+ std 0,16(1); \
+ stdu 1,-112(1); \
+ bl _mcount; \
+ nop; \
+ ld 0,112+16(1); \
+ ld 3,112+48(1); \
+ ld 4,112+56(1); \
+ ld 5,112+64(1); \
+ mtlr 0; \
+ addi 1,1,112
+# else
+# define _PROF_PROLOGUE mflr 0; stw 0,4(1); bl _mcount
+# endif
#else
# define _PROF_PROLOGUE
#endif
-#define ENTRY(y) _ENTRY(CNAME(y)); _PROF_PROLOGUE
#define ASENTRY(y) _ENTRY(ASMNAME(y)); _PROF_PROLOGUE
+#define ENTRY(y) _ENTRY(CNAME(y)); _PROF_PROLOGUE
#define GLOBAL(y) _GLOBAL(CNAME(y))
+#define ASENTRY_NOPROF(y) _ENTRY(ASMNAME(y))
+#define ENTRY_NOPROF(y) _ENTRY(CNAME(y))
+
#define ASMSTR .asciz
#define RCSID(x) .text; .asciz x
diff --git a/sys/powerpc/include/profile.h b/sys/powerpc/include/profile.h
index a354e38..9192cee 100644
--- a/sys/powerpc/include/profile.h
+++ b/sys/powerpc/include/profile.h
@@ -85,6 +85,7 @@ __asm( " .text \n" \
"_mcount: \n" \
" .quad .L._mcount,.TOC.@tocbase,0\n" \
" .previous \n" \
+ " .size main,24 \n" \
" .type _mcount,@function \n" \
" .align 4 \n" \
".L._mcount: \n" \
diff --git a/sys/powerpc/ofw/ofwcall64.S b/sys/powerpc/ofw/ofwcall64.S
index 1fb78e8..beb6bdc 100644
--- a/sys/powerpc/ofw/ofwcall64.S
+++ b/sys/powerpc/ofw/ofwcall64.S
@@ -56,7 +56,7 @@ GLOBAL(rtas_entry)
* Open Firmware Real-mode Entry Point. This is a huge pain.
*/
-ASENTRY(ofwcall)
+ASENTRY_NOPROF(ofwcall)
mflr %r0
std %r0,16(%r1)
stdu %r1,-208(%r1)
@@ -175,7 +175,7 @@ ASENTRY(ofwcall)
* C prototype: int rtascall(void *callbuffer, void *rtas_privdat);
*/
-ASENTRY(rtascall)
+ASENTRY_NOPROF(rtascall)
mflr %r0
std %r0,16(%r1)
stdu %r1,-208(%r1)
diff --git a/sys/powerpc/powerpc/atomic.S b/sys/powerpc/powerpc/atomic.S
index c6f669a..4d0d1c6 100644
--- a/sys/powerpc/powerpc/atomic.S
+++ b/sys/powerpc/powerpc/atomic.S
@@ -30,7 +30,7 @@
.text
-ASENTRY(atomic_set_8)
+ASENTRY_NOPROF(atomic_set_8)
0: lwarx 0, 0, 3 /* load old value */
slwi 4, 4, 24 /* shift the byte so it's in the right place */
or 0, 0, 4 /* generate new value */
@@ -40,7 +40,7 @@ ASENTRY(atomic_set_8)
sync
blr /* return */
-ASENTRY(atomic_clear_8)
+ASENTRY_NOPROF(atomic_clear_8)
0: lwarx 0, 0, 3 /* load old value */
slwi 4, 4, 24 /* shift the byte so it's in the right place */
andc 0, 0, 4 /* generate new value */
@@ -50,7 +50,7 @@ ASENTRY(atomic_clear_8)
sync
blr /* return */
-ASENTRY(atomic_add_8)
+ASENTRY_NOPROF(atomic_add_8)
0: lwarx 9, 0, 3 /* load old value */
srwi 0, 9, 24 /* byte alignment */
add 0, 4, 0 /* calculate new value */
@@ -63,7 +63,7 @@ ASENTRY(atomic_add_8)
sync
blr /* return */
-ASENTRY(atomic_subtract_8)
+ASENTRY_NOPROF(atomic_subtract_8)
0: lwarx 9, 0, 3 /* load old value */
srwi 0, 9, 24 /* byte alignment */
subf 0, 4, 0 /* calculate new value */
@@ -76,7 +76,7 @@ ASENTRY(atomic_subtract_8)
sync
blr /* return */
-ASENTRY(atomic_set_16)
+ASENTRY_NOPROF(atomic_set_16)
li 11, 3 /* mask to test for alignment */
andc. 11, 3, 11 /* force address to be word-aligned */
0: lwarx 12, 0, 11 /* load old value */
@@ -89,7 +89,7 @@ ASENTRY(atomic_set_16)
sync
blr /* return */
-ASENTRY(atomic_clear_16)
+ASENTRY_NOPROF(atomic_clear_16)
li 11, 3 /* mask to test for alignment */
andc. 11, 3, 11 /* force address to be word-aligned */
0: lwarx 12, 0, 11 /* load old value */
@@ -102,7 +102,7 @@ ASENTRY(atomic_clear_16)
sync
blr /* return */
-ASENTRY(atomic_add_16)
+ASENTRY_NOPROF(atomic_add_16)
li 11, 3 /* mask to test for alignment */
andc. 11, 3, 11 /* force address to be word-aligned */
0: lwarx 12, 0, 11 /* load old value */
@@ -119,7 +119,7 @@ ASENTRY(atomic_add_16)
sync
blr /* return */
-ASENTRY(atomic_subtract_16)
+ASENTRY_NOPROF(atomic_subtract_16)
li 11, 3 /* mask to test for alignment */
andc. 11, 3, 11 /* force address to be word-aligned */
0: lwarx 12, 0, 11 /* load old value */
diff --git a/sys/powerpc/powerpc/setjmp.S b/sys/powerpc/powerpc/setjmp.S
index 3f6d259..3884b11 100644
--- a/sys/powerpc/powerpc/setjmp.S
+++ b/sys/powerpc/powerpc/setjmp.S
@@ -42,7 +42,7 @@
#define JMP_xer 24*REGWIDTH
#define JMP_sig 25*REGWIDTH
-ASENTRY(setjmp)
+ASENTRY_NOPROF(setjmp)
ST_REG 31, JMP_r31(3)
/* r1, r2, r14-r30 */
ST_REG 1, JMP_r1 (3)
@@ -79,7 +79,7 @@ ASENTRY(setjmp)
.extern sigsetmask
-ASENTRY(longjmp)
+ASENTRY_NOPROF(longjmp)
LD_REG 31, JMP_r31(3)
/* r1, r2, r14-r30 */
LD_REG 1, JMP_r1 (3)
OpenPOWER on IntegriCloud