author     marius <marius@FreeBSD.org>   2011-06-07 17:33:39 +0000
committer  marius <marius@FreeBSD.org>   2011-06-07 17:33:39 +0000
commit  e727405d750da7fbe6efcf79acc6c3515fe7aeb2 (patch)
tree    d4ebb74d716f12567312611b854cd5ff14437ec7 /sys/sparc64
parent  3a370ea3a9d5051e1f35c95d2db6af27c37fde70 (diff)
Adapt CATR() to r222813. This is somewhat tricky, as we can't afford to use
more than three temporary registers in several of the places where CATR() is
used, so this code trades additional instructions for registers. Even that
still isn't sufficient, and CATR() now has the side effect of clobbering %y.
Luckily, with the current uses of CATR() this either doesn't matter or we are
able to (save and) restore it. Now that there's only one use of AND() and
TEST() left, inline these.
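
The %y side effect mentioned above comes from the 32-bit divides the reworked
macro performs: on SPARC, udiv takes the upper half of its 64-bit dividend
from the %y register, so CATR() has to zero %y before dividing and whatever
the interrupted code kept there is lost. A rough C model of that behaviour,
purely for illustration (not FreeBSD code, and ignoring the divide-by-zero
and overflow traps):

	/*
	 * Illustrative model of SPARC udiv: %y supplies the upper 32 bits
	 * of the dividend, which is why CATR() writes %y (mov %g0, %y) and
	 * thus clobbers it.
	 */
	static uint32_t
	udiv_model(uint32_t y, uint32_t rs1, uint32_t rs2)
	{
		uint64_t dividend = ((uint64_t)y << 32) | rs1;

		return ((uint32_t)(dividend / rs2));
	}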
Diffstat (limited to 'sys/sparc64')
-rw-r--r--  sys/sparc64/include/ktr.h         |  44
-rw-r--r--  sys/sparc64/sparc64/exception.S   |  94
-rw-r--r--  sys/sparc64/sparc64/mp_locore.S   |   8
-rw-r--r--  sys/sparc64/sparc64/mp_machdep.c  |   3
4 files changed, 88 insertions, 61 deletions
diff --git a/sys/sparc64/include/ktr.h b/sys/sparc64/include/ktr.h
index 2a9966b..f13865f 100644
--- a/sys/sparc64/include/ktr.h
+++ b/sys/sparc64/include/ktr.h
@@ -40,16 +40,6 @@
#else
-#define AND(var, mask, r1, r2) \
- SET(var, r2, r1) ; \
- lduw [r1], r2 ; \
- and r2, mask, r1
-
-#define TEST(var, mask, r1, r2, l1) \
- AND(var, mask, r1, r2) ; \
- brz r1, l1 ## f ; \
- nop
-
/*
* XXX could really use another register...
*/
@@ -79,15 +69,37 @@ l2: add r2, 1, r3 ; \
SET(l1 ## b, r3, r2) ; \
stx r2, [r1 + KTR_DESC]
+/*
+ * NB: this clobbers %y.
+ */
#define CATR(mask, desc, r1, r2, r3, l1, l2, l3) \
set mask, r1 ; \
- TEST(ktr_mask, r1, r2, r2, l3) ; \
- lduw [PCPU(MID)], r1 ; \
+ SET(ktr_mask, r3, r2) ; \
+ lduw [r2], r2 ; \
+ and r2, r1, r1 ; \
+ brz r1, l3 ## f ; \
+ nop ; \
+ lduw [PCPU(CPUID)], r2 ; \
+ mov _NCPUBITS, r3 ; \
+ mov %g0, %y ; \
+ udiv r2, r3, r2 ; \
+ srl r2, 0, r2 ; \
+ sllx r2, PTR_SHIFT, r2 ; \
+ SET(ktr_cpumask, r3, r1) ; \
+ ldx [r1 + r2], r1 ; \
+ lduw [PCPU(CPUID)], r2 ; \
+ mov _NCPUBITS, r3 ; \
+ mov %g0, %y ; \
+ udiv r2, r3, r2 ; \
+ srl r2, 0, r2 ; \
+ smul r2, r3, r3 ; \
+ lduw [PCPU(CPUID)], r2 ; \
+ sub r2, r3, r3 ; \
mov 1, r2 ; \
- sllx r2, r1, r1 ; \
-#ifdef notyet \
- TEST(ktr_cpumask, r1, r2, r3, l3) ; \
-#endif \
+ sllx r2, r3, r2 ; \
+ andn r1, r2, r1 ; \
+ brz r1, l3 ## f ; \
+ nop ; \
ATR(desc, r1, r2, r3, l1, l2)
#endif /* LOCORE */
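
For reference, a minimal C sketch (not part of the commit) of the index and
bit computation the new CATR() open-codes above: the word index into
ktr_cpumask is cpuid / _NCPUBITS and the bit position is cpuid % _NCPUBITS,
with the remainder rebuilt via smul and sub because udiv only yields the
quotient. The array-of-longs layout of ktr_cpumask and the definition of
_NCPUBITS are assumptions based on the diff.

	#define	_NCPUBITS	(sizeof(long) * 8)	/* assumed: bits per mask word */

	static int
	cpumask_bit(const unsigned long *mask, unsigned int cpuid)
	{
		unsigned int word = cpuid / _NCPUBITS;		/* udiv r2, r3, r2 */
		unsigned int bit = cpuid - word * _NCPUBITS;	/* smul r2, r3, r3; sub */
		unsigned long b = 1UL << bit;			/* mov 1, r2; sllx r2, r3, r2 */

		return ((mask[word] & b) != 0);			/* ldx [r1 + r2], r1; ... */
	}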
diff --git a/sys/sparc64/sparc64/exception.S b/sys/sparc64/sparc64/exception.S
index ed0e381..246f2c2 100644
--- a/sys/sparc64/sparc64/exception.S
+++ b/sys/sparc64/sparc64/exception.S
@@ -2615,9 +2615,9 @@ ENTRY(tl0_ret)
andn %l4, TSTATE_CWP_MASK, %g2
/*
- * Restore %y. Could also be below if we had more alternate globals.
+ * Save %y in an alternate global.
*/
- wr %l5, 0, %y
+ mov %l5, %g4
/*
* Setup %wstate for return. We need to restore the user window state
@@ -2662,8 +2662,8 @@ tl0_ret_fill:
* Fixup %tstate so the saved %cwp points to the current window and
* restore it.
*/
- rdpr %cwp, %g4
- wrpr %g2, %g4, %tstate
+ rdpr %cwp, %g1
+ wrpr %g2, %g1, %tstate
/*
* Restore the user window state. The transition bit was set above
@@ -2673,20 +2673,25 @@ tl0_ret_fill:
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: td=%#lx pil=%#lx pc=%#lx npc=%#lx sp=%#lx"
- , %g2, %g3, %g4, 7, 8, 9)
- ldx [PCPU(CURTHREAD)], %g3
- stx %g3, [%g2 + KTR_PARM1]
- rdpr %pil, %g3
- stx %g3, [%g2 + KTR_PARM2]
- rdpr %tpc, %g3
- stx %g3, [%g2 + KTR_PARM3]
- rdpr %tnpc, %g3
- stx %g3, [%g2 + KTR_PARM4]
- stx %sp, [%g2 + KTR_PARM5]
+ , %g1, %g2, %g3, 7, 8, 9)
+ ldx [PCPU(CURTHREAD)], %g2
+ stx %g2, [%g1 + KTR_PARM1]
+ rdpr %pil, %g2
+ stx %g2, [%g1 + KTR_PARM2]
+ rdpr %tpc, %g2
+ stx %g2, [%g1 + KTR_PARM3]
+ rdpr %tnpc, %g2
+ stx %g2, [%g1 + KTR_PARM4]
+ stx %sp, [%g1 + KTR_PARM5]
9:
#endif
/*
+ * Restore %y. Note that the CATR above clobbered it.
+ */
+ wr %g4, 0, %y
+
+ /*
* Return to usermode.
*/
retry
@@ -2700,6 +2705,11 @@ tl0_ret_fill_end:
stx %l5, [%l0 + KTR_PARM2]
stx %sp, [%l0 + KTR_PARM3]
9:
+
+ /*
+ * Restore %y clobbered by the CATR. This was saved in %l5 above.
+ */
+ wr %l5, 0, %y
#endif
/*
@@ -2867,34 +2877,36 @@ ENTRY(tl1_ret)
andn %l0, TSTATE_CWP_MASK, %g1
mov %l1, %g2
mov %l2, %g3
+ mov %l4, %g4
wrpr %l3, 0, %pil
- wr %l4, 0, %y
restore
wrpr %g0, 2, %tl
- rdpr %cwp, %g4
- wrpr %g1, %g4, %tstate
wrpr %g2, 0, %tpc
wrpr %g3, 0, %tnpc
+ rdpr %cwp, %g2
+ wrpr %g1, %g2, %tstate
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl1_ret: td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
- , %g2, %g3, %g4, 7, 8, 9)
- ldx [PCPU(CURTHREAD)], %g3
- stx %g3, [%g2 + KTR_PARM1]
- rdpr %pil, %g3
- stx %g3, [%g2 + KTR_PARM2]
- rdpr %tstate, %g3
- stx %g3, [%g2 + KTR_PARM3]
- rdpr %tpc, %g3
- stx %g3, [%g2 + KTR_PARM4]
- stx %sp, [%g2 + KTR_PARM5]
+ , %g1, %g2, %g3, 7, 8, 9)
+ ldx [PCPU(CURTHREAD)], %g2
+ stx %g2, [%g1 + KTR_PARM1]
+ rdpr %pil, %g2
+ stx %g2, [%g1 + KTR_PARM2]
+ rdpr %tstate, %g2
+ stx %g2, [%g1 + KTR_PARM3]
+ rdpr %tpc, %g2
+ stx %g2, [%g1 + KTR_PARM4]
+ stx %sp, [%g1 + KTR_PARM5]
9:
#endif
+ wr %g4, 0, %y
+
retry
END(tl1_ret)
@@ -2995,33 +3007,35 @@ ENTRY(tl1_intr)
andn %l0, TSTATE_CWP_MASK, %g1
mov %l1, %g2
mov %l2, %g3
+ mov %l4, %g4
wrpr %l3, 0, %pil
- wr %l4, 0, %y
restore
wrpr %g0, 2, %tl
- rdpr %cwp, %g4
- wrpr %g1, %g4, %tstate
wrpr %g2, 0, %tpc
wrpr %g3, 0, %tnpc
+ rdpr %cwp, %g2
+ wrpr %g1, %g2, %tstate
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl1_intr: td=%#x pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
- , %g2, %g3, %g4, 7, 8, 9)
- ldx [PCPU(CURTHREAD)], %g3
- stx %g3, [%g2 + KTR_PARM1]
- rdpr %pil, %g3
- stx %g3, [%g2 + KTR_PARM2]
- rdpr %tstate, %g3
- stx %g3, [%g2 + KTR_PARM3]
- rdpr %tpc, %g3
- stx %g3, [%g2 + KTR_PARM4]
- stx %sp, [%g2 + KTR_PARM5]
+ , %g1, %g2, %g3, 7, 8, 9)
+ ldx [PCPU(CURTHREAD)], %g2
+ stx %g2, [%g1 + KTR_PARM1]
+ rdpr %pil, %g2
+ stx %g2, [%g1 + KTR_PARM2]
+ rdpr %tstate, %g2
+ stx %g2, [%g1 + KTR_PARM3]
+ rdpr %tpc, %g2
+ stx %g2, [%g1 + KTR_PARM4]
+ stx %sp, [%g1 + KTR_PARM5]
9:
#endif
+ wr %g4, 0, %y
+
retry
END(tl1_intr)
diff --git a/sys/sparc64/sparc64/mp_locore.S b/sys/sparc64/sparc64/mp_locore.S
index fbcb767..fd4357e 100644
--- a/sys/sparc64/sparc64/mp_locore.S
+++ b/sys/sparc64/sparc64/mp_locore.S
@@ -269,13 +269,17 @@ ENTRY(mp_startup)
add %l1, %l2, %l1
sub %l1, SPOFF + CCFSZ, %sp
+ /* Initialize global registers. */
+ call cpu_setregs
+ mov %l1, %o0
+
#if KTR_COMPILE & KTR_SMP
CATR(KTR_SMP,
"mp_startup: bootstrap cpuid=%d mid=%d pcpu=%#lx data=%#lx sp=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
- lduw [%l1 + PC_CPUID], %g2
+ lduw [PCPU(CPUID)], %g2
stx %g2, [%g1 + KTR_PARM1]
- lduw [%l1 + PC_MID], %g2
+ lduw [PCPU(MID)], %g2
stx %g2, [%g1 + KTR_PARM2]
stx %l1, [%g1 + KTR_PARM3]
stx %sp, [%g1 + KTR_PARM5]
diff --git a/sys/sparc64/sparc64/mp_machdep.c b/sys/sparc64/sparc64/mp_machdep.c
index 304a0f3..f2e76df 100644
--- a/sys/sparc64/sparc64/mp_machdep.c
+++ b/sys/sparc64/sparc64/mp_machdep.c
@@ -457,9 +457,6 @@ cpu_mp_bootstrap(struct pcpu *pc)
*/
tlb_flush_nonlocked();
- /* Initialize global registers. */
- cpu_setregs(pc);
-
/*
* Enable interrupts.
* Note that the PIL will be lowered indirectly via sched_throw(NULL)