diff options
author | jake <jake@FreeBSD.org> | 2002-12-28 23:57:52 +0000 |
---|---|---|
committer | jake <jake@FreeBSD.org> | 2002-12-28 23:57:52 +0000 |
commit | eb4b24c167c626d88cf10c249d193fc47fb259c6 (patch) | |
tree | 6ac6a341beb763461196d3c67a07db9e647f22f9 /sys | |
parent | 7892c5c36d63e6246a294b9f845357fdf66270d8 (diff) | |
download | FreeBSD-src-eb4b24c167c626d88cf10c249d193fc47fb259c6.zip FreeBSD-src-eb4b24c167c626d88cf10c249d193fc47fb259c6.tar.gz |
- Moved storing %g1-%g5 in the trapframe until after interrupts are enabled.
- Restore %g6 and %g7 for kernel traps if we are returning to prom code.
This allows complex traps (ones that call into C code) to be handled from
the prom.
Diffstat (limited to 'sys')
-rw-r--r-- | sys/sparc64/sparc64/exception.S | 68 |
1 file changed, 41 insertions(+), 27 deletions(-)
diff --git a/sys/sparc64/sparc64/exception.S b/sys/sparc64/sparc64/exception.S index 5b01fba..1f255f2 100644 --- a/sys/sparc64/sparc64/exception.S +++ b/sys/sparc64/sparc64/exception.S @@ -2186,11 +2186,6 @@ ENTRY(tl0_trap) mov PCPU_REG, %l1 wrpr %g0, PSTATE_NORMAL, %pstate - stx %g1, [%sp + SPOFF + CCFSZ + TF_G1] - stx %g2, [%sp + SPOFF + CCFSZ + TF_G2] - stx %g3, [%sp + SPOFF + CCFSZ + TF_G3] - stx %g4, [%sp + SPOFF + CCFSZ + TF_G4] - stx %g5, [%sp + SPOFF + CCFSZ + TF_G5] stx %g6, [%sp + SPOFF + CCFSZ + TF_G6] stx %g7, [%sp + SPOFF + CCFSZ + TF_G7] @@ -2207,6 +2202,12 @@ ENTRY(tl0_trap) stx %i6, [%sp + SPOFF + CCFSZ + TF_O6] stx %i7, [%sp + SPOFF + CCFSZ + TF_O7] + stx %g1, [%sp + SPOFF + CCFSZ + TF_G1] + stx %g2, [%sp + SPOFF + CCFSZ + TF_G2] + stx %g3, [%sp + SPOFF + CCFSZ + TF_G3] + stx %g4, [%sp + SPOFF + CCFSZ + TF_G4] + stx %g5, [%sp + SPOFF + CCFSZ + TF_G5] + set tl0_ret - 8, %o7 jmpl %o2, %g0 add %sp, CCFSZ + SPOFF, %o0 @@ -2375,8 +2376,8 @@ ENTRY(tl0_ret) nop /* - * Restore the out registers from the trapframe. These are ins - * now, they will become the outs when we restore below. + * Restore the out and most global registers from the trapframe. + * The ins will become the outs when we restore below. */ 2: ldx [%sp + SPOFF + CCFSZ + TF_O0], %i0 ldx [%sp + SPOFF + CCFSZ + TF_O1], %i1 @@ -2387,6 +2388,12 @@ ENTRY(tl0_ret) ldx [%sp + SPOFF + CCFSZ + TF_O6], %i6 ldx [%sp + SPOFF + CCFSZ + TF_O7], %i7 + ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1 + ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2 + ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3 + ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4 + ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5 + /* * Load everything we need to restore below before disabling * interrupts. @@ -2400,18 +2407,12 @@ ENTRY(tl0_ret) ldx [%sp + SPOFF + CCFSZ + TF_WSTATE], %l6 /* - * Disable interrupts to restore the globals. We need to restore - * %g6 and %g7 which are used as global variables in the kernel. 
- * They are not saved and restored for kernel traps, so an interrupt - * at the wrong time would clobber them. + * Disable interrupts to restore the special globals. They are not + * saved and restored for all kernel traps, so an interrupt at the + * wrong time would clobber them. */ wrpr %g0, PSTATE_NORMAL, %pstate - ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1 - ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2 - ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3 - ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4 - ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5 ldx [%sp + SPOFF + CCFSZ + TF_G6], %g6 ldx [%sp + SPOFF + CCFSZ + TF_G7], %g7 @@ -2648,11 +2649,8 @@ ENTRY(tl1_trap) mov PCPU_REG, %l1 wrpr %g0, PSTATE_NORMAL, %pstate - stx %g1, [%sp + SPOFF + CCFSZ + TF_G1] - stx %g2, [%sp + SPOFF + CCFSZ + TF_G2] - stx %g3, [%sp + SPOFF + CCFSZ + TF_G3] - stx %g4, [%sp + SPOFF + CCFSZ + TF_G4] - stx %g5, [%sp + SPOFF + CCFSZ + TF_G5] + stx %g6, [%sp + SPOFF + CCFSZ + TF_G6] + stx %g7, [%sp + SPOFF + CCFSZ + TF_G7] mov %l0, PCB_REG mov %l1, PCPU_REG @@ -2667,6 +2665,12 @@ ENTRY(tl1_trap) stx %i6, [%sp + SPOFF + CCFSZ + TF_O6] stx %i7, [%sp + SPOFF + CCFSZ + TF_O7] + stx %g1, [%sp + SPOFF + CCFSZ + TF_G1] + stx %g2, [%sp + SPOFF + CCFSZ + TF_G2] + stx %g3, [%sp + SPOFF + CCFSZ + TF_G3] + stx %g4, [%sp + SPOFF + CCFSZ + TF_G4] + stx %g5, [%sp + SPOFF + CCFSZ + TF_G5] + set tl1_ret - 8, %o7 jmpl %o2, %g0 add %sp, CCFSZ + SPOFF, %o0 @@ -2682,19 +2686,29 @@ ENTRY(tl1_ret) ldx [%sp + SPOFF + CCFSZ + TF_O6], %i6 ldx [%sp + SPOFF + CCFSZ + TF_O7], %i7 + ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1 + ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2 + ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3 + ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4 + ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5 + ldx [%sp + SPOFF + CCFSZ + TF_TSTATE], %l0 ldx [%sp + SPOFF + CCFSZ + TF_TPC], %l1 ldx [%sp + SPOFF + CCFSZ + TF_TNPC], %l2 ldx [%sp + SPOFF + CCFSZ + TF_PIL], %l3 ldx [%sp + SPOFF + CCFSZ + TF_Y], %l4 - ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1 - ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2 - ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3 - ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4 - ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5 + set VM_MIN_PROM_ADDRESS, %l5 + cmp %l1, %l5 + bl,a,pt %xcc, 1f + nop - wrpr %g0, PSTATE_ALT, %pstate + wrpr %g0, PSTATE_NORMAL, %pstate + + ldx [%sp + SPOFF + CCFSZ + TF_G6], %g6 + ldx [%sp + SPOFF + CCFSZ + TF_G7], %g7 + +1: wrpr %g0, PSTATE_ALT, %pstate andn %l0, TSTATE_CWP_MASK, %g1 mov %l1, %g2 |