diff options
author | Till Straumann <strauman@slac.stanford.edu> | 2008-07-10 21:29:27 +0000 |
---|---|---|
committer | Till Straumann <strauman@slac.stanford.edu> | 2008-07-10 21:29:27 +0000 |
commit | 38f5e616fcf26380e758452d3a0f72a65247cc14 (patch) | |
tree | 6ca4fe58737deaf24e4dd01812540299315cedd4 /c/src/lib/libcpu | |
parent | 2008-07-10 Till Straumann <strauman@slac.stanford.edu> (diff) | |
download | rtems-38f5e616fcf26380e758452d3a0f72a65247cc14.tar.bz2 |
2008-07-10 Till Straumann <strauman@slac.stanford.edu>
* new-exceptions/bspsupport/ppc_exc.S: must disable
interrupts prior to restoring SRRs (thanks to Sebastian Huber)
Diffstat (limited to 'c/src/lib/libcpu')
-rw-r--r-- | c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S | 20 |
1 file changed, 20 insertions, 0 deletions
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S index fbf6432b1c..c3cede95da 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S @@ -271,6 +271,10 @@ skip_save_nonvolatile_regs: /* decrement ISR nest level; * disable all interrupts. + * (Disabling IRQs here is not necessary if we + * use the stack-switching strategy which tests + * if we are already on the ISR-stack as opposed + * to testing the nesting level; see ppc_exc_asm_macros.h) */ lwz r4, ppc_exc_msr_irq_mask@sdarel(r13) mfmsr r5 @@ -365,6 +369,22 @@ skip_restore_nonvolatile_regs: lwz r4, EXC_CR_OFFSET(r1) mtcr r4 + /* Must disable interrupts prior to restoring SRRs. + * Here's a scenario discovered by Sebastian Huber: + * 1) CE happens between writing to SRR and RFI + * 2) CE handler does something which requires a task switch + * 3) CE wrapper returns and determines that task switch + * is OK since EE lock is not held, dispatch-disable level + * is zero etc. + * 4) switch to other task enables EE + * 5) eventually, switch back to task interrupted by 1) + * 6) RFI happens but SRR contents have been clobbered. + */ + lwz r4, ppc_exc_msr_irq_mask@sdarel(r13) + mfmsr r5 + andc r4, r5, r4 + mtmsr r4 + /* restore SRR and stack */ lwz r4, SRR0_FRAME_OFFSET(r1) lwz r5, SRR1_FRAME_OFFSET(r1) |