diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index 23016f6aa645..9b42b88bfba0 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -448,8 +448,11 @@ __common_mmu_cache_on:
 		mov	r1, #-1
 		mcr	p15, 0, r3, c2, c0, 0	@ load page table pointer
 		mcr	p15, 0, r1, c3, c0, 0	@ load domain access control
-		mcr	p15, 0, r0, c1, c0, 0	@ load control register
-		mov	pc, lr
+		b	1f
+		.align	5			@ cache line aligned
+1:		mcr	p15, 0, r0, c1, c0, 0	@ load control register
+		mrc	p15, 0, r0, c1, c0, 0	@ and read it back to
+		sub	pc, lr, r0, lsr #32	@ properly flush pipeline
 
 /*
  * All code following this line is relocatable.  It is relocated by
diff --git a/arch/arm/mm/proc-xscale.S b/arch/arm/mm/proc-xscale.S
index 535395e25a8a..521538671f4c 100644
--- a/arch/arm/mm/proc-xscale.S
+++ b/arch/arm/mm/proc-xscale.S
@@ -138,17 +138,23 @@ ENTRY(cpu_xscale_proc_fin)
  * to what would be the reset vector.
  *
  * loc: location to jump to for soft reset
+ *
+ * Beware PXA270 erratum E7.
  */
 	.align	5
 ENTRY(cpu_xscale_reset)
 	mov	r1, #PSR_F_BIT|PSR_I_BIT|SVC_MODE
 	msr	cpsr_c, r1			@ reset CPSR
+	mcr	p15, 0, r1, c10, c4, 1		@ unlock I-TLB
+	mcr	p15, 0, r1, c8, c5, 0		@ invalidate I-TLB
 	mrc	p15, 0, r1, c1, c0, 0		@ ctrl register
 	bic	r1, r1, #0x0086			@ ........B....CA.
 	bic	r1, r1, #0x3900			@ ..VIZ..S........
+	sub	pc, pc, #4			@ flush pipeline
+	@ *** cache line aligned ***
 	mcr	p15, 0, r1, c1, c0, 0		@ ctrl register
-	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches & BTB
 	bic	r1, r1, #0x0001			@ ...............M
+	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches & BTB
 	mcr	p15, 0, r1, c1, c0, 0		@ ctrl register
 	@ CAUTION: MMU turned off from this point. We count on the pipeline
 	@ already containing those two last instructions to survive.
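
For reference, the pipeline-flush idioms the two hunks rely on, lifted out of their surrounding code. This is a minimal sketch, not part of the patch: the labels cp15_write_and_return and pipeline_flush_inline are made up for illustration, and it assumes an XScale-class ARMv5 core with 32-byte cache lines, where a CP15 control register write takes effect at an imprecise point unless the pipeline is explicitly drained.

	.text

@ Sketch only; cp15_write_and_return is a hypothetical label, not kernel code.
@ Idiom from the head.S hunk: write the control register, read it back, then
@ branch through an ALU result that depends on the value read. "r0, lsr #32"
@ evaluates to zero, so the sub is effectively "mov pc, lr", but the data
@ dependency on r0 keeps the branch from issuing until the mrc (and therefore
@ the mcr before it) has completed.
	.align	5			@ keep the whole trio in one cache line
cp15_write_and_return:
	mcr	p15, 0, r0, c1, c0, 0	@ write CP15 control register (new value in r0)
	mrc	p15, 0, r0, c1, c0, 0	@ read it back
	sub	pc, lr, r0, lsr #32	@ return via lr, flushing the pipeline

@ Idiom from the proc-xscale.S hunk, usable when lr holds nothing useful:
@ pc reads as the current instruction plus 8, so subtracting 4 branches to
@ the very next instruction, and any write to pc flushes the pipeline on
@ these cores. pipeline_flush_inline is likewise a hypothetical label.
pipeline_flush_inline:
	sub	pc, pc, #4		@ continue at the next instruction
	mov	pc, lr			@ ...which here is a plain return

The .align 5 is also what the CAUTION comment in the second hunk depends on: once the MMU or caches are toggled, only instructions already fetched are guaranteed to execute, so the critical sequence is kept inside a single 32-byte line that has been fetched before the first control register write lands.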