author     Russell King <rmk+kernel@arm.linux.org.uk>   2010-09-04 10:47:48 +0100
committer  Russell King <rmk+kernel@arm.linux.org.uk>   2010-10-04 20:23:36 +0100
commit     f00ec48fadf5e37e7889f14cff900aa70d18b644 (patch)
tree       421cbce97167a78532aa825624f380caade3c0d2 /arch/arm/mm/tlb-v7.S
parent     067173526c3bbc2eaeefcf6b7b2a9d998b9e8042 (diff)
ARM: Allow SMP kernels to boot on UP systems
UP systems do not implement all the instructions that SMP systems have,
so in order to boot an SMP kernel on a UP system, we need to rewrite
parts of the kernel.

Do this using an 'alternatives' scheme, where the kernel code and data
are modified prior to initialization to replace the SMP instructions,
thereby rendering the problematic code ineffectual. We use the linker
to generate a list of 32-bit word locations and their replacement values,
and run through these replacements when we detect a UP system.
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
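
To make the scheme concrete, here is a minimal sketch of how the ALT_SMP/ALT_UP pair used in the diff below can be wired up, assuming the real definitions (added elsewhere in this series, in arch/arm/include/asm/assembler.h) follow this shape. On an SMP build the SMP instruction is assembled in place behind a local label, and the UP replacement word is emitted, together with that label's address, into a separate section for the linker to collect; on a pure UP build only the UP instruction is assembled.

    /*
     * Sketch only, not the verbatim kernel macros.
     * SMP build: the SMP instruction stays in .text behind a local label,
     * and ALT_UP() records the pair (address of that word, 32-bit UP
     * replacement word) in the ".alt.smp.init" section.
     * UP build: only the UP instruction is assembled.
     */
    #ifdef CONFIG_SMP
    #define ALT_SMP(instr...)                       \
    9998:   instr
    #define ALT_UP(instr...)                        \
            .pushsection ".alt.smp.init", "a"       ;\
            .long   9998b                           ;\
            instr                                   ;\
            .popsection
    #else
    #define ALT_SMP(instr...)
    #define ALT_UP(instr...) instr
    #endif

The linker script can then bracket .alt.smp.init with begin and end symbols, turning it into a flat array of (location, replacement) word pairs for the boot code to walk.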
Diffstat (limited to 'arch/arm/mm/tlb-v7.S')
-rw-r--r--   arch/arm/mm/tlb-v7.S   33
1 file changed, 12 insertions(+), 21 deletions(-)
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index f3f288a9546..53cd5b45467 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -13,6 +13,7 @@
  */
 #include <linux/init.h>
 #include <linux/linkage.h>
+#include <asm/assembler.h>
 #include <asm/asm-offsets.h>
 #include <asm/page.h>
 #include <asm/tlbflush.h>
@@ -41,20 +42,15 @@ ENTRY(v7wbi_flush_user_tlb_range)
 	orr	r0, r3, r0, lsl #PAGE_SHIFT	@ Create initial MVA
 	mov	r1, r1, lsl #PAGE_SHIFT
 1:
-#ifdef CONFIG_SMP
-	mcr	p15, 0, r0, c8, c3, 1		@ TLB invalidate U MVA (shareable)
-#else
-	mcr	p15, 0, r0, c8, c7, 1		@ TLB invalidate U MVA
-#endif
+	ALT_SMP(mcr	p15, 0, r0, c8, c3, 1)	@ TLB invalidate U MVA (shareable)
+	ALT_UP(mcr	p15, 0, r0, c8, c7, 1)	@ TLB invalidate U MVA
+
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
 	mov	ip, #0
-#ifdef CONFIG_SMP
-	mcr	p15, 0, ip, c7, c1, 6		@ flush BTAC/BTB Inner Shareable
-#else
-	mcr	p15, 0, ip, c7, c5, 6		@ flush BTAC/BTB
-#endif
+	ALT_SMP(mcr	p15, 0, ip, c7, c1, 6)	@ flush BTAC/BTB Inner Shareable
+	ALT_UP(mcr	p15, 0, ip, c7, c5, 6)	@ flush BTAC/BTB
 	dsb
 	mov	pc, lr
 ENDPROC(v7wbi_flush_user_tlb_range)
@@ -74,20 +70,14 @@ ENTRY(v7wbi_flush_kern_tlb_range)
 	mov	r0, r0, lsl #PAGE_SHIFT
 	mov	r1, r1, lsl #PAGE_SHIFT
 1:
-#ifdef CONFIG_SMP
-	mcr	p15, 0, r0, c8, c3, 1		@ TLB invalidate U MVA (shareable)
-#else
-	mcr	p15, 0, r0, c8, c7, 1		@ TLB invalidate U MVA
-#endif
+	ALT_SMP(mcr	p15, 0, r0, c8, c3, 1)	@ TLB invalidate U MVA (shareable)
+	ALT_UP(mcr	p15, 0, r0, c8, c7, 1)	@ TLB invalidate U MVA
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
 	mov	r2, #0
-#ifdef CONFIG_SMP
-	mcr	p15, 0, r2, c7, c1, 6		@ flush BTAC/BTB Inner Shareable
-#else
-	mcr	p15, 0, r2, c7, c5, 6		@ flush BTAC/BTB
-#endif
+	ALT_SMP(mcr	p15, 0, r2, c7, c1, 6)	@ flush BTAC/BTB Inner Shareable
+	ALT_UP(mcr	p15, 0, r2, c7, c5, 6)	@ flush BTAC/BTB
 	dsb
 	isb
 	mov	pc, lr
@@ -99,5 +89,6 @@ ENDPROC(v7wbi_flush_kern_tlb_range)
 ENTRY(v7wbi_tlb_fns)
 	.long	v7wbi_flush_user_tlb_range
 	.long	v7wbi_flush_kern_tlb_range
-	.long	v7wbi_tlb_flags
+	ALT_SMP(.long	v7wbi_tlb_flags_smp)
+	ALT_UP(.long	v7wbi_tlb_flags_up)
 	.size	v7wbi_tlb_fns, . - v7wbi_tlb_fns
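
The other half of the mechanism is the boot-time pass that consumes the table built from the ALT_UP() records. When the kernel detects that it is running on a UP system, it walks the (location, replacement) pairs and overwrites each SMP instruction with its UP variant. The loop below is an illustrative sketch with a hypothetical label, not the kernel's actual fixup routine; it assumes r4 and r5 already hold the start and end of the table and ignores the physical/virtual address offset that the real early-boot code has to apply before the MMU is fully set up.

    fixup_smp_on_up_sketch:
            cmp     r4, r5                  @ reached the end of the replacement table?
            movhs   pc, lr                  @ yes: everything is patched, return
            ldmia   r4!, {r0, r6}           @ r0 = location, r6 = UP instruction word
            str     r6, [r0]                @ overwrite the SMP instruction in place
            b       fixup_smp_on_up_sketch  @ on to the next pair

After patching, the modified instructions also have to be made visible to the instruction stream (cache maintenance and barriers), which this sketch leaves out.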