Commit d9e1831

Author: Scott Wood
powerpc/85xx: Load all early TLB entries at once
Use an AS=1 trampoline TLB entry to allow all normal TLB1 entries to be loaded at once. This avoids the need to keep the translation that code is executing from in the same TLB entry in the final TLB configuration as during early boot, which in turn is helpful for relocatable kernels (e.g. kdump) where the kernel is not running from what would be the first TLB entry.

On e6500, we limit map_mem_in_cams() to the primary hwthread of a core (the boot cpu is always considered primary, as a kdump kernel can be entered on any cpu). Each TLB only needs to be set up once, and when we do, we don't want another thread to be running when we create a temporary trampoline TLB1 entry.

Signed-off-by: Scott Wood <[email protected]>
Parent: 1930bb5
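At a glance, the restructuring works like this: settlbcam() now only records an entry in the tlbcam[] array, and loadcam_multi() commits the whole batch to hardware once the final layout is known, executing from a temporary AS=1 trampoline mapping while it overwrites the AS=0 entries. Below is a minimal user-space C sketch of that record-then-commit pattern; struct cam_entry, write_tlb_entry(), and the printf "hardware" are hypothetical stand-ins for the real MAS register writes, not kernel API.

#include <stdio.h>

#define MAX_CAMS 64

/* Simplified stand-in for the kernel's struct tlbcam (MAS0..MAS7 values). */
struct cam_entry {
	unsigned long virt;
	unsigned long phys;
	unsigned long size;
};

static struct cam_entry tlbcam[MAX_CAMS];

/* As in the patched settlbcam(): record only, touch no "hardware". */
static void settlbcam(int index, unsigned long virt, unsigned long phys,
		      unsigned long size)
{
	tlbcam[index].virt = virt;
	tlbcam[index].phys = phys;
	tlbcam[index].size = size;
	/* pre-patch behavior was to call loadcam_entry(index) here */
}

/* Hypothetical stand-in for one hardware TLB write (the tlbwe path). */
static void write_tlb_entry(int index)
{
	printf("TLB1[%d]: 0x%lx -> 0x%lx (%lu bytes)\n", index,
	       tlbcam[index].virt, tlbcam[index].phys, tlbcam[index].size);
}

/* Batch commit, as loadcam_multi() now does from the AS=1 trampoline. */
static void loadcam_multi(int first, int num)
{
	/* the real code installs a temporary AS=1 entry and switches to it... */
	for (int i = first; i < first + num; i++)
		write_tlb_entry(i);
	/* ...then returns to AS=0 and clears the temporary entry */
}

int main(void)
{
	settlbcam(0, 0xc0000000UL, 0x00000000UL, 256UL << 20);
	settlbcam(1, 0xd0000000UL, 0x10000000UL, 256UL << 20);
	loadcam_multi(0, 2);
	return 0;
}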

5 files changed, 92 insertions(+), 3 deletions(-)

arch/powerpc/kernel/setup_64.c (8 additions, 0 deletions)

@@ -108,6 +108,14 @@ static void setup_tlb_core_data(void)
 	for_each_possible_cpu(cpu) {
 		int first = cpu_first_thread_sibling(cpu);
 
+		/*
+		 * If we boot via kdump on a non-primary thread,
+		 * make sure we point at the thread that actually
+		 * set up this TLB.
+		 */
+		if (cpu_first_thread_sibling(boot_cpuid) == first)
+			first = boot_cpuid;
+
 		paca[cpu].tcd_ptr = &paca[first].tcd;
 
 		/*
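The hunk above matters when a kdump kernel is entered on a non-primary hardware thread: that thread, not thread 0 of its core, is the one that set up the core's TLB, so every sibling's tcd_ptr must follow the boot CPU. Here is a small user-space sketch of the selection logic; THREADS_PER_CORE and first_thread_of() are illustrative stand-ins for the kernel's topology and cpu_first_thread_sibling(), not the real definitions.

#include <stdio.h>

#define THREADS_PER_CORE 2	/* e6500 has two hwthreads per core */

/* Models cpu_first_thread_sibling(): round down to thread 0 of the core. */
static int first_thread_of(int cpu)
{
	return cpu & ~(THREADS_PER_CORE - 1);
}

int main(void)
{
	int boot_cpuid = 5;	/* kdump entered on a secondary thread */

	for (int cpu = 4; cpu < 6; cpu++) {
		int first = first_thread_of(cpu);

		/* The new check: siblings of the boot CPU follow the
		 * boot CPU itself, since it did the TLB setup. */
		if (first_thread_of(boot_cpuid) == first)
			first = boot_cpuid;

		printf("cpu %d -> tcd of cpu %d\n", cpu, first);
	}
	return 0;
}

With boot_cpuid = 5, both threads of that core (cpus 4 and 5) resolve to cpu 5's tcd, matching the intent of the new check.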

arch/powerpc/mm/fsl_booke_mmu.c (2 additions, 2 deletions)

@@ -141,8 +141,6 @@ static void settlbcam(int index, unsigned long virt, phys_addr_t phys,
 	tlbcam_addrs[index].start = virt;
 	tlbcam_addrs[index].limit = virt + size - 1;
 	tlbcam_addrs[index].phys = phys;
-
-	loadcam_entry(index);
 }
 
 unsigned long calc_cam_sz(unsigned long ram, unsigned long virt,
@@ -188,6 +186,8 @@ static unsigned long map_mem_in_cams_addr(phys_addr_t phys, unsigned long virt,
 		virt += cam_sz;
 		phys += cam_sz;
 	}
+
+	loadcam_multi(0, i, max_cam_idx);
 	tlbcam_index = i;
 
 #ifdef CONFIG_PPC64

arch/powerpc/mm/mmu_decl.h (1 addition, 0 deletions)

@@ -152,6 +152,7 @@ extern int switch_to_as1(void);
 extern void restore_to_as0(int esel, int offset, void *dt_ptr, int bootcpu);
 #endif
 extern void loadcam_entry(unsigned int index);
+extern void loadcam_multi(int first_idx, int num, int tmp_idx);
 
 struct tlbcam {
 	u32 MAS0;

arch/powerpc/mm/tlb_nohash.c (18 additions, 1 deletion)

@@ -42,6 +42,7 @@
 #include <asm/tlbflush.h>
 #include <asm/tlb.h>
 #include <asm/code-patching.h>
+#include <asm/cputhreads.h>
 #include <asm/hugetlb.h>
 #include <asm/paca.h>
 
@@ -628,10 +629,26 @@ static void early_init_this_mmu(void)
 #ifdef CONFIG_PPC_FSL_BOOK3E
 	if (mmu_has_feature(MMU_FTR_TYPE_FSL_E)) {
 		unsigned int num_cams;
+		int __maybe_unused cpu = smp_processor_id();
+		bool map = true;
 
 		/* use a quarter of the TLBCAM for bolted linear map */
 		num_cams = (mfspr(SPRN_TLB1CFG) & TLBnCFG_N_ENTRY) / 4;
-		linear_map_top = map_mem_in_cams(linear_map_top, num_cams);
+
+		/*
+		 * Only do the mapping once per core, or else the
+		 * transient mapping would cause problems.
+		 */
+#ifdef CONFIG_SMP
+		if (cpu != boot_cpuid &&
+		    (cpu != cpu_first_thread_sibling(cpu) ||
+		     cpu == cpu_first_thread_sibling(boot_cpuid)))
+			map = false;
+#endif
+
+		if (map)
+			linear_map_top = map_mem_in_cams(linear_map_top,
+							 num_cams);
 	}
 #endif
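The #ifdef CONFIG_SMP condition above selects which CPUs actually perform the mapping: the boot CPU always does; otherwise only a core's first (primary) thread, and never a thread that shares a core with the boot CPU, since that core's TLB is already set up. A user-space sketch of the predicate, with THREADS_PER_CORE and first_thread_of() again standing in for the real topology helpers:

#include <stdbool.h>
#include <stdio.h>

#define THREADS_PER_CORE 2

static int first_thread_of(int cpu)
{
	return cpu & ~(THREADS_PER_CORE - 1);
}

/* Mirrors the CONFIG_SMP condition in early_init_this_mmu(). */
static bool should_map(int cpu, int boot_cpuid)
{
	if (cpu != boot_cpuid &&
	    (cpu != first_thread_of(cpu) ||
	     cpu == first_thread_of(boot_cpuid)))
		return false;
	return true;
}

int main(void)
{
	int boot_cpuid = 5;	/* kdump case: entered on a non-primary thread */

	for (int cpu = 0; cpu < 8; cpu++)
		printf("cpu %d: %s\n", cpu,
		       should_map(cpu, boot_cpuid) ? "maps" : "skips");
	return 0;
}

With boot_cpuid = 5, cpu 5 maps, its sibling cpu 4 skips, and on every other core only thread 0 maps.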

arch/powerpc/mm/tlb_nohash_low.S (63 additions, 0 deletions)

@@ -400,6 +400,7 @@ _GLOBAL(set_context)
  * extern void loadcam_entry(unsigned int index)
  *
  * Load TLBCAM[index] entry in to the L2 CAM MMU
+ * Must preserve r7, r8, r9, and r10
  */
 _GLOBAL(loadcam_entry)
 	mflr	r5
@@ -423,4 +424,66 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
 	tlbwe
 	isync
 	blr
+
+/*
+ * Load multiple TLB entries at once, using an alternate-space
+ * trampoline so that we don't have to care about whether the same
+ * TLB entry maps us before and after.
+ *
+ * r3 = first entry to write
+ * r4 = number of entries to write
+ * r5 = temporary tlb entry
+ */
+_GLOBAL(loadcam_multi)
+	mflr	r8
+
+	/*
+	 * Set up temporary TLB entry that is the same as what we're
+	 * running from, but in AS=1.
+	 */
+	bl	1f
+1:	mflr	r6
+	tlbsx	0,r8
+	mfspr	r6,SPRN_MAS1
+	ori	r6,r6,MAS1_TS
+	mtspr	SPRN_MAS1,r6
+	mfspr	r6,SPRN_MAS0
+	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+	mr	r7,r5
+	mtspr	SPRN_MAS0,r6
+	isync
+	tlbwe
+	isync
+
+	/* Switch to AS=1 */
+	mfmsr	r6
+	ori	r6,r6,MSR_IS|MSR_DS
+	mtmsr	r6
+	isync
+
+	mr	r9,r3
+	add	r10,r3,r4
+2:	bl	loadcam_entry
+	addi	r9,r9,1
+	cmpw	r9,r10
+	mr	r3,r9
+	blt	2b
+
+	/* Return to AS=0 and clear the temporary entry */
+	mfmsr	r6
+	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
+	mtmsr	r6
+	isync
+
+	li	r6,0
+	mtspr	SPRN_MAS1,r6
+	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+	oris	r6,r6,MAS0_TLBSEL(1)@h
+	mtspr	SPRN_MAS0,r6
+	isync
+	tlbwe
+	isync
+
+	mtlr	r8
+	blr
 #endif
