Commit b2eec281 authored by Kumar Gala

ppc/85xx: Move code around to prep for NAND_SPL


If we move some of the functions in tlb.c around, we need fewer
#ifdefs: the first-stage loader only needs invalidate_tlb() and
init_tlbs().

Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
parent 206af352
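For context, here is a rough sketch of the file layout this move enables (our illustration, not part of this commit; the CONFIG_NAND_SPL guard name is an assumption about a follow-up patch): once invalidate_tlb() and init_tlbs() sit above everything else in tlb.c, the rest of the file can hide behind a single guard instead of scattered #ifdefs.

/*
 * Hypothetical shape of tlb.c after the move.  The guard below is an
 * assumption; this commit only reorders the code.
 */
void invalidate_tlb(u8 tlb);	/* kept visible for the first-stage loader */
void init_tlbs(void);		/* kept visible for the first-stage loader */

#ifndef CONFIG_NAND_SPL
/* Everything the first stage does not need (set_tlb(), disable_tlb(),
 * the tlbsx helpers, ...) can now sit behind one guard. */
#endif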
@@ -32,6 +32,29 @@
 
 DECLARE_GLOBAL_DATA_PTR;
 
+void invalidate_tlb(u8 tlb)
+{
+	if (tlb == 0)
+		mtspr(MMUCSR0, 0x4);
+	if (tlb == 1)
+		mtspr(MMUCSR0, 0x2);
+}
+
+void init_tlbs(void)
+{
+	int i;
+
+	for (i = 0; i < num_tlb_entries; i++) {
+		write_tlb(tlb_table[i].mas0,
+			  tlb_table[i].mas1,
+			  tlb_table[i].mas2,
+			  tlb_table[i].mas3,
+			  tlb_table[i].mas7);
+	}
+
+	return ;
+}
+
 void set_tlb(u8 tlb, u32 epn, u64 rpn,
 	     u8 perms, u8 wimge,
 	     u8 ts, u8 esel, u8 tsize, u8 iprot)
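Aside (our gloss, not part of the commit): the five arguments passed to write_tlb() are the e500 MAS register images. MAS0 selects the TLB array and entry, MAS1 carries the valid and IPROT bits plus TID, TS and page size, MAS2 holds the effective page number and the WIMGE attributes, MAS3 holds the real page number and permission bits, and MAS7 supplies the upper physical-address bits for 36-bit addressing.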
@@ -77,29 +100,6 @@ void disable_tlb(u8 esel)
 #endif
 }
 
-void invalidate_tlb(u8 tlb)
-{
-	if (tlb == 0)
-		mtspr(MMUCSR0, 0x4);
-	if (tlb == 1)
-		mtspr(MMUCSR0, 0x2);
-}
-
-void init_tlbs(void)
-{
-	int i;
-
-	for (i = 0; i < num_tlb_entries; i++) {
-		write_tlb(tlb_table[i].mas0,
-			  tlb_table[i].mas1,
-			  tlb_table[i].mas2,
-			  tlb_table[i].mas3,
-			  tlb_table[i].mas7);
-	}
-
-	return ;
-}
-
 static void tlbsx (const volatile unsigned *addr)
 {
 	__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
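One more aside on the moved code: the constants written to MMUCSR0 in invalidate_tlb() are the e500 flash-invalidate bits, 0x4 for TLB0 (the set-associative 4 KB-page array) and 0x2 for TLB1 (the variable-page-size CAM), matching the tlb == 0 and tlb == 1 cases above. A minimal sketch of how a first-stage NAND loader might use just the two hoisted functions (the entry-point name and call order here are our assumptions, not code from this commit):

/* Hypothetical early-boot path for a NAND_SPL build.  Assumes the
 * board provides tlb_table[] and num_tlb_entries, as tlb.c expects. */
void nand_boot_init(void)		/* hypothetical entry point */
{
	invalidate_tlb(0);	/* MMUCSR0 = 0x4: flash-invalidate TLB0 */
	invalidate_tlb(1);	/* MMUCSR0 = 0x2: flash-invalidate TLB1 */
	init_tlbs();		/* install the board's static TLB entries */

	/* ...copy the second-stage image from NAND and jump to it... */
}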