/*
- * Copyright 2008 Freescale Semiconductor, Inc.
+ * Copyright 2008-2009 Freescale Semiconductor, Inc.
*
* (C) Copyright 2000
DECLARE_GLOBAL_DATA_PTR;
+void invalidate_tlb(u8 tlb)
+{
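+ /* MMUCSR0[TLB0_FI] (0x4) flash-invalidates all of TLB0; MMUCSR0[TLB1_FI] (0x2) does the same for TLB1 */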
+ if (tlb == 0)
+ mtspr(MMUCSR0, 0x4);
+ if (tlb == 1)
+ mtspr(MMUCSR0, 0x2);
+}
+
+void init_tlbs(void)
+{
+ int i;
+
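+ /* program each boot-time entry from the board's tlb_table through write_tlb() */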
+ for (i = 0; i < num_tlb_entries; i++) {
+ write_tlb(tlb_table[i].mas0,
+ tlb_table[i].mas1,
+ tlb_table[i].mas2,
+ tlb_table[i].mas3,
+ tlb_table[i].mas7);
+ }
+
+ return ;
+}
+
void set_tlb(u8 tlb, u32 epn, u64 rpn,
u8 perms, u8 wimge,
u8 ts, u8 esel, u8 tsize, u8 iprot)
_mas1 = FSL_BOOKE_MAS1(1, iprot, 0, ts, tsize);
_mas2 = FSL_BOOKE_MAS2(epn, wimge);
_mas3 = FSL_BOOKE_MAS3(rpn, 0, perms);
- _mas7 = rpn >> 32;
+ _mas7 = FSL_BOOKE_MAS7(rpn);
- mtspr(MAS0, _mas0);
- mtspr(MAS1, _mas1);
- mtspr(MAS2, _mas2);
- mtspr(MAS3, _mas3);
-#ifdef CONFIG_ENABLE_36BIT_PHYS
- mtspr(MAS7, _mas7);
-#endif
- asm volatile("isync;msync;tlbwe;isync");
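+ /* write_tlb() performs the MAS0-MAS3/MAS7 writes and the tlbwe/sync sequence removed above */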
+ write_tlb(_mas0, _mas1, _mas2, _mas3, _mas7);
#ifdef CONFIG_ADDR_MAP
if ((tlb == 1) && (gd->flags & GD_FLG_RELOC))
#endif
}
-void invalidate_tlb(u8 tlb)
+static void tlbsx(const volatile unsigned *addr)
{
- if (tlb == 0)
- mtspr(MMUCSR0, 0x4);
- if (tlb == 1)
- mtspr(MMUCSR0, 0x2);
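+ /* tlbsx searches the TLB for this effective address; on a hit the matching entry lands in the MAS registers, on a miss MAS1[V] stays clear */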
+ __asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
}
-void init_tlbs(void)
+/* return -1 if we didn't find anything */
+int find_tlb_idx(void *addr, u8 tlbsel)
{
- int i;
+ u32 _mas0, _mas1;
- for (i = 0; i < num_tlb_entries; i++) {
- set_tlb(tlb_table[i].tlb, tlb_table[i].epn, tlb_table[i].rpn,
- tlb_table[i].perms, tlb_table[i].wimge,
- tlb_table[i].ts, tlb_table[i].esel, tlb_table[i].tsize,
- tlb_table[i].iprot);
+ /* zero out Search PID, AS */
+ mtspr(MAS6, 0);
+
+ tlbsx(addr);
+
+ _mas0 = mfspr(MAS0);
+ _mas1 = mfspr(MAS1);
+
+ /* we found something, and it's in the TLB we expect */
+ if ((MAS1_VALID & _mas1) &&
+ (MAS0_TLBSEL(tlbsel) == (_mas0 & MAS0_TLBSEL_MSK))) {
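+ /* the ESEL field of MAS0 holds the index of the matching entry */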
+ return ((_mas0 & MAS0_ESEL_MSK) >> 16);
}
- return ;
+ return -1;
}
#ifdef CONFIG_ADDR_MAP
void init_addr_map(void)
{
int i;
+ unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;
- for (i = 0; i < num_tlb_entries; i++) {
- if (tlb_table[i].tlb == 0)
+ /* walk all the entries */
+ for (i = 0; i < num_cam; i++) {
+ unsigned long epn;
+ u32 tsize, _mas1;
+ phys_addr_t rpn;
+
+ mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));
+
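+ /* tlbre reads the TLB1 entry selected by MAS0 back into MAS1-MAS3 (and MAS7) */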
+ asm volatile("tlbre;isync");
+ _mas1 = mfspr(MAS1);
+
+ /* if the entry isn't valid, skip it */
+ if (!(_mas1 & MAS1_VALID))
continue;
- addrmap_set_entry(tlb_table[i].epn,
- tlb_table[i].rpn,
- (1UL << ((tlb_table[i].tsize * 2) + 10)),
- tlb_table[i].esel);
+ tsize = (_mas1 >> 8) & 0xf;
+ epn = mfspr(MAS2) & MAS2_EPN;
+ rpn = mfspr(MAS3) & MAS3_RPN;
+#ifdef CONFIG_ENABLE_36BIT_PHYS
+ rpn |= ((phys_addr_t)mfspr(MAS7)) << 32;
+#endif
+
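+ /* MAS1[TSIZE] encodes a 4^tsize kB page, i.e. 1 << (tsize * 2 + 10) bytes */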
+ addrmap_set_entry(epn, rpn, (1UL << ((tsize * 2) + 10)), i);
}
return ;
unsigned int setup_ddr_tlbs(unsigned int memsize_in_meg)
{
unsigned int tlb_size;
- unsigned int ram_tlb_index;
- unsigned int ram_tlb_address;
+ unsigned int ram_tlb_index = CONFIG_SYS_DDR_TLB_START;
+ unsigned int ram_tlb_address = (unsigned int)CONFIG_SYS_DDR_SDRAM_BASE;
+ unsigned int max_cam = (mfspr(SPRN_TLB1CFG) >> 16) & 0xf;
+ u64 size, memsize = (u64)memsize_in_meg << 20;
- /*
- * Determine size of each TLB1 entry.
- */
- switch (memsize_in_meg) {
- case 16:
- case 32:
- tlb_size = BOOKE_PAGESZ_16M;
- break;
- case 64:
- case 128:
- tlb_size = BOOKE_PAGESZ_64M;
- break;
- case 256:
- case 512:
- tlb_size = BOOKE_PAGESZ_256M;
- break;
- case 1024:
- case 2048:
- if (PVR_VER(get_pvr()) > PVR_VER(PVR_85xx))
- tlb_size = BOOKE_PAGESZ_1G;
- else
- tlb_size = BOOKE_PAGESZ_256M;
- break;
- default:
- puts("DDR: only 16M, 32M, 64M, 128M, 256M, 512M, 1G"
- " and 2G are supported.\n");
-
- /*
- * The memory was not able to be mapped.
- * Default to a small size.
- */
- tlb_size = BOOKE_PAGESZ_64M;
- memsize_in_meg = 64;
- break;
- }
+ size = min(memsize, CONFIG_MAX_MEM_MAPPED);
+
+ /* Convert max_cam from the 4^n kB MAXSIZE encoding to log2 of the max page size in bytes */
+ max_cam = max_cam * 2 + 10;
+
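+ /*
+ * Carve the region into power-of-4 sized CAM entries: each entry is
+ * limited by the remaining size, the alignment of its base address,
+ * and the largest page size the core supports (max_cam).
+ */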
+ for (; size && ram_tlb_index < 16; ram_tlb_index++) {
+ u32 camsize = __ilog2_u64(size) & ~1U;
+ u32 align = __ilog2(ram_tlb_address) & ~1U;
+
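+ /* __ilog2(0) is -1, so a DDR base address of 0 shows up here as (u32)-2: no alignment limit */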
+ if (align == -2)
+ align = max_cam;
+ if (camsize > align)
+ camsize = align;
+
+ if (camsize > max_cam)
+ camsize = max_cam;
+
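+ /* convert log2(bytes) back to the 4^n kB TSIZE encoding that set_tlb() expects */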
+ tlb_size = (camsize - 10) / 2;
- /*
- * Configure DDR TLB1 entries.
- * Starting at TLB1 8, use no more than 8 TLB1 entries.
- */
- ram_tlb_index = CONFIG_SYS_DDR_TLB_START;
- ram_tlb_address = (unsigned int)CONFIG_SYS_DDR_SDRAM_BASE;
- while (ram_tlb_address < (memsize_in_meg * 1024 * 1024)
- && ram_tlb_index < 16) {
set_tlb(1, ram_tlb_address, ram_tlb_address,
MAS3_SX|MAS3_SW|MAS3_SR, 0,
0, ram_tlb_index, tlb_size, 1);
- ram_tlb_address += (0x1000 << ((tlb_size - 1) * 2));
- ram_tlb_index++;
+ size -= 1ULL << camsize;
+ memsize -= 1ULL << camsize;
+ ram_tlb_address += 1UL << camsize;
}
+ if (memsize)
+ print_size(memsize, " left unmapped\n");
+
/*
* Confirm that the requested amount of memory was mapped.
*/