lblast.h

/*
 * On return from this function we will be running in virtual mode.
 * We set up the Block Address Translation (BAT) registers thus:
 * 1) BATs 0 and 1 are 256M blocks mapping KZERO -> 0 and KZERO+256M -> 256M
 * 2) BAT 2 maps the 16M FPGA region to itself, data only, uncached
 * 3) BAT 3 direct-maps the last 256M (INTMEM), data only, uncached
 */
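/*
 * BAT encoding, assuming the usual 603/604-class layout: the upper word
 * holds the effective block address, the block-length mask (0x7ff<<2 for
 * 256M, 0x7f<<2 for 16M) and the valid bits (the trailing |2 sets Vs, so
 * the mapping is valid only in supervisor mode); the lower word holds the
 * physical block address plus the WIMG and protection bits, built here
 * from the PTE* constants.
 */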
TEXT mmuinit0(SB), $0
	/* reset all the tlbs */
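	/*
	 * Each TLBIE invalidates the TLB set selected by the effective
	 * address in R4; stepping by BIT(19), taken here to be the 4K page
	 * size in big-endian bit numbering, for the 64 iterations counted
	 * in CTR is intended to flush every TLB set, and TLBSYNC then waits
	 * for the invalidations to complete.
	 */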
	MOVW $64, R3
	MOVW R3, CTR
	MOVW $0, R4
tlbloop:
	TLBIE R4
	SYNC
	ADD $BIT(19), R4
	BDNZ tlbloop
	TLBSYNC
	/* BATs 0 and 1 cover memory from 0x00000000 to 0x20000000 */
	/* KZERO -> 0, IBAT and DBAT, 256 MB */
	MOVW $(KZERO|(0x7ff<<2)|2), R3
	MOVW $(PTEVALID|PTEWRITE), R4	/* PTEVALID => Cache coherency on */
	MOVW R3, SPR(IBATU(0))
	MOVW R4, SPR(IBATL(0))
	MOVW R3, SPR(DBATU(0))
	MOVW R4, SPR(DBATL(0))
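	/*
	 * The same upper/lower pair is written to both the IBAT and DBAT
	 * registers, so instruction fetches and data accesses share one
	 * cached, coherent, read/write view of the kernel window.
	 */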
	/* KZERO+256M -> 256M, IBAT and DBAT, 256 MB */
	ADD $(1<<28), R3
	ADD $(1<<28), R4
	MOVW R3, SPR(IBATU(1))
	MOVW R4, SPR(IBATL(1))
	MOVW R3, SPR(DBATU(1))
	MOVW R4, SPR(DBATL(1))
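	/*
	 * Adding 1<<28 (256M) to both images advances the effective address
	 * in the upper word and the physical address in the lower word;
	 * provided KZERO is 256M-aligned there is no carry into the length,
	 * valid or protection fields, so BAT 1 simply covers the next 256M.
	 */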
	/* FPGABASE -> FPGABASE, DBAT, 16 MB */
	MOVW $(FPGABASE|(0x7f<<2)|2), R3
	MOVW $(FPGABASE|PTEWRITE|PTEUNCACHED), R4	/* FPGA memory, don't cache */
	MOVW R3, SPR(DBATU(2))
	MOVW R4, SPR(DBATL(2))
	/* IBAT 2 unused */
	MOVW R0, SPR(IBATU(2))
	MOVW R0, SPR(IBATL(2))
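	/*
	 * Only a data BAT covers the FPGA registers; the matching IBAT pair
	 * is cleared (R0 is kept zero by convention), so its valid bits are
	 * off and there is no instruction mapping for device space.
	 */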
	/* direct map last block, uncached, not guarded (guarded doesn't work for BATs), DBAT only */
	MOVW $(INTMEM|(0x7ff<<2)|2), R3
	MOVW $(INTMEM|PTEWRITE|PTEUNCACHED), R4	/* Don't set PTEVALID here */
	MOVW R3, SPR(DBATU(3))
	MOVW R4, SPR(DBATL(3))
	/* IBAT 3 unused */
	MOVW R0, SPR(IBATU(3))
	MOVW R0, SPR(IBATL(3))
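	/*
	 * As with the FPGA block, INTMEM (presumably the on-chip device and
	 * register space) gets a data-only, uncached identity mapping, and
	 * the coherency bit (PTEVALID) stays off for the uncached region.
	 */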
	/* enable MMU */
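	/*
	 * SRR0 and SRR1 hold the PC and MSR that RFI will install: the saved
	 * return address is OR'd with KZERO so execution resumes at the
	 * caller's virtual alias, and the new MSR enables instruction and
	 * data translation (MSR_IR, MSR_DR) plus recoverable interrupts and
	 * floating point.
	 */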
	MOVW LR, R3
	OR $KZERO, R3
	MOVW R3, SPR(SRR0)	/* Stored PC for RFI instruction */
	MOVW MSR, R4
	OR $(MSR_IR|MSR_DR|MSR_RI|MSR_FP), R4
	MOVW R4, SPR(SRR1)
	RFI	/* resume in kernel mode in caller */
	RETURN	/* not reached; RFI returns directly to the caller */