12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774 |
- /*
- * sheevaplug machine assist
- * arm926ej-s processor at 1.2GHz
- *
- * loader uses R11 as scratch.
- * R9 and R10 are used for `extern register' variables.
- *
- * ARM v7 arch. ref. man. (I know, this is v5) says in §B1.3.3 that
- * we don't need barriers around moves to CPSR. The ARM v6 manual
- * seems to be silent on the subject.
- */
- #include "arm.s"
- /*
- * MCR and MRC are counter-intuitively named.
- * MCR coproc, opcode1, Rd, CRn, CRm[, opcode2] # arm -> coproc
- * MRC coproc, opcode1, Rd, CRn, CRm[, opcode2] # coproc -> arm
- */
- /*
- * Entered here from Das U-Boot with MMU disabled.
- * Until the MMU is enabled it is OK to call functions provided
- * they are within ±32MiB relative and do not require any
- * local variables or more than one argument (i.e. there is
- * no stack).
- */
- TEXT _start(SB), 1, $-4
- MOVW $setR12(SB), R12 /* load the SB */
- _main:
- /* SVC mode, interrupts disabled */
- MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
- MOVW R1, CPSR
- BARRIERS
- /*
- * disable the MMU & caches,
- * switch to system permission & 32-bit addresses.
- */
- MOVW $(CpCsystem|CpCd32|CpCi32), R1
- MCR CpSC, 0, R1, C(CpCONTROL), C(0)
- ISB
- /*
- * disable the Sheevaplug's L2 cache, invalidate all caches
- */
- /* flush caches. 926ejs manual says we have to do it iteratively. */
- _dwbinv0:
- MRC CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
- BNE _dwbinv0
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- /* make the l2 cache pay attention */
- MOVW $(PHYSIO+0x20100), R1 /* CPUCSREG */
- MOVW (4*10)(R1), R2
- ORR $(1<<3), R2 /* cpu->l2cfg |= L2exists */
- MOVW R2, (4*10)(R1)
- ISB
- /* invalidate l2 cache */
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
- ISB
- /* disable l2 cache. do this while l1 caches are off */
- MRC CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- /* disabling write allocation is probably for cortex-a8 errata 460075 */
- /* l2 off, no wr alloc, no streaming */
- BIC $(CpTCl2ena | CpTCl2wralloc | CpTCldcstream), R1
- MCR CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- BARRIERS
- /* flush caches. 926ejs manual says we have to do it iteratively. */
- _dwbinv1:
- MRC CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
- BNE _dwbinv1
- BARRIERS
- PUTC('\r')
- /* clear Mach */
- MOVW $PADDR(MACHADDR), R4 /* address of Mach */
- _machZ:
- MOVW R0, (R4)
- ADD $4, R4 /* bump PTE address */
- CMP.S $PADDR(L1+L1X(0)), R4
- BNE _machZ
- /*
- * set up the MMU page table
- */
- /* clear all PTEs first, to provide a default */
- PUTC('\n')
- MOVW $PADDR(L1+L1X(0)), R4 /* address of PTE for 0 */
- _ptenv0:
- ZEROPTE()
- CMP.S $PADDR(L1+16*KiB), R4
- BNE _ptenv0
- /* double map of PHYSDRAM, KZERO to PHYSDRAM for first few MBs */
- MOVW $PTEDRAM, R2 /* PTE bits */
- MOVW $PHYSDRAM, R3 /* pa */
- MOVW $PADDR(L1+L1X(PHYSDRAM)), R4 /* address of PTE for PHYSDRAM */
- MOVW $16, R5
- _ptdbl:
- FILLPTE()
- SUB.S $1, R5
- BNE _ptdbl
- /*
- * back up and fill in PTEs for memory at KZERO
- * there is 1 bank of 512MB of SDRAM at PHYSDRAM
- */
- MOVW $PTEDRAM, R2 /* PTE bits */
- MOVW $PHYSDRAM, R3
- MOVW $PADDR(L1+L1X(KZERO)), R4 /* start with PTE for KZERO */
- MOVW $512, R5 /* inner loop count */
- _ptekrw: /* set PTEs for 512MiB */
- FILLPTE()
- SUB.S $1, R5
- BNE _ptekrw
- /*
- * back up and fill in PTE for MMIO
- */
- MOVW $PTEIO, R2 /* PTE bits */
- MOVW $PHYSIO, R3
- MOVW $PADDR(L1+L1X(VIRTIO)), R4 /* start with PTE for VIRTIO */
- FILLPTE()
- /* mmu.c sets up the vectors later */
- /*
- * set up a temporary stack; avoid data & bss segments
- */
- MOVW $(PHYSDRAM | (128*1024*1024)), R13
- PUTC('P')
- /* set the domain access control */
- MOVW $Client, R0
- BL dacput(SB)
- /* set the translation table base */
- MOVW $PADDR(L1), R0
- BL ttbput(SB)
- MOVW $0, R0
- BL pidput(SB) /* paranoia */
- /* the little dance to turn the MMU & caches on */
- PUTC('l')
- BL cacheuwbinv(SB)
- BL mmuinvalidate(SB)
- BL mmuenable(SB)
- PUTC('a')
- /* warp the PC into the virtual map */
- MOVW $KZERO, R0
- BL _r15warp(SB)
- /*
- * now running at KZERO+something!
- */
- MOVW $setR12(SB), R12 /* reload the SB */
- /*
- * set up temporary stack again, in case we've just switched
- * to a new register set.
- */
- MOVW $(KZERO|(128*1024*1024)), R13
- /* can now execute arbitrary C code */
- BL cacheuwbinv(SB)
- PUTC('n')
- /* undo double map of 0, KZERO */
- MOVW $PADDR(L1+L1X(0)), R4 /* address of PTE for 0 */
- MOVW $0, R0
- MOVW $16, R5
- _ptudbl:
- MOVW R0, (R4)
- ADD $4, R4 /* bump PTE address */
- ADD $MiB, R0 /* bump pa */
- SUB.S $1, R5
- BNE _ptudbl
- BARRIERS
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinvse
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
- BARRIERS
- PUTC(' ')
- /* pass Mach to main and set up the stack */
- MOVW $(MACHADDR), R0 /* Mach */
- MOVW R0, R13
- ADD $(MACHSIZE), R13 /* stack pointer */
- SUB $4, R13 /* space for link register */
- BL main(SB) /* void main(Mach*) */
- /* fall through */
- /* not used */
- TEXT _reset(SB), 1, $-4
- /* turn the caches off */
- MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R0
- MOVW R0, CPSR
- BARRIERS
- BL cacheuwbinv(SB)
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- BIC $(CpCwb|CpCicache|CpCdcache|CpCalign), R0
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- PUTC('R')
- /* redo double map of 0, KZERO */
- MOVW $(L1+L1X(0)), R4 /* address of PTE for 0 */
- MOVW $PTEDRAM, R2 /* PTE bits */
- MOVW $0, R3
- MOVW $16, R5
- _ptrdbl:
- ORR R3, R2, R1 /* first identity-map 0 to 0, etc. */
- MOVW R1, (R4)
- ADD $4, R4 /* bump PTE address */
- ADD $MiB, R3 /* bump pa */
- SUB.S $1, R5
- BNE _ptrdbl
- BARRIERS
- PUTC('e')
- MOVW $0, R0
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
- BARRIERS
- /* back to 29- or 26-bit addressing, mainly for SB */
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- BIC $(CpCd32|CpCi32), R0
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- /* turn the MMU off */
- MOVW $PHYSDRAM, R0
- BL _r15warp(SB)
- BL mmuinvalidate(SB)
- BL mmudisable(SB)
- PUTC('s')
- /* set new reset vector */
- MOVW $0, R2
- MOVW $0xe59ff018, R3 /* MOVW 0x18(R15), R15 */
- MOVW R3, (R2)
- PUTC('e')
- MOVW $PHYSBOOTROM, R3
- MOVW R3, 0x20(R2) /* where $0xe59ff018 jumps to */
- BARRIERS
- PUTC('t')
- PUTC('\r')
- PUTC('\n')
- /* ...and jump to it */
- MOVW R2, R15 /* software reboot */
- _limbo: /* should not get here... */
- B _limbo /* ... and can't get out */
- BL _div(SB) /* hack to load _div, etc. */
- /*
- * _r15warp: rebase the return address (R14) and stack pointer (R13)
- * into the segment given in R0, by clearing their KSEGM bits and
- * ORing in R0. returning via R14 then "warps" the PC too.
- */
- TEXT _r15warp(SB), 1, $-4
- BIC $KSEGM, R14
- ORR R0, R14
- BIC $KSEGM, R13
- ORR R0, R13
- RET
- /* clobbers R1, R6 */
- /*
- * myputc: busy-wait for the console UART transmit-holding register
- * to empty, then write the byte in R3 to it.
- */
- TEXT myputc(SB), 1, $-4
- MOVW $PHYSCONS, R6
- _busy:
- MOVW 20(R6), R1 /* read line status register */
- BIC.S $~(1<<5), R1 /* (x->lsr & LSRthre) == 0? */
- BEQ _busy
- MOVW R3, (R6) /* print */
- ISB
- RET
- /*
- * l1 caches
- */
- /* enable L1 I & D caches and write buffer, with interrupts held off */
- TEXT l1cacheson(SB), 1, $-4
- MOVW CPSR, R5
- ORR $(PsrDirq|PsrDfiq), R5, R4
- MOVW R4, CPSR /* splhi */
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- ORR $(CpCdcache|CpCicache|CpCwb), R0
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- MOVW R5, CPSR /* splx */
- RET
- /* write back & invalidate, then disable, the L1 caches and write buffer */
- TEXT l1cachesoff(SB), 1, $-4
- MOVM.DB.W [R14], (SP) /* save lr on stack; cacheuwbinv clobbers it */
- MOVW CPSR, R5
- ORR $(PsrDirq|PsrDfiq), R5, R4
- MOVW R4, CPSR /* splhi */
- BL cacheuwbinv(SB)
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- BIC $(CpCdcache|CpCicache|CpCwb), R0
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- MOVW R5, CPSR /* splx */
- MOVM.IA.W (SP), [R14] /* restore lr */
- RET
- /*
- * cache* functions affect only the L1 caches, which are VIVT.
- */
- TEXT cachedwb(SB), 1, $-4 /* D writeback */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- BARRIERS /* force outstanding stores to cache */
- /* keep writing back dirty cache lines until no more exist */
- _dwb:
- /* 926ejs test-and-clean: sets flags; NE while dirty lines remain */
- MRC CpSC, 0, PC, C(CpCACHE), C(CpCACHEwb), CpCACHEtest
- BNE _dwb
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- /*
- * cachedwbse(void *addr, int size): write back D-cache lines covering
- * [addr, addr+size), one CACHELINESZ line at a time.
- */
- TEXT cachedwbse(SB), 1, $-4 /* D writeback SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- BARRIERS /* force outstanding stores to cache */
- MOVW 4(FP), R1 /* second arg: size */
- /* disabled short-cut: fall into full writeback for large ranges */
- // CMP.S $(4*1024), R1
- // BGT _dwb
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _dwbse:
- MCR CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _dwbse
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- TEXT cachedwbinv(SB), 1, $-4 /* D writeback+invalidate */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- BARRIERS /* force outstanding stores to cache */
- /* keep writing back dirty cache lines until no more exist */
- _dwbinv:
- /* 926ejs test, clean & invalidate: NE while dirty lines remain */
- MRC CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
- BNE _dwbinv
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- /*
- * cachedwbinvse(void *addr, int size): write back, then invalidate,
- * D-cache lines covering [addr, addr+size).
- */
- TEXT cachedwbinvse(SB), 1, $-4 /* D writeback+invalidate SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- BARRIERS /* force outstanding stores to cache */
- MOVW 4(FP), R1 /* second arg: size */
- DSB
- /* disabled short-cut: fall into full writeback+invalidate for large ranges */
- // CMP.S $(4*1024), R1
- // BGT _dwbinv
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _dwbinvse:
- MCR CpSC, 0, R2, C(CpCACHE), C(CpCACHEwbi), CpCACHEse
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _dwbinvse
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- /*
- * cachedinvse(void *addr, int size): invalidate (without writeback)
- * D-cache lines covering [addr, addr+size). dirty data in the range
- * is discarded, so the caller must know it is stale.
- */
- TEXT cachedinvse(SB), 1, $-4 /* D invalidate SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- MOVW 4(FP), R1 /* second arg: size */
- DSB
- /* disabled short-cut: fall into full invalidate for large ranges */
- // CMP.S $(4*1024), R1
- // BGT _dinv
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _dinvse:
- MCR CpSC, 0, R2, C(CpCACHE), C(CpCACHEinvd), CpCACHEse
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _dinvse
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- /* write back & invalidate the entire D cache, then invalidate the I cache */
- TEXT cacheuwbinv(SB), 1, $-4 /* D+I writeback+invalidate */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- BARRIERS /* force outstanding stores to cache */
- /* keep writing back dirty cache lines until no more exist */
- _uwbinv: /* D writeback+invalidate */
- MRC CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
- BNE _uwbinv
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW $0, R0 /* I invalidate */
- MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- MOVW R3, CPSR /* splx */
- RET
- /* invalidate the entire I cache */
- TEXT cacheiinv(SB), 1, $-4 /* I invalidate */
- BARRIERS
- MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- RET
- /* invalidate the entire D cache without writeback; dirty data is lost */
- TEXT cachedinv(SB), 1, $-4 /* D invalidate */
- _dinv:
- BARRIERS
- MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
- /* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
- BARRIERS
- RET
- /*
- * l2 cache
- *
- * these functions assume that the necessary l1 cache operations have been
- * or will be done explicitly by the caller.
- */
- /* enable l2 cache in config coproc. reg. do this while l1 caches are off. */
- TEXT l2cachecfgon(SB), 1, $-4
- BARRIERS
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all /* invalidate first */
- BARRIERS
- MRC CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- ORR $(CpTCl2ena | CpTCl2prefdis), R1 /* l2 on, prefetch off */
- MCR CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- BARRIERS
- RET
- /* disable l2 cache in config coproc. reg. do this while l1 caches are off. */
- TEXT l2cachecfgoff(SB), 1, $-4
- BARRIERS
- MRC CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- BIC $CpTCl2ena, R1
- MCR CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
- BARRIERS
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all /* invalidate after disabling */
- BARRIERS
- RET
- /* write back (flush) the entire L2 cache */
- TEXT l2cacheuwb(SB), 1, $-4 /* L2 unified writeback */
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
- ISB
- RET
- /*
- * l2cacheuwbse(void *addr, int size): write back L2 lines covering
- * [addr, addr+size), by virtual address, a line at a time.
- */
- TEXT l2cacheuwbse(SB), 1, $-4 /* L2 unified writeback SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- MOVW 4(FP), R1 /* second arg: size */
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _l2wbse:
- MCR CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _l2wbse
- ISB
- MOVW R3, CPSR /* splx */
- RET
- /* write back, then invalidate, the entire L2 cache */
- TEXT l2cacheuwbinv(SB), 1, $-4 /* L2 unified writeback+invalidate */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
- ISB
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
- ISB
- MOVW R3, CPSR /* splx */
- RET
- /*
- * l2cacheuwbinvse(void *addr, int size): write back, then invalidate,
- * L2 lines covering [addr, addr+size), by virtual address.
- */
- TEXT l2cacheuwbinvse(SB), 1, $-4 /* L2 unified writeback+invalidate SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- MOVW 4(FP), R1 /* second arg: size */
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _l2wbinvse:
- MCR CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
- ISB
- MCR CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _l2wbinvse
- ISB
- MOVW R3, CPSR /* splx */
- RET
- /* invalidate the entire L2 cache without writeback */
- TEXT l2cacheuinv(SB), 1, $-4 /* L2 unified invalidate */
- MCR CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
- ISB
- RET
- /*
- * l2cacheuinvse(void *addr, int size): invalidate (without writeback)
- * L2 lines covering [addr, addr+size), by virtual address.
- */
- TEXT l2cacheuinvse(SB), 1, $-4 /* L2 unified invalidate SE */
- MOVW R0, R2 /* first arg: address */
- MOVW CPSR, R3 /* splhi */
- ORR $(PsrDirq), R3, R1
- MOVW R1, CPSR
- MOVW 4(FP), R1 /* second arg: size */
- ADD R2, R1 /* R1 = end address */
- BIC $(CACHELINESZ-1), R2 /* align start down to a cache line */
- _l2invse:
- MCR CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
- ADD $CACHELINESZ, R2
- CMP.S R2, R1
- BGT _l2invse
- ISB
- MOVW R3, CPSR /* splx */
- RET
- /*
- * enable mmu, i and d caches, and high vector
- */
- TEXT mmuenable(SB), 1, $-4
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- ORR $(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
- BIC $(CpCrom), R0 /* system permission, not rom permission */
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- RET
- /* disable mmu, caches, write buffer and high vector */
- TEXT mmudisable(SB), 1, $-4
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- BIC $(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
- MCR CpSC, 0, R0, C(CpCONTROL), C(0)
- BARRIERS
- RET
- TEXT mmuinvalidate(SB), 1, $-4 /* invalidate all */
- MOVW $0, R0
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv /* invalidate unified TLB */
- BARRIERS
- RET
- TEXT mmuinvalidateaddr(SB), 1, $-4 /* invalidate single entry */
- MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse /* R0 = virtual address */
- BARRIERS
- RET
- /*
- * CP15 register accessors. *get return the register in R0;
- * *put write R0 to the register, with an ISB after each write.
- */
- TEXT cpidget(SB), 1, $-4 /* main ID */
- MRC CpSC, 0, R0, C(CpID), C(0), CpIDid
- RET
- TEXT cpctget(SB), 1, $-4 /* cache type */
- MRC CpSC, 0, R0, C(CpID), C(0), CpIDct
- RET
- TEXT controlget(SB), 1, $-4 /* control */
- MRC CpSC, 0, R0, C(CpCONTROL), C(0)
- RET
- TEXT ttbget(SB), 1, $-4 /* translation table base */
- MRC CpSC, 0, R0, C(CpTTB), C(0)
- RET
- TEXT ttbput(SB), 1, $-4 /* translation table base */
- MCR CpSC, 0, R0, C(CpTTB), C(0)
- ISB
- RET
- TEXT dacget(SB), 1, $-4 /* domain access control */
- MRC CpSC, 0, R0, C(CpDAC), C(0)
- RET
- TEXT dacput(SB), 1, $-4 /* domain access control */
- MCR CpSC, 0, R0, C(CpDAC), C(0)
- ISB
- RET
- TEXT fsrget(SB), 1, $-4 /* fault status */
- MRC CpSC, 0, R0, C(CpFSR), C(0)
- RET
- TEXT farget(SB), 1, $-4 /* fault address */
- MRC CpSC, 0, R0, C(CpFAR), C(0x0)
- RET
- TEXT pidget(SB), 1, $-4 /* address translation pid */
- MRC CpSC, 0, R0, C(CpPID), C(0x0)
- RET
- TEXT pidput(SB), 1, $-4 /* address translation pid */
- MCR CpSC, 0, R0, C(CpPID), C(0x0)
- ISB
- RET
- /*
- * interrupt priority routines. splhi/splfhi return the previous CPSR
- * in R0 for a later splx; splhi/splx also record the caller's pc in
- * Mach for debugging.
- */
- TEXT splhi(SB), 1, $-4
- MOVW $(MACHADDR+4), R2 /* save caller pc in Mach */
- MOVW R14, 0(R2)
- MOVW CPSR, R0 /* turn off interrupts */
- ORR $(PsrDirq), R0, R1
- MOVW R1, CPSR
- RET
- TEXT spllo(SB), 1, $-4
- MOVW CPSR, R0
- BIC $(PsrDirq), R0, R1 /* enable interrupts */
- MOVW R1, CPSR
- RET
- TEXT splx(SB), 1, $-4
- MOVW $(MACHADDR+0x04), R2 /* save caller pc in Mach */
- MOVW R14, 0(R2)
- MOVW R0, R1 /* reset interrupt level */
- MOVW CPSR, R0
- MOVW R1, CPSR
- RET
- TEXT splxpc(SB), 1, $-4 /* for iunlock */
- MOVW R0, R1 /* restore saved CPSR without touching Mach pc */
- MOVW CPSR, R0
- MOVW R1, CPSR
- RET
- TEXT spldone(SB), 1, $0
- RET
- TEXT islo(SB), 1, $-4 /* non-zero iff interrupts are enabled */
- MOVW CPSR, R0
- AND $(PsrDirq), R0
- EOR $(PsrDirq), R0
- RET
- TEXT splfhi(SB), $-4 /* disable both FIQs and IRQs */
- MOVW CPSR, R0
- ORR $(PsrDfiq|PsrDirq), R0, R1
- MOVW R1, CPSR
- RET
- //TEXT splflo(SB), $-4
- // MOVW CPSR, R0
- // BIC $(PsrDfiq), R0, R1
- // MOVW R1, CPSR
- // RET
- /*
- * tas(Lock*): atomically swap 1 into the word at R0; returns the
- * previous value in R0 (0 means the lock was acquired).
- */
- TEXT tas(SB), $-4
- TEXT _tas(SB), $-4
- MOVW R0,R1
- MOVW $1,R0
- SWPW R0,(R1) /* fix: deprecated in armv7 */
- RET
- //TEXT tas32(SB), 1, $-4
- // MOVW R0, R1
- // MOVW $0xDEADDEAD, R0
- // MOVW R0, R3
- // SWPW R0, (R1)
- // CMP.S R0, R3
- // BEQ _tasout
- // EOR R3, R3 /* R3 = 0 */
- // CMP.S R0, R3
- // BEQ _tasout
- // MOVW $1, R15 /* abort: lock != 0 && lock != $0xDEADDEAD */
- //_tasout:
- // RET
- /* clz(ulong): count leading zeroes of R0, result in R0 */
- TEXT clz(SB), 1, $-4
- CLZ(0, 0) /* 0 is R0 */
- RET
- /* setlabel(Label*): record sp and pc for a later gotolabel; returns 0 */
- TEXT setlabel(SB), 1, $-4
- MOVW R13, 0(R0) /* sp */
- MOVW R14, 4(R0) /* pc */
- BARRIERS
- MOVW $0, R0
- RET
- /* gotolabel(Label*): resume at a saved label; appears to return 1 from setlabel */
- TEXT gotolabel(SB), 1, $-4
- MOVW 0(R0), R13 /* sp */
- MOVW 4(R0), R14 /* pc */
- BARRIERS
- MOVW $1, R0
- RET
- /* getcallerpc(void*): return the pc saved at the caller's 0(SP) */
- TEXT getcallerpc(SB), 1, $-4
- MOVW 0(R13), R0
- RET
- /* idle with interrupts enabled, waiting for an interrupt (CP15 wait) */
- TEXT _idlehands(SB), 1, $-4
- MOVW CPSR, R3
- // ORR $PsrDirq, R3, R1 /* splhi */
- BIC $PsrDirq, R3, R1 /* spllo */
- MOVW R1, CPSR
- MOVW $0, R0 /* wait for interrupt */
- MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
- ISB
- MOVW R3, CPSR /* splx */
- RET
- /* barriers(void): execute the BARRIERS sequence from C */
- TEXT barriers(SB), 1, $-4
- BARRIERS
- RET
|