/* cache_helpers.S */
/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
  30. #include <arch_helpers.h>
  31. #include <asm_macros.S>
  32. .globl dcisw
  33. .globl dccisw
  34. .globl dccsw
  35. .globl dccvac
  36. .globl dcivac
  37. .globl dccivac
  38. .globl dccvau
  39. .globl dczva
  40. .globl flush_dcache_range
  41. .globl inv_dcache_range
  42. .globl dcsw_op_louis
  43. .globl dcsw_op_all
	/* -------------------------------------------
	 * DC ISW: invalidate one data cache line by
	 * set/way. x0 = set/way/level operand.
	 * -------------------------------------------
	 */
func dcisw
	dc	isw, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC CISW: clean and invalidate one data
	 * cache line by set/way. x0 = set/way/level.
	 * -------------------------------------------
	 */
func dccisw
	dc	cisw, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC CSW: clean one data cache line by
	 * set/way. x0 = set/way/level operand.
	 * -------------------------------------------
	 */
func dccsw
	dc	csw, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC CVAC: clean one data cache line by VA
	 * to Point of Coherency. x0 = virtual address.
	 * -------------------------------------------
	 */
func dccvac
	dc	cvac, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC IVAC: invalidate one data cache line by
	 * VA to Point of Coherency. x0 = virtual addr.
	 * -------------------------------------------
	 */
func dcivac
	dc	ivac, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC CIVAC: clean and invalidate one data
	 * cache line by VA to PoC. x0 = virtual addr.
	 * -------------------------------------------
	 */
func dccivac
	dc	civac, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC CVAU: clean one data cache line by VA to
	 * Point of Unification. x0 = virtual address.
	 * -------------------------------------------
	 */
func dccvau
	dc	cvau, x0
	dsb	sy			// wait for the maintenance op to complete
	isb				// flush the pipeline before returning
	ret
	/* -------------------------------------------
	 * DC ZVA: zero a block of memory by VA.
	 * x0 = virtual address (block size is given
	 * by DCZID_EL0, not visible here).
	 * -------------------------------------------
	 */
func dczva
	dc	zva, x0
	dsb	sy			// wait for the zeroing op to complete
	isb				// flush the pipeline before returning
	ret
  84. /* ------------------------------------------
  85. * Clean+Invalidate from base address till
  86. * size. 'x0' = addr, 'x1' = size
  87. * ------------------------------------------
  88. */
  89. func flush_dcache_range
  90. dcache_line_size x2, x3
  91. add x1, x0, x1
  92. sub x3, x2, #1
  93. bic x0, x0, x3
  94. flush_loop:
  95. dc civac, x0
  96. add x0, x0, x2
  97. cmp x0, x1
  98. b.lo flush_loop
  99. dsb sy
  100. ret
  101. /* ------------------------------------------
  102. * Invalidate from base address till
  103. * size. 'x0' = addr, 'x1' = size
  104. * ------------------------------------------
  105. */
  106. func inv_dcache_range
  107. dcache_line_size x2, x3
  108. add x1, x0, x1
  109. sub x3, x2, #1
  110. bic x0, x0, x3
  111. inv_loop:
  112. dc ivac, x0
  113. add x0, x0, x2
  114. cmp x0, x1
  115. b.lo inv_loop
  116. dsb sy
  117. ret
	/* ------------------------------------------
	 * Data cache operations by set/way to the
	 * level specified
	 * ------------------------------------------
	 * ----------------------------------
	 * Call this func with the clidr in
	 * x0, starting cache level in x10
	 * (already in CSSELR form, i.e.
	 * level * 2), last cache level
	 * (exclusive, also * 2) in x3 & the
	 * address of the per-line op routine
	 * (dcisw/dccisw/dccsw) in x14.
	 * Clobbers x1, x2, x4-x7, x9,
	 * x11-x13 and CSSELR_EL1.
	 * ----------------------------------
	 */
func dcsw_op
all_start_at_level:
	add	x2, x10, x10, lsr #1	// work out 3x current cache level (CLIDR has 3 bits per level)
	lsr	x1, x0, x2		// extract cache type bits from clidr
	and	x1, x1, #7		// mask of the bits for current cache only
	cmp	x1, #2			// see what cache we have at this level (0=none, 1=I-only)
	b.lt	skip			// skip if no cache, or just i-cache
	msr	csselr_el1, x10		// select current cache level in csselr
	isb				// isb to sych the new cssr&csidr
	mrs	x1, ccsidr_el1		// read the new ccsidr
	and	x2, x1, #7		// extract the length of the cache lines (LineSize field)
	add	x2, x2, #4		// add 4 (line length offset) -> x2 = log2(line bytes) = set-field shift
	mov	x4, #0x3ff
	and	x4, x4, x1, lsr #3	// find maximum number on the way size (Associativity field)
	clz	w5, w4			// find bit position of way size increment (way-field shift)
	mov	x7, #0x7fff
	and	x7, x7, x1, lsr #13	// extract max number of the index size (NumSets field)
loop2:
	mov	x9, x4			// create working copy of max way size
loop3:
	lsl	x6, x9, x5		// position way number in the set/way operand
	orr	x11, x10, x6		// factor way and cache number into x11
	lsl	x6, x7, x2		// position set number in the set/way operand
	orr	x11, x11, x6		// factor index number into x11
	mov	x12, x0			// preserve clidr across the indirect call
	mov	x13, x30		// lr — clobbered by blr below
	mov	x0, x11
	blr	x14			// perform dc isw/cisw/csw on this set/way
	mov	x0, x12			// restore clidr
	mov	x30, x13		// lr
	subs	x9, x9, #1		// decrement the way
	b.ge	loop3
	subs	x7, x7, #1		// decrement the index
	b.ge	loop2
skip:
	add	x10, x10, #2		// increment cache number (CSSELR level field step)
	cmp	x3, x10			// keep going while current level < last level
	b.gt	all_start_at_level
finished:
	mov	x10, #0			// switch back to cache level 0
	msr	csselr_el1, x10		// select current cache level in csselr
	dsb	sy			// complete all maintenance before return
	isb
	ret
	/* ----------------------------------
	 * Dispatch a set/way data cache op.
	 * x0 = op type (DCISW/DCCISW/DCCSW),
	 * x3 = last cache level (exclusive),
	 * x9 = CLIDR_EL1 value, x10 = start
	 * level — as set up by the callers'
	 * setup_dcsw_op_args macro.
	 * Tail-branches into dcsw_op, which
	 * rets to this function's caller.
	 * ----------------------------------
	 */
func do_dcsw_op
	cbz	x3, exit		// no cache levels to operate on
	cmp	x0, #DCISW
	b.eq	dc_isw
	cmp	x0, #DCCISW
	b.eq	dc_cisw
	cmp	x0, #DCCSW
	b.eq	dc_csw
	// NOTE(review): an unrecognized op falls through to dc_isw
	// (invalidate) below — confirm this default is intentional.
dc_isw:
	mov	x0, x9			// dcsw_op expects clidr in x0
	adr	x14, dcisw		// per-line routine for dcsw_op
	b	dcsw_op
dc_cisw:
	mov	x0, x9			// dcsw_op expects clidr in x0
	adr	x14, dccisw		// per-line routine for dcsw_op
	b	dcsw_op
dc_csw:
	mov	x0, x9			// dcsw_op expects clidr in x0
	adr	x14, dccsw		// per-line routine for dcsw_op
	b	dcsw_op
exit:
	ret
	/* ----------------------------------
	 * Perform the set/way op in x0
	 * (DCISW/DCCISW/DCCSW) on all cache
	 * levels up to the Level Of
	 * Unification Inner Shareable.
	 * setup_dcsw_op_args presumably loads
	 * CLIDR into x9 and the level bounds
	 * into x10/x3 — see asm_macros.S.
	 * ----------------------------------
	 */
func dcsw_op_louis
	dsb	sy			// ensure prior writes are visible before maintenance
	setup_dcsw_op_args x10, x3, x9, #LOUIS_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
	b	do_dcsw_op		// tail-call; do_dcsw_op returns to our caller
	/* ----------------------------------
	 * Perform the set/way op in x0
	 * (DCISW/DCCISW/DCCSW) on all cache
	 * levels up to the Level Of
	 * Coherency.
	 * setup_dcsw_op_args presumably loads
	 * CLIDR into x9 and the level bounds
	 * into x10/x3 — see asm_macros.S.
	 * ----------------------------------
	 */
func dcsw_op_all
	dsb	sy			// ensure prior writes are visible before maintenance
	setup_dcsw_op_args x10, x3, x9, #LOC_SHIFT, #CLIDR_FIELD_WIDTH, #LEVEL_SHIFT
	b	do_dcsw_op		// tail-call; do_dcsw_op returns to our caller