/*
 * aem_generic.S - CPU library support for the Arm Base / Foundation AEM FVP models.
 */
  1. /*
  2. * Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. */
  6. #include <aem_generic.h>
  7. #include <arch.h>
  8. #include <asm_macros.S>
  9. #include <cpu_macros.S>
/* ---------------------------------------------
 * void aem_generic_core_pwr_dwn(void)
 *
 * Per-core power-down handler for the AEM model.
 * Disables the EL3 data cache, then cleans and
 * invalidates this core's private cache levels by
 * set/way. If the model implements an L3 cache,
 * L2 is private per core, so both L1 and L2 must
 * be flushed; otherwise flushing L1 is sufficient.
 * Exits via a tail call into the generic
 * dcsw_op_level1 / dcsw_op_level2 helpers.
 * Clobbers: x0, x1, x18 (x18 temporarily holds
 * the link register across the dcsw_op_level1
 * call — NOTE(review): assumes x18 is treated as
 * a scratch register by the TF-A power-down
 * convention; confirm against cpu_macros.S).
 * ---------------------------------------------
 */
func aem_generic_core_pwr_dwn
	/* ---------------------------------------------
	 * Disable the Data Cache so no new lines are
	 * allocated while we flush by set/way.
	 * ---------------------------------------------
	 */
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT
	msr	sctlr_el3, x1
	isb			/* Ensure the cache-disable takes effect before flushing */

	/* ---------------------------------------------
	 * AEM model supports L3 caches in which case L2
	 * will be private per core caches and flush
	 * from L1 to L2 is not sufficient.
	 * ---------------------------------------------
	 */
	mrs	x1, clidr_el1

	/* ---------------------------------------------
	 * Check if L3 cache is implemented: test the
	 * CLIDR_EL1 Ctype3 field (non-zero => a cache
	 * exists at level 3).
	 * ---------------------------------------------
	 */
	tst	x1, ((1 << CLIDR_FIELD_WIDTH) - 1) << CTYPE_SHIFT(3)

	/* ---------------------------------------------
	 * There is no L3 cache, flush L1 to L2 only.
	 * x0 selects clean+invalidate for the set/way
	 * helpers; the branch is a tail call, so
	 * dcsw_op_level1 returns directly to our caller.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	b.eq	dcsw_op_level1

	mov	x18, x30	/* Preserve LR: dcsw_op_level1 clobbers x30 via bl */

	/* ---------------------------------------------
	 * Flush L1 cache to L2.
	 * ---------------------------------------------
	 */
	bl	dcsw_op_level1
	mov	x30, x18	/* Restore LR so the final tail call returns to our caller */

	/* ---------------------------------------------
	 * Flush L2 cache to L3 (tail call; x0 must be
	 * reloaded as the helper may have clobbered it).
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	b	dcsw_op_level2
endfunc aem_generic_core_pwr_dwn
/* ---------------------------------------------
 * void aem_generic_cluster_pwr_dwn(void)
 *
 * Cluster power-down handler for the AEM model:
 * disables the EL3 data cache, then cleans and
 * invalidates every cache level to the Point of
 * Coherency via a tail call into dcsw_op_all.
 * Clobbers: x0, x1 (plus whatever dcsw_op_all
 * clobbers).
 * ---------------------------------------------
 */
func aem_generic_cluster_pwr_dwn
	/* ---------------------------------------------
	 * Disable the Data Cache so no new lines are
	 * allocated while we flush by set/way.
	 * ---------------------------------------------
	 */
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT
	msr	sctlr_el3, x1
	isb			/* Ensure the cache-disable takes effect before flushing */

	/* ---------------------------------------------
	 * Flush all caches to PoC. Tail call: dcsw_op_all
	 * returns directly to our caller.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW	/* Clean + invalidate by set/way */
	b	dcsw_op_all
endfunc aem_generic_cluster_pwr_dwn
/* ---------------------------------------------
 * This function provides cpu specific
 * register information for crash reporting.
 * It needs to return with x6 pointing to
 * a list of register names in ascii and
 * x8 - x15 having values of registers to be
 * reported.
 * For the AEM model there are no CPU-specific
 * registers to dump, so the name list is a
 * single empty string and x8 - x15 are left
 * untouched.
 * ---------------------------------------------
 */
	.section .rodata.aem_generic_regs, "aS"
aem_generic_regs:  /* The ascii list of register names to be reported */
	.asciz	""	/* no registers to report */

func aem_generic_cpu_reg_dump
	adr	x6, aem_generic_regs	/* x6 = address of the (empty) name list */
	ret
endfunc aem_generic_cpu_reg_dump
/*
 * Register the cpu_ops descriptors (see cpu_macros.S): MIDR to match,
 * reset handler (none here), then the core and cluster power-down handlers.
 */

/* cpu_ops for Base AEM FVP */
declare_cpu_ops aem_generic, BASE_AEM_MIDR, CPU_NO_RESET_FUNC, \
	aem_generic_core_pwr_dwn, \
	aem_generic_cluster_pwr_dwn

/* cpu_ops for Foundation FVP */
declare_cpu_ops aem_generic, FOUNDATION_AEM_MIDR, CPU_NO_RESET_FUNC, \
	aem_generic_core_pwr_dwn, \
	aem_generic_cluster_pwr_dwn