arm_arch_svc_setup.c

/*
 * Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
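
/*
 * Arm Architectural Service SMC handler: implements SMCCC_VERSION,
 * SMCCC_ARCH_FEATURES, SMCCC_ARCH_SOC_ID and the CVE workaround SMCs
 * (SMCCC_ARCH_WORKAROUND_1/2/3) for the Arm-owned OEN range.
 */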

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/cpus/wa_cve_2022_23960.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}
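
/*
 * Illustrative note (not from this file): MAKE_SMCCC_VERSION() packs the
 * major version into bits [30:16] and the minor version into bits [15:0]
 * of the 32-bit return value, so SMCCC v1.2 is reported as 0x10002, e.g.:
 *
 *	MAKE_SMCCC_VERSION(1, 2) == ((1 << 16) | 2) == 0x10002
 *
 * Callers compare against this packed value when checking for a minimum
 * SMCCC revision.
 */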

static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#ifdef __aarch64__
	/* Workaround checks are currently only implemented for aarch64 */
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if
		 * the PE implements architectural Speculation Store Bypass
		 * Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;
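
		/*
		 * (Encoding note, per the Arm ARM: the ID_AA64PFR1_EL1.SSBS
		 * field reads 0b0000 when SSBS is not implemented, 0b0001
		 * when PSTATE.SSBS is implemented, and 0b0010 when the
		 * MSR/MRS accessors are implemented as well; any non-zero
		 * value satisfies the check below.)
		 */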

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_NOT_IMPLEMENTED)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;

		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
		 * CVE-2017-5715 since this SMC can be used instead of
		 * SMCCC_ARCH_WORKAROUND_1.
		 */
		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
			return 1;
		}
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
#endif /* __aarch64__ */

	/* Fallthrough */
	default:
		return SMC_UNK;
	}
}
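
/*
 * Discovery sketch (illustrative, assuming a hypothetical smc(fid, arg)
 * helper at a lower EL that returns x0): callers are expected to probe a
 * workaround through SMCCC_ARCH_FEATURES before invoking it, e.g.
 *
 *	if (smc(SMCCC_VERSION, 0) >= 0x10001 &&
 *	    smc(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) == 0) {
 *		(void)smc(SMCCC_ARCH_WORKAROUND_1, 0);
 *	}
 *
 * In the cases above, 0 means the workaround is implemented and required
 * on this PE, 1 means it is not required, and SMC_UNK means it is not
 * supported at all.
 */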

/*
 * Return the SoC revision or SoC version on success, otherwise
 * SMC_ARCH_CALL_INVAL_PARAM for an unrecognised argument.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}
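
/*
 * Platform-side sketch (an assumption-labelled example, not part of this
 * file): per the SMCCC SoC ID definition, the version word keeps bit [31]
 * zero and packs the SiP's JEP-106 bank index into bits [30:24], the
 * JEP-106 identification code into bits [23:16] and an implementation-
 * defined SoC ID into bits [15:0]. A hypothetical implementation:
 *
 *	int32_t plat_get_soc_version(void)
 *	{
 *		return (int32_t)((0x04U << 24) |	JEP-106 bank index
 *				 (0x3BU << 16) |	JEP-106 ID code
 *				 0x0001U);		SoC-specific ID
 *	}
 *
 * (The values above are placeholders, not a real SiP identity.)
 */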

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
					  u_register_t x1,
					  u_register_t x2,
					  u_register_t x3,
					  u_register_t x4,
					  void *cookie,
					  void *handle,
					  u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#ifdef __aarch64__
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#endif /* __aarch64__ */
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
		     smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}
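
/*
 * Note: the SMC_RETn() macros above write their argument(s) into the saved
 * general-purpose register context referenced by 'handle' (x0 upwards on
 * AArch64) and return control to the caller, so each case both selects the
 * return value(s) and completes the SMC.
 */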

/* Register Arm Architectural Service Calls as runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);
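
/*
 * Registration note: DECLARE_RT_SVC() routes every fast SMC whose function
 * ID falls in the OEN_ARM_START..OEN_ARM_END owning-entity range (the Arm
 * Architecture Calls range, OEN 0) to arm_arch_svc_smc_handler. The init
 * hook is NULL because this service needs no setup at boot.
 */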