- /*
- * Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
- #include <arch.h>
- #include <asm_macros.S>
- #include <assert_macros.S>
- #include <common/debug.h>
- #include <cortex_a72.h>
- #include <cpu_macros.S>
- /* ---------------------------------------------
- * Disable all types of L2 prefetches.
- * ---------------------------------------------
- */
- func cortex_a72_disable_l2_prefetch
- ldcopr16 r0, r1, CORTEX_A72_ECTLR
- orr64_imm r0, r1, CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
- bic64_imm r0, r1, (CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK | \
- CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK)
- stcopr16 r0, r1, CORTEX_A72_ECTLR
- isb
- bx lr
- endfunc cortex_a72_disable_l2_prefetch
- /* ---------------------------------------------
- * Disable the load-store hardware prefetcher.
- * ---------------------------------------------
- */
- func cortex_a72_disable_hw_prefetcher
- ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
- orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
- stcopr16 r0, r1, CORTEX_A72_CPUACTLR
- isb
- dsb ish
- bx lr
- endfunc cortex_a72_disable_hw_prefetcher
- /* ---------------------------------------------
- * Disable intra-cluster coherency
- * Clobbers: r0-r1
- * ---------------------------------------------
- */
- func cortex_a72_disable_smp
- ldcopr16 r0, r1, CORTEX_A72_ECTLR
- bic64_imm r0, r1, CORTEX_A72_ECTLR_SMP_BIT
- stcopr16 r0, r1, CORTEX_A72_ECTLR
- bx lr
- endfunc cortex_a72_disable_smp
- /* ---------------------------------------------
- * Disable debug interfaces
- * ---------------------------------------------
- */
- func cortex_a72_disable_ext_debug
- mov r0, #1
- stcopr r0, DBGOSDLR
- isb
- dsb sy
- bx lr
- endfunc cortex_a72_disable_ext_debug
- /* ---------------------------------------------------
- * Errata Workaround for Cortex A72 Errata #859971.
- * This applies only to revision <= r0p3 of Cortex A72.
- * Inputs:
- * r0: variant[4:7] and revision[0:3] of current cpu.
- * Shall clobber: r0-r3
- * ---------------------------------------------------
- */
- func errata_a72_859971_wa
- mov r2,lr
- bl check_errata_859971
- mov lr, r2
- cmp r0, #ERRATA_NOT_APPLIES
- beq 1f
- ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
- orr64_imm r1, r1, CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH
- stcopr16 r0, r1, CORTEX_A72_CPUACTLR
- 1:
- bx lr
- endfunc errata_a72_859971_wa
- /* Report whether erratum 859971 applies to this core.
- * In: r0 = variant/revision. Tail-calls cpu_rev_var_ls with r1 = 0x03,
- * i.e. the erratum affects revisions up to and including r0p3. */
- func check_errata_859971
- mov r1, #0x03
- b cpu_rev_var_ls
- endfunc check_errata_859971
- add_erratum_entry cortex_a72, ERRATUM(859971), ERRATA_A72_859971
- /* CVE-2017-5715: no mitigation is implemented in this (AArch32) file,
- * so the workaround is unconditionally reported as missing. */
- func check_errata_cve_2017_5715
- mov r0, #ERRATA_MISSING
- bx lr
- endfunc check_errata_cve_2017_5715
- add_erratum_entry cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
- /* CVE-2018-3639: the mitigation is applied statically in
- * cortex_a72_reset_func when the build flag is set; report the
- * compile-time status here. */
- func check_errata_cve_2018_3639
- #if WORKAROUND_CVE_2018_3639
- mov r0, #ERRATA_APPLIES
- #else
- mov r0, #ERRATA_MISSING
- #endif
- bx lr
- endfunc check_errata_cve_2018_3639
- add_erratum_entry cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
- /* CVE-2022-23960: no mitigation is implemented in this (AArch32) file,
- * so the workaround is unconditionally reported as missing. */
- func check_errata_cve_2022_23960
- mov r0, #ERRATA_MISSING
- bx lr
- endfunc check_errata_cve_2022_23960
- add_erratum_entry cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
- /* -------------------------------------------------
- * The CPU Ops reset function for Cortex-A72.
- * -------------------------------------------------
- */
- func cortex_a72_reset_func
- mov r5, lr
- bl cpu_get_rev_var
- mov r4, r0
- #if ERRATA_A72_859971
- mov r0, r4
- bl errata_a72_859971_wa
- #endif
- #if WORKAROUND_CVE_2018_3639
- ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
- orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
- stcopr16 r0, r1, CORTEX_A72_CPUACTLR
- isb
- dsb sy
- #endif
- /* ---------------------------------------------
- * Enable the SMP bit.
- * ---------------------------------------------
- */
- ldcopr16 r0, r1, CORTEX_A72_ECTLR
- orr64_imm r0, r1, CORTEX_A72_ECTLR_SMP_BIT
- stcopr16 r0, r1, CORTEX_A72_ECTLR
- isb
- bx r5
- endfunc cortex_a72_reset_func
- /* ----------------------------------------------------
- * The CPU Ops core power down function for Cortex-A72.
- * ----------------------------------------------------
- */
- func cortex_a72_core_pwr_dwn
- push {r12, lr}
- /* Assert if cache is enabled */
- #if ENABLE_ASSERTIONS
- ldcopr r0, SCTLR
- tst r0, #SCTLR_C_BIT
- ASM_ASSERT(eq)
- #endif
- /* ---------------------------------------------
- * Disable the L2 prefetches.
- * ---------------------------------------------
- */
- bl cortex_a72_disable_l2_prefetch
- /* ---------------------------------------------
- * Disable the load-store hardware prefetcher.
- * ---------------------------------------------
- */
- bl cortex_a72_disable_hw_prefetcher
- /* ---------------------------------------------
- * Flush L1 caches.
- * ---------------------------------------------
- */
- mov r0, #DC_OP_CISW
- bl dcsw_op_level1
- /* ---------------------------------------------
- * Come out of intra cluster coherency
- * ---------------------------------------------
- */
- bl cortex_a72_disable_smp
- /* ---------------------------------------------
- * Force the debug interfaces to be quiescent
- * ---------------------------------------------
- */
- pop {r12, lr}
- b cortex_a72_disable_ext_debug
- endfunc cortex_a72_core_pwr_dwn
- /* -------------------------------------------------------
- * The CPU Ops cluster power down function for Cortex-A72.
- * -------------------------------------------------------
- */
- func cortex_a72_cluster_pwr_dwn
- push {r12, lr}
- /* Assert if cache is enabled */
- #if ENABLE_ASSERTIONS
- ldcopr r0, SCTLR
- tst r0, #SCTLR_C_BIT
- ASM_ASSERT(eq)
- #endif
- /* ---------------------------------------------
- * Disable the L2 prefetches.
- * ---------------------------------------------
- */
- bl cortex_a72_disable_l2_prefetch
- /* ---------------------------------------------
- * Disable the load-store hardware prefetcher.
- * ---------------------------------------------
- */
- bl cortex_a72_disable_hw_prefetcher
- #if !SKIP_A72_L1_FLUSH_PWR_DWN
- /* ---------------------------------------------
- * Flush L1 caches.
- * ---------------------------------------------
- */
- mov r0, #DC_OP_CISW
- bl dcsw_op_level1
- #endif
- /* ---------------------------------------------
- * Disable the optional ACP.
- * ---------------------------------------------
- */
- bl plat_disable_acp
- /* -------------------------------------------------
- * Flush the L2 caches.
- * -------------------------------------------------
- */
- mov r0, #DC_OP_CISW
- bl dcsw_op_level2
- /* ---------------------------------------------
- * Come out of intra cluster coherency
- * ---------------------------------------------
- */
- bl cortex_a72_disable_smp
- /* ---------------------------------------------
- * Force the debug interfaces to be quiescent
- * ---------------------------------------------
- */
- pop {r12, lr}
- b cortex_a72_disable_ext_debug
- endfunc cortex_a72_cluster_pwr_dwn
- /* Register the Cortex-A72 CPU ops: MIDR to match on, plus the reset and
- * core/cluster power-down handlers defined above. */
- declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
- cortex_a72_reset_func, \
- cortex_a72_core_pwr_dwn, \
- cortex_a72_cluster_pwr_dwn