LCOV - code coverage report
Current view: top level - zephyr/arch/arm64 - lib_helpers.h
Test: new.info — Line coverage: 0.0 % (0 of 55 lines hit)
Test Date: 2025-09-05 20:47:19

            Line data    Source code
       1            0 : /*
       2              :  * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
       3              :  *
       4              :  * SPDX-License-Identifier: Apache-2.0
       5              :  */
       6              : 
       7              : #ifndef ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
       8              : #define ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
       9              : 
      10              : #ifndef _ASMLANGUAGE
      11              : 
      12              : #include <zephyr/arch/arm64/cpu.h>
      13              : #include <stdint.h>
      14              : 
      15              : /* All the macros need a memory clobber */
      16              : 
/*
 * Read the 64-bit value of system register 'reg'.
 *
 * GCC statement expression: the trailing 'reg_val' expression is the
 * macro's value.  The "memory" clobber keeps the compiler from moving
 * the MRS across surrounding memory accesses (see file-top comment).
 */
#define read_sysreg(reg)                                                \
({                                                                      \
	uint64_t reg_val;                                               \
	__asm__ volatile ("mrs %0, " STRINGIFY(reg)                     \
			  : "=r" (reg_val) :: "memory");                \
	reg_val;                                                        \
})
      24              : 
/*
 * Write 'val' to system register 'reg'.
 *
 * 'val' is copied into a local first, so it is evaluated exactly once
 * even though this is a macro.  "memory" clobber orders the MSR with
 * respect to surrounding memory accesses.
 */
#define write_sysreg(val, reg)                                          \
({                                                                      \
	uint64_t reg_val = val;                                         \
	__asm__ volatile ("msr " STRINGIFY(reg) ", %0"                  \
			  :: "r" (reg_val) : "memory");                 \
})
      31              : 
/*
 * Write zero to system register 'reg' using the XZR zero register,
 * avoiding the need for a scratch GPR.
 */
#define zero_sysreg(reg)                                                \
({                                                                      \
	__asm__ volatile ("msr " STRINGIFY(reg) ", xzr"                 \
			  ::: "memory");                                \
})
      37              : 
/*
 * Generate three typed accessors for system register 'reg':
 *   read_<reg>()      - return the current register value
 *   write_<reg>(val)  - write 'val' to the register
 *   zero_<reg>()      - clear the register (writes XZR)
 */
#define MAKE_REG_HELPER(reg)                                            \
	static ALWAYS_INLINE uint64_t read_##reg(void)                  \
	{                                                               \
		return read_sysreg(reg);                                \
	}                                                               \
	static ALWAYS_INLINE void write_##reg(uint64_t val)             \
	{                                                               \
		write_sysreg(val, reg);                                 \
	}                                                               \
	static ALWAYS_INLINE void zero_##reg(void)                      \
	{                                                               \
		zero_sysreg(reg);                                       \
	}
      51              : 
/*
 * Expand MAKE_REG_HELPER() for the banked _el1, _el2 and _el3 copies
 * of register 'reg'.
 */
#define MAKE_REG_HELPER_EL123(reg) \
	MAKE_REG_HELPER(reg##_el1) \
	MAKE_REG_HELPER(reg##_el2) \
	MAKE_REG_HELPER(reg##_el3)
      56              : 
      57            0 : MAKE_REG_HELPER(ccsidr_el1);
      58            0 : MAKE_REG_HELPER(clidr_el1);
      59            0 : MAKE_REG_HELPER(cntfrq_el0);
      60            0 : MAKE_REG_HELPER(cnthctl_el2);
      61            0 : MAKE_REG_HELPER(cnthp_ctl_el2);
      62            0 : MAKE_REG_HELPER(cnthps_ctl_el2);
      63            0 : MAKE_REG_HELPER(cntv_ctl_el0)
      64            0 : MAKE_REG_HELPER(cntv_cval_el0)
      65            0 : MAKE_REG_HELPER(cntvct_el0);
      66            0 : MAKE_REG_HELPER(cntvoff_el2);
      67            0 : MAKE_REG_HELPER(currentel);
      68            0 : MAKE_REG_HELPER(csselr_el1);
      69            0 : MAKE_REG_HELPER(daif)
      70            0 : MAKE_REG_HELPER(hcr_el2);
      71            0 : MAKE_REG_HELPER(id_aa64pfr0_el1);
      72            0 : MAKE_REG_HELPER(id_aa64mmfr0_el1);
      73            0 : MAKE_REG_HELPER(mpidr_el1);
      74            0 : MAKE_REG_HELPER(par_el1);
      75              : #if !defined(CONFIG_ARMV8_R)
      76            0 : MAKE_REG_HELPER(scr_el3);
      77              : #endif /* CONFIG_ARMV8_R */
      78            0 : MAKE_REG_HELPER(tpidrro_el0);
      79            0 : MAKE_REG_HELPER(vmpidr_el2);
      80            0 : MAKE_REG_HELPER(sp_el0);
      81              : 
/*
 * Accessors for the registers that are banked per exception level:
 * each line generates read_/write_/zero_ helpers for the _el1, _el2
 * and _el3 variants.
 */
MAKE_REG_HELPER_EL123(actlr)
MAKE_REG_HELPER_EL123(cpacr)
MAKE_REG_HELPER_EL123(cptr)
MAKE_REG_HELPER_EL123(elr)
MAKE_REG_HELPER_EL123(esr)
MAKE_REG_HELPER_EL123(far)
MAKE_REG_HELPER_EL123(mair)
MAKE_REG_HELPER_EL123(sctlr)
MAKE_REG_HELPER_EL123(spsr)
MAKE_REG_HELPER_EL123(tcr)
MAKE_REG_HELPER_EL123(ttbr0)
MAKE_REG_HELPER_EL123(vbar)
      94              : 
#if defined(CONFIG_ARM_MPU)
/*
 * Armv8-R aarch64 mpu registers.
 *
 * These have no mnemonic the assembler accepts, so they are spelled in
 * the generic S<op0>_<op1>_<Cn>_<Cm>_<op2> system-register encoding
 * form.  NOTE(review): encodings assumed to match the Armv8-R AArch64
 * supplement -- verify against the Arm ARM.
 */
#define mpuir_el1       S3_0_c0_c0_4
#define prselr_el1      S3_0_c6_c2_1
#define prbar_el1       S3_0_c6_c8_0
#define prlar_el1       S3_0_c6_c8_1

/* Generate read_/write_/zero_ accessors for each MPU register. */
MAKE_REG_HELPER(mpuir_el1);
MAKE_REG_HELPER(prselr_el1);
MAKE_REG_HELPER(prbar_el1);
MAKE_REG_HELPER(prlar_el1);
#endif
     107              : 
/* Unmask debug exceptions: clear the D bit in PSTATE via DAIFClr. */
static ALWAYS_INLINE void enable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_DBG_BIT) : "memory");
}
     113              : 
/* Mask debug exceptions: set the D bit in PSTATE via DAIFSet. */
static ALWAYS_INLINE void disable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_DBG_BIT) : "memory");
}
     119              : 
/* Unmask SError (asynchronous abort): clear the A bit in PSTATE. */
static ALWAYS_INLINE void enable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_ABT_BIT) : "memory");
}
     125              : 
/* Mask SError (asynchronous abort): set the A bit in PSTATE. */
static ALWAYS_INLINE void disable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_ABT_BIT) : "memory");
}
     131              : 
/* Unmask IRQs: clear the I bit in PSTATE via DAIFClr. */
static ALWAYS_INLINE void enable_irq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_IRQ_BIT) : "memory");
}
     137              : 
/* Mask IRQs: set the I bit in PSTATE via DAIFSet. */
static ALWAYS_INLINE void disable_irq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_IRQ_BIT) : "memory");
}
     143              : 
/* Unmask FIQs: clear the F bit in PSTATE via DAIFClr. */
static ALWAYS_INLINE void enable_fiq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_FIQ_BIT) : "memory");
}
     149              : 
/* Mask FIQs: set the F bit in PSTATE via DAIFSet. */
static ALWAYS_INLINE void disable_fiq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_FIQ_BIT) : "memory");
}
     155              : 
/* Send Event: signal other cores waiting in WFE. */
#define sev()	__asm__ volatile("sev" : : : "memory")
/* Wait For Event: low-power wait until an event (e.g. SEV) arrives. */
#define wfe()	__asm__ volatile("wfe" : : : "memory")
/* Wait For Interrupt: low-power wait until an interrupt is pending. */
#define wfi()	__asm__ volatile("wfi" : : : "memory")
     159              : 
     160            0 : static inline bool is_el_implemented(unsigned int el)
     161              : {
     162              :         unsigned int shift;
     163              : 
     164              :         if (el > 3) {
     165              :                 return false;
     166              :         }
     167              : 
     168              :         shift = ID_AA64PFR0_EL1_SHIFT * el;
     169              : 
     170              :         return (((read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK) != 0U);
     171              : }
     172              : 
     173            0 : static inline bool is_el_highest_implemented(void)
     174              : {
     175              :         uint32_t el_highest;
     176              :         uint32_t curr_el;
     177              : 
     178              :         el_highest = read_id_aa64pfr0_el1() & 0xFFFF;
     179              :         el_highest = (31U - __builtin_clz(el_highest)) / 4;
     180              : 
     181              :         curr_el = GET_EL(read_currentel());
     182              : 
     183              :         if (curr_el < el_highest) {
     184              :                 return false;
     185              :         }
     186              : 
     187              :         return true;
     188              : }
     189              : 
     190            0 : static inline bool is_el2_sec_supported(void)
     191              : {
     192              :         return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
     193              :                 ID_AA64PFR0_SEL2_MASK) != 0U);
     194              : }
     195              : 
/*
 * Report whether the CPU runs in the Secure world.  SCR_EL3 (which
 * holds the NS bit) is not readable from EL2/EL1, so this is resolved
 * at build time from CONFIG_ARMV8_A_NS instead of probed at runtime.
 */
static inline bool is_in_secure_state(void)
{
	/* We cannot read SCR_EL3 from EL2 or EL1 */
	return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}
     201              : 
     202              : #endif /* !_ASMLANGUAGE */
     203              : 
     204              : #endif /* ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_ */
        

Generated by: LCOV version 2.0-1