17#ifndef _ZEPHYR_ARCH_ARM_M_SWITCH_H
18#define _ZEPHYR_ARCH_ARM_M_SWITCH_H
35#ifdef CONFIG_ARM_GCC_FP_WORKAROUND
36#define _R7_CLOBBER_OPT(expr) expr
38#define _R7_CLOBBER_OPT(expr)
45#if defined(CONFIG_CPU_CORTEX_M4) || defined(CONFIG_CPU_CORTEX_M7) || defined(CONFIG_ARMV8_M_DSP)
46#define _ARM_M_SWITCH_HAVE_DSP
125void z_arm_configure_dynamic_mpu_regions(
struct k_thread *thread);
140 void *out, *in, *lr_save, *lr_fixup;
158#ifdef CONFIG_MULTITHREADING
184 void z_check_stack_sentinel(
void);
188 z_check_stack_sentinel();
200 if ((((
uint32_t)isr_lr & 0xFFFFFF00U) == 0xFFFFFF00U)
201 && (((
uint32_t)isr_lr & 0xC) == 0xC)) {
222#if defined(CONFIG_USERSPACE) || defined(CONFIG_MPU_STACK_GUARD)
223 z_arm_configure_dynamic_mpu_regions(_current);
226#ifdef CONFIG_THREAD_LOCAL_STORAGE
227 z_arm_tls_ptr = _current->tls;
230#if defined(CONFIG_USERSPACE) && defined(CONFIG_USE_SWITCH)
235 extern uint32_t arm_m_switch_control;
238 __asm__
volatile(
"mrs %0, control" :
"=r"(control));
239 arm_m_switch_control = (control & ~1) | (_current->arch.mode & 1);
252 __asm__
volatile(_R7_CLOBBER_OPT(
"push {r7};")
264 "mov r2, #0x01000000;"
266#ifdef CONFIG_BUILTIN_STACK_GUARD
289 " vldm r4!, {s0-s31};"
295#if defined(CONFIG_USERSPACE) && defined(CONFIG_USE_SWITCH)
296 " ldr r8, =arm_m_switch_control;"
309#if defined(CONFIG_USERSPACE) && defined(CONFIG_USE_SWITCH)
318#ifdef CONFIG_BUILTIN_STACK_GUARD
324#ifdef _ARM_M_SWITCH_HAVE_DSP
325 "msr apsr_nzcvqg, r2;"
327 "msr apsr_nzcvq, r2;"
333 _R7_CLOBBER_OPT(
"pop {r7};")::
"r"(r4),
335 :
"r6",
"r8",
"r9",
"r10",
336#ifndef CONFIG_ARM_GCC_FP_WORKAROUND
342#ifdef CONFIG_USE_SWITCH
bool arm_m_iciit_check(uint32_t msp, uint32_t psp, uint32_t lr)
Recover interrupted IT/ICI execution state (stacked EPSR bits) after a context switch.
static void arm_m_exc_tail(void)
ISR-tail helper that patches the stacked LR for deferred switch fixup.
Definition arm-m-switch.h:156
void * arm_m_new_stack(char *base, uint32_t sz, void *entry, void *arg0, void *arg1, void *arg2, void *arg3)
Create an initial switch frame on a new thread's stack.
static ALWAYS_INLINE void arm_m_switch(void *switch_to, void **switched_from)
Core Cortex-M context switch routine.
Definition arm-m-switch.h:220
static ALWAYS_INLINE void arch_switch(void *switch_to, void **switched_from)
Public arch-level wrapper for the Cortex-M switch routine.
Definition arm-m-switch.h:352
void arm_m_iciit_stub(void)
Undefined-instruction stub used to force IT/ICI recovery.
void arm_m_exc_exit(void)
Assembly stub that completes the Cortex-M context restore.
uint32_t * arm_m_exc_lr_ptr
Pointer to the stacked LR word used by the ISR tail fixup path.
struct arm_m_cs_ptrs arm_m_cs_ptrs
Global instance with current callee-saved frame pointers.
bool arm_m_must_switch(void)
Evaluate whether an interrupt should trigger a context switch.
uint32_t arm_m_switch_stack_buffer
Backing storage used when relocating stacks during switch operations.
#define IS_ENABLED(config_macro)
Check for macro definition in compiler-visible expressions.
Definition util_macro.h:148
__UINT32_TYPE__ uint32_t
Definition stdint.h:90
__UINTPTR_TYPE__ uintptr_t
Definition stdint.h:105
Thread Structure.
Definition thread.h:259