Zephyr API Documentation 4.2.99
thread_stack.h
/*
 * Copyright (c) 2020 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_KERNEL_THREAD_STACK_H
#define ZEPHYR_INCLUDE_KERNEL_THREAD_STACK_H

#if !defined(_ASMLANGUAGE)
#include <zephyr/arch/cpu.h>
#include <zephyr/sys/util.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Using typedef deliberately here; this is intended to be an opaque type.
 *
 * The purpose of this data type is to clearly distinguish between the
 * declared symbol for a stack (of type k_thread_stack_t) and the underlying
 * buffer which composes the stack data actually used by the underlying
 * thread; they cannot be used interchangeably as some arches precede the
 * stack buffer region with guard areas that trigger an MPU or MMU fault
 * if written to.
 *
 * APIs that want to work with the buffer inside should continue to use
 * char *.
 *
 * Stacks should always be created with K_THREAD_STACK_DEFINE().
 */
struct __packed z_thread_stack_element {
        char data;
};
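
/*
 * Usage sketch (editor's note, not part of the upstream header): the stack
 * symbol and its writable buffer are distinct things. Symbol names and the
 * entry function below are hypothetical.
 *
 *   K_THREAD_STACK_DEFINE(my_stack, 1024);
 *   static struct k_thread my_thread;
 *
 *   void start(void)
 *   {
 *           // Pass the symbol itself to the kernel...
 *           k_thread_create(&my_thread, my_stack,
 *                           K_THREAD_STACK_SIZEOF(my_stack),
 *                           my_entry, NULL, NULL, NULL,
 *                           5, 0, K_NO_WAIT);
 *
 *           // ...and use K_THREAD_STACK_BUFFER() if the raw buffer is
 *           // needed, never a direct cast of the symbol.
 *           char *buf = K_THREAD_STACK_BUFFER(my_stack);
 *           ARG_UNUSED(buf);
 *   }
 */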

static inline char *z_stack_ptr_align(char *ptr)
{
        return (char *)ROUND_DOWN(ptr, ARCH_STACK_PTR_ALIGN);
}
#define Z_STACK_PTR_ALIGN(ptr) ((uintptr_t)z_stack_ptr_align((char *)(ptr)))
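
/*
 * Worked example (editor's note): assuming ARCH_STACK_PTR_ALIGN is 8, an
 * initial pointer of 0x20000f0c rounds down to 0x20000f08, so a new thread
 * always starts on an architecture-aligned stack pointer. Variable names
 * are hypothetical.
 *
 *   char *sp = z_stack_ptr_align(stack_end);          // 0x20000f0c -> 0x20000f08
 *   uintptr_t sp_i = Z_STACK_PTR_ALIGN(stack_end);    // same value, as an integer
 */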

#define Z_STACK_PTR_TO_FRAME(type, ptr) \
        (type *)((ptr) - sizeof(type))
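
/*
 * Sketch (editor's note): carve an initial frame of a given type from the
 * top of an already-aligned stack pointer; the frame struct below is
 * hypothetical.
 *
 *   struct init_frame *f = Z_STACK_PTR_TO_FRAME(struct init_frame, sp);
 *   // f sits sizeof(struct init_frame) bytes below sp
 */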

#ifdef ARCH_KERNEL_STACK_RESERVED
#define K_KERNEL_STACK_RESERVED ((size_t)ARCH_KERNEL_STACK_RESERVED)
#else
#define K_KERNEL_STACK_RESERVED ((size_t)0)
#endif /* ARCH_KERNEL_STACK_RESERVED */

#define Z_KERNEL_STACK_SIZE_ADJUST(size) (ROUND_UP(size, \
                                                   ARCH_STACK_PTR_ALIGN) + \
                                          K_KERNEL_STACK_RESERVED)

#ifdef ARCH_KERNEL_STACK_OBJ_ALIGN
#define Z_KERNEL_STACK_OBJ_ALIGN ARCH_KERNEL_STACK_OBJ_ALIGN
#else
#define Z_KERNEL_STACK_OBJ_ALIGN ARCH_STACK_PTR_ALIGN
#endif /* ARCH_KERNEL_STACK_OBJ_ALIGN */

#define K_KERNEL_STACK_LEN(size) \
        ROUND_UP(Z_KERNEL_STACK_SIZE_ADJUST(size), Z_KERNEL_STACK_OBJ_ALIGN)
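
/*
 * Worked example (editor's note, with made-up values ARCH_STACK_PTR_ALIGN = 8
 * and K_KERNEL_STACK_RESERVED = 16):
 *
 *   Z_KERNEL_STACK_SIZE_ADJUST(500) = ROUND_UP(500, 8) + 16 = 504 + 16 = 520
 *   K_KERNEL_STACK_LEN(500)         = ROUND_UP(520, Z_KERNEL_STACK_OBJ_ALIGN)
 *
 * i.e. the memory actually set aside is the requested size rounded up for
 * stack pointer alignment, plus any arch-reserved area, rounded up again to
 * the object alignment.
 */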

#ifdef CONFIG_HW_SHADOW_STACK
#define k_thread_hw_shadow_stack_t arch_thread_hw_shadow_stack_t

#define K_THREAD_HW_SHADOW_STACK_SIZE(size_) \
        ARCH_THREAD_HW_SHADOW_STACK_SIZE(size_)

#define K_KERNEL_HW_SHADOW_STACK_DECLARE(sym, size) \
        ARCH_THREAD_HW_SHADOW_STACK_DECLARE(__ ## sym ## _shstk, size)

#define K_KERNEL_HW_SHADOW_STACK_ARRAY_DECLARE(sym, nmemb, size) \
        ARCH_THREAD_HW_SHADOW_STACK_ARRAY_DECLARE(__ ## sym ## _shstk_arr, \
                                                  nmemb, size)

struct _stack_to_hw_shadow_stack {
        k_thread_stack_t *stack;
        k_thread_hw_shadow_stack_t *shstk_addr;
        size_t size;
};

#define K_THREAD_HW_SHADOW_STACK_DEFINE(sym, size_) \
        ARCH_THREAD_HW_SHADOW_STACK_DEFINE(__ ## sym ## _shstk, size_); \
        static const STRUCT_SECTION_ITERABLE(_stack_to_hw_shadow_stack, \
                                             sym ## _stack_to_shstk_attach) = { \
                .stack = sym, \
                .shstk_addr = __ ## sym ## _shstk, \
                .size = size_, \
        }
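
/*
 * Editor's note: applications do not normally invoke this macro directly;
 * K_KERNEL_STACK_DEFINE() below expands it so that a stack `sym` gets a
 * companion shadow stack named `__<sym>_shstk`, plus an iterable-section
 * entry the kernel uses to attach the two at boot. Roughly, for a
 * hypothetical K_KERNEL_STACK_DEFINE(worker_stack, 2048) this also emits:
 *
 *   ARCH_THREAD_HW_SHADOW_STACK_DEFINE(__worker_stack_shstk,
 *           K_THREAD_HW_SHADOW_STACK_SIZE(2048));
 *   static const STRUCT_SECTION_ITERABLE(_stack_to_hw_shadow_stack,
 *           worker_stack_stack_to_shstk_attach) = { ... };
 */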

struct _stack_to_hw_shadow_stack_arr {
        uintptr_t stack_addr;
        uintptr_t shstk_addr;
        size_t stack_size;
        size_t shstk_size;
        size_t nmemb;
};

#define K_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(sym, nmemb_, size_) \
        ARCH_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(__ ## sym ## _shstk_arr, nmemb_, \
                                                 K_THREAD_HW_SHADOW_STACK_SIZE(size_)); \
        static const STRUCT_SECTION_ITERABLE(_stack_to_hw_shadow_stack_arr, \
                                             sym ## _stack_to_shstk_attach) = { \
                .stack_addr = (uintptr_t)sym, \
                .stack_size = K_KERNEL_STACK_LEN(size_), \
                .nmemb = nmemb_, \
                .shstk_addr = (uintptr_t)__ ## sym ## _shstk_arr, \
                .shstk_size = K_THREAD_HW_SHADOW_STACK_SIZE(size_), \
        }

#define k_thread_hw_shadow_stack_attach arch_thread_hw_shadow_stack_attach

struct _thread_hw_shadow_stack_static {
        struct k_thread *thread;
        k_thread_hw_shadow_stack_t *shstk_addr;
        size_t size;
};

#define K_THREAD_HW_SHADOW_STACK_ATTACH(thread_, shstk_addr_, size_) \
        static const STRUCT_SECTION_ITERABLE(_thread_hw_shadow_stack_static, \
                                             thread ## _shstk_attach_static) = { \
                .thread = thread_, \
                .shstk_addr = shstk_addr_, \
                .size = size_, \
        }

#else
#define K_KERNEL_HW_SHADOW_STACK_DECLARE(sym, size)
#define K_KERNEL_HW_SHADOW_STACK_ARRAY_DECLARE(sym, nmemb, size)
#define K_THREAD_HW_SHADOW_STACK_DEFINE(sym, size)
#define K_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(sym, nmemb, size_)
#endif /* CONFIG_HW_SHADOW_STACK */

#define K_KERNEL_STACK_DECLARE(sym, size) \
        K_KERNEL_HW_SHADOW_STACK_DECLARE(sym, K_THREAD_HW_SHADOW_STACK_SIZE(size)); \
        extern struct z_thread_stack_element \
                sym[K_KERNEL_STACK_LEN(size)]
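
/*
 * Usage sketch (editor's note): declare in a header what one C file defines
 * with K_KERNEL_STACK_DEFINE(), so other files can reference the same stack
 * object; the size arguments must match. File and symbol names are
 * hypothetical.
 *
 *   // worker.h
 *   K_KERNEL_STACK_DECLARE(worker_stack, 2048);
 *
 *   // worker.c
 *   K_KERNEL_STACK_DEFINE(worker_stack, 2048);
 */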

#define K_KERNEL_STACK_ARRAY_DECLARE(sym, nmemb, size) \
        K_KERNEL_HW_SHADOW_STACK_ARRAY_DECLARE(sym, nmemb, K_THREAD_HW_SHADOW_STACK_SIZE(size)); \
        extern struct z_thread_stack_element \
                sym[nmemb][K_KERNEL_STACK_LEN(size)]

#define K_KERNEL_PINNED_STACK_ARRAY_DECLARE(sym, nmemb, size) \
        K_KERNEL_HW_SHADOW_STACK_ARRAY_DECLARE(sym, nmemb, K_THREAD_HW_SHADOW_STACK_SIZE(size)); \
        extern struct z_thread_stack_element \
                sym[nmemb][K_KERNEL_STACK_LEN(size)]

#define Z_KERNEL_STACK_DEFINE_IN(sym, size, lsect) \
        struct z_thread_stack_element lsect \
                __aligned(Z_KERNEL_STACK_OBJ_ALIGN) \
                sym[K_KERNEL_STACK_LEN(size)]

#define Z_KERNEL_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, lsect) \
        struct z_thread_stack_element lsect \
                __aligned(Z_KERNEL_STACK_OBJ_ALIGN) \
                sym[nmemb][K_KERNEL_STACK_LEN(size)]

#define K_KERNEL_STACK_DEFINE(sym, size) \
        Z_KERNEL_STACK_DEFINE_IN(sym, size, __kstackmem); \
        K_THREAD_HW_SHADOW_STACK_DEFINE(sym, \
                                        K_THREAD_HW_SHADOW_STACK_SIZE(size))
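
/*
 * Usage sketch (editor's note): a supervisor-only thread on a kernel stack.
 * Symbol names, the entry function and the priority are hypothetical.
 *
 *   K_KERNEL_STACK_DEFINE(logger_stack, 1024);
 *   static struct k_thread logger_thread;
 *
 *   k_thread_create(&logger_thread, logger_stack,
 *                   K_KERNEL_STACK_SIZEOF(logger_stack),
 *                   logger_entry, NULL, NULL, NULL,
 *                   K_PRIO_PREEMPT(7), 0, K_NO_WAIT);
 */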

#if defined(CONFIG_LINKER_USE_PINNED_SECTION)
#define K_KERNEL_PINNED_STACK_DEFINE(sym, size) \
        Z_KERNEL_STACK_DEFINE_IN(sym, size, __pinned_noinit); \
        K_THREAD_HW_SHADOW_STACK_DEFINE(sym, \
                                        K_THREAD_HW_SHADOW_STACK_SIZE(size))
#else
#define K_KERNEL_PINNED_STACK_DEFINE(sym, size) \
        Z_KERNEL_STACK_DEFINE_IN(sym, size, __kstackmem); \
        K_THREAD_HW_SHADOW_STACK_DEFINE(sym, \
                                        K_THREAD_HW_SHADOW_STACK_SIZE(size))
#endif /* CONFIG_LINKER_USE_PINNED_SECTION */

#define K_KERNEL_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        Z_KERNEL_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, __kstackmem); \
        K_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(sym, nmemb, size)
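
/*
 * Usage sketch (editor's note): a contiguous array of equally sized kernel
 * stacks, e.g. for a small pool of worker threads; names are hypothetical.
 *
 *   #define NUM_WORKERS 4
 *   K_KERNEL_STACK_ARRAY_DEFINE(worker_stacks, NUM_WORKERS, 1024);
 *
 *   // worker_stacks[i] can be passed to k_thread_create(), sized with
 *   // K_KERNEL_STACK_SIZEOF(worker_stacks[i])
 */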

#if defined(CONFIG_LINKER_USE_PINNED_SECTION)
#define K_KERNEL_PINNED_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        Z_KERNEL_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, __pinned_noinit); \
        K_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(sym, nmemb, size)
#else
#define K_KERNEL_PINNED_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        Z_KERNEL_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, __kstackmem); \
        K_THREAD_HW_SHADOW_STACK_ARRAY_DEFINE(sym, nmemb, size)
#endif /* CONFIG_LINKER_USE_PINNED_SECTION */

#define K_KERNEL_STACK_MEMBER(sym, size) \
        Z_KERNEL_STACK_DEFINE_IN(sym, size,)

#define K_KERNEL_STACK_SIZEOF(sym) (sizeof(sym) - K_KERNEL_STACK_RESERVED)

static inline char *K_KERNEL_STACK_BUFFER(k_thread_stack_t *sym)
{
        return (char *)sym + K_KERNEL_STACK_RESERVED;
}
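
/*
 * Usage sketch (editor's note): K_KERNEL_STACK_MEMBER() embeds a stack in a
 * larger object, and K_KERNEL_STACK_SIZEOF() gives the usable size to hand
 * to k_thread_create(). The struct and entry function are hypothetical.
 *
 *   struct worker {
 *           struct k_thread thread;
 *           K_KERNEL_STACK_MEMBER(stack, 1024);
 *   };
 *
 *   void worker_start(struct worker *w)
 *   {
 *           k_thread_create(&w->thread, w->stack,
 *                           K_KERNEL_STACK_SIZEOF(w->stack),
 *                           worker_entry, w, NULL, NULL,
 *                           K_PRIO_COOP(2), 0, K_NO_WAIT);
 *   }
 */
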
#ifndef CONFIG_USERSPACE
#define K_THREAD_STACK_RESERVED K_KERNEL_STACK_RESERVED
#define K_THREAD_STACK_SIZEOF K_KERNEL_STACK_SIZEOF
#define K_THREAD_STACK_LEN K_KERNEL_STACK_LEN
#define K_THREAD_STACK_DEFINE K_KERNEL_STACK_DEFINE
#define K_THREAD_STACK_ARRAY_DEFINE K_KERNEL_STACK_ARRAY_DEFINE
#define K_THREAD_STACK_BUFFER K_KERNEL_STACK_BUFFER
#define K_THREAD_STACK_DECLARE K_KERNEL_STACK_DECLARE
#define K_THREAD_STACK_ARRAY_DECLARE K_KERNEL_STACK_ARRAY_DECLARE
#define K_THREAD_PINNED_STACK_DEFINE K_KERNEL_PINNED_STACK_DEFINE
#define K_THREAD_PINNED_STACK_ARRAY_DEFINE \
        K_KERNEL_PINNED_STACK_ARRAY_DEFINE
#else
#ifdef ARCH_THREAD_STACK_RESERVED
#define K_THREAD_STACK_RESERVED ((size_t)(ARCH_THREAD_STACK_RESERVED))
#else
#define K_THREAD_STACK_RESERVED ((size_t)0U)
#endif /* ARCH_THREAD_STACK_RESERVED */

#if defined(ARCH_THREAD_STACK_OBJ_ALIGN)
#define Z_THREAD_STACK_OBJ_ALIGN(size) \
        ARCH_THREAD_STACK_OBJ_ALIGN(Z_THREAD_STACK_SIZE_ADJUST(size))
#else
#define Z_THREAD_STACK_OBJ_ALIGN(size) ARCH_STACK_PTR_ALIGN
#endif /* ARCH_THREAD_STACK_OBJ_ALIGN */

#if defined(ARCH_THREAD_STACK_SIZE_ADJUST)
#define Z_THREAD_STACK_SIZE_ADJUST(size) \
        ARCH_THREAD_STACK_SIZE_ADJUST((size) + K_THREAD_STACK_RESERVED)
#else
#define Z_THREAD_STACK_SIZE_ADJUST(size) \
        (ROUND_UP((size), ARCH_STACK_PTR_ALIGN) + K_THREAD_STACK_RESERVED)
#endif /* ARCH_THREAD_STACK_SIZE_ADJUST */
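
/*
 * Worked example (editor's note, with made-up values: a 512-byte request,
 * ARCH_STACK_PTR_ALIGN = 8, K_THREAD_STACK_RESERVED = 128 and no
 * ARCH_THREAD_STACK_SIZE_ADJUST override):
 *
 *   Z_THREAD_STACK_SIZE_ADJUST(512) = ROUND_UP(512, 8) + 128 = 640
 *
 * An MPU-based arch may instead supply ARCH_THREAD_STACK_SIZE_ADJUST and
 * ARCH_THREAD_STACK_OBJ_ALIGN so the total is rounded up to, and aligned
 * on, a size its protection hardware can program as a single region.
 */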

#define K_THREAD_STACK_DECLARE(sym, size) \
        extern struct z_thread_stack_element \
                sym[K_THREAD_STACK_LEN(size)]

#define K_THREAD_STACK_ARRAY_DECLARE(sym, nmemb, size) \
        extern struct z_thread_stack_element \
                sym[nmemb][K_THREAD_STACK_LEN(size)]

#define K_THREAD_STACK_SIZEOF(sym) (sizeof(sym) - K_THREAD_STACK_RESERVED)

#define Z_THREAD_STACK_DEFINE_IN(sym, size, lsect) \
        struct z_thread_stack_element lsect \
                __aligned(Z_THREAD_STACK_OBJ_ALIGN(size)) \
                sym[K_THREAD_STACK_LEN(size)]

#define Z_THREAD_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, lsect) \
        struct z_thread_stack_element lsect \
                __aligned(Z_THREAD_STACK_OBJ_ALIGN(size)) \
                sym[nmemb][K_THREAD_STACK_LEN(size)]

#define K_THREAD_STACK_DEFINE(sym, size) \
        Z_THREAD_STACK_DEFINE_IN(sym, size, __stackmem)
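
/*
 * Usage sketch (editor's note): the common application pattern. The size
 * passed to k_thread_create() is taken with K_THREAD_STACK_SIZEOF() rather
 * than repeating the literal, so any reserved area is excluded. Names,
 * priority and options are hypothetical.
 *
 *   K_THREAD_STACK_DEFINE(app_stack, 2048);
 *   static struct k_thread app_thread;
 *
 *   k_thread_create(&app_thread, app_stack,
 *                   K_THREAD_STACK_SIZEOF(app_stack),
 *                   app_entry, NULL, NULL, NULL,
 *                   K_PRIO_PREEMPT(5), K_USER, K_NO_WAIT);
 */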

#if defined(CONFIG_LINKER_USE_PINNED_SECTION)
#define K_THREAD_PINNED_STACK_DEFINE(sym, size) \
        Z_THREAD_STACK_DEFINE_IN(sym, size, __pinned_noinit)
#else
#define K_THREAD_PINNED_STACK_DEFINE(sym, size) \
        K_THREAD_STACK_DEFINE(sym, size)
#endif /* CONFIG_LINKER_USE_PINNED_SECTION */

#define K_THREAD_STACK_LEN(size) \
        ROUND_UP(Z_THREAD_STACK_SIZE_ADJUST(size), \
                 Z_THREAD_STACK_OBJ_ALIGN(size))

#define K_THREAD_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        Z_THREAD_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, __stackmem)
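
/*
 * Usage sketch (editor's note): a pool of identically sized thread stacks;
 * K_THREAD_STACK_LEN() above determines the spacing between elements.
 * Names and parameters are hypothetical.
 *
 *   #define NUM_CLIENTS 3
 *   K_THREAD_STACK_ARRAY_DEFINE(client_stacks, NUM_CLIENTS, 1024);
 *   static struct k_thread client_threads[NUM_CLIENTS];
 *
 *   for (int i = 0; i < NUM_CLIENTS; i++) {
 *           k_thread_create(&client_threads[i], client_stacks[i],
 *                           K_THREAD_STACK_SIZEOF(client_stacks[i]),
 *                           client_entry, INT_TO_POINTER(i), NULL, NULL,
 *                           K_PRIO_PREEMPT(5), K_USER, K_NO_WAIT);
 *   }
 */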

#if defined(CONFIG_LINKER_USE_PINNED_SECTION)
#define K_THREAD_PINNED_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        Z_THREAD_STACK_ARRAY_DEFINE_IN(sym, nmemb, size, __pinned_noinit)
#else
#define K_THREAD_PINNED_STACK_ARRAY_DEFINE(sym, nmemb, size) \
        K_THREAD_STACK_ARRAY_DEFINE(sym, nmemb, size)
#endif /* CONFIG_LINKER_USE_PINNED_SECTION */

static inline char *K_THREAD_STACK_BUFFER(k_thread_stack_t *sym)
{
        return (char *)sym + K_THREAD_STACK_RESERVED;
}

#endif /* CONFIG_USERSPACE */

#ifdef __cplusplus
}
#endif

#endif /* _ASMLANGUAGE */
#endif /* ZEPHYR_INCLUDE_KERNEL_THREAD_STACK_H */