esp_hal/system/
multi_core.rs1use core::{
4 marker::PhantomData,
5 mem::{ManuallyDrop, MaybeUninit},
6 sync::atomic::{AtomicPtr, Ordering},
7};
8
9#[instability::unstable]
10pub use crate::soc::cpu_control::is_running;
11use crate::{
12 peripherals::CPU_CTRL,
13 soc::cpu_control::{internal_park_core, start_core1_init},
14 system::Cpu,
15};
16
#[repr(C, align(16))]
/// Memory reserved for use as the APP (second) CPU core's stack.
///
/// `SIZE` is the stack size in bytes and must be a multiple of 16; this is
/// enforced at compile time in [`Stack::new`]. The `repr(align(16))` keeps
/// the buffer itself 16-byte aligned.
#[instability::unstable]
pub struct Stack<const SIZE: usize> {
    /// Uninitialized backing storage for the stack. The APP core writes to
    /// it as its stack grows; it is never initialized by this type.
    pub mem: MaybeUninit<[u8; SIZE]>,
}
36
37impl<const SIZE: usize> Default for Stack<SIZE> {
38 fn default() -> Self {
39 Self::new()
40 }
41}
42
43#[allow(clippy::len_without_is_empty)]
44impl<const SIZE: usize> Stack<SIZE> {
45 #[instability::unstable]
47 pub const fn new() -> Stack<SIZE> {
48 const {
49 ::core::assert!(SIZE.is_multiple_of(16));
51 }
52
53 Stack {
54 mem: MaybeUninit::uninit(),
55 }
56 }
57
58 #[instability::unstable]
60 pub const fn len(&self) -> usize {
61 SIZE
62 }
63
64 #[instability::unstable]
66 pub fn bottom(&mut self) -> *mut u32 {
67 self.mem.as_mut_ptr() as *mut u32
68 }
69
70 #[instability::unstable]
72 pub fn top(&mut self) -> *mut u32 {
73 unsafe { self.bottom().add(SIZE / 4) }
74 }
75}
76
// Startup handshake data for the APP core. All three are published with
// Release ordering by `setup_second_core_stack` before the core is started.

/// Type-erased pointer to the `ManuallyDrop<F>` entry closure stored at the
/// bottom of the APP core stack; consumed by `CpuControl::start_core1_run`.
pub(crate) static START_CORE1_FUNCTION: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());
/// Initial stack pointer for the APP core (one past the top of its stack).
/// NOTE(review): presumably read by the APP core boot code in
/// `soc::cpu_control` — the reader is not visible in this file.
pub(crate) static APP_CORE_STACK_TOP: AtomicPtr<u32> = AtomicPtr::new(core::ptr::null_mut());
/// Address of the APP core's stack-guard word, or null when no guard offset
/// was configured.
pub(crate) static APP_CORE_STACK_GUARD: AtomicPtr<u32> = AtomicPtr::new(core::ptr::null_mut());
82
#[must_use = "Dropping this guard will park the APP core"]
/// Guard returned by [`CpuControl::start_app_core`].
///
/// Dropping it parks the APP core again (see the `Drop` impl below), hence
/// the `#[must_use]` attribute.
#[instability::unstable]
pub struct AppCoreGuard<'a> {
    /// Ties the guard to the `'a` lifetime of the entry closure without
    /// storing any data.
    phantom: PhantomData<&'a ()>,
}
89
90impl Drop for AppCoreGuard<'_> {
91 fn drop(&mut self) {
92 unsafe { internal_park_core(Cpu::AppCpu, true) };
93 }
94}
95
/// Errors returned when controlling the APP core.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[instability::unstable]
pub enum Error {
    /// The APP core is already running, so it cannot be started again.
    CoreAlreadyRunning,
}
104
#[procmacros::doc_replace]
#[instability::unstable]
/// Driver for starting, parking and un-parking the APP (second) CPU core.
///
/// Constructed from the `CPU_CTRL` peripheral via [`CpuControl::new`].
pub struct CpuControl<'d> {
    /// Held only to claim exclusive ownership of the peripheral; never
    /// accessed afterwards (note the leading underscore).
    _cpu_control: CPU_CTRL<'d>,
}
152
impl<'d> CpuControl<'d> {
    #[instability::unstable]
    /// Creates a new `CpuControl` driver from the `CPU_CTRL` peripheral.
    pub fn new(cpu_control: CPU_CTRL<'d>) -> CpuControl<'d> {
        CpuControl {
            _cpu_control: cpu_control,
        }
    }

    #[instability::unstable]
    /// Parks (halts) the given core.
    ///
    /// # Safety
    ///
    /// The caller must ensure it is sound to stop execution on `core` at
    /// this point. NOTE(review): the precise contract is defined by
    /// `soc::cpu_control::internal_park_core`, which is not visible here —
    /// confirm and document it there.
    pub unsafe fn park_core(&mut self, core: Cpu) {
        unsafe { internal_park_core(core, true) };
    }

    #[instability::unstable]
    /// Resumes (un-parks) the given core.
    pub fn unpark_core(&mut self, core: Cpu) {
        unsafe { internal_park_core(core, false) };
    }

    /// Trampoline executed on the APP core once low-level init is done.
    ///
    /// Loads the type-erased entry closure published by
    /// `setup_second_core_stack` (the `Acquire` pairs with its `Release`
    /// stores), takes ownership of it, runs it, then parks the core forever.
    ///
    /// # Safety
    ///
    /// `F` must be the exact closure type that was erased into
    /// `START_CORE1_FUNCTION`; this holds because `start_app_core_*` passes
    /// the matching `start_core1_init::<F>` to `start_core1`.
    // NOTE(review): `#[inline(never)]` presumably keeps this a real symbol
    // for the boot path to jump to — confirm against `start_core1_init`.
    #[inline(never)]
    pub(crate) unsafe fn start_core1_run<F>() -> !
    where
        F: FnOnce(),
    {
        let entry = START_CORE1_FUNCTION.load(Ordering::Acquire);
        // The pointer is published before the core is started, so it must
        // be non-null by the time we run.
        debug_assert!(!entry.is_null());

        unsafe {
            // Take the closure out of the stack slot; `ManuallyDrop` ensures
            // it was not dropped by the publishing side.
            let entry = ManuallyDrop::take(&mut *entry.cast::<ManuallyDrop<F>>());
            entry();
            // The entry closure returned — nothing left to do on this core.
            loop {
                internal_park_core(Cpu::current(), true);
            }
        }
    }

    #[instability::unstable]
    /// Starts the APP core and runs `entry` on it.
    ///
    /// When the `stack_guard_monitoring` cfg is active, the stack-guard
    /// offset is taken from the `ESP_HAL_CONFIG_STACK_GUARD_OFFSET` config
    /// value; otherwise no guard is installed. See
    /// [`Self::start_app_core_with_stack_guard_offset`] for errors and the
    /// returned guard's semantics.
    pub fn start_app_core<'a, const SIZE: usize, F>(
        &mut self,
        stack: &'static mut Stack<SIZE>,
        entry: F,
    ) -> Result<AppCoreGuard<'a>, Error>
    where
        F: FnOnce(),
        F: Send + 'a,
    {
        cfg_if::cfg_if! {
            if #[cfg(all(stack_guard_monitoring))] {
                let stack_guard_offset = Some(esp_config::esp_config_int!(
                    usize,
                    "ESP_HAL_CONFIG_STACK_GUARD_OFFSET"
                ));
            } else {
                let stack_guard_offset = None;
            }
        };

        self.start_app_core_with_stack_guard_offset(stack, stack_guard_offset, entry)
    }

    #[instability::unstable]
    /// Starts the APP core with an explicit stack-guard offset and runs
    /// `entry` on it.
    ///
    /// `stack_guard_offset` is the byte offset into `stack` of the 4-byte
    /// guard word, or `None` to disable the guard. Returns an
    /// [`AppCoreGuard`] which parks the APP core again when dropped.
    ///
    /// # Errors
    ///
    /// Returns [`Error::CoreAlreadyRunning`] if the APP core is already
    /// running; this check is skipped while a debugger is connected.
    pub fn start_app_core_with_stack_guard_offset<'a, const SIZE: usize, F>(
        &mut self,
        stack: &'static mut Stack<SIZE>,
        stack_guard_offset: Option<usize>,
        entry: F,
    ) -> Result<AppCoreGuard<'a>, Error>
    where
        F: FnOnce(),
        F: Send + 'a,
    {
        if !crate::debugger::debugger_connected() && is_running(Cpu::AppCpu) {
            return Err(Error::CoreAlreadyRunning);
        }

        // Publish the entry closure and stack pointers for the APP core
        // before the core starts executing.
        setup_second_core_stack(stack, stack_guard_offset, entry);

        // Hand the APP core its boot entry point, monomorphized for `F` so
        // `start_core1_run::<F>` recovers the correct closure type.
        crate::soc::cpu_control::start_core1(start_core1_init::<F> as *const u32);

        self.unpark_core(Cpu::AppCpu);

        Ok(AppCoreGuard {
            phantom: PhantomData,
        })
    }
}
259
/// Publishes everything the APP core needs to boot: the entry closure
/// (written into the bottom of `stack`), the stack top, and the optional
/// stack-guard address.
///
/// `stack_guard_offset`, when present, must be 4-byte aligned and small
/// enough that the 4-byte guard word fits inside the stack (asserted below).
fn setup_second_core_stack<'a, F, const SIZE: usize>(
    stack: &'static mut Stack<SIZE>,
    stack_guard_offset: Option<usize>,
    entry: F,
) where
    F: FnOnce(),
    F: Send + 'a,
{
    // Prevent `entry` from being dropped here: ownership is transferred to
    // the APP core, which reclaims it via `ManuallyDrop::take` in
    // `CpuControl::start_core1_run`.
    let entry = ManuallyDrop::new(entry);

    unsafe {
        let stack_bottom = stack.bottom().cast::<u8>();
        // Carve out the guard word (if any); the region usable for the
        // closure starts 4 bytes above it.
        let (stack_guard, stack_bottom_above_guard) =
            if let Some(stack_guard_offset) = stack_guard_offset {
                assert!(stack_guard_offset.is_multiple_of(4));
                assert!(stack_guard_offset <= stack.len() - 4);
                (
                    stack_bottom.byte_add(stack_guard_offset),
                    stack_bottom.byte_add(stack_guard_offset).byte_add(4),
                )
            } else {
                (core::ptr::null_mut(), stack_bottom)
            };

        // Place the closure at the first suitably aligned address above the
        // guard word. NOTE(review): nothing checks that the closure (plus
        // alignment padding) fits below `stack.top()` — confirm oversized
        // closures cannot silently overrun the stack.
        let align_offset = stack_bottom_above_guard.align_offset(core::mem::align_of::<F>());
        let entry_dst = stack_bottom_above_guard
            .add(align_offset)
            .cast::<ManuallyDrop<F>>();

        entry_dst.write(entry);

        // The closure bytes are written above; these Release stores pair
        // with the Acquire load in `start_core1_run`, making the closure
        // visible to the APP core before it can observe the pointer.
        let entry_fn = entry_dst.cast::<()>();
        START_CORE1_FUNCTION.store(entry_fn, Ordering::Release);
        APP_CORE_STACK_TOP.store(stack.top(), Ordering::Release);
        APP_CORE_STACK_GUARD.store(stack_guard.cast(), Ordering::Release);
    }
}