use enumset::EnumSet;
use portable_atomic::{AtomicBool, Ordering};

use crate::{
    asynch::AtomicWaker,
    dma::{
        BurstConfig,
        DmaChannel,
        DmaPeripheral,
        DmaRxChannel,
        DmaRxInterrupt,
        DmaTxChannel,
        DmaTxInterrupt,
        InterruptAccess,
        PdmaChannel,
        RegisterAccess,
        RxRegisterAccess,
        TxRegisterAccess,
    },
    interrupt::InterruptHandler,
    peripherals::Interrupt,
};

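// The SPI2 and SPI3 DMA registers share the same layout, so the SPI2 register block type
// is used for both channels.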
pub(super) type SpiRegisterBlock = crate::pac::spi2::RegisterBlock;

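/// The RX half of a type-erased SPI DMA channel.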
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct AnySpiDmaRxChannel<'d>(pub(crate) AnySpiDmaChannel<'d>);

impl AnySpiDmaRxChannel<'_> {
    fn regs(&self) -> &SpiRegisterBlock {
        self.0.register_block()
    }
}

impl crate::private::Sealed for AnySpiDmaRxChannel<'_> {}
impl DmaRxChannel for AnySpiDmaRxChannel<'_> {}

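/// The TX half of a type-erased SPI DMA channel.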
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct AnySpiDmaTxChannel<'d>(pub(crate) AnySpiDmaChannel<'d>);

impl AnySpiDmaTxChannel<'_> {
    fn regs(&self) -> &SpiRegisterBlock {
        self.0.register_block()
    }
}

impl crate::private::Sealed for AnySpiDmaTxChannel<'_> {}
impl DmaTxChannel for AnySpiDmaTxChannel<'_> {}

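// TX half: drives the outbound (`out_*` / `outlink_*`) registers of the SPI DMA engine.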
impl RegisterAccess for AnySpiDmaTxChannel<'_> {
    fn reset(&self) {
        self.regs().dma_conf().modify(|_, w| w.out_rst().set_bit());
        self.regs()
            .dma_conf()
            .modify(|_, w| w.out_rst().clear_bit());
    }

    fn set_burst_mode(&self, burst_mode: BurstConfig) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.out_data_burst_en().bit(burst_mode.is_burst_enabled()));
    }

    fn set_descr_burst_mode(&self, burst_mode: bool) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.outdscr_burst_en().bit(burst_mode));
    }

    fn set_peripheral(&self, _peripheral: u8) {
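        // No-op: this DMA channel is dedicated to its SPI peripheral, so there is no
        // peripheral selection to program.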
    }

    fn set_link_addr(&self, address: u32) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| unsafe { w.outlink_addr().bits(address) });
    }

    fn start(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_start().set_bit());
    }

    fn stop(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_stop().set_bit());
    }

    fn restart(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_restart().set_bit());
    }

    fn set_check_owner(&self, check_owner: Option<bool>) {
        if check_owner == Some(true) {
            panic!("SPI DMA does not support checking descriptor ownership");
        }
    }

    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool {
        self.0.is_compatible_with(peripheral)
    }

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: crate::dma::DmaExtMemBKSize) {
        self.regs()
            .dma_conf()
            .modify(|_, w| unsafe { w.ext_mem_bk_size().bits(size as u8) });
    }

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool {
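        // Of the SPI DMA channels, only SPI2's can access external PSRAM.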
        matches!(self.0, AnySpiDmaChannel(any::Inner::Spi2(_)))
    }
}

impl TxRegisterAccess for AnySpiDmaTxChannel<'_> {
    fn is_fifo_empty(&self) -> bool {
        cfg_if::cfg_if! {
            if #[cfg(esp32)] {
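                // On the ESP32 the out-FIFO-empty flag is reported in bit 31 of DMA_RSTATUS.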
                self.regs().dma_rstatus().read().dma_out_status().bits() & 0x80000000 != 0
            } else {
                self.regs().dma_outstatus().read().dma_outfifo_empty().bit_is_set()
            }
        }
    }

    fn set_auto_write_back(&self, enable: bool) {
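        // The SPI DMA engine has no automatic write-back, so the feature may only be disabled.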
        assert!(!enable);
    }

    fn last_dscr_address(&self) -> usize {
        self.regs()
            .out_eof_des_addr()
            .read()
            .dma_out_eof_des_addr()
            .bits() as usize
    }

    fn peripheral_interrupt(&self) -> Option<Interrupt> {
        None
    }

    fn async_handler(&self) -> Option<InterruptHandler> {
        None
    }
}

impl InterruptAccess<DmaTxInterrupt> for AnySpiDmaTxChannel<'_> {
    fn enable_listen(&self, interrupts: EnumSet<DmaTxInterrupt>, enable: bool) {
        self.regs().dma_int_ena().modify(|_, w| {
            for interrupt in interrupts {
                match interrupt {
                    DmaTxInterrupt::TotalEof => w.out_total_eof().bit(enable),
                    DmaTxInterrupt::DescriptorError => w.outlink_dscr_error().bit(enable),
                    DmaTxInterrupt::Eof => w.out_eof().bit(enable),
                    DmaTxInterrupt::Done => w.out_done().bit(enable),
                };
            }
            w
        });
    }

    fn is_listening(&self) -> EnumSet<DmaTxInterrupt> {
        let mut result = EnumSet::new();

        let int_ena = self.regs().dma_int_ena().read();
        if int_ena.out_total_eof().bit_is_set() {
            result |= DmaTxInterrupt::TotalEof;
        }
        if int_ena.outlink_dscr_error().bit_is_set() {
            result |= DmaTxInterrupt::DescriptorError;
        }
        if int_ena.out_eof().bit_is_set() {
            result |= DmaTxInterrupt::Eof;
        }
        if int_ena.out_done().bit_is_set() {
            result |= DmaTxInterrupt::Done;
        }

        result
    }

    fn clear(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.regs().dma_int_clr().write(|w| {
            for interrupt in interrupts.into() {
                match interrupt {
                    DmaTxInterrupt::TotalEof => w.out_total_eof().clear_bit_by_one(),
                    DmaTxInterrupt::DescriptorError => w.outlink_dscr_error().clear_bit_by_one(),
                    DmaTxInterrupt::Eof => w.out_eof().clear_bit_by_one(),
                    DmaTxInterrupt::Done => w.out_done().clear_bit_by_one(),
                };
            }
            w
        });
    }

    fn pending_interrupts(&self) -> EnumSet<DmaTxInterrupt> {
        let mut result = EnumSet::new();

        let int_raw = self.regs().dma_int_raw().read();
        if int_raw.out_total_eof().bit_is_set() {
            result |= DmaTxInterrupt::TotalEof;
        }
        if int_raw.outlink_dscr_error().bit_is_set() {
            result |= DmaTxInterrupt::DescriptorError;
        }
        if int_raw.out_eof().bit_is_set() {
            result |= DmaTxInterrupt::Eof;
        }
        if int_raw.out_done().bit_is_set() {
            result |= DmaTxInterrupt::Done;
        }

        result
    }

    fn waker(&self) -> &'static AtomicWaker {
        self.0.tx_waker()
    }

    fn is_async(&self) -> bool {
        self.0.tx_async_flag().load(Ordering::Acquire)
    }

    fn set_async(&self, is_async: bool) {
        self.0.tx_async_flag().store(is_async, Ordering::Release);
    }
}

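// RX half: drives the inbound (`in_*` / `inlink_*`) registers of the SPI DMA engine.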
impl RegisterAccess for AnySpiDmaRxChannel<'_> {
    fn reset(&self) {
        self.regs().dma_conf().modify(|_, w| w.in_rst().set_bit());
        self.regs().dma_conf().modify(|_, w| w.in_rst().clear_bit());
    }

    fn set_burst_mode(&self, _burst_mode: BurstConfig) {}

    fn set_descr_burst_mode(&self, burst_mode: bool) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.indscr_burst_en().bit(burst_mode));
    }

    fn set_peripheral(&self, _peripheral: u8) {
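        // No-op: the channel is dedicated to its SPI peripheral.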
    }

    fn set_link_addr(&self, address: u32) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| unsafe { w.inlink_addr().bits(address) });
    }

    fn start(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_start().set_bit());
    }

    fn stop(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_stop().set_bit());
    }

    fn restart(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_restart().set_bit());
    }

    fn set_check_owner(&self, check_owner: Option<bool>) {
        if check_owner == Some(true) {
            panic!("SPI DMA does not support checking descriptor ownership");
        }
    }

    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool {
        self.0.is_compatible_with(peripheral)
    }

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: crate::dma::DmaExtMemBKSize) {
        self.regs()
            .dma_conf()
            .modify(|_, w| unsafe { w.ext_mem_bk_size().bits(size as u8) });
    }

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool {
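        // Only the SPI2 DMA channel can access external PSRAM.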
        matches!(self.0, AnySpiDmaChannel(any::Inner::Spi2(_)))
    }
}

impl RxRegisterAccess for AnySpiDmaRxChannel<'_> {
    fn peripheral_interrupt(&self) -> Option<Interrupt> {
        Some(self.0.peripheral_interrupt())
    }

    fn async_handler(&self) -> Option<InterruptHandler> {
        Some(self.0.async_handler())
    }
}

impl InterruptAccess<DmaRxInterrupt> for AnySpiDmaRxChannel<'_> {
    fn enable_listen(&self, interrupts: EnumSet<DmaRxInterrupt>, enable: bool) {
        self.regs().dma_int_ena().modify(|_, w| {
            for interrupt in interrupts {
                match interrupt {
                    DmaRxInterrupt::SuccessfulEof => w.in_suc_eof().bit(enable),
                    DmaRxInterrupt::ErrorEof => w.in_err_eof().bit(enable),
                    DmaRxInterrupt::DescriptorError => w.inlink_dscr_error().bit(enable),
                    DmaRxInterrupt::DescriptorEmpty => w.inlink_dscr_empty().bit(enable),
                    DmaRxInterrupt::Done => w.in_done().bit(enable),
                };
            }
            w
        });
    }

    fn is_listening(&self) -> EnumSet<DmaRxInterrupt> {
        let mut result = EnumSet::new();

        let int_ena = self.regs().dma_int_ena().read();
        if int_ena.inlink_dscr_error().bit_is_set() {
            result |= DmaRxInterrupt::DescriptorError;
        }
        if int_ena.inlink_dscr_empty().bit_is_set() {
            result |= DmaRxInterrupt::DescriptorEmpty;
        }
        if int_ena.in_suc_eof().bit_is_set() {
            result |= DmaRxInterrupt::SuccessfulEof;
        }
        if int_ena.in_err_eof().bit_is_set() {
            result |= DmaRxInterrupt::ErrorEof;
        }
        if int_ena.in_done().bit_is_set() {
            result |= DmaRxInterrupt::Done;
        }

        result
    }

    fn clear(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.regs().dma_int_clr().modify(|_, w| {
            for interrupt in interrupts.into() {
                match interrupt {
                    DmaRxInterrupt::SuccessfulEof => w.in_suc_eof().clear_bit_by_one(),
                    DmaRxInterrupt::ErrorEof => w.in_err_eof().clear_bit_by_one(),
                    DmaRxInterrupt::DescriptorError => w.inlink_dscr_error().clear_bit_by_one(),
                    DmaRxInterrupt::DescriptorEmpty => w.inlink_dscr_empty().clear_bit_by_one(),
                    DmaRxInterrupt::Done => w.in_done().clear_bit_by_one(),
                };
            }
            w
        });
    }

    fn pending_interrupts(&self) -> EnumSet<DmaRxInterrupt> {
        let mut result = EnumSet::new();

        let int_raw = self.regs().dma_int_raw().read();
        if int_raw.inlink_dscr_error().bit_is_set() {
            result |= DmaRxInterrupt::DescriptorError;
        }
        if int_raw.inlink_dscr_empty().bit_is_set() {
            result |= DmaRxInterrupt::DescriptorEmpty;
        }
        if int_raw.in_suc_eof().bit_is_set() {
            result |= DmaRxInterrupt::SuccessfulEof;
        }
        if int_raw.in_err_eof().bit_is_set() {
            result |= DmaRxInterrupt::ErrorEof;
        }
        if int_raw.in_done().bit_is_set() {
            result |= DmaRxInterrupt::Done;
        }

        result
    }

    fn waker(&self) -> &'static AtomicWaker {
        self.0.rx_waker()
    }

    fn is_async(&self) -> bool {
        self.0.rx_async_flag().load(Ordering::Relaxed)
    }

    fn set_async(&self, is_async: bool) {
        self.0.rx_async_flag().store(is_async, Ordering::Relaxed);
    }
}

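// A type-erased SPI DMA channel: either the DMA channel dedicated to SPI2 or the one
// dedicated to SPI3.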
crate::any_peripheral! {
    pub peripheral AnySpiDmaChannel<'d> {
        Spi2(crate::peripherals::DMA_SPI2<'d>),
        Spi3(crate::peripherals::DMA_SPI3<'d>),
    }
}

impl<'d> DmaChannel for AnySpiDmaChannel<'d> {
    type Rx = AnySpiDmaRxChannel<'d>;
    type Tx = AnySpiDmaTxChannel<'d>;

    unsafe fn split_internal(self, _: crate::private::Internal) -> (Self::Rx, Self::Tx) {
        (
            AnySpiDmaRxChannel(unsafe { self.clone_unchecked() }),
            AnySpiDmaTxChannel(unsafe { self.clone_unchecked() }),
        )
    }
}

impl PdmaChannel for AnySpiDmaChannel<'_> {
    type RegisterBlock = SpiRegisterBlock;

    delegate::delegate! {
        to match &self.0 {
            any::Inner::Spi2(channel) => channel,
            any::Inner::Spi3(channel) => channel,
        } {
            fn register_block(&self) -> &SpiRegisterBlock;
            fn tx_waker(&self) -> &'static AtomicWaker;
            fn rx_waker(&self) -> &'static AtomicWaker;
            fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool;
            fn peripheral_interrupt(&self) -> Interrupt;
            fn async_handler(&self) -> InterruptHandler;
            fn rx_async_flag(&self) -> &'static AtomicBool;
            fn tx_async_flag(&self) -> &'static AtomicBool;
        }
    }
}