1use portable_atomic::{AtomicBool, Ordering};
2
3use crate::{asynch::AtomicWaker, dma::*, peripherals::Interrupt};
4
/// Register block used for SPI-coupled PDMA channels.
///
/// NOTE(review): the SPI2 layout is used for every variant (including SPI3),
/// so the two blocks are assumed register-compatible — confirm against the PAC.
pub(super) type SpiRegisterBlock = crate::pac::spi2::RegisterBlock;
6
/// RX half of a type-erased SPI DMA channel.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct AnySpiDmaRxChannel<'d>(pub(crate) AnySpiDmaChannel<'d>);
11
12impl AnySpiDmaRxChannel<'_> {
13 fn regs(&self) -> &SpiRegisterBlock {
14 self.0.register_block()
15 }
16}
17
// Marker impls: seal the type against downstream implementations and mark it
// as usable wherever a DMA RX channel is expected.
impl crate::private::Sealed for AnySpiDmaRxChannel<'_> {}
impl DmaRxChannel for AnySpiDmaRxChannel<'_> {}
20
/// TX half of a type-erased SPI DMA channel.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct AnySpiDmaTxChannel<'d>(pub(crate) AnySpiDmaChannel<'d>);
25
26impl AnySpiDmaTxChannel<'_> {
27 fn regs(&self) -> &SpiRegisterBlock {
28 self.0.register_block()
29 }
30}
31
// Marker impls: seal the type against downstream implementations and mark it
// as usable wherever a DMA TX channel is expected.
impl crate::private::Sealed for AnySpiDmaTxChannel<'_> {}
impl DmaTxChannel for AnySpiDmaTxChannel<'_> {}
34
impl RegisterAccess for AnySpiDmaTxChannel<'_> {
    /// Resets the outbound DMA state machine by pulsing `out_rst`.
    fn reset(&self) {
        self.regs().dma_conf().modify(|_, w| w.out_rst().set_bit());
        self.regs()
            .dma_conf()
            .modify(|_, w| w.out_rst().clear_bit());
    }

    /// Enables/disables data burst transfers for the TX direction.
    fn set_burst_mode(&self, burst_mode: BurstConfig) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.out_data_burst_en().bit(burst_mode.is_burst_enabled()));
    }

    /// Enables/disables burst reads of the descriptor list itself.
    fn set_descr_burst_mode(&self, burst_mode: bool) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.outdscr_burst_en().bit(burst_mode));
    }

    fn set_peripheral(&self, _peripheral: u8) {
        // No-op: a PDMA channel is hard-wired to its SPI instance, so there is
        // no peripheral-select register to program here.
    }

    /// Points the hardware at the first TX descriptor.
    fn set_link_addr(&self, address: u32) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| unsafe { w.outlink_addr().bits(address) });
    }

    /// Starts fetching/processing the outbound descriptor chain.
    fn start(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_start().set_bit());
    }

    /// Stops the outbound descriptor processing.
    fn stop(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_stop().set_bit());
    }

    /// Resumes descriptor processing after a stop.
    fn restart(&self) {
        self.regs()
            .dma_out_link()
            .modify(|_, w| w.outlink_restart().set_bit());
    }

    fn set_check_owner(&self, check_owner: Option<bool>) {
        // The hardware cannot check descriptor ownership; only allow callers
        // to request the default (None) or explicitly-disabled behaviour.
        if check_owner == Some(true) {
            panic!("SPI DMA does not support checking descriptor ownership");
        }
    }

    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool {
        self.0.is_compatible_with(peripheral)
    }

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: DmaExtMemBKSize) {
        self.regs()
            .dma_conf()
            .modify(|_, w| unsafe { w.ext_mem_bk_size().bits(size as u8) });
    }

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool {
        // Only the SPI2 channel reports PSRAM capability.
        matches!(self.0, AnySpiDmaChannel(AnySpiDmaChannelInner::Spi2(_)))
    }
}
105
impl TxRegisterAccess for AnySpiDmaTxChannel<'_> {
    /// Returns whether the outbound FIFO has been fully drained.
    fn is_fifo_empty(&self) -> bool {
        cfg_if::cfg_if! {
            if #[cfg(esp32)] {
                // ESP32 exposes the empty flag as bit 31 of the raw out-status word.
                self.regs().dma_rstatus().read().dma_out_status().bits() & 0x80000000 != 0
            } else {
                self.regs().dma_outstatus().read().dma_outfifo_empty().bit_is_set()
            }
        }
    }

    fn set_auto_write_back(&self, enable: bool) {
        // SPI PDMA has no auto write-back support; reject any attempt to
        // enable it. NOTE(review): confirm against the TRM.
        assert!(!enable);
    }

    /// Address of the descriptor that triggered the most recent out-EOF.
    fn last_dscr_address(&self) -> usize {
        self.regs()
            .out_eof_des_addr()
            .read()
            .dma_out_eof_des_addr()
            .bits() as usize
    }

    fn peripheral_interrupt(&self) -> Option<Interrupt> {
        // Interrupt wiring is provided by the RX half (see RxRegisterAccess
        // below); the TX half contributes nothing.
        None
    }

    fn async_handler(&self) -> Option<InterruptHandler> {
        // See `peripheral_interrupt` — the RX half supplies the handler.
        None
    }
}
138
impl InterruptAccess<DmaTxInterrupt> for AnySpiDmaTxChannel<'_> {
    /// Enables or disables the given TX interrupt sources.
    fn enable_listen(&self, interrupts: EnumSet<DmaTxInterrupt>, enable: bool) {
        self.regs().dma_int_ena().modify(|_, w| {
            for interrupt in interrupts {
                match interrupt {
                    DmaTxInterrupt::TotalEof => w.out_total_eof().bit(enable),
                    DmaTxInterrupt::DescriptorError => w.outlink_dscr_error().bit(enable),
                    DmaTxInterrupt::Eof => w.out_eof().bit(enable),
                    DmaTxInterrupt::Done => w.out_done().bit(enable),
                };
            }
            w
        });
    }

    /// Returns the set of TX interrupts currently enabled.
    fn is_listening(&self) -> EnumSet<DmaTxInterrupt> {
        let mut result = EnumSet::new();

        let int_ena = self.regs().dma_int_ena().read();
        if int_ena.out_total_eof().bit_is_set() {
            result |= DmaTxInterrupt::TotalEof;
        }
        if int_ena.outlink_dscr_error().bit_is_set() {
            result |= DmaTxInterrupt::DescriptorError;
        }
        if int_ena.out_eof().bit_is_set() {
            result |= DmaTxInterrupt::Eof;
        }
        if int_ena.out_done().bit_is_set() {
            result |= DmaTxInterrupt::Done;
        }

        result
    }

    /// Clears the given pending TX interrupts.
    fn clear(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        // `write` (not `modify`): unselected bits stay zero, which is the
        // correct idiom for a write-1-to-clear register.
        self.regs().dma_int_clr().write(|w| {
            for interrupt in interrupts.into() {
                match interrupt {
                    DmaTxInterrupt::TotalEof => w.out_total_eof().clear_bit_by_one(),
                    DmaTxInterrupt::DescriptorError => w.outlink_dscr_error().clear_bit_by_one(),
                    DmaTxInterrupt::Eof => w.out_eof().clear_bit_by_one(),
                    DmaTxInterrupt::Done => w.out_done().clear_bit_by_one(),
                };
            }
            w
        });
    }

    /// Returns the raw (unmasked) pending TX interrupts.
    fn pending_interrupts(&self) -> EnumSet<DmaTxInterrupt> {
        let mut result = EnumSet::new();

        let int_raw = self.regs().dma_int_raw().read();
        if int_raw.out_total_eof().bit_is_set() {
            result |= DmaTxInterrupt::TotalEof;
        }
        if int_raw.outlink_dscr_error().bit_is_set() {
            result |= DmaTxInterrupt::DescriptorError;
        }
        if int_raw.out_eof().bit_is_set() {
            result |= DmaTxInterrupt::Eof;
        }
        if int_raw.out_done().bit_is_set() {
            result |= DmaTxInterrupt::Done;
        }

        result
    }

    fn waker(&self) -> &'static AtomicWaker {
        self.0.tx_waker()
    }

    fn is_async(&self) -> bool {
        // Acquire pairs with the Release store in `set_async`.
        self.0.tx_async_flag().load(Ordering::Acquire)
    }

    fn set_async(&self, is_async: bool) {
        self.0.tx_async_flag().store(is_async, Ordering::Release);
    }
}
220
impl RegisterAccess for AnySpiDmaRxChannel<'_> {
    /// Resets the inbound DMA state machine by pulsing `in_rst`.
    fn reset(&self) {
        self.regs().dma_conf().modify(|_, w| w.in_rst().set_bit());
        self.regs().dma_conf().modify(|_, w| w.in_rst().clear_bit());
    }

    fn set_burst_mode(&self, _burst_mode: BurstConfig) {
        // No-op: there is no data-burst enable for the RX direction (only
        // descriptor bursts, configured below).
    }

    /// Enables/disables burst reads of the descriptor list itself.
    fn set_descr_burst_mode(&self, burst_mode: bool) {
        self.regs()
            .dma_conf()
            .modify(|_, w| w.indscr_burst_en().bit(burst_mode));
    }

    fn set_peripheral(&self, _peripheral: u8) {
        // No-op: a PDMA channel is hard-wired to its SPI instance, so there is
        // no peripheral-select register to program here.
    }

    /// Points the hardware at the first RX descriptor.
    fn set_link_addr(&self, address: u32) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| unsafe { w.inlink_addr().bits(address) });
    }

    /// Starts fetching/processing the inbound descriptor chain.
    fn start(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_start().set_bit());
    }

    /// Stops the inbound descriptor processing.
    fn stop(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_stop().set_bit());
    }

    /// Resumes descriptor processing after a stop.
    fn restart(&self) {
        self.regs()
            .dma_in_link()
            .modify(|_, w| w.inlink_restart().set_bit());
    }

    fn set_check_owner(&self, check_owner: Option<bool>) {
        // The hardware cannot check descriptor ownership; only allow callers
        // to request the default (None) or explicitly-disabled behaviour.
        if check_owner == Some(true) {
            panic!("SPI DMA does not support checking descriptor ownership");
        }
    }

    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool {
        self.0.is_compatible_with(peripheral)
    }

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: DmaExtMemBKSize) {
        self.regs()
            .dma_conf()
            .modify(|_, w| unsafe { w.ext_mem_bk_size().bits(size as u8) });
    }

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool {
        // Only the SPI2 channel reports PSRAM capability.
        matches!(self.0, AnySpiDmaChannel(AnySpiDmaChannelInner::Spi2(_)))
    }
}
285
impl RxRegisterAccess for AnySpiDmaRxChannel<'_> {
    // The RX half owns the interrupt wiring for the whole channel; the TX
    // half's corresponding methods return `None`.
    fn peripheral_interrupt(&self) -> Option<Interrupt> {
        Some(self.0.peripheral_interrupt())
    }

    fn async_handler(&self) -> Option<InterruptHandler> {
        Some(self.0.async_handler())
    }
}
295
296impl InterruptAccess<DmaRxInterrupt> for AnySpiDmaRxChannel<'_> {
297 fn enable_listen(&self, interrupts: EnumSet<DmaRxInterrupt>, enable: bool) {
298 self.regs().dma_int_ena().modify(|_, w| {
299 for interrupt in interrupts {
300 match interrupt {
301 DmaRxInterrupt::SuccessfulEof => w.in_suc_eof().bit(enable),
302 DmaRxInterrupt::ErrorEof => w.in_err_eof().bit(enable),
303 DmaRxInterrupt::DescriptorError => w.inlink_dscr_error().bit(enable),
304 DmaRxInterrupt::DescriptorEmpty => w.inlink_dscr_empty().bit(enable),
305 DmaRxInterrupt::Done => w.in_done().bit(enable),
306 };
307 }
308 w
309 });
310 }
311
312 fn is_listening(&self) -> EnumSet<DmaRxInterrupt> {
313 let mut result = EnumSet::new();
314
315 let int_ena = self.regs().dma_int_ena().read();
316 if int_ena.inlink_dscr_error().bit_is_set() {
317 result |= DmaRxInterrupt::DescriptorError;
318 }
319 if int_ena.inlink_dscr_empty().bit_is_set() {
320 result |= DmaRxInterrupt::DescriptorEmpty;
321 }
322 if int_ena.in_suc_eof().bit_is_set() {
323 result |= DmaRxInterrupt::SuccessfulEof;
324 }
325 if int_ena.in_err_eof().bit_is_set() {
326 result |= DmaRxInterrupt::ErrorEof;
327 }
328 if int_ena.in_done().bit_is_set() {
329 result |= DmaRxInterrupt::Done;
330 }
331
332 result
333 }
334
335 fn clear(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
336 self.regs().dma_int_clr().modify(|_, w| {
337 for interrupt in interrupts.into() {
338 match interrupt {
339 DmaRxInterrupt::SuccessfulEof => w.in_suc_eof().clear_bit_by_one(),
340 DmaRxInterrupt::ErrorEof => w.in_err_eof().clear_bit_by_one(),
341 DmaRxInterrupt::DescriptorError => w.inlink_dscr_error().clear_bit_by_one(),
342 DmaRxInterrupt::DescriptorEmpty => w.inlink_dscr_empty().clear_bit_by_one(),
343 DmaRxInterrupt::Done => w.in_done().clear_bit_by_one(),
344 };
345 }
346 w
347 });
348 }
349
350 fn pending_interrupts(&self) -> EnumSet<DmaRxInterrupt> {
351 let mut result = EnumSet::new();
352
353 let int_raw = self.regs().dma_int_raw().read();
354 if int_raw.inlink_dscr_error().bit_is_set() {
355 result |= DmaRxInterrupt::DescriptorError;
356 }
357 if int_raw.inlink_dscr_empty().bit_is_set() {
358 result |= DmaRxInterrupt::DescriptorEmpty;
359 }
360 if int_raw.in_suc_eof().bit_is_set() {
361 result |= DmaRxInterrupt::SuccessfulEof;
362 }
363 if int_raw.in_err_eof().bit_is_set() {
364 result |= DmaRxInterrupt::ErrorEof;
365 }
366 if int_raw.in_done().bit_is_set() {
367 result |= DmaRxInterrupt::Done;
368 }
369
370 result
371 }
372
373 fn waker(&self) -> &'static AtomicWaker {
374 self.0.rx_waker()
375 }
376
377 fn is_async(&self) -> bool {
378 self.0.rx_async_flag().load(Ordering::Relaxed)
379 }
380
381 fn set_async(&self, _is_async: bool) {
382 self.0.rx_async_flag().store(_is_async, Ordering::Relaxed);
383 }
384}
385
// Type-erased SPI DMA channel covering both PDMA-backed SPI instances.
crate::any_peripheral! {
    pub peripheral AnySpiDmaChannel<'d> {
        Spi2(super::DMA_SPI2<'d>),
        Spi3(super::DMA_SPI3<'d>),
    }
}
393
impl<'d> DmaChannel for AnySpiDmaChannel<'d> {
    type Rx = AnySpiDmaRxChannel<'d>;
    type Tx = AnySpiDmaTxChannel<'d>;

    /// Splits the channel into its RX and TX halves.
    unsafe fn split_internal(self, _: crate::private::Internal) -> (Self::Rx, Self::Tx) {
        (
            // SAFETY(review): the channel is duplicated once per direction and
            // each clone is confined to a direction-specific wrapper; assumed
            // the RX/TX halves never conflict on shared registers — the
            // `unsafe fn` contract the caller upholds. TODO confirm.
            AnySpiDmaRxChannel(unsafe { self.clone_unchecked() }),
            AnySpiDmaTxChannel(unsafe { self.clone_unchecked() }),
        )
    }
}
405
impl PdmaChannel for AnySpiDmaChannel<'_> {
    type RegisterBlock = SpiRegisterBlock;

    // Forward every `PdmaChannel` method to whichever concrete SPI DMA
    // channel variant this wrapper currently holds.
    delegate::delegate! {
        to match &self.0 {
            AnySpiDmaChannelInner::Spi2(channel) => channel,
            AnySpiDmaChannelInner::Spi3(channel) => channel,
        } {
            fn register_block(&self) -> &SpiRegisterBlock;
            fn tx_waker(&self) -> &'static AtomicWaker;
            fn rx_waker(&self) -> &'static AtomicWaker;
            fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool;
            fn peripheral_interrupt(&self) -> Interrupt;
            fn async_handler(&self) -> InterruptHandler;
            fn rx_async_flag(&self) -> &'static AtomicBool;
            fn tx_async_flag(&self) -> &'static AtomicBool;
        }
    }
}