1use core::{
2 mem::ManuallyDrop,
3 ops::{Deref, DerefMut},
4};
5
6#[cfg(not(esp32s2))]
7use crate::dma::{AnyGdmaChannel, AnyGdmaRxChannel, AnyGdmaTxChannel, DmaEligible};
8use crate::{
9 Async,
10 Blocking,
11 DriverMode,
12 dma::{
13 BurstConfig,
14 Channel,
15 ChannelRx,
16 ChannelTx,
17 DmaChannelConvert,
18 DmaDescriptor,
19 DmaError,
20 DmaPeripheral,
21 DmaRxBuf,
22 DmaRxBuffer,
23 DmaRxInterrupt,
24 DmaTxBuf,
25 DmaTxBuffer,
26 DmaTxInterrupt,
27 },
28};
29#[cfg(esp32s2)]
30use crate::{
31 dma::{CopyDmaRxChannel, CopyDmaTxChannel},
32 peripherals::DMA_COPY,
33};
34
// Select the concrete DMA channel types used for memory-to-memory
// transfers: the ESP32-S2 has a dedicated COPY DMA peripheral, while the
// GDMA-based chips can use any general-purpose GDMA channel.
cfg_if::cfg_if! {
    if #[cfg(esp32s2)] {
        type Mem2MemChannel<'d> = DMA_COPY<'d>;
        type Mem2MemRxChannel<'d> = CopyDmaRxChannel<'d>;
        type Mem2MemTxChannel<'d> = CopyDmaTxChannel<'d>;
    } else {
        type Mem2MemChannel<'d> = AnyGdmaChannel<'d>;
        type Mem2MemRxChannel<'d> = AnyGdmaRxChannel<'d>;
        type Mem2MemTxChannel<'d> = AnyGdmaTxChannel<'d>;
    }
}
46
/// DMA-driven memory-to-memory copy driver.
///
/// Holds the receive and transmit halves of a DMA channel set up for
/// memory-to-memory operation. The halves are public so they can be used
/// together or split and driven independently.
pub struct Mem2Mem<'d, Dm>
where
    Dm: DriverMode,
{
    /// Receive half: writes incoming data into the destination buffer.
    pub rx: Mem2MemRx<'d, Dm>,
    /// Transmit half: reads outgoing data from the source buffer.
    pub tx: Mem2MemTx<'d, Dm>,
}
61
impl<'d> Mem2Mem<'d, Blocking> {
    /// Creates a new blocking memory-to-memory DMA driver.
    ///
    /// On GDMA-based chips the caller must also hand over a DMA-eligible
    /// `peripheral`; consuming it is what lets this constructor wrap
    /// [`Self::new_unsafe`] safely.
    pub fn new(
        channel: impl DmaChannelConvert<Mem2MemChannel<'d>>,
        #[cfg(dma_kind = "gdma")] peripheral: impl DmaEligible,
    ) -> Self {
        unsafe {
            Self::new_unsafe(
                channel,
                #[cfg(dma_kind = "gdma")]
                peripheral.dma_peripheral(),
            )
        }
    }

    /// Creates a new memory-to-memory DMA driver from a raw
    /// [`DmaPeripheral`] identifier.
    ///
    /// # Safety
    ///
    /// NOTE(review): the safe [`Self::new`] wrapper guarantees exclusive
    /// use of the peripheral by consuming an `impl DmaEligible`; callers of
    /// this function presumably must uphold that exclusivity themselves —
    /// confirm the exact contract against the crate's DMA documentation.
    pub unsafe fn new_unsafe(
        channel: impl DmaChannelConvert<Mem2MemChannel<'d>>,
        #[cfg(dma_kind = "gdma")] peripheral: DmaPeripheral,
    ) -> Self {
        let channel = Channel::new(channel.degrade());

        cfg_if::cfg_if! {
            if #[cfg(dma_kind = "gdma")] {
                // GDMA channels must be switched into memory-to-memory mode
                // explicitly on the RX half.
                let mut channel = channel;
                channel.rx.set_mem2mem_mode(true);
            } else {
                // No peripheral parameter exists on this target; Spi2 is
                // stored for both halves — presumably a placeholder value
                // required by the shared channel API. TODO confirm.
                let peripheral = DmaPeripheral::Spi2;
            }
        }

        Mem2Mem {
            rx: Mem2MemRx {
                channel: channel.rx,
                peripheral,
            },
            tx: Mem2MemTx {
                channel: channel.tx,
                peripheral,
            },
        }
    }

    /// Wraps the driver in a [`SimpleMem2Mem`] that manages the given
    /// descriptor lists and buffer setup for each transfer.
    pub fn with_descriptors(
        self,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        config: BurstConfig,
    ) -> Result<SimpleMem2Mem<'d, Blocking>, DmaError> {
        SimpleMem2Mem::new(self, rx_descriptors, tx_descriptors, config)
    }

    /// Converts both halves of the driver into async mode.
    pub fn into_async(self) -> Mem2Mem<'d, Async> {
        Mem2Mem {
            rx: self.rx.into_async(),
            tx: self.tx.into_async(),
        }
    }
}
130
/// The receive (destination) half of a [`Mem2Mem`] driver.
pub struct Mem2MemRx<'d, Dm: DriverMode> {
    channel: ChannelRx<Dm, Mem2MemRxChannel<'d>>,
    // Peripheral id used when preparing transfers on this channel.
    peripheral: DmaPeripheral,
}
136
137impl<'d> Mem2MemRx<'d, Blocking> {
138 pub fn into_async(self) -> Mem2MemRx<'d, Async> {
140 Mem2MemRx {
141 channel: self.channel.into_async(),
142 peripheral: self.peripheral,
143 }
144 }
145}
146
147impl<'d, Dm> Mem2MemRx<'d, Dm>
148where
149 Dm: DriverMode,
150{
151 pub fn receive<BUF>(
153 mut self,
154 mut buf: BUF,
155 ) -> Result<Mem2MemRxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
156 where
157 BUF: DmaRxBuffer,
158 {
159 let result = unsafe {
160 self.channel
161 .prepare_transfer(self.peripheral, &mut buf)
162 .and_then(|_| self.channel.start_transfer())
163 };
164
165 if let Err(e) = result {
166 return Err((e, self, buf));
167 }
168
169 Ok(Mem2MemRxTransfer {
170 m2m: ManuallyDrop::new(self),
171 buf_view: ManuallyDrop::new(buf.into_view()),
172 })
173 }
174}
175
/// An in-progress memory-to-memory receive transfer.
///
/// Dropping this handle stops the transfer and releases the buffer.
pub struct Mem2MemRxTransfer<'d, M: DriverMode, BUF: DmaRxBuffer> {
    // ManuallyDrop lets `release`/`drop` move each field out by value
    // exactly once without running its destructor twice.
    m2m: ManuallyDrop<Mem2MemRx<'d, M>>,
    buf_view: ManuallyDrop<BUF::View>,
}
182
impl<'d, M: DriverMode, BUF: DmaRxBuffer> Mem2MemRxTransfer<'d, M, BUF> {
    /// Returns true once the transfer has stopped making progress, either
    /// because the descriptor list was exhausted or a descriptor error
    /// occurred.
    pub fn is_done(&self) -> bool {
        let done_interrupts = DmaRxInterrupt::DescriptorError | DmaRxInterrupt::DescriptorEmpty;
        // "Not disjoint" means at least one of the completion flags is set.
        !self
            .m2m
            .channel
            .pending_in_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for completion, then returns the result, the driver and
    /// the finalized buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemRx<'d, M>, BUF::Final) {
        while !self.is_done() {}

        let (m2m, view) = self.release();

        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };

        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver and buffer.
    pub fn stop(self) -> (Mem2MemRx<'d, M>, BUF::Final) {
        let (mut m2m, view) = self.release();

        m2m.channel.stop_transfer();

        (m2m, BUF::from_view(view))
    }

    /// Moves both fields out of `self` without running its `Drop` impl.
    fn release(mut self) -> (Mem2MemRx<'d, M>, BUF::View) {
        let result = unsafe {
            // SAFETY: `self` is forgotten immediately below, so neither
            // field is accessed or dropped again after being taken.
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
230
231impl<M: DriverMode, BUF: DmaRxBuffer> Deref for Mem2MemRxTransfer<'_, M, BUF> {
232 type Target = BUF::View;
233
234 fn deref(&self) -> &Self::Target {
235 &self.buf_view
236 }
237}
238
239impl<M: DriverMode, BUF: DmaRxBuffer> DerefMut for Mem2MemRxTransfer<'_, M, BUF> {
240 fn deref_mut(&mut self) -> &mut Self::Target {
241 &mut self.buf_view
242 }
243}
244
impl<M: DriverMode, BUF: DmaRxBuffer> Drop for Mem2MemRxTransfer<'_, M, BUF> {
    fn drop(&mut self) {
        // Stop the hardware before touching the buffer.
        self.m2m.channel.stop_transfer();

        let view = unsafe {
            // SAFETY: `drop` runs at most once, so each ManuallyDrop field
            // is consumed exactly once here and never touched again.
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own cleanup logic runs.
        let _ = BUF::from_view(view);
    }
}
258
/// The transmit (source) half of a [`Mem2Mem`] driver.
pub struct Mem2MemTx<'d, Dm: DriverMode> {
    channel: ChannelTx<Dm, Mem2MemTxChannel<'d>>,
    // Peripheral id used when preparing transfers on this channel.
    peripheral: DmaPeripheral,
}
264
265impl<'d> Mem2MemTx<'d, Blocking> {
266 pub fn into_async(self) -> Mem2MemTx<'d, Async> {
268 Mem2MemTx {
269 channel: self.channel.into_async(),
270 peripheral: self.peripheral,
271 }
272 }
273}
274
275impl<'d, Dm: DriverMode> Mem2MemTx<'d, Dm> {
276 pub fn send<BUF>(
278 mut self,
279 mut buf: BUF,
280 ) -> Result<Mem2MemTxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
281 where
282 BUF: DmaTxBuffer,
283 {
284 let result = unsafe {
285 self.channel
286 .prepare_transfer(self.peripheral, &mut buf)
287 .and_then(|_| self.channel.start_transfer())
288 };
289
290 if let Err(e) = result {
291 return Err((e, self, buf));
292 }
293
294 Ok(Mem2MemTxTransfer {
295 m2m: ManuallyDrop::new(self),
296 buf_view: ManuallyDrop::new(buf.into_view()),
297 })
298 }
299}
300
/// An in-progress memory-to-memory transmit transfer.
///
/// Dropping this handle stops the transfer and releases the buffer.
pub struct Mem2MemTxTransfer<'d, Dm: DriverMode, BUF: DmaTxBuffer> {
    // ManuallyDrop lets `release`/`drop` move each field out by value
    // exactly once without running its destructor twice.
    m2m: ManuallyDrop<Mem2MemTx<'d, Dm>>,
    buf_view: ManuallyDrop<BUF::View>,
}
307
impl<'d, Dm: DriverMode, BUF: DmaTxBuffer> Mem2MemTxTransfer<'d, Dm, BUF> {
    /// Returns true once the transfer has stopped making progress, either
    /// because the final descriptor's EOF was reached or a descriptor error
    /// occurred.
    pub fn is_done(&self) -> bool {
        let done_interrupts = DmaTxInterrupt::DescriptorError | DmaTxInterrupt::TotalEof;
        // "Not disjoint" means at least one of the completion flags is set.
        !self
            .m2m
            .channel
            .pending_out_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for completion, then returns the result, the driver and
    /// the finalized buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemTx<'d, Dm>, BUF::Final) {
        while !self.is_done() {}

        let (m2m, view) = self.release();

        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };

        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver and buffer.
    pub fn stop(self) -> (Mem2MemTx<'d, Dm>, BUF::Final) {
        let (mut m2m, view) = self.release();

        m2m.channel.stop_transfer();

        (m2m, BUF::from_view(view))
    }

    /// Moves both fields out of `self` without running its `Drop` impl.
    fn release(mut self) -> (Mem2MemTx<'d, Dm>, BUF::View) {
        let result = unsafe {
            // SAFETY: `self` is forgotten immediately below, so neither
            // field is accessed or dropped again after being taken.
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
355
356impl<Dm: DriverMode, BUF: DmaTxBuffer> Deref for Mem2MemTxTransfer<'_, Dm, BUF> {
357 type Target = BUF::View;
358
359 fn deref(&self) -> &Self::Target {
360 &self.buf_view
361 }
362}
363
364impl<Dm: DriverMode, BUF: DmaTxBuffer> DerefMut for Mem2MemTxTransfer<'_, Dm, BUF> {
365 fn deref_mut(&mut self) -> &mut Self::Target {
366 &mut self.buf_view
367 }
368}
369
impl<Dm: DriverMode, BUF: DmaTxBuffer> Drop for Mem2MemTxTransfer<'_, Dm, BUF> {
    fn drop(&mut self) {
        // Stop the hardware before touching the buffer.
        self.m2m.channel.stop_transfer();

        let view = unsafe {
            // SAFETY: `drop` runs at most once, so each ManuallyDrop field
            // is consumed exactly once here and never touched again.
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own cleanup logic runs.
        let _ = BUF::from_view(view);
    }
}
383
/// Convenience wrapper around [`Mem2Mem`] that manages descriptor lists
/// and buffer construction for paired RX/TX transfers.
pub struct SimpleMem2Mem<'d, Dm: DriverMode> {
    // Tracks whether the driver currently owns the channel and descriptors
    // or an active transfer does.
    state: State<'d, Dm>,
    config: BurstConfig,
}
390
// Ownership state of a SimpleMem2Mem: either the driver holds the channel
// and descriptor lists, or an active transfer does.
enum State<'d, Dm: DriverMode> {
    /// Ready for a new transfer: driver plus RX and TX descriptor lists.
    Idle(
        Mem2Mem<'d, Dm>,
        &'d mut [DmaDescriptor],
        &'d mut [DmaDescriptor],
    ),
    /// A transfer is running; all resources live inside the handles.
    Active(
        Mem2MemRxTransfer<'d, Dm, DmaRxBuf>,
        Mem2MemTxTransfer<'d, Dm, DmaTxBuf>,
    ),
    /// Transient marker used while moving between the other states.
    InUse,
}
403
404impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
405 pub fn new(
407 mem2mem: Mem2Mem<'d, Dm>,
408 rx_descriptors: &'d mut [DmaDescriptor],
409 tx_descriptors: &'d mut [DmaDescriptor],
410 config: BurstConfig,
411 ) -> Result<Self, DmaError> {
412 if rx_descriptors.is_empty() || tx_descriptors.is_empty() {
413 return Err(DmaError::OutOfDescriptors);
414 }
415 Ok(Self {
416 state: State::Idle(mem2mem, rx_descriptors, tx_descriptors),
417 config,
418 })
419 }
420}
421
impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
    /// Starts copying `tx_buffer` into `rx_buffer` via DMA.
    ///
    /// Returns a transfer handle that borrows `self`; dropping the handle
    /// stops the transfer and returns the resources to this driver.
    ///
    /// # Panics
    ///
    /// Panics if a previous transfer handle was leaked (state is not
    /// `Idle`).
    pub fn start_transfer(
        &mut self,
        rx_buffer: &mut [u8],
        tx_buffer: &[u8],
    ) -> Result<SimpleMem2MemTransfer<'_, 'd, Dm>, DmaError> {
        let State::Idle(mem2mem, rx_descriptors, tx_descriptors) =
            core::mem::replace(&mut self.state, State::InUse)
        else {
            panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
        };

        // SAFETY(review): these re-borrows erase the caller's lifetimes so
        // the slices can be stored in the 'd-lifetime DMA buffer types.
        // Soundness appears to rely on the returned handle borrowing `self`
        // and stopping the DMA on drop. Note `tx_buffer` is re-created as a
        // mutable slice from a shared one — presumably DmaTxBuf never
        // writes through it; TODO confirm upstream.
        let rx_buffer =
            unsafe { core::slice::from_raw_parts_mut(rx_buffer.as_mut_ptr(), rx_buffer.len()) };
        let tx_buffer =
            unsafe { core::slice::from_raw_parts_mut(tx_buffer.as_ptr() as _, tx_buffer.len()) };
        let rx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(rx_descriptors.as_mut_ptr(), rx_descriptors.len())
        };
        let tx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(tx_descriptors.as_mut_ptr(), tx_descriptors.len())
        };

        let dma_rx_buf = unwrap!(
            DmaRxBuf::new_with_config(rx_descriptors, rx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );

        let rx = match mem2mem.rx.receive(dma_rx_buf) {
            Ok(rx) => rx,
            Err((err, rx, buf)) => {
                // Restore the Idle state so the driver stays usable.
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(
                    Mem2Mem { rx, tx: mem2mem.tx },
                    rx_descriptors,
                    tx_descriptors,
                );
                return Err(err);
            }
        };

        let dma_tx_buf = unwrap!(
            DmaTxBuf::new_with_config(tx_descriptors, tx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );

        let tx = match mem2mem.tx.send(dma_tx_buf) {
            Ok(tx) => tx,
            Err((err, tx, buf)) => {
                // TX failed after RX already started: stop RX, recover the
                // descriptors from both buffers, then restore Idle.
                let (tx_descriptors, _tx_buffer) = buf.split();
                let (rx, buf) = rx.stop();
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
                return Err(err);
            }
        };

        self.state = State::Active(rx, tx);

        Ok(SimpleMem2MemTransfer(self))
    }
}
492
493impl<Dm: DriverMode> Drop for SimpleMem2Mem<'_, Dm> {
494 fn drop(&mut self) {
495 if !matches!(&mut self.state, State::Idle(_, _, _)) {
496 panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
497 }
498 }
499}
500
501pub struct SimpleMem2MemTransfer<'a, 'd, Dm: DriverMode>(&'a mut SimpleMem2Mem<'d, Dm>);
504
505impl<Dm: DriverMode> SimpleMem2MemTransfer<'_, '_, Dm> {
506 pub fn is_done(&self) -> bool {
508 let State::Active(rx, tx) = &self.0.state else {
509 unreachable!()
510 };
511
512 tx.is_done()
515 && rx
516 .m2m
517 .channel
518 .pending_in_interrupts()
519 .contains(DmaRxInterrupt::SuccessfulEof)
520 }
521
522 pub fn wait(self) -> Result<(), DmaError> {
524 while !self.is_done() {}
525 Ok(())
526 }
527}
528
impl<Dm: DriverMode> Drop for SimpleMem2MemTransfer<'_, '_, Dm> {
    fn drop(&mut self) {
        // A transfer handle only exists while the parent state is Active.
        let State::Active(rx, tx) = core::mem::replace(&mut self.0.state, State::InUse) else {
            unreachable!()
        };

        // Stop both halves of the transfer and get the buffers back.
        let (tx, dma_tx_buf) = tx.stop();
        let (rx, dma_rx_buf) = rx.stop();

        // Recover the descriptor lists so the parent driver can be reused.
        let (tx_descriptors, _tx_buffer) = dma_tx_buf.split();
        let (rx_descriptors, _rx_buffer) = dma_rx_buf.split();

        self.0.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
    }
}
543}