1use core::{
2 mem::ManuallyDrop,
3 ops::{Deref, DerefMut},
4};
5
6#[cfg(not(esp32s2))]
7use crate::dma::{
8 AnyGdmaChannel,
9 AnyGdmaRxChannel,
10 AnyGdmaTxChannel,
11 DmaChannelConvert,
12 DmaEligible,
13};
14use crate::{
15 Async,
16 Blocking,
17 DriverMode,
18 dma::{
19 BurstConfig,
20 Channel,
21 ChannelRx,
22 ChannelTx,
23 DmaDescriptor,
24 DmaError,
25 DmaPeripheral,
26 DmaRxBuf,
27 DmaRxBuffer,
28 DmaRxInterrupt,
29 DmaTxBuf,
30 DmaTxBuffer,
31 DmaTxInterrupt,
32 },
33};
34#[cfg(esp32s2)]
35use crate::{
36 dma::{CopyDmaRxChannel, CopyDmaTxChannel},
37 peripherals::DMA_COPY,
38};
39
/// DMA memory-to-memory copy driver.
///
/// The driver is split into independent receive and transmit halves so the
/// two directions of the copy can be driven (and awaited) separately.
pub struct Mem2Mem<'d, Dm>
where
    Dm: DriverMode,
{
    /// The receive half: writes incoming DMA data into memory.
    pub rx: Mem2MemRx<'d, Dm>,
    /// The transmit half: reads outgoing DMA data from memory.
    pub tx: Mem2MemTx<'d, Dm>,
}
54
impl<'d> Mem2Mem<'d, Blocking> {
    /// Creates a new blocking memory-to-memory driver from a GDMA channel.
    ///
    /// Taking the peripheral driver by value is what makes this constructor
    /// safe: it guarantees the peripheral's DMA trigger is not concurrently
    /// used by another driver.
    #[cfg(not(esp32s2))]
    pub fn new(
        channel: impl DmaChannelConvert<AnyGdmaChannel<'d>>,
        peripheral: impl DmaEligible,
    ) -> Self {
        unsafe { Self::new_unsafe(channel, peripheral.dma_peripheral()) }
    }

    /// Creates a new blocking memory-to-memory driver from a GDMA channel
    /// and a raw [`DmaPeripheral`] selector.
    ///
    /// # Safety
    ///
    /// The caller must ensure `peripheral` is not simultaneously used for
    /// DMA elsewhere. (NOTE(review): the exact exclusivity contract should
    /// be confirmed against the DMA module documentation.)
    #[cfg(not(esp32s2))]
    pub unsafe fn new_unsafe(
        channel: impl DmaChannelConvert<AnyGdmaChannel<'d>>,
        peripheral: DmaPeripheral,
    ) -> Self {
        let mut channel = Channel::new(channel.degrade());

        // Enable memory-to-memory mode on the RX half of the channel.
        channel.rx.set_mem2mem_mode(true);

        Mem2Mem {
            rx: Mem2MemRx {
                channel: channel.rx,
                peripheral,
            },
            tx: Mem2MemTx {
                channel: channel.tx,
                peripheral,
            },
        }
    }

    /// Creates a new blocking memory-to-memory driver using the dedicated
    /// copy DMA peripheral.
    #[cfg(esp32s2)]
    pub fn new(channel: DMA_COPY<'d>) -> Self {
        let channel = Channel::new(channel);

        // NOTE(review): the copy DMA is driven with the SPI2 peripheral
        // select value here — confirm against the reference manual.
        let peripheral = DmaPeripheral::Spi2;

        Mem2Mem {
            rx: Mem2MemRx {
                channel: channel.rx,
                peripheral,
            },
            tx: Mem2MemTx {
                channel: channel.tx,
                peripheral,
            },
        }
    }

    /// Wraps this driver in a [`SimpleMem2Mem`] helper that owns and
    /// manages the given descriptor lists.
    ///
    /// Returns an error if either descriptor list is empty.
    pub fn with_descriptors(
        self,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        config: BurstConfig,
    ) -> Result<SimpleMem2Mem<'d, Blocking>, DmaError> {
        SimpleMem2Mem::new(self, rx_descriptors, tx_descriptors, config)
    }

    /// Converts both halves of the driver into async mode.
    pub fn into_async(self) -> Mem2Mem<'d, Async> {
        Mem2Mem {
            rx: self.rx.into_async(),
            tx: self.tx.into_async(),
        }
    }
}
131
/// The receive half of a [`Mem2Mem`] driver.
pub struct Mem2MemRx<'d, Dm: DriverMode> {
    #[cfg(not(esp32s2))]
    channel: ChannelRx<Dm, AnyGdmaRxChannel<'d>>,
    #[cfg(esp32s2)]
    channel: ChannelRx<Dm, CopyDmaRxChannel<'d>>,
    // DMA peripheral selector passed to `prepare_transfer`.
    peripheral: DmaPeripheral,
}
140
141impl<'d> Mem2MemRx<'d, Blocking> {
142 pub fn into_async(self) -> Mem2MemRx<'d, Async> {
144 Mem2MemRx {
145 channel: self.channel.into_async(),
146 peripheral: self.peripheral,
147 }
148 }
149}
150
impl<'d, Dm> Mem2MemRx<'d, Dm>
where
    Dm: DriverMode,
{
    /// Starts a DMA transfer that receives data into `buf`.
    ///
    /// Consumes the driver half; it is handed back by
    /// [`Mem2MemRxTransfer::wait`] / [`Mem2MemRxTransfer::stop`], or in the
    /// error tuple if the transfer could not be started.
    pub fn receive<BUF>(
        mut self,
        mut buf: BUF,
    ) -> Result<Mem2MemRxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
    where
        BUF: DmaRxBuffer,
    {
        // SAFETY: `buf` stays alive (as the view stored in the returned
        // transfer guard) until the transfer is stopped or completes, so the
        // DMA engine never writes into freed memory.
        let result = unsafe {
            self.channel
                .prepare_transfer(self.peripheral, &mut buf)
                .and_then(|_| self.channel.start_transfer())
        };

        // Starting failed: return everything to the caller for reuse.
        if let Err(e) = result {
            return Err((e, self, buf));
        }

        // ManuallyDrop lets `release()`/`Drop` decide exactly once how the
        // driver and the buffer view are torn down.
        Ok(Mem2MemRxTransfer {
            m2m: ManuallyDrop::new(self),
            buf_view: ManuallyDrop::new(buf.into_view()),
        })
    }
}
179
/// An in-progress memory-to-memory receive transfer.
///
/// Dropping this stops the transfer and releases the buffer.
pub struct Mem2MemRxTransfer<'d, M: DriverMode, BUF: DmaRxBuffer> {
    // ManuallyDrop so `release()` can move the fields out while `Drop`
    // covers the plain-drop path — each field is released exactly once.
    m2m: ManuallyDrop<Mem2MemRx<'d, M>>,
    buf_view: ManuallyDrop<BUF::View>,
}
186
impl<'d, M: DriverMode, BUF: DmaRxBuffer> Mem2MemRxTransfer<'d, M, BUF> {
    /// Returns whether the transfer has finished, successfully or with a
    /// descriptor error.
    pub fn is_done(&self) -> bool {
        // Done once either interrupt is pending, i.e. the pending set is
        // *not* disjoint from `done_interrupts`.
        let done_interrupts = DmaRxInterrupt::DescriptorError | DmaRxInterrupt::DescriptorEmpty;
        !self
            .m2m
            .channel
            .pending_in_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for the transfer to finish, then returns the result along
    /// with the driver half and the reassembled buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemRx<'d, M>, BUF) {
        while !self.is_done() {}

        let (m2m, view) = self.release();

        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };

        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver half and the buffer.
    pub fn stop(self) -> (Mem2MemRx<'d, M>, BUF) {
        let (mut m2m, view) = self.release();

        m2m.channel.stop_transfer();

        (m2m, BUF::from_view(view))
    }

    /// Moves the driver and buffer view out of their `ManuallyDrop`
    /// wrappers without running this type's `Drop`.
    fn release(mut self) -> (Mem2MemRx<'d, M>, BUF::View) {
        // SAFETY: each field is taken exactly once, and `mem::forget` below
        // guarantees `Drop` never observes the emptied wrappers.
        let result = unsafe {
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
234
// Read access to the in-flight buffer view while the transfer runs.
impl<M: DriverMode, BUF: DmaRxBuffer> Deref for Mem2MemRxTransfer<'_, M, BUF> {
    type Target = BUF::View;

    fn deref(&self) -> &Self::Target {
        &self.buf_view
    }
}
242
// Mutable access to the in-flight buffer view while the transfer runs.
impl<M: DriverMode, BUF: DmaRxBuffer> DerefMut for Mem2MemRxTransfer<'_, M, BUF> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.buf_view
    }
}
248
impl<M: DriverMode, BUF: DmaRxBuffer> Drop for Mem2MemRxTransfer<'_, M, BUF> {
    fn drop(&mut self) {
        // Stop the hardware before releasing the buffer it writes into.
        self.m2m.channel.stop_transfer();

        // SAFETY: `drop` runs at most once, and `release()` forgets `self`
        // before it could reach here, so each field is released exactly once.
        let view = unsafe {
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own drop logic runs.
        let _ = BUF::from_view(view);
    }
}
262
/// The transmit half of a [`Mem2Mem`] driver.
pub struct Mem2MemTx<'d, Dm: DriverMode> {
    #[cfg(not(esp32s2))]
    channel: ChannelTx<Dm, AnyGdmaTxChannel<'d>>,
    #[cfg(esp32s2)]
    channel: ChannelTx<Dm, CopyDmaTxChannel<'d>>,
    // DMA peripheral selector passed to `prepare_transfer`.
    peripheral: DmaPeripheral,
}
271
272impl<'d> Mem2MemTx<'d, Blocking> {
273 pub fn into_async(self) -> Mem2MemTx<'d, Async> {
275 Mem2MemTx {
276 channel: self.channel.into_async(),
277 peripheral: self.peripheral,
278 }
279 }
280}
281
impl<'d, Dm: DriverMode> Mem2MemTx<'d, Dm> {
    /// Starts a DMA transfer that sends the contents of `buf`.
    ///
    /// Consumes the driver half; it is handed back by
    /// [`Mem2MemTxTransfer::wait`] / [`Mem2MemTxTransfer::stop`], or in the
    /// error tuple if the transfer could not be started.
    pub fn send<BUF>(
        mut self,
        mut buf: BUF,
    ) -> Result<Mem2MemTxTransfer<'d, Dm, BUF>, (DmaError, Self, BUF)>
    where
        BUF: DmaTxBuffer,
    {
        // SAFETY: `buf` stays alive (as the view stored in the returned
        // transfer guard) until the transfer is stopped or completes, so the
        // DMA engine never reads from freed memory.
        let result = unsafe {
            self.channel
                .prepare_transfer(self.peripheral, &mut buf)
                .and_then(|_| self.channel.start_transfer())
        };

        // Starting failed: return everything to the caller for reuse.
        if let Err(e) = result {
            return Err((e, self, buf));
        }

        // ManuallyDrop lets `release()`/`Drop` decide exactly once how the
        // driver and the buffer view are torn down.
        Ok(Mem2MemTxTransfer {
            m2m: ManuallyDrop::new(self),
            buf_view: ManuallyDrop::new(buf.into_view()),
        })
    }
}
307
/// An in-progress memory-to-memory transmit transfer.
///
/// Dropping this stops the transfer and releases the buffer.
pub struct Mem2MemTxTransfer<'d, Dm: DriverMode, BUF: DmaTxBuffer> {
    // ManuallyDrop so `release()` can move the fields out while `Drop`
    // covers the plain-drop path — each field is released exactly once.
    m2m: ManuallyDrop<Mem2MemTx<'d, Dm>>,
    buf_view: ManuallyDrop<BUF::View>,
}
314
impl<'d, Dm: DriverMode, BUF: DmaTxBuffer> Mem2MemTxTransfer<'d, Dm, BUF> {
    /// Returns whether the transfer has finished, successfully or with a
    /// descriptor error.
    pub fn is_done(&self) -> bool {
        // Done once either interrupt is pending, i.e. the pending set is
        // *not* disjoint from `done_interrupts`.
        let done_interrupts = DmaTxInterrupt::DescriptorError | DmaTxInterrupt::TotalEof;
        !self
            .m2m
            .channel
            .pending_out_interrupts()
            .is_disjoint(done_interrupts)
    }

    /// Busy-waits for the transfer to finish, then returns the result along
    /// with the driver half and the reassembled buffer.
    pub fn wait(self) -> (Result<(), DmaError>, Mem2MemTx<'d, Dm>, BUF) {
        while !self.is_done() {}

        let (m2m, view) = self.release();

        let result = if m2m.channel.has_error() {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        };

        (result, m2m, BUF::from_view(view))
    }

    /// Stops the transfer early and returns the driver half and the buffer.
    pub fn stop(self) -> (Mem2MemTx<'d, Dm>, BUF) {
        let (mut m2m, view) = self.release();

        m2m.channel.stop_transfer();

        (m2m, BUF::from_view(view))
    }

    /// Moves the driver and buffer view out of their `ManuallyDrop`
    /// wrappers without running this type's `Drop`.
    fn release(mut self) -> (Mem2MemTx<'d, Dm>, BUF::View) {
        // SAFETY: each field is taken exactly once, and `mem::forget` below
        // guarantees `Drop` never observes the emptied wrappers.
        let result = unsafe {
            let m2m = ManuallyDrop::take(&mut self.m2m);
            let view = ManuallyDrop::take(&mut self.buf_view);
            (m2m, view)
        };
        core::mem::forget(self);
        result
    }
}
362
// Read access to the in-flight buffer view while the transfer runs.
impl<Dm: DriverMode, BUF: DmaTxBuffer> Deref for Mem2MemTxTransfer<'_, Dm, BUF> {
    type Target = BUF::View;

    fn deref(&self) -> &Self::Target {
        &self.buf_view
    }
}
370
// Mutable access to the in-flight buffer view while the transfer runs.
impl<Dm: DriverMode, BUF: DmaTxBuffer> DerefMut for Mem2MemTxTransfer<'_, Dm, BUF> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.buf_view
    }
}
376
impl<Dm: DriverMode, BUF: DmaTxBuffer> Drop for Mem2MemTxTransfer<'_, Dm, BUF> {
    fn drop(&mut self) {
        // Stop the hardware before releasing the buffer it reads from.
        self.m2m.channel.stop_transfer();

        // SAFETY: `drop` runs at most once, and `release()` forgets `self`
        // before it could reach here, so each field is released exactly once.
        let view = unsafe {
            ManuallyDrop::drop(&mut self.m2m);
            ManuallyDrop::take(&mut self.buf_view)
        };
        // Reassemble the buffer so its own drop logic runs.
        let _ = BUF::from_view(view);
    }
}
390
/// A convenience wrapper around [`Mem2Mem`] that owns the DMA descriptor
/// lists and hands out scoped transfer guards.
pub struct SimpleMem2Mem<'d, Dm: DriverMode> {
    state: State<'d, Dm>,
    // Burst configuration applied to both DMA buffers of each transfer.
    config: BurstConfig,
}
397
/// Internal state machine for [`SimpleMem2Mem`].
enum State<'d, Dm: DriverMode> {
    /// No transfer running; holds the driver plus the RX and TX descriptor
    /// lists (in that order).
    Idle(
        Mem2Mem<'d, Dm>,
        &'d mut [DmaDescriptor],
        &'d mut [DmaDescriptor],
    ),
    /// A transfer is running; holds both in-flight transfer guards.
    Active(
        Mem2MemRxTransfer<'d, Dm, DmaRxBuf>,
        Mem2MemTxTransfer<'d, Dm, DmaTxBuf>,
    ),
    /// Placeholder while ownership of the other variants' contents has been
    /// temporarily moved out (e.g. during `start_transfer`).
    InUse,
}
410
411impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
412 pub fn new(
414 mem2mem: Mem2Mem<'d, Dm>,
415 rx_descriptors: &'d mut [DmaDescriptor],
416 tx_descriptors: &'d mut [DmaDescriptor],
417 config: BurstConfig,
418 ) -> Result<Self, DmaError> {
419 if rx_descriptors.is_empty() || tx_descriptors.is_empty() {
420 return Err(DmaError::OutOfDescriptors);
421 }
422 Ok(Self {
423 state: State::Idle(mem2mem, rx_descriptors, tx_descriptors),
424 config,
425 })
426 }
427}
428
impl<'d, Dm: DriverMode> SimpleMem2Mem<'d, Dm> {
    /// Starts copying `tx_buffer` into `rx_buffer` using DMA.
    ///
    /// Returns a transfer guard whose `Drop` stops the transfer and restores
    /// this object to the idle state.
    ///
    /// # Panics
    ///
    /// Panics if a previous transfer guard was leaked (e.g. with
    /// [`core::mem::forget`]), because the driver and descriptors cannot be
    /// recovered in that case.
    pub fn start_transfer(
        &mut self,
        rx_buffer: &mut [u8],
        tx_buffer: &[u8],
    ) -> Result<SimpleMem2MemTransfer<'_, 'd, Dm>, DmaError> {
        let State::Idle(mem2mem, rx_descriptors, tx_descriptors) =
            core::mem::replace(&mut self.state, State::InUse)
        else {
            panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
        };

        // SAFETY: these re-borrows erase the caller's lifetimes because the
        // DMA buffer constructors below want longer-lived slices. The raw
        // slices are only reachable while the transfer guard (tied to
        // `&mut self`) exists, and the guard's Drop stops the hardware, so
        // the DMA never outlives the caller's borrows — unless the guard is
        // leaked, which the panic above and SimpleMem2Mem's Drop detect.
        // NOTE(review): `tx_buffer` is cast to a mutable slice to satisfy
        // the DmaTxBuf API; the DMA should only read from it — confirm.
        let rx_buffer =
            unsafe { core::slice::from_raw_parts_mut(rx_buffer.as_mut_ptr(), rx_buffer.len()) };
        let tx_buffer =
            unsafe { core::slice::from_raw_parts_mut(tx_buffer.as_ptr() as _, tx_buffer.len()) };
        let rx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(rx_descriptors.as_mut_ptr(), rx_descriptors.len())
        };
        let tx_descriptors = unsafe {
            core::slice::from_raw_parts_mut(tx_descriptors.as_mut_ptr(), tx_descriptors.len())
        };

        let dma_rx_buf = unwrap!(
            DmaRxBuf::new_with_config(rx_descriptors, rx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );

        // If starting the RX side fails, reassemble the idle state so the
        // driver and descriptor lists are not lost.
        let rx = match mem2mem.rx.receive(dma_rx_buf) {
            Ok(rx) => rx,
            Err((err, rx, buf)) => {
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(
                    Mem2Mem { rx, tx: mem2mem.tx },
                    rx_descriptors,
                    tx_descriptors,
                );
                return Err(err);
            }
        };

        let dma_tx_buf = unwrap!(
            DmaTxBuf::new_with_config(tx_descriptors, tx_buffer, self.config),
            "There's no way to get the descriptors back yet"
        );

        // If starting the TX side fails, also stop the already-running RX
        // side before reassembling the idle state.
        let tx = match mem2mem.tx.send(dma_tx_buf) {
            Ok(tx) => tx,
            Err((err, tx, buf)) => {
                let (tx_descriptors, _tx_buffer) = buf.split();
                let (rx, buf) = rx.stop();
                let (rx_descriptors, _rx_buffer) = buf.split();
                self.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
                return Err(err);
            }
        };

        self.state = State::Active(rx, tx);

        Ok(SimpleMem2MemTransfer(self))
    }
}
499
500impl<Dm: DriverMode> Drop for SimpleMem2Mem<'_, Dm> {
501 fn drop(&mut self) {
502 if !matches!(&mut self.state, State::Idle(_, _, _)) {
503 panic!("SimpleMem2MemTransfer was forgotten with core::mem::forget or similar");
504 }
505 }
506}
507
/// Guard for an active [`SimpleMem2Mem`] transfer; dropping it stops the
/// transfer and returns the driver to the idle state.
pub struct SimpleMem2MemTransfer<'a, 'd, Dm: DriverMode>(&'a mut SimpleMem2Mem<'d, Dm>);
511
impl<Dm: DriverMode> SimpleMem2MemTransfer<'_, '_, Dm> {
    /// Returns whether both halves of the copy have completed.
    pub fn is_done(&self) -> bool {
        // The guard only exists while the state is Active.
        let State::Active(rx, tx) = &self.0.state else {
            unreachable!()
        };

        // Done when the TX side has finished and the RX side reports a
        // successful EOF for the received data.
        tx.is_done()
            && rx
                .m2m
                .channel
                .pending_in_interrupts()
                .contains(DmaRxInterrupt::SuccessfulEof)
    }

    /// Busy-waits until the transfer completes.
    ///
    /// NOTE(review): this always returns `Ok(())` — DMA errors are never
    /// surfaced here, and an RX-side descriptor error would not set
    /// `SuccessfulEof`, so `is_done()` might never become true in that
    /// case. Confirm whether error propagation / termination is intended.
    pub fn wait(self) -> Result<(), DmaError> {
        while !self.is_done() {}
        Ok(())
    }
}
535
impl<Dm: DriverMode> Drop for SimpleMem2MemTransfer<'_, '_, Dm> {
    fn drop(&mut self) {
        // Take the active transfers out; `State::InUse` is a placeholder
        // that is overwritten below.
        let State::Active(rx, tx) = core::mem::replace(&mut self.0.state, State::InUse) else {
            unreachable!()
        };

        // Stop both halves and recover the descriptor lists so the parent
        // SimpleMem2Mem can be reused.
        let (tx, dma_tx_buf) = tx.stop();
        let (rx, dma_rx_buf) = rx.stop();

        let (tx_descriptors, _tx_buffer) = dma_tx_buf.split();
        let (rx_descriptors, _rx_buffer) = dma_rx_buf.split();

        self.0.state = State::Idle(Mem2Mem { rx, tx }, rx_descriptors, tx_descriptors);
    }
}