use crate::{
    dma::{
        dma_private::{DmaSupport, DmaSupportRx},
        AnyGdmaChannel,
        AnyGdmaRxChannel,
        Channel,
        ChannelRx,
        DescriptorChain,
        DmaChannelConvert,
        DmaDescriptor,
        DmaEligible,
        DmaError,
        DmaPeripheral,
        DmaTransferRx,
        ReadBuffer,
        Rx,
        Tx,
        WriteBuffer,
    },
    peripheral::Peripheral,
    Async,
    Blocking,
    DriverMode,
};

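/// DMA memory-to-memory pseudo-peripheral.
///
/// Uses the GDMA engine to copy data between two memory buffers without a
/// real peripheral being involved. A minimal usage sketch follows; the
/// `dma_buffers!` macro and the `DMA_CH0`/`SPI2` peripheral names are
/// illustrative assumptions, not fixed requirements:
///
/// ```ignore
/// let (mut rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) = dma_buffers!(32000);
/// let mut mem2mem = Mem2Mem::new(
///     peripherals.DMA_CH0, // any free GDMA channel
///     peripherals.SPI2,    // an otherwise unused DMA-eligible peripheral
///     rx_descriptors,
///     tx_descriptors,
/// )
/// .unwrap();
/// let transfer = mem2mem.start_transfer(&mut rx_buffer, &tx_buffer).unwrap();
/// transfer.wait().unwrap();
/// ```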
pub struct Mem2Mem<'d, Dm>
where
    Dm: DriverMode,
{
    channel: Channel<'d, Dm, AnyGdmaChannel>,
    rx_chain: DescriptorChain,
    tx_chain: DescriptorChain,
    peripheral: DmaPeripheral,
}

impl<'d> Mem2Mem<'d, Blocking> {
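    /// Creates a new Mem2Mem instance using the default chunk size.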
    pub fn new<CH>(
        channel: impl Peripheral<P = CH> + 'd,
        peripheral: impl DmaEligible,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
    ) -> Result<Self, DmaError>
    where
        CH: DmaChannelConvert<AnyGdmaChannel>,
    {
        unsafe {
            Self::new_unsafe(
                channel,
                peripheral.dma_peripheral(),
                rx_descriptors,
                tx_descriptors,
                crate::dma::CHUNK_SIZE,
            )
        }
    }

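    /// Creates a new Mem2Mem instance with a custom chunk size.
    ///
    /// `chunk_size` is the maximum number of bytes covered by a single
    /// descriptor and must lie in `1..=4092`.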
    pub fn new_with_chunk_size<CH>(
        channel: impl Peripheral<P = CH> + 'd,
        peripheral: impl DmaEligible,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        chunk_size: usize,
    ) -> Result<Self, DmaError>
    where
        CH: DmaChannelConvert<AnyGdmaChannel>,
    {
        unsafe {
            Self::new_unsafe(
                channel,
                peripheral.dma_peripheral(),
                rx_descriptors,
                tx_descriptors,
                chunk_size,
            )
        }
    }

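    /// Creates a new Mem2Mem instance for an arbitrary [`DmaPeripheral`].
    ///
    /// # Safety
    ///
    /// The caller must ensure that the given peripheral's DMA request lines
    /// are not used by anything else for as long as this instance exists.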
    pub unsafe fn new_unsafe<CH>(
        channel: impl Peripheral<P = CH> + 'd,
        peripheral: DmaPeripheral,
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        chunk_size: usize,
    ) -> Result<Self, DmaError>
    where
        CH: DmaChannelConvert<AnyGdmaChannel>,
    {
        if !(1..=4092).contains(&chunk_size) {
            return Err(DmaError::InvalidChunkSize);
        }
        if tx_descriptors.is_empty() || rx_descriptors.is_empty() {
            return Err(DmaError::OutOfDescriptors);
        }
        Ok(Mem2Mem {
            channel: Channel::new(channel.map(|ch| ch.degrade())),
            peripheral,
            rx_chain: DescriptorChain::new_with_chunk_size(rx_descriptors, chunk_size),
            tx_chain: DescriptorChain::new_with_chunk_size(tx_descriptors, chunk_size),
        })
    }

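    /// Converts this blocking Mem2Mem instance into an async one.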
    pub fn into_async(self) -> Mem2Mem<'d, Async> {
        Mem2Mem {
            channel: self.channel.into_async(),
            rx_chain: self.rx_chain,
            tx_chain: self.tx_chain,
            peripheral: self.peripheral,
        }
    }
}

impl<Dm> Mem2Mem<'_, Dm>
where
    Dm: DriverMode,
{
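    /// Starts a memory-to-memory transfer from `tx_buffer` into `rx_buffer`.
    ///
    /// The returned [`DmaTransferRx`] can be used to wait for the transfer to
    /// complete.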
    pub fn start_transfer<'t, TXBUF, RXBUF>(
        &mut self,
        rx_buffer: &'t mut RXBUF,
        tx_buffer: &'t TXBUF,
    ) -> Result<DmaTransferRx<'_, Self>, DmaError>
    where
        TXBUF: ReadBuffer,
        RXBUF: WriteBuffer,
    {
        let (tx_ptr, tx_len) = unsafe { tx_buffer.read_buffer() };
        let (rx_ptr, rx_len) = unsafe { rx_buffer.write_buffer() };
        self.tx_chain.fill_for_tx(false, tx_ptr, tx_len)?;
        self.rx_chain.fill_for_rx(false, rx_ptr, rx_len)?;
        unsafe {
            self.channel
                .tx
                .prepare_transfer_without_start(self.peripheral, &self.tx_chain)?;
            self.channel
                .rx
                .prepare_transfer_without_start(self.peripheral, &self.rx_chain)?;
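            // Enable memory-to-memory mode so the RX channel receives what
            // the TX channel sends, instead of data from the peripheral.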
            self.channel.rx.set_mem2mem_mode(true);
        }
        self.channel.tx.start_transfer()?;
        self.channel.rx.start_transfer()?;
        Ok(DmaTransferRx::new(self))
    }
}

impl<Dm> DmaSupport for Mem2Mem<'_, Dm>
where
    Dm: DriverMode,
{
    fn peripheral_wait_dma(&mut self, _is_rx: bool, _is_tx: bool) {
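        // In a memory-to-memory transfer the RX channel finishes last, so
        // waiting on it alone is sufficient.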
        while !self.channel.rx.is_done() {}
    }

    fn peripheral_dma_stop(&mut self) {
        unreachable!("unsupported")
    }
}

impl<'d, Dm> DmaSupportRx for Mem2Mem<'d, Dm>
where
    Dm: DriverMode,
{
    type RX = ChannelRx<'d, Dm, AnyGdmaRxChannel>;

    fn rx(&mut self) -> &mut Self::RX {
        &mut self.channel.rx
    }

    fn chain(&mut self) -> &mut DescriptorChain {
        &mut self.rx_chain
    }
}