#![doc = crate::before_snippet!()]
#![cfg_attr(pdma, doc = "let dma_channel = peripherals.DMA_SPI2;")]
#![cfg_attr(gdma, doc = "let dma_channel = peripherals.DMA_CH0;")]
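// Minimal, illustrative close for the doc snippet opened by `before_snippet!`
// above; the full module-level example was not preserved in this copy.
#![doc = "let _ = dma_channel;"]
#![doc = "# Ok(())"]
#![doc = "# }"]
#![doc = "```"]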
use core::{cmp::min, fmt::Debug, marker::PhantomData, sync::atomic::compiler_fence};

use enumset::{EnumSet, EnumSetType};

pub use self::buffers::*;
#[cfg(gdma)]
pub use self::gdma::*;
#[cfg(gdma)]
pub use self::m2m::*;
#[cfg(pdma)]
pub use self::pdma::*;
use crate::{
    interrupt::InterruptHandler,
    peripheral::{Peripheral, PeripheralRef},
    peripherals::Interrupt,
    soc::{is_slice_in_dram, is_valid_memory_address, is_valid_ram_address},
    system,
    system::Cpu,
    Async,
    Blocking,
    DriverMode,
};

trait Word: crate::private::Sealed {}

macro_rules! impl_word {
    ($w:ty) => {
        impl $crate::private::Sealed for $w {}
        impl Word for $w {}
    };
}

impl_word!(u8);
impl_word!(u16);
impl_word!(u32);
impl_word!(i8);
impl_word!(i16);
impl_word!(i32);

impl<W, const S: usize> crate::private::Sealed for [W; S] where W: Word {}

impl<W, const S: usize> crate::private::Sealed for &[W; S] where W: Word {}

impl<W> crate::private::Sealed for &[W] where W: Word {}

impl<W> crate::private::Sealed for &mut [W] where W: Word {}

/// Trait for buffers that can be given to DMA for reading.
///
/// # Safety
///
/// Once the `read_buffer` method has been called, it is unsafe to call any
/// `&mut self` methods on this object as long as the returned value is in use
/// by DMA.
pub unsafe trait ReadBuffer {
    /// Provide a buffer usable for DMA reads.
    ///
    /// The return value is:
    ///
    /// - pointer to the start of the buffer
    /// - buffer size in bytes
    ///
    /// # Safety
    ///
    /// Once this method has been called, it is unsafe to call any `&mut self`
    /// methods on this object as long as the returned value is in use by DMA.
    unsafe fn read_buffer(&self) -> (*const u8, usize);
}

unsafe impl<W, const S: usize> ReadBuffer for [W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(self))
    }
}

unsafe impl<W, const S: usize> ReadBuffer for &[W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W, const S: usize> ReadBuffer for &mut [W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> ReadBuffer for &[W]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> ReadBuffer for &mut [W]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

/// Trait for buffers that can be given to DMA for writing.
///
/// # Safety
///
/// Once the `write_buffer` method has been called, it is unsafe to call any
/// `&mut self` methods, except for `write_buffer` itself, on this object as
/// long as the returned value is in use by DMA.
pub unsafe trait WriteBuffer {
    /// Provide a buffer usable for DMA writes.
    ///
    /// The return value is:
    ///
    /// - pointer to the start of the buffer
    /// - buffer size in bytes
    ///
    /// # Safety
    ///
    /// Once this method has been called, it is unsafe to call any `&mut self`
    /// methods, except for this one, on this object as long as the returned
    /// value is in use by DMA.
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize);
}

unsafe impl<W, const S: usize> WriteBuffer for [W; S]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(self))
    }
}

unsafe impl<W, const S: usize> WriteBuffer for &mut [W; S]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> WriteBuffer for &mut [W]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(*self))
    }
}

bitfield::bitfield! {
    /// DMA descriptor flags.
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub struct DmaDescriptorFlags(u32);

    u16;

    /// Specifies the size of the buffer that this descriptor points to.
    pub size, set_size: 11, 0;

    /// Specifies the number of valid bytes in the buffer that this descriptor
    /// points to (written by hardware for RX, by software for TX).
    pub length, set_length: 23, 12;

    /// Marks the end of a frame (EOF). For RX descriptors, hardware sets this
    /// bit on a successful EOF; for TX descriptors, software sets it on the
    /// last descriptor of a frame.
    pub suc_eof, set_suc_eof: 30;

    /// Specifies who is allowed to access the buffer that this descriptor
    /// points to: 0 = CPU, 1 = DMA.
    pub owner, set_owner: 31;
}

impl Debug for DmaDescriptorFlags {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("DmaDescriptorFlags")
            .field("size", &self.size())
            .field("length", &self.length())
            .field("suc_eof", &self.suc_eof())
            .field("owner", &(if self.owner() { "DMA" } else { "CPU" }))
            .finish()
    }
}

#[cfg(feature = "defmt")]
impl defmt::Format for DmaDescriptorFlags {
    fn format(&self, fmt: defmt::Formatter<'_>) {
        defmt::write!(
            fmt,
            "DmaDescriptorFlags {{ size: {}, length: {}, suc_eof: {}, owner: {} }}",
            self.size(),
            self.length(),
            self.suc_eof(),
            if self.owner() { "DMA" } else { "CPU" }
        );
    }
}

/// A DMA transfer descriptor.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(C)]
pub struct DmaDescriptor {
    /// Descriptor flags.
    pub flags: DmaDescriptorFlags,

    /// Address of the buffer.
    pub buffer: *mut u8,

    /// Address of the next descriptor.
    /// If the current descriptor is the last one, this value is 0.
    pub next: *mut DmaDescriptor,
}

impl DmaDescriptor {
    /// An empty DMA descriptor, used to initialize descriptor arrays.
    pub const EMPTY: Self = Self {
        flags: DmaDescriptorFlags(0),
        buffer: core::ptr::null_mut(),
        next: core::ptr::null_mut(),
    };

    /// Resets the descriptor for a new receive transfer.
    pub fn reset_for_rx(&mut self) {
        // Hand ownership to the DMA.
        self.set_owner(Owner::Dma);

        // Clear the EOF flag; hardware sets it when appropriate.
        self.set_suc_eof(false);

        // No data received yet.
        self.set_length(0);
    }

    /// Resets the descriptor for a new transmit transfer, optionally marking
    /// it as the end of a frame.
    pub fn reset_for_tx(&mut self, set_eof: bool) {
        // Hand ownership to the DMA.
        self.set_owner(Owner::Dma);

        // The `suc_eof` bit marks the end of a frame for transmit descriptors.
        self.set_suc_eof(set_eof);
    }

    /// Sets the size of the buffer. See [DmaDescriptorFlags::size].
    pub fn set_size(&mut self, len: usize) {
        self.flags.set_size(len as u16)
    }

    /// Sets the length of the descriptor. See [DmaDescriptorFlags::length].
    pub fn set_length(&mut self, len: usize) {
        self.flags.set_length(len as u16)
    }

    /// Returns the size of the buffer. See [DmaDescriptorFlags::size].
    pub fn size(&self) -> usize {
        self.flags.size() as usize
    }

    /// Returns the length of the descriptor. See [DmaDescriptorFlags::length].
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.flags.length() as usize
    }

    /// Sets the `suc_eof` flag. See [DmaDescriptorFlags::suc_eof].
    pub fn set_suc_eof(&mut self, suc_eof: bool) {
        self.flags.set_suc_eof(suc_eof)
    }

    /// Sets the owner of the descriptor. See [DmaDescriptorFlags::owner].
    pub fn set_owner(&mut self, owner: Owner) {
        let owner = match owner {
            Owner::Cpu => false,
            Owner::Dma => true,
        };
        self.flags.set_owner(owner)
    }

    /// Returns the owner of the descriptor. See [DmaDescriptorFlags::owner].
    pub fn owner(&self) -> Owner {
        match self.flags.owner() {
            false => Owner::Cpu,
            true => Owner::Dma,
        }
    }
}

// The raw pointers in the descriptor are only dereferenced by the driver
// while it upholds the DMA ownership rules, so sending the descriptor
// between threads is sound.
unsafe impl Send for DmaDescriptor {}

mod buffers;
#[cfg(gdma)]
mod gdma;
#[cfg(gdma)]
mod m2m;
#[cfg(pdma)]
mod pdma;

/// Convenience wrapper around the RX and TX interrupts of a channel.
#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaInterrupt {
    /// RX is done
    RxDone,
    /// TX is done
    TxDone,
}

/// Types of interrupts emitted by the TX channel.
#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaTxInterrupt {
    /// Triggered when all data corresponding to a linked list (including
    /// multiple descriptors) has been sent.
    TotalEof,

    /// Triggered when an error is detected in a transmit descriptor.
    DescriptorError,

    /// Triggered when EOF in a transmit descriptor is true and the data
    /// corresponding to this descriptor has been sent.
    Eof,

    /// Triggered when all data corresponding to a transmit descriptor has
    /// been sent.
    Done,
}

/// Types of interrupts emitted by the RX channel.
#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaRxInterrupt {
    /// Triggered when the size of the buffer pointed to by the receive
    /// descriptors is smaller than the length of data to be received.
    DescriptorEmpty,

    /// Triggered when an error is detected in a receive descriptor.
    DescriptorError,

    /// Triggered when an erroneous EOF is detected in the received data.
    ErrorEof,

    /// Triggered when the `suc_eof` bit of a receive descriptor is set and
    /// the data corresponding to this descriptor has been received.
    SuccessfulEof,

    /// Triggered when all data corresponding to a receive descriptor has
    /// been received.
    Done,
}

/// The default chunk size used for DMA transfers.
pub const CHUNK_SIZE: usize = 4092;

/// Convenience macro to create DMA buffers and descriptors, using the default
/// chunk size (`CHUNK_SIZE`, 4092 bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
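/// // A sketch of typical usage; sizes and variable names are illustrative.
/// // Passing a single size makes RX and TX the same size.
/// let (rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) =
///     esp_hal::dma_buffers!(32000, 32000);
/// # Ok(())
/// # }
/// ```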
#[macro_export]
macro_rules! dma_buffers {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_buffers_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };
    ($size:expr) => {
        $crate::dma_buffers_chunk_size!($size, $crate::dma::CHUNK_SIZE)
    };
}

/// Convenience macro to create circular DMA buffers and descriptors, using
/// the default chunk size (`CHUNK_SIZE`, 4092 bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
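/// // Illustrative sketch: 32000-byte circular RX and TX buffers.
/// let (rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) =
///     esp_hal::dma_circular_buffers!(32000, 32000);
/// # Ok(())
/// # }
/// ```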
#[macro_export]
macro_rules! dma_circular_buffers {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_circular_buffers_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_circular_buffers_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

/// Convenience macro to create DMA descriptors, using the default chunk size
/// (`CHUNK_SIZE`, 4092 bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
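/// // Illustrative sketch: descriptors for 32000-byte RX and TX buffers.
/// let (rx_descriptors, tx_descriptors) = esp_hal::dma_descriptors!(32000, 32000);
/// # Ok(())
/// # }
/// ```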
#[macro_export]
macro_rules! dma_descriptors {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_descriptors_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_descriptors_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

/// Convenience macro to create circular DMA descriptors, using the default
/// chunk size (`CHUNK_SIZE`, 4092 bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
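/// // Illustrative sketch: circular descriptors for 32000-byte buffers.
/// let (rx_descriptors, tx_descriptors) =
///     esp_hal::dma_circular_descriptors!(32000, 32000);
/// # Ok(())
/// # }
/// ```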
#[macro_export]
macro_rules! dma_circular_descriptors {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! declare_aligned_dma_buffer {
    ($name:ident, $size:expr) => {
        // Declare a `u32` array to ensure word alignment, sized to hold
        // `$size` bytes.
        static mut $name: [u32; ($size + 3) / 4] = [0; ($size + 3) / 4];
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! as_mut_byte_array {
    ($name:expr, $size:expr) => {
        unsafe { &mut *($name.as_mut_ptr() as *mut [u8; $size]) }
    };
}
pub use as_mut_byte_array;

/// Convenience macro to create DMA buffers and descriptors with a custom
/// chunk size.
///
/// ## Usage
#[doc = crate::before_snippet!()]
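/// // Illustrative sketch: 32000-byte buffers split into 4032-byte chunks.
/// let (rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) =
///     esp_hal::dma_buffers_chunk_size!(32000, 32000, 4032);
/// # Ok(())
/// # }
/// ```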
#[macro_export]
macro_rules! dma_buffers_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_buffers_impl!($rx_size, $tx_size, $chunk_size, is_circular = false)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_buffers_chunk_size!($size, $size, $chunk_size)
    };
}

/// Convenience macro to create circular DMA buffers and descriptors with a
/// custom chunk size.
///
/// ## Usage
#[doc = crate::before_snippet!()]
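/// // Illustrative sketch: circular buffers with 4032-byte chunks.
/// let (rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) =
///     esp_hal::dma_circular_buffers_chunk_size!(32000, 32000, 4032);
/// # Ok(())
/// # }
/// ```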
#[macro_export]
macro_rules! dma_circular_buffers_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_buffers_impl!($rx_size, $tx_size, $chunk_size, is_circular = true)
    }};

    ($size:expr, $chunk_size:expr) => {{
        $crate::dma_circular_buffers_chunk_size!($size, $size, $chunk_size)
    }};
}

/// Convenience macro to create DMA descriptors with a custom chunk size.
///
/// ## Usage
#[doc = crate::before_snippet!()]
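/// // Illustrative sketch: descriptors for 4032-byte chunks.
/// let (rx_descriptors, tx_descriptors) =
///     esp_hal::dma_descriptors_chunk_size!(32000, 32000, 4032);
/// # Ok(())
/// # }
/// ```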
#[macro_export]
macro_rules! dma_descriptors_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_descriptors_impl!($rx_size, $tx_size, $chunk_size, is_circular = false)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_descriptors_chunk_size!($size, $size, $chunk_size)
    };
}

/// Convenience macro to create circular DMA descriptors with a custom chunk
/// size.
///
/// ## Usage
#[doc = crate::before_snippet!()]
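/// // Illustrative sketch: circular descriptors for 4032-byte chunks.
/// let (rx_descriptors, tx_descriptors) =
///     esp_hal::dma_circular_descriptors_chunk_size!(32000, 32000, 4032);
/// # Ok(())
/// # }
/// ```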
#[macro_export]
macro_rules! dma_circular_descriptors_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_descriptors_impl!($rx_size, $tx_size, $chunk_size, is_circular = true)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($size, $size, $chunk_size)
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_buffers_impl {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        let rx = $crate::dma_buffers_impl!($rx_size, $chunk_size, is_circular = $circular);
        let tx = $crate::dma_buffers_impl!($tx_size, $chunk_size, is_circular = $circular);
        (rx.0, rx.1, tx.0, tx.1)
    }};

    ($size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        $crate::declare_aligned_dma_buffer!(BUFFER, $size);

        unsafe {
            (
                $crate::dma::as_mut_byte_array!(BUFFER, $size),
                $crate::dma_descriptors_impl!($size, $chunk_size, is_circular = $circular),
            )
        }
    }};

    ($size:expr, is_circular = $circular:tt) => {
        $crate::dma_buffers_impl!(
            $size,
            $crate::dma::BurstConfig::DEFAULT.max_compatible_chunk_size(),
            is_circular = $circular
        );
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_descriptors_impl {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        let rx = $crate::dma_descriptors_impl!($rx_size, $chunk_size, is_circular = $circular);
        let tx = $crate::dma_descriptors_impl!($tx_size, $chunk_size, is_circular = $circular);
        (rx, tx)
    }};

    ($size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        const COUNT: usize =
            $crate::dma_descriptor_count!($size, $chunk_size, is_circular = $circular);

        static mut DESCRIPTORS: [$crate::dma::DmaDescriptor; COUNT] =
            [$crate::dma::DmaDescriptor::EMPTY; COUNT];

        unsafe { &mut DESCRIPTORS }
    }};
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_descriptor_count {
    ($size:expr, $chunk_size:expr, is_circular = $is_circular:tt) => {{
        const {
            ::core::assert!($chunk_size <= 4095, "chunk size must be <= 4095");
            ::core::assert!($chunk_size > 0, "chunk size must be > 0");
        }

        if $size == 0 {
            0
        } else {
            $crate::dma::descriptor_count($size, $chunk_size, $is_circular)
        }
    }};
}

/// Convenience macro to create a [DmaTxBuf](crate::dma::DmaTxBuf) from a
/// buffer size. The buffer and descriptors are statically allocated.
///
/// ## Usage
#[doc = crate::before_snippet!()]
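/// // Illustrative sketch: `DmaTxBuf::new` returns a `Result`, unwrapped here
/// // for brevity.
/// let tx_buf = esp_hal::dma_tx_buffer!(32000).unwrap();
/// # Ok(())
/// # }
/// ```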
#[macro_export]
macro_rules! dma_tx_buffer {
    ($tx_size:expr) => {{
        let (tx_buffer, tx_descriptors) = $crate::dma_buffers_impl!($tx_size, is_circular = false);

        $crate::dma::DmaTxBuf::new(tx_descriptors, tx_buffer)
    }};
}

/// Convenience macro to create a [DmaRxStreamBuf](crate::dma::DmaRxStreamBuf)
/// from a buffer size and, optionally, a chunk size (defaults to the maximum
/// of 4095 bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
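/// // Illustrative sketch: a 32000-byte stream buffer with default chunking.
/// let rx_buf = esp_hal::dma_rx_stream_buffer!(32000);
/// # Ok(())
/// # }
/// ```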
#[macro_export]
macro_rules! dma_rx_stream_buffer {
    ($rx_size:expr) => {
        $crate::dma_rx_stream_buffer!($rx_size, 4095)
    };
    ($rx_size:expr, $chunk_size:expr) => {{
        let (buffer, descriptors) =
            $crate::dma_buffers_impl!($rx_size, $chunk_size, is_circular = false);

        $crate::dma::DmaRxStreamBuf::new(descriptors, buffer).unwrap()
    }};
}

/// Convenience macro to create a [DmaLoopBuf](crate::dma::DmaLoopBuf) from a
/// buffer size. The size is limited to a single descriptor (at most 4095
/// bytes).
///
/// ## Usage
#[doc = crate::before_snippet!()]
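/// // Illustrative sketch: a 2048-byte loop buffer.
/// let mut lp_buf = esp_hal::dma_loop_buffer!(2048);
/// # Ok(())
/// # }
/// ```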
#[macro_export]
macro_rules! dma_loop_buffer {
    ($size:expr) => {{
        const {
            ::core::assert!($size <= 4095, "size must be <= 4095");
            ::core::assert!($size > 0, "size must be > 0");
        }

        let (buffer, descriptors) = $crate::dma_buffers_impl!($size, $size, is_circular = false);

        $crate::dma::DmaLoopBuf::new(&mut descriptors[0], buffer).unwrap()
    }};
}

/// DMA errors.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaError {
    /// The alignment of data is invalid.
    InvalidAlignment(DmaAlignmentError),
    /// More descriptors are needed for the buffer size.
    OutOfDescriptors,
    /// The DMA rejected the descriptor configuration.
    DescriptorError,
    /// The available free buffer is less than the amount of data to push.
    Overflow,
    /// The given buffer is too small.
    BufferTooSmall,
    /// Descriptors or buffers are not located in a supported memory region.
    UnsupportedMemoryRegion,
    /// Invalid DMA chunk size.
    InvalidChunkSize,
    /// Indicates writing to or reading from a circular DMA transaction is
    /// done too late and the DMA buffers have already overrun.
    Late,
}

impl From<DmaBufError> for DmaError {
    fn from(error: DmaBufError) -> Self {
        match error {
            DmaBufError::InsufficientDescriptors => DmaError::OutOfDescriptors,
            DmaBufError::UnsupportedMemoryRegion => DmaError::UnsupportedMemoryRegion,
            DmaBufError::InvalidAlignment(err) => DmaError::InvalidAlignment(err),
            DmaBufError::InvalidChunkSize => DmaError::InvalidChunkSize,
            DmaBufError::BufferTooSmall => DmaError::BufferTooSmall,
        }
    }
}

/// DMA channel priorities; a higher value corresponds to a higher priority.
#[cfg(gdma)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaPriority {
    /// The lowest priority level (Priority 0).
    Priority0 = 0,
    /// Priority level 1.
    Priority1 = 1,
    /// Priority level 2.
    Priority2 = 2,
    /// Priority level 3.
    Priority3 = 3,
    /// Priority level 4.
    Priority4 = 4,
    /// Priority level 5.
    Priority5 = 5,
    /// Priority level 6.
    Priority6 = 6,
    /// Priority level 7.
    Priority7 = 7,
    /// Priority level 8.
    Priority8 = 8,
    /// The highest priority level (Priority 9).
    Priority9 = 9,
}

/// DMA channel priorities. The PDMA controller does not support configurable
/// priorities, so only one level is available.
#[cfg(pdma)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaPriority {
    /// The only supported priority level.
    Priority0 = 0,
}

#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[doc(hidden)]
pub enum DmaPeripheral {
    Spi2 = 0,
    #[cfg(any(pdma, esp32s3))]
    Spi3 = 1,
    #[cfg(any(esp32c2, esp32c6, esp32h2))]
    Mem2Mem1 = 1,
    #[cfg(any(esp32c3, esp32c6, esp32h2, esp32s3))]
    Uhci0 = 2,
    #[cfg(any(esp32, esp32s2, esp32c3, esp32c6, esp32h2, esp32s3))]
    I2s0 = 3,
    #[cfg(any(esp32, esp32s3))]
    I2s1 = 4,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem4 = 4,
    #[cfg(esp32s3)]
    LcdCam = 5,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem5 = 5,
    #[cfg(not(esp32c2))]
    Aes = 6,
    #[cfg(any(esp32s2, gdma))]
    Sha = 7,
    #[cfg(any(esp32c3, esp32c6, esp32h2, esp32s3))]
    Adc = 8,
    #[cfg(esp32s3)]
    Rmt = 9,
    #[cfg(parl_io)]
    ParlIo = 9,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem10 = 10,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem11 = 11,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem12 = 12,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem13 = 13,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem14 = 14,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem15 = 15,
}

/// The owner of a DMA descriptor.
#[derive(PartialEq, PartialOrd)]
pub enum Owner {
    /// Owned by the CPU.
    Cpu = 0,
    /// Owned by the DMA.
    Dma = 1,
}

impl From<u32> for Owner {
    fn from(value: u32) -> Self {
        match value {
            0 => Owner::Cpu,
            _ => Owner::Dma,
        }
    }
}

#[doc(hidden)]
pub trait DmaEligible {
    /// The most specific DMA channel type usable by this peripheral.
    type Dma: DmaChannel;

    fn dma_peripheral(&self) -> DmaPeripheral;
}

#[doc(hidden)]
#[derive(Debug)]
pub struct DescriptorChain {
    pub(crate) descriptors: &'static mut [DmaDescriptor],
    chunk_size: usize,
}

impl DescriptorChain {
    pub fn new(descriptors: &'static mut [DmaDescriptor]) -> Self {
        Self::new_with_chunk_size(descriptors, CHUNK_SIZE)
    }

    pub fn new_with_chunk_size(
        descriptors: &'static mut [DmaDescriptor],
        chunk_size: usize,
    ) -> Self {
        Self {
            descriptors,
            chunk_size,
        }
    }

    pub fn first_mut(&mut self) -> *mut DmaDescriptor {
        self.descriptors.as_mut_ptr()
    }

    pub fn first(&self) -> *const DmaDescriptor {
        self.descriptors.as_ptr()
    }

    pub fn last_mut(&mut self) -> *mut DmaDescriptor {
        self.descriptors.last_mut().unwrap()
    }

    pub fn last(&self) -> *const DmaDescriptor {
        self.descriptors.last().unwrap()
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill_for_rx(
        &mut self,
        circular: bool,
        data: *mut u8,
        len: usize,
    ) -> Result<(), DmaError> {
        self.fill(circular, data, len, |desc, _| {
            desc.reset_for_rx();
        })
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill_for_tx(
        &mut self,
        is_circular: bool,
        data: *const u8,
        len: usize,
    ) -> Result<(), DmaError> {
        self.fill(is_circular, data.cast_mut(), len, |desc, chunk_size| {
            // The `suc_eof` bit is set on the last descriptor of a frame; in
            // circular mode every descriptor is treated as a frame end.
            desc.reset_for_tx(desc.next.is_null() || is_circular);
            desc.set_length(chunk_size);
        })
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill(
        &mut self,
        circular: bool,
        data: *mut u8,
        len: usize,
        prepare_descriptor: impl Fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaError> {
        if !is_valid_ram_address(self.first() as usize)
            || !is_valid_ram_address(self.last() as usize)
            || !is_valid_memory_address(data as usize)
            || !is_valid_memory_address(unsafe { data.add(len) } as usize)
        {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        // Circular transfers need at least three descriptors, so for small
        // buffers the chunk size is reduced accordingly.
        let max_chunk_size = if circular && len <= self.chunk_size * 2 {
            if len <= 3 {
                return Err(DmaError::BufferTooSmall);
            }
            len / 3 + len % 3
        } else {
            self.chunk_size
        };

        DescriptorSet::set_up_buffer_ptrs(
            unsafe { core::slice::from_raw_parts_mut(data, len) },
            self.descriptors,
            max_chunk_size,
            circular,
        )?;
        DescriptorSet::set_up_descriptors(
            self.descriptors,
            len,
            max_chunk_size,
            circular,
            prepare_descriptor,
        )?;

        Ok(())
    }
}

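/// Computes the number of descriptors required for a buffer of size
/// `buffer_size`, transferred in chunks of `chunk_size` bytes.
///
/// A worked example, derived from the rules in the function body (the
/// `esp_hal` paths assume the usual crate name):
///
/// ```rust,ignore
/// use esp_hal::dma::descriptor_count;
///
/// assert_eq!(descriptor_count(32000, 4092, false), 8); // ceil(32000 / 4092)
/// assert_eq!(descriptor_count(4092, 4092, true), 3); // circular minimum
/// assert_eq!(descriptor_count(100, 4092, false), 1); // always at least one
/// ```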
pub const fn descriptor_count(buffer_size: usize, chunk_size: usize, is_circular: bool) -> usize {
    if is_circular && buffer_size <= chunk_size * 2 {
        return 3;
    }

    if buffer_size < chunk_size {
        // At least one descriptor is always required.
        return 1;
    }

    buffer_size.div_ceil(chunk_size)
}

/// A set of DMA descriptors.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
struct DescriptorSet<'a> {
    descriptors: &'a mut [DmaDescriptor],
}

impl<'a> DescriptorSet<'a> {
    /// Creates a new `DescriptorSet`, checking that the descriptors are in
    /// DRAM and clearing them.
    fn new(descriptors: &'a mut [DmaDescriptor]) -> Result<Self, DmaBufError> {
        if !is_slice_in_dram(descriptors) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        descriptors.fill(DmaDescriptor::EMPTY);

        Ok(unsafe { Self::new_unchecked(descriptors) })
    }

    /// Creates a new `DescriptorSet` from a slice of descriptors.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the descriptors are located in a supported
    /// memory region.
    unsafe fn new_unchecked(descriptors: &'a mut [DmaDescriptor]) -> Self {
        Self { descriptors }
    }

    /// Consumes the set and returns the inner slice of descriptors.
    fn into_inner(self) -> &'a mut [DmaDescriptor] {
        self.descriptors
    }

    /// Returns a pointer to the first descriptor.
    fn head(&mut self) -> *mut DmaDescriptor {
        self.descriptors.as_mut_ptr()
    }

    /// Returns an iterator over the linked descriptors.
    fn linked_iter(&self) -> impl Iterator<Item = &DmaDescriptor> {
        let mut was_last = false;
        self.descriptors.iter().take_while(move |d| {
            if was_last {
                false
            } else {
                was_last = d.next.is_null();
                true
            }
        })
    }

    /// Returns a mutable iterator over the linked descriptors.
    fn linked_iter_mut(&mut self) -> impl Iterator<Item = &mut DmaDescriptor> {
        let mut was_last = false;
        self.descriptors.iter_mut().take_while(move |d| {
            if was_last {
                false
            } else {
                was_last = d.next.is_null();
                true
            }
        })
    }

    /// Associates the descriptors with the given buffer.
    fn link_with_buffer(
        &mut self,
        buffer: &mut [u8],
        chunk_size: usize,
    ) -> Result<(), DmaBufError> {
        Self::set_up_buffer_ptrs(buffer, self.descriptors, chunk_size, false)
    }

    /// Prepares descriptors for transferring `len` bytes of data.
    fn set_length(
        &mut self,
        len: usize,
        chunk_size: usize,
        prepare: fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaBufError> {
        Self::set_up_descriptors(self.descriptors, len, chunk_size, false, prepare)
    }

    /// Prepares descriptors for receiving `len` bytes of data.
    fn set_rx_length(&mut self, len: usize, chunk_size: usize) -> Result<(), DmaBufError> {
        self.set_length(len, chunk_size, |desc, chunk_size| {
            desc.set_size(chunk_size);
        })
    }

    /// Prepares descriptors for transmitting `len` bytes of data.
    fn set_tx_length(&mut self, len: usize, chunk_size: usize) -> Result<(), DmaBufError> {
        self.set_length(len, chunk_size, |desc, chunk_size| {
            desc.set_length(chunk_size);
        })
    }

    /// Returns the subslice of descriptors needed for a buffer of length
    /// `len`, or an error if there aren't enough descriptors.
    fn descriptors_for_buffer_len(
        descriptors: &mut [DmaDescriptor],
        len: usize,
        chunk_size: usize,
        is_circular: bool,
    ) -> Result<&mut [DmaDescriptor], DmaBufError> {
        let required_descriptors = descriptor_count(len, chunk_size, is_circular);
        if descriptors.len() < required_descriptors {
            return Err(DmaBufError::InsufficientDescriptors);
        }
        Ok(&mut descriptors[..required_descriptors])
    }

    /// Sets up the links between descriptors and calls `prepare` on each of
    /// them with the chunk size it is responsible for.
    fn set_up_descriptors(
        descriptors: &mut [DmaDescriptor],
        len: usize,
        chunk_size: usize,
        is_circular: bool,
        prepare: impl Fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaBufError> {
        let descriptors =
            Self::descriptors_for_buffer_len(descriptors, len, chunk_size, is_circular)?;

        // Link up the descriptors. In circular mode, the last descriptor
        // points back to the first one.
        let mut next = if is_circular {
            descriptors.as_mut_ptr()
        } else {
            core::ptr::null_mut()
        };
        for desc in descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;
        }

        // Prepare each descriptor with the chunk of the buffer it covers.
        let mut remaining_length = len;
        for desc in descriptors.iter_mut() {
            let chunk_size = min(chunk_size, remaining_length);
            prepare(desc, chunk_size);
            remaining_length -= chunk_size;
        }
        debug_assert_eq!(remaining_length, 0);

        Ok(())
    }

    /// Sets up the buffer pointers of the descriptors: each descriptor points
    /// at a `chunk_size`-sized chunk of `buffer`.
    fn set_up_buffer_ptrs(
        buffer: &mut [u8],
        descriptors: &mut [DmaDescriptor],
        chunk_size: usize,
        is_circular: bool,
    ) -> Result<(), DmaBufError> {
        let descriptors =
            Self::descriptors_for_buffer_len(descriptors, buffer.len(), chunk_size, is_circular)?;

        let chunks = buffer.chunks_mut(chunk_size);
        for (desc, chunk) in descriptors.iter_mut().zip(chunks) {
            desc.set_size(chunk.len());
            desc.buffer = chunk.as_mut_ptr();
        }

        Ok(())
    }
}

/// Block size used for transfers to/from external (PSRAM) memory.
#[cfg(psram_dma)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DmaExtMemBKSize {
    /// External memory block size of 16 bytes.
    Size16 = 0,
    /// External memory block size of 32 bytes.
    Size32 = 1,
    /// External memory block size of 64 bytes.
    Size64 = 2,
}

#[cfg(psram_dma)]
impl From<ExternalBurstConfig> for DmaExtMemBKSize {
    fn from(size: ExternalBurstConfig) -> Self {
        match size {
            ExternalBurstConfig::Size16 => DmaExtMemBKSize::Size16,
            ExternalBurstConfig::Size32 => DmaExtMemBKSize::Size32,
            ExternalBurstConfig::Size64 => DmaExtMemBKSize::Size64,
        }
    }
}

pub(crate) struct TxCircularState {
    write_offset: usize,
    write_descr_ptr: *mut DmaDescriptor,
    pub(crate) available: usize,
    last_seen_handled_descriptor_ptr: *mut DmaDescriptor,
    buffer_start: *const u8,
    buffer_len: usize,

    first_desc_ptr: *mut DmaDescriptor,
}

impl TxCircularState {
    pub(crate) fn new(chain: &mut DescriptorChain) -> Self {
        Self {
            write_offset: 0,
            write_descr_ptr: chain.first_mut(),
            available: 0,
            last_seen_handled_descriptor_ptr: chain.first_mut(),
            buffer_start: chain.descriptors[0].buffer as _,
            buffer_len: chain.descriptors.iter().map(|d| d.len()).sum(),

            first_desc_ptr: chain.first_mut(),
        }
    }

    pub(crate) fn update<T>(&mut self, channel: &T) -> Result<(), DmaError>
    where
        T: Tx,
    {
        if channel
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::Eof)
        {
            channel.clear_out(DmaTxInterrupt::Eof);

            // Walk the descriptors the DMA has handed back to the CPU. If we
            // wrap all the way around to where we started, the update came
            // too late and the buffer has already overrun.
            let mut current = self.last_seen_handled_descriptor_ptr;
            loop {
                let descr = unsafe { current.read_volatile() };
                if descr.owner() == Owner::Cpu {
                    current = descr.next;
                } else {
                    break;
                }

                if current == self.last_seen_handled_descriptor_ptr {
                    return Err(DmaError::Late);
                }
            }

            let descr_address = channel.last_out_dscr_address() as *mut DmaDescriptor;

            let mut ptr = self.last_seen_handled_descriptor_ptr;
            if descr_address >= self.last_seen_handled_descriptor_ptr {
                unsafe {
                    while ptr < descr_address {
                        let dw0 = ptr.read_volatile();
                        self.available += dw0.len();
                        ptr = ptr.offset(1);
                    }
                }
            } else {
                unsafe {
                    while !((*ptr).next.is_null() || (*ptr).next == self.first_desc_ptr) {
                        let dw0 = ptr.read_volatile();
                        self.available += dw0.len();
                        ptr = ptr.offset(1);
                    }

                    // Add the last descriptor since the loop above doesn't.
                    let dw0 = ptr.read_volatile();
                    self.available += dw0.len();

                    // Wrap around to the start of the chain if needed.
                    if (*ptr).next == self.first_desc_ptr {
                        ptr = self.first_desc_ptr;
                        while ptr < descr_address {
                            let dw0 = ptr.read_volatile();
                            self.available += dw0.len();
                            ptr = ptr.offset(1);
                        }
                    }
                }
            }

            if self.available >= self.buffer_len {
                unsafe {
                    let dw0 = self.write_descr_ptr.read_volatile();
                    let segment_len = dw0.len();
                    let next_descriptor = dw0.next;
                    self.available -= segment_len;
                    self.write_offset = (self.write_offset + segment_len) % self.buffer_len;

                    self.write_descr_ptr = if next_descriptor.is_null() {
                        self.first_desc_ptr
                    } else {
                        next_descriptor
                    }
                }
            }

            self.last_seen_handled_descriptor_ptr = descr_address;
        }

        Ok(())
    }

    pub(crate) fn push(&mut self, data: &[u8]) -> Result<usize, DmaError> {
        let avail = self.available;

        if avail < data.len() {
            return Err(DmaError::Overflow);
        }

        let mut remaining = data.len();
        let mut offset = 0;
        while self.available >= remaining && remaining > 0 {
            let written = self.push_with(|buffer| {
                let len = usize::min(buffer.len(), data.len() - offset);
                buffer[..len].copy_from_slice(&data[offset..][..len]);
                len
            })?;
            offset += written;
            remaining -= written;
        }

        Ok(data.len())
    }

    pub(crate) fn push_with(
        &mut self,
        f: impl FnOnce(&mut [u8]) -> usize,
    ) -> Result<usize, DmaError> {
        // Write into the current position in the buffer, up to the end of the
        // buffer or the available free space, whichever is smaller.
        let written = unsafe {
            let dst = self.buffer_start.add(self.write_offset).cast_mut();
            let block_size = usize::min(self.available, self.buffer_len - self.write_offset);
            let buffer = core::slice::from_raw_parts_mut(dst, block_size);
            f(buffer)
        };

        // Hand the descriptors that now contain data back to the DMA.
        let mut forward = written;
        loop {
            unsafe {
                let mut descr = self.write_descr_ptr.read_volatile();
                descr.set_owner(Owner::Dma);
                self.write_descr_ptr.write_volatile(descr);

                let segment_len = descr.len();
                self.write_descr_ptr = if descr.next.is_null() {
                    self.first_desc_ptr
                } else {
                    descr.next
                };

                if forward <= segment_len {
                    break;
                }

                forward -= segment_len;
            }
        }

        self.write_offset = (self.write_offset + written) % self.buffer_len;
        self.available -= written;

        Ok(written)
    }
}

pub(crate) struct RxCircularState {
    read_descr_ptr: *mut DmaDescriptor,
    pub(crate) available: usize,
    last_seen_handled_descriptor_ptr: *mut DmaDescriptor,
    last_descr_ptr: *mut DmaDescriptor,
}

impl RxCircularState {
    pub(crate) fn new(chain: &mut DescriptorChain) -> Self {
        Self {
            read_descr_ptr: chain.first_mut(),
            available: 0,
            last_seen_handled_descriptor_ptr: core::ptr::null_mut(),
            last_descr_ptr: chain.last_mut(),
        }
    }

    pub(crate) fn update(&mut self) -> Result<(), DmaError> {
        if self.last_seen_handled_descriptor_ptr.is_null() {
            // Initially start at the last descriptor.
            self.last_seen_handled_descriptor_ptr = self.last_descr_ptr;
        }

        let mut current_in_descr_ptr =
            unsafe { self.last_seen_handled_descriptor_ptr.read_volatile() }.next;
        let mut current_in_descr = unsafe { current_in_descr_ptr.read_volatile() };

        let last_seen_ptr = self.last_seen_handled_descriptor_ptr;
        while current_in_descr.owner() == Owner::Cpu {
            self.available += current_in_descr.len();
            self.last_seen_handled_descriptor_ptr = current_in_descr_ptr;

            current_in_descr_ptr =
                unsafe { self.last_seen_handled_descriptor_ptr.read_volatile() }.next;
            current_in_descr = unsafe { current_in_descr_ptr.read_volatile() };

            if current_in_descr_ptr == last_seen_ptr {
                return Err(DmaError::Late);
            }
        }

        Ok(())
    }

    pub(crate) fn pop(&mut self, data: &mut [u8]) -> Result<usize, DmaError> {
        let len = data.len();
        let mut avail = self.available;

        if avail > len {
            return Err(DmaError::BufferTooSmall);
        }

        let mut remaining_buffer = data;
        let mut descr_ptr = self.read_descr_ptr;

        if descr_ptr.is_null() {
            return Ok(0);
        }

        let mut descr = unsafe { descr_ptr.read_volatile() };

        while avail > 0 && !remaining_buffer.is_empty() && remaining_buffer.len() >= descr.len() {
            unsafe {
                // Copy the data out of the DMA buffer, then hand the
                // descriptor back to the DMA.
                let dst = remaining_buffer.as_mut_ptr();
                let src = descr.buffer;
                let count = descr.len();
                core::ptr::copy_nonoverlapping(src, dst, count);

                descr.set_owner(Owner::Dma);
                descr.set_suc_eof(false);
                descr.set_length(0);
                descr_ptr.write_volatile(descr);

                remaining_buffer = &mut remaining_buffer[count..];
                avail -= count;
                descr_ptr = descr.next;
            }

            if descr_ptr.is_null() {
                break;
            }

            descr = unsafe { descr_ptr.read_volatile() };
        }

        self.read_descr_ptr = descr_ptr;
        self.available = avail;
        Ok(len - remaining_buffer.len())
    }
}

#[doc(hidden)]
macro_rules! impl_dma_eligible {
    ([$dma_ch:ident] $name:ident => $dma:ident) => {
        impl $crate::dma::DmaEligible for $crate::peripherals::$name {
            type Dma = $dma_ch;

            fn dma_peripheral(&self) -> $crate::dma::DmaPeripheral {
                $crate::dma::DmaPeripheral::$dma
            }
        }
    };

    (
        $dma_ch:ident {
            $($(#[$cfg:meta])? $name:ident => $dma:ident,)*
        }
    ) => {
        $(
            $(#[$cfg])?
            $crate::dma::impl_dma_eligible!([$dma_ch] $name => $dma);
        )*
    };
}

pub(crate) use impl_dma_eligible;

/// Helper type to get the DMA channel compatible with a peripheral.
pub type PeripheralDmaChannel<T> = <T as DmaEligible>::Dma;
/// Helper type to get the RX half of the DMA channel compatible with a
/// peripheral.
pub type PeripheralRxChannel<T> = <PeripheralDmaChannel<T> as DmaChannel>::Rx;
/// Helper type to get the TX half of the DMA channel compatible with a
/// peripheral.
pub type PeripheralTxChannel<T> = <PeripheralDmaChannel<T> as DmaChannel>::Tx;

#[doc(hidden)]
pub trait DmaRxChannel:
    RxRegisterAccess + InterruptAccess<DmaRxInterrupt> + Peripheral<P = Self>
{
}

#[doc(hidden)]
pub trait DmaTxChannel:
    TxRegisterAccess + InterruptAccess<DmaTxInterrupt> + Peripheral<P = Self>
{
}

/// A description of a DMA channel.
pub trait DmaChannel: Peripheral<P = Self> {
    /// A description of the RX half of the channel.
    type Rx: DmaRxChannel;

    /// A description of the TX half of the channel.
    type Tx: DmaTxChannel;

    /// Splits the DMA channel into its RX and TX halves.
    #[cfg(any(esp32c6, esp32h2, esp32s3))]
    fn split(self) -> (Self::Rx, Self::Tx) {
        unsafe { self.split_internal(crate::private::Internal) }
    }

    /// Splits the DMA channel into its RX and TX halves.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the resulting halves are not used together
    /// with the whole channel.
    unsafe fn split_internal(self, _: crate::private::Internal) -> (Self::Rx, Self::Tx);
}

#[doc(hidden)]
pub trait DmaChannelExt: DmaChannel {
    fn rx_interrupts() -> impl InterruptAccess<DmaRxInterrupt>;
    fn tx_interrupts() -> impl InterruptAccess<DmaTxInterrupt>;
}

#[diagnostic::on_unimplemented(
    message = "The DMA channel isn't suitable for this peripheral",
    label = "This DMA channel",
    note = "Not all channels are usable with all peripherals"
)]
#[doc(hidden)]
pub trait DmaChannelConvert<DEG> {
    fn degrade(self) -> DEG;
}

impl<DEG: DmaChannel> DmaChannelConvert<DEG> for DEG {
    fn degrade(self) -> DEG {
        self
    }
}

/// Trait implemented for DMA channels that are compatible with a particular
/// peripheral.
#[cfg_attr(pdma, doc = "")]
#[cfg_attr(
    pdma,
    doc = "Note that using mismatching channels (e.g. trying to use `DMA_SPI2` with SPI3) may compile, but will panic at runtime."
)]
#[cfg_attr(pdma, doc = "")]
#[doc = crate::before_snippet!()]
#[cfg_attr(pdma, doc = "let dma_channel = peripherals.DMA_SPI2;")]
#[cfg_attr(gdma, doc = "let dma_channel = peripherals.DMA_CH0;")]
#[doc = ""]
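// Minimal, illustrative close for the doc snippet opened above.
#[doc = "# let _ = dma_channel;"]
#[doc = "# Ok(())"]
#[doc = "# }"]
#[doc = "```"]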
pub trait DmaChannelFor<P: DmaEligible>:
    DmaChannel + DmaChannelConvert<PeripheralDmaChannel<P>>
{
}
impl<P, CH> DmaChannelFor<P> for CH
where
    P: DmaEligible,
    CH: DmaChannel + DmaChannelConvert<PeripheralDmaChannel<P>>,
{
}

/// Trait implemented for the RX half of split DMA channels that are
/// compatible with a particular peripheral.
pub trait RxChannelFor<P: DmaEligible>: DmaChannelConvert<PeripheralRxChannel<P>> {}
impl<P, RX> RxChannelFor<P> for RX
where
    P: DmaEligible,
    RX: DmaChannelConvert<PeripheralRxChannel<P>>,
{
}

/// Trait implemented for the TX half of split DMA channels that are
/// compatible with a particular peripheral.
pub trait TxChannelFor<PER: DmaEligible>: DmaChannelConvert<PeripheralTxChannel<PER>> {}
impl<P, TX> TxChannelFor<P> for TX
where
    P: DmaEligible,
    TX: DmaChannelConvert<PeripheralTxChannel<P>>,
{
}

#[doc(hidden)]
pub trait Rx: crate::private::Sealed {
    unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError>;

    unsafe fn prepare_transfer<BUF: DmaRxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError>;

    fn start_transfer(&mut self) -> Result<(), DmaError>;

    fn stop_transfer(&mut self);

    #[cfg(gdma)]
    fn set_mem2mem_mode(&mut self, value: bool);

    fn listen_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>);

    fn unlisten_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>);

    fn is_listening_in(&self) -> EnumSet<DmaRxInterrupt>;

    fn clear_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>);

    fn pending_in_interrupts(&self) -> EnumSet<DmaRxInterrupt>;

    fn is_done(&self) -> bool;

    fn has_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorError)
    }

    fn has_dscr_empty_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorEmpty)
    }

    fn has_eof_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::ErrorEof)
    }

    fn clear_interrupts(&self);

    fn waker(&self) -> &'static crate::asynch::AtomicWaker;
}

cfg_if::cfg_if! {
    if #[cfg(pdma)] {
        type PeripheralGuard = system::GenericPeripheralGuard<{ system::Peripheral::Dma as u8}>;
    } else {
        type PeripheralGuard = system::GenericPeripheralGuard<{ system::Peripheral::Gdma as u8}>;
    }
}

fn create_guard(_ch: &impl RegisterAccess) -> PeripheralGuard {
    system::GenericPeripheralGuard::new_with(init_dma)
}

/// DMA receive channel half.
#[non_exhaustive]
#[doc(hidden)]
pub struct ChannelRx<'a, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    pub(crate) rx_impl: PeripheralRef<'a, CH>,
    pub(crate) _phantom: PhantomData<Dm>,
    pub(crate) _guard: PeripheralGuard,
}

impl<'a, CH> ChannelRx<'a, Blocking, CH>
where
    CH: DmaRxChannel,
{
    /// Creates a new RX channel half.
    pub fn new(rx_impl: impl Peripheral<P = CH> + 'a) -> Self {
        crate::into_ref!(rx_impl);

        let _guard = create_guard(&*rx_impl);

        // Disable mem2mem mode by default.
        #[cfg(gdma)]
        rx_impl.set_mem2mem_mode(false);

        if let Some(interrupt) = rx_impl.peripheral_interrupt() {
            for cpu in Cpu::all() {
                crate::interrupt::disable(cpu, interrupt);
            }
        }
        rx_impl.set_async(false);

        Self {
            rx_impl,
            _phantom: PhantomData,
            _guard,
        }
    }

    /// Converts a blocking channel into an async channel.
    pub(crate) fn into_async(mut self) -> ChannelRx<'a, Async, CH> {
        if let Some(handler) = self.rx_impl.async_handler() {
            self.set_interrupt_handler(handler);
        }
        self.rx_impl.set_async(true);
        ChannelRx {
            rx_impl: self.rx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }

    fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.unlisten_in(EnumSet::all());
        self.clear_in(EnumSet::all());

        if let Some(interrupt) = self.rx_impl.peripheral_interrupt() {
            for core in crate::system::Cpu::other() {
                crate::interrupt::disable(core, interrupt);
            }
            unsafe { crate::interrupt::bind_interrupt(interrupt, handler.handler()) };
            unwrap!(crate::interrupt::enable(interrupt, handler.priority()));
        }
    }
}

impl<'a, CH> ChannelRx<'a, Async, CH>
where
    CH: DmaRxChannel,
{
    /// Converts an async channel into a blocking channel.
    pub(crate) fn into_blocking(self) -> ChannelRx<'a, Blocking, CH> {
        if let Some(interrupt) = self.rx_impl.peripheral_interrupt() {
            crate::interrupt::disable(Cpu::current(), interrupt);
        }
        self.rx_impl.set_async(false);
        ChannelRx {
            rx_impl: self.rx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }
}

impl<Dm, CH> ChannelRx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    /// Configures the DMA channel priority.
    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.rx_impl.set_priority(priority);
    }

    fn do_prepare(
        &mut self,
        preparation: Preparation,
        peri: DmaPeripheral,
    ) -> Result<(), DmaError> {
        debug_assert_eq!(preparation.direction, TransferDirection::In);

        debug!("Preparing RX transfer {:?}", preparation);
        trace!("First descriptor {:?}", unsafe { &*preparation.start });

        #[cfg(psram_dma)]
        if preparation.accesses_psram && !self.rx_impl.can_access_psram() {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        #[cfg(psram_dma)]
        self.rx_impl
            .set_ext_mem_block_size(preparation.burst_transfer.external_memory.into());
        self.rx_impl.set_burst_mode(preparation.burst_transfer);
        self.rx_impl.set_descr_burst_mode(true);
        self.rx_impl.set_check_owner(preparation.check_owner);

        compiler_fence(core::sync::atomic::Ordering::SeqCst);

        self.rx_impl.clear_all();
        self.rx_impl.reset();
        self.rx_impl.set_link_addr(preparation.start as u32);
        self.rx_impl.set_peripheral(peri as u8);

        Ok(())
    }
}

impl<Dm, CH> crate::private::Sealed for ChannelRx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
}

impl<Dm, CH> Rx for ChannelRx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError> {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let mut uses_psram = false;
                let psram_range = crate::soc::psram_range();
                for des in chain.descriptors.iter() {
                    // If the data is in PSRAM, it must be aligned to the cache
                    // line size, and the cache must be invalidated before the
                    // DMA writes to it.
                    let alignment = crate::soc::cache_get_dcache_line_size() as usize;
                    if crate::soc::addr_in_range(des.buffer as usize, psram_range.clone()) {
                        uses_psram = true;
                        if des.buffer as usize % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
                        }
                        if des.size() % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Size));
                        }
                        crate::soc::cache_invalidate_addr(des.buffer as u32, des.size() as u32);
                    }
                }
            }
        }

        let preparation = Preparation {
            start: chain.first().cast_mut(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: uses_psram,
            burst_transfer: BurstConfig::default(),
            check_owner: Some(false),
            auto_write_back: true,
        };
        self.do_prepare(preparation, peri)
    }

    unsafe fn prepare_transfer<BUF: DmaRxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError> {
        let preparation = buffer.prepare();

        self.do_prepare(preparation, peri)
    }

    fn start_transfer(&mut self) -> Result<(), DmaError> {
        self.rx_impl.start();

        if self
            .pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    fn stop_transfer(&mut self) {
        self.rx_impl.stop()
    }

    #[cfg(gdma)]
    fn set_mem2mem_mode(&mut self, value: bool) {
        self.rx_impl.set_mem2mem_mode(value);
    }

    fn listen_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.listen(interrupts);
    }

    fn unlisten_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.unlisten(interrupts);
    }

    fn is_listening_in(&self) -> EnumSet<DmaRxInterrupt> {
        self.rx_impl.is_listening()
    }

    fn clear_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.clear(interrupts);
    }

    fn pending_in_interrupts(&self) -> EnumSet<DmaRxInterrupt> {
        self.rx_impl.pending_interrupts()
    }

    fn is_done(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::SuccessfulEof)
    }

    fn clear_interrupts(&self) {
        self.rx_impl.clear_all();
    }

    fn waker(&self) -> &'static crate::asynch::AtomicWaker {
        self.rx_impl.waker()
    }
}

#[doc(hidden)]
pub trait Tx: crate::private::Sealed {
    unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError>;

    unsafe fn prepare_transfer<BUF: DmaTxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError>;

    fn listen_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>);

    fn unlisten_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>);

    fn is_listening_out(&self) -> EnumSet<DmaTxInterrupt>;

    fn clear_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>);

    fn pending_out_interrupts(&self) -> EnumSet<DmaTxInterrupt>;

    fn start_transfer(&mut self) -> Result<(), DmaError>;

    fn stop_transfer(&mut self);

    fn is_done(&self) -> bool {
        self.pending_out_interrupts()
            .contains(DmaTxInterrupt::TotalEof)
    }

    fn has_error(&self) -> bool {
        self.pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
    }

    fn clear_interrupts(&self);

    fn waker(&self) -> &'static crate::asynch::AtomicWaker;

    fn last_out_dscr_address(&self) -> usize;
}

/// DMA transmit channel half.
#[doc(hidden)]
pub struct ChannelTx<'a, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    pub(crate) tx_impl: PeripheralRef<'a, CH>,
    pub(crate) _phantom: PhantomData<Dm>,
    pub(crate) _guard: PeripheralGuard,
}

impl<'a, CH> ChannelTx<'a, Blocking, CH>
where
    CH: DmaTxChannel,
{
    /// Creates a new TX channel half.
    pub fn new(tx_impl: impl Peripheral<P = CH> + 'a) -> Self {
        crate::into_ref!(tx_impl);

        let _guard = create_guard(&*tx_impl);

        if let Some(interrupt) = tx_impl.peripheral_interrupt() {
            for cpu in Cpu::all() {
                crate::interrupt::disable(cpu, interrupt);
            }
        }
        tx_impl.set_async(false);
        Self {
            tx_impl,
            _phantom: PhantomData,
            _guard,
        }
    }

    /// Converts a blocking channel into an async channel.
    pub(crate) fn into_async(mut self) -> ChannelTx<'a, Async, CH> {
        if let Some(handler) = self.tx_impl.async_handler() {
            self.set_interrupt_handler(handler);
        }
        self.tx_impl.set_async(true);
        ChannelTx {
            tx_impl: self.tx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }

    fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.unlisten_out(EnumSet::all());
        self.clear_out(EnumSet::all());

        if let Some(interrupt) = self.tx_impl.peripheral_interrupt() {
            for core in crate::system::Cpu::other() {
                crate::interrupt::disable(core, interrupt);
            }
            unsafe { crate::interrupt::bind_interrupt(interrupt, handler.handler()) };
            unwrap!(crate::interrupt::enable(interrupt, handler.priority()));
        }
    }
}

impl<'a, CH> ChannelTx<'a, Async, CH>
where
    CH: DmaTxChannel,
{
    /// Converts an async channel into a blocking channel.
    pub(crate) fn into_blocking(self) -> ChannelTx<'a, Blocking, CH> {
        if let Some(interrupt) = self.tx_impl.peripheral_interrupt() {
            crate::interrupt::disable(Cpu::current(), interrupt);
        }
        self.tx_impl.set_async(false);
        ChannelTx {
            tx_impl: self.tx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }
}

impl<Dm, CH> ChannelTx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    /// Configures the DMA channel priority.
    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.tx_impl.set_priority(priority);
    }

    fn do_prepare(
        &mut self,
        preparation: Preparation,
        peri: DmaPeripheral,
    ) -> Result<(), DmaError> {
        debug_assert_eq!(preparation.direction, TransferDirection::Out);

        debug!("Preparing TX transfer {:?}", preparation);
        trace!("First descriptor {:?}", unsafe { &*preparation.start });

        #[cfg(psram_dma)]
        if preparation.accesses_psram && !self.tx_impl.can_access_psram() {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        #[cfg(psram_dma)]
        self.tx_impl
            .set_ext_mem_block_size(preparation.burst_transfer.external_memory.into());
        self.tx_impl.set_burst_mode(preparation.burst_transfer);
        self.tx_impl.set_descr_burst_mode(true);
        self.tx_impl.set_check_owner(preparation.check_owner);
        self.tx_impl
            .set_auto_write_back(preparation.auto_write_back);

        compiler_fence(core::sync::atomic::Ordering::SeqCst);

        self.tx_impl.clear_all();
        self.tx_impl.reset();
        self.tx_impl.set_link_addr(preparation.start as u32);
        self.tx_impl.set_peripheral(peri as u8);

        Ok(())
    }
}

impl<Dm, CH> crate::private::Sealed for ChannelTx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
}

impl<Dm, CH> Tx for ChannelTx<'_, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError> {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let mut uses_psram = false;
                let psram_range = crate::soc::psram_range();
                for des in chain.descriptors.iter() {
                    // If the data is in PSRAM, it must be aligned to the cache
                    // line size, and the cache must be written back before the
                    // DMA reads it.
                    let alignment = crate::soc::cache_get_dcache_line_size() as usize;
                    if crate::soc::addr_in_range(des.buffer as usize, psram_range.clone()) {
                        uses_psram = true;
                        if des.buffer as usize % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
                        }
                        if des.size() % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Size));
                        }
                        crate::soc::cache_writeback_addr(des.buffer as u32, des.size() as u32);
                    }
                }
            }
        }

        let preparation = Preparation {
            start: chain.first().cast_mut(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: uses_psram,
            burst_transfer: BurstConfig::default(),
            check_owner: Some(false),
            // A circular chain (non-null `next` on the last descriptor) needs
            // descriptor write-back so the driver can track progress.
            auto_write_back: !(*chain.last()).next.is_null(),
        };
        self.do_prepare(preparation, peri)?;

        Ok(())
    }

    unsafe fn prepare_transfer<BUF: DmaTxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError> {
        let preparation = buffer.prepare();

        self.do_prepare(preparation, peri)
    }

    fn start_transfer(&mut self) -> Result<(), DmaError> {
        self.tx_impl.start();
        // Wait for the DMA to fill the peripheral's FIFO, or for an interrupt
        // to become pending, before returning.
        while self.tx_impl.is_fifo_empty() && self.pending_out_interrupts().is_empty() {}

        if self
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    fn stop_transfer(&mut self) {
        self.tx_impl.stop()
    }

    fn listen_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.listen(interrupts);
    }

    fn unlisten_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.unlisten(interrupts);
    }

    fn is_listening_out(&self) -> EnumSet<DmaTxInterrupt> {
        self.tx_impl.is_listening()
    }

    fn clear_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.clear(interrupts);
    }

    fn pending_out_interrupts(&self) -> EnumSet<DmaTxInterrupt> {
        self.tx_impl.pending_interrupts()
    }

    fn waker(&self) -> &'static crate::asynch::AtomicWaker {
        self.tx_impl.waker()
    }

    fn clear_interrupts(&self) {
        self.tx_impl.clear_all();
    }

    fn last_out_dscr_address(&self) -> usize {
        self.tx_impl.last_dscr_address()
    }
}

#[doc(hidden)]
pub trait RegisterAccess: crate::private::Sealed {
    fn reset(&self);

    fn set_burst_mode(&self, burst_mode: BurstConfig);

    fn set_descr_burst_mode(&self, burst_mode: bool);

    #[cfg(gdma)]
    fn set_priority(&self, priority: DmaPriority);

    fn set_peripheral(&self, peripheral: u8);

    fn set_link_addr(&self, address: u32);

    fn start(&self);

    fn stop(&self);

    fn restart(&self);

    fn set_check_owner(&self, check_owner: Option<bool>);

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: DmaExtMemBKSize);

    #[cfg(pdma)]
    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool;

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool;
}

#[doc(hidden)]
pub trait RxRegisterAccess: RegisterAccess {
    #[cfg(gdma)]
    fn set_mem2mem_mode(&self, value: bool);

    fn peripheral_interrupt(&self) -> Option<Interrupt>;
    fn async_handler(&self) -> Option<InterruptHandler>;
}

#[doc(hidden)]
pub trait TxRegisterAccess: RegisterAccess {
    fn is_fifo_empty(&self) -> bool;

    fn set_auto_write_back(&self, enable: bool);

    fn last_dscr_address(&self) -> usize;

    fn peripheral_interrupt(&self) -> Option<Interrupt>;
    fn async_handler(&self) -> Option<InterruptHandler>;
}

#[doc(hidden)]
pub trait InterruptAccess<T: EnumSetType>: crate::private::Sealed {
    fn listen(&self, interrupts: impl Into<EnumSet<T>>) {
        self.enable_listen(interrupts.into(), true)
    }
    fn unlisten(&self, interrupts: impl Into<EnumSet<T>>) {
        self.enable_listen(interrupts.into(), false)
    }

    fn clear_all(&self) {
        self.clear(EnumSet::all());
    }

    fn enable_listen(&self, interrupts: EnumSet<T>, enable: bool);
    fn is_listening(&self) -> EnumSet<T>;
    fn clear(&self, interrupts: impl Into<EnumSet<T>>);
    fn pending_interrupts(&self) -> EnumSet<T>;
    fn waker(&self) -> &'static crate::asynch::AtomicWaker;

    fn is_async(&self) -> bool;
    fn set_async(&self, is_async: bool);
}

/// A DMA channel with RX and TX halves.
#[non_exhaustive]
pub struct Channel<'d, Dm, CH>
where
    Dm: DriverMode,
    CH: DmaChannel,
{
    /// The RX half of the channel.
    pub rx: ChannelRx<'d, Dm, CH::Rx>,
    /// The TX half of the channel.
    pub tx: ChannelTx<'d, Dm, CH::Tx>,
}

impl<'d, CH> Channel<'d, Blocking, CH>
where
    CH: DmaChannel,
{
    /// Creates a new DMA channel driver.
    #[instability::unstable]
    pub fn new(channel: impl Peripheral<P = CH>) -> Self {
        let (rx, tx) = unsafe {
            channel
                .clone_unchecked()
                .split_internal(crate::private::Internal)
        };
        Self {
            rx: ChannelRx::new(rx),
            tx: ChannelTx::new(tx),
        }
    }

    /// Sets the interrupt handler for RX and TX interrupts.
    #[instability::unstable]
    pub fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.rx.set_interrupt_handler(handler);
        self.tx.set_interrupt_handler(handler);
    }

    /// Listens for the given interrupts.
    pub fn listen(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.listen_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.listen_out(DmaTxInterrupt::Done),
            }
        }
    }

    /// Stops listening for the given interrupts.
    pub fn unlisten(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.unlisten_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.unlisten_out(DmaTxInterrupt::Done),
            }
        }
    }

    /// Returns the set of asserted interrupts.
    pub fn interrupts(&mut self) -> EnumSet<DmaInterrupt> {
        let mut res = EnumSet::new();
        if self.rx.is_done() {
            res.insert(DmaInterrupt::RxDone);
        }
        if self.tx.is_done() {
            res.insert(DmaInterrupt::TxDone);
        }
        res
    }

    /// Resets the given asserted interrupts.
    pub fn clear_interrupts(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.clear_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.clear_out(DmaTxInterrupt::Done),
            }
        }
    }

    /// Configures the DMA channel priority.
    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.tx.set_priority(priority);
        self.rx.set_priority(priority);
    }

    /// Converts a blocking channel into an async channel.
    pub fn into_async(self) -> Channel<'d, Async, CH> {
        Channel {
            rx: self.rx.into_async(),
            tx: self.tx.into_async(),
        }
    }
}

impl<'d, CH> Channel<'d, Async, CH>
where
    CH: DmaChannel,
{
    /// Converts an async channel into a blocking channel.
    pub fn into_blocking(self) -> Channel<'d, Blocking, CH> {
        Channel {
            rx: self.rx.into_blocking(),
            tx: self.tx.into_blocking(),
        }
    }
}

impl<'d, CH: DmaChannel> From<Channel<'d, Blocking, CH>> for Channel<'d, Async, CH> {
    fn from(channel: Channel<'d, Blocking, CH>) -> Self {
        channel.into_async()
    }
}

impl<'d, CH: DmaChannel> From<Channel<'d, Async, CH>> for Channel<'d, Blocking, CH> {
    fn from(channel: Channel<'d, Async, CH>) -> Self {
        channel.into_blocking()
    }
}

pub(crate) mod dma_private {
    use super::*;

    pub trait DmaSupport {
        /// Waits until the transfer is done. Depending on the peripheral this
        /// may involve checking the DMA channel, the peripheral, or both.
        /// Does not stop the transfer.
        fn peripheral_wait_dma(&mut self, is_rx: bool, is_tx: bool);

        /// Stops the peripheral and the DMA transfer.
        fn peripheral_dma_stop(&mut self);
    }

    pub trait DmaSupportTx: DmaSupport {
        type TX: Tx;

        fn tx(&mut self) -> &mut Self::TX;

        fn chain(&mut self) -> &mut DescriptorChain;
    }

    pub trait DmaSupportRx: DmaSupport {
        type RX: Rx;

        fn rx(&mut self) -> &mut Self::RX;

        fn chain(&mut self) -> &mut DescriptorChain;
    }
}

/// DMA transaction for TX-only transfers.
#[non_exhaustive]
#[must_use]
pub struct DmaTransferTx<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    instance: &'a mut I,
}

impl<'a, I> DmaTransferTx<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    pub(crate) fn new(instance: &'a mut I) -> Self {
        Self { instance }
    }

    /// Waits for the transfer to finish.
    pub fn wait(self) -> Result<(), DmaError> {
        self.instance.peripheral_wait_dma(false, true);

        if self
            .instance
            .tx()
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    /// Checks if the transfer is finished.
    pub fn is_done(&mut self) -> bool {
        self.instance.tx().is_done()
    }
}

impl<I> Drop for DmaTransferTx<'_, I>
where
    I: dma_private::DmaSupportTx,
{
    fn drop(&mut self) {
        self.instance.peripheral_wait_dma(true, false);
    }
}

2676#[non_exhaustive]
2682#[must_use]
2683pub struct DmaTransferRx<'a, I>
2684where
2685 I: dma_private::DmaSupportRx,
2686{
2687 instance: &'a mut I,
2688}

impl<'a, I> DmaTransferRx<'a, I>
where
    I: dma_private::DmaSupportRx,
{
    pub(crate) fn new(instance: &'a mut I) -> Self {
        Self { instance }
    }

    /// Waits for the transfer to finish.
    pub fn wait(self) -> Result<(), DmaError> {
        self.instance.peripheral_wait_dma(true, false);

        if self
            .instance
            .rx()
            .pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    /// Checks if the transfer is finished.
    pub fn is_done(&mut self) -> bool {
        self.instance.rx().is_done()
    }
}

impl<I> Drop for DmaTransferRx<'_, I>
where
    I: dma_private::DmaSupportRx,
{
    fn drop(&mut self) {
        self.instance.peripheral_wait_dma(true, false);
    }
}

/// DMA transaction for simultaneous TX and RX transfers.
///
/// # Safety
///
/// Never use [core::mem::forget] on an in-progress transfer.
#[non_exhaustive]
#[must_use]
pub struct DmaTransferRxTx<'a, I>
where
    I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
{
    instance: &'a mut I,
}

impl<'a, I> DmaTransferRxTx<'a, I>
where
    I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
{
    #[allow(dead_code)]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        Self { instance }
    }

    /// Waits for the transfer to finish.
    pub fn wait(self) -> Result<(), DmaError> {
        self.instance.peripheral_wait_dma(true, true);

        if self
            .instance
            .tx()
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
            || self
                .instance
                .rx()
                .pending_in_interrupts()
                .contains(DmaRxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    /// Checks if the transfer is finished in both directions.
    pub fn is_done(&mut self) -> bool {
        self.instance.tx().is_done() && self.instance.rx().is_done()
    }
}

impl<I> Drop for DmaTransferRxTx<'_, I>
where
    I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
{
    fn drop(&mut self) {
        self.instance.peripheral_wait_dma(true, true);
    }
}

/// DMA transaction for TX-only circular transfers.
///
/// # Safety
///
/// Never use [core::mem::forget] on an in-progress transfer.
#[non_exhaustive]
#[must_use]
pub struct DmaTransferTxCircular<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    instance: &'a mut I,
    state: TxCircularState,
}

impl<'a, I> DmaTransferTxCircular<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    #[allow(unused)]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        let state = TxCircularState::new(instance.chain());
        Self { instance, state }
    }

    /// Returns the number of bytes that can currently be pushed.
    pub fn available(&mut self) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        Ok(self.state.available)
    }

    /// Pushes bytes into the DMA transaction.
    pub fn push(&mut self, data: &[u8]) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        self.state.push(data)
    }

    /// Pushes bytes into the DMA buffer via the given closure.
    ///
    /// The closure *must* return the actual number of bytes written. It
    /// *might* get called with a slice that is smaller than the total
    /// available buffer.
    pub fn push_with(&mut self, f: impl FnOnce(&mut [u8]) -> usize) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        self.state.push_with(f)
    }

    /// Stops the DMA transfer.
    pub fn stop(self) -> Result<(), DmaError> {
        self.instance.peripheral_dma_stop();

        if self
            .instance
            .tx()
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }
}

impl<I> Drop for DmaTransferTxCircular<'_, I>
where
    I: dma_private::DmaSupportTx,
{
    fn drop(&mut self) {
        self.instance.peripheral_dma_stop();
    }
}

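// Usage sketch for DmaTransferTxCircular: keep the ring buffer fed.
// `driver.start_dma_write_circular(..)` and `has_more_samples()` are
// hypothetical; `available`, `push` and `stop` are defined above.
//
//     let mut transfer = driver.start_dma_write_circular(&mut tx_buffer)?;
//     while has_more_samples() {
//         if transfer.available()? >= chunk.len() {
//             transfer.push(&chunk)?;
//         }
//     }
//     transfer.stop()?;
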
/// DMA transaction for RX-only circular transfers.
///
/// # Safety
///
/// Never use [core::mem::forget] on an in-progress transfer.
#[non_exhaustive]
#[must_use]
pub struct DmaTransferRxCircular<'a, I>
where
    I: dma_private::DmaSupportRx,
{
    instance: &'a mut I,
    state: RxCircularState,
}

impl<'a, I> DmaTransferRxCircular<'a, I>
where
    I: dma_private::DmaSupportRx,
{
    #[allow(unused)]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        let state = RxCircularState::new(instance.chain());
        Self { instance, state }
    }

    /// Returns the number of bytes that can currently be popped.
    ///
    /// Call this before trying to [`pop`][Self::pop] data.
    pub fn available(&mut self) -> Result<usize, DmaError> {
        self.state.update()?;
        Ok(self.state.available)
    }

    /// Pops available data into the given buffer.
    ///
    /// The amount of available data should be checked beforehand via
    /// [`available`][Self::available]; the buffer is expected to be able to
    /// hold all of it, otherwise this returns an error.
    pub fn pop(&mut self, data: &mut [u8]) -> Result<usize, DmaError> {
        self.state.update()?;
        self.state.pop(data)
    }
}

impl<I> Drop for DmaTransferRxCircular<'_, I>
where
    I: dma_private::DmaSupportRx,
{
    fn drop(&mut self) {
        self.instance.peripheral_dma_stop();
    }
}

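// Usage sketch for DmaTransferRxCircular: drain data as it arrives.
// `driver.start_dma_read_circular(..)`, `process(..)` and `BUF_LEN` are
// hypothetical; `available` and `pop` are defined above.
//
//     let mut transfer = driver.start_dma_read_circular(&mut rx_buffer)?;
//     let mut scratch = [0u8; BUF_LEN]; // BUF_LEN: large enough for the ring
//     loop {
//         if transfer.available()? > 0 {
//             let read = transfer.pop(&mut scratch)?;
//             process(&scratch[..read]);
//         }
//     }
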
pub(crate) mod asynch {
    use core::task::Poll;

    use super::*;

    #[must_use = "futures do nothing unless you `.await` or poll them"]
    pub struct DmaTxFuture<'a, TX>
    where
        TX: Tx,
    {
        pub(crate) tx: &'a mut TX,
    }

    impl<'a, TX> DmaTxFuture<'a, TX>
    where
        TX: Tx,
    {
        pub fn new(tx: &'a mut TX) -> Self {
            Self { tx }
        }
    }

    impl<TX> core::future::Future for DmaTxFuture<'_, TX>
    where
        TX: Tx,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self.tx.is_done() {
                self.tx.clear_interrupts();
                Poll::Ready(Ok(()))
            } else if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::DescriptorError)
            {
                self.tx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.tx.waker().register(cx.waker());
                self.tx
                    .listen_out(DmaTxInterrupt::TotalEof | DmaTxInterrupt::DescriptorError);
                Poll::Pending
            }
        }
    }

    impl<TX> Drop for DmaTxFuture<'_, TX>
    where
        TX: Tx,
    {
        fn drop(&mut self) {
            self.tx
                .unlisten_out(DmaTxInterrupt::TotalEof | DmaTxInterrupt::DescriptorError);
        }
    }

    #[must_use = "futures do nothing unless you `.await` or poll them"]
    pub struct DmaRxFuture<'a, RX>
    where
        RX: Rx,
    {
        pub(crate) rx: &'a mut RX,
    }

    impl<'a, RX> DmaRxFuture<'a, RX>
    where
        RX: Rx,
    {
        pub fn new(rx: &'a mut RX) -> Self {
            Self { rx }
        }
    }

    impl<RX> core::future::Future for DmaRxFuture<'_, RX>
    where
        RX: Rx,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self.rx.is_done() {
                self.rx.clear_interrupts();
                Poll::Ready(Ok(()))
            } else if !self.rx.pending_in_interrupts().is_disjoint(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            ) {
                self.rx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.rx.waker().register(cx.waker());
                self.rx.listen_in(
                    DmaRxInterrupt::SuccessfulEof
                        | DmaRxInterrupt::DescriptorError
                        | DmaRxInterrupt::DescriptorEmpty
                        | DmaRxInterrupt::ErrorEof,
                );
                Poll::Pending
            }
        }
    }

    impl<RX> Drop for DmaRxFuture<'_, RX>
    where
        RX: Rx,
    {
        fn drop(&mut self) {
            self.rx.unlisten_in(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            );
        }
    }

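    // Usage sketch: drivers construct these futures over their channel
    // halves and await them. `rx` here is a hypothetical value implementing
    // `Rx`.
    //
    //     DmaRxFuture::new(&mut rx).await?;
    //     // resolves Ok(()) after SuccessfulEof, or
    //     // Err(DmaError::DescriptorError) on a descriptor error/early EOF
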
    #[cfg(any(i2s0, i2s1))]
    pub struct DmaTxDoneChFuture<'a, TX>
    where
        TX: Tx,
    {
        pub(crate) tx: &'a mut TX,
        _a: (),
    }

    #[cfg(any(i2s0, i2s1))]
    impl<'a, TX> DmaTxDoneChFuture<'a, TX>
    where
        TX: Tx,
    {
        pub fn new(tx: &'a mut TX) -> Self {
            Self { tx, _a: () }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<TX> core::future::Future for DmaTxDoneChFuture<'_, TX>
    where
        TX: Tx,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::Done)
            {
                self.tx.clear_out(DmaTxInterrupt::Done);
                Poll::Ready(Ok(()))
            } else if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::DescriptorError)
            {
                self.tx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.tx.waker().register(cx.waker());
                self.tx
                    .listen_out(DmaTxInterrupt::Done | DmaTxInterrupt::DescriptorError);
                Poll::Pending
            }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<TX> Drop for DmaTxDoneChFuture<'_, TX>
    where
        TX: Tx,
    {
        fn drop(&mut self) {
            self.tx
                .unlisten_out(DmaTxInterrupt::Done | DmaTxInterrupt::DescriptorError);
        }
    }

    #[cfg(any(i2s0, i2s1))]
    pub struct DmaRxDoneChFuture<'a, RX>
    where
        RX: Rx,
    {
        pub(crate) rx: &'a mut RX,
        _a: (),
    }

    #[cfg(any(i2s0, i2s1))]
    impl<'a, RX> DmaRxDoneChFuture<'a, RX>
    where
        RX: Rx,
    {
        pub fn new(rx: &'a mut RX) -> Self {
            Self { rx, _a: () }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<RX> core::future::Future for DmaRxDoneChFuture<'_, RX>
    where
        RX: Rx,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self
                .rx
                .pending_in_interrupts()
                .contains(DmaRxInterrupt::Done)
            {
                self.rx.clear_in(DmaRxInterrupt::Done);
                Poll::Ready(Ok(()))
            } else if !self.rx.pending_in_interrupts().is_disjoint(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            ) {
                self.rx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.rx.waker().register(cx.waker());
                self.rx.listen_in(
                    DmaRxInterrupt::Done
                        | DmaRxInterrupt::DescriptorError
                        | DmaRxInterrupt::DescriptorEmpty
                        | DmaRxInterrupt::ErrorEof,
                );
                Poll::Pending
            }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<RX> Drop for DmaRxDoneChFuture<'_, RX>
    where
        RX: Rx,
    {
        fn drop(&mut self) {
            self.rx.unlisten_in(
                DmaRxInterrupt::Done
                    | DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            );
        }
    }

    pub(super) fn handle_in_interrupt<CH: DmaChannelExt>() {
        let rx = CH::rx_interrupts();

        if !rx.is_async() {
            return;
        }

        // If an error interrupt is pending, the future must resolve with an
        // Err, so stop listening for everything and wake the task. (Note the
        // negation: `is_disjoint` is true when *no* error bit is pending,
        // matching the error check in `DmaRxFuture::poll`.)
        if !rx.pending_interrupts().is_disjoint(
            DmaRxInterrupt::DescriptorError
                | DmaRxInterrupt::DescriptorEmpty
                | DmaRxInterrupt::ErrorEof,
        ) {
            rx.unlisten(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof
                    | DmaRxInterrupt::SuccessfulEof
                    | DmaRxInterrupt::Done,
            );
            rx.waker().wake()
        }

        if rx
            .pending_interrupts()
            .contains(DmaRxInterrupt::SuccessfulEof)
        {
            rx.unlisten(DmaRxInterrupt::SuccessfulEof);
            rx.waker().wake()
        }

        if rx.pending_interrupts().contains(DmaRxInterrupt::Done) {
            rx.unlisten(DmaRxInterrupt::Done);
            rx.waker().wake()
        }
    }

    pub(super) fn handle_out_interrupt<CH: DmaChannelExt>() {
        let tx = CH::tx_interrupts();

        if !tx.is_async() {
            return;
        }

        if tx
            .pending_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            tx.unlisten(
                DmaTxInterrupt::DescriptorError | DmaTxInterrupt::TotalEof | DmaTxInterrupt::Done,
            );
            tx.waker().wake()
        }

        if tx.pending_interrupts().contains(DmaTxInterrupt::TotalEof)
            && tx.is_listening().contains(DmaTxInterrupt::TotalEof)
        {
            tx.unlisten(DmaTxInterrupt::TotalEof);
            tx.waker().wake()
        }

        if tx.pending_interrupts().contains(DmaTxInterrupt::Done) {
            tx.unlisten(DmaTxInterrupt::Done);
            tx.waker().wake()
        }
    }
}