#![doc = crate::before_snippet!()]
#![cfg_attr(pdma, doc = "let dma_channel = peripherals.DMA_SPI2;")]
#![cfg_attr(gdma, doc = "let dma_channel = peripherals.DMA_CH0;")]
use core::{cmp::min, fmt::Debug, marker::PhantomData, sync::atomic::compiler_fence};

use enumset::{EnumSet, EnumSetType};

pub use self::buffers::*;
#[cfg(gdma)]
pub use self::gdma::*;
#[cfg(any(gdma, esp32s2))]
pub use self::m2m::*;
#[cfg(pdma)]
pub use self::pdma::*;
use crate::{
    Async,
    Blocking,
    DriverMode,
    interrupt::InterruptHandler,
    peripherals::Interrupt,
    soc::{is_slice_in_dram, is_valid_memory_address, is_valid_ram_address},
    system,
    system::Cpu,
};

trait Word: crate::private::Sealed {}

macro_rules! impl_word {
    ($w:ty) => {
        impl $crate::private::Sealed for $w {}
        impl Word for $w {}
    };
}

impl_word!(u8);
impl_word!(u16);
impl_word!(u32);
impl_word!(i8);
impl_word!(i16);
impl_word!(i32);

impl<W, const S: usize> crate::private::Sealed for [W; S] where W: Word {}

impl<W, const S: usize> crate::private::Sealed for &[W; S] where W: Word {}

impl<W> crate::private::Sealed for &[W] where W: Word {}

impl<W> crate::private::Sealed for &mut [W] where W: Word {}

pub unsafe trait ReadBuffer {
    unsafe fn read_buffer(&self) -> (*const u8, usize);
}

unsafe impl<W, const S: usize> ReadBuffer for [W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(self))
    }
}

unsafe impl<W, const S: usize> ReadBuffer for &[W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W, const S: usize> ReadBuffer for &mut [W; S]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> ReadBuffer for &[W]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> ReadBuffer for &mut [W]
where
    W: Word,
{
    unsafe fn read_buffer(&self) -> (*const u8, usize) {
        (self.as_ptr() as *const u8, core::mem::size_of_val(*self))
    }
}

pub unsafe trait WriteBuffer {
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize);
}

unsafe impl<W, const S: usize> WriteBuffer for [W; S]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(self))
    }
}

unsafe impl<W, const S: usize> WriteBuffer for &mut [W; S]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(*self))
    }
}

unsafe impl<W> WriteBuffer for &mut [W]
where
    W: Word,
{
    unsafe fn write_buffer(&mut self) -> (*mut u8, usize) {
        (self.as_mut_ptr() as *mut u8, core::mem::size_of_val(*self))
    }
}
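
// Contract illustration (a sketch, not part of the driver): the pointer/length
// pair returned by these traits always describes the buffer in *bytes*,
// regardless of the word type.
//
//     let words: [u16; 4] = [0xAA55; 4];
//     let (_ptr, len) = unsafe { words.read_buffer() };
//     assert_eq!(len, 8); // 4 words * 2 bytes = size_of_val(&words)
//
// The traits are `unsafe` because the caller must keep the buffer alive and
// un-moved for as long as the DMA may access it.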

bitfield::bitfield! {
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub struct DmaDescriptorFlags(u32);

    u16;

    pub size, set_size: 11, 0;

    pub length, set_length: 23, 12;

    pub suc_eof, set_suc_eof: 30;

    pub owner, set_owner: 31;
}

impl Debug for DmaDescriptorFlags {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("DmaDescriptorFlags")
            .field("size", &self.size())
            .field("length", &self.length())
            .field("suc_eof", &self.suc_eof())
            .field("owner", &(if self.owner() { "DMA" } else { "CPU" }))
            .finish()
    }
}

#[cfg(feature = "defmt")]
impl defmt::Format for DmaDescriptorFlags {
    fn format(&self, fmt: defmt::Formatter<'_>) {
        defmt::write!(
            fmt,
            "DmaDescriptorFlags {{ size: {}, length: {}, suc_eof: {}, owner: {} }}",
            self.size(),
            self.length(),
            self.suc_eof(),
            if self.owner() { "DMA" } else { "CPU" }
        );
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(C)]
pub struct DmaDescriptor {
    pub flags: DmaDescriptorFlags,
    pub buffer: *mut u8,
    pub next: *mut DmaDescriptor,
}

impl DmaDescriptor {
    pub const EMPTY: Self = Self {
        flags: DmaDescriptorFlags(0),
        buffer: core::ptr::null_mut(),
        next: core::ptr::null_mut(),
    };

    pub fn reset_for_rx(&mut self) {
        self.set_owner(Owner::Dma);
        self.set_suc_eof(false);
        self.set_length(0);
    }

    pub fn reset_for_tx(&mut self, set_eof: bool) {
        self.set_owner(Owner::Dma);
        self.set_suc_eof(set_eof);
    }

    pub fn set_size(&mut self, len: usize) {
        self.flags.set_size(len as u16)
    }

    pub fn set_length(&mut self, len: usize) {
        self.flags.set_length(len as u16)
    }

    pub fn size(&self) -> usize {
        self.flags.size() as usize
    }

    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.flags.length() as usize
    }

    pub fn set_suc_eof(&mut self, suc_eof: bool) {
        self.flags.set_suc_eof(suc_eof)
    }

    pub fn set_owner(&mut self, owner: Owner) {
        let owner = match owner {
            Owner::Cpu => false,
            Owner::Dma => true,
        };
        self.flags.set_owner(owner)
    }

    pub fn owner(&self) -> Owner {
        match self.flags.owner() {
            false => Owner::Cpu,
            true => Owner::Dma,
        }
    }
}
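
// Note the size/length distinction, which is easy to trip over: `size` is the
// capacity of the attached buffer in bytes, `length` is how many of those
// bytes are valid. A sketch of filling one descriptor by hand (`buf` and
// `message_len` are made-up names for the example):
//
//     let mut desc = DmaDescriptor::EMPTY;
//     desc.buffer = buf.as_mut_ptr(); // some DMA-capable byte buffer
//     desc.set_size(buf.len());       // capacity of that buffer
//     desc.set_length(message_len);   // bytes to actually send
//     desc.reset_for_tx(true);        // give it to the DMA, mark end-of-frame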

unsafe impl Send for DmaDescriptor {}

mod buffers;
#[cfg(gdma)]
mod gdma;
#[cfg(any(gdma, esp32s2))]
mod m2m;
#[cfg(pdma)]
mod pdma;

#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaInterrupt {
    RxDone,
    TxDone,
}

#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaTxInterrupt {
    TotalEof,
    DescriptorError,
    Eof,
    Done,
}

#[derive(Debug, EnumSetType)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaRxInterrupt {
    DescriptorEmpty,
    DescriptorError,
    ErrorEof,
    SuccessfulEof,
    Done,
}

pub const CHUNK_SIZE: usize = 4092;

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_buffers {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_buffers_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };
    ($size:expr) => {
        $crate::dma_buffers_chunk_size!($size, $crate::dma::CHUNK_SIZE)
    };
}
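
// Usage sketch (sizes are arbitrary): one call statically allocates both
// buffers and their descriptor lists.
//
//     let (rx_buffer, rx_descriptors, tx_buffer, tx_descriptors) =
//         dma_buffers!(32000, 32000);
//
// With the default CHUNK_SIZE of 4092 bytes, each 32000-byte buffer is covered
// by ceil(32000 / 4092) == 8 descriptors.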

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_circular_buffers {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_circular_buffers_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_circular_buffers_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_descriptors {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_descriptors_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_descriptors_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_circular_descriptors {
    ($rx_size:expr, $tx_size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($rx_size, $tx_size, $crate::dma::CHUNK_SIZE)
    };

    ($size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($size, $size, $crate::dma::CHUNK_SIZE)
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! declare_aligned_dma_buffer {
    ($name:ident, $size:expr) => {
        static mut $name: [u32; ($size + 3) / 4] = [0; ($size + 3) / 4];
    };
}
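
// The `($size + 3) / 4` above rounds the byte count up to whole `u32` words so
// the backing storage is 4-byte aligned. For example, a requested size of 10
// bytes yields (10 + 3) / 4 == 3 words, i.e. 12 bytes of storage, of which
// `as_mut_byte_array!` below re-exposes exactly the first 10 as a `[u8; 10]`.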

#[doc(hidden)]
#[macro_export]
macro_rules! as_mut_byte_array {
    ($name:expr, $size:expr) => {
        unsafe { &mut *($name.as_mut_ptr() as *mut [u8; $size]) }
    };
}
pub use as_mut_byte_array;

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_buffers_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_buffers_impl!($rx_size, $tx_size, $chunk_size, is_circular = false)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_buffers_chunk_size!($size, $size, $chunk_size)
    };
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_circular_buffers_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_buffers_impl!($rx_size, $tx_size, $chunk_size, is_circular = true)
    }};

    ($size:expr, $chunk_size:expr) => {{
        $crate::dma_circular_buffers_chunk_size!($size, $size, $chunk_size)
    }};
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_descriptors_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_descriptors_impl!($rx_size, $tx_size, $chunk_size, is_circular = false)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_descriptors_chunk_size!($size, $size, $chunk_size)
    };
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_circular_descriptors_chunk_size {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr) => {{
        $crate::dma_descriptors_impl!($rx_size, $tx_size, $chunk_size, is_circular = true)
    }};

    ($size:expr, $chunk_size:expr) => {
        $crate::dma_circular_descriptors_chunk_size!($size, $size, $chunk_size)
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_buffers_impl {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        let rx = $crate::dma_buffers_impl!($rx_size, $chunk_size, is_circular = $circular);
        let tx = $crate::dma_buffers_impl!($tx_size, $chunk_size, is_circular = $circular);
        (rx.0, rx.1, tx.0, tx.1)
    }};

    ($size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        $crate::declare_aligned_dma_buffer!(BUFFER, $size);

        unsafe {
            (
                $crate::dma::as_mut_byte_array!(BUFFER, $size),
                $crate::dma_descriptors_impl!($size, $chunk_size, is_circular = $circular),
            )
        }
    }};

    ($size:expr, is_circular = $circular:tt) => {
        $crate::dma_buffers_impl!(
            $size,
            $crate::dma::BurstConfig::DEFAULT.max_compatible_chunk_size(),
            is_circular = $circular
        )
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_descriptors_impl {
    ($rx_size:expr, $tx_size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        let rx = $crate::dma_descriptors_impl!($rx_size, $chunk_size, is_circular = $circular);
        let tx = $crate::dma_descriptors_impl!($tx_size, $chunk_size, is_circular = $circular);
        (rx, tx)
    }};

    ($size:expr, $chunk_size:expr, is_circular = $circular:tt) => {{
        const COUNT: usize =
            $crate::dma_descriptor_count!($size, $chunk_size, is_circular = $circular);

        static mut DESCRIPTORS: [$crate::dma::DmaDescriptor; COUNT] =
            [$crate::dma::DmaDescriptor::EMPTY; COUNT];

        unsafe { &mut DESCRIPTORS }
    }};
}

#[doc(hidden)]
#[macro_export]
macro_rules! dma_descriptor_count {
    ($size:expr, $chunk_size:expr, is_circular = $is_circular:tt) => {{
        const {
            ::core::assert!($chunk_size <= 4095, "chunk size must be <= 4095");
            ::core::assert!($chunk_size > 0, "chunk size must be > 0");
        }

        if $size == 0 {
            0
        } else {
            $crate::dma::descriptor_count($size, $chunk_size, $is_circular)
        }
    }};
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_tx_buffer {
    ($tx_size:expr) => {{
        let (tx_buffer, tx_descriptors) = $crate::dma_buffers_impl!($tx_size, is_circular = false);

        $crate::dma::DmaTxBuf::new(tx_descriptors, tx_buffer)
    }};
}
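
// Usage sketch: unlike `dma_buffers!`, this expands to a `Result`, because
// `DmaTxBuf::new` validates the descriptors against the buffer instead of
// panicking.
//
//     let tx_buf = dma_tx_buffer!(4096)?;
//     // `tx_buf` can now be handed to a driver expecting a `DmaTxBuf`.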

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_rx_stream_buffer {
    ($rx_size:expr) => {
        $crate::dma_rx_stream_buffer!($rx_size, 4095)
    };
    ($rx_size:expr, $chunk_size:expr) => {{
        let (buffer, descriptors) =
            $crate::dma_buffers_impl!($rx_size, $chunk_size, is_circular = false);

        $crate::dma::DmaRxStreamBuf::new(descriptors, buffer).unwrap()
    }};
}

#[doc = crate::before_snippet!()]
#[macro_export]
macro_rules! dma_loop_buffer {
    ($size:expr) => {{
        const {
            ::core::assert!($size <= 4095, "size must be <= 4095");
            ::core::assert!($size > 0, "size must be > 0");
        }

        let (buffer, descriptors) = $crate::dma_buffers_impl!($size, $size, is_circular = false);

        $crate::dma::DmaLoopBuf::new(&mut descriptors[0], buffer).unwrap()
    }};
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaError {
    InvalidAlignment(DmaAlignmentError),
    OutOfDescriptors,
    DescriptorError,
    Overflow,
    BufferTooSmall,
    UnsupportedMemoryRegion,
    InvalidChunkSize,
    Late,
}

impl From<DmaBufError> for DmaError {
    fn from(error: DmaBufError) -> Self {
        match error {
            DmaBufError::InsufficientDescriptors => DmaError::OutOfDescriptors,
            DmaBufError::UnsupportedMemoryRegion => DmaError::UnsupportedMemoryRegion,
            DmaBufError::InvalidAlignment(err) => DmaError::InvalidAlignment(err),
            DmaBufError::InvalidChunkSize => DmaError::InvalidChunkSize,
            DmaBufError::BufferTooSmall => DmaError::BufferTooSmall,
        }
    }
}

#[cfg(gdma)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaPriority {
    Priority0 = 0,
    Priority1 = 1,
    Priority2 = 2,
    Priority3 = 3,
    Priority4 = 4,
    Priority5 = 5,
    Priority6 = 6,
    Priority7 = 7,
    Priority8 = 8,
    Priority9 = 9,
}

#[cfg(pdma)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaPriority {
    Priority0 = 0,
}

#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[doc(hidden)]
pub enum DmaPeripheral {
    Spi2 = 0,
    #[cfg(any(pdma, esp32s3))]
    Spi3 = 1,
    #[cfg(any(esp32c2, esp32c6, esp32h2))]
    Mem2Mem1 = 1,
    #[cfg(any(esp32c3, esp32c6, esp32h2, esp32s3))]
    Uhci0 = 2,
    #[cfg(any(esp32, esp32s2, esp32c3, esp32c6, esp32h2, esp32s3))]
    I2s0 = 3,
    #[cfg(any(esp32, esp32s3))]
    I2s1 = 4,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem4 = 4,
    #[cfg(esp32s3)]
    LcdCam = 5,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem5 = 5,
    #[cfg(not(esp32c2))]
    Aes = 6,
    #[cfg(any(esp32s2, gdma))]
    Sha = 7,
    #[cfg(any(esp32c3, esp32c6, esp32h2, esp32s3))]
    Adc = 8,
    #[cfg(esp32s3)]
    Rmt = 9,
    #[cfg(parl_io)]
    ParlIo = 9,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem10 = 10,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem11 = 11,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem12 = 12,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem13 = 13,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem14 = 14,
    #[cfg(any(esp32c6, esp32h2))]
    Mem2Mem15 = 15,
}

#[derive(PartialEq, PartialOrd)]
pub enum Owner {
    Cpu = 0,
    Dma = 1,
}

impl From<u32> for Owner {
    fn from(value: u32) -> Self {
        match value {
            0 => Owner::Cpu,
            _ => Owner::Dma,
        }
    }
}

#[doc(hidden)]
#[instability::unstable]
pub trait DmaEligible {
    type Dma: DmaChannel;

    fn dma_peripheral(&self) -> DmaPeripheral;
}

#[doc(hidden)]
#[derive(Debug)]
pub struct DescriptorChain {
    pub(crate) descriptors: &'static mut [DmaDescriptor],
    chunk_size: usize,
}

impl DescriptorChain {
    pub fn new(descriptors: &'static mut [DmaDescriptor]) -> Self {
        Self::new_with_chunk_size(descriptors, CHUNK_SIZE)
    }

    pub fn new_with_chunk_size(
        descriptors: &'static mut [DmaDescriptor],
        chunk_size: usize,
    ) -> Self {
        Self {
            descriptors,
            chunk_size,
        }
    }

    pub fn first_mut(&mut self) -> *mut DmaDescriptor {
        self.descriptors.as_mut_ptr()
    }

    pub fn first(&self) -> *const DmaDescriptor {
        self.descriptors.as_ptr()
    }

    pub fn last_mut(&mut self) -> *mut DmaDescriptor {
        self.descriptors.last_mut().unwrap()
    }

    pub fn last(&self) -> *const DmaDescriptor {
        self.descriptors.last().unwrap()
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill_for_rx(
        &mut self,
        circular: bool,
        data: *mut u8,
        len: usize,
    ) -> Result<(), DmaError> {
        self.fill(circular, data, len, |desc, _| {
            desc.reset_for_rx();
        })
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill_for_tx(
        &mut self,
        is_circular: bool,
        data: *const u8,
        len: usize,
    ) -> Result<(), DmaError> {
        self.fill(is_circular, data.cast_mut(), len, |desc, chunk_size| {
            desc.reset_for_tx(desc.next.is_null() || is_circular);
            desc.set_length(chunk_size);
        })
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub fn fill(
        &mut self,
        circular: bool,
        data: *mut u8,
        len: usize,
        prepare_descriptor: impl Fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaError> {
        if !is_valid_ram_address(self.first() as usize)
            || !is_valid_ram_address(self.last() as usize)
            || !is_valid_memory_address(data as usize)
            || !is_valid_memory_address(unsafe { data.add(len) } as usize)
        {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        let max_chunk_size = if circular && len <= self.chunk_size * 2 {
            if len <= 3 {
                return Err(DmaError::BufferTooSmall);
            }
            len / 3 + len % 3
        } else {
            self.chunk_size
        };

        DescriptorSet::set_up_buffer_ptrs(
            unsafe { core::slice::from_raw_parts_mut(data, len) },
            self.descriptors,
            max_chunk_size,
            circular,
        )?;
        DescriptorSet::set_up_descriptors(
            self.descriptors,
            len,
            max_chunk_size,
            circular,
            prepare_descriptor,
        )?;

        Ok(())
    }
}

pub const fn descriptor_count(buffer_size: usize, chunk_size: usize, is_circular: bool) -> usize {
    if is_circular && buffer_size <= chunk_size * 2 {
        return 3;
    }

    if buffer_size < chunk_size {
        return 1;
    }

    buffer_size.div_ceil(chunk_size)
}
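
// Worked examples of the arithmetic above, using the default chunk size:
//
//     descriptor_count(4092, 4092, false)  == 1 // fits exactly one chunk
//     descriptor_count(10000, 4092, false) == 3 // ceil(10000 / 4092)
//     descriptor_count(8000, 4092, true)   == 3 // circular buffers no larger
//                                               // than two chunks always get
//                                               // three descriptors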

#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
struct DescriptorSet<'a> {
    descriptors: &'a mut [DmaDescriptor],
}

impl<'a> DescriptorSet<'a> {
    fn new(descriptors: &'a mut [DmaDescriptor]) -> Result<Self, DmaBufError> {
        if !is_slice_in_dram(descriptors) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        descriptors.fill(DmaDescriptor::EMPTY);

        Ok(unsafe { Self::new_unchecked(descriptors) })
    }

    unsafe fn new_unchecked(descriptors: &'a mut [DmaDescriptor]) -> Self {
        Self { descriptors }
    }

    fn into_inner(self) -> &'a mut [DmaDescriptor] {
        self.descriptors
    }

    fn head(&mut self) -> *mut DmaDescriptor {
        self.descriptors.as_mut_ptr()
    }

    fn linked_iter(&self) -> impl Iterator<Item = &DmaDescriptor> {
        let mut was_last = false;
        self.descriptors.iter().take_while(move |d| {
            if was_last {
                false
            } else {
                was_last = d.next.is_null();
                true
            }
        })
    }

    fn linked_iter_mut(&mut self) -> impl Iterator<Item = &mut DmaDescriptor> + use<'_> {
        let mut was_last = false;
        self.descriptors.iter_mut().take_while(move |d| {
            if was_last {
                false
            } else {
                was_last = d.next.is_null();
                true
            }
        })
    }

    fn link_with_buffer(
        &mut self,
        buffer: &mut [u8],
        chunk_size: usize,
    ) -> Result<(), DmaBufError> {
        Self::set_up_buffer_ptrs(buffer, self.descriptors, chunk_size, false)
    }

    fn set_length(
        &mut self,
        len: usize,
        chunk_size: usize,
        prepare: fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaBufError> {
        Self::set_up_descriptors(self.descriptors, len, chunk_size, false, prepare)
    }

    fn set_rx_length(&mut self, len: usize, chunk_size: usize) -> Result<(), DmaBufError> {
        self.set_length(len, chunk_size, |desc, chunk_size| {
            desc.set_size(chunk_size);
        })
    }

    fn set_tx_length(&mut self, len: usize, chunk_size: usize) -> Result<(), DmaBufError> {
        self.set_length(len, chunk_size, |desc, chunk_size| {
            desc.set_length(chunk_size);
        })
    }

    fn descriptors_for_buffer_len(
        descriptors: &mut [DmaDescriptor],
        len: usize,
        chunk_size: usize,
        is_circular: bool,
    ) -> Result<&mut [DmaDescriptor], DmaBufError> {
        let required_descriptors = descriptor_count(len, chunk_size, is_circular);
        if descriptors.len() < required_descriptors {
            return Err(DmaBufError::InsufficientDescriptors);
        }
        Ok(&mut descriptors[..required_descriptors])
    }

    fn set_up_descriptors(
        descriptors: &mut [DmaDescriptor],
        len: usize,
        chunk_size: usize,
        is_circular: bool,
        prepare: impl Fn(&mut DmaDescriptor, usize),
    ) -> Result<(), DmaBufError> {
        let descriptors =
            Self::descriptors_for_buffer_len(descriptors, len, chunk_size, is_circular)?;

        let mut next = if is_circular {
            descriptors.as_mut_ptr()
        } else {
            core::ptr::null_mut()
        };
        for desc in descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;
        }

        let mut remaining_length = len;
        for desc in descriptors.iter_mut() {
            let chunk_size = min(chunk_size, remaining_length);
            prepare(desc, chunk_size);
            remaining_length -= chunk_size;
        }
        debug_assert_eq!(remaining_length, 0);

        Ok(())
    }

    fn set_up_buffer_ptrs(
        buffer: &mut [u8],
        descriptors: &mut [DmaDescriptor],
        chunk_size: usize,
        is_circular: bool,
    ) -> Result<(), DmaBufError> {
        let descriptors =
            Self::descriptors_for_buffer_len(descriptors, buffer.len(), chunk_size, is_circular)?;

        let chunks = buffer.chunks_mut(chunk_size);
        for (desc, chunk) in descriptors.iter_mut().zip(chunks) {
            desc.set_size(chunk.len());
            desc.buffer = chunk.as_mut_ptr();
        }

        Ok(())
    }
}

#[cfg(psram_dma)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DmaExtMemBKSize {
    Size16 = 0,
    Size32 = 1,
    Size64 = 2,
}

#[cfg(psram_dma)]
impl From<ExternalBurstConfig> for DmaExtMemBKSize {
    fn from(size: ExternalBurstConfig) -> Self {
        match size {
            ExternalBurstConfig::Size16 => DmaExtMemBKSize::Size16,
            ExternalBurstConfig::Size32 => DmaExtMemBKSize::Size32,
            ExternalBurstConfig::Size64 => DmaExtMemBKSize::Size64,
        }
    }
}

pub(crate) struct TxCircularState {
    write_offset: usize,
    write_descr_ptr: *mut DmaDescriptor,
    pub(crate) available: usize,
    last_seen_handled_descriptor_ptr: *mut DmaDescriptor,
    buffer_start: *const u8,
    buffer_len: usize,

    first_desc_ptr: *mut DmaDescriptor,
}

impl TxCircularState {
    pub(crate) fn new(chain: &mut DescriptorChain) -> Self {
        Self {
            write_offset: 0,
            write_descr_ptr: chain.first_mut(),
            available: 0,
            last_seen_handled_descriptor_ptr: chain.first_mut(),
            buffer_start: chain.descriptors[0].buffer as _,
            buffer_len: chain.descriptors.iter().map(|d| d.len()).sum(),

            first_desc_ptr: chain.first_mut(),
        }
    }

    pub(crate) fn update<Dm, CH>(&mut self, channel: &ChannelTx<Dm, CH>) -> Result<(), DmaError>
    where
        Dm: DriverMode,
        CH: DmaTxChannel,
    {
        if channel
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::Eof)
        {
            channel.clear_out(DmaTxInterrupt::Eof);

            let mut current = self.last_seen_handled_descriptor_ptr;
            loop {
                let descr = unsafe { current.read_volatile() };
                if descr.owner() == Owner::Cpu {
                    current = descr.next;
                } else {
                    break;
                }

                if core::ptr::eq(current, self.last_seen_handled_descriptor_ptr) {
                    return Err(DmaError::Late);
                }
            }

            let descr_address = channel.last_out_dscr_address() as *mut DmaDescriptor;

            let mut ptr = self.last_seen_handled_descriptor_ptr;
            if descr_address >= self.last_seen_handled_descriptor_ptr {
                unsafe {
                    while ptr < descr_address {
                        let dw0 = ptr.read_volatile();
                        self.available += dw0.len();
                        ptr = ptr.offset(1);
                    }
                }
            } else {
                unsafe {
                    while !((*ptr).next.is_null()
                        || core::ptr::eq((*ptr).next, self.first_desc_ptr))
                    {
                        let dw0 = ptr.read_volatile();
                        self.available += dw0.len();
                        ptr = ptr.offset(1);
                    }

                    let dw0 = ptr.read_volatile();
                    self.available += dw0.len();

                    if core::ptr::eq((*ptr).next, self.first_desc_ptr) {
                        ptr = self.first_desc_ptr;
                        while ptr < descr_address {
                            let dw0 = ptr.read_volatile();
                            self.available += dw0.len();
                            ptr = ptr.offset(1);
                        }
                    }
                }
            }

            if self.available >= self.buffer_len {
                unsafe {
                    let dw0 = self.write_descr_ptr.read_volatile();
                    let segment_len = dw0.len();
                    let next_descriptor = dw0.next;
                    self.available -= segment_len;
                    self.write_offset = (self.write_offset + segment_len) % self.buffer_len;

                    self.write_descr_ptr = if next_descriptor.is_null() {
                        self.first_desc_ptr
                    } else {
                        next_descriptor
                    }
                }
            }

            self.last_seen_handled_descriptor_ptr = descr_address;
        }

        Ok(())
    }

    pub(crate) fn push(&mut self, data: &[u8]) -> Result<usize, DmaError> {
        let avail = self.available;

        if avail < data.len() {
            return Err(DmaError::Overflow);
        }

        let mut remaining = data.len();
        let mut offset = 0;
        while self.available >= remaining && remaining > 0 {
            let written = self.push_with(|buffer| {
                let len = usize::min(buffer.len(), data.len() - offset);
                buffer[..len].copy_from_slice(&data[offset..][..len]);
                len
            })?;
            offset += written;
            remaining -= written;
        }

        Ok(data.len())
    }

    pub(crate) fn push_with(
        &mut self,
        f: impl FnOnce(&mut [u8]) -> usize,
    ) -> Result<usize, DmaError> {
        let written = unsafe {
            let dst = self.buffer_start.add(self.write_offset).cast_mut();
            let block_size = usize::min(self.available, self.buffer_len - self.write_offset);
            let buffer = core::slice::from_raw_parts_mut(dst, block_size);
            f(buffer)
        };

        let mut forward = written;
        loop {
            unsafe {
                let mut descr = self.write_descr_ptr.read_volatile();
                descr.set_owner(Owner::Dma);
                self.write_descr_ptr.write_volatile(descr);

                let segment_len = descr.len();
                self.write_descr_ptr = if descr.next.is_null() {
                    self.first_desc_ptr
                } else {
                    descr.next
                };

                if forward <= segment_len {
                    break;
                }

                forward -= segment_len;
            }
        }

        self.write_offset = (self.write_offset + written) % self.buffer_len;
        self.available -= written;

        Ok(written)
    }
}
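
// How the circular TX state is meant to be driven (sketch; `transfer` stands
// in for a `DmaTransferTxCircular`, defined further below):
//
//     loop {
//         let free = transfer.available()?; // runs update(), reads `available`
//         if free >= chunk.len() {
//             transfer.push(&chunk)?;       // copy in, flip ownership to DMA
//         }
//     }
//
// `push` rejects writes larger than `available` with `DmaError::Overflow`, so
// callers should poll `available()` first.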

pub(crate) struct RxCircularState {
    read_descr_ptr: *mut DmaDescriptor,
    pub(crate) available: usize,
    last_seen_handled_descriptor_ptr: *mut DmaDescriptor,
    last_descr_ptr: *mut DmaDescriptor,
}

impl RxCircularState {
    pub(crate) fn new(chain: &mut DescriptorChain) -> Self {
        Self {
            read_descr_ptr: chain.first_mut(),
            available: 0,
            last_seen_handled_descriptor_ptr: core::ptr::null_mut(),
            last_descr_ptr: chain.last_mut(),
        }
    }

    pub(crate) fn update(&mut self) -> Result<(), DmaError> {
        if self.last_seen_handled_descriptor_ptr.is_null() {
            self.last_seen_handled_descriptor_ptr = self.last_descr_ptr;
        }

        let mut current_in_descr_ptr =
            unsafe { self.last_seen_handled_descriptor_ptr.read_volatile() }.next;
        let mut current_in_descr = unsafe { current_in_descr_ptr.read_volatile() };

        let last_seen_ptr = self.last_seen_handled_descriptor_ptr;
        while current_in_descr.owner() == Owner::Cpu {
            self.available += current_in_descr.len();
            self.last_seen_handled_descriptor_ptr = current_in_descr_ptr;

            current_in_descr_ptr =
                unsafe { self.last_seen_handled_descriptor_ptr.read_volatile() }.next;
            current_in_descr = unsafe { current_in_descr_ptr.read_volatile() };

            if core::ptr::eq(current_in_descr_ptr, last_seen_ptr) {
                return Err(DmaError::Late);
            }
        }

        Ok(())
    }

    pub(crate) fn pop(&mut self, data: &mut [u8]) -> Result<usize, DmaError> {
        let len = data.len();
        let mut avail = self.available;

        if avail > len {
            return Err(DmaError::BufferTooSmall);
        }

        let mut remaining_buffer = data;
        let mut descr_ptr = self.read_descr_ptr;

        if descr_ptr.is_null() {
            return Ok(0);
        }

        let mut descr = unsafe { descr_ptr.read_volatile() };

        while avail > 0 && !remaining_buffer.is_empty() && remaining_buffer.len() >= descr.len() {
            unsafe {
                let dst = remaining_buffer.as_mut_ptr();
                let src = descr.buffer;
                let count = descr.len();
                core::ptr::copy_nonoverlapping(src, dst, count);

                descr.set_owner(Owner::Dma);
                descr.set_suc_eof(false);
                descr.set_length(0);
                descr_ptr.write_volatile(descr);

                remaining_buffer = &mut remaining_buffer[count..];
                avail -= count;
                descr_ptr = descr.next;
            }

            if descr_ptr.is_null() {
                break;
            }

            descr = unsafe { descr_ptr.read_volatile() };
        }

        self.read_descr_ptr = descr_ptr;
        self.available = avail;
        Ok(len - remaining_buffer.len())
    }
}
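
// The matching RX flow (sketch): `update()` walks descriptors the DMA has
// handed back to the CPU and accumulates `available`; `pop` then copies whole
// descriptor payloads out and returns ownership to the DMA. `update()` reports
// `DmaError::Late` when the CPU fell a full ring behind the DMA.
//
//     transfer.available()?;           // refresh the bookkeeping
//     let n = transfer.pop(&mut buf)?; // drain complete descriptors only
//
// Note that `pop` stops as soon as the next descriptor's payload would not fit
// into the remaining output buffer.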

#[doc(hidden)]
macro_rules! impl_dma_eligible {
    ([$dma_ch:ident] $name:ident => $dma:ident) => {
        impl<'d> $crate::dma::DmaEligible for $crate::peripherals::$name<'d> {
            type Dma = $dma_ch<'d>;

            fn dma_peripheral(&self) -> $crate::dma::DmaPeripheral {
                $crate::dma::DmaPeripheral::$dma
            }
        }
    };

    (
        $dma_ch:ident {
            $($(#[$cfg:meta])? $name:ident => $dma:ident,)*
        }
    ) => {
        $(
            $(#[$cfg])?
            $crate::dma::impl_dma_eligible!([$dma_ch] $name => $dma);
        )*
    };
}

pub(crate) use impl_dma_eligible;

pub type PeripheralDmaChannel<T> = <T as DmaEligible>::Dma;
pub type PeripheralRxChannel<T> = <PeripheralDmaChannel<T> as DmaChannel>::Rx;
pub type PeripheralTxChannel<T> = <PeripheralDmaChannel<T> as DmaChannel>::Tx;

#[instability::unstable]
pub trait DmaRxChannel: RxRegisterAccess + InterruptAccess<DmaRxInterrupt> {}

#[instability::unstable]
pub trait DmaTxChannel: TxRegisterAccess + InterruptAccess<DmaTxInterrupt> {}

pub trait DmaChannel: Sized {
    type Rx: DmaRxChannel;

    type Tx: DmaTxChannel;

    #[cfg(any(esp32c6, esp32h2, esp32s3))]
    fn split(self) -> (Self::Rx, Self::Tx) {
        unsafe { self.split_internal(crate::private::Internal) }
    }

    unsafe fn split_internal(self, _: crate::private::Internal) -> (Self::Rx, Self::Tx);
}

#[doc(hidden)]
pub trait DmaChannelExt: DmaChannel {
    fn rx_interrupts() -> impl InterruptAccess<DmaRxInterrupt>;
    fn tx_interrupts() -> impl InterruptAccess<DmaTxInterrupt>;
}

#[diagnostic::on_unimplemented(
    message = "The DMA channel isn't suitable for this peripheral",
    label = "This DMA channel",
    note = "Not all channels are usable with all peripherals"
)]
#[doc(hidden)]
pub trait DmaChannelConvert<DEG> {
    fn degrade(self) -> DEG;
}

impl<DEG: DmaChannel> DmaChannelConvert<DEG> for DEG {
    fn degrade(self) -> DEG {
        self
    }
}

#[cfg_attr(pdma, doc = "")]
#[cfg_attr(
    pdma,
    doc = "Note that using mismatching channels (e.g. trying to use `DMA_SPI2` with SPI3) may compile, but will panic at runtime."
)]
#[cfg_attr(pdma, doc = "")]
#[doc = crate::before_snippet!()]
#[cfg_attr(pdma, doc = "let dma_channel = peripherals.DMA_SPI2;")]
#[cfg_attr(gdma, doc = "let dma_channel = peripherals.DMA_CH0;")]
#[doc = ""]
#[allow(private_bounds)]
pub trait DmaChannelFor<P: DmaEligible>:
    DmaChannel + DmaChannelConvert<PeripheralDmaChannel<P>>
{
}

impl<P, CH> DmaChannelFor<P> for CH
where
    P: DmaEligible,
    CH: DmaChannel + DmaChannelConvert<PeripheralDmaChannel<P>>,
{
}

#[allow(private_bounds)]
pub trait RxChannelFor<P: DmaEligible>: DmaChannelConvert<PeripheralRxChannel<P>> {}

impl<P, RX> RxChannelFor<P> for RX
where
    P: DmaEligible,
    RX: DmaChannelConvert<PeripheralRxChannel<P>>,
{
}

#[allow(private_bounds)]
pub trait TxChannelFor<PER: DmaEligible>: DmaChannelConvert<PeripheralTxChannel<PER>> {}

impl<P, TX> TxChannelFor<P> for TX
where
    P: DmaEligible,
    TX: DmaChannelConvert<PeripheralTxChannel<P>>,
{
}

cfg_if::cfg_if! {
    if #[cfg(pdma)] {
        type PeripheralGuard = system::GenericPeripheralGuard<{ system::Peripheral::Dma as u8 }>;
    } else {
        type PeripheralGuard = system::GenericPeripheralGuard<{ system::Peripheral::Gdma as u8 }>;
    }
}

fn create_guard(_ch: &impl RegisterAccess) -> PeripheralGuard {
    system::GenericPeripheralGuard::new_with(init_dma)
}

#[non_exhaustive]
#[doc(hidden)]
pub struct ChannelRx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    pub(crate) rx_impl: CH,
    pub(crate) _phantom: PhantomData<Dm>,
    pub(crate) _guard: PeripheralGuard,
}

impl<CH> ChannelRx<Blocking, CH>
where
    CH: DmaRxChannel,
{
    pub fn new(rx_impl: CH) -> Self {
        let _guard = create_guard(&rx_impl);

        #[cfg(gdma)]
        rx_impl.set_mem2mem_mode(false);

        if let Some(interrupt) = rx_impl.peripheral_interrupt() {
            for cpu in Cpu::all() {
                crate::interrupt::disable(cpu, interrupt);
            }
        }
        rx_impl.set_async(false);

        Self {
            rx_impl,
            _phantom: PhantomData,
            _guard,
        }
    }

    pub(crate) fn into_async(mut self) -> ChannelRx<Async, CH> {
        if let Some(handler) = self.rx_impl.async_handler() {
            self.set_interrupt_handler(handler);
        }
        self.rx_impl.set_async(true);
        ChannelRx {
            rx_impl: self.rx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }

    fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.unlisten_in(EnumSet::all());
        self.clear_in(EnumSet::all());

        if let Some(interrupt) = self.rx_impl.peripheral_interrupt() {
            for core in crate::system::Cpu::other() {
                crate::interrupt::disable(core, interrupt);
            }
            unsafe { crate::interrupt::bind_interrupt(interrupt, handler.handler()) };
            unwrap!(crate::interrupt::enable(interrupt, handler.priority()));
        }
    }
}

impl<CH> ChannelRx<Async, CH>
where
    CH: DmaRxChannel,
{
    pub(crate) fn into_blocking(self) -> ChannelRx<Blocking, CH> {
        if let Some(interrupt) = self.rx_impl.peripheral_interrupt() {
            crate::interrupt::disable(Cpu::current(), interrupt);
        }
        self.rx_impl.set_async(false);
        ChannelRx {
            rx_impl: self.rx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }
}

impl<Dm, CH> ChannelRx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.rx_impl.set_priority(priority);
    }

    fn do_prepare(
        &mut self,
        preparation: Preparation,
        peri: DmaPeripheral,
    ) -> Result<(), DmaError> {
        debug_assert_eq!(preparation.direction, TransferDirection::In);

        debug!("Preparing RX transfer {:?}", preparation);
        trace!("First descriptor {:?}", unsafe { &*preparation.start });

        #[cfg(psram_dma)]
        if preparation.accesses_psram && !self.rx_impl.can_access_psram() {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        #[cfg(psram_dma)]
        self.rx_impl
            .set_ext_mem_block_size(preparation.burst_transfer.external_memory.into());
        self.rx_impl.set_burst_mode(preparation.burst_transfer);
        self.rx_impl.set_descr_burst_mode(true);
        self.rx_impl.set_check_owner(preparation.check_owner);

        compiler_fence(core::sync::atomic::Ordering::SeqCst);

        self.rx_impl.clear_all();
        self.rx_impl.reset();
        self.rx_impl.set_link_addr(preparation.start as u32);
        self.rx_impl.set_peripheral(peri as u8);

        Ok(())
    }
}

impl<Dm, CH> crate::private::Sealed for ChannelRx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
}

#[allow(unused)]
impl<Dm, CH> ChannelRx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaRxChannel,
{
    pub(crate) unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError> {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let mut uses_psram = false;
                let psram_range = crate::soc::psram_range();
                for des in chain.descriptors.iter() {
                    let alignment = unsafe { crate::soc::cache_get_dcache_line_size() } as usize;
                    if crate::soc::addr_in_range(des.buffer as usize, psram_range.clone()) {
                        uses_psram = true;
                        if des.buffer as usize % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
                        }
                        if des.size() % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Size));
                        }
                        unsafe {
                            crate::soc::cache_invalidate_addr(des.buffer as u32, des.size() as u32);
                        }
                    }
                }
            }
        }

        let preparation = Preparation {
            start: chain.first().cast_mut(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: uses_psram,
            burst_transfer: BurstConfig::default(),
            check_owner: Some(false),
            auto_write_back: true,
        };
        self.do_prepare(preparation, peri)
    }

    pub(crate) unsafe fn prepare_transfer<BUF: DmaRxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError> {
        let preparation = buffer.prepare();

        self.do_prepare(preparation, peri)
    }

    pub(crate) fn start_transfer(&mut self) -> Result<(), DmaError> {
        self.rx_impl.start();

        if self
            .pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    pub(crate) fn stop_transfer(&mut self) {
        self.rx_impl.stop()
    }

    #[cfg(gdma)]
    pub(crate) fn set_mem2mem_mode(&mut self, value: bool) {
        self.rx_impl.set_mem2mem_mode(value);
    }

    pub(crate) fn listen_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.listen(interrupts);
    }

    pub(crate) fn unlisten_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.unlisten(interrupts);
    }

    pub(crate) fn is_listening_in(&self) -> EnumSet<DmaRxInterrupt> {
        self.rx_impl.is_listening()
    }

    pub(crate) fn clear_in(&self, interrupts: impl Into<EnumSet<DmaRxInterrupt>>) {
        self.rx_impl.clear(interrupts);
    }

    pub(crate) fn pending_in_interrupts(&self) -> EnumSet<DmaRxInterrupt> {
        self.rx_impl.pending_interrupts()
    }

    pub(crate) fn is_done(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::SuccessfulEof)
    }

    pub(crate) fn clear_interrupts(&self) {
        self.rx_impl.clear_all();
    }

    pub(crate) fn waker(&self) -> &'static crate::asynch::AtomicWaker {
        self.rx_impl.waker()
    }

    pub(crate) fn has_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorError)
    }

    pub(crate) fn has_dscr_empty_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::DescriptorEmpty)
    }

    pub(crate) fn has_eof_error(&self) -> bool {
        self.pending_in_interrupts()
            .contains(DmaRxInterrupt::ErrorEof)
    }
}

#[doc(hidden)]
pub struct ChannelTx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    pub(crate) tx_impl: CH,
    pub(crate) _phantom: PhantomData<Dm>,
    pub(crate) _guard: PeripheralGuard,
}

impl<CH> ChannelTx<Blocking, CH>
where
    CH: DmaTxChannel,
{
    pub fn new(tx_impl: CH) -> Self {
        let _guard = create_guard(&tx_impl);

        if let Some(interrupt) = tx_impl.peripheral_interrupt() {
            for cpu in Cpu::all() {
                crate::interrupt::disable(cpu, interrupt);
            }
        }
        tx_impl.set_async(false);
        Self {
            tx_impl,
            _phantom: PhantomData,
            _guard,
        }
    }

    pub(crate) fn into_async(mut self) -> ChannelTx<Async, CH> {
        if let Some(handler) = self.tx_impl.async_handler() {
            self.set_interrupt_handler(handler);
        }
        self.tx_impl.set_async(true);
        ChannelTx {
            tx_impl: self.tx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }

    fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.unlisten_out(EnumSet::all());
        self.clear_out(EnumSet::all());

        if let Some(interrupt) = self.tx_impl.peripheral_interrupt() {
            for core in crate::system::Cpu::other() {
                crate::interrupt::disable(core, interrupt);
            }
            unsafe { crate::interrupt::bind_interrupt(interrupt, handler.handler()) };
            unwrap!(crate::interrupt::enable(interrupt, handler.priority()));
        }
    }
}

impl<CH> ChannelTx<Async, CH>
where
    CH: DmaTxChannel,
{
    pub(crate) fn into_blocking(self) -> ChannelTx<Blocking, CH> {
        if let Some(interrupt) = self.tx_impl.peripheral_interrupt() {
            crate::interrupt::disable(Cpu::current(), interrupt);
        }
        self.tx_impl.set_async(false);
        ChannelTx {
            tx_impl: self.tx_impl,
            _phantom: PhantomData,
            _guard: self._guard,
        }
    }
}

impl<Dm, CH> ChannelTx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.tx_impl.set_priority(priority);
    }

    fn do_prepare(
        &mut self,
        preparation: Preparation,
        peri: DmaPeripheral,
    ) -> Result<(), DmaError> {
        debug_assert_eq!(preparation.direction, TransferDirection::Out);

        debug!("Preparing TX transfer {:?}", preparation);
        trace!("First descriptor {:?}", unsafe { &*preparation.start });

        #[cfg(psram_dma)]
        if preparation.accesses_psram && !self.tx_impl.can_access_psram() {
            return Err(DmaError::UnsupportedMemoryRegion);
        }

        #[cfg(psram_dma)]
        self.tx_impl
            .set_ext_mem_block_size(preparation.burst_transfer.external_memory.into());
        self.tx_impl.set_burst_mode(preparation.burst_transfer);
        self.tx_impl.set_descr_burst_mode(true);
        self.tx_impl.set_check_owner(preparation.check_owner);
        self.tx_impl
            .set_auto_write_back(preparation.auto_write_back);

        compiler_fence(core::sync::atomic::Ordering::SeqCst);

        self.tx_impl.clear_all();
        self.tx_impl.reset();
        self.tx_impl.set_link_addr(preparation.start as u32);
        self.tx_impl.set_peripheral(peri as u8);

        Ok(())
    }
}

impl<Dm, CH> crate::private::Sealed for ChannelTx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
}

#[allow(unused)]
impl<Dm, CH> ChannelTx<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaTxChannel,
{
    pub(crate) unsafe fn prepare_transfer_without_start(
        &mut self,
        peri: DmaPeripheral,
        chain: &DescriptorChain,
    ) -> Result<(), DmaError> {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let mut uses_psram = false;
                let psram_range = crate::soc::psram_range();
                for des in chain.descriptors.iter() {
                    let alignment = unsafe { crate::soc::cache_get_dcache_line_size() } as usize;
                    if crate::soc::addr_in_range(des.buffer as usize, psram_range.clone()) {
                        uses_psram = true;
                        if des.buffer as usize % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
                        }
                        if des.size() % alignment != 0 {
                            return Err(DmaError::InvalidAlignment(DmaAlignmentError::Size));
                        }
                        unsafe {
                            crate::soc::cache_writeback_addr(des.buffer as u32, des.size() as u32);
                        }
                    }
                }
            }
        }

        let preparation = Preparation {
            start: chain.first().cast_mut(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: uses_psram,
            burst_transfer: BurstConfig::default(),
            check_owner: Some(false),
            auto_write_back: !(unsafe { *chain.last() }).next.is_null(),
        };
        self.do_prepare(preparation, peri)?;

        Ok(())
    }

    pub(crate) unsafe fn prepare_transfer<BUF: DmaTxBuffer>(
        &mut self,
        peri: DmaPeripheral,
        buffer: &mut BUF,
    ) -> Result<(), DmaError> {
        let preparation = buffer.prepare();

        self.do_prepare(preparation, peri)
    }

    pub(crate) fn start_transfer(&mut self) -> Result<(), DmaError> {
        self.tx_impl.start();
        while self.tx_impl.is_fifo_empty() && self.pending_out_interrupts().is_empty() {}

        if self
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    pub(crate) fn stop_transfer(&mut self) {
        self.tx_impl.stop()
    }

    pub(crate) fn listen_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.listen(interrupts);
    }

    pub(crate) fn unlisten_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.unlisten(interrupts);
    }

    pub(crate) fn is_listening_out(&self) -> EnumSet<DmaTxInterrupt> {
        self.tx_impl.is_listening()
    }

    pub(crate) fn clear_out(&self, interrupts: impl Into<EnumSet<DmaTxInterrupt>>) {
        self.tx_impl.clear(interrupts);
    }

    pub(crate) fn pending_out_interrupts(&self) -> EnumSet<DmaTxInterrupt> {
        self.tx_impl.pending_interrupts()
    }

    pub(crate) fn waker(&self) -> &'static crate::asynch::AtomicWaker {
        self.tx_impl.waker()
    }

    pub(crate) fn clear_interrupts(&self) {
        self.tx_impl.clear_all();
    }

    pub(crate) fn last_out_dscr_address(&self) -> usize {
        self.tx_impl.last_dscr_address()
    }

    pub(crate) fn is_done(&self) -> bool {
        self.pending_out_interrupts()
            .contains(DmaTxInterrupt::TotalEof)
    }

    pub(crate) fn has_error(&self) -> bool {
        self.pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
    }
}

#[doc(hidden)]
pub trait RegisterAccess: crate::private::Sealed {
    fn reset(&self);

    fn set_burst_mode(&self, burst_mode: BurstConfig);

    fn set_descr_burst_mode(&self, burst_mode: bool);

    #[cfg(gdma)]
    fn set_priority(&self, priority: DmaPriority);

    fn set_peripheral(&self, peripheral: u8);

    fn set_link_addr(&self, address: u32);

    fn start(&self);

    fn stop(&self);

    fn restart(&self);

    fn set_check_owner(&self, check_owner: Option<bool>);

    #[cfg(psram_dma)]
    fn set_ext_mem_block_size(&self, size: DmaExtMemBKSize);

    #[cfg(pdma)]
    fn is_compatible_with(&self, peripheral: DmaPeripheral) -> bool;

    #[cfg(psram_dma)]
    fn can_access_psram(&self) -> bool;
}

#[doc(hidden)]
pub trait RxRegisterAccess: RegisterAccess {
    #[cfg(gdma)]
    fn set_mem2mem_mode(&self, value: bool);

    fn peripheral_interrupt(&self) -> Option<Interrupt>;
    fn async_handler(&self) -> Option<InterruptHandler>;
}

#[doc(hidden)]
pub trait TxRegisterAccess: RegisterAccess {
    fn is_fifo_empty(&self) -> bool;

    fn set_auto_write_back(&self, enable: bool);

    fn last_dscr_address(&self) -> usize;

    fn peripheral_interrupt(&self) -> Option<Interrupt>;
    fn async_handler(&self) -> Option<InterruptHandler>;
}

#[doc(hidden)]
pub trait InterruptAccess<T: EnumSetType>: crate::private::Sealed {
    fn listen(&self, interrupts: impl Into<EnumSet<T>>) {
        self.enable_listen(interrupts.into(), true)
    }
    fn unlisten(&self, interrupts: impl Into<EnumSet<T>>) {
        self.enable_listen(interrupts.into(), false)
    }

    fn clear_all(&self) {
        self.clear(EnumSet::all());
    }

    fn enable_listen(&self, interrupts: EnumSet<T>, enable: bool);
    fn is_listening(&self) -> EnumSet<T>;
    fn clear(&self, interrupts: impl Into<EnumSet<T>>);
    fn pending_interrupts(&self) -> EnumSet<T>;
    fn waker(&self) -> &'static crate::asynch::AtomicWaker;

    fn is_async(&self) -> bool;
    fn set_async(&self, is_async: bool);
}

#[non_exhaustive]
pub struct Channel<Dm, CH>
where
    Dm: DriverMode,
    CH: DmaChannel,
{
    pub rx: ChannelRx<Dm, CH::Rx>,
    pub tx: ChannelTx<Dm, CH::Tx>,
}

impl<CH> Channel<Blocking, CH>
where
    CH: DmaChannel,
{
    #[instability::unstable]
    pub fn new(channel: CH) -> Self {
        let (rx, tx) = unsafe { channel.split_internal(crate::private::Internal) };
        Self {
            rx: ChannelRx::new(rx),
            tx: ChannelTx::new(tx),
        }
    }

    #[instability::unstable]
    pub fn set_interrupt_handler(&mut self, handler: InterruptHandler) {
        self.rx.set_interrupt_handler(handler);
        self.tx.set_interrupt_handler(handler);
    }

    pub fn listen(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.listen_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.listen_out(DmaTxInterrupt::Done),
            }
        }
    }

    pub fn unlisten(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.unlisten_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.unlisten_out(DmaTxInterrupt::Done),
            }
        }
    }

    pub fn interrupts(&mut self) -> EnumSet<DmaInterrupt> {
        let mut res = EnumSet::new();
        if self.rx.is_done() {
            res.insert(DmaInterrupt::RxDone);
        }
        if self.tx.is_done() {
            res.insert(DmaInterrupt::TxDone);
        }
        res
    }

    pub fn clear_interrupts(&mut self, interrupts: impl Into<EnumSet<DmaInterrupt>>) {
        for interrupt in interrupts.into() {
            match interrupt {
                DmaInterrupt::RxDone => self.rx.clear_in(DmaRxInterrupt::Done),
                DmaInterrupt::TxDone => self.tx.clear_out(DmaTxInterrupt::Done),
            }
        }
    }

    #[cfg(gdma)]
    pub fn set_priority(&mut self, priority: DmaPriority) {
        self.tx.set_priority(priority);
        self.rx.set_priority(priority);
    }

    pub fn into_async(self) -> Channel<Async, CH> {
        Channel {
            rx: self.rx.into_async(),
            tx: self.tx.into_async(),
        }
    }
}
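
// Putting the blocking channel API together (sketch; `DMA_CH0` is the gdma
// channel name used in the doc snippets above, pdma chips use e.g. `DMA_SPI2`):
//
//     let mut channel = Channel::new(peripherals.DMA_CH0);
//     channel.set_priority(DmaPriority::Priority0);
//     channel.listen(DmaInterrupt::RxDone | DmaInterrupt::TxDone);
//     // ... hand `channel` to a peripheral driver, or:
//     let channel = channel.into_async(); // binds the async interrupt handlers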

impl<CH> Channel<Async, CH>
where
    CH: DmaChannel,
{
    pub fn into_blocking(self) -> Channel<Blocking, CH> {
        Channel {
            rx: self.rx.into_blocking(),
            tx: self.tx.into_blocking(),
        }
    }
}

impl<CH: DmaChannel> From<Channel<Blocking, CH>> for Channel<Async, CH> {
    fn from(channel: Channel<Blocking, CH>) -> Self {
        channel.into_async()
    }
}

impl<CH: DmaChannel> From<Channel<Async, CH>> for Channel<Blocking, CH> {
    fn from(channel: Channel<Async, CH>) -> Self {
        channel.into_blocking()
    }
}

pub(crate) mod dma_private {
    use super::*;

    pub trait DmaSupport {
        type DriverMode: DriverMode;

        fn peripheral_wait_dma(&mut self, is_rx: bool, is_tx: bool);

        fn peripheral_dma_stop(&mut self);
    }

    #[instability::unstable]
    pub trait DmaSupportTx: DmaSupport {
        type Channel: DmaTxChannel;

        fn tx(&mut self) -> &mut ChannelTx<Self::DriverMode, Self::Channel>;

        fn chain(&mut self) -> &mut DescriptorChain;
    }

    #[instability::unstable]
    pub trait DmaSupportRx: DmaSupport {
        type Channel: DmaRxChannel;

        fn rx(&mut self) -> &mut ChannelRx<Self::DriverMode, Self::Channel>;

        fn chain(&mut self) -> &mut DescriptorChain;
    }
}

#[non_exhaustive]
#[must_use]
pub struct DmaTransferTx<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    instance: &'a mut I,
}

impl<'a, I> DmaTransferTx<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    #[cfg_attr(esp32c2, allow(dead_code))]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        Self { instance }
    }

    pub fn wait(self) -> Result<(), DmaError> {
        self.instance.peripheral_wait_dma(false, true);

        if self
            .instance
            .tx()
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }

    pub fn is_done(&mut self) -> bool {
        self.instance.tx().is_done()
    }
}
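
// The wait/is_done pattern shared by all the transfer guards below (sketch;
// the guard comes from a driver method that starts the transfer):
//
//     let mut transfer = /* driver method returning DmaTransferTx */;
//     while !transfer.is_done() {
//         // do other work while the DMA runs
//     }
//     transfer.wait()?; // lets the peripheral drain, then reports
//                       // DescriptorError if one was raised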
2576
2577impl<I> Drop for DmaTransferTx<'_, I>
2578where
2579 I: dma_private::DmaSupportTx,
2580{
2581 fn drop(&mut self) {
2582 self.instance.peripheral_wait_dma(true, false);
2583 }
2584}
2585
2586#[non_exhaustive]
2592#[must_use]
2593pub struct DmaTransferRx<'a, I>
2594where
2595 I: dma_private::DmaSupportRx,
2596{
2597 instance: &'a mut I,
2598}
2599
2600impl<'a, I> DmaTransferRx<'a, I>
2601where
2602 I: dma_private::DmaSupportRx,
2603{
2604 #[cfg_attr(esp32c2, allow(dead_code))]
2605 pub(crate) fn new(instance: &'a mut I) -> Self {
2606 Self { instance }
2607 }
2608
2609 pub fn wait(self) -> Result<(), DmaError> {
2611 self.instance.peripheral_wait_dma(true, false);
2612
2613 if self
2614 .instance
2615 .rx()
2616 .pending_in_interrupts()
2617 .contains(DmaRxInterrupt::DescriptorError)
2618 {
2619 Err(DmaError::DescriptorError)
2620 } else {
2621 Ok(())
2622 }
2623 }
2624
2625 pub fn is_done(&mut self) -> bool {
2627 self.instance.rx().is_done()
2628 }
2629}
2630
2631impl<I> Drop for DmaTransferRx<'_, I>
2632where
2633 I: dma_private::DmaSupportRx,
2634{
2635 fn drop(&mut self) {
2636 self.instance.peripheral_wait_dma(true, false);
2637 }
2638}
2639
2640#[non_exhaustive]
2646#[must_use]
2647pub struct DmaTransferRxTx<'a, I>
2648where
2649 I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
2650{
2651 instance: &'a mut I,
2652}
2653
2654impl<'a, I> DmaTransferRxTx<'a, I>
2655where
2656 I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
2657{
2658 #[allow(dead_code)]
2659 pub(crate) fn new(instance: &'a mut I) -> Self {
2660 Self { instance }
2661 }
2662
2663 pub fn wait(self) -> Result<(), DmaError> {
2665 self.instance.peripheral_wait_dma(true, true);
2666
2667 if self
2668 .instance
2669 .tx()
2670 .pending_out_interrupts()
2671 .contains(DmaTxInterrupt::DescriptorError)
2672 || self
2673 .instance
2674 .rx()
2675 .pending_in_interrupts()
2676 .contains(DmaRxInterrupt::DescriptorError)
2677 {
2678 Err(DmaError::DescriptorError)
2679 } else {
2680 Ok(())
2681 }
2682 }
2683
2684 pub fn is_done(&mut self) -> bool {
2686 self.instance.tx().is_done() && self.instance.rx().is_done()
2687 }
2688}
2689
2690impl<I> Drop for DmaTransferRxTx<'_, I>
2691where
2692 I: dma_private::DmaSupportTx + dma_private::DmaSupportRx,
2693{
2694 fn drop(&mut self) {
2695 self.instance.peripheral_wait_dma(true, true);
2696 }
2697}
2698
#[non_exhaustive]
#[must_use]
pub struct DmaTransferTxCircular<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    instance: &'a mut I,
    state: TxCircularState,
}

impl<'a, I> DmaTransferTxCircular<'a, I>
where
    I: dma_private::DmaSupportTx,
{
    #[allow(unused)]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        let state = TxCircularState::new(instance.chain());
        Self { instance, state }
    }

    /// Returns the number of bytes that can currently be pushed.
    pub fn available(&mut self) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        Ok(self.state.available)
    }

    /// Pushes bytes into the DMA buffer, returning how many were accepted.
    pub fn push(&mut self, data: &[u8]) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        self.state.push(data)
    }

    /// Pushes bytes into the DMA buffer via the given closure.
    ///
    /// The closure receives the writable part of the buffer and must return
    /// the number of bytes it actually wrote; it may be called with a slice
    /// smaller than the total available space.
    pub fn push_with(&mut self, f: impl FnOnce(&mut [u8]) -> usize) -> Result<usize, DmaError> {
        self.state.update(self.instance.tx())?;
        self.state.push_with(f)
    }

    /// Stops the transfer and reports any DMA descriptor error.
    pub fn stop(self) -> Result<(), DmaError> {
        self.instance.peripheral_dma_stop();

        if self
            .instance
            .tx()
            .pending_out_interrupts()
            .contains(DmaTxInterrupt::DescriptorError)
        {
            Err(DmaError::DescriptorError)
        } else {
            Ok(())
        }
    }
}

impl<I> Drop for DmaTransferTxCircular<'_, I>
where
    I: dma_private::DmaSupportTx,
{
    fn drop(&mut self) {
        self.instance.peripheral_dma_stop();
    }
}

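/// DMA transaction for RX-only circular (ring-buffer) transfers.
///
/// Drain received data regularly with [`Self::pop`]; dropping the
/// transaction stops the transfer.
///
/// Illustrative usage sketch (not a doc-test; `driver`, `read_dma_circular`
/// and `process` are placeholders):
///
/// ```rust,ignore
/// let mut transfer = driver.read_dma_circular(&mut rx_buffer)?;
/// let mut chunk = [0u8; 128];
/// loop {
///     if transfer.available()? > 0 {
///         let n = transfer.pop(&mut chunk)?;
///         process(&chunk[..n]);
///     }
/// }
/// ```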
#[non_exhaustive]
#[must_use]
pub struct DmaTransferRxCircular<'a, I>
where
    I: dma_private::DmaSupportRx,
{
    instance: &'a mut I,
    state: RxCircularState,
}

impl<'a, I> DmaTransferRxCircular<'a, I>
where
    I: dma_private::DmaSupportRx,
{
    #[allow(unused)]
    pub(crate) fn new(instance: &'a mut I) -> Self {
        let state = RxCircularState::new(instance.chain());
        Self { instance, state }
    }

    /// Returns the number of bytes that are ready to be popped.
    pub fn available(&mut self) -> Result<usize, DmaError> {
        self.state.update()?;
        Ok(self.state.available)
    }

    /// Copies received data into `data`, returning the number of bytes
    /// written. If less data is available than `data` can hold, the rest of
    /// the buffer is left untouched.
    pub fn pop(&mut self, data: &mut [u8]) -> Result<usize, DmaError> {
        self.state.update()?;
        self.state.pop(data)
    }
}

impl<I> Drop for DmaTransferRxCircular<'_, I>
where
    I: dma_private::DmaSupportRx,
{
    fn drop(&mut self) {
        self.instance.peripheral_dma_stop();
    }
}

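/// Futures used to implement async DMA transfers: each resolves when its
/// channel half signals completion (EOF/`Done`) or an error interrupt.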
pub(crate) mod asynch {
    use core::task::Poll;

    use super::*;

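    /// Future that resolves once the TX channel reports EOF for the whole
    /// descriptor chain, or fails with a descriptor error.
    ///
    /// Illustrative sketch (not a doc-test), from inside an async driver
    /// method that owns `channel_tx: ChannelTx<Async, CH>`:
    ///
    /// ```rust,ignore
    /// DmaTxFuture::new(&mut channel_tx).await?;
    /// ```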
    #[must_use = "futures do nothing unless you `.await` or poll them"]
    pub struct DmaTxFuture<'a, CH>
    where
        CH: DmaTxChannel,
    {
        pub(crate) tx: &'a mut ChannelTx<Async, CH>,
    }

    impl<'a, CH> DmaTxFuture<'a, CH>
    where
        CH: DmaTxChannel,
    {
        #[cfg_attr(esp32c2, allow(dead_code))]
        pub fn new(tx: &'a mut ChannelTx<Async, CH>) -> Self {
            Self { tx }
        }
    }

    impl<CH> core::future::Future for DmaTxFuture<'_, CH>
    where
        CH: DmaTxChannel,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self.tx.is_done() {
                self.tx.clear_interrupts();
                Poll::Ready(Ok(()))
            } else if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::DescriptorError)
            {
                self.tx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                // Register the waker before enabling the interrupts so that a
                // completion racing with this poll still wakes the task.
                self.tx.waker().register(cx.waker());
                self.tx
                    .listen_out(DmaTxInterrupt::TotalEof | DmaTxInterrupt::DescriptorError);
                Poll::Pending
            }
        }
    }

    impl<CH> Drop for DmaTxFuture<'_, CH>
    where
        CH: DmaTxChannel,
    {
        fn drop(&mut self) {
            self.tx
                .unlisten_out(DmaTxInterrupt::TotalEof | DmaTxInterrupt::DescriptorError);
        }
    }

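    /// Future that resolves once the RX channel reports a successful EOF, or
    /// fails on a descriptor or EOF error.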
    #[must_use = "futures do nothing unless you `.await` or poll them"]
    pub struct DmaRxFuture<'a, CH>
    where
        CH: DmaRxChannel,
    {
        pub(crate) rx: &'a mut ChannelRx<Async, CH>,
    }

    impl<'a, CH> DmaRxFuture<'a, CH>
    where
        CH: DmaRxChannel,
    {
        pub fn new(rx: &'a mut ChannelRx<Async, CH>) -> Self {
            Self { rx }
        }
    }

    impl<CH> core::future::Future for DmaRxFuture<'_, CH>
    where
        CH: DmaRxChannel,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self.rx.is_done() {
                self.rx.clear_interrupts();
                Poll::Ready(Ok(()))
            } else if !self.rx.pending_in_interrupts().is_disjoint(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            ) {
                self.rx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                // Register the waker before enabling the interrupts so that a
                // completion racing with this poll still wakes the task.
                self.rx.waker().register(cx.waker());
                self.rx.listen_in(
                    DmaRxInterrupt::SuccessfulEof
                        | DmaRxInterrupt::DescriptorError
                        | DmaRxInterrupt::DescriptorEmpty
                        | DmaRxInterrupt::ErrorEof,
                );
                Poll::Pending
            }
        }
    }

    impl<CH> Drop for DmaRxFuture<'_, CH>
    where
        CH: DmaRxChannel,
    {
        fn drop(&mut self) {
            // Unsubscribe from everything `poll` may have listened to,
            // including `SuccessfulEof`, so a cancelled future leaves no
            // interrupt enabled behind.
            self.rx.unlisten_in(
                DmaRxInterrupt::SuccessfulEof
                    | DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            );
        }
    }

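    /// Future that resolves when the TX channel signals that a single
    /// descriptor has been sent out (`DmaTxInterrupt::Done`). Only compiled
    /// on chips with I2S.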
    #[cfg(any(i2s0, i2s1))]
    pub struct DmaTxDoneChFuture<'a, CH>
    where
        CH: DmaTxChannel,
    {
        pub(crate) tx: &'a mut ChannelTx<Async, CH>,
        // Private unit field so this future can only be built via `new`.
        _a: (),
    }

    #[cfg(any(i2s0, i2s1))]
    impl<'a, CH> DmaTxDoneChFuture<'a, CH>
    where
        CH: DmaTxChannel,
    {
        pub fn new(tx: &'a mut ChannelTx<Async, CH>) -> Self {
            Self { tx, _a: () }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<CH> core::future::Future for DmaTxDoneChFuture<'_, CH>
    where
        CH: DmaTxChannel,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::Done)
            {
                self.tx.clear_out(DmaTxInterrupt::Done);
                Poll::Ready(Ok(()))
            } else if self
                .tx
                .pending_out_interrupts()
                .contains(DmaTxInterrupt::DescriptorError)
            {
                self.tx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.tx.waker().register(cx.waker());
                self.tx
                    .listen_out(DmaTxInterrupt::Done | DmaTxInterrupt::DescriptorError);
                Poll::Pending
            }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<CH> Drop for DmaTxDoneChFuture<'_, CH>
    where
        CH: DmaTxChannel,
    {
        fn drop(&mut self) {
            self.tx
                .unlisten_out(DmaTxInterrupt::Done | DmaTxInterrupt::DescriptorError);
        }
    }

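    /// Future that resolves when the RX channel signals that a single
    /// descriptor has been filled (`DmaRxInterrupt::Done`). Only compiled on
    /// chips with I2S.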
    #[cfg(any(i2s0, i2s1))]
    pub struct DmaRxDoneChFuture<'a, CH>
    where
        CH: DmaRxChannel,
    {
        pub(crate) rx: &'a mut ChannelRx<Async, CH>,
        // Private unit field so this future can only be built via `new`.
        _a: (),
    }

    #[cfg(any(i2s0, i2s1))]
    impl<'a, CH> DmaRxDoneChFuture<'a, CH>
    where
        CH: DmaRxChannel,
    {
        pub fn new(rx: &'a mut ChannelRx<Async, CH>) -> Self {
            Self { rx, _a: () }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<CH> core::future::Future for DmaRxDoneChFuture<'_, CH>
    where
        CH: DmaRxChannel,
    {
        type Output = Result<(), DmaError>;

        fn poll(
            self: core::pin::Pin<&mut Self>,
            cx: &mut core::task::Context<'_>,
        ) -> Poll<Self::Output> {
            if self
                .rx
                .pending_in_interrupts()
                .contains(DmaRxInterrupt::Done)
            {
                self.rx.clear_in(DmaRxInterrupt::Done);
                Poll::Ready(Ok(()))
            } else if !self.rx.pending_in_interrupts().is_disjoint(
                DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            ) {
                self.rx.clear_interrupts();
                Poll::Ready(Err(DmaError::DescriptorError))
            } else {
                self.rx.waker().register(cx.waker());
                self.rx.listen_in(
                    DmaRxInterrupt::Done
                        | DmaRxInterrupt::DescriptorError
                        | DmaRxInterrupt::DescriptorEmpty
                        | DmaRxInterrupt::ErrorEof,
                );
                Poll::Pending
            }
        }
    }

    #[cfg(any(i2s0, i2s1))]
    impl<CH> Drop for DmaRxDoneChFuture<'_, CH>
    where
        CH: DmaRxChannel,
    {
        fn drop(&mut self) {
            self.rx.unlisten_in(
                DmaRxInterrupt::Done
                    | DmaRxInterrupt::DescriptorError
                    | DmaRxInterrupt::DescriptorEmpty
                    | DmaRxInterrupt::ErrorEof,
            );
        }
    }

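    /// Shared RX interrupt handler for async channel users: if any
    /// listened-for interrupt is pending, it unlistens everything and wakes
    /// the registered task, which then re-inspects the channel state.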
    pub(super) fn handle_in_interrupt<CH: DmaChannelExt>() {
        let rx = CH::rx_interrupts();

        if !rx.is_async() {
            return;
        }

        let pending = rx.pending_interrupts();
        let enabled = rx.is_listening();

        if !pending.is_disjoint(enabled) {
            rx.unlisten(EnumSet::all());
            rx.waker().wake();
        }
    }

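    /// TX counterpart of [`handle_in_interrupt`].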
    pub(super) fn handle_out_interrupt<CH: DmaChannelExt>() {
        let tx = CH::tx_interrupts();

        if !tx.is_async() {
            return;
        }

        let pending = tx.pending_interrupts();
        let enabled = tx.is_listening();

        if !pending.is_disjoint(enabled) {
            tx.unlisten(EnumSet::all());
            tx.waker().wake();
        }
    }
}