#[cfg(psram_dma)]
use core::ops::Range;
use core::{
    ops::{Deref, DerefMut},
    ptr::{NonNull, null_mut},
};

use super::*;
use crate::soc::is_slice_in_dram;
#[cfg(psram_dma)]
use crate::soc::{is_slice_in_psram, is_valid_psram_address, is_valid_ram_address};

/// Error returned from DMA buffer operations.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaBufError {
    /// The buffer is smaller than the requested size.
    BufferTooSmall,

    /// More descriptors are needed to describe the buffer.
    InsufficientDescriptors,

    /// The memory region is not usable by the DMA.
    UnsupportedMemoryRegion,

    /// The buffer address or size does not meet the alignment requirements.
    InvalidAlignment(DmaAlignmentError),

    /// The chunk size is invalid.
    InvalidChunkSize,
}

/// Alignment error returned when a buffer does not meet DMA requirements.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaAlignmentError {
    /// The buffer address is not sufficiently aligned.
    Address,

    /// The buffer size is not a multiple of the required alignment.
    Size,
}

impl From<DmaAlignmentError> for DmaBufError {
    fn from(err: DmaAlignmentError) -> Self {
        DmaBufError::InvalidAlignment(err)
    }
}

cfg_if::cfg_if! {
    if #[cfg(psram_dma)] {
        /// Burst size used when accessing external (PSRAM) memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum ExternalBurstConfig {
            /// 16-byte burst transfers.
            Size16 = 16,

            /// 32-byte burst transfers.
            Size32 = 32,

            /// 64-byte burst transfers.
            Size64 = 64,
        }

        impl ExternalBurstConfig {
            /// The default external memory burst size.
            pub const DEFAULT: Self = Self::Size16;
        }

        impl Default for ExternalBurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Burst mode used when accessing internal (DRAM) memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum InternalBurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl InternalBurstConfig {
            /// The default internal memory burst mode.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for InternalBurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Burst transfer configuration.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub struct BurstConfig {
            /// Burst size used when accessing external (PSRAM) memory.
            pub external_memory: ExternalBurstConfig,

            /// Burst mode used when accessing internal (DRAM) memory.
            pub internal_memory: InternalBurstConfig,
        }

        impl BurstConfig {
            /// The default burst configuration.
            pub const DEFAULT: Self = Self {
                external_memory: ExternalBurstConfig::DEFAULT,
                internal_memory: InternalBurstConfig::DEFAULT,
            };
        }

        impl Default for BurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        impl From<InternalBurstConfig> for BurstConfig {
            fn from(internal_memory: InternalBurstConfig) -> Self {
                Self {
                    external_memory: ExternalBurstConfig::DEFAULT,
                    internal_memory,
                }
            }
        }

        impl From<ExternalBurstConfig> for BurstConfig {
            fn from(external_memory: ExternalBurstConfig) -> Self {
                Self {
                    external_memory,
                    internal_memory: InternalBurstConfig::DEFAULT,
                }
            }
        }
    } else {
        /// Burst transfer configuration.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum BurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl BurstConfig {
            /// The default burst mode.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for BurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        type InternalBurstConfig = BurstConfig;
    }
}

#[cfg(psram_dma)]
impl ExternalBurstConfig {
    /// Minimum alignment required for PSRAM buffers, for the given transfer
    /// direction.
    const fn min_psram_alignment(self, direction: TransferDirection) -> usize {
        // Buffers the DMA writes into (receive) must be aligned to the burst
        // size; buffers it only reads from (transmit) have no such requirement.
        if matches!(direction, TransferDirection::In) {
            self as usize
        } else {
            1
        }
    }
}

impl InternalBurstConfig {
    pub(super) const fn is_burst_enabled(self) -> bool {
        !matches!(self, Self::Disabled)
    }

    /// Minimum alignment required for internal RAM buffers, for the given
    /// transfer direction.
    const fn min_dram_alignment(self, direction: TransferDirection) -> usize {
        if matches!(direction, TransferDirection::In) {
            if cfg!(esp32) {
                // The ESP32 requires word-aligned receive buffers.
                4
            } else if self.is_burst_enabled() {
                // Burst mode requires word alignment.
                4
            } else {
                1
            }
        } else {
            if cfg!(esp32) {
                // The ESP32 requires word-aligned transmit buffers.
                4
            } else {
                1
            }
        }
    }
}

const fn max(a: usize, b: usize) -> usize {
    if a > b { a } else { b }
}

impl BurstConfig {
    delegate::delegate! {
        #[cfg(psram_dma)]
        to self.internal_memory {
            pub(super) const fn min_dram_alignment(self, direction: TransferDirection) -> usize;
            pub(super) fn is_burst_enabled(self) -> bool;
        }
    }

    /// Returns the buffer alignment that is compatible with all transfer
    /// directions and memory regions.
    pub const fn min_compatible_alignment(self) -> usize {
        let in_alignment = self.min_dram_alignment(TransferDirection::In);
        let out_alignment = self.min_dram_alignment(TransferDirection::Out);
        let alignment = max(in_alignment, out_alignment);

        #[cfg(psram_dma)]
        let alignment = max(alignment, self.external_memory as usize);

        alignment
    }

    /// Returns the largest chunk size that stays a multiple of `alignment`
    /// while still fitting into a single descriptor (at most 4095 bytes).
    const fn chunk_size_for_alignment(alignment: usize) -> usize {
        4096 - alignment
    }

    /// Returns the chunk size that is compatible with all transfer directions
    /// and memory regions.
    pub const fn max_compatible_chunk_size(self) -> usize {
        Self::chunk_size_for_alignment(self.min_compatible_alignment())
    }

    fn min_alignment(self, _buffer: &[u8], direction: TransferDirection) -> usize {
        let alignment = self.min_dram_alignment(direction);

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let mut alignment = alignment;
                if is_valid_psram_address(_buffer.as_ptr() as usize) {
                    alignment = max(alignment, self.external_memory.min_psram_alignment(direction));
                }
            }
        }

        alignment
    }

    fn max_chunk_size_for(self, buffer: &[u8], direction: TransferDirection) -> usize {
        Self::chunk_size_for_alignment(self.min_alignment(buffer, direction))
    }

    fn ensure_buffer_aligned(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaAlignmentError> {
        let alignment = self.min_alignment(buffer, direction);
        if !(buffer.as_ptr() as usize).is_multiple_of(alignment) {
            return Err(DmaAlignmentError::Address);
        }

        // The DMA must not be able to write past the end of the buffer, so
        // receive buffer sizes must also be a multiple of the alignment.
        if direction == TransferDirection::In && !buffer.len().is_multiple_of(alignment) {
            return Err(DmaAlignmentError::Size);
        }

        Ok(())
    }

    fn ensure_buffer_compatible(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaBufError> {
        let is_in_dram = is_slice_in_dram(buffer);
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                let is_in_psram = is_slice_in_psram(buffer);
            } else {
                let is_in_psram = false;
            }
        }

        if !(is_in_dram || is_in_psram) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        self.ensure_buffer_aligned(buffer, direction)?;

        Ok(())
    }
}

/// The direction of a DMA transfer, as seen from memory.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TransferDirection {
    /// Data flows from a peripheral into memory (receive).
    In,
    /// Data flows from memory to a peripheral (transmit).
    Out,
}

/// Holds the information needed to configure a DMA channel for a transfer.
#[derive(PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Preparation {
    /// The first descriptor of the linked descriptor list.
    pub start: *mut DmaDescriptor,

    /// The direction of the transfer.
    pub direction: TransferDirection,

    /// Whether the buffer (or part of it) resides in PSRAM.
    #[cfg(psram_dma)]
    pub accesses_psram: bool,

    /// The burst configuration to use for the transfer.
    #[doc = crate::trm_markdown_link!()]
    pub burst_transfer: BurstConfig,

    /// Whether the DMA must check the descriptor owner bit before using a
    /// descriptor. `None` means the buffer works with either setting.
    pub check_owner: Option<bool>,

    /// Whether the DMA should automatically write back to the descriptors
    /// once it is done with them.
    pub auto_write_back: bool,
}

/// A buffer that can be used for a transmitting (memory to peripheral) DMA
/// transfer.
pub unsafe trait DmaTxBuffer {
    /// A view into the buffer while it is in use by the DMA.
    type View;

    /// The type returned once the buffer is reclaimed from its view.
    type Final;

    /// Prepares the buffer and describes the transfer to the DMA driver.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view once the transfer has finished.
    fn from_view(view: Self::View) -> Self::Final;
}

/// A buffer that can be used for a receiving (peripheral to memory) DMA
/// transfer.
pub unsafe trait DmaRxBuffer {
    /// A view into the buffer while it is in use by the DMA.
    type View;

    /// The type returned once the buffer is reclaimed from its view.
    type Final;

    /// Prepares the buffer and describes the transfer to the DMA driver.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view once the transfer has finished.
    fn from_view(view: Self::View) -> Self::Final;
}

/// A view into a DMA buffer that is currently in use by the DMA.
pub struct BufView<T>(T);

/// DMA transmit buffer.
///
/// A contiguous buffer linked together by DMA descriptors; it can only be
/// used for transmitting data to a peripheral.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaTxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}

impl DmaTxBuf {
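    /// Creates a new [`DmaTxBuf`] from some descriptors and a buffer.
    ///
    /// A rough usage sketch; the descriptor count, buffer size and the use of
    /// statics are illustrative only, not a requirement of this API:
    ///
    /// ```rust,ignore
    /// // One descriptor can cover up to 4095 bytes, so two suffice here.
    /// static mut DESCRIPTORS: [DmaDescriptor; 2] = [DmaDescriptor::EMPTY; 2];
    /// static mut BUFFER: [u8; 4096] = [0; 4096];
    ///
    /// let mut tx_buf = DmaTxBuf::new(
    ///     unsafe { &mut *core::ptr::addr_of_mut!(DESCRIPTORS) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(BUFFER) },
    /// )?;
    ///
    /// // Copy the data to send and shrink the transfer to its length.
    /// tx_buf.fill(&[0xAA; 32]);
    /// ```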
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        Self::new_with_config(descriptors, buffer, BurstConfig::default())
    }

    /// Creates a new [`DmaTxBuf`] with the given burst configuration.
    pub fn new_with_config(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
        config: impl Into<BurstConfig>,
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            descriptors: DescriptorSet::new(descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(config, capacity)?;

        Ok(buf)
    }

    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;
        Ok(())
    }

    /// Reconfigures the burst settings, re-linking the descriptors if needed.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buffer, returning the descriptors and buffer it was made from.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors.into_inner(), self.buffer)
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted by this buffer.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        for desc in self.descriptors.linked_iter_mut() {
            // Mark the last descriptor in the chain as the end of the transfer.
            desc.reset_for_tx(desc.next.is_null());
        }

        Ok(())
    }

    /// Sets the transfer length of the buffer.
    ///
    /// Panics if the length cannot be applied, e.g. because it exceeds the
    /// capacity or violates alignment requirements.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst))
    }

    /// Copies `data` into the buffer and sets the transfer length accordingly.
    ///
    /// Panics if `data` does not fit into the buffer.
    pub fn fill(&mut self, data: &[u8]) {
        self.set_length(data.len());
        self.as_mut_slice()[..data.len()].copy_from_slice(data);
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }
}

unsafe impl DmaTxBuffer for DmaTxBuf {
    type View = BufView<DmaTxBuf>;
    type Final = DmaTxBuf;

    fn prepare(&mut self) -> Preparation {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // The cache may hold newer data than PSRAM does, so write it
                // back before the DMA reads the buffer from PSRAM.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}

/// DMA receive buffer.
///
/// A contiguous buffer linked together by DMA descriptors; it can only be
/// used for receiving data from a peripheral.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}

impl DmaRxBuf {
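    /// Creates a new [`DmaRxBuf`] from some descriptors and a buffer.
    ///
    /// A rough usage sketch; sizes and the surrounding transfer code are
    /// illustrative only:
    ///
    /// ```rust,ignore
    /// static mut DESCRIPTORS: [DmaDescriptor; 2] = [DmaDescriptor::EMPTY; 2];
    /// static mut BUFFER: [u8; 4096] = [0; 4096];
    ///
    /// let rx_buf = DmaRxBuf::new(
    ///     unsafe { &mut *core::ptr::addr_of_mut!(DESCRIPTORS) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(BUFFER) },
    /// )?;
    ///
    /// // After a peripheral-driven transfer completes, the received bytes
    /// // can be inspected via `received_data()` or `read_received_data()`.
    /// ```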
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        Self::new_with_config(descriptors, buffer, BurstConfig::default())
    }

    /// Creates a new [`DmaRxBuf`] with the given burst configuration.
    pub fn new_with_config(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
        config: impl Into<BurstConfig>,
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            descriptors: DescriptorSet::new(descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        buf.configure(config, buf.capacity())?;

        Ok(buf)
    }

    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;

        self.burst = burst;
        Ok(())
    }

    /// Reconfigures the burst settings, re-linking the descriptors if needed.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buffer, returning the descriptors and buffer it was made from.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors.into_inner(), self.buffer)
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the maximum number of bytes this buffer can receive.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.size())
            .sum::<usize>()
    }

    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;

        self.descriptors.set_rx_length(
            len,
            burst.max_chunk_size_for(&self.buffer[..len], TransferDirection::In),
        )
    }

    /// Sets the receive length of the buffer.
    ///
    /// Panics if the length cannot be applied, e.g. because it exceeds the
    /// capacity or violates alignment requirements.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst));
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Returns the total number of bytes that were received into this buffer.
    pub fn number_of_received_bytes(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

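    /// Copies the received data into the provided buffer, stopping when either
    /// side is exhausted, and returns the number of bytes copied.
    ///
    /// A short sketch of how this might be used after a transfer has finished
    /// (the transfer itself is omitted):
    ///
    /// ```rust,ignore
    /// let mut received = [0u8; 128];
    /// let count = rx_buf.read_received_data(&mut received);
    /// // `received[..count]` now holds the data the DMA wrote into `rx_buf`.
    /// ```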
    pub fn read_received_data(&self, mut buf: &mut [u8]) -> usize {
        let capacity = buf.len();
        for chunk in self.received_data() {
            if buf.is_empty() {
                break;
            }
            // Copy at most as many bytes as remain in the destination buffer.
            let n = chunk.len().min(buf.len());
            let to_fill;
            (to_fill, buf) = buf.split_at_mut(n);
            to_fill.copy_from_slice(&chunk[..n]);
        }

        capacity - buf.len()
    }

    /// Returns an iterator over the filled portions of the buffer, in the
    /// order they were received.
    pub fn received_data(&self) -> impl Iterator<Item = &[u8]> {
        self.descriptors.linked_iter().map(|desc| {
            // SAFETY: the descriptor's buffer pointer and length describe a
            // region within `self.buffer` that the DMA has finished writing.
            unsafe { core::slice::from_raw_parts(desc.buffer.cast_const(), desc.len()) }
        })
    }
}

unsafe impl DmaRxBuffer for DmaRxBuf {
    type View = BufView<DmaRxBuf>;
    type Final = DmaRxBuf;

    fn prepare(&mut self) -> Preparation {
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Invalidate the cached copy of the buffer so that, after the
                // transfer, the CPU reads the data written by the DMA rather
                // than stale cache lines.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}

/// DMA transmit/receive buffer.
///
/// A contiguous buffer linked together by two sets of DMA descriptors, so
/// the same memory can back both the transmit and the receive side of a
/// transfer.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxTxBuf {
    rx_descriptors: DescriptorSet<'static>,
    tx_descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}

impl DmaRxTxBuf {
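    /// Creates a new [`DmaRxTxBuf`] from RX descriptors, TX descriptors and a
    /// buffer.
    ///
    /// A rough construction sketch (sizes are illustrative only):
    ///
    /// ```rust,ignore
    /// static mut RX_DESCRIPTORS: [DmaDescriptor; 2] = [DmaDescriptor::EMPTY; 2];
    /// static mut TX_DESCRIPTORS: [DmaDescriptor; 2] = [DmaDescriptor::EMPTY; 2];
    /// static mut BUFFER: [u8; 4096] = [0; 4096];
    ///
    /// let rxtx_buf = DmaRxTxBuf::new(
    ///     unsafe { &mut *core::ptr::addr_of_mut!(RX_DESCRIPTORS) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(TX_DESCRIPTORS) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(BUFFER) },
    /// )?;
    /// ```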
    pub fn new(
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            rx_descriptors: DescriptorSet::new(rx_descriptors)?,
            tx_descriptors: DescriptorSet::new(tx_descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(buf.burst, capacity)?;

        Ok(buf)
    }

    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.rx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;

        Ok(())
    }

    /// Reconfigures the burst settings, re-linking the descriptors if needed.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buffer, returning the RX descriptors, TX descriptors and
    /// buffer it was made from.
    pub fn split(
        self,
    ) -> (
        &'static mut [DmaDescriptor],
        &'static mut [DmaDescriptor],
        &'static mut [u8],
    ) {
        (
            self.rx_descriptors.into_inner(),
            self.tx_descriptors.into_inner(),
            self.buffer,
        )
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted by this buffer.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.tx_descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.rx_descriptors.set_rx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        Ok(())
    }

    /// Sets the transfer length of the buffer.
    ///
    /// Panics if the length cannot be applied, e.g. because it exceeds the
    /// capacity or violates alignment requirements.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst));
    }
}

unsafe impl DmaTxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;
    type Final = DmaRxTxBuf;

    fn prepare(&mut self) -> Preparation {
        for desc in self.tx_descriptors.linked_iter_mut() {
            // Mark the last descriptor in the chain as the end of the transfer.
            desc.reset_for_tx(desc.next.is_null());
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Write back cached data before the DMA reads it from PSRAM.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.tx_descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}

unsafe impl DmaRxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;
    type Final = DmaRxTxBuf;

    fn prepare(&mut self) -> Preparation {
        for desc in self.rx_descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Invalidate cached data so the CPU later reads what the DMA
                // wrote to PSRAM rather than stale cache lines.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.rx_descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}

/// DMA streaming receive buffer.
///
/// The buffer is split evenly across the descriptors. As data is consumed
/// through the view, descriptors are handed back to the DMA, so the
/// peripheral can keep receiving while the CPU reads from behind it.
pub struct DmaRxStreamBuf {
    descriptors: &'static mut [DmaDescriptor],
    buffer: &'static mut [u8],
    burst: BurstConfig,
}

impl DmaRxStreamBuf {
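    /// Creates a new [`DmaRxStreamBuf`] that evenly distributes the buffer
    /// between the provided descriptors.
    ///
    /// A rough construction sketch; sizes are illustrative, and each
    /// descriptor can describe at most 4095 bytes of the buffer:
    ///
    /// ```rust,ignore
    /// static mut DESCRIPTORS: [DmaDescriptor; 4] = [DmaDescriptor::EMPTY; 4];
    /// static mut BUFFER: [u8; 4096] = [0; 4096];
    ///
    /// let stream_buf = DmaRxStreamBuf::new(
    ///     unsafe { &mut *core::ptr::addr_of_mut!(DESCRIPTORS) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(BUFFER) },
    /// )?;
    /// ```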
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        if !is_slice_in_dram(descriptors) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }
        if !is_slice_in_dram(buffer) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        if descriptors.is_empty() {
            return Err(DmaBufError::InsufficientDescriptors);
        }

        // Evenly distribute the buffer between the descriptors.
        let chunk_size = buffer.len() / descriptors.len();

        if chunk_size > 4095 {
            return Err(DmaBufError::InsufficientDescriptors);
        }

        // Any remainder of the division is added to the last descriptor.
        let excess = buffer.len() % descriptors.len();
        if chunk_size + excess > 4095 {
            return Err(DmaBufError::InsufficientDescriptors);
        }

        let mut chunks = buffer.chunks_exact_mut(chunk_size);
        for (desc, chunk) in descriptors.iter_mut().zip(chunks.by_ref()) {
            desc.buffer = chunk.as_mut_ptr();
            desc.set_size(chunk.len());
        }

        let remainder = chunks.into_remainder();
        debug_assert_eq!(remainder.len(), excess);

        if !remainder.is_empty() {
            // Append the excess bytes to the last descriptor.
            let last_descriptor = descriptors.last_mut().unwrap();
            last_descriptor.set_size(last_descriptor.size() + remainder.len());
        }

        Ok(Self {
            descriptors,
            buffer,
            burst: BurstConfig::default(),
        })
    }

    /// Consumes the buffer, returning the descriptors and buffer it was made from.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors, self.buffer)
    }
}

unsafe impl DmaRxBuffer for DmaRxStreamBuf {
    type View = DmaRxStreamBufView;
    type Final = DmaRxStreamBuf;

    fn prepare(&mut self) -> Preparation {
        // Link the descriptors into a chain and reset them for reception.
        let mut next = null_mut();
        for desc in self.descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;

            desc.reset_for_rx();
        }
        Preparation {
            start: self.descriptors.as_mut_ptr(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: self.burst,

            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> DmaRxStreamBufView {
        DmaRxStreamBufView {
            buf: self,
            descriptor_idx: 0,
            descriptor_offset: 0,
        }
    }

    fn from_view(view: Self::View) -> Self {
        view.buf
    }
}

/// A view into a [`DmaRxStreamBuf`] that is currently in use by the DMA.
pub struct DmaRxStreamBufView {
    buf: DmaRxStreamBuf,
    descriptor_idx: usize,
    descriptor_offset: usize,
}

impl DmaRxStreamBufView {
    /// Returns the number of bytes that are currently available to read.
    pub fn available_bytes(&self) -> usize {
        let (tail, head) = self.buf.descriptors.split_at(self.descriptor_idx);
        let mut result = 0;
        for desc in head.iter().chain(tail) {
            if desc.owner() == Owner::Dma {
                break;
            }
            result += desc.len();
        }
        result - self.descriptor_offset
    }

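    /// Copies as many received bytes as are available (and fit) into `buf`,
    /// marking them as read, and returns the number of bytes copied.
    ///
    /// A small polling-style sketch of draining the view; `process` is a
    /// placeholder for application code and the surrounding transfer is
    /// omitted:
    ///
    /// ```rust,ignore
    /// let mut scratch = [0u8; 256];
    /// while view.available_bytes() > 0 {
    ///     let n = view.pop(&mut scratch);
    ///     process(&scratch[..n]); // `process` stands in for application logic
    /// }
    /// ```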
    pub fn pop(&mut self, buf: &mut [u8]) -> usize {
        if buf.is_empty() {
            return 0;
        }
        let total_bytes = buf.len();

        let mut remaining = buf;
        loop {
            let available = self.peek();
            if available.is_empty() {
                // No more data is ready; return what has been copied so far.
                break;
            }
            if available.len() >= remaining.len() {
                remaining.copy_from_slice(&available[0..remaining.len()]);
                self.consume(remaining.len());
                let consumed = remaining.len();
                remaining = &mut remaining[consumed..];
                break;
            } else {
                let to_consume = available.len();
                remaining[0..to_consume].copy_from_slice(available);
                self.consume(to_consume);
                remaining = &mut remaining[to_consume..];
            }
        }

        total_bytes - remaining.len()
    }

    /// Returns the bytes that are available to read, without marking them as
    /// read.
    ///
    /// Only the contiguous run of bytes starting at the current read position
    /// is returned; more data may be available after a wrap-around.
    pub fn peek(&self) -> &[u8] {
        let (slice, _) = self.peek_internal(false);
        slice
    }

    /// Same as [`Self::peek`], but stops at the first descriptor with the EOF
    /// flag set and reports whether such a descriptor was found.
    pub fn peek_until_eof(&self) -> (&[u8], bool) {
        self.peek_internal(true)
    }

    /// Marks `n` bytes as read, handing their descriptors back to the DMA,
    /// and returns the number of bytes that were actually consumed.
    pub fn consume(&mut self, n: usize) -> usize {
        let mut remaining_bytes_to_consume = n;

        loop {
            let desc = &mut self.buf.descriptors[self.descriptor_idx];

            if desc.owner() == Owner::Dma {
                // The DMA still owns this descriptor; nothing more to consume.
                break;
            }

            let remaining_bytes_in_descriptor = desc.len() - self.descriptor_offset;
            if remaining_bytes_to_consume < remaining_bytes_in_descriptor {
                self.descriptor_offset += remaining_bytes_to_consume;
                remaining_bytes_to_consume = 0;
                break;
            }

            // This descriptor is fully consumed; recycle it for the DMA.
            desc.set_owner(Owner::Dma);
            desc.set_suc_eof(false);
            desc.set_length(0);

            // Detach it from the chain...
            desc.next = null_mut();

            let desc_ptr: *mut _ = desc;

            let prev_descriptor_index = self
                .descriptor_idx
                .checked_sub(1)
                .unwrap_or(self.buf.descriptors.len() - 1);

            // ...and reattach it behind the previous descriptor so the DMA
            // can reuse it.
            self.buf.descriptors[prev_descriptor_index].next = desc_ptr;

            self.descriptor_idx += 1;
            if self.descriptor_idx >= self.buf.descriptors.len() {
                self.descriptor_idx = 0;
            }
            self.descriptor_offset = 0;

            remaining_bytes_to_consume -= remaining_bytes_in_descriptor;
        }

        n - remaining_bytes_to_consume
    }

    fn peek_internal(&self, stop_at_eof: bool) -> (&[u8], bool) {
        let descriptors = &self.buf.descriptors[self.descriptor_idx..];

        // There is always at least one descriptor.
        debug_assert!(!descriptors.is_empty());

        if descriptors.len() == 1 {
            let last_descriptor = &descriptors[0];
            if last_descriptor.owner() == Owner::Dma {
                // No data is available.
                (&[], false)
            } else {
                let length = last_descriptor.len() - self.descriptor_offset;
                (
                    &self.buf.buffer[self.buf.buffer.len() - length..],
                    last_descriptor.flags.suc_eof(),
                )
            }
        } else {
            let chunk_size = descriptors[0].size();
            let mut found_eof = false;

            let mut number_of_contiguous_bytes = 0;
            for desc in descriptors {
                if desc.owner() == Owner::Dma {
                    break;
                }
                number_of_contiguous_bytes += desc.len();

                if stop_at_eof && desc.flags.suc_eof() {
                    found_eof = true;
                    break;
                }
                // A partially filled descriptor ends the contiguous run.
                if desc.len() < desc.size() {
                    break;
                }
            }

            (
                &self.buf.buffer[chunk_size * self.descriptor_idx..][..number_of_contiguous_bytes]
                    [self.descriptor_offset..],
                found_eof,
            )
        }
    }
}

static mut EMPTY: [DmaDescriptor; 1] = [DmaDescriptor::EMPTY];

/// An empty buffer that can be used when no data needs to be transferred.
pub struct EmptyBuf;

unsafe impl DmaTxBuffer for EmptyBuf {
    type View = EmptyBuf;
    type Final = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: core::ptr::addr_of_mut!(EMPTY).cast(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // The EMPTY descriptor is owned by the CPU, so the DMA must not
            // check the owner bit.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}

unsafe impl DmaRxBuffer for EmptyBuf {
    type View = EmptyBuf;
    type Final = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: core::ptr::addr_of_mut!(EMPTY).cast(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // The EMPTY descriptor is owned by the CPU, so the DMA must not
            // check the owner bit.
            check_owner: Some(false),
            auto_write_back: true,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}

/// DMA loop buffer.
///
/// The buffer is described by a single descriptor that points back to itself,
/// so the DMA transmits its contents over and over until the transfer is
/// stopped.
pub struct DmaLoopBuf {
    descriptor: &'static mut DmaDescriptor,
    buffer: &'static mut [u8],
}

impl DmaLoopBuf {
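    /// Creates a new [`DmaLoopBuf`] from a single descriptor and a buffer.
    ///
    /// A rough construction sketch; the buffer must fit in one descriptor, so
    /// its size is limited to a little under 4096 bytes:
    ///
    /// ```rust,ignore
    /// static mut DESCRIPTOR: DmaDescriptor = DmaDescriptor::EMPTY;
    /// static mut BUFFER: [u8; 1024] = [0; 1024];
    ///
    /// let loop_buf = DmaLoopBuf::new(
    ///     unsafe { &mut *core::ptr::addr_of_mut!(DESCRIPTOR) },
    ///     unsafe { &mut *core::ptr::addr_of_mut!(BUFFER) },
    /// )?;
    /// ```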
    pub fn new(
        descriptor: &'static mut DmaDescriptor,
        buffer: &'static mut [u8],
    ) -> Result<DmaLoopBuf, DmaBufError> {
        if !is_slice_in_dram(buffer) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }
        if !is_slice_in_dram(core::slice::from_ref(descriptor)) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        if buffer.len() > BurstConfig::default().max_chunk_size_for(buffer, TransferDirection::Out)
        {
            return Err(DmaBufError::InsufficientDescriptors);
        }

        descriptor.set_owner(Owner::Dma);
        descriptor.set_suc_eof(false);
        descriptor.set_length(buffer.len());
        descriptor.set_size(buffer.len());
        descriptor.buffer = buffer.as_mut_ptr();
        // The descriptor points back to itself, forming an endless loop.
        descriptor.next = descriptor;

        Ok(Self { descriptor, buffer })
    }

    /// Consumes the buffer, returning the descriptor and buffer it was made from.
    pub fn split(self) -> (&'static mut DmaDescriptor, &'static mut [u8]) {
        (self.descriptor, self.buffer)
    }
}

unsafe impl DmaTxBuffer for DmaLoopBuf {
    type View = DmaLoopBuf;
    type Final = DmaLoopBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: self.descriptor,
            #[cfg(psram_dma)]
            accesses_psram: false,
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::default(),
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> Self::View {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}

impl Deref for DmaLoopBuf {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        self.buffer
    }
}

impl DerefMut for DmaLoopBuf {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buffer
    }
}

/// A pre-built [`Preparation`] wrapped so it can be passed where a DMA buffer
/// is expected.
pub(crate) struct NoBuffer(Preparation);

impl NoBuffer {
    fn prep(&self) -> Preparation {
        Preparation {
            start: self.0.start,
            direction: self.0.direction,
            #[cfg(psram_dma)]
            accesses_psram: self.0.accesses_psram,
            burst_transfer: self.0.burst_transfer,
            check_owner: self.0.check_owner,
            auto_write_back: self.0.auto_write_back,
        }
    }
}

unsafe impl DmaTxBuffer for NoBuffer {
    type View = ();
    type Final = ();

    fn prepare(&mut self) -> Preparation {
        self.prep()
    }

    fn into_view(self) -> Self::View {}
    fn from_view(_view: Self::View) {}
}

unsafe impl DmaRxBuffer for NoBuffer {
    type View = ();
    type Final = ();

    fn prepare(&mut self) -> Preparation {
        self.prep()
    }

    fn into_view(self) -> Self::View {}
    fn from_view(_view: Self::View) {}
}

/// Builds a transmit descriptor chain over `data` and returns a preparation
/// for it, along with the number of bytes covered by the descriptors.
///
/// The caller must ensure `data` remains valid for the duration of the
/// transfer.
#[cfg_attr(not(aes_dma), expect(unused))]
pub(crate) unsafe fn prepare_for_tx(
    descriptors: &mut [DmaDescriptor],
    mut data: NonNull<[u8]>,
    block_size: usize,
) -> Result<(NoBuffer, usize), DmaError> {
    let alignment =
        BurstConfig::DEFAULT.min_alignment(unsafe { data.as_ref() }, TransferDirection::Out);

    if !data.addr().get().is_multiple_of(alignment) {
        return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
    }

    // Each chunk must stay a multiple of both the DMA alignment and the
    // peripheral's block size.
    let alignment = alignment.max(block_size);
    let chunk_size = 4096 - alignment;

    let data_len = data.len().min(chunk_size * descriptors.len());

    cfg_if::cfg_if! {
        if #[cfg(psram_dma)] {
            let data_addr = data.addr().get();
            let data_in_psram = crate::psram::psram_range().contains(&data_addr);

            if data_in_psram {
                // Write back cached data before the DMA reads it from PSRAM.
                unsafe { crate::soc::cache_writeback_addr(data_addr as u32, data_len as u32) };
            }
        }
    }

    let mut descriptors = unwrap!(DescriptorSet::new(descriptors));
    unwrap!(descriptors.link_with_buffer(unsafe { data.as_mut() }, chunk_size));
    unwrap!(descriptors.set_tx_length(data_len, chunk_size));

    for desc in descriptors.linked_iter_mut() {
        desc.reset_for_tx(desc.next.is_null());
    }

    Ok((
        NoBuffer(Preparation {
            start: descriptors.head(),
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::DEFAULT,
            check_owner: None,
            auto_write_back: true,
            #[cfg(psram_dma)]
            accesses_psram: data_in_psram,
        }),
        data_len,
    ))
}

/// Builds a receive descriptor chain over `data` and returns a preparation
/// for it, along with the number of bytes covered by the descriptors.
///
/// The caller must ensure `data` remains valid for the duration of the
/// transfer.
#[cfg_attr(not(aes_dma), expect(unused))]
pub(crate) unsafe fn prepare_for_rx(
    descriptors: &mut [DmaDescriptor],
    #[cfg(psram_dma)] align_buffers: &mut [Option<ManualWritebackBuffer>; 2],
    mut data: NonNull<[u8]>,
) -> (NoBuffer, usize) {
    let chunk_size =
        BurstConfig::DEFAULT.max_chunk_size_for(unsafe { data.as_ref() }, TransferDirection::In);

    cfg_if::cfg_if! {
        if #[cfg(psram_dma)] {
            let data_addr = data.addr().get();
            let data_in_psram = crate::psram::psram_range().contains(&data_addr);
        } else {
            let data_in_psram = false;
        }
    }

    let mut descriptors = unwrap!(DescriptorSet::new(descriptors));
    let data_len = if data_in_psram {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // PSRAM buffers need special treatment: the unaligned head and
                // tail are received into internal bounce buffers and copied
                // back manually after the transfer.
                let consumed_bytes = build_descriptor_list_for_psram(
                    &mut descriptors,
                    align_buffers,
                    data,
                );

                unsafe {
                    crate::soc::cache_writeback_addr(data_addr as u32, consumed_bytes as u32);
                    crate::soc::cache_invalidate_addr(data_addr as u32, consumed_bytes as u32);
                }

                consumed_bytes
            } else {
                unreachable!()
            }
        }
    } else {
        let data_len = data.len();
        unwrap!(descriptors.link_with_buffer(unsafe { data.as_mut() }, chunk_size));
        unwrap!(descriptors.set_tx_length(data_len, chunk_size));

        data_len
    };

    for desc in descriptors.linked_iter_mut() {
        desc.reset_for_rx();
    }

    (
        NoBuffer(Preparation {
            start: descriptors.head(),
            direction: TransferDirection::In,
            burst_transfer: BurstConfig::DEFAULT,
            check_owner: None,
            auto_write_back: true,
            #[cfg(psram_dma)]
            accesses_psram: data_in_psram,
        }),
        data_len,
    )
}

#[cfg(psram_dma)]
fn build_descriptor_list_for_psram(
    descriptors: &mut DescriptorSet<'_>,
    copy_buffers: &mut [Option<ManualWritebackBuffer>; 2],
    data: NonNull<[u8]>,
) -> usize {
    let data_len = data.len();
    let data_addr = data.addr().get();

    let min_alignment = ExternalBurstConfig::DEFAULT.min_psram_alignment(TransferDirection::In);
    let chunk_size = 4096 - min_alignment;

    let mut descriptor_iter = DescriptorChainingIter::new(descriptors.descriptors);
    let mut copy_buffer_iter = copy_buffers.iter_mut();

    // If the data is short enough, the whole transfer goes through a single
    // bounce buffer and no directly-received middle section is needed.
    let has_aligned_data = data_len > BUF_LEN;

    // The head of the buffer is received into a bounce buffer and written back
    // to PSRAM manually, so the directly-received middle section starts at an
    // aligned address.
    let offset = data_addr % min_alignment;
    let head_to_copy = min_alignment - offset;
    let head_to_copy = if !has_aligned_data {
        BUF_LEN
    } else if head_to_copy > 0 && head_to_copy < MIN_LAST_DMA_LEN {
        head_to_copy + min_alignment
    } else {
        head_to_copy
    };
    let head_to_copy = head_to_copy.min(data_len);

    // The same applies to the tail bytes that don't fill a whole alignment unit.
    let tail_to_copy = (data_len - head_to_copy) % min_alignment;
    let tail_to_copy = if tail_to_copy > 0 && tail_to_copy < MIN_LAST_DMA_LEN {
        tail_to_copy + min_alignment
    } else {
        tail_to_copy
    };

    let mut consumed = 0;

    if head_to_copy > 0 {
        let copy_buffer = unwrap!(copy_buffer_iter.next());
        let buffer =
            copy_buffer.insert(ManualWritebackBuffer::new(get_range(data, 0..head_to_copy)));

        let Some(descriptor) = descriptor_iter.next() else {
            return consumed;
        };
        descriptor.set_size(head_to_copy);
        descriptor.buffer = buffer.buffer_ptr();
        consumed += head_to_copy;
    };

    // The aligned middle of the buffer is received directly into PSRAM.
    let mut aligned_data = get_range(data, head_to_copy..data.len() - tail_to_copy);
    while !aligned_data.is_empty() {
        let Some(descriptor) = descriptor_iter.next() else {
            return consumed;
        };
        let chunk = aligned_data.len().min(chunk_size);

        descriptor.set_size(chunk);
        descriptor.buffer = aligned_data.cast::<u8>().as_ptr();
        consumed += chunk;
        aligned_data = get_range(aligned_data, chunk..aligned_data.len());
    }

    if tail_to_copy > 0 {
        let copy_buffer = unwrap!(copy_buffer_iter.next());
        let buffer = copy_buffer.insert(ManualWritebackBuffer::new(get_range(
            data,
            data.len() - tail_to_copy..data.len(),
        )));

        let Some(descriptor) = descriptor_iter.next() else {
            return consumed;
        };
        descriptor.set_size(tail_to_copy);
        descriptor.buffer = buffer.buffer_ptr();
        consumed += tail_to_copy;
    }

    consumed
}

#[cfg(psram_dma)]
fn get_range(ptr: NonNull<[u8]>, range: Range<usize>) -> NonNull<[u8]> {
    let len = range.end - range.start;
    NonNull::slice_from_raw_parts(unsafe { ptr.cast().byte_add(range.start) }, len)
}

/// An iterator over a descriptor list that links each yielded descriptor to
/// the previously yielded one, building a chain as it goes.
#[cfg(psram_dma)]
struct DescriptorChainingIter<'a> {
    index: usize,
    descriptors: &'a mut [DmaDescriptor],
}

#[cfg(psram_dma)]
impl<'a> DescriptorChainingIter<'a> {
    fn new(descriptors: &'a mut [DmaDescriptor]) -> Self {
        Self {
            descriptors,
            index: 0,
        }
    }

    fn next(&mut self) -> Option<&'_ mut DmaDescriptor> {
        if self.index == 0 {
            self.index += 1;
            self.descriptors.get_mut(0)
        } else if self.index < self.descriptors.len() {
            let index = self.index;
            self.index += 1;

            let ptr = &raw mut self.descriptors[index];

            // Chain the previous descriptor to this one.
            self.descriptors[index - 1].next = ptr;

            // SAFETY: `ptr` points into `self.descriptors` and no other
            // reference to this element is alive at this point.
            Some(unsafe { &mut *ptr })
        } else {
            None
        }
    }
}

/// The minimum allowed length of the last DMA transfer in a chain.
#[cfg(psram_dma)]
const MIN_LAST_DMA_LEN: usize = if cfg!(esp32s2) { 5 } else { 1 };

/// Size of the bounce buffers used for the unaligned head and tail of a
/// PSRAM receive transfer.
#[cfg(psram_dma)]
const BUF_LEN: usize = 16 + 2 * (MIN_LAST_DMA_LEN - 1);

/// A small internal bounce buffer that the DMA writes into, whose contents
/// are then copied to their final (PSRAM) destination manually.
#[cfg(psram_dma)]
pub(crate) struct ManualWritebackBuffer {
    dst_address: NonNull<u8>,
    buffer: [u8; BUF_LEN],
    n_bytes: u8,
}

#[cfg(psram_dma)]
impl ManualWritebackBuffer {
    pub fn new(ptr: NonNull<[u8]>) -> Self {
        assert!(ptr.len() <= BUF_LEN);
        Self {
            dst_address: ptr.cast(),
            buffer: [0; BUF_LEN],
            n_bytes: ptr.len() as u8,
        }
    }

    /// Copies the received bytes to their final destination.
    pub fn write_back(&self) {
        unsafe {
            self.dst_address
                .as_ptr()
                .copy_from(self.buffer.as_ptr(), self.n_bytes as usize);
        }
    }

    /// Returns a pointer to the internal buffer, for use in a DMA descriptor.
    pub fn buffer_ptr(&self) -> *mut u8 {
        self.buffer.as_ptr().cast_mut()
    }
}