1#[cfg(dma_can_access_psram)]
2use core::ops::Range;
3use core::{
4 ops::{Deref, DerefMut},
5 ptr::{NonNull, null_mut},
6};
7
8use super::*;
9use crate::soc::is_slice_in_dram;
10#[cfg(dma_can_access_psram)]
11use crate::soc::{is_slice_in_psram, is_valid_psram_address, is_valid_ram_address};
12
/// Error returned when creating or configuring a DMA buffer fails.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaBufError {
    /// The buffer is smaller than the requested length.
    BufferTooSmall,

    /// More descriptors are needed for the buffer size.
    InsufficientDescriptors,

    /// The descriptors or buffer are not located in a supported (DMA-capable)
    /// memory region.
    UnsupportedMemoryRegion,

    /// The buffer's address or length violates an alignment requirement.
    InvalidAlignment(DmaAlignmentError),

    /// The chunk size is invalid for this configuration.
    InvalidChunkSize,
}
32
/// The kind of DMA alignment violation.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaAlignmentError {
    /// The buffer's start address is not sufficiently aligned.
    Address,

    /// The buffer's length is not a multiple of the required alignment.
    Size,
}
43
44impl From<DmaAlignmentError> for DmaBufError {
45 fn from(err: DmaAlignmentError) -> Self {
46 DmaBufError::InvalidAlignment(err)
47 }
48}
49
cfg_if::cfg_if! {
    if #[cfg(dma_can_access_psram)] {
        /// Burst size used when transferring to/from external (PSRAM)
        /// memory. The discriminant is the burst size in bytes.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum ExternalBurstConfig {
            /// 16-byte bursts.
            Size16 = 16,

            /// 32-byte bursts.
            Size32 = 32,

            /// 64-byte bursts.
            Size64 = 64,
        }

        impl ExternalBurstConfig {
            /// The default external memory burst size.
            pub const DEFAULT: Self = Self::Size16;
        }

        impl Default for ExternalBurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Burst mode used when transferring to/from internal (DRAM)
        /// memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum InternalBurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl InternalBurstConfig {
            /// The default internal memory burst mode.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for InternalBurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Combined burst configuration for chips whose DMA can access
        /// PSRAM: internal and external memory are configured separately.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub struct BurstConfig {
            /// Burst size for external (PSRAM) accesses.
            pub external_memory: ExternalBurstConfig,

            /// Burst mode for internal (DRAM) accesses.
            pub internal_memory: InternalBurstConfig,
        }

        impl BurstConfig {
            /// The default burst configuration (both defaults).
            pub const DEFAULT: Self = Self {
                external_memory: ExternalBurstConfig::DEFAULT,
                internal_memory: InternalBurstConfig::DEFAULT,
            };
        }

        impl Default for BurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        impl From<InternalBurstConfig> for BurstConfig {
            fn from(internal_memory: InternalBurstConfig) -> Self {
                Self {
                    external_memory: ExternalBurstConfig::DEFAULT,
                    internal_memory,
                }
            }
        }

        impl From<ExternalBurstConfig> for BurstConfig {
            fn from(external_memory: ExternalBurstConfig) -> Self {
                Self {
                    external_memory,
                    internal_memory: InternalBurstConfig::DEFAULT,
                }
            }
        }
    } else {
        /// Burst configuration for chips whose DMA only accesses internal
        /// memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum BurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl BurstConfig {
            /// The default burst configuration.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for BurstConfig {
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        // On these chips the internal configuration *is* the whole
        // configuration; the alias lets shared code use one name.
        type InternalBurstConfig = BurstConfig;
    }
}
171
#[cfg(dma_can_access_psram)]
impl ExternalBurstConfig {
    /// Minimum alignment required for a PSRAM buffer transferred in the
    /// given direction with this burst size.
    const fn min_psram_alignment(self, direction: TransferDirection) -> usize {
        match direction {
            // Receiving into PSRAM requires burst-sized alignment.
            TransferDirection::In => self as usize,
            // Transmitting out of PSRAM has no extra requirement.
            TransferDirection::Out => 1,
        }
    }
}
197
198impl InternalBurstConfig {
199 pub(super) const fn is_burst_enabled(self) -> bool {
200 !matches!(self, Self::Disabled)
201 }
202
203 const fn min_dram_alignment(self, direction: TransferDirection) -> usize {
205 if matches!(direction, TransferDirection::In) {
206 if cfg!(esp32) {
207 4
210 } else if self.is_burst_enabled() {
211 4
213 } else {
214 1
215 }
216 } else {
217 if cfg!(esp32) {
220 4
226 } else {
227 1
228 }
229 }
230 }
231}
232
/// `const` maximum of two `usize` values (`Ord::max` is not a `const fn`).
const fn max(a: usize, b: usize) -> usize {
    if a < b { b } else { a }
}
236
impl BurstConfig {
    delegate::delegate! {
        // On PSRAM-capable chips these live on the internal-memory half of
        // the configuration; on other chips `BurstConfig` has them directly.
        #[cfg(dma_can_access_psram)]
        to self.internal_memory {
            pub(super) const fn min_dram_alignment(self, direction: TransferDirection) -> usize;
            pub(super) fn is_burst_enabled(self) -> bool;
        }
    }

    /// Returns an alignment that satisfies both transfer directions (and,
    /// where applicable, external memory) for this configuration.
    pub const fn min_compatible_alignment(self) -> usize {
        let in_alignment = self.min_dram_alignment(TransferDirection::In);
        let out_alignment = self.min_dram_alignment(TransferDirection::Out);
        let alignment = max(in_alignment, out_alignment);

        #[cfg(dma_can_access_psram)]
        let alignment = max(alignment, self.external_memory as usize);

        alignment
    }

    // A descriptor can cover at most 4095 bytes; using `4096 - alignment`
    // keeps every chunk boundary aligned to `alignment`.
    const fn chunk_size_for_alignment(alignment: usize) -> usize {
        4096 - alignment
    }

    /// Returns the largest chunk size that is compatible with both transfer
    /// directions for this configuration.
    pub const fn max_compatible_chunk_size(self) -> usize {
        Self::chunk_size_for_alignment(self.min_compatible_alignment())
    }

    // Minimum alignment required for `_buffer` in the given direction,
    // taking PSRAM placement into account where the chip supports it.
    fn min_alignment(self, _buffer: &[u8], direction: TransferDirection) -> usize {
        let alignment = self.min_dram_alignment(direction);

        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                // Buffers in PSRAM must additionally honour the external
                // memory burst alignment.
                let mut alignment = alignment;
                if is_valid_psram_address(_buffer.as_ptr() as usize) {
                    alignment = max(alignment, self.external_memory.min_psram_alignment(direction));
                }
            }
        }

        alignment
    }

    // Largest chunk size usable for `buffer` in the given direction.
    fn max_chunk_size_for(self, buffer: &[u8], direction: TransferDirection) -> usize {
        Self::chunk_size_for_alignment(self.min_alignment(buffer, direction))
    }

    // Verifies the buffer's address (and, for RX, its length) against the
    // alignment requirements.
    fn ensure_buffer_aligned(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaAlignmentError> {
        let alignment = self.min_alignment(buffer, direction);
        if !(buffer.as_ptr() as usize).is_multiple_of(alignment) {
            return Err(DmaAlignmentError::Address);
        }

        // Only receive buffers need a length that is a multiple of the
        // alignment.
        if direction == TransferDirection::In && !buffer.len().is_multiple_of(alignment) {
            return Err(DmaAlignmentError::Size);
        }

        Ok(())
    }

    // Verifies the buffer lies in a DMA-capable region and is suitably
    // aligned for the given direction.
    fn ensure_buffer_compatible(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaBufError> {
        // An empty buffer is trivially compatible.
        if buffer.is_empty() {
            return Ok(());
        }
        let is_in_dram = is_slice_in_dram(buffer);
        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)]{
                let is_in_psram = is_slice_in_psram(buffer);
            } else {
                let is_in_psram = false;
            }
        }

        if !(is_in_dram || is_in_psram) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        self.ensure_buffer_aligned(buffer, direction)?;

        Ok(())
    }
}
346
/// The direction of a DMA transfer, from the memory's point of view.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TransferDirection {
    /// Peripheral to memory (receive).
    In,
    /// Memory to peripheral (transmit).
    Out,
}
356
/// The result of preparing a DMA buffer for a transfer: everything a DMA
/// channel needs to start the transfer.
#[derive(PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Preparation {
    /// Pointer to the first descriptor of the prepared descriptor list.
    pub start: *mut DmaDescriptor,

    /// The direction of the transfer this preparation describes.
    pub direction: TransferDirection,

    /// Whether the buffer lives in (cache-coherency-managed) PSRAM.
    #[cfg(dma_can_access_psram)]
    pub accesses_psram: bool,

    /// Burst configuration the channel should use for this transfer.
    ///
    #[doc = crate::trm_markdown_link!()]
    pub burst_transfer: BurstConfig,

    /// Whether the DMA channel should verify descriptor ownership;
    /// `None` leaves the channel's current setting unchanged.
    pub check_owner: Option<bool>,

    /// Whether the DMA should automatically write back descriptors when it
    /// finishes with them.
    pub auto_write_back: bool,
}
420
/// A buffer that can be used for a transmit (memory → peripheral) DMA
/// transfer.
///
/// # Safety
///
/// [`Self::prepare`] must return a valid descriptor list; the descriptors
/// and the memory they reference must stay valid for the whole transfer.
pub unsafe trait DmaTxBuffer {
    /// The type the buffer becomes while a transfer is in progress.
    type View;

    /// The type returned when the view is handed back after the transfer.
    type Final;

    /// Prepares the buffer and its descriptors for a transfer.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view once the transfer has stopped.
    fn from_view(view: Self::View) -> Self::Final;
}
450
/// A buffer that can be used for a receive (peripheral → memory) DMA
/// transfer.
///
/// # Safety
///
/// [`Self::prepare`] must return a valid descriptor list; the descriptors
/// and the memory they reference must stay valid for the whole transfer.
pub unsafe trait DmaRxBuffer {
    /// The type the buffer becomes while a transfer is in progress.
    type View;

    /// The type returned when the view is handed back after the transfer.
    type Final;

    /// Prepares the buffer and its descriptors for a transfer.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view once the transfer has stopped.
    fn from_view(view: Self::View) -> Self::Final;
}
484
/// An in-flight view of an underlying buffer of type `T`.
pub struct BufView<T>(T);
490
/// A transmit-only DMA buffer: statically allocated descriptors plus a
/// statically allocated byte buffer.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaTxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
503
impl DmaTxBuf {
    /// Creates a new [`DmaTxBuf`] from some descriptors and a buffer, using
    /// the default burst configuration.
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        Self::new_with_config(descriptors, buffer, BurstConfig::default())
    }

    /// Creates a new [`DmaTxBuf`] from some descriptors, a buffer and a
    /// specific burst configuration.
    pub fn new_with_config(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
        config: impl Into<BurstConfig>,
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            descriptors: DescriptorSet::new(descriptors)?,
            buffer,
            // Placeholder; `configure` stores the real configuration.
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(config, capacity)?;

        Ok(buf)
    }

    // Validates the configuration against the buffer, links the descriptors
    // accordingly and stores the configuration.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;
        Ok(())
    }

    /// Changes the burst configuration, re-validating the current length.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buf, returning the descriptors and buffer.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors.into_inner(), self.buffer)
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted by this buf.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    // Sets the transfer length, checking capacity, memory region and
    // alignment, then re-initialises the linked descriptors.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        // The descriptor with a null `next` pointer is the last one; it is
        // marked as end-of-frame.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        Ok(())
    }

    /// Sets the number of bytes to transmit.
    ///
    /// # Panics
    ///
    /// Panics if `len` exceeds the capacity or violates the buffer's
    /// alignment requirements.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst))
    }

    /// Copies `data` into the buffer and sets the transfer length to match.
    pub fn fill(&mut self, data: &[u8]) {
        self.set_length(data.len());
        self.as_mut_slice()[..data.len()].copy_from_slice(data);
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }
}
638
unsafe impl DmaTxBuffer for DmaTxBuf {
    type View = BufView<DmaTxBuf>;
    type Final = DmaTxBuf;

    fn prepare(&mut self) -> Preparation {
        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                // If the data lives in PSRAM, write the cache back first so
                // the DMA reads what the CPU actually wrote.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(dma_can_access_psram)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
677
/// A receive-only DMA buffer: statically allocated descriptors plus a
/// statically allocated byte buffer.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
690
691impl DmaRxBuf {
692 pub fn new(
700 descriptors: &'static mut [DmaDescriptor],
701 buffer: &'static mut [u8],
702 ) -> Result<Self, DmaBufError> {
703 Self::new_with_config(descriptors, buffer, BurstConfig::default())
704 }
705
706 pub fn new_with_config(
715 descriptors: &'static mut [DmaDescriptor],
716 buffer: &'static mut [u8],
717 config: impl Into<BurstConfig>,
718 ) -> Result<Self, DmaBufError> {
719 let mut buf = Self {
720 descriptors: DescriptorSet::new(descriptors)?,
721 buffer,
722 burst: BurstConfig::default(),
723 };
724
725 buf.configure(config, buf.capacity())?;
726
727 Ok(buf)
728 }
729
730 fn configure(
731 &mut self,
732 burst: impl Into<BurstConfig>,
733 length: usize,
734 ) -> Result<(), DmaBufError> {
735 let burst = burst.into();
736 self.set_length_fallible(length, burst)?;
737
738 self.descriptors.link_with_buffer(
739 self.buffer,
740 burst.max_chunk_size_for(self.buffer, TransferDirection::In),
741 )?;
742
743 self.burst = burst;
744 Ok(())
745 }
746
747 pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
749 let len = self.len();
750 self.configure(burst, len)
751 }
752
753 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
755 (self.descriptors.into_inner(), self.buffer)
756 }
757
758 pub fn capacity(&self) -> usize {
760 self.buffer.len()
761 }
762
763 #[allow(clippy::len_without_is_empty)]
766 pub fn len(&self) -> usize {
767 self.descriptors
768 .linked_iter()
769 .map(|d| d.size())
770 .sum::<usize>()
771 }
772
773 fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
774 if len > self.capacity() {
775 return Err(DmaBufError::BufferTooSmall);
776 }
777 burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
778
779 self.descriptors.set_rx_length(
780 len,
781 burst.max_chunk_size_for(&self.buffer[..len], TransferDirection::In),
782 )
783 }
784
785 pub fn set_length(&mut self, len: usize) {
791 unwrap!(self.set_length_fallible(len, self.burst));
792 }
793
794 pub fn as_slice(&self) -> &[u8] {
796 self.buffer
797 }
798
799 pub fn as_mut_slice(&mut self) -> &mut [u8] {
801 self.buffer
802 }
803
804 pub fn number_of_received_bytes(&self) -> usize {
806 self.descriptors
807 .linked_iter()
808 .map(|d| d.len())
809 .sum::<usize>()
810 }
811
812 pub fn read_received_data(&self, mut buf: &mut [u8]) -> usize {
819 let capacity = buf.len();
822 for chunk in self.received_data() {
823 if buf.is_empty() {
824 break;
825 }
826 let to_fill;
827 (to_fill, buf) = buf.split_at_mut(chunk.len());
828 to_fill.copy_from_slice(chunk);
829 }
830
831 capacity - buf.len()
832 }
833
834 pub fn received_data(&self) -> impl Iterator<Item = &[u8]> {
836 self.descriptors.linked_iter().map(|desc| {
837 unsafe { core::slice::from_raw_parts(desc.buffer.cast_const(), desc.len()) }
842 })
843 }
844}
845
unsafe impl DmaRxBuffer for DmaRxBuf {
    type View = BufView<DmaRxBuf>;
    type Final = DmaRxBuf;

    fn prepare(&mut self) -> Preparation {
        // Hand every linked descriptor back to the DMA before starting.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                // If the buffer is in PSRAM, drop any cached lines so the
                // CPU later reads what the DMA wrote, not stale data.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(dma_can_access_psram)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
889
/// A full-duplex DMA buffer: one shared byte buffer with separate receive
/// and transmit descriptor sets.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxTxBuf {
    rx_descriptors: DescriptorSet<'static>,
    tx_descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
904
impl DmaRxTxBuf {
    /// Creates a new [`DmaRxTxBuf`] from some rx descriptors, tx
    /// descriptors and a buffer, using the default burst configuration.
    pub fn new(
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            rx_descriptors: DescriptorSet::new(rx_descriptors)?,
            tx_descriptors: DescriptorSet::new(tx_descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(buf.burst, capacity)?;

        Ok(buf)
    }

    // Validates the configuration against the buffer and links both
    // descriptor sets accordingly.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.rx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;

        Ok(())
    }

    /// Changes the burst configuration, re-validating the current length.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buf, returning the rx descriptors, tx descriptors and
    /// buffer.
    pub fn split(
        self,
    ) -> (
        &'static mut [DmaDescriptor],
        &'static mut [DmaDescriptor],
        &'static mut [u8],
    ) {
        (
            self.rx_descriptors.into_inner(),
            self.tx_descriptors.into_inner(),
            self.buffer,
        )
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted by this buf.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.tx_descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    // Sets the transfer length; the buffer must satisfy the requirements of
    // both directions.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.rx_descriptors.set_rx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        Ok(())
    }

    /// Sets the number of bytes to transmit/receive.
    ///
    /// # Panics
    ///
    /// Panics if `len` exceeds the capacity or violates the buffer's
    /// alignment requirements.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst));
    }
}
1026
unsafe impl DmaTxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;
    type Final = DmaRxTxBuf;

    fn prepare(&mut self) -> Preparation {
        // The descriptor with a null `next` pointer is the last one; mark
        // it as end-of-frame.
        for desc in self.tx_descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                // If the data lives in PSRAM, write the cache back first so
                // the DMA reads what the CPU actually wrote.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.tx_descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(dma_can_access_psram)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1072
unsafe impl DmaRxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;
    type Final = DmaRxTxBuf;

    fn prepare(&mut self) -> Preparation {
        // Hand every linked rx descriptor back to the DMA before starting.
        for desc in self.rx_descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                // If the buffer is in PSRAM, drop any cached lines so the
                // CPU later reads what the DMA wrote, not stale data.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.rx_descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(dma_can_access_psram)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1116
/// A DMA buffer for continuously receiving data.
///
/// Unlike [`DmaRxBuf`], consumed descriptors are handed back to the DMA via
/// [`DmaRxStreamBufView::consume`], so reception can continue while the
/// user reads earlier data.
pub struct DmaRxStreamBuf {
    descriptors: &'static mut [DmaDescriptor],
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
1162
1163impl DmaRxStreamBuf {
1164 pub fn new(
1167 descriptors: &'static mut [DmaDescriptor],
1168 buffer: &'static mut [u8],
1169 ) -> Result<Self, DmaBufError> {
1170 if !is_slice_in_dram(descriptors) {
1171 return Err(DmaBufError::UnsupportedMemoryRegion);
1172 }
1173 if !is_slice_in_dram(buffer) {
1174 return Err(DmaBufError::UnsupportedMemoryRegion);
1175 }
1176
1177 if descriptors.is_empty() {
1178 return Err(DmaBufError::InsufficientDescriptors);
1179 }
1180
1181 let chunk_size = buffer.len() / descriptors.len();
1183
1184 if chunk_size > 4095 {
1185 return Err(DmaBufError::InsufficientDescriptors);
1186 }
1187
1188 let excess = buffer.len() % descriptors.len();
1190 if chunk_size + excess > 4095 {
1191 return Err(DmaBufError::InsufficientDescriptors);
1192 }
1193
1194 let mut chunks = buffer.chunks_exact_mut(chunk_size);
1195 for (desc, chunk) in descriptors.iter_mut().zip(chunks.by_ref()) {
1196 desc.buffer = chunk.as_mut_ptr();
1197 desc.set_size(chunk.len());
1198 }
1199
1200 let remainder = chunks.into_remainder();
1201 debug_assert_eq!(remainder.len(), excess);
1202
1203 if !remainder.is_empty() {
1204 let last_descriptor = descriptors.last_mut().unwrap();
1206 last_descriptor.set_size(last_descriptor.size() + remainder.len());
1207 }
1208
1209 Ok(Self {
1210 descriptors,
1211 buffer,
1212 burst: BurstConfig::default(),
1213 })
1214 }
1215
1216 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
1218 (self.descriptors, self.buffer)
1219 }
1220}
1221
unsafe impl DmaRxBuffer for DmaRxStreamBuf {
    type View = DmaRxStreamBufView;
    type Final = DmaRxStreamBuf;

    fn prepare(&mut self) -> Preparation {
        // Link the descriptors into a forward list (built back-to-front)
        // and hand each of them to the DMA.
        let mut next = null_mut();
        for desc in self.descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;

            desc.reset_for_rx();
        }
        Preparation {
            start: self.descriptors.as_mut_ptr(),
            direction: TransferDirection::In,
            #[cfg(dma_can_access_psram)]
            accesses_psram: false,
            burst_transfer: self.burst,

            // Owner checking is left at the channel default; the view
            // relies on the owner bit to tell consumed from filled
            // descriptors.
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> DmaRxStreamBufView {
        DmaRxStreamBufView {
            buf: self,
            descriptor_idx: 0,
            descriptor_offset: 0,
        }
    }

    fn from_view(view: Self::View) -> Self {
        view.buf
    }
}
1263
/// A view into a [`DmaRxStreamBuf`] while a transfer is in progress.
pub struct DmaRxStreamBufView {
    buf: DmaRxStreamBuf,
    // Index of the descriptor currently being read from.
    descriptor_idx: usize,
    // Number of bytes already consumed from that descriptor.
    descriptor_offset: usize,
}
1270
impl DmaRxStreamBufView {
    /// Returns the number of bytes that are available to read.
    pub fn available_bytes(&self) -> usize {
        // Walk the descriptor ring starting at the current read position,
        // stopping at the first descriptor still owned by the DMA.
        let (tail, head) = self.buf.descriptors.split_at(self.descriptor_idx);
        let mut result = 0;
        for desc in head.iter().chain(tail) {
            if desc.owner() == Owner::Dma {
                break;
            }
            result += desc.len();
        }
        // Exclude the part of the current descriptor already consumed.
        result - self.descriptor_offset
    }

    /// Copies received bytes into `buf`, consuming them from this view.
    ///
    /// NOTE(review): when fewer bytes are currently available than
    /// `buf.len()`, this loop spins until the DMA has delivered the rest —
    /// it appears to busy-wait rather than return early; confirm intended
    /// behaviour with callers.
    pub fn pop(&mut self, buf: &mut [u8]) -> usize {
        if buf.is_empty() {
            return 0;
        }
        let total_bytes = buf.len();

        let mut remaining = buf;
        loop {
            let available = self.peek();
            if available.len() >= remaining.len() {
                // Enough data to finish filling `buf`.
                remaining.copy_from_slice(&available[0..remaining.len()]);
                self.consume(remaining.len());
                let consumed = remaining.len();
                remaining = &mut remaining[consumed..];
                break;
            } else {
                // Take everything currently available and keep going.
                let to_consume = available.len();
                remaining[0..to_consume].copy_from_slice(available);
                self.consume(to_consume);
                remaining = &mut remaining[to_consume..];
            }
        }

        total_bytes - remaining.len()
    }

    /// Returns the largest contiguous slice of received data without
    /// consuming it.
    pub fn peek(&self) -> &[u8] {
        let (slice, _) = self.peek_internal(false);
        slice
    }

    /// Like [`Self::peek`], but stops at an end-of-frame marker; the bool
    /// indicates whether EOF was found.
    pub fn peek_until_eof(&self) -> (&[u8], bool) {
        self.peek_internal(true)
    }

    /// Consumes up to `n` bytes, re-queueing fully consumed descriptors for
    /// the DMA. Returns the number of bytes actually consumed.
    pub fn consume(&mut self, n: usize) -> usize {
        let mut remaining_bytes_to_consume = n;

        loop {
            let desc = &mut self.buf.descriptors[self.descriptor_idx];

            if desc.owner() == Owner::Dma {
                // This descriptor has not been filled yet; stop here.
                break;
            }

            let remaining_bytes_in_descriptor = desc.len() - self.descriptor_offset;
            if remaining_bytes_to_consume < remaining_bytes_in_descriptor {
                // Only part of this descriptor is consumed; stay on it.
                self.descriptor_offset += remaining_bytes_to_consume;
                remaining_bytes_to_consume = 0;
                break;
            }

            // The descriptor is fully consumed: reset it and hand it back
            // to the DMA.
            desc.set_owner(Owner::Dma);
            desc.set_suc_eof(false);
            desc.set_length(0);

            // It becomes the new tail of the list.
            desc.next = null_mut();

            let desc_ptr: *mut _ = desc;

            // Wrap around when computing the previous descriptor's index.
            let prev_descriptor_index = self
                .descriptor_idx
                .checked_sub(1)
                .unwrap_or(self.buf.descriptors.len() - 1);

            // Append the recycled descriptor after its predecessor so the
            // DMA can fill it again.
            self.buf.descriptors[prev_descriptor_index].next = desc_ptr;

            // Advance the read position (ring semantics).
            self.descriptor_idx += 1;
            if self.descriptor_idx >= self.buf.descriptors.len() {
                self.descriptor_idx = 0;
            }
            self.descriptor_offset = 0;

            remaining_bytes_to_consume -= remaining_bytes_in_descriptor;
        }

        n - remaining_bytes_to_consume
    }

    // Returns the longest contiguous readable slice starting at the current
    // position and whether an EOF marker was hit (only checked when
    // `stop_at_eof` is set).
    fn peek_internal(&self, stop_at_eof: bool) -> (&[u8], bool) {
        let descriptors = &self.buf.descriptors[self.descriptor_idx..];

        // There must always be at least one descriptor.
        debug_assert!(!descriptors.is_empty());

        if descriptors.len() == 1 {
            // Special case: the last descriptor may have been extended with
            // the buffer's remainder bytes, so its data reaches the very
            // end of the buffer.
            let last_descriptor = &descriptors[0];
            if last_descriptor.owner() == Owner::Dma {
                // No data available.
                (&[], false)
            } else {
                let length = last_descriptor.len() - self.descriptor_offset;
                (
                    &self.buf.buffer[self.buf.buffer.len() - length..],
                    last_descriptor.flags.suc_eof(),
                )
            }
        } else {
            let chunk_size = descriptors[0].size();
            let mut found_eof = false;

            // Count how many bytes are contiguous in the underlying buffer.
            let mut number_of_contiguous_bytes = 0;
            for desc in descriptors {
                if desc.owner() == Owner::Dma {
                    break;
                }
                number_of_contiguous_bytes += desc.len();

                if stop_at_eof && desc.flags.suc_eof() {
                    found_eof = true;
                    break;
                }
                // A partially filled descriptor ends the contiguous run.
                if desc.len() < desc.size() {
                    break;
                }
            }

            (
                &self.buf.buffer[chunk_size * self.descriptor_idx..][..number_of_contiguous_bytes]
                    [self.descriptor_offset..],
                found_eof,
            )
        }
    }
}
1434
// A single descriptor shared by every `EmptyBuf` preparation; presumably
// its owner bit cannot be relied upon because it is shared, which is why
// `EmptyBuf` sets `check_owner: Some(false)` — TODO confirm.
static mut EMPTY: [DmaDescriptor; 1] = [DmaDescriptor::EMPTY];

/// An empty buffer, useful when a transfer direction needs no data.
pub struct EmptyBuf;
1439
unsafe impl DmaTxBuffer for EmptyBuf {
    type View = EmptyBuf;
    type Final = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            // Every `EmptyBuf` points at the same shared empty descriptor.
            start: core::ptr::addr_of_mut!(EMPTY).cast(),
            direction: TransferDirection::Out,
            #[cfg(dma_can_access_psram)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // Do not check ownership: the descriptor is shared between all
            // users, so its owner bit is not meaningful here.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1469
unsafe impl DmaRxBuffer for EmptyBuf {
    type View = EmptyBuf;
    type Final = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            // Every `EmptyBuf` points at the same shared empty descriptor.
            start: core::ptr::addr_of_mut!(EMPTY).cast(),
            direction: TransferDirection::In,
            #[cfg(dma_can_access_psram)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // Do not check ownership: the descriptor is shared between all
            // users, so its owner bit is not meaningful here.
            check_owner: Some(false),
            auto_write_back: true,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1497
/// A DMA buffer driven by a single self-referencing descriptor, causing the
/// same data to be transmitted over and over until the transfer is stopped.
pub struct DmaLoopBuf {
    descriptor: &'static mut DmaDescriptor,
    buffer: &'static mut [u8],
}
1512
1513impl DmaLoopBuf {
1514 pub fn new(
1516 descriptor: &'static mut DmaDescriptor,
1517 buffer: &'static mut [u8],
1518 ) -> Result<DmaLoopBuf, DmaBufError> {
1519 if !is_slice_in_dram(buffer) {
1520 return Err(DmaBufError::UnsupportedMemoryRegion);
1521 }
1522 if !is_slice_in_dram(core::slice::from_ref(descriptor)) {
1523 return Err(DmaBufError::UnsupportedMemoryRegion);
1524 }
1525
1526 if buffer.len() > BurstConfig::default().max_chunk_size_for(buffer, TransferDirection::Out)
1527 {
1528 return Err(DmaBufError::InsufficientDescriptors);
1529 }
1530
1531 descriptor.set_owner(Owner::Dma); descriptor.set_suc_eof(false);
1533 descriptor.set_length(buffer.len());
1534 descriptor.set_size(buffer.len());
1535 descriptor.buffer = buffer.as_mut_ptr();
1536 descriptor.next = descriptor;
1537
1538 Ok(Self { descriptor, buffer })
1539 }
1540
1541 pub fn split(self) -> (&'static mut DmaDescriptor, &'static mut [u8]) {
1543 (self.descriptor, self.buffer)
1544 }
1545}
1546
unsafe impl DmaTxBuffer for DmaLoopBuf {
    type View = DmaLoopBuf;
    type Final = DmaLoopBuf;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: self.descriptor,
            #[cfg(dma_can_access_psram)]
            accesses_psram: false,
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::default(),
            // The descriptor loops back onto itself, so the owner bit must
            // not be checked — it is never handed back by the DMA.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> Self::View {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1574
// Expose the underlying buffer directly for reading.
impl Deref for DmaLoopBuf {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        self.buffer
    }
}
1582
// Expose the underlying buffer directly for writing.
impl DerefMut for DmaLoopBuf {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buffer
    }
}
1588
/// Internal adapter carrying a pre-built [`Preparation`], so descriptor
/// lists created by [`prepare_for_tx`]/[`prepare_for_rx`] can be used where
/// a [`DmaTxBuffer`]/[`DmaRxBuffer`] is expected.
pub(crate) struct NoBuffer(Preparation);
impl NoBuffer {
    // Re-creates the stored `Preparation` field by field, since the struct
    // does not implement `Clone`.
    fn prep(&self) -> Preparation {
        Preparation {
            start: self.0.start,
            direction: self.0.direction,
            #[cfg(dma_can_access_psram)]
            accesses_psram: self.0.accesses_psram,
            burst_transfer: self.0.burst_transfer,
            check_owner: self.0.check_owner,
            auto_write_back: self.0.auto_write_back,
        }
    }
}
unsafe impl DmaTxBuffer for NoBuffer {
    type View = ();
    type Final = ();

    // Simply replays the preparation built by `prepare_for_tx`.
    fn prepare(&mut self) -> Preparation {
        self.prep()
    }

    fn into_view(self) -> Self::View {}
    fn from_view(_view: Self::View) {}
}
unsafe impl DmaRxBuffer for NoBuffer {
    type View = ();
    type Final = ();

    // Simply replays the preparation built by `prepare_for_rx`.
    fn prepare(&mut self) -> Preparation {
        self.prep()
    }

    fn into_view(self) -> Self::View {}
    fn from_view(_view: Self::View) {}
}
1630
/// Builds a descriptor list over `data` for a memory-to-peripheral (TX)
/// transfer, returning the preparation and the number of bytes it covers
/// (capped by the number of descriptors available).
///
/// # Safety
///
/// `data` must point to memory that remains valid for the duration of the
/// DMA transfer — TODO confirm exact contract with callers.
#[cfg_attr(not(aes_dma), expect(unused))]
pub(crate) unsafe fn prepare_for_tx(
    descriptors: &mut [DmaDescriptor],
    mut data: NonNull<[u8]>,
    block_size: usize,
) -> Result<(NoBuffer, usize), DmaError> {
    let alignment =
        BurstConfig::DEFAULT.min_alignment(unsafe { data.as_ref() }, TransferDirection::Out);

    if !data.addr().get().is_multiple_of(alignment) {
        return Err(DmaError::InvalidAlignment(DmaAlignmentError::Address));
    }

    // The peripheral's block size may impose a stricter chunk alignment
    // than the DMA itself.
    let alignment = alignment.max(block_size);
    let chunk_size = 4096 - alignment;

    // Cap the transfer at what the available descriptors can cover.
    let data_len = data.len().min(chunk_size * descriptors.len());

    cfg_if::cfg_if! {
        if #[cfg(dma_can_access_psram)] {
            let data_addr = data.addr().get();
            let data_in_psram = crate::psram::psram_range().contains(&data_addr);

            if data_in_psram {
                // Write the cache back so the DMA reads the data the CPU
                // actually wrote.
                unsafe { crate::soc::cache_writeback_addr(data_addr as u32, data_len as u32) };
            }
        }
    }

    let mut descriptors = unwrap!(DescriptorSet::new(descriptors));
    unwrap!(descriptors.link_with_buffer(unsafe { data.as_mut() }, chunk_size));
    unwrap!(descriptors.set_tx_length(data_len, chunk_size));

    // The descriptor with a null `next` pointer is the last one; mark it as
    // end-of-frame.
    for desc in descriptors.linked_iter_mut() {
        desc.reset_for_tx(desc.next.is_null());
    }

    Ok((
        NoBuffer(Preparation {
            start: descriptors.head(),
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::DEFAULT,
            check_owner: None,
            auto_write_back: true,
            #[cfg(dma_can_access_psram)]
            accesses_psram: data_in_psram,
        }),
        data_len,
    ))
}
1702
/// Builds a descriptor list over `data` for a peripheral-to-memory (RX)
/// transfer, returning the preparation and the number of bytes it covers.
///
/// On PSRAM-capable chips, unaligned edges of a PSRAM buffer are handled
/// via the provided `align_buffers`.
///
/// # Safety
///
/// `data` must point to memory that remains valid for the duration of the
/// DMA transfer — TODO confirm exact contract with callers.
#[cfg_attr(not(aes_dma), expect(unused))]
pub(crate) unsafe fn prepare_for_rx(
    descriptors: &mut [DmaDescriptor],
    #[cfg(dma_can_access_psram)] align_buffers: &mut [Option<ManualWritebackBuffer>; 2],
    mut data: NonNull<[u8]>,
) -> (NoBuffer, usize) {
    let chunk_size =
        BurstConfig::DEFAULT.max_chunk_size_for(unsafe { data.as_ref() }, TransferDirection::In);

    cfg_if::cfg_if! {
        if #[cfg(dma_can_access_psram)] {
            let data_addr = data.addr().get();
            let data_in_psram = crate::psram::psram_range().contains(&data_addr);
        } else {
            let data_in_psram = false;
        }
    }

    let mut descriptors = unwrap!(DescriptorSet::new(descriptors));
    let data_len = if data_in_psram {
        cfg_if::cfg_if! {
            if #[cfg(dma_can_access_psram)] {
                let consumed_bytes = build_descriptor_list_for_psram(
                    &mut descriptors,
                    align_buffers,
                    data,
                );

                // Write back, then invalidate, so no dirty or stale cache
                // lines overlap the DMA region during the transfer.
                unsafe {
                    crate::soc::cache_writeback_addr(data_addr as u32, consumed_bytes as u32);
                    crate::soc::cache_invalidate_addr(data_addr as u32, consumed_bytes as u32);
                }

                consumed_bytes
            } else {
                unreachable!()
            }
        }
    } else {
        let data_len = data.len();
        unwrap!(descriptors.link_with_buffer(unsafe { data.as_mut() }, chunk_size));
        // NOTE(review): this is the RX path but calls `set_tx_length` —
        // looks suspicious; confirm whether `set_rx_length` was intended
        // (compare `DmaRxBuf::set_length_fallible`).
        unwrap!(descriptors.set_tx_length(data_len, chunk_size));

        data_len
    };

    for desc in descriptors.linked_iter_mut() {
        desc.reset_for_rx();
    }

    (
        NoBuffer(Preparation {
            start: descriptors.head(),
            direction: TransferDirection::In,
            burst_transfer: BurstConfig::DEFAULT,
            check_owner: None,
            auto_write_back: true,
            #[cfg(dma_can_access_psram)]
            accesses_psram: data_in_psram,
        }),
        data_len,
    )
}
1782
#[cfg(dma_can_access_psram)]
/// Builds a descriptor chain for receiving into a PSRAM buffer.
///
/// DMA into external memory must respect the PSRAM alignment, so any
/// unaligned head and tail of `data` are received into internal-RAM staging
/// buffers (`copy_buffers`) instead, to be copied to their final location
/// after the transfer (via `ManualWritebackBuffer::write_back`). The aligned
/// middle section is DMA'd directly into PSRAM.
///
/// Returns the number of bytes the descriptor chain covers; this may be less
/// than `data.len()` if `descriptors` runs out.
fn build_descriptor_list_for_psram(
    descriptors: &mut DescriptorSet<'_>,
    copy_buffers: &mut [Option<ManualWritebackBuffer>; 2],
    data: NonNull<[u8]>,
) -> usize {
    let data_len = data.len();
    let data_addr = data.addr().get();

    let min_alignment = ExternalBurstConfig::DEFAULT.min_psram_alignment(TransferDirection::In);
    // Largest per-descriptor chunk (<= 4095) that keeps every chunk boundary
    // aligned to `min_alignment`.
    let chunk_size = 4096 - min_alignment;

    let mut desciptor_iter = DescriptorChainingIter::new(descriptors.descriptors);
    let mut copy_buffer_iter = copy_buffers.iter_mut();

    // Buffers that fit entirely in a single staging buffer are handled with
    // one head copy and no directly-DMA'd middle section.
    let has_aligned_data = data_len > BUF_LEN;

    // Bytes from `data_addr` up to the next alignment boundary; these go
    // through the first staging buffer.
    // NOTE(review): when `data_addr` is already aligned (`offset == 0`) this
    // still stages a full `min_alignment` bytes rather than 0 -- presumably
    // harmless, but confirm that is intentional.
    let offset = data_addr % min_alignment;
    let head_to_copy = min_alignment - offset;
    let head_to_copy = if !has_aligned_data {
        // Small buffer: stage everything (capped to `data_len` below).
        BUF_LEN
    } else if head_to_copy > 0 && head_to_copy < MIN_LAST_DMA_LEN {
        // Too short for the hardware's minimum transfer length; extend by one
        // alignment unit so the staged piece stays long enough and the middle
        // section remains aligned.
        head_to_copy + min_alignment
    } else {
        head_to_copy
    };
    let head_to_copy = head_to_copy.min(data_len);

    // Unaligned remainder at the end of `data`, staged through the second
    // buffer, with the same minimum-length adjustment as the head.
    let tail_to_copy = (data_len - head_to_copy) % min_alignment;
    let tail_to_copy = if tail_to_copy > 0 && tail_to_copy < MIN_LAST_DMA_LEN {
        tail_to_copy + min_alignment
    } else {
        tail_to_copy
    };

    // Total number of bytes covered by descriptors so far.
    let mut consumed = 0;

    if head_to_copy > 0 {
        // Stage the head: the DMA writes into the staging buffer, which is
        // later copied to the start of `data`.
        let copy_buffer = unwrap!(copy_buffer_iter.next());
        let buffer =
            copy_buffer.insert(ManualWritebackBuffer::new(get_range(data, 0..head_to_copy)));

        let Some(descriptor) = desciptor_iter.next() else {
            // Out of descriptors; report what we managed to cover.
            return consumed;
        };
        descriptor.set_size(head_to_copy);
        descriptor.buffer = buffer.buffer_ptr();
        consumed += head_to_copy;
    };

    // Chunk the aligned middle section directly into PSRAM.
    let mut aligned_data = get_range(data, head_to_copy..data.len() - tail_to_copy);
    while !aligned_data.is_empty() {
        let Some(descriptor) = desciptor_iter.next() else {
            return consumed;
        };
        let chunk = aligned_data.len().min(chunk_size);

        descriptor.set_size(chunk);
        descriptor.buffer = aligned_data.cast::<u8>().as_ptr();
        consumed += chunk;
        aligned_data = get_range(aligned_data, chunk..aligned_data.len());
    }

    if tail_to_copy > 0 {
        // Stage the tail in the second buffer, mirroring the head handling.
        let copy_buffer = unwrap!(copy_buffer_iter.next());
        let buffer = copy_buffer.insert(ManualWritebackBuffer::new(get_range(
            data,
            data.len() - tail_to_copy..data.len(),
        )));

        let Some(descriptor) = desciptor_iter.next() else {
            return consumed;
        };
        descriptor.set_size(tail_to_copy);
        descriptor.buffer = buffer.buffer_ptr();
        consumed += tail_to_copy;
    }

    consumed
}
1872
1873#[cfg(dma_can_access_psram)]
/// Returns a raw sub-slice of `ptr` covering `range` (byte indices).
///
/// Does not dereference `ptr`; the caller is responsible for `range` being in
/// bounds of the allocation before reading or writing through the result.
fn get_range(ptr: NonNull<[u8]>, range: Range<usize>) -> NonNull<[u8]> {
    let start = unsafe { ptr.cast::<u8>().add(range.start) };
    NonNull::slice_from_raw_parts(start, range.end - range.start)
}
1878
#[cfg(dma_can_access_psram)]
/// Hands out descriptors from a slice one at a time, linking each newly
/// returned descriptor into the previously returned one's `next` pointer so
/// the consumed prefix forms a chain.
struct DescriptorChainingIter<'a> {
    // Index of the next descriptor to hand out.
    index: usize,
    descriptors: &'a mut [DmaDescriptor],
}
1885#[cfg(dma_can_access_psram)]
1886impl<'a> DescriptorChainingIter<'a> {
1887 fn new(descriptors: &'a mut [DmaDescriptor]) -> Self {
1888 Self {
1889 descriptors,
1890 index: 0,
1891 }
1892 }
1893
1894 fn next(&mut self) -> Option<&'_ mut DmaDescriptor> {
1895 if self.index == 0 {
1896 self.index += 1;
1897 self.descriptors.get_mut(0)
1898 } else if self.index < self.descriptors.len() {
1899 let index = self.index;
1900 self.index += 1;
1901
1902 let ptr = &raw mut self.descriptors[index];
1904
1905 self.descriptors[index - 1].next = ptr;
1907
1908 Some(unsafe { &mut *ptr })
1911 } else {
1912 None
1913 }
1914 }
1915}
1916
#[cfg(dma_can_access_psram)]
// Minimum length of the final DMA'd piece of a transfer.
// NOTE(review): the ESP32-S2 value (5) presumably works around a hardware
// quirk of its DMA -- confirm against the TRM.
const MIN_LAST_DMA_LEN: usize = if cfg!(esp32s2) { 5 } else { 1 };
#[cfg(dma_can_access_psram)]
// Staging buffer capacity: one default 16-byte external burst plus slack so
// head/tail copies extended by the `MIN_LAST_DMA_LEN` adjustment still fit.
const BUF_LEN: usize = 16 + 2 * (MIN_LAST_DMA_LEN - 1);

#[cfg(dma_can_access_psram)]
/// A staging buffer placed in internal RAM that receives DMA data on behalf
/// of an unaligned (PSRAM) destination; the data is copied to its final
/// location manually after the transfer completes.
pub(crate) struct ManualWritebackBuffer {
    // Final destination the staged bytes are copied to by `write_back`.
    dst_address: NonNull<u8>,
    // Internal-RAM storage the DMA actually writes into.
    buffer: [u8; BUF_LEN],
    // Number of valid bytes to copy out (always <= BUF_LEN).
    n_bytes: u8,
}
1930
1931#[cfg(dma_can_access_psram)]
1932impl ManualWritebackBuffer {
1933 pub fn new(ptr: NonNull<[u8]>) -> Self {
1934 assert!(ptr.len() <= BUF_LEN);
1935 Self {
1936 dst_address: ptr.cast(),
1937 buffer: [0; BUF_LEN],
1938 n_bytes: ptr.len() as u8,
1939 }
1940 }
1941
1942 pub fn write_back(&self) {
1943 unsafe {
1944 self.dst_address
1945 .as_ptr()
1946 .copy_from(self.buffer.as_ptr(), self.n_bytes as usize);
1947 }
1948 }
1949
1950 pub fn buffer_ptr(&self) -> *mut u8 {
1951 self.buffer.as_ptr().cast_mut()
1952 }
1953}