1use core::{
2 ops::{Deref, DerefMut},
3 ptr::null_mut,
4};
5
6use super::*;
7use crate::soc::{is_slice_in_dram, is_slice_in_psram};
8#[cfg(psram_dma)]
9use crate::soc::{is_valid_psram_address, is_valid_ram_address};
10
/// Error returned when constructing or reconfiguring a DMA buffer fails.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaBufError {
    /// The buffer is smaller than the requested transfer length.
    BufferTooSmall,

    /// The supplied descriptors cannot cover the buffer at the required
    /// chunk size.
    InsufficientDescriptors,

    /// The memory is not usable by the DMA (not in DRAM, nor — where
    /// supported — in PSRAM).
    UnsupportedMemoryRegion,

    /// The buffer's address or size violates a DMA alignment requirement.
    InvalidAlignment(DmaAlignmentError),

    /// The configured chunk size is not valid.
    InvalidChunkSize,
}
30
/// DMA buffer alignment errors.
///
/// See [`BurstConfig::ensure_buffer_aligned`]-style checks for where these
/// originate.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaAlignmentError {
    /// The buffer's start address is not aligned to the required boundary.
    Address,

    /// The buffer's length is not a multiple of the required alignment.
    Size,
}
41
42impl From<DmaAlignmentError> for DmaBufError {
43 fn from(err: DmaAlignmentError) -> Self {
44 DmaBufError::InvalidAlignment(err)
45 }
46}
47
cfg_if::cfg_if! {
    if #[cfg(psram_dma)] {
        /// The burst size used when the DMA accesses external (PSRAM) memory.
        ///
        /// The numeric value of each variant is the burst size in bytes.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum ExternalBurstConfig {
            /// 16-byte burst.
            Size16 = 16,

            /// 32-byte burst.
            Size32 = 32,

            /// 64-byte burst.
            Size64 = 64,
        }

        impl ExternalBurstConfig {
            /// The default external-memory burst size (16 bytes).
            pub const DEFAULT: Self = Self::Size16;
        }

        impl Default for ExternalBurstConfig {
            /// Returns [`Self::DEFAULT`].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Whether the DMA uses burst transfers when accessing internal
        /// (DRAM) memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum InternalBurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl InternalBurstConfig {
            /// The default internal-memory burst mode (disabled).
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for InternalBurstConfig {
            /// Returns [`Self::DEFAULT`].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Burst configuration covering both internal and external memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub struct BurstConfig {
            /// Burst size for external (PSRAM) accesses.
            pub external_memory: ExternalBurstConfig,

            /// Burst mode for internal (DRAM) accesses.
            pub internal_memory: InternalBurstConfig,
        }

        impl BurstConfig {
            /// The default burst configuration (both memories at their
            /// respective defaults).
            pub const DEFAULT: Self = Self {
                external_memory: ExternalBurstConfig::DEFAULT,
                internal_memory: InternalBurstConfig::DEFAULT,
            };
        }

        impl Default for BurstConfig {
            /// Returns [`Self::DEFAULT`].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        impl From<InternalBurstConfig> for BurstConfig {
            /// Uses the given internal burst mode with the default external
            /// burst size.
            fn from(internal_memory: InternalBurstConfig) -> Self {
                Self {
                    external_memory: ExternalBurstConfig::DEFAULT,
                    internal_memory,
                }
            }
        }

        impl From<ExternalBurstConfig> for BurstConfig {
            /// Uses the given external burst size with the default internal
            /// burst mode.
            fn from(external_memory: ExternalBurstConfig) -> Self {
                Self {
                    external_memory,
                    internal_memory: InternalBurstConfig::DEFAULT,
                }
            }
        }
    } else {
        /// Whether the DMA uses burst transfers when accessing memory.
        ///
        /// On chips without PSRAM-capable DMA this is the entire burst
        /// configuration.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum BurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl BurstConfig {
            /// The default burst mode (disabled).
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for BurstConfig {
            /// Returns [`Self::DEFAULT`].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        // Without psram_dma the internal config is just an alias, so the
        // shared `impl InternalBurstConfig` below applies to BurstConfig
        // directly.
        type InternalBurstConfig = BurstConfig;
    }
}
169
#[cfg(psram_dma)]
impl ExternalBurstConfig {
    /// Minimum alignment required for a PSRAM buffer in the given direction.
    ///
    /// Receiving into PSRAM requires the buffer to be aligned to the burst
    /// size; transmitting out of PSRAM has no extra requirement.
    const fn min_psram_alignment(self, direction: TransferDirection) -> usize {
        match direction {
            TransferDirection::In => self as usize,
            TransferDirection::Out => 1,
        }
    }
}
195
196impl InternalBurstConfig {
197 pub(super) const fn is_burst_enabled(self) -> bool {
198 !matches!(self, Self::Disabled)
199 }
200
201 const fn min_dram_alignment(self, direction: TransferDirection) -> usize {
203 if matches!(direction, TransferDirection::In) {
204 if cfg!(esp32) {
205 4
208 } else if self.is_burst_enabled() {
209 4
211 } else {
212 1
213 }
214 } else {
215 if cfg!(esp32) {
218 4
224 } else {
225 1
226 }
227 }
228 }
229}
230
/// Const-context replacement for `core::cmp::max` (the trait-based version
/// is not callable in `const fn`).
const fn max(a: usize, b: usize) -> usize {
    if a < b { b } else { a }
}
234
impl BurstConfig {
    delegate::delegate! {
        // With psram_dma, BurstConfig is a struct; forward the DRAM queries
        // to its internal_memory field. Without psram_dma,
        // InternalBurstConfig is an alias of BurstConfig, so the impl above
        // already provides these methods directly.
        #[cfg(psram_dma)]
        to self.internal_memory {
            pub(super) const fn min_dram_alignment(self, direction: TransferDirection) -> usize;
            pub(super) fn is_burst_enabled(self) -> bool;
        }
    }

    /// Returns the smallest alignment that satisfies both transfer
    /// directions (and, with `psram_dma`, the external-memory burst size).
    pub const fn min_compatible_alignment(self) -> usize {
        let in_alignment = self.min_dram_alignment(TransferDirection::In);
        let out_alignment = self.min_dram_alignment(TransferDirection::Out);
        let alignment = max(in_alignment, out_alignment);

        #[cfg(psram_dma)]
        let alignment = max(alignment, self.external_memory as usize);

        alignment
    }

    /// Largest chunk size that is a multiple of `alignment` and still fits
    /// in a single descriptor (descriptors hold at most 4095 bytes; for a
    /// power-of-two alignment, 4096 - alignment is the largest such
    /// multiple).
    const fn chunk_size_for_alignment(alignment: usize) -> usize {
        4096 - alignment
    }

    /// Maximum chunk size usable in both transfer directions with this
    /// configuration.
    pub const fn max_compatible_chunk_size(self) -> usize {
        Self::chunk_size_for_alignment(self.min_compatible_alignment())
    }

    /// Minimum alignment required for `_buffer` in the given direction.
    ///
    /// With `psram_dma`, buffers located in PSRAM must additionally satisfy
    /// the external-memory burst alignment.
    fn min_alignment(self, _buffer: &[u8], direction: TransferDirection) -> usize {
        let alignment = self.min_dram_alignment(direction);

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Shadowed binding: only raised for PSRAM-resident buffers.
                let mut alignment = alignment;
                if is_valid_psram_address(_buffer.as_ptr() as usize) {
                    alignment = max(alignment, self.external_memory.min_psram_alignment(direction));
                }
            }
        }

        alignment
    }

    /// Maximum chunk size for this particular buffer and direction.
    fn max_chunk_size_for(self, buffer: &[u8], direction: TransferDirection) -> usize {
        Self::chunk_size_for_alignment(self.min_alignment(buffer, direction))
    }

    /// Verifies the buffer's address (and, for reception, its size) against
    /// the alignment rules of this configuration.
    fn ensure_buffer_aligned(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaAlignmentError> {
        let alignment = self.min_alignment(buffer, direction);
        if buffer.as_ptr() as usize % alignment != 0 {
            return Err(DmaAlignmentError::Address);
        }

        // Only reception requires the length to be a multiple of the
        // alignment.
        if direction == TransferDirection::In && buffer.len() % alignment != 0 {
            return Err(DmaAlignmentError::Size);
        }

        Ok(())
    }

    /// Verifies that the buffer is in a DMA-capable memory region and meets
    /// the alignment rules for the given direction.
    fn ensure_buffer_compatible(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaBufError> {
        let is_in_dram = is_slice_in_dram(buffer);
        let is_in_psram = cfg!(psram_dma) && is_slice_in_psram(buffer);
        if !(is_in_dram || is_in_psram) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        self.ensure_buffer_aligned(buffer, direction)?;

        Ok(())
    }
}
334
/// The direction of a DMA transfer, as seen from memory.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TransferDirection {
    /// Peripheral-to-memory (receive).
    In,
    /// Memory-to-peripheral (transmit).
    Out,
}
344
/// Everything a DMA channel needs to start a transfer with a buffer.
///
/// Produced by [`DmaTxBuffer::prepare`] / [`DmaRxBuffer::prepare`].
#[derive(PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Preparation {
    /// The first descriptor of the chain describing the buffer.
    pub start: *mut DmaDescriptor,

    /// The direction the descriptors were prepared for.
    pub direction: TransferDirection,

    /// Whether the buffer (and thus the transfer) touches PSRAM.
    #[cfg(psram_dma)]
    pub accesses_psram: bool,

    /// The burst configuration the channel should be programmed with.
    #[doc = crate::trm_markdown_link!()]
    pub burst_transfer: BurstConfig,

    /// Descriptor owner-checking: `Some(enabled)` forces the setting,
    /// `None` leaves the channel's default in place.
    /// NOTE(review): exact channel-side semantics are not visible in this
    /// file — confirm against the DMA channel driver.
    pub check_owner: Option<bool>,

    /// Whether the DMA should write descriptor state back on completion.
    /// In this module, RX preparations set this to `true` and TX
    /// preparations to `false`.
    pub auto_write_back: bool,
}
408
/// A buffer the DMA can transmit (memory → peripheral) from.
///
/// # Safety
///
/// Implementors must return a [`Preparation`] whose descriptor chain is
/// valid and describes memory that remains accessible while the DMA uses
/// it. NOTE(review): the precise contract is not visible in this chunk —
/// confirm against the DMA channel driver.
pub unsafe trait DmaTxBuffer {
    /// The type exposed while the buffer is in use by the DMA.
    type View;

    /// Prepares the buffer and describes it to the DMA.
    fn prepare(&mut self) -> Preparation;

    /// Consumes the buffer, producing the in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view after the transfer is done.
    fn from_view(view: Self::View) -> Self;
}
433
/// A buffer the DMA can receive (peripheral → memory) into.
///
/// # Safety
///
/// Implementors must return a [`Preparation`] whose descriptor chain is
/// valid and describes memory that remains accessible and writable while
/// the DMA uses it. NOTE(review): the precise contract is not visible in
/// this chunk — confirm against the DMA channel driver.
pub unsafe trait DmaRxBuffer {
    /// The type exposed while the buffer is in use by the DMA.
    type View;

    /// Prepares the buffer and describes it to the DMA.
    fn prepare(&mut self) -> Preparation;

    /// Consumes the buffer, producing the in-flight view.
    fn into_view(self) -> Self::View;

    /// Reclaims the buffer from its view after the transfer is done.
    fn from_view(view: Self::View) -> Self;
}
462
463pub struct BufView<T>(T);
468
/// A DMA transmit buffer.
///
/// Owns a statically-allocated byte buffer and the descriptor chain that
/// describes it to the DMA, together with the active burst configuration.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaTxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
481
impl DmaTxBuf {
    /// Creates a new [DmaTxBuf] from descriptors and a buffer, using the
    /// default burst configuration. The full buffer is initially available
    /// for transmission.
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        Self::new_with_config(descriptors, buffer, BurstConfig::default())
    }

    /// Creates a new [DmaTxBuf] with the given burst configuration.
    pub fn new_with_config(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
        config: impl Into<BurstConfig>,
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            descriptors: DescriptorSet::new(descriptors)?,
            buffer,
            // Placeholder; `configure` stores the real config on success.
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(config, capacity)?;

        Ok(buf)
    }

    /// Validates `length` against `burst`, re-links the descriptors with the
    /// matching chunk size, and stores the new configuration.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;
        Ok(())
    }

    /// Changes the burst configuration, keeping the current length.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buffer, returning the descriptors and the byte buffer.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors.into_inner(), self.buffer)
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted (the sum of
    /// the linked descriptors' lengths).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Checks and applies a new transfer length under the given burst
    /// configuration.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        // Reset every linked descriptor for transmission; only the final
        // one (null `next`) carries the EOF marker.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        Ok(())
    }

    /// Sets the number of bytes to transmit.
    ///
    /// Panics if `len` exceeds the capacity or violates the current burst
    /// configuration's alignment rules.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst))
    }

    /// Copies `data` into the start of the buffer and sets the transfer
    /// length to match.
    ///
    /// Panics under the same conditions as [`Self::set_length`].
    pub fn fill(&mut self, data: &[u8]) {
        self.set_length(data.len());
        self.as_mut_slice()[..data.len()].copy_from_slice(data);
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }
}
616
unsafe impl DmaTxBuffer for DmaTxBuf {
    type View = BufView<DmaTxBuf>;

    fn prepare(&mut self) -> Preparation {
        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // If the data lives in PSRAM, flush the cache so the DMA
                // (which bypasses it) sees the data the CPU wrote.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            // `None`: keep the channel's default owner-check behaviour.
            check_owner: None,
            // TX transfers don't need descriptor write-back.
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
654
/// A DMA receive buffer.
///
/// Owns a statically-allocated byte buffer and the descriptor chain that
/// describes it to the DMA, together with the active burst configuration.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
667
668impl DmaRxBuf {
669 pub fn new(
677 descriptors: &'static mut [DmaDescriptor],
678 buffer: &'static mut [u8],
679 ) -> Result<Self, DmaBufError> {
680 Self::new_with_config(descriptors, buffer, BurstConfig::default())
681 }
682
683 pub fn new_with_config(
692 descriptors: &'static mut [DmaDescriptor],
693 buffer: &'static mut [u8],
694 config: impl Into<BurstConfig>,
695 ) -> Result<Self, DmaBufError> {
696 let mut buf = Self {
697 descriptors: DescriptorSet::new(descriptors)?,
698 buffer,
699 burst: BurstConfig::default(),
700 };
701
702 buf.configure(config, buf.capacity())?;
703
704 Ok(buf)
705 }
706
707 fn configure(
708 &mut self,
709 burst: impl Into<BurstConfig>,
710 length: usize,
711 ) -> Result<(), DmaBufError> {
712 let burst = burst.into();
713 self.set_length_fallible(length, burst)?;
714
715 self.descriptors.link_with_buffer(
716 self.buffer,
717 burst.max_chunk_size_for(self.buffer, TransferDirection::In),
718 )?;
719
720 self.burst = burst;
721 Ok(())
722 }
723
724 pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
726 let len = self.len();
727 self.configure(burst, len)
728 }
729
730 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
732 (self.descriptors.into_inner(), self.buffer)
733 }
734
735 pub fn capacity(&self) -> usize {
737 self.buffer.len()
738 }
739
740 #[allow(clippy::len_without_is_empty)]
743 pub fn len(&self) -> usize {
744 self.descriptors
745 .linked_iter()
746 .map(|d| d.size())
747 .sum::<usize>()
748 }
749
750 fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
751 if len > self.capacity() {
752 return Err(DmaBufError::BufferTooSmall);
753 }
754 burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
755
756 self.descriptors.set_rx_length(
757 len,
758 burst.max_chunk_size_for(&self.buffer[..len], TransferDirection::In),
759 )
760 }
761
762 pub fn set_length(&mut self, len: usize) {
768 unwrap!(self.set_length_fallible(len, self.burst));
769 }
770
771 pub fn as_slice(&self) -> &[u8] {
773 self.buffer
774 }
775
776 pub fn as_mut_slice(&mut self) -> &mut [u8] {
778 self.buffer
779 }
780
781 pub fn number_of_received_bytes(&self) -> usize {
783 self.descriptors
784 .linked_iter()
785 .map(|d| d.len())
786 .sum::<usize>()
787 }
788
789 pub fn read_received_data(&self, mut buf: &mut [u8]) -> usize {
796 let capacity = buf.len();
799 for chunk in self.received_data() {
800 if buf.is_empty() {
801 break;
802 }
803 let to_fill;
804 (to_fill, buf) = buf.split_at_mut(chunk.len());
805 to_fill.copy_from_slice(chunk);
806 }
807
808 capacity - buf.len()
809 }
810
811 pub fn received_data(&self) -> impl Iterator<Item = &[u8]> {
813 self.descriptors.linked_iter().map(|desc| {
814 unsafe { core::slice::from_raw_parts(desc.buffer.cast_const(), desc.len()) }
819 })
820 }
821}
822
unsafe impl DmaRxBuffer for DmaRxBuf {
    type View = BufView<DmaRxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Hand every linked descriptor back to the DMA for reception.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // If the buffer lives in PSRAM, drop any cached contents so
                // the CPU later reads what the DMA wrote, not stale cache
                // lines.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            // `None`: keep the channel's default owner-check behaviour.
            check_owner: None,
            // RX needs write-back so descriptor lengths reflect received
            // data.
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
865
/// A DMA buffer usable for both transmit and receive (e.g. full-duplex
/// transfers sharing one backing buffer).
///
/// Holds separate RX and TX descriptor chains over the same byte buffer.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxTxBuf {
    rx_descriptors: DescriptorSet<'static>,
    tx_descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
880
impl DmaRxTxBuf {
    /// Creates a new [DmaRxTxBuf] from RX and TX descriptors and a shared
    /// buffer, using the default burst configuration. The full buffer is
    /// initially available in both directions.
    pub fn new(
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            rx_descriptors: DescriptorSet::new(rx_descriptors)?,
            tx_descriptors: DescriptorSet::new(tx_descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(buf.burst, capacity)?;

        Ok(buf)
    }

    /// Validates `length` against `burst`, re-links both descriptor chains
    /// with direction-appropriate chunk sizes, and stores the new
    /// configuration.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.rx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;

        Ok(())
    }

    /// Changes the burst configuration, keeping the current length.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buffer, returning the RX descriptors, TX descriptors
    /// and the byte buffer.
    pub fn split(
        self,
    ) -> (
        &'static mut [DmaDescriptor],
        &'static mut [DmaDescriptor],
        &'static mut [u8],
    ) {
        (
            self.rx_descriptors.into_inner(),
            self.tx_descriptors.into_inner(),
            self.buffer,
        )
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted (the sum of
    /// the linked TX descriptors' lengths).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.tx_descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Checks and applies a new transfer length, which must satisfy the
    /// alignment rules of BOTH directions.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.rx_descriptors.set_rx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        Ok(())
    }

    /// Sets the number of bytes to transfer in both directions.
    ///
    /// Panics if `len` exceeds the capacity or violates the current burst
    /// configuration's alignment rules.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst));
    }
}
1002
unsafe impl DmaTxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Reset the TX chain; only the final descriptor (null `next`)
        // carries the EOF marker.
        for desc in self.tx_descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Flush cached data so the (cache-bypassing) DMA reads what
                // the CPU wrote.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.tx_descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1047
unsafe impl DmaRxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Hand the RX chain back to the DMA for reception.
        for desc in self.rx_descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Invalidate cached lines so the CPU later observes the
                // DMA-written data instead of stale cache contents.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.rx_descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1090
/// A receive buffer for continuous/streaming reception.
///
/// The buffer is split evenly across the descriptors; as data is consumed
/// through [`DmaRxStreamBufView`], descriptors are handed back to the DMA
/// so reception can continue indefinitely.
pub struct DmaRxStreamBuf {
    descriptors: &'static mut [DmaDescriptor],
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
1136
1137impl DmaRxStreamBuf {
1138 pub fn new(
1141 descriptors: &'static mut [DmaDescriptor],
1142 buffer: &'static mut [u8],
1143 ) -> Result<Self, DmaBufError> {
1144 if !is_slice_in_dram(descriptors) {
1145 return Err(DmaBufError::UnsupportedMemoryRegion);
1146 }
1147 if !is_slice_in_dram(buffer) {
1148 return Err(DmaBufError::UnsupportedMemoryRegion);
1149 }
1150
1151 if descriptors.is_empty() {
1152 return Err(DmaBufError::InsufficientDescriptors);
1153 }
1154
1155 let chunk_size = buffer.len() / descriptors.len();
1157
1158 if chunk_size > 4095 {
1159 return Err(DmaBufError::InsufficientDescriptors);
1160 }
1161
1162 let excess = buffer.len() % descriptors.len();
1164 if chunk_size + excess > 4095 {
1165 return Err(DmaBufError::InsufficientDescriptors);
1166 }
1167
1168 let mut chunks = buffer.chunks_exact_mut(chunk_size);
1169 for (desc, chunk) in descriptors.iter_mut().zip(chunks.by_ref()) {
1170 desc.buffer = chunk.as_mut_ptr();
1171 desc.set_size(chunk.len());
1172 }
1173
1174 let remainder = chunks.into_remainder();
1175 debug_assert_eq!(remainder.len(), excess);
1176
1177 if !remainder.is_empty() {
1178 let last_descriptor = descriptors.last_mut().unwrap();
1180 last_descriptor.set_size(last_descriptor.size() + remainder.len());
1181 }
1182
1183 Ok(Self {
1184 descriptors,
1185 buffer,
1186 burst: BurstConfig::default(),
1187 })
1188 }
1189
1190 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
1192 (self.descriptors, self.buffer)
1193 }
1194}
1195
unsafe impl DmaRxBuffer for DmaRxStreamBuf {
    type View = DmaRxStreamBufView;

    fn prepare(&mut self) -> Preparation {
        // (Re)link the descriptors into a forward chain terminated by a
        // null pointer, and hand each back to the DMA.
        let mut next = null_mut();
        for desc in self.descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;

            desc.reset_for_rx();
        }
        Preparation {
            start: self.descriptors.as_mut_ptr(),
            direction: TransferDirection::In,
            // Stream buffers are DRAM-only (checked in `new`).
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: self.burst,

            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> DmaRxStreamBufView {
        DmaRxStreamBufView {
            buf: self,
            // Consumption starts at the first descriptor, offset zero.
            descriptor_idx: 0,
            descriptor_offset: 0,
        }
    }

    fn from_view(view: Self::View) -> Self {
        view.buf
    }
}
1236
/// The in-flight view of a [DmaRxStreamBuf]: allows reading received data
/// while the DMA keeps filling the ring of descriptors.
pub struct DmaRxStreamBufView {
    buf: DmaRxStreamBuf,
    // Index of the descriptor currently being consumed.
    descriptor_idx: usize,
    // Number of bytes already consumed from that descriptor.
    descriptor_offset: usize,
}
1243
1244impl DmaRxStreamBufView {
1245 pub fn available_bytes(&self) -> usize {
1247 let (tail, head) = self.buf.descriptors.split_at(self.descriptor_idx);
1248 let mut result = 0;
1249 for desc in head.iter().chain(tail) {
1250 if desc.owner() == Owner::Dma {
1251 break;
1252 }
1253 result += desc.len();
1254 }
1255 result - self.descriptor_offset
1256 }
1257
1258 pub fn pop(&mut self, buf: &mut [u8]) -> usize {
1260 if buf.is_empty() {
1261 return 0;
1262 }
1263 let total_bytes = buf.len();
1264
1265 let mut remaining = buf;
1266 loop {
1267 let available = self.peek();
1268 if available.len() >= remaining.len() {
1269 remaining.copy_from_slice(&available[0..remaining.len()]);
1270 self.consume(remaining.len());
1271 let consumed = remaining.len();
1272 remaining = &mut remaining[consumed..];
1273 break;
1274 } else {
1275 let to_consume = available.len();
1276 remaining[0..to_consume].copy_from_slice(available);
1277 self.consume(to_consume);
1278 remaining = &mut remaining[to_consume..];
1279 }
1280 }
1281
1282 total_bytes - remaining.len()
1283 }
1284
1285 pub fn peek(&self) -> &[u8] {
1292 let (slice, _) = self.peek_internal(false);
1293 slice
1294 }
1295
1296 pub fn peek_until_eof(&self) -> (&[u8], bool) {
1301 self.peek_internal(true)
1302 }
1303
1304 pub fn consume(&mut self, n: usize) -> usize {
1310 let mut remaining_bytes_to_consume = n;
1311
1312 loop {
1313 let desc = &mut self.buf.descriptors[self.descriptor_idx];
1314
1315 if desc.owner() == Owner::Dma {
1316 break;
1319 }
1320
1321 let remaining_bytes_in_descriptor = desc.len() - self.descriptor_offset;
1322 if remaining_bytes_to_consume < remaining_bytes_in_descriptor {
1323 self.descriptor_offset += remaining_bytes_to_consume;
1324 remaining_bytes_to_consume = 0;
1325 break;
1326 }
1327
1328 desc.set_owner(Owner::Dma);
1330 desc.set_suc_eof(false);
1331 desc.set_length(0);
1332
1333 desc.next = null_mut();
1337
1338 let desc_ptr: *mut _ = desc;
1339
1340 let prev_descriptor_index = self
1341 .descriptor_idx
1342 .checked_sub(1)
1343 .unwrap_or(self.buf.descriptors.len() - 1);
1344
1345 self.buf.descriptors[prev_descriptor_index].next = desc_ptr;
1347
1348 self.descriptor_idx += 1;
1349 if self.descriptor_idx >= self.buf.descriptors.len() {
1350 self.descriptor_idx = 0;
1351 }
1352 self.descriptor_offset = 0;
1353
1354 remaining_bytes_to_consume -= remaining_bytes_in_descriptor;
1355 }
1356
1357 n - remaining_bytes_to_consume
1358 }
1359
1360 fn peek_internal(&self, stop_at_eof: bool) -> (&[u8], bool) {
1361 let descriptors = &self.buf.descriptors[self.descriptor_idx..];
1362
1363 debug_assert!(!descriptors.is_empty());
1365
1366 if descriptors.len() == 1 {
1367 let last_descriptor = &descriptors[0];
1368 if last_descriptor.owner() == Owner::Dma {
1369 (&[], false)
1371 } else {
1372 let length = last_descriptor.len() - self.descriptor_offset;
1373 (
1374 &self.buf.buffer[self.buf.buffer.len() - length..],
1375 last_descriptor.flags.suc_eof(),
1376 )
1377 }
1378 } else {
1379 let chunk_size = descriptors[0].size();
1380 let mut found_eof = false;
1381
1382 let mut number_of_contiguous_bytes = 0;
1383 for desc in descriptors {
1384 if desc.owner() == Owner::Dma {
1385 break;
1386 }
1387 number_of_contiguous_bytes += desc.len();
1388
1389 if stop_at_eof && desc.flags.suc_eof() {
1390 found_eof = true;
1391 break;
1392 }
1393 if desc.len() < desc.size() {
1395 break;
1396 }
1397 }
1398
1399 (
1400 &self.buf.buffer[chunk_size * self.descriptor_idx..][..number_of_contiguous_bytes]
1401 [self.descriptor_offset..],
1402 found_eof,
1403 )
1404 }
1405 }
1406}
1407
// A single descriptor shared by all EmptyBuf preparations. Its owner bit is
// never maintained, which is why EmptyBuf disables owner checking.
static mut EMPTY: [DmaDescriptor; 1] = [DmaDescriptor::EMPTY];

/// An empty buffer: use it when a transfer direction needs no data.
pub struct EmptyBuf;
1412
unsafe impl DmaTxBuffer for EmptyBuf {
    type View = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        #[allow(unused_unsafe)]
        Preparation {
            // SAFETY: `addr_of_mut!` takes a raw pointer to the shared
            // static without creating an intermediate reference.
            start: unsafe { core::ptr::addr_of_mut!(EMPTY).cast() },
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // The shared EMPTY descriptor's owner bit is not maintained, so
            // the channel must not check it.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1442
unsafe impl DmaRxBuffer for EmptyBuf {
    type View = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        #[allow(unused_unsafe)]
        Preparation {
            // SAFETY: `addr_of_mut!` takes a raw pointer to the shared
            // static without creating an intermediate reference.
            start: unsafe { core::ptr::addr_of_mut!(EMPTY).cast() },
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // The shared EMPTY descriptor's owner bit is not maintained, so
            // the channel must not check it.
            check_owner: Some(false),
            auto_write_back: true,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1470
/// A transmit buffer whose single descriptor points back at itself, so the
/// DMA transmits the buffer's contents in an endless loop.
pub struct DmaLoopBuf {
    descriptor: &'static mut DmaDescriptor,
    buffer: &'static mut [u8],
}
1485
1486impl DmaLoopBuf {
1487 pub fn new(
1489 descriptor: &'static mut DmaDescriptor,
1490 buffer: &'static mut [u8],
1491 ) -> Result<DmaLoopBuf, DmaBufError> {
1492 if !is_slice_in_dram(buffer) {
1493 return Err(DmaBufError::UnsupportedMemoryRegion);
1494 }
1495 if !is_slice_in_dram(core::slice::from_ref(descriptor)) {
1496 return Err(DmaBufError::UnsupportedMemoryRegion);
1497 }
1498
1499 if buffer.len() > BurstConfig::default().max_chunk_size_for(buffer, TransferDirection::Out)
1500 {
1501 return Err(DmaBufError::InsufficientDescriptors);
1502 }
1503
1504 descriptor.set_owner(Owner::Dma); descriptor.set_suc_eof(false);
1506 descriptor.set_length(buffer.len());
1507 descriptor.set_size(buffer.len());
1508 descriptor.buffer = buffer.as_mut_ptr();
1509 descriptor.next = descriptor;
1510
1511 Ok(Self { descriptor, buffer })
1512 }
1513
1514 pub fn split(self) -> (&'static mut DmaDescriptor, &'static mut [u8]) {
1516 (self.descriptor, self.buffer)
1517 }
1518}
1519
unsafe impl DmaTxBuffer for DmaLoopBuf {
    type View = Self;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: self.descriptor,
            // The buffer is DRAM-only (checked in `new`).
            #[cfg(psram_dma)]
            accesses_psram: false,
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::default(),
            // The descriptor loops onto itself and its owner bit is never
            // handed back, so the owner check must be disabled.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> Self::View {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1546
impl Deref for DmaLoopBuf {
    type Target = [u8];

    /// Exposes the underlying byte buffer for reading.
    fn deref(&self) -> &Self::Target {
        self.buffer
    }
}
1554
impl DerefMut for DmaLoopBuf {
    /// Exposes the underlying byte buffer for writing.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buffer
    }
}