1use core::{
2 ops::{Deref, DerefMut},
3 ptr::null_mut,
4};
5
6use super::*;
7use crate::soc::{is_slice_in_dram, is_slice_in_psram};
8#[cfg(psram_dma)]
9use crate::soc::{is_valid_psram_address, is_valid_ram_address};
10
/// Error returned when the construction or configuration of a DMA buffer
/// fails.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaBufError {
    /// The buffer is smaller than the requested transfer length.
    BufferTooSmall,

    /// There are not enough descriptors available to cover the buffer.
    InsufficientDescriptors,

    /// The memory the buffer (or descriptors) lives in cannot be used for
    /// DMA.
    UnsupportedMemoryRegion,

    /// The buffer violates an address or size alignment requirement.
    InvalidAlignment(DmaAlignmentError),

    /// The requested chunk size is not usable.
    InvalidChunkSize,
}
30
/// The kind of alignment requirement that a DMA buffer violated.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum DmaAlignmentError {
    /// The buffer's start address is not sufficiently aligned.
    Address,

    /// The buffer's length is not a multiple of the required alignment.
    Size,
}
41
42impl From<DmaAlignmentError> for DmaBufError {
43 fn from(err: DmaAlignmentError) -> Self {
44 DmaBufError::InvalidAlignment(err)
45 }
46}
47
cfg_if::cfg_if! {
    if #[cfg(psram_dma)] {
        /// Burst transfer size used when accessing external (PSRAM) memory.
        ///
        /// The numeric value of each variant is the burst size in bytes.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum ExternalBurstConfig {
            /// 16 byte burst transfers.
            Size16 = 16,

            /// 32 byte burst transfers.
            Size32 = 32,

            /// 64 byte burst transfers.
            Size64 = 64,
        }

        impl ExternalBurstConfig {
            /// The default external memory burst size.
            pub const DEFAULT: Self = Self::Size16;
        }

        impl Default for ExternalBurstConfig {
            /// Returns [Self::DEFAULT].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Burst transfer mode used when accessing internal (DRAM) memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum InternalBurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl InternalBurstConfig {
            /// The default internal burst mode.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for InternalBurstConfig {
            /// Returns [Self::DEFAULT].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        /// Combined burst configuration for internal and external memory.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub struct BurstConfig {
            /// Burst size for external (PSRAM) memory accesses.
            pub external_memory: ExternalBurstConfig,

            /// Burst mode for internal (DRAM) memory accesses.
            pub internal_memory: InternalBurstConfig,
        }

        impl BurstConfig {
            /// The default burst configuration.
            pub const DEFAULT: Self = Self {
                external_memory: ExternalBurstConfig::DEFAULT,
                internal_memory: InternalBurstConfig::DEFAULT,
            };
        }

        impl Default for BurstConfig {
            /// Returns [Self::DEFAULT].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        impl From<InternalBurstConfig> for BurstConfig {
            /// Combines the internal config with the default external burst
            /// size.
            fn from(internal_memory: InternalBurstConfig) -> Self {
                Self {
                    external_memory: ExternalBurstConfig::DEFAULT,
                    internal_memory,
                }
            }
        }

        impl From<ExternalBurstConfig> for BurstConfig {
            /// Combines the external config with the default internal burst
            /// mode.
            fn from(external_memory: ExternalBurstConfig) -> Self {
                Self {
                    external_memory,
                    internal_memory: InternalBurstConfig::DEFAULT,
                }
            }
        }
    } else {
        /// Burst transfer mode.
        ///
        /// On chips without PSRAM DMA this applies to internal memory only.
        #[derive(Clone, Copy, PartialEq, Eq, Debug)]
        #[cfg_attr(feature = "defmt", derive(defmt::Format))]
        pub enum BurstConfig {
            /// Burst transfers are disabled.
            Disabled,

            /// Burst transfers are enabled.
            Enabled,
        }

        impl BurstConfig {
            /// The default burst mode.
            pub const DEFAULT: Self = Self::Disabled;
        }

        impl Default for BurstConfig {
            /// Returns [Self::DEFAULT].
            fn default() -> Self {
                Self::DEFAULT
            }
        }

        // Without PSRAM DMA there is no separate internal configuration type.
        type InternalBurstConfig = BurstConfig;
    }
}
169
#[cfg(psram_dma)]
impl ExternalBurstConfig {
    /// Returns the minimum alignment a PSRAM buffer must have for the given
    /// transfer direction.
    const fn min_psram_alignment(self, direction: TransferDirection) -> usize {
        // Receive buffers in PSRAM must be aligned to the external burst
        // size; transmit buffers have no extra requirement here.
        if matches!(direction, TransferDirection::In) {
            self as usize
        } else {
            1
        }
    }
}
195
196impl InternalBurstConfig {
197 pub(super) const fn is_burst_enabled(self) -> bool {
198 !matches!(self, Self::Disabled)
199 }
200
201 const fn min_dram_alignment(self, direction: TransferDirection) -> usize {
203 if matches!(direction, TransferDirection::In) {
204 if self.is_burst_enabled() {
214 4
216 } else {
217 1
218 }
219 } else {
220 if cfg!(esp32) {
223 4
229 } else {
230 1
231 }
232 }
233 }
234}
235
/// Returns the larger of the two values.
///
/// A `const` stand-in for `Ord::max`, which is not usable in `const fn`
/// context here.
const fn max(a: usize, b: usize) -> usize {
    if a < b { b } else { a }
}
243
impl BurstConfig {
    delegate::delegate! {
        // With PSRAM DMA, `BurstConfig` is a struct and the DRAM-related
        // queries are forwarded to its `internal_memory` field. Without it,
        // `BurstConfig` *is* the internal config and has these methods
        // inherently (see the `cfg_if!` above).
        #[cfg(psram_dma)]
        to self.internal_memory {
            pub(super) const fn min_dram_alignment(self, direction: TransferDirection) -> usize;
            pub(super) fn is_burst_enabled(self) -> bool;
        }
    }

    /// Returns the smallest alignment that satisfies both transfer
    /// directions (and, with PSRAM DMA, the external memory burst size).
    pub const fn min_compatible_alignment(self) -> usize {
        let in_alignment = self.min_dram_alignment(TransferDirection::In);
        let out_alignment = self.min_dram_alignment(TransferDirection::Out);
        let alignment = max(in_alignment, out_alignment);

        #[cfg(psram_dma)]
        let alignment = max(alignment, self.external_memory as usize);

        alignment
    }

    /// Returns the largest chunk size usable with the given alignment.
    // Descriptors can hold at most 4095 bytes; since alignments here are
    // powers of two, `4096 - alignment` is the largest multiple of
    // `alignment` that still fits in one descriptor.
    const fn chunk_size_for_alignment(alignment: usize) -> usize {
        4096 - alignment
    }

    /// Returns the largest chunk size compatible with this configuration in
    /// both transfer directions.
    pub const fn max_compatible_chunk_size(self) -> usize {
        Self::chunk_size_for_alignment(self.min_compatible_alignment())
    }

    /// Returns the minimum alignment `_buffer` must have for `direction`,
    /// taking into account whether the buffer lives in PSRAM.
    fn min_alignment(self, _buffer: &[u8], direction: TransferDirection) -> usize {
        let alignment = self.min_dram_alignment(direction);

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // PSRAM buffers additionally need burst-size alignment.
                let mut alignment = alignment;
                if is_valid_psram_address(_buffer.as_ptr() as usize) {
                    alignment = max(alignment, self.external_memory.min_psram_alignment(direction));
                }
            }
        }

        alignment
    }

    /// Returns the largest chunk size usable for `buffer` in `direction`.
    fn max_chunk_size_for(self, buffer: &[u8], direction: TransferDirection) -> usize {
        Self::chunk_size_for_alignment(self.min_alignment(buffer, direction))
    }

    /// Checks the buffer's address (and, for receive transfers, its size)
    /// against the required alignment.
    fn ensure_buffer_aligned(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaAlignmentError> {
        let alignment = self.min_alignment(buffer, direction);
        if buffer.as_ptr() as usize % alignment != 0 {
            return Err(DmaAlignmentError::Address);
        }

        // Only receive buffers need a size that is a multiple of the
        // alignment.
        if direction == TransferDirection::In && buffer.len() % alignment != 0 {
            return Err(DmaAlignmentError::Size);
        }

        Ok(())
    }

    /// Checks that the buffer lives in a DMA-capable memory region and is
    /// suitably aligned for `direction`.
    fn ensure_buffer_compatible(
        self,
        buffer: &[u8],
        direction: TransferDirection,
    ) -> Result<(), DmaBufError> {
        let is_in_dram = is_slice_in_dram(buffer);
        let is_in_psram = cfg!(psram_dma) && is_slice_in_psram(buffer);
        if !(is_in_dram || is_in_psram) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        self.ensure_buffer_aligned(buffer, direction)?;

        Ok(())
    }
}
343
/// The direction of a DMA transfer, from the DMA engine's point of view.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TransferDirection {
    /// Peripheral to memory (receive).
    In,
    /// Memory to peripheral (transmit).
    Out,
}
353
/// Everything a DMA channel needs to know to start a transfer with a
/// prepared buffer.
#[derive(PartialEq, Eq, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Preparation {
    /// Pointer to the first descriptor of the (linked) descriptor list.
    pub start: *mut DmaDescriptor,

    /// The direction of the transfer this preparation describes.
    pub direction: TransferDirection,

    /// Whether the buffer accesses external (PSRAM) memory.
    #[cfg(psram_dma)]
    pub accesses_psram: bool,

    /// The burst configuration to use for the transfer.
    #[doc = crate::trm_markdown_link!()]
    pub burst_transfer: BurstConfig,

    /// Whether the DMA channel should check descriptor ownership.
    /// `Some(enabled)` forces a setting, `None` leaves it to the driver.
    // NOTE(review): semantics inferred from usage in this file (EmptyBuf uses
    // Some(false), the other buffers use None) — confirm against driver docs.
    pub check_owner: Option<bool>,

    /// Whether the DMA engine should write descriptors back on completion.
    // NOTE(review): set to true for receive preparations and false for
    // transmit ones in this file; exact hardware semantics not visible here.
    pub auto_write_back: bool,
}
417
/// A buffer that can be used as the source of a DMA transmit transfer.
///
/// # Safety
///
/// NOTE(review): contract inferred from usage — implementors must return a
/// [`Preparation`] whose descriptors and buffer remain valid and correctly
/// linked while the DMA engine uses them. Confirm against upstream docs.
pub unsafe trait DmaTxBuffer {
    /// The type this buffer is turned into while a transfer is in flight.
    type View;

    /// Prepares the buffer and its descriptors for a transfer.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reconstructs the buffer from its view after the transfer is done.
    fn from_view(view: Self::View) -> Self;
}
442
/// A buffer that can be used as the destination of a DMA receive transfer.
///
/// # Safety
///
/// NOTE(review): contract inferred from usage — implementors must return a
/// [`Preparation`] whose descriptors and buffer remain valid and correctly
/// linked while the DMA engine uses them. Confirm against upstream docs.
pub unsafe trait DmaRxBuffer {
    /// The type this buffer is turned into while a transfer is in flight.
    type View;

    /// Prepares the buffer and its descriptors for a transfer.
    fn prepare(&mut self) -> Preparation;

    /// Converts the buffer into its in-flight view.
    fn into_view(self) -> Self::View;

    /// Reconstructs the buffer from its view after the transfer is done.
    fn from_view(view: Self::View) -> Self;
}
471
/// Opaque in-flight view wrapper used by the buffer types in this module.
pub struct BufView<T>(T);
477
/// A DMA transmit buffer: a set of descriptors plus the `'static` byte
/// buffer they describe.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaTxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    // Burst configuration the descriptors are currently linked for.
    burst: BurstConfig,
}
490
impl DmaTxBuf {
    /// Creates a new [DmaTxBuf] from some descriptors and a buffer, using
    /// the default burst configuration.
    pub fn new(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        Self::new_with_config(descriptors, buffer, BurstConfig::default())
    }

    /// Creates a new [DmaTxBuf] from some descriptors, a buffer and a burst
    /// configuration.
    ///
    /// Fails if the buffer/descriptors are unsuitable (wrong memory region,
    /// bad alignment, not enough descriptors).
    pub fn new_with_config(
        descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
        config: impl Into<BurstConfig>,
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            descriptors: DescriptorSet::new(descriptors)?,
            buffer,
            // Placeholder; `configure` installs the real config below.
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(config, capacity)?;

        Ok(buf)
    }

    /// Validates the buffer against `burst`, sets the transfer length and
    /// (re-)links the descriptor chain accordingly.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        self.descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;
        Ok(())
    }

    /// Changes the burst configuration, re-checking buffer compatibility.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buf, returning the descriptors and buffer.
    pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
        (self.descriptors.into_inner(), self.buffer)
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the number of bytes that would be transmitted (the sum of the
    /// linked descriptors' lengths).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Fallible length update: checks capacity and buffer/burst
    /// compatibility before distributing `len` over the descriptors.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )
    }

    /// Sets the transmit length; panics (via `unwrap!`) if `len` exceeds the
    /// capacity or is incompatible with the burst configuration.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst))
    }

    /// Copies `data` into the buffer and sets the transfer length to match.
    pub fn fill(&mut self, data: &[u8]) {
        self.set_length(data.len());
        self.as_mut_slice()[..data.len()].copy_from_slice(data);
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }
}
615
unsafe impl DmaTxBuffer for DmaTxBuf {
    type View = BufView<DmaTxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Reset every linked descriptor; only the last one (null `next`)
        // carries the EOF marker.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // If the data lives in PSRAM, write the cache back so the DMA
                // engine reads the up-to-date bytes from memory.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
659
/// A DMA receive buffer: a set of descriptors plus the `'static` byte
/// buffer they describe.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxBuf {
    descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    // Burst configuration the descriptors are currently linked for.
    burst: BurstConfig,
}
672
673impl DmaRxBuf {
674 pub fn new(
682 descriptors: &'static mut [DmaDescriptor],
683 buffer: &'static mut [u8],
684 ) -> Result<Self, DmaBufError> {
685 let mut buf = Self {
686 descriptors: DescriptorSet::new(descriptors)?,
687 buffer,
688 burst: BurstConfig::default(),
689 };
690
691 buf.configure(buf.burst, buf.capacity())?;
692
693 Ok(buf)
694 }
695
696 fn configure(
697 &mut self,
698 burst: impl Into<BurstConfig>,
699 length: usize,
700 ) -> Result<(), DmaBufError> {
701 let burst = burst.into();
702 self.set_length_fallible(length, burst)?;
703
704 self.descriptors.link_with_buffer(
705 self.buffer,
706 burst.max_chunk_size_for(self.buffer, TransferDirection::In),
707 )?;
708
709 self.burst = burst;
710 Ok(())
711 }
712
713 pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
715 let len = self.len();
716 self.configure(burst, len)
717 }
718
719 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
721 (self.descriptors.into_inner(), self.buffer)
722 }
723
724 pub fn capacity(&self) -> usize {
726 self.buffer.len()
727 }
728
729 #[allow(clippy::len_without_is_empty)]
732 pub fn len(&self) -> usize {
733 self.descriptors
734 .linked_iter()
735 .map(|d| d.size())
736 .sum::<usize>()
737 }
738
739 fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
740 if len > self.capacity() {
741 return Err(DmaBufError::BufferTooSmall);
742 }
743 burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
744
745 self.descriptors.set_rx_length(
746 len,
747 burst.max_chunk_size_for(&self.buffer[..len], TransferDirection::In),
748 )
749 }
750
751 pub fn set_length(&mut self, len: usize) {
757 unwrap!(self.set_length_fallible(len, self.burst));
758 }
759
760 pub fn as_slice(&self) -> &[u8] {
762 self.buffer
763 }
764
765 pub fn as_mut_slice(&mut self) -> &mut [u8] {
767 self.buffer
768 }
769
770 pub fn number_of_received_bytes(&self) -> usize {
772 self.descriptors
773 .linked_iter()
774 .map(|d| d.len())
775 .sum::<usize>()
776 }
777
778 pub fn read_received_data(&self, mut buf: &mut [u8]) -> usize {
785 let capacity = buf.len();
786 for chunk in self.received_data() {
787 if buf.is_empty() {
788 break;
789 }
790 let to_fill;
791 (to_fill, buf) = buf.split_at_mut(chunk.len());
792 to_fill.copy_from_slice(chunk);
793 }
794
795 capacity - buf.len()
796 }
797
798 pub fn received_data(&self) -> impl Iterator<Item = &[u8]> {
800 self.descriptors.linked_iter().map(|desc| {
801 unsafe { core::slice::from_raw_parts(desc.buffer.cast_const(), desc.len()) }
806 })
807 }
808}
809
unsafe impl DmaRxBuffer for DmaRxBuf {
    type View = BufView<DmaRxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Hand every linked descriptor back to the DMA for receiving.
        for desc in self.descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // If the buffer lives in PSRAM, invalidate the cache so the
                // CPU later reads the DMA-written data, not stale lines.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
852
/// A DMA buffer usable for both transmit and receive, with a separate
/// descriptor chain for each direction sharing one `'static` byte buffer.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct DmaRxTxBuf {
    rx_descriptors: DescriptorSet<'static>,
    tx_descriptors: DescriptorSet<'static>,
    buffer: &'static mut [u8],
    // Burst configuration both chains are currently linked for.
    burst: BurstConfig,
}
867
impl DmaRxTxBuf {
    /// Creates a new [DmaRxTxBuf] from some descriptors and a buffer, using
    /// the default burst configuration.
    pub fn new(
        rx_descriptors: &'static mut [DmaDescriptor],
        tx_descriptors: &'static mut [DmaDescriptor],
        buffer: &'static mut [u8],
    ) -> Result<Self, DmaBufError> {
        let mut buf = Self {
            rx_descriptors: DescriptorSet::new(rx_descriptors)?,
            tx_descriptors: DescriptorSet::new(tx_descriptors)?,
            buffer,
            burst: BurstConfig::default(),
        };

        let capacity = buf.capacity();
        buf.configure(buf.burst, capacity)?;

        Ok(buf)
    }

    /// Validates the buffer against `burst`, sets the transfer length and
    /// (re-)links both descriptor chains.
    fn configure(
        &mut self,
        burst: impl Into<BurstConfig>,
        length: usize,
    ) -> Result<(), DmaBufError> {
        let burst = burst.into();
        self.set_length_fallible(length, burst)?;

        // Each direction may have a different chunk size limit, so the two
        // chains are linked independently.
        self.rx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.link_with_buffer(
            self.buffer,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        self.burst = burst;

        Ok(())
    }

    /// Changes the burst configuration, re-checking buffer compatibility.
    pub fn set_burst_config(&mut self, burst: BurstConfig) -> Result<(), DmaBufError> {
        let len = self.len();
        self.configure(burst, len)
    }

    /// Consumes the buf, returning the rx descriptors, tx descriptors and
    /// buffer.
    pub fn split(
        self,
    ) -> (
        &'static mut [DmaDescriptor],
        &'static mut [DmaDescriptor],
        &'static mut [u8],
    ) {
        (
            self.rx_descriptors.into_inner(),
            self.tx_descriptors.into_inner(),
            self.buffer,
        )
    }

    /// Returns the size of the underlying buffer.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Returns the configured transfer length (the sum of the linked tx
    /// descriptors' lengths).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.tx_descriptors
            .linked_iter()
            .map(|d| d.len())
            .sum::<usize>()
    }

    /// Returns the entire underlying buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self.buffer
    }

    /// Returns the entire underlying buffer as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.buffer
    }

    /// Fallible length update: the buffer must be compatible with both
    /// transfer directions.
    fn set_length_fallible(&mut self, len: usize, burst: BurstConfig) -> Result<(), DmaBufError> {
        if len > self.capacity() {
            return Err(DmaBufError::BufferTooSmall);
        }
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::In)?;
        burst.ensure_buffer_compatible(&self.buffer[..len], TransferDirection::Out)?;

        self.rx_descriptors.set_rx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::In),
        )?;
        self.tx_descriptors.set_tx_length(
            len,
            burst.max_chunk_size_for(self.buffer, TransferDirection::Out),
        )?;

        Ok(())
    }

    /// Sets the transfer length; panics (via `unwrap!`) if `len` exceeds the
    /// capacity or is incompatible with the burst configuration.
    pub fn set_length(&mut self, len: usize) {
        unwrap!(self.set_length_fallible(len, self.burst));
    }
}
989
unsafe impl DmaTxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Reset the tx chain; only the last descriptor carries EOF.
        for desc in self.tx_descriptors.linked_iter_mut() {
            desc.reset_for_tx(desc.next.is_null());
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Write back the cache so the DMA reads up-to-date PSRAM data.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_writeback_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.tx_descriptors.head(),
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: false,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1034
unsafe impl DmaRxBuffer for DmaRxTxBuf {
    type View = BufView<DmaRxTxBuf>;

    fn prepare(&mut self) -> Preparation {
        // Hand the rx chain back to the DMA for receiving.
        for desc in self.rx_descriptors.linked_iter_mut() {
            desc.reset_for_rx();
        }

        cfg_if::cfg_if! {
            if #[cfg(psram_dma)] {
                // Invalidate the cache so the CPU reads DMA-written PSRAM data.
                let is_data_in_psram = !is_valid_ram_address(self.buffer.as_ptr() as usize);
                if is_data_in_psram {
                    unsafe {
                        crate::soc::cache_invalidate_addr(
                            self.buffer.as_ptr() as u32,
                            self.buffer.len() as u32,
                        )
                    };
                }
            }
        }

        Preparation {
            start: self.rx_descriptors.head(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: is_data_in_psram,
            burst_transfer: self.burst,
            check_owner: None,
            auto_write_back: true,
        }
    }

    fn into_view(self) -> BufView<DmaRxTxBuf> {
        BufView(self)
    }

    fn from_view(view: Self::View) -> Self {
        view.0
    }
}
1077
/// A receive buffer intended for continuous streaming: the buffer is split
/// evenly between the descriptors, and its view lets the application consume
/// data while the DMA keeps writing into recycled descriptors.
pub struct DmaRxStreamBuf {
    descriptors: &'static mut [DmaDescriptor],
    buffer: &'static mut [u8],
    burst: BurstConfig,
}
1123
1124impl DmaRxStreamBuf {
1125 pub fn new(
1128 descriptors: &'static mut [DmaDescriptor],
1129 buffer: &'static mut [u8],
1130 ) -> Result<Self, DmaBufError> {
1131 if !is_slice_in_dram(descriptors) {
1132 return Err(DmaBufError::UnsupportedMemoryRegion);
1133 }
1134 if !is_slice_in_dram(buffer) {
1135 return Err(DmaBufError::UnsupportedMemoryRegion);
1136 }
1137
1138 if descriptors.is_empty() {
1139 return Err(DmaBufError::InsufficientDescriptors);
1140 }
1141
1142 let chunk_size = buffer.len() / descriptors.len();
1144
1145 if chunk_size > 4095 {
1146 return Err(DmaBufError::InsufficientDescriptors);
1147 }
1148
1149 let excess = buffer.len() % descriptors.len();
1151 if chunk_size + excess > 4095 {
1152 return Err(DmaBufError::InsufficientDescriptors);
1153 }
1154
1155 let mut chunks = buffer.chunks_exact_mut(chunk_size);
1156 for (desc, chunk) in descriptors.iter_mut().zip(chunks.by_ref()) {
1157 desc.buffer = chunk.as_mut_ptr();
1158 desc.set_size(chunk.len());
1159 }
1160
1161 let remainder = chunks.into_remainder();
1162 debug_assert_eq!(remainder.len(), excess);
1163
1164 if !remainder.is_empty() {
1165 let last_descriptor = descriptors.last_mut().unwrap();
1167 last_descriptor.set_size(last_descriptor.size() + remainder.len());
1168 }
1169
1170 Ok(Self {
1171 descriptors,
1172 buffer,
1173 burst: BurstConfig::default(),
1174 })
1175 }
1176
1177 pub fn split(self) -> (&'static mut [DmaDescriptor], &'static mut [u8]) {
1179 (self.descriptors, self.buffer)
1180 }
1181}
1182
unsafe impl DmaRxBuffer for DmaRxStreamBuf {
    type View = DmaRxStreamBufView;

    fn prepare(&mut self) -> Preparation {
        // Link the descriptors into a chain (iterating in reverse so each
        // one can point at its successor; the last gets a null `next`) and
        // reset them for receiving.
        let mut next = null_mut();
        for desc in self.descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;

            desc.reset_for_rx();
        }
        Preparation {
            start: self.descriptors.as_mut_ptr(),
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: self.burst,

            check_owner: None,
            // The view relies on descriptors being written back so it can
            // tell which ones the DMA has finished with.
            auto_write_back: true,
        }
    }

    fn into_view(self) -> DmaRxStreamBufView {
        // Start consuming at the first descriptor, offset zero.
        DmaRxStreamBufView {
            buf: self,
            descriptor_idx: 0,
            descriptor_offset: 0,
        }
    }

    fn from_view(view: Self::View) -> Self {
        view.buf
    }
}
1223
/// In-flight view of a [DmaRxStreamBuf]: tracks how far the application has
/// consumed the received data.
pub struct DmaRxStreamBufView {
    buf: DmaRxStreamBuf,
    // Index of the descriptor currently being consumed.
    descriptor_idx: usize,
    // Bytes of that descriptor already consumed.
    descriptor_offset: usize,
}
1230
impl DmaRxStreamBufView {
    /// Returns the number of bytes that are available to read from the buf.
    pub fn available_bytes(&self) -> usize {
        // Walk the ring starting at the current descriptor (`head`) and
        // wrapping around (`tail`), summing completed descriptors.
        let (tail, head) = self.buf.descriptors.split_at(self.descriptor_idx);
        let mut result = 0;
        for desc in head.iter().chain(tail) {
            // A descriptor still owned by the DMA has no data for us yet.
            if desc.owner() == Owner::Dma {
                break;
            }
            result += desc.len();
        }
        // Exclude the part of the current descriptor already consumed.
        result - self.descriptor_offset
    }

    /// Copies up to `buf.len()` available bytes into `buf`, consuming them.
    /// Returns the number of bytes copied.
    pub fn pop(&mut self, buf: &mut [u8]) -> usize {
        if buf.is_empty() {
            return 0;
        }
        let total_bytes = buf.len();

        let mut remaining = buf;
        loop {
            let available = self.peek();
            if available.len() >= remaining.len() {
                // Enough contiguous data to finish the request.
                remaining.copy_from_slice(&available[0..remaining.len()]);
                self.consume(remaining.len());
                let consumed = remaining.len();
                remaining = &mut remaining[consumed..];
                break;
            } else {
                // Take everything currently contiguous and try again; note
                // this loops until the request is satisfied, so it busy-waits
                // on the DMA if less data is available than requested.
                let to_consume = available.len();
                remaining[0..to_consume].copy_from_slice(available);
                self.consume(to_consume);
                remaining = &mut remaining[to_consume..];
            }
        }

        total_bytes - remaining.len()
    }

    /// Returns the largest contiguous slice of received, unconsumed data.
    ///
    /// Note: this may be less than [Self::available_bytes] since the data
    /// may wrap around the end of the ring.
    pub fn peek(&self) -> &[u8] {
        let (slice, _) = self.peek_internal(false);
        slice
    }

    /// Like [Self::peek], but stops at (and reports) an EOF marker.
    pub fn peek_until_eof(&self) -> (&[u8], bool) {
        self.peek_internal(true)
    }

    /// Consumes (at most) `n` bytes, handing fully-consumed descriptors back
    /// to the DMA. Returns the number of bytes actually consumed.
    pub fn consume(&mut self, n: usize) -> usize {
        let mut remaining_bytes_to_consume = n;

        loop {
            let desc = &mut self.buf.descriptors[self.descriptor_idx];

            // Can't consume past data the DMA hasn't delivered yet.
            if desc.owner() == Owner::Dma {
                break;
            }

            let remaining_bytes_in_descriptor = desc.len() - self.descriptor_offset;
            if remaining_bytes_to_consume < remaining_bytes_in_descriptor {
                // Partially consume the current descriptor and stop.
                self.descriptor_offset += remaining_bytes_to_consume;
                remaining_bytes_to_consume = 0;
                break;
            }

            // This descriptor is fully consumed: clear it and give ownership
            // back to the DMA so it can be refilled.
            desc.set_owner(Owner::Dma);
            desc.set_suc_eof(false);
            desc.set_length(0);

            // It becomes the new tail of the chain, so it terminates the
            // list until the next descriptor is recycled behind it.
            desc.next = null_mut();

            let desc_ptr: *mut _ = desc;

            // Re-attach the recycled descriptor after its predecessor in the
            // ring (wrapping from index 0 to the last descriptor).
            let prev_descriptor_index = self
                .descriptor_idx
                .checked_sub(1)
                .unwrap_or(self.buf.descriptors.len() - 1);

            self.buf.descriptors[prev_descriptor_index].next = desc_ptr;

            // Advance to the next descriptor, wrapping around the ring.
            self.descriptor_idx += 1;
            if self.descriptor_idx >= self.buf.descriptors.len() {
                self.descriptor_idx = 0;
            }
            self.descriptor_offset = 0;

            remaining_bytes_to_consume -= remaining_bytes_in_descriptor;
        }

        n - remaining_bytes_to_consume
    }

    /// Returns the largest contiguous run of received data starting at the
    /// current position, optionally stopping at an EOF marker. The second
    /// element reports whether an EOF was hit (only when `stop_at_eof`).
    fn peek_internal(&self, stop_at_eof: bool) -> (&[u8], bool) {
        let descriptors = &self.buf.descriptors[self.descriptor_idx..];

        // `consume` wraps descriptor_idx before it reaches len(), so there
        // is always at least one descriptor here.
        debug_assert!(!descriptors.is_empty());

        if descriptors.len() == 1 {
            // Current descriptor is the physically last one; its chunk sits
            // at the very end of the buffer (it also holds any excess).
            let last_descriptor = &descriptors[0];
            if last_descriptor.owner() == Owner::Dma {
                // No data delivered yet.
                (&[], false)
            } else {
                let length = last_descriptor.len() - self.descriptor_offset;
                (
                    &self.buf.buffer[self.buf.buffer.len() - length..],
                    last_descriptor.flags.suc_eof(),
                )
            }
        } else {
            // All descriptors except the last hold exactly `chunk_size`
            // bytes, so byte offsets can be computed from the index.
            let chunk_size = descriptors[0].size();
            let mut found_eof = false;

            let mut number_of_contiguous_bytes = 0;
            for desc in descriptors {
                if desc.owner() == Owner::Dma {
                    break;
                }
                number_of_contiguous_bytes += desc.len();

                if stop_at_eof && desc.flags.suc_eof() {
                    found_eof = true;
                    break;
                }
                // A partially-filled descriptor ends the contiguous run.
                if desc.len() < desc.size() {
                    break;
                }
            }

            (
                &self.buf.buffer[chunk_size * self.descriptor_idx..][..number_of_contiguous_bytes]
                    [self.descriptor_offset..],
                found_eof,
            )
        }
    }
}
1394
// A single placeholder descriptor used by `EmptyBuf` as the list head.
static mut EMPTY: [DmaDescriptor; 1] = [DmaDescriptor::EMPTY];
1396
/// A zero-length DMA buffer, for transfers that need no data in one
/// direction.
pub struct EmptyBuf;
1399
unsafe impl DmaTxBuffer for EmptyBuf {
    type View = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        #[allow(unused_unsafe)] Preparation {
            // Point at the shared placeholder descriptor.
            start: unsafe { core::ptr::addr_of_mut!(EMPTY).cast() },
            direction: TransferDirection::Out,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // NOTE(review): owner checking is explicitly disabled here,
            // presumably because the shared EMPTY descriptor's owner bit
            // cannot be trusted — confirm against driver docs.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1429
unsafe impl DmaRxBuffer for EmptyBuf {
    type View = EmptyBuf;

    fn prepare(&mut self) -> Preparation {
        #[allow(unused_unsafe)] Preparation {
            // Point at the shared placeholder descriptor.
            start: unsafe { core::ptr::addr_of_mut!(EMPTY).cast() },
            direction: TransferDirection::In,
            #[cfg(psram_dma)]
            accesses_psram: false,
            burst_transfer: BurstConfig::default(),

            // NOTE(review): owner checking is explicitly disabled here,
            // presumably because the shared EMPTY descriptor's owner bit
            // cannot be trusted — confirm against driver docs.
            check_owner: Some(false),
            auto_write_back: true,
        }
    }

    fn into_view(self) -> EmptyBuf {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1457
/// A transmit buffer whose single descriptor points back at itself, so the
/// DMA transmits the buffer contents over and over without CPU involvement.
pub struct DmaLoopBuf {
    descriptor: &'static mut DmaDescriptor,
    buffer: &'static mut [u8],
}
1472
impl DmaLoopBuf {
    /// Creates a new [DmaLoopBuf].
    ///
    /// Fails if the buffer or descriptor is outside DRAM, or if the buffer
    /// doesn't fit in the single descriptor.
    pub fn new(
        descriptor: &'static mut DmaDescriptor,
        buffer: &'static mut [u8],
    ) -> Result<DmaLoopBuf, DmaBufError> {
        if !is_slice_in_dram(buffer) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }
        if !is_slice_in_dram(core::slice::from_ref(descriptor)) {
            return Err(DmaBufError::UnsupportedMemoryRegion);
        }

        // The whole buffer must fit into this one descriptor.
        if buffer.len() > BurstConfig::default().max_chunk_size_for(buffer, TransferDirection::Out)
        {
            return Err(DmaBufError::InsufficientDescriptors);
        }

        descriptor.set_owner(Owner::Dma); descriptor.set_suc_eof(false);
        descriptor.set_length(buffer.len());
        descriptor.set_size(buffer.len());
        descriptor.buffer = buffer.as_mut_ptr();
        // The descriptor points back at itself, so the DMA loops forever.
        descriptor.next = descriptor;

        Ok(Self { descriptor, buffer })
    }

    /// Consumes the buf, returning the descriptor and buffer.
    pub fn split(self) -> (&'static mut DmaDescriptor, &'static mut [u8]) {
        (self.descriptor, self.buffer)
    }
}
1506
unsafe impl DmaTxBuffer for DmaLoopBuf {
    type View = Self;

    fn prepare(&mut self) -> Preparation {
        Preparation {
            start: self.descriptor,
            #[cfg(psram_dma)]
            accesses_psram: false,
            direction: TransferDirection::Out,
            burst_transfer: BurstConfig::default(),
            // NOTE(review): the looping descriptor is never handed back, so
            // owner checking is disabled — confirm against driver docs.
            check_owner: Some(false),

            auto_write_back: false,
        }
    }

    fn into_view(self) -> Self::View {
        self
    }

    fn from_view(view: Self::View) -> Self {
        view
    }
}
1533
impl Deref for DmaLoopBuf {
    type Target = [u8];

    /// Derefs to the underlying byte buffer.
    fn deref(&self) -> &Self::Target {
        self.buffer
    }
}
1541
impl DerefMut for DmaLoopBuf {
    /// Mutably derefs to the underlying byte buffer.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.buffer
    }
}