1#![doc = crate::before_snippet!()]
26use crate::{
60 pac,
61 peripherals::AES,
62 reg_access::{AlignmentHelper, NativeEndianess},
63 system::GenericPeripheralGuard,
64};
65
// Chip-specific register-access implementation. The `path` attribute picks
// the implementation file matching the compiled target; the C3/C6/H2
// variants share a single implementation (`esp32cX.rs`).
#[cfg_attr(esp32, path = "esp32.rs")]
#[cfg_attr(esp32s3, path = "esp32s3.rs")]
#[cfg_attr(esp32s2, path = "esp32s2.rs")]
#[cfg_attr(esp32c3, path = "esp32cX.rs")]
#[cfg_attr(esp32c6, path = "esp32cX.rs")]
#[cfg_attr(esp32h2, path = "esp32cX.rs")]
mod aes_spec_impl;
73
/// Size in bytes of the peripheral's native register word (one `u32`).
const ALIGN_SIZE: usize = core::mem::size_of::<u32>();
75
/// AES key material, distinguished by key length.
pub enum Key {
    /// 128-bit key.
    Key16([u8; 16]),
    /// 192-bit key — only compiled in for ESP32 and ESP32-S2.
    #[cfg(any(esp32, esp32s2))]
    Key24([u8; 24]),
    /// 256-bit key.
    Key32([u8; 32]),
}
86
87impl From<[u8; 16]> for Key {
89 fn from(key: [u8; 16]) -> Self {
90 Key::Key16(key)
91 }
92}
93
// Allows passing a raw 24-byte array anywhere a 192-bit `Key` is expected
// (AES-192 variants exist only on ESP32 and ESP32-S2).
#[cfg(any(esp32, esp32s2))]
impl From<[u8; 24]> for Key {
    fn from(bytes: [u8; 24]) -> Self {
        Self::Key24(bytes)
    }
}
100
101impl From<[u8; 32]> for Key {
102 fn from(key: [u8; 32]) -> Self {
103 Key::Key32(key)
104 }
105}
106
107impl Key {
108 fn as_slice(&self) -> &[u8] {
110 match self {
111 Key::Key16(key) => key,
112 #[cfg(any(esp32, esp32s2))]
113 Key::Key24(key) => key,
114 Key::Key32(key) => key,
115 }
116 }
117}
118
/// AES operating mode, written verbatim to the hardware mode register.
///
/// NOTE(review): judging by the discriminants, values 0..=2 select
/// encryption and 4..=6 decryption for 128/192/256-bit keys (bit 2 flips
/// encrypt/decrypt) — confirm against the target chip's TRM.
pub enum Mode {
    Encryption128 = 0,
    #[cfg(any(esp32, esp32s2))]
    Encryption192 = 1,
    Encryption256 = 2,
    Decryption128 = 4,
    #[cfg(any(esp32, esp32s2))]
    Decryption192 = 5,
    Decryption256 = 6,
}
136
/// Driver for the AES hardware accelerator (blocking mode).
pub struct Aes<'d> {
    // Owned AES peripheral singleton.
    aes: AES<'d>,
    // Helper for word-aligned register writes in native endianness.
    alignment_helper: AlignmentHelper<NativeEndianess>,
    // Keeps the AES peripheral clock/power enabled while the driver lives.
    _guard: GenericPeripheralGuard<{ crate::system::Peripheral::Aes as u8 }>,
}
143
impl<'d> Aes<'d> {
    /// Creates a new AES driver, enabling the peripheral and running the
    /// chip-specific initialization.
    pub fn new(aes: AES<'d>) -> Self {
        // Keeps the peripheral powered/clocked for the driver's lifetime.
        let guard = GenericPeripheralGuard::new();

        let mut ret = Self {
            aes,
            alignment_helper: AlignmentHelper::native_endianess(),
            _guard: guard,
        };
        // Chip-specific setup; provided by the `aes_spec_impl` module.
        ret.init();

        ret
    }

    // Raw access to the AES register block.
    fn regs(&self) -> &pac::aes::RegisterBlock {
        self.aes.register_block()
    }

    /// Encrypts or decrypts a single 16-byte block in place.
    ///
    /// Writes the key, mode and input block to the hardware, starts the
    /// transform, busy-waits until the peripheral reports idle, then reads
    /// the result back into `block`. The `write_key`/`write_mode`/
    /// `read_idle` helpers are chip-specific (see `aes_spec_impl`).
    pub fn process<K>(&mut self, block: &mut [u8; 16], mode: Mode, key: K)
    where
        K: Into<Key>,
    {
        self.write_key(key.into().as_slice());
        self.write_mode(mode);
        self.set_block(block);
        self.start();
        // Spin until the hardware finishes this block.
        while !(self.is_idle()) {}
        self.block(block);
    }

    // True once the peripheral has finished processing.
    fn is_idle(&mut self) -> bool {
        self.read_idle()
    }

    // Loads the input block into the hardware data registers.
    fn set_block(&mut self, block: &[u8; 16]) {
        self.write_block(block);
    }

    // Reads the processed block out of the hardware data registers.
    fn block(&self, block: &mut [u8; 16]) {
        self.read_block(block);
    }

    // Kicks off the transform.
    fn start(&mut self) {
        self.write_start();
    }
}
193
/// Sealed marker trait implemented by the supported AES key-size flavours.
pub trait AesFlavour: crate::private::Sealed {
    /// The fixed-size key array type associated with this flavour.
    type KeyType<'b>;
}
202
/// Marker type selecting AES-128.
pub struct Aes128;

/// Marker type selecting AES-192 (ESP32 and ESP32-S2 only).
#[cfg(any(esp32, esp32s2))]
pub struct Aes192;

/// Marker type selecting AES-256.
pub struct Aes256;

// Seal the flavour markers so downstream crates cannot add new ones.
impl crate::private::Sealed for Aes128 {}
#[cfg(any(esp32, esp32s2))]
impl crate::private::Sealed for Aes192 {}
impl crate::private::Sealed for Aes256 {}
217
/// Data endianness setting (ESP32/ESP32-S2 only).
///
/// Discriminants are the values written to the hardware register.
#[cfg(any(esp32, esp32s2))]
pub enum Endianness {
    /// Big-endian byte order (register value 1).
    BigEndian = 1,
    /// Little-endian byte order (register value 0).
    LittleEndian = 0,
}
226
227#[cfg(any(esp32c3, esp32c6, esp32h2, esp32s2, esp32s3))]
234pub mod dma {
235 use core::mem::ManuallyDrop;
236
237 use crate::{
238 Blocking,
239 aes::{Key, Mode},
240 dma::{
241 Channel,
242 DmaChannelFor,
243 DmaPeripheral,
244 DmaRxBuffer,
245 DmaTxBuffer,
246 PeripheralDmaChannel,
247 },
248 peripherals::AES,
249 };
250
251 const ALIGN_SIZE: usize = core::mem::size_of::<u32>();
252
    /// Block cipher mode of operation; discriminants match the encoding of
    /// the hardware's block-mode register.
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub enum CipherMode {
        /// Electronic codebook.
        Ecb = 0,
        /// Cipher block chaining.
        Cbc,
        /// Output feedback.
        Ofb,
        /// Counter mode.
        Ctr,
        /// Cipher feedback with 8-bit shift.
        Cfb8,
        /// Cipher feedback with 128-bit shift.
        Cfb128,
    }
269
    /// AES driver extended with DMA-based block transfer support.
    #[instability::unstable]
    pub struct AesDma<'d> {
        /// The wrapped blocking AES driver.
        pub aes: super::Aes<'d>,

        // Blocking-mode DMA channel bound to the AES peripheral.
        channel: Channel<Blocking, PeripheralDmaChannel<AES<'d>>>,
    }
278
    impl<'d> crate::aes::Aes<'d> {
        /// Upgrades the blocking AES driver into a DMA-capable one bound to
        /// the given DMA channel.
        pub fn with_dma(self, channel: impl DmaChannelFor<AES<'d>>) -> AesDma<'d> {
            let channel = Channel::new(channel.degrade());
            // Validates channel/peripheral compatibility at runtime
            // (`runtime_ensure_compatible` is defined elsewhere; presumably
            // it panics on mismatch — confirm at the definition).
            channel.runtime_ensure_compatible(&self.aes);
            AesDma { aes: self, channel }
        }
    }
287
288 impl core::fmt::Debug for AesDma<'_> {
289 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
290 f.debug_struct("AesDma").finish()
291 }
292 }
293
294 impl<'d> AesDma<'d> {
295 pub fn write_key<K>(&mut self, key: K)
298 where
299 K: Into<Key>,
300 {
301 let key = key.into(); debug_assert!(key.as_slice().len() <= 8 * ALIGN_SIZE);
303 debug_assert_eq!(key.as_slice().len() % ALIGN_SIZE, 0);
304 self.aes.write_key(key.as_slice());
305 }
306
307 pub fn write_block(&mut self, block: &[u8]) {
310 debug_assert_eq!(block.len(), 4 * ALIGN_SIZE);
311 self.aes.write_key(block);
312 }
313
314 pub fn process<K, RXBUF, TXBUF>(
319 mut self,
320 number_of_blocks: usize,
321 mut output: RXBUF,
322 mut input: TXBUF,
323 mode: Mode,
324 cipher_mode: CipherMode,
325 key: K,
326 ) -> Result<AesTransfer<'d, RXBUF, TXBUF>, (crate::dma::DmaError, Self, RXBUF, TXBUF)>
327 where
328 K: Into<Key>,
329 TXBUF: DmaTxBuffer,
330 RXBUF: DmaRxBuffer,
331 {
332 self.reset_aes();
334
335 let result = unsafe {
336 self.channel
337 .tx
338 .prepare_transfer(self.dma_peripheral(), &mut input)
339 .and_then(|_| self.channel.tx.start_transfer())
340 };
341 if let Err(err) = result {
342 return Err((err, self, output, input));
343 }
344
345 let result = unsafe {
346 self.channel
347 .rx
348 .prepare_transfer(self.dma_peripheral(), &mut output)
349 .and_then(|_| self.channel.rx.start_transfer())
350 };
351 if let Err(err) = result {
352 self.channel.tx.stop_transfer();
353
354 return Err((err, self, output, input));
355 }
356
357 self.enable_dma(true);
358 self.enable_interrupt();
359 self.aes.write_mode(mode);
360 self.set_cipher_mode(cipher_mode);
361 self.write_key(key.into());
362
363 self.set_num_block(number_of_blocks as u32);
364
365 self.start_transform();
366
367 Ok(AesTransfer {
368 aes_dma: ManuallyDrop::new(self),
369 rx_view: ManuallyDrop::new(output.into_view()),
370 tx_view: ManuallyDrop::new(input.into_view()),
371 })
372 }
373
374 #[cfg(any(esp32c3, esp32s2, esp32s3))]
375 fn reset_aes(&self) {
376 use crate::peripherals::SYSTEM;
377
378 SYSTEM::regs()
379 .perip_rst_en1()
380 .modify(|_, w| w.crypto_aes_rst().set_bit());
381 SYSTEM::regs()
382 .perip_rst_en1()
383 .modify(|_, w| w.crypto_aes_rst().clear_bit());
384 }
385
386 #[cfg(any(esp32c6, esp32h2))]
387 fn reset_aes(&self) {
388 use crate::peripherals::PCR;
389
390 PCR::regs()
391 .aes_conf()
392 .modify(|_, w| w.aes_rst_en().set_bit());
393 PCR::regs()
394 .aes_conf()
395 .modify(|_, w| w.aes_rst_en().clear_bit());
396 }
397
398 fn dma_peripheral(&self) -> DmaPeripheral {
399 DmaPeripheral::Aes
400 }
401
402 fn enable_dma(&self, enable: bool) {
403 self.aes
404 .regs()
405 .dma_enable()
406 .write(|w| w.dma_enable().bit(enable));
407 }
408
409 fn enable_interrupt(&self) {
410 self.aes.regs().int_ena().write(|w| w.int_ena().set_bit());
411 }
412
413 fn set_cipher_mode(&self, mode: CipherMode) {
414 self.aes
415 .regs()
416 .block_mode()
417 .modify(|_, w| unsafe { w.block_mode().bits(mode as u8) });
418
419 if mode == CipherMode::Ctr {
420 self.aes
421 .regs()
422 .inc_sel()
423 .modify(|_, w| w.inc_sel().clear_bit());
424 }
425 }
426
427 fn start_transform(&self) {
428 self.aes.write_start();
429 }
430
431 fn finish_transform(&self) {
432 self.aes.regs().dma_exit().write(|w| w.dma_exit().set_bit());
433 self.enable_dma(false);
434 self.reset_aes();
435 }
436
437 fn set_num_block(&self, block: u32) {
438 self.aes
439 .regs()
440 .block_num()
441 .modify(|_, w| unsafe { w.block_num().bits(block) });
442 }
443 }
444
    /// An in-flight DMA AES transfer.
    ///
    /// Call [`Self::wait`] to block until completion and recover the driver
    /// and buffers; dropping the transfer instead stops the DMA channels.
    /// Fields are `ManuallyDrop` so `wait()` can move them out and then
    /// `forget` self without running `Drop`.
    #[instability::unstable]
    pub struct AesTransfer<'d, RX: DmaRxBuffer, TX: DmaTxBuffer> {
        aes_dma: ManuallyDrop<AesDma<'d>>,
        rx_view: ManuallyDrop<RX::View>,
        tx_view: ManuallyDrop<TX::View>,
    }
452
    impl<'d, RX: DmaRxBuffer, TX: DmaTxBuffer> AesTransfer<'d, RX, TX> {
        /// Returns `true` when the hardware reports the transform finished.
        ///
        /// NOTE(review): compares the state register against the magic
        /// value 2 — presumably the "done" state; confirm against the TRM.
        pub fn is_done(&self) -> bool {
            self.aes_dma.aes.regs().state().read().state().bits() == 2
        }

        /// Busy-waits for completion, tears down the transfer, and returns
        /// the driver plus both buffers.
        pub fn wait(mut self) -> (AesDma<'d>, RX, TX) {
            while !self.is_done() {}

            // Stop both DMA channels before touching the peripheral state.
            self.aes_dma.channel.rx.stop_transfer();
            self.aes_dma.channel.tx.stop_transfer();

            self.aes_dma.finish_transform();

            // SAFETY: each field is taken exactly once and `self` is then
            // forgotten, so `Drop` never runs on the emptied wrappers.
            let (aes_dma, rx_view, tx_view) = unsafe {
                let aes_dma = ManuallyDrop::take(&mut self.aes_dma);
                let rx_view = ManuallyDrop::take(&mut self.rx_view);
                let tx_view = ManuallyDrop::take(&mut self.tx_view);
                core::mem::forget(self);
                (aes_dma, rx_view, tx_view)
            };

            (aes_dma, RX::from_view(rx_view), TX::from_view(tx_view))
        }

        /// Shared access to the output buffer's DMA view.
        pub fn rx_view(&self) -> &RX::View {
            &self.rx_view
        }

        /// Exclusive access to the output buffer's DMA view.
        pub fn rx_view_mut(&mut self) -> &mut RX::View {
            &mut self.rx_view
        }

        /// Shared access to the input buffer's DMA view.
        pub fn tx_view(&self) -> &TX::View {
            &self.tx_view
        }

        /// Exclusive access to the input buffer's DMA view.
        pub fn tx_view_mut(&mut self) -> &mut TX::View {
            &mut self.tx_view
        }
    }
502
    impl<RX: DmaRxBuffer, TX: DmaTxBuffer> Drop for AesTransfer<'_, RX, TX> {
        fn drop(&mut self) {
            // The transfer may still be running: halt both channels first.
            self.aes_dma.channel.rx.stop_transfer();
            self.aes_dma.channel.tx.stop_transfer();

            // SAFETY: each ManuallyDrop field is consumed exactly once here
            // and never touched again; `wait()` forgets `self`, so this
            // cannot double-drop.
            unsafe {
                ManuallyDrop::drop(&mut self.aes_dma);
            }
            let rx_view = unsafe { ManuallyDrop::take(&mut self.rx_view) };
            let tx_view = unsafe { ManuallyDrop::take(&mut self.tx_view) };
            // Reassemble the buffers so their own destructors run.
            let _ = RX::from_view(rx_view);
            let _ = TX::from_view(tx_view);
        }
    }
520}