1#![doc = crate::before_snippet!()]
26use crate::{
60 pac,
61 peripheral::{Peripheral, PeripheralRef},
62 peripherals::AES,
63 reg_access::{AlignmentHelper, NativeEndianess},
64 system::GenericPeripheralGuard,
65};
66
// Selects the chip-specific register-access implementation at compile
// time; the C3, C6 and H2 variants share a single implementation file.
#[cfg_attr(esp32, path = "esp32.rs")]
#[cfg_attr(esp32s3, path = "esp32s3.rs")]
#[cfg_attr(esp32s2, path = "esp32s2.rs")]
#[cfg_attr(esp32c3, path = "esp32cX.rs")]
#[cfg_attr(esp32c6, path = "esp32cX.rs")]
#[cfg_attr(esp32h2, path = "esp32cX.rs")]
mod aes_spec_impl;
74
// Size in bytes of one 32-bit register word; key and block lengths are
// validated against multiples of this.
const ALIGN_SIZE: usize = core::mem::size_of::<u32>();
76
/// An AES key, one variant per supported key length.
pub enum Key {
    /// 128-bit key.
    Key16([u8; 16]),
    /// 192-bit key (only available on ESP32 and ESP32-S2).
    #[cfg(any(esp32, esp32s2))]
    Key24([u8; 24]),
    /// 256-bit key.
    Key32([u8; 32]),
}
87
88impl From<[u8; 16]> for Key {
90 fn from(key: [u8; 16]) -> Self {
91 Key::Key16(key)
92 }
93}
94
/// Wraps a raw 192-bit key as [`Key::Key24`] (ESP32/ESP32-S2 only).
#[cfg(any(esp32, esp32s2))]
impl From<[u8; 24]> for Key {
    fn from(bytes: [u8; 24]) -> Self {
        Self::Key24(bytes)
    }
}
101
102impl From<[u8; 32]> for Key {
103 fn from(key: [u8; 32]) -> Self {
104 Key::Key32(key)
105 }
106}
107
108impl Key {
109 fn as_slice(&self) -> &[u8] {
111 match self {
112 Key::Key16(ref key) => key,
113 #[cfg(any(esp32, esp32s2))]
114 Key::Key24(ref key) => key,
115 Key::Key32(ref key) => key,
116 }
117 }
118}
119
/// AES operating mode: direction combined with key length.
///
/// The discriminants are the raw values handed to the chip-specific
/// `write_mode` implementation (encryption 0..=2, decryption 4..=6).
pub enum Mode {
    /// Encrypt with a 128-bit key.
    Encryption128 = 0,
    /// Encrypt with a 192-bit key (ESP32/ESP32-S2 only).
    #[cfg(any(esp32, esp32s2))]
    Encryption192 = 1,
    /// Encrypt with a 256-bit key.
    Encryption256 = 2,
    /// Decrypt with a 128-bit key.
    Decryption128 = 4,
    /// Decrypt with a 192-bit key (ESP32/ESP32-S2 only).
    #[cfg(any(esp32, esp32s2))]
    Decryption192 = 5,
    /// Decrypt with a 256-bit key.
    Decryption256 = 6,
}
137
/// AES driver operating in blocking (CPU-driven) mode.
pub struct Aes<'d> {
    // Exclusive handle to the AES peripheral.
    aes: PeripheralRef<'d, AES>,
    // Used by the chip-specific impls to copy key/block bytes into the
    // word-aligned peripheral registers.
    alignment_helper: AlignmentHelper<NativeEndianess>,
    // RAII guard tied to the AES peripheral; presumably keeps it
    // powered/clocked while the driver lives — see `GenericPeripheralGuard`.
    _guard: GenericPeripheralGuard<{ crate::system::Peripheral::Aes as u8 }>,
}
144
145impl<'d> Aes<'d> {
146 pub fn new(aes: impl Peripheral<P = AES> + 'd) -> Self {
148 crate::into_ref!(aes);
149
150 let guard = GenericPeripheralGuard::new();
151
152 let mut ret = Self {
153 aes,
154 alignment_helper: AlignmentHelper::native_endianess(),
155 _guard: guard,
156 };
157 ret.init();
158
159 ret
160 }
161
162 fn regs(&self) -> &pac::aes::RegisterBlock {
163 self.aes.register_block()
164 }
165
166 pub fn process<K>(&mut self, block: &mut [u8; 16], mode: Mode, key: K)
168 where
169 K: Into<Key>,
170 {
171 self.write_key(key.into().as_slice());
173 self.write_mode(mode);
174 self.set_block(block);
175 self.start();
176 while !(self.is_idle()) {}
177 self.block(block);
178 }
179
180 fn is_idle(&mut self) -> bool {
181 self.read_idle()
182 }
183
184 fn set_block(&mut self, block: &[u8; 16]) {
185 self.write_block(block);
186 }
187
188 fn block(&self, block: &mut [u8; 16]) {
189 self.read_block(block);
190 }
191
192 fn start(&mut self) {
193 self.write_start();
194 }
195}
196
/// Sealed marker trait for the AES key-size marker types ([`Aes128`],
/// [`Aes256`], and on some chips [`Aes192`]).
pub trait AesFlavour: crate::private::Sealed {
    /// The key type accepted for this flavour.
    type KeyType<'b>;
}
205
/// Marker type for AES-128 operation.
pub struct Aes128;

/// Marker type for AES-192 operation (ESP32/ESP32-S2 only).
#[cfg(any(esp32, esp32s2))]
pub struct Aes192;

/// Marker type for AES-256 operation.
pub struct Aes256;

// Seal the markers so `AesFlavour` cannot be implemented outside the crate.
impl crate::private::Sealed for Aes128 {}
#[cfg(any(esp32, esp32s2))]
impl crate::private::Sealed for Aes192 {}
impl crate::private::Sealed for Aes256 {}
220
/// Data endianness configuration (ESP32/ESP32-S2 only). The discriminants
/// are the raw register values.
#[cfg(any(esp32, esp32s2))]
pub enum Endianness {
    /// Most-significant byte first.
    BigEndian = 1,
    /// Least-significant byte first.
    LittleEndian = 0,
}
229
230#[cfg(any(esp32c3, esp32c6, esp32h2, esp32s2, esp32s3))]
237pub mod dma {
238 use core::mem::ManuallyDrop;
239
240 use crate::{
241 aes::{Key, Mode},
242 dma::{
243 Channel,
244 DmaChannelFor,
245 DmaPeripheral,
246 DmaRxBuffer,
247 DmaTxBuffer,
248 PeripheralDmaChannel,
249 Rx,
250 Tx,
251 },
252 peripheral::Peripheral,
253 peripherals::AES,
254 Blocking,
255 };
256
    // Size in bytes of one 32-bit register word; DMA key/block lengths are
    // validated against multiples of this.
    const ALIGN_SIZE: usize = core::mem::size_of::<u32>();
258
    /// Block cipher modes of operation available in DMA mode. The
    /// discriminants are the raw values written to the block-mode register.
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub enum CipherMode {
        /// Electronic codebook.
        Ecb = 0,
        /// Cipher block chaining.
        Cbc,
        /// Output feedback.
        Ofb,
        /// Counter.
        Ctr,
        /// 8-bit cipher feedback.
        Cfb8,
        /// 128-bit cipher feedback.
        Cfb128,
    }
275
    /// AES driver bound to a DMA channel for multi-block transforms.
    #[instability::unstable]
    pub struct AesDma<'d> {
        /// The wrapped blocking AES driver.
        pub aes: super::Aes<'d>,

        // DMA channel used to feed input into and drain output from the
        // peripheral.
        channel: Channel<'d, Blocking, PeripheralDmaChannel<AES>>,
    }
284
    impl<'d> crate::aes::Aes<'d> {
        /// Pairs this AES driver with a DMA channel, returning an
        /// [`AesDma`] capable of multi-block transforms.
        pub fn with_dma<CH>(self, channel: impl Peripheral<P = CH> + 'd) -> AesDma<'d>
        where
            CH: DmaChannelFor<AES>,
        {
            let channel = Channel::new(channel.map(|ch| ch.degrade()));
            // NOTE(review): presumably rejects channels that cannot service
            // the AES peripheral — confirm `runtime_ensure_compatible`'s
            // failure behavior (panic vs. no-op) before relying on it.
            channel.runtime_ensure_compatible(&self.aes);
            AesDma { aes: self, channel }
        }
    }
296
297 impl core::fmt::Debug for AesDma<'_> {
298 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
299 f.debug_struct("AesDma").finish()
300 }
301 }
302
303 impl<'d> AesDma<'d> {
304 pub fn write_key<K>(&mut self, key: K)
307 where
308 K: Into<Key>,
309 {
310 let key = key.into(); debug_assert!(key.as_slice().len() <= 8 * ALIGN_SIZE);
312 debug_assert_eq!(key.as_slice().len() % ALIGN_SIZE, 0);
313 self.aes.write_key(key.as_slice());
314 }
315
316 pub fn write_block(&mut self, block: &[u8]) {
319 debug_assert_eq!(block.len(), 4 * ALIGN_SIZE);
320 self.aes.write_key(block);
321 }
322
323 pub fn process<K, RXBUF, TXBUF>(
328 mut self,
329 number_of_blocks: usize,
330 mut output: RXBUF,
331 mut input: TXBUF,
332 mode: Mode,
333 cipher_mode: CipherMode,
334 key: K,
335 ) -> Result<AesTransfer<'d, RXBUF, TXBUF>, (crate::dma::DmaError, Self, RXBUF, TXBUF)>
336 where
337 K: Into<Key>,
338 TXBUF: DmaTxBuffer,
339 RXBUF: DmaRxBuffer,
340 {
341 self.reset_aes();
343
344 let result = unsafe {
345 self.channel
346 .tx
347 .prepare_transfer(self.dma_peripheral(), &mut input)
348 .and_then(|_| self.channel.tx.start_transfer())
349 };
350 if let Err(err) = result {
351 return Err((err, self, output, input));
352 }
353
354 let result = unsafe {
355 self.channel
356 .rx
357 .prepare_transfer(self.dma_peripheral(), &mut output)
358 .and_then(|_| self.channel.rx.start_transfer())
359 };
360 if let Err(err) = result {
361 self.channel.tx.stop_transfer();
362
363 return Err((err, self, output, input));
364 }
365
366 self.enable_dma(true);
367 self.enable_interrupt();
368 self.aes.write_mode(mode);
369 self.set_cipher_mode(cipher_mode);
370 self.write_key(key.into());
371
372 self.set_num_block(number_of_blocks as u32);
373
374 self.start_transform();
375
376 Ok(AesTransfer {
377 aes_dma: ManuallyDrop::new(self),
378 rx_view: ManuallyDrop::new(output.into_view()),
379 tx_view: ManuallyDrop::new(input.into_view()),
380 })
381 }
382
383 #[cfg(any(esp32c3, esp32s2, esp32s3))]
384 fn reset_aes(&self) {
385 use crate::peripherals::SYSTEM;
386
387 SYSTEM::regs()
388 .perip_rst_en1()
389 .modify(|_, w| w.crypto_aes_rst().set_bit());
390 SYSTEM::regs()
391 .perip_rst_en1()
392 .modify(|_, w| w.crypto_aes_rst().clear_bit());
393 }
394
395 #[cfg(any(esp32c6, esp32h2))]
396 fn reset_aes(&self) {
397 use crate::peripherals::PCR;
398
399 PCR::regs()
400 .aes_conf()
401 .modify(|_, w| w.aes_rst_en().set_bit());
402 PCR::regs()
403 .aes_conf()
404 .modify(|_, w| w.aes_rst_en().clear_bit());
405 }
406
407 fn dma_peripheral(&self) -> DmaPeripheral {
408 DmaPeripheral::Aes
409 }
410
411 fn enable_dma(&self, enable: bool) {
412 self.aes
413 .regs()
414 .dma_enable()
415 .write(|w| w.dma_enable().bit(enable));
416 }
417
418 fn enable_interrupt(&self) {
419 self.aes.regs().int_ena().write(|w| w.int_ena().set_bit());
420 }
421
422 fn set_cipher_mode(&self, mode: CipherMode) {
423 self.aes
424 .regs()
425 .block_mode()
426 .modify(|_, w| unsafe { w.block_mode().bits(mode as u8) });
427
428 if mode == CipherMode::Ctr {
429 self.aes
430 .regs()
431 .inc_sel()
432 .modify(|_, w| w.inc_sel().clear_bit());
433 }
434 }
435
436 fn start_transform(&self) {
437 self.aes.write_start();
438 }
439
440 fn finish_transform(&self) {
441 self.aes.regs().dma_exit().write(|w| w.dma_exit().set_bit());
442 self.enable_dma(false);
443 self.reset_aes();
444 }
445
446 fn set_num_block(&self, block: u32) {
447 self.aes
448 .regs()
449 .block_num()
450 .modify(|_, w| unsafe { w.block_num().bits(block) });
451 }
452 }
453
    /// An in-flight AES-DMA transform.
    ///
    /// The driver and both buffer views are held in `ManuallyDrop` so that
    /// `wait()` can take them apart exactly once; the `Drop` impl handles
    /// the case where the transfer is abandoned instead of awaited.
    #[instability::unstable]
    pub struct AesTransfer<'d, RX: DmaRxBuffer, TX: DmaTxBuffer> {
        aes_dma: ManuallyDrop<AesDma<'d>>,
        rx_view: ManuallyDrop<RX::View>,
        tx_view: ManuallyDrop<TX::View>,
    }
461
    impl<'d, RX: DmaRxBuffer, TX: DmaTxBuffer> AesTransfer<'d, RX, TX> {
        /// Returns `true` once the AES engine reports completion.
        ///
        /// NOTE(review): state value 2 is taken to mean "done" — confirm
        /// against the chip's technical reference manual.
        pub fn is_done(&self) -> bool {
            self.aes_dma.aes.regs().state().read().state().bits() == 2
        }

        /// Busy-waits for the transform to finish, tears the DMA transfer
        /// down, and returns the driver plus both buffers.
        pub fn wait(mut self) -> (AesDma<'d>, RX, TX) {
            while !self.is_done() {}

            // Stop both DMA halves before touching the engine.
            self.aes_dma.channel.rx.stop_transfer();
            self.aes_dma.channel.tx.stop_transfer();

            self.aes_dma.finish_transform();

            let (aes_dma, rx_view, tx_view) = unsafe {
                // SAFETY: each field is taken exactly once, and the
                // `forget` below prevents `Drop` from taking or dropping
                // them a second time.
                let aes_dma = ManuallyDrop::take(&mut self.aes_dma);
                let rx_view = ManuallyDrop::take(&mut self.rx_view);
                let tx_view = ManuallyDrop::take(&mut self.tx_view);
                core::mem::forget(self);
                (aes_dma, rx_view, tx_view)
            };

            // Reassemble the caller-owned buffers from their DMA views.
            (aes_dma, RX::from_view(rx_view), TX::from_view(tx_view))
        }

        /// Shared access to the output buffer's DMA view.
        pub fn rx_view(&self) -> &RX::View {
            &self.rx_view
        }

        /// Exclusive access to the output buffer's DMA view.
        pub fn rx_view_mut(&mut self) -> &mut RX::View {
            &mut self.rx_view
        }

        /// Shared access to the input buffer's DMA view.
        pub fn tx_view(&self) -> &TX::View {
            &self.tx_view
        }

        /// Exclusive access to the input buffer's DMA view.
        pub fn tx_view_mut(&mut self) -> &mut TX::View {
            &mut self.tx_view
        }
    }
511
    impl<RX: DmaRxBuffer, TX: DmaTxBuffer> Drop for AesTransfer<'_, RX, TX> {
        // Runs only when the transfer is abandoned: `wait()` forgets `self`
        // before this destructor could fire.
        fn drop(&mut self) {
            // Abort the in-flight transfer before releasing anything.
            self.aes_dma.channel.rx.stop_transfer();
            self.aes_dma.channel.tx.stop_transfer();

            // SAFETY: `drop`/`take` are each called exactly once here, and
            // this is the only place they run for an abandoned transfer.
            unsafe {
                ManuallyDrop::drop(&mut self.aes_dma);
            }
            let rx_view = unsafe { ManuallyDrop::take(&mut self.rx_view) };
            let tx_view = unsafe { ManuallyDrop::take(&mut self.tx_view) };
            // Rebuild the buffers from their views so their destructors run.
            let _ = RX::from_view(rx_view);
            let _ = TX::from_view(tx_view);
        }
    }
529}