
drivers: crypto: cc23x0: Add support for DMA mode

Two DMA channels are assigned to AES channels A and B, respectively.
Each AES channel (A/B) exposes an interface to control the conditions
that generate requests on its DMA channel: the trigger condition, the
read/write address, and the DMA done action.
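
For reference, the per-operation flow in DMA mode is sketched below in
condensed form (names refer to the driver code in this patch; the
AESSetupDMA flags shown are the ECB case, other modes use different
trigger/address selections, and error handling is omitted):

/* CHA feeds plaintext into the AES buffer registers when the engine
 * starts; CHB drains ciphertext from the text registers when it is done.
 */
AESSetupDMA(AES_DMA_ADRCHA_BUF0 |	/* CHA write address */
	    AES_DMA_TRGCHA_AESSTART |	/* CHA trigger condition */
	    AES_DMA_ADRCHB_TXT0 |	/* CHB read address */
	    AES_DMA_TRGCHB_AESDONE |	/* CHB trigger condition */
	    AES_DMA_DONEACT_GATE_TRGAES_ON_CHA);	/* DMA done action */
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
ret = dma_config(cfg->dma_dev, cfg->dma_channel_b, &dma_cfg_chb);
dma_start(cfg->dma_dev, cfg->dma_channel_a);
dma_start(cfg->dma_dev, cfg->dma_channel_b);
AESSetTrigger(AES_TRG_DMACHA);	/* kick off the AES operation */
ret = k_sem_take(&data->chb_done, CRYPTO_CC23_OP_TIMEOUT_DMA(pkt->in_len));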

Signed-off-by: Julien Panis <jpanis@baylibre.com>
commit e0f02d93a6 (Julien Panis, committed by Dan Kalowsky)
Changed files:
  drivers/crypto/Kconfig.cc23x0 (+9)
  drivers/crypto/crypto_cc23x0.c (+389)
  dts/bindings/crypto/ti,cc23x0-aes.yaml (+27)

drivers/crypto/Kconfig.cc23x0 (+9)

@@ -13,3 +13,12 @@ config CRYPTO_CC23X0
- ECB (Electronic Code Book) encryption only (decryption not supported by the hardware)
- CTR (Counter)
- CCM (CTR with CBC-MAC)
config CRYPTO_CC23X0_DMA
bool "DMA support for TI CC23X0 AES accelerator devices"
depends on CRYPTO_CC23X0
select DMA
help
Enable DMA-driven transactions for the AES peripheral. DMA-driven
mode offloads data transfer from the CPU and requires fewer
interrupts to handle AES operations.
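
A minimal prj.conf sketch for enabling this mode (an assumption for
illustration; it also requires the board devicetree to provide the
dmas/dma-names properties described in the binding below):

CONFIG_CRYPTO=y
CONFIG_CRYPTO_CC23X0=y
CONFIG_CRYPTO_CC23X0_DMA=y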

drivers/crypto/crypto_cc23x0.c (+389)

@@ -11,6 +11,7 @@ LOG_MODULE_REGISTER(crypto_cc23x0, CONFIG_CRYPTO_LOG_LEVEL);
#include <zephyr/crypto/crypto.h>
#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/irq.h>
#include <zephyr/kernel.h>
#include <zephyr/sys/util.h>
@@ -20,11 +21,11 @@ LOG_MODULE_REGISTER(crypto_cc23x0, CONFIG_CRYPTO_LOG_LEVEL);
#include <driverlib/aes.h>
#include <driverlib/clkctl.h>
#include <inc/hw_memmap.h>
#define CRYPTO_CC23_CAP (CAP_RAW_KEY | CAP_SEPARATE_IO_BUFS | \
CAP_SYNC_OPS | CAP_NO_IV_PREFIX)
#define CRYPTO_CC23_INT_MASK AES_IMASK_AESDONE
/* CCM mode: see https://datatracker.ietf.org/doc/html/rfc3610 for reference */
#define CCM_CC23_MSG_LEN_SIZE_MIN 2
#define CCM_CC23_MSG_LEN_SIZE_MAX 8
@@ -48,11 +49,35 @@ LOG_MODULE_REGISTER(crypto_cc23x0, CONFIG_CRYPTO_LOG_LEVEL);
* processing 2 columns/cycle, completing 10 rounds in 20 cycles. With three cycles
* of pre-processing, the execution/encryption time is 23 cycles.
*/
#define CRYPTO_CC23_OP_TIMEOUT K_CYC(23 << 1)
#define CRYPTO_CC23_BLK_PROC_CYC 23
#define CRYPTO_CC23_BLK_PROC_TIMEOUT (CRYPTO_CC23_BLK_PROC_CYC << 1)
#define CRYPTO_CC23_OP_TIMEOUT K_CYC(CRYPTO_CC23_BLK_PROC_TIMEOUT)
#ifdef CONFIG_CRYPTO_CC23X0_DMA
#define CRYPTO_CC23_OP_TIMEOUT_DMA(len) \
K_CYC(CRYPTO_CC23_BLK_PROC_TIMEOUT * ((len) / AES_BLOCK_SIZE))
#define CRYPTO_CC23_IS_INVALID_DATA_LEN_DMA(len) ((len) % AES_BLOCK_SIZE)
#define CRYPTO_CC23_REG_GET(offset) (AES_BASE + (offset))
struct crypto_cc23x0_config {
const struct device *dma_dev;
uint8_t dma_channel_a;
uint8_t dma_trigsrc_a;
uint8_t dma_channel_b;
uint8_t dma_trigsrc_b;
};
#endif
struct crypto_cc23x0_data {
struct k_mutex device_mutex;
#ifdef CONFIG_CRYPTO_CC23X0_DMA
struct k_sem cha_done;
struct k_sem chb_done;
#else
struct k_sem aes_done;
#endif
};
static void crypto_cc23x0_isr(const struct device *dev)
@@ -62,15 +87,32 @@ static void crypto_cc23x0_isr(const struct device *dev)
status = AESGetMaskedInterruptStatus();
#ifdef CONFIG_CRYPTO_CC23X0_DMA
if (status & AES_IMASK_CHADONE) {
k_sem_give(&data->cha_done);
} else if (status & AES_IMASK_CHBDONE) {
k_sem_give(&data->chb_done);
}
#else
if (status & AES_IMASK_AESDONE) {
k_sem_give(&data->aes_done);
}
#endif
AESClearInterrupt(status);
}
static void crypto_cc23x0_cleanup(void)
static void crypto_cc23x0_cleanup(const struct device *dev)
{
#ifdef CONFIG_CRYPTO_CC23X0_DMA
const struct crypto_cc23x0_config *cfg = dev->config;
dma_stop(cfg->dma_dev, cfg->dma_channel_b);
dma_stop(cfg->dma_dev, cfg->dma_channel_a);
AESDisableDMA();
#else
ARG_UNUSED(dev);
#endif
AESClearAUTOCFGTrigger();
AESClearAUTOCFGBusHalt();
AESClearTXTAndBUF();
@@ -80,9 +122,62 @@ static int crypto_cc23x0_ecb_encrypt(struct cipher_ctx *ctx, struct cipher_pkt *
{
const struct device *dev = ctx->device;
struct crypto_cc23x0_data *data = dev->data;
int in_bytes_processed = 0;
int out_bytes_processed = 0;
int ret;
#ifdef CONFIG_CRYPTO_CC23X0_DMA
uint32_t int_flags = AES_IMASK_CHBDONE;
const struct crypto_cc23x0_config *cfg = dev->config;
struct dma_block_config block_cfg_cha = {
.source_address = (uint32_t)(pkt->in_buf),
.dest_address = CRYPTO_CC23_REG_GET(AES_O_DMACHA),
.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
.block_size = pkt->in_len,
};
struct dma_config dma_cfg_cha = {
.dma_slot = cfg->dma_trigsrc_a,
.channel_direction = MEMORY_TO_PERIPHERAL,
.block_count = 1,
.head_block = &block_cfg_cha,
.source_data_size = sizeof(uint32_t),
.dest_data_size = sizeof(uint32_t),
.source_burst_length = AES_BLOCK_SIZE,
.dma_callback = NULL,
.user_data = NULL,
};
struct dma_block_config block_cfg_chb = {
.source_address = CRYPTO_CC23_REG_GET(AES_O_DMACHB),
.dest_address = (uint32_t)(pkt->out_buf),
.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
.block_size = pkt->in_len,
};
struct dma_config dma_cfg_chb = {
.dma_slot = cfg->dma_trigsrc_b,
.channel_direction = PERIPHERAL_TO_MEMORY,
.block_count = 1,
.head_block = &block_cfg_chb,
.source_data_size = sizeof(uint32_t),
.dest_data_size = sizeof(uint32_t),
.source_burst_length = AES_BLOCK_SIZE,
.dma_callback = NULL,
.user_data = NULL,
};
#else
uint32_t int_flags = AES_IMASK_AESDONE;
int in_bytes_processed = 0;
#endif
#ifdef CONFIG_CRYPTO_CC23X0_DMA
if (CRYPTO_CC23_IS_INVALID_DATA_LEN_DMA(pkt->in_len)) {
LOG_ERR("In DMA mode, data length must be a multiple of %d", AES_BLOCK_SIZE);
return -EINVAL;
}
#endif
if (pkt->out_buf_max < ROUND_UP(pkt->in_len, AES_BLOCK_SIZE)) {
LOG_ERR("Output buffer too small");
@@ -91,6 +186,9 @@ static int crypto_cc23x0_ecb_encrypt(struct cipher_ctx *ctx, struct cipher_pkt *
k_mutex_lock(&data->device_mutex, K_FOREVER);
/* Enable interrupts */
AESSetIMASK(int_flags);
/* Load key */
AESWriteKEY(ctx->key.bit_stream);
@@ -99,6 +197,42 @@ static int crypto_cc23x0_ecb_encrypt(struct cipher_ctx *ctx, struct cipher_pkt *
AES_AUTOCFG_TRGAES_RDTXT3 |
AES_AUTOCFG_TRGAES_WRBUF3S);
#ifdef CONFIG_CRYPTO_CC23X0_DMA
/* Setup the DMA for the AES engine */
AESSetupDMA(AES_DMA_ADRCHA_BUF0 |
AES_DMA_TRGCHA_AESSTART |
AES_DMA_ADRCHB_TXT0 |
AES_DMA_TRGCHB_AESDONE |
(pkt->in_len == AES_BLOCK_SIZE ?
AES_DMA_DONEACT_GATE_TRGAES_ON_CHA :
AES_DMA_DONEACT_GATE_TRGAES_ON_CHA_DEL));
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
if (ret) {
goto cleanup;
}
ret = dma_config(cfg->dma_dev, cfg->dma_channel_b, &dma_cfg_chb);
if (ret) {
goto cleanup;
}
dma_start(cfg->dma_dev, cfg->dma_channel_a);
dma_start(cfg->dma_dev, cfg->dma_channel_b);
/* Trigger AES operation */
AESSetTrigger(AES_TRG_DMACHA);
/* Wait for AES operation completion */
ret = k_sem_take(&data->chb_done, CRYPTO_CC23_OP_TIMEOUT_DMA(pkt->in_len));
if (ret) {
goto cleanup;
}
LOG_DBG("AES operation completed");
out_bytes_processed = pkt->in_len;
#else
/* Write first block of input to trigger encryption */
AESWriteBUF(pkt->in_buf);
in_bytes_processed += AES_BLOCK_SIZE;
@@ -128,9 +262,10 @@ static int crypto_cc23x0_ecb_encrypt(struct cipher_ctx *ctx, struct cipher_pkt *
AESReadTXT(&pkt->out_buf[out_bytes_processed]);
out_bytes_processed += AES_BLOCK_SIZE;
} while (out_bytes_processed < pkt->in_len);
#endif
cleanup:
crypto_cc23x0_cleanup();
crypto_cc23x0_cleanup(dev);
k_mutex_unlock(&data->device_mutex);
pkt->out_len = out_bytes_processed;
@@ -143,12 +278,65 @@ static int crypto_cc23x0_ctr(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uin
struct crypto_cc23x0_data *data = dev->data;
uint32_t ctr_len = ctx->mode_params.ctr_info.ctr_len >> 3;
uint8_t ctr[AES_BLOCK_SIZE] = { 0 };
uint8_t last_buf[AES_BLOCK_SIZE] = { 0 };
int bytes_remaining = pkt->in_len;
int bytes_processed = 0;
int block_size;
int iv_len;
int ret;
#ifdef CONFIG_CRYPTO_CC23X0_DMA
uint32_t int_flags = AES_IMASK_CHBDONE;
const struct crypto_cc23x0_config *cfg = dev->config;
struct dma_block_config block_cfg_cha = {
.source_address = (uint32_t)(pkt->in_buf),
.dest_address = CRYPTO_CC23_REG_GET(AES_O_DMACHA),
.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
.block_size = pkt->in_len,
};
struct dma_config dma_cfg_cha = {
.dma_slot = cfg->dma_trigsrc_a,
.channel_direction = MEMORY_TO_PERIPHERAL,
.block_count = 1,
.head_block = &block_cfg_cha,
.source_data_size = sizeof(uint32_t),
.dest_data_size = sizeof(uint32_t),
.source_burst_length = AES_BLOCK_SIZE,
.dma_callback = NULL,
.user_data = NULL,
};
struct dma_block_config block_cfg_chb = {
.source_address = CRYPTO_CC23_REG_GET(AES_O_DMACHB),
.dest_address = (uint32_t)(pkt->out_buf),
.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
.block_size = pkt->in_len,
};
struct dma_config dma_cfg_chb = {
.dma_slot = cfg->dma_trigsrc_b,
.channel_direction = PERIPHERAL_TO_MEMORY,
.block_count = 1,
.head_block = &block_cfg_chb,
.source_data_size = sizeof(uint32_t),
.dest_data_size = sizeof(uint32_t),
.source_burst_length = AES_BLOCK_SIZE,
.dma_callback = NULL,
.user_data = NULL,
};
#else
uint32_t int_flags = AES_IMASK_AESDONE;
uint8_t last_buf[AES_BLOCK_SIZE] = { 0 };
int bytes_remaining = pkt->in_len;
int block_size;
#endif
#ifdef CONFIG_CRYPTO_CC23X0_DMA
if (CRYPTO_CC23_IS_INVALID_DATA_LEN_DMA(pkt->in_len)) {
LOG_ERR("In DMA mode, data length must be a multiple of %d", AES_BLOCK_SIZE);
return -EINVAL;
}
#endif
if (pkt->out_buf_max < ROUND_UP(pkt->in_len, AES_BLOCK_SIZE)) {
LOG_ERR("Output buffer too small");
@@ -157,6 +345,9 @@ static int crypto_cc23x0_ctr(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uin
k_mutex_lock(&data->device_mutex, K_FOREVER);
/* Enable interrupts */
AESSetIMASK(int_flags);
/* Load key */
AESWriteKEY(ctx->key.bit_stream);
@@ -167,12 +358,44 @@ static int crypto_cc23x0_ctr(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uin
AES_AUTOCFG_CTRENDN_BIGENDIAN |
AES_AUTOCFG_CTRSIZE_CTR128);
#ifdef CONFIG_CRYPTO_CC23X0_DMA
/* Setup the DMA for the AES engine */
AESSetupDMA(AES_DMA_ADRCHA_TXTX0 |
AES_DMA_TRGCHA_AESDONE |
AES_DMA_ADRCHB_TXT0 |
AES_DMA_TRGCHB_WRTXT3);
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
if (ret) {
goto cleanup;
}
ret = dma_config(cfg->dma_dev, cfg->dma_channel_b, &dma_cfg_chb);
if (ret) {
goto cleanup;
}
dma_start(cfg->dma_dev, cfg->dma_channel_a);
dma_start(cfg->dma_dev, cfg->dma_channel_b);
#endif
/* Write the counter value to the AES engine to trigger first encryption */
iv_len = (ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_CCM) ?
AES_BLOCK_SIZE : (ctx->keylen - ctr_len);
memcpy(ctr, iv, iv_len);
AESWriteBUF(ctr);
#ifdef CONFIG_CRYPTO_CC23X0_DMA
/* Wait for AES operation completion */
ret = k_sem_take(&data->chb_done, CRYPTO_CC23_OP_TIMEOUT_DMA(pkt->in_len));
if (ret) {
goto cleanup;
}
LOG_DBG("AES operation completed");
bytes_processed = pkt->in_len;
#else
do {
/* Wait for AES operation completion */
ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
@@ -207,9 +430,10 @@ static int crypto_cc23x0_ctr(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uin
bytes_processed += block_size;
bytes_remaining -= block_size;
} while (bytes_remaining > 0);
#endif
cleanup:
crypto_cc23x0_cleanup();
crypto_cc23x0_cleanup(dev);
k_mutex_unlock(&data->device_mutex);
pkt->out_len = bytes_processed;
@@ -222,11 +446,44 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
const struct device *dev = ctx->device;
struct crypto_cc23x0_data *data = dev->data;
uint32_t iv[AES_BLOCK_SIZE_WORDS] = { 0 };
int bytes_processed = 0;
int ret;
#ifdef CONFIG_CRYPTO_CC23X0_DMA
uint32_t int_flags = AES_IMASK_CHADONE;
const struct crypto_cc23x0_config *cfg = dev->config;
struct dma_block_config block_cfg_cha = {
.source_address = (uint32_t)b0,
.dest_address = CRYPTO_CC23_REG_GET(AES_O_DMACHA),
.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
.block_size = AES_BLOCK_SIZE,
};
struct dma_config dma_cfg_cha = {
.dma_slot = cfg->dma_trigsrc_a,
.channel_direction = MEMORY_TO_PERIPHERAL,
.block_count = 1,
.head_block = &block_cfg_cha,
.source_data_size = sizeof(uint32_t),
.dest_data_size = sizeof(uint32_t),
.source_burst_length = AES_BLOCK_SIZE,
.dma_callback = NULL,
.user_data = NULL,
};
#else
uint32_t int_flags = AES_IMASK_AESDONE;
uint8_t last_buf[AES_BLOCK_SIZE] = { 0 };
int bytes_remaining = pkt->in_len;
int bytes_processed = 0;
int block_size;
int ret;
#endif
#ifdef CONFIG_CRYPTO_CC23X0_DMA
if (CRYPTO_CC23_IS_INVALID_DATA_LEN_DMA(pkt->in_len)) {
LOG_ERR("In DMA mode, data length must be a multiple of %d", AES_BLOCK_SIZE);
return -EINVAL;
}
#endif
if (pkt->out_buf_max < AES_BLOCK_SIZE) {
LOG_ERR("Output buffer too small");
@@ -235,6 +492,9 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
k_mutex_lock(&data->device_mutex, K_FOREVER);
/* Enable interrupts */
AESSetIMASK(int_flags);
/* Load key */
AESWriteKEY(ctx->key.bit_stream);
@@ -247,6 +507,27 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
AESWriteIV32(iv);
if (b0) {
#ifdef CONFIG_CRYPTO_CC23X0_DMA
/* Setup the DMA for the AES engine */
AESSetupDMA(AES_DMA_ADRCHA_BUF0 |
AES_DMA_TRGCHA_AESSTART);
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
if (ret) {
goto out;
}
dma_start(cfg->dma_dev, cfg->dma_channel_a);
/* Trigger AES operation */
AESSetTrigger(AES_TRG_DMACHA);
/* Wait for AES operation completion */
ret = k_sem_take(&data->cha_done, CRYPTO_CC23_OP_TIMEOUT_DMA(AES_BLOCK_SIZE));
if (ret) {
goto out;
}
#else
/* Load input block */
AESWriteBUF(b0);
@@ -255,11 +536,30 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
if (ret) {
goto out;
}
#endif
LOG_DBG("AES operation completed (block 0)");
}
if (b1) {
#ifdef CONFIG_CRYPTO_CC23X0_DMA
block_cfg_cha.source_address = (uint32_t)b1;
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
if (ret) {
goto out;
}
dma_start(cfg->dma_dev, cfg->dma_channel_a);
/* Trigger AES operation */
AESSetTrigger(AES_TRG_DMACHA);
/* Wait for AES operation completion */
ret = k_sem_take(&data->cha_done, CRYPTO_CC23_OP_TIMEOUT_DMA(AES_BLOCK_SIZE));
if (ret) {
goto out;
}
#else
/* Load input block */
AESWriteBUF(b1);
@@ -268,10 +568,34 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
if (ret) {
goto out;
}
#endif
LOG_DBG("AES operation completed (block 1)");
}
#ifdef CONFIG_CRYPTO_CC23X0_DMA
block_cfg_cha.source_address = (uint32_t)(pkt->in_buf);
block_cfg_cha.block_size = pkt->in_len;
ret = dma_config(cfg->dma_dev, cfg->dma_channel_a, &dma_cfg_cha);
if (ret) {
goto out;
}
dma_start(cfg->dma_dev, cfg->dma_channel_a);
/* Trigger AES operation */
AESSetTrigger(AES_TRG_DMACHA);
/* Wait for AES operation completion */
ret = k_sem_take(&data->cha_done, CRYPTO_CC23_OP_TIMEOUT_DMA(pkt->in_len));
if (ret) {
goto out;
}
LOG_DBG("AES operation completed (data)");
bytes_processed = pkt->in_len;
#else
do {
/* Load input block */
if (bytes_remaining >= AES_BLOCK_SIZE) {
@@ -294,12 +618,13 @@ static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
bytes_processed += block_size;
bytes_remaining -= block_size;
} while (bytes_remaining > 0);
#endif
/* Read tag */
AESReadTag(pkt->out_buf);
out:
crypto_cc23x0_cleanup();
crypto_cc23x0_cleanup(dev);
k_mutex_unlock(&data->device_mutex);
pkt->out_len = bytes_processed;
@@ -652,6 +977,9 @@ static int crypto_cc23x0_query_caps(const struct device *dev)
static int crypto_cc23x0_init(const struct device *dev)
{
#ifdef CONFIG_CRYPTO_CC23X0_DMA
const struct crypto_cc23x0_config *cfg = dev->config;
#endif
struct crypto_cc23x0_data *data = dev->data;
IRQ_CONNECT(DT_INST_IRQN(0),
@@ -663,10 +991,18 @@ static int crypto_cc23x0_init(const struct device *dev)
CLKCTLEnable(CLKCTL_BASE, CLKCTL_LAES);
AESSetIMASK(CRYPTO_CC23_INT_MASK);
k_mutex_init(&data->device_mutex);
#ifdef CONFIG_CRYPTO_CC23X0_DMA
k_sem_init(&data->cha_done, 0, 1);
k_sem_init(&data->chb_done, 0, 1);
if (!device_is_ready(cfg->dma_dev)) {
return -ENODEV;
}
#else
k_sem_init(&data->aes_done, 0, 1);
#endif
return 0;
}
@@ -679,6 +1015,24 @@ static DEVICE_API(crypto, crypto_enc_funcs) = {
static struct crypto_cc23x0_data crypto_cc23x0_dev_data;
#ifdef CONFIG_CRYPTO_CC23X0_DMA
static const struct crypto_cc23x0_config crypto_cc23x0_dev_config = {
.dma_dev = DEVICE_DT_GET(TI_CC23X0_DT_INST_DMA_CTLR(0, cha)),
.dma_channel_a = TI_CC23X0_DT_INST_DMA_CHANNEL(0, cha),
.dma_trigsrc_a = TI_CC23X0_DT_INST_DMA_TRIGSRC(0, cha),
.dma_channel_b = TI_CC23X0_DT_INST_DMA_CHANNEL(0, chb),
.dma_trigsrc_b = TI_CC23X0_DT_INST_DMA_TRIGSRC(0, chb),
};
DEVICE_DT_INST_DEFINE(0,
crypto_cc23x0_init,
NULL,
&crypto_cc23x0_dev_data,
&crypto_cc23x0_dev_config,
POST_KERNEL,
CONFIG_CRYPTO_INIT_PRIORITY,
&crypto_enc_funcs);
#else
DEVICE_DT_INST_DEFINE(0,
crypto_cc23x0_init,
NULL,
@@ -687,3 +1041,4 @@ DEVICE_DT_INST_DEFINE(0,
POST_KERNEL,
CONFIG_CRYPTO_INIT_PRIORITY,
&crypto_enc_funcs);
#endif
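
For completeness, a minimal caller sketch against the generic Zephyr
crypto API (not part of this patch; the key value is a placeholder, and
the 16-byte buffers satisfy the DMA-mode block-multiple check):

#include <zephyr/crypto/crypto.h>
#include <zephyr/device.h>

static int ecb_encrypt_one_block(void)
{
	/* Resolved from the ti,cc23x0-aes node in the board devicetree */
	const struct device *const aes_dev = DEVICE_DT_GET_ONE(ti_cc23x0_aes);
	uint8_t key[16] = { 0 };	/* placeholder AES-128 key */
	uint8_t plaintext[16] = { 0 };
	uint8_t ciphertext[16];
	struct cipher_ctx ctx = {
		.keylen = sizeof(key),
		.key.bit_stream = key,
		.flags = CAP_RAW_KEY | CAP_SYNC_OPS | CAP_SEPARATE_IO_BUFS,
	};
	struct cipher_pkt pkt = {
		.in_buf = plaintext,
		.in_len = sizeof(plaintext),	/* multiple of AES_BLOCK_SIZE */
		.out_buf = ciphertext,
		.out_buf_max = sizeof(ciphertext),
	};
	int ret;

	if (!device_is_ready(aes_dev)) {
		return -ENODEV;
	}

	ret = cipher_begin_session(aes_dev, &ctx, CRYPTO_CIPHER_ALGO_AES,
				   CRYPTO_CIPHER_MODE_ECB, CRYPTO_CIPHER_OP_ENCRYPT);
	if (ret) {
		return ret;
	}

	ret = cipher_block_op(&ctx, &pkt);
	cipher_free_session(aes_dev, &ctx);

	return ret;
}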

dts/bindings/crypto/ti,cc23x0-aes.yaml (+27)

@@ -13,3 +13,30 @@ properties:
interrupts:
required: true
dmas:
description: |
Optional CHA & CHB DMA specifiers. Each specifier consists of a phandle
to the DMA controller, the channel number, and the peripheral trigger
source.
Example for channels 4/5 with aestrga/aestrgb trigger sources:
dmas = <&dma 4 3>, <&dma 5 4>;
For ECB mode:
- CHA moves plaintext into the buffer registers when the AES operation starts,
- CHB moves ciphertext to memory when the AES operation completes.
For CTR (also used by CCM):
- CHA moves plaintext into the text XOR registers when the AES operation starts,
- CHB moves ciphertext to memory after CHA has written the last text XOR register.
For CBC-MAC (also used by CCM):
- CHA moves plaintext into the buffer registers when the AES operation starts,
- CHB is not used.
dma-names:
description: |
Required if the dmas property exists. These must be "cha" and "chb"
to match the dmas property.
Example:
dma-names = "cha", "chb";
