
drivers: adc: stm32: dma support

Sample multiple ADC channels at once using DMA.

Only verified to work on nucleo_h743zi.

Signed-off-by: Hein Wessels <heinwessels93@gmail.com>
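
For context, a minimal, hypothetical usage sketch follows: with CONFIG_ADC_STM32_DMA=y and a DMA channel named "dmamux" in the ADC's devicetree node (matching the ADC_DMA_CHANNEL(index, dmamux, ...) instantiation in the driver below), a single adc_read() call fills one buffer with all requested channels via DMA. The node labels, DMA cells, channel numbers and resolution in this sketch are assumptions for illustration only, not part of the patch; on STM32H7 the buffer must additionally live in a devicetree SRAM region marked zephyr,memory-region-mpu = "RAM_NOCACHE", which the new buffer check enforces.

    /*
     * Hypothetical devicetree overlay (node labels and DMA cells are placeholders):
     *
     *   &adc1 {
     *       dmas = <&dmamux1 0 9 0x480>;
     *       dma-names = "dmamux";
     *   };
     */
    #include <errno.h>
    #include <stdint.h>
    #include <zephyr/device.h>
    #include <zephyr/devicetree.h>
    #include <zephyr/drivers/adc.h>
    #include <zephyr/sys/util.h>

    /* On STM32H7 this buffer must be placed in a non-cacheable SRAM region
     * defined in the devicetree (placement mechanism not shown here).
     */
    static uint16_t samples[2];

    int read_two_channels(void)
    {
            const struct device *adc = DEVICE_DT_GET(DT_NODELABEL(adc1));
            const struct adc_sequence seq = {
                    .channels    = BIT(3) | BIT(4),  /* two channels sampled in one sequence */
                    .buffer      = samples,
                    .buffer_size = sizeof(samples),
                    .resolution  = 12,
            };

            if (!device_is_ready(adc)) {
                    return -ENODEV;
            }

            /* Channels 3 and 4 are assumed to have been configured already via
             * adc_channel_setup(). This single call starts the regular sequence
             * and lets the DMA engine copy both results into samples[].
             */
            return adc_read(adc, &seq);
    }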
Commit a2dd232410, authored by Hein Wessels, committed by Carles Cufí

drivers/adc/Kconfig.stm32 (15 lines changed)

@@ -4,6 +4,7 @@
# Copyright (c) 2019 Endre Karlson
# Copyright (c) 2019 Song Qiang <songqiang1304521@gmail.com>
# Copyright (c) 2021 Marius Scholtz, RIC Electronics
# Copyright (c) 2022 Hein Wessels, Nobleo Technology
# SPDX-License-Identifier: Apache-2.0
config ADC_STM32
@@ -13,12 +14,24 @@ config ADC_STM32
help
Enable the driver implementation for the stm32xx ADC
if ADC_STM32
config ADC_STM32_DMA
bool "STM32 MCU ADC DMA Support"
select DMA
help
Enable the ADC DMA mode for ADC instances that have DMA channels
defined in their devicetree node.
if SOC_SERIES_STM32F2X || SOC_SERIES_STM32F4X || SOC_SERIES_STM32F7X || SOC_SERIES_STM32G4X
config ADC_STM32_SHARED_IRQS
bool "STM32 ADC shared interrupts"
default y
depends on ADC_STM32
depends on ADC_STM32 && !ADC_STM32_DMA
help
Enable the use of shared interrupts for families that only have a single interrupt for all ADCs
endif
endif

drivers/adc/adc_stm32.c (245 lines changed)

@@ -24,6 +24,13 @@
#include <stm32_ll_pwr.h>
#endif /* CONFIG_SOC_SERIES_STM32U5X */
#ifdef CONFIG_ADC_STM32_DMA
#include <zephyr/drivers/dma/dma_stm32.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/toolchain/common.h>
#include <stm32_ll_dma.h>
#endif
#define ADC_CONTEXT_USES_KERNEL_TIMER
#define ADC_CONTEXT_ENABLE_ON_COMPLETE
#include "adc_context.h"
@@ -246,6 +253,18 @@ static const uint32_t table_samp_time[] = {
/* External channels (maximum). */
#define STM32_CHANNEL_COUNT 20
#ifdef CONFIG_ADC_STM32_DMA
struct stream {
const struct device *dma_dev;
uint32_t channel;
struct dma_config dma_cfg;
struct dma_block_config dma_blk_cfg;
uint8_t priority;
bool src_addr_increment;
bool dst_addr_increment;
};
#endif /* CONFIG_ADC_STM32_DMA */
struct adc_stm32_data {
struct adc_context ctx;
const struct device *dev;
@@ -261,6 +280,11 @@ struct adc_stm32_data {
defined(CONFIG_SOC_SERIES_STM32L0X)
int8_t acq_time_index;
#endif
#ifdef CONFIG_ADC_STM32_DMA
volatile int dma_error;
struct stream dma;
#endif
};
struct adc_stm32_cfg {
@@ -277,7 +301,108 @@ struct adc_stm32_cfg {
static bool init_irq = true;
#endif
static int check_buffer_size(const struct adc_sequence *sequence,
#ifdef CONFIG_ADC_STM32_DMA
static int adc_stm32_dma_start(const struct device *dev,
void *buffer, size_t channel_count)
{
const struct adc_stm32_cfg *config = dev->config;
ADC_TypeDef *adc = (ADC_TypeDef *)config->base;
struct adc_stm32_data *data = dev->data;
struct dma_block_config *blk_cfg;
int ret;
struct stream *dma = &data->dma;
blk_cfg = &dma->dma_blk_cfg;
/* prepare the block */
blk_cfg->block_size = channel_count * sizeof(int16_t);
/* Source and destination */
blk_cfg->source_address = (uint32_t)LL_ADC_DMA_GetRegAddr(adc, LL_ADC_DMA_REG_REGULAR_DATA);
blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
blk_cfg->source_reload_en = 0;
blk_cfg->dest_address = (uint32_t)buffer;
blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
blk_cfg->dest_reload_en = 0;
/* Manually set the FIFO threshold to 1/4 because the
* dmamux DTS entry does not contain fifo threshold
*/
blk_cfg->fifo_mode_control = 0;
/* direction is given by the DT */
dma->dma_cfg.head_block = blk_cfg;
dma->dma_cfg.user_data = data;
ret = dma_config(data->dma.dma_dev, data->dma.channel,
&dma->dma_cfg);
if (ret != 0) {
LOG_ERR("Problem setting up DMA: %d", ret);
return ret;
}
/* Allow ADC to create DMA request and set to one-shot mode,
* as implemented in HAL drivers, if applicable.
*/
#if defined(ADC_VER_V5_V90)
if (adc == ADC3) {
LL_ADC_REG_SetDMATransferMode(adc,
ADC3_CFGR_DMACONTREQ(LL_ADC_REG_DMA_TRANSFER_LIMITED));
LL_ADC_EnableDMAReq(adc);
} else {
LL_ADC_REG_SetDataTransferMode(adc,
ADC_CFGR_DMACONTREQ(LL_ADC_REG_DMA_TRANSFER_LIMITED));
}
#elif defined(ADC_VER_V5_X)
LL_ADC_REG_SetDataTransferMode(adc, LL_ADC_REG_DMA_TRANSFER_LIMITED);
#endif
data->dma_error = 0;
ret = dma_start(data->dma.dma_dev, data->dma.channel);
if (ret != 0) {
LOG_ERR("Problem starting DMA: %d", ret);
return ret;
}
LOG_DBG("DMA started");
return ret;
}
#endif /* CONFIG_ADC_STM32_DMA */
#if defined(CONFIG_ADC_STM32_DMA) && defined(CONFIG_SOC_SERIES_STM32H7X)
/* Returns true if given buffer is in a non-cacheable SRAM region.
* This is determined using the device tree, meaning the .nocache region won't work.
* The entire buffer must be in a single region.
* An example of how the SRAM region can be defined in the DTS:
* &sram4 {
* zephyr,memory-region-mpu = "RAM_NOCACHE";
* };
*/
static bool address_in_non_cacheable_sram(const uint16_t *buffer, const uint16_t size)
{
/* Default if no valid SRAM region found or buffer+size not located in a single region */
bool cachable = false;
#define IS_NON_CACHEABLE_REGION_FN(node_id) \
COND_CODE_1(DT_NODE_HAS_PROP(node_id, zephyr_memory_region_mpu), ({ \
const uint32_t region_start = DT_REG_ADDR(node_id); \
const uint32_t region_end = region_start + DT_REG_SIZE(node_id); \
if (((uint32_t)buffer >= region_start) && \
(((uint32_t)buffer + size) < region_end)) { \
cachable = strcmp(DT_PROP(node_id, zephyr_memory_region_mpu), \
"RAM_NOCACHE") == 0; \
} \
}), \
(EMPTY))
DT_FOREACH_STATUS_OKAY(mmio_sram, IS_NON_CACHEABLE_REGION_FN);
return cachable;
}
#endif /* defined(CONFIG_ADC_STM32_DMA) && defined(CONFIG_SOC_SERIES_STM32H7X) */
static int check_buffer(const struct adc_sequence *sequence,
uint8_t active_channels)
{
size_t needed_buffer_size;
@@ -294,6 +419,14 @@ static int check_buffer_size(const struct adc_sequence *sequence,
return -ENOMEM;
}
#if defined(CONFIG_ADC_STM32_DMA) && defined(CONFIG_SOC_SERIES_STM32H7X)
/* Buffer is forced to be in non-cacheable SRAM region to avoid cache maintenance */
if (!address_in_non_cacheable_sram(sequence->buffer, needed_buffer_size)) {
LOG_ERR("Supplied buffer is not in a non-cacheable region according to DTS.");
return -EINVAL;
}
#endif
return 0;
}
@@ -649,6 +782,43 @@ static void adc_stm32_teardown_channels(const struct device *dev)
adc_stm32_enable(adc);
}
#ifdef CONFIG_ADC_STM32_DMA
static void dma_callback(const struct device *dev, void *user_data,
uint32_t channel, int status)
{
/* user_data directly holds the ADC driver data */
struct adc_stm32_data *data = user_data;
const struct adc_stm32_cfg *config = dev->config;
ADC_TypeDef *adc = (ADC_TypeDef *)config->base;
LOG_DBG("dma callback");
if (channel == data->dma.channel) {
if (LL_ADC_IsActiveFlag_OVR(adc) || (status == 0)) {
data->samples_count = data->channel_count;
data->buffer += data->channel_count;
/* Stop the DMA engine, only to start it again when the callback returns
* ADC_ACTION_REPEAT or ADC_ACTION_CONTINUE, or if the number of samples
* has not been reached yet. Starting the DMA engine again is done
* within adc_context_start_sampling().
*/
dma_stop(data->dma.dma_dev, data->dma.channel);
LL_ADC_ClearFlag_OVR(adc);
/* No need to invalidate the cache because it's assumed that
* the address is in a non-cacheable SRAM region.
*/
adc_context_on_sampling_done(&data->ctx, dev);
} else {
LOG_ERR("DMA sampling complete, but DMA reported error %d", status);
data->dma_error = status;
LL_ADC_REG_StopConversion(adc);
dma_stop(data->dma.dma_dev, data->dma.channel);
adc_context_complete(&data->ctx, status);
}
}
}
#endif /* CONFIG_ADC_STM32_DMA */
static int start_read(const struct device *dev,
const struct adc_sequence *sequence)
{
@@ -814,7 +984,7 @@ static int start_read(const struct device *dev,
#endif
}
err = check_buffer_size(sequence, data->channel_count);
err = check_buffer(sequence, data->channel_count);
if (err) {
return err;
}
@@ -917,6 +1087,9 @@ static int start_read(const struct device *dev,
*/
adc_stm32_enable(adc);
LL_ADC_ClearFlag_OVR(adc);
#if !defined(CONFIG_ADC_STM32_DMA)
#if defined(CONFIG_SOC_SERIES_STM32F0X) || \
defined(STM32F3X_ADC_V1_1) || \
defined(CONFIG_SOC_SERIES_STM32L0X) || \
@@ -935,11 +1108,18 @@ static int start_read(const struct device *dev,
#else
LL_ADC_EnableIT_EOCS(adc);
#endif
#endif /* CONFIG_ADC_STM32_DMA */
/* This call will start the DMA */
adc_context_start_read(&data->ctx, sequence);
int result = adc_context_wait_for_completion(&data->ctx);
#ifdef CONFIG_ADC_STM32_DMA
/* check if there's anything wrong with dma start */
result = (data->dma_error ? data->dma_error : result);
#endif
return result;
}
@@ -950,6 +1130,9 @@ static void adc_context_start_sampling(struct adc_context *ctx)
data->repeat_buffer = data->buffer;
#ifdef CONFIG_ADC_STM32_DMA
adc_stm32_dma_start(data->dev, data->buffer, data->channel_count);
#endif
adc_stm32_start_conversion(data->dev);
}
@@ -964,6 +1147,7 @@ static void adc_context_update_buffer_pointer(struct adc_context *ctx,
}
}
#ifndef CONFIG_ADC_STM32_DMA
static void adc_stm32_isr(const struct device *dev)
{
struct adc_stm32_data *data = dev->data;
@@ -981,6 +1165,7 @@ static void adc_stm32_isr(const struct device *dev)
LOG_DBG("%s ISR triggered.", dev->name);
}
#endif /* !CONFIG_ADC_STM32_DMA */
static void adc_context_on_complete(struct adc_context *ctx, int status)
{
@@ -1147,7 +1332,7 @@ static int adc_stm32_init(const struct device *dev)
ADC_TypeDef *adc = (ADC_TypeDef *)config->base;
int err;
LOG_DBG("Initializing....");
LOG_DBG("Initializing %s", dev->name);
if (!device_is_ready(clk)) {
LOG_ERR("clock control device not ready");
@@ -1184,6 +1369,14 @@ static int adc_stm32_init(const struct device *dev)
LL_PWR_EnableVDDA();
#endif /* CONFIG_SOC_SERIES_STM32U5X */
#ifdef CONFIG_ADC_STM32_DMA
if ((data->dma.dma_dev != NULL) &&
!device_is_ready(data->dma.dma_dev)) {
LOG_ERR("%s device not ready", data->dma.dma_dev->name);
return -ENODEV;
}
#endif
#if defined(CONFIG_SOC_SERIES_STM32L4X) || \
defined(CONFIG_SOC_SERIES_STM32L5X) || \
defined(CONFIG_SOC_SERIES_STM32WBX) || \
@@ -1402,9 +1595,47 @@ static void adc_stm32_irq_init(void)
#define ADC_STM32_IRQ_CONFIG(index)
#define ADC_STM32_IRQ_FUNC(index) \
.irq_cfg_func = adc_stm32_irq_init, \
.irq_cfg_func = adc_stm32_irq_init,
#define ADC_DMA_CHANNEL(id, dir, DIR, src, dest)
#elif defined(CONFIG_ADC_STM32_DMA) /* !CONFIG_ADC_STM32_SHARED_IRQS */
#define ADC_DMA_CHANNEL_INIT(index, name, dir_cap, src_dev, dest_dev) \
.dma = { \
.dma_dev = DEVICE_DT_GET(STM32_DMA_CTLR(index, name)), \
.channel = DT_INST_DMAS_CELL_BY_NAME(index, name, channel), \
.dma_cfg = { \
.dma_slot = STM32_DMA_SLOT(index, name, slot), \
.channel_direction = STM32_DMA_CONFIG_DIRECTION( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
.source_data_size = STM32_DMA_CONFIG_##src_dev##_DATA_SIZE( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
.dest_data_size = STM32_DMA_CONFIG_##dest_dev##_DATA_SIZE( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
.source_burst_length = 1, /* SINGLE transfer */ \
.dest_burst_length = 1, /* SINGLE transfer */ \
.channel_priority = STM32_DMA_CONFIG_PRIORITY( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
.dma_callback = dma_callback, \
.block_count = 2, \
}, \
.src_addr_increment = STM32_DMA_CONFIG_##src_dev##_ADDR_INC( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
.dst_addr_increment = STM32_DMA_CONFIG_##dest_dev##_ADDR_INC( \
STM32_DMA_CHANNEL_CONFIG(index, name)), \
}
#else
#define ADC_DMA_CHANNEL(id, dir, DIR, src, dest) \
COND_CODE_1(DT_INST_DMAS_HAS_NAME(id, dir), \
(ADC_DMA_CHANNEL_INIT(id, dir, DIR, src, dest)), \
(EMPTY))
#define ADC_STM32_IRQ_CONFIG(index) \
static void adc_stm32_cfg_func_##index(void){ EMPTY }
#define ADC_STM32_IRQ_FUNC(index) \
.irq_cfg_func = adc_stm32_cfg_func_##index,
#else /* CONFIG_ADC_STM32_DMA */
#define ADC_STM32_IRQ_CONFIG(index) \
static void adc_stm32_cfg_func_##index(void) \
@@ -1416,8 +1647,9 @@ static void adc_stm32_cfg_func_##index(void) \
}
#define ADC_STM32_IRQ_FUNC(index) \
.irq_cfg_func = adc_stm32_cfg_func_##index,
#define ADC_DMA_CHANNEL(id, dir, DIR, src, dest)
#endif /* CONFIG_ADC_STM32_SHARED_IRQS */
#endif /* CONFIG_ADC_STM32_DMA && CONFIG_ADC_STM32_SHARED_IRQS */
#define ADC_STM32_INIT(index) \
@@ -1443,6 +1675,7 @@ static struct adc_stm32_data adc_stm32_data_##index = { \
ADC_CONTEXT_INIT_TIMER(adc_stm32_data_##index, ctx), \
ADC_CONTEXT_INIT_LOCK(adc_stm32_data_##index, ctx), \
ADC_CONTEXT_INIT_SYNC(adc_stm32_data_##index, ctx), \
ADC_DMA_CHANNEL(index, dmamux, NULL, PERIPHERAL, MEMORY) \
}; \
\
DEVICE_DT_INST_DEFINE(index, \
