Merge branch 'bugfix/fix_adc_continuous_driver_conv_frame_issue_v5.0' into 'release/v5.0'

adc: fix adc continuous driver conv_frame_size not bigger than 4092 issue / pr 11500, use circular dma descriptors in adc continuous mode (v5.0)

See merge request espressif/esp-idf!24189
Jiang Jiang Jian 2023-06-13 20:20:25 +08:00
commit fa17fc83cb
5 changed files with 88 additions and 45 deletions


@@ -207,7 +207,9 @@ esp_err_t adc_digi_initialize(const adc_digi_init_config_t *init_config)
     }

     //malloc dma descriptor
-    s_adc_digi_ctx->hal.rx_desc = heap_caps_calloc(1, (sizeof(dma_descriptor_t)) * INTERNAL_BUF_NUM, MALLOC_CAP_DMA);
+    uint32_t dma_desc_num_per_frame = (init_config->conv_num_each_intr + DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED - 1) / DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
+    uint32_t dma_desc_max_num = dma_desc_num_per_frame * INTERNAL_BUF_NUM;
+    s_adc_digi_ctx->hal.rx_desc = heap_caps_calloc(1, (sizeof(dma_descriptor_t)) * dma_desc_max_num, MALLOC_CAP_DMA);
     if (!s_adc_digi_ctx->hal.rx_desc) {
         ret = ESP_ERR_NO_MEM;
         goto cleanup;
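The two added lines are a ceiling division: one conversion frame may now span several DMA descriptors, each capped at DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED (4092) bytes. A minimal host-side sketch of the same arithmetic, with illustrative values only (the frame size and INTERNAL_BUF_NUM below are assumptions, not taken from this commit):

#include <stdint.h>
#include <stdio.h>

#define DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED  (4095 - 3)  // 4092, defined later in this commit
#define INTERNAL_BUF_NUM                           5           // illustrative; the driver has its own constant

int main(void)
{
    uint32_t conv_frame_size = 8192;  // illustrative frame larger than one descriptor can hold
    // Ceiling division: descriptors needed to cover one frame
    uint32_t dma_desc_num_per_frame =
        (conv_frame_size + DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED - 1) / DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
    // Total descriptors allocated: one group of descriptors per internal buffer
    uint32_t dma_desc_max_num = dma_desc_num_per_frame * INTERNAL_BUF_NUM;
    printf("descriptors per frame: %u, total allocated: %u\n",
           (unsigned)dma_desc_num_per_frame, (unsigned)dma_desc_max_num);  // prints 3 and 15
    return 0;
}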
@@ -310,7 +312,8 @@ esp_err_t adc_digi_initialize(const adc_digi_init_config_t *init_config)
 #elif CONFIG_IDF_TARGET_ESP32
         .dev = (void *)I2S_LL_GET_HW(s_adc_digi_ctx->i2s_host),
 #endif
-        .desc_max_num = INTERNAL_BUF_NUM,
+        .eof_desc_num = INTERNAL_BUF_NUM,
+        .eof_step = dma_desc_num_per_frame,
         .dma_chan = dma_chan,
         .eof_num = init_config->conv_num_each_intr / SOC_ADC_DIGI_DATA_BYTES_PER_CONV
     };
@@ -367,26 +370,22 @@ static IRAM_ATTR bool s_adc_dma_intr(adc_digi_context_t *adc_digi_ctx)
     portBASE_TYPE taskAwoken = 0;
     BaseType_t ret;
     adc_hal_dma_desc_status_t status = false;
-    dma_descriptor_t *current_desc = NULL;
+    uint8_t *finished_buffer = NULL;
+    uint32_t finished_size = 0;

     while (1) {
-        status = adc_hal_get_reading_result(&adc_digi_ctx->hal, adc_digi_ctx->rx_eof_desc_addr, &current_desc);
+        status = adc_hal_get_reading_result(&adc_digi_ctx->hal, adc_digi_ctx->rx_eof_desc_addr, &finished_buffer, &finished_size);
         if (status != ADC_HAL_DMA_DESC_VALID) {
             break;
         }

-        ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, current_desc->buffer, current_desc->dw0.length, &taskAwoken);
+        ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, finished_buffer, finished_size, &taskAwoken);
         if (ret == pdFALSE) {
             //ringbuffer overflow
             adc_digi_ctx->ringbuf_overflow_flag = 1;
         }
     }

-    if (status == ADC_HAL_DMA_DESC_NULL) {
-        //start next turns of dma operation
-        adc_hal_digi_start(&adc_digi_ctx->hal, adc_digi_ctx->rx_dma_buf);
-    }
-
     return (taskAwoken == pdTRUE);
 }


@@ -166,7 +166,9 @@ esp_err_t adc_continuous_new_handle(const adc_continuous_handle_cfg_t *hdl_config
     }

     //malloc dma descriptor
-    adc_ctx->hal.rx_desc = heap_caps_calloc(1, (sizeof(dma_descriptor_t)) * INTERNAL_BUF_NUM, MALLOC_CAP_INTERNAL | MALLOC_CAP_DMA);
+    uint32_t dma_desc_num_per_frame = (hdl_config->conv_frame_size + DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED - 1) / DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
+    uint32_t dma_desc_max_num = dma_desc_num_per_frame * INTERNAL_BUF_NUM;
+    adc_ctx->hal.rx_desc = heap_caps_calloc(1, (sizeof(dma_descriptor_t)) * dma_desc_max_num, MALLOC_CAP_INTERNAL | MALLOC_CAP_DMA);
     if (!adc_ctx->hal.rx_desc) {
         ret = ESP_ERR_NO_MEM;
         goto cleanup;
@@ -256,7 +258,8 @@ esp_err_t adc_continuous_new_handle(const adc_continuous_handle_cfg_t *hdl_config
 #elif CONFIG_IDF_TARGET_ESP32
         .dev = (void *)I2S_LL_GET_HW(adc_ctx->i2s_host),
 #endif
-        .desc_max_num = INTERNAL_BUF_NUM,
+        .eof_desc_num = INTERNAL_BUF_NUM,
+        .eof_step = dma_desc_num_per_frame,
         .dma_chan = dma_chan,
         .eof_num = hdl_config->conv_frame_size / SOC_ADC_DIGI_DATA_BYTES_PER_CONV
     };
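With the descriptor count now derived from conv_frame_size, a conversion frame no longer has to fit into a single 4092-byte descriptor. A hedged usage sketch of the public adc_continuous API this file implements (the buffer sizes below are illustrative, not taken from the commit):

#include "esp_err.h"
#include "esp_adc/adc_continuous.h"

static adc_continuous_handle_t s_handle;

void example_open_large_frame(void)
{
    adc_continuous_handle_cfg_t adc_config = {
        .max_store_buf_size = 16384,  // illustrative: internal ring buffer size in bytes
        .conv_frame_size = 8192,      // larger than 4092 bytes; split across several descriptors internally
    };
    // Channel configuration and adc_continuous_start() are omitted from this sketch.
    ESP_ERROR_CHECK(adc_continuous_new_handle(&adc_config, &s_handle));
}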
@@ -290,6 +293,7 @@ static IRAM_ATTR bool adc_dma_in_suc_eof_callback(gdma_channel_handle_t dma_chan
     ctx->rx_eof_desc_addr = event_data->rx_eof_desc_addr;
     return s_adc_dma_intr(user_data);
 }
+
 #else
 static IRAM_ATTR void adc_dma_intr_handler(void *arg)
 {
@@ -318,21 +322,22 @@ static IRAM_ATTR bool s_adc_dma_intr(adc_continuous_ctx_t *adc_digi_ctx)
     bool need_yield = false;
     BaseType_t ret;
     adc_hal_dma_desc_status_t status = false;
-    dma_descriptor_t *current_desc = NULL;
+    uint8_t *finished_buffer = NULL;
+    uint32_t finished_size = 0;

     while (1) {
-        status = adc_hal_get_reading_result(&adc_digi_ctx->hal, adc_digi_ctx->rx_eof_desc_addr, &current_desc);
+        status = adc_hal_get_reading_result(&adc_digi_ctx->hal, adc_digi_ctx->rx_eof_desc_addr, &finished_buffer, &finished_size);
         if (status != ADC_HAL_DMA_DESC_VALID) {
             break;
         }

-        ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, current_desc->buffer, current_desc->dw0.length, &taskAwoken);
+        ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, finished_buffer, finished_size, &taskAwoken);
         need_yield |= (taskAwoken == pdTRUE);
         if (adc_digi_ctx->cbs.on_conv_done) {
             adc_continuous_evt_data_t edata = {
-                .conv_frame_buffer = current_desc->buffer,
-                .size = current_desc->dw0.length,
+                .conv_frame_buffer = finished_buffer,
+                .size = finished_size,
             };
             if (adc_digi_ctx->cbs.on_conv_done(adc_digi_ctx, &edata, adc_digi_ctx->user_data)) {
                 need_yield |= true;
@@ -350,11 +355,6 @@ static IRAM_ATTR bool s_adc_dma_intr(adc_continuous_ctx_t *adc_digi_ctx)
         }
     }

-    if (status == ADC_HAL_DMA_DESC_NULL) {
-        //start next turns of dma operation
-        adc_hal_digi_start(&adc_digi_ctx->hal, adc_digi_ctx->rx_dma_buf);
-    }
-
     return need_yield;
 }
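Since adc_hal_get_reading_result() now returns the start address and accumulated length of the finished frame, on_conv_done still sees one contiguous conv_frame_size-byte buffer per event even when that frame spans several descriptors. A hedged callback sketch (the helper name and the user_data layout are illustrative, not from this commit):

#include <stdint.h>
#include "esp_attr.h"
#include "esp_err.h"
#include "esp_adc/adc_continuous.h"

static IRAM_ATTR bool on_conv_done_cb(adc_continuous_handle_t handle,
                                      const adc_continuous_evt_data_t *edata,
                                      void *user_data)
{
    (void)handle;
    // ISR context: just record the frame size, leave real processing to a task
    uint32_t *last_frame_size = (uint32_t *)user_data;
    *last_frame_size = edata->size;   // whole frame, even if it spanned several descriptors
    return false;                     // no high-priority task woken here
}

void example_register_cb(adc_continuous_handle_t handle, uint32_t *last_frame_size)
{
    adc_continuous_evt_cbs_t cbs = {
        .on_conv_done = on_conv_done_cb,
    };
    ESP_ERROR_CHECK(adc_continuous_register_event_callbacks(handle, &cbs, last_frame_size));
}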


@@ -97,7 +97,8 @@ void adc_hal_dma_ctx_config(adc_hal_dma_ctx_t *hal, const adc_hal_dma_config_t *config)
 {
     hal->desc_dummy_head.next = hal->rx_desc;
     hal->dev = config->dev;
-    hal->desc_max_num = config->desc_max_num;
+    hal->eof_desc_num = config->eof_desc_num;
+    hal->eof_step = config->eof_step;
     hal->dma_chan = config->dma_chan;
     hal->eof_num = config->eof_num;
 }
@@ -232,25 +233,36 @@ void adc_hal_digi_controller_config(adc_hal_dma_ctx_t *hal, const adc_hal_digi_c
     adc_hal_digi_sample_freq_config(hal, cfg->sample_freq_hz);
 }

-static void adc_hal_digi_dma_link_descriptors(dma_descriptor_t *desc, uint8_t *data_buf, uint32_t size, uint32_t num)
+static void adc_hal_digi_dma_link_descriptors(dma_descriptor_t *desc, uint8_t *data_buf, uint32_t per_eof_size, uint32_t eof_step, uint32_t eof_num)
 {
     HAL_ASSERT(((uint32_t)data_buf % 4) == 0);
-    HAL_ASSERT((size % 4) == 0);
+    HAL_ASSERT((per_eof_size % 4) == 0);
     uint32_t n = 0;
+    dma_descriptor_t *desc_head = desc;

-    while (num--) {
-        desc[n] = (dma_descriptor_t) {
-            .dw0.size = size,
-            .dw0.length = 0,
-            .dw0.suc_eof = 0,
-            .dw0.owner = 1,
-            .buffer = data_buf,
-            .next = &desc[n+1]
-        };
-        data_buf += size;
-        n++;
+    while (eof_num--) {
+        uint32_t eof_size = per_eof_size;
+
+        for (int i = 0; i < eof_step; i++) {
+            uint32_t this_len = eof_size;
+            if (this_len > DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED) {
+                this_len = DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
+            }
+
+            desc[n] = (dma_descriptor_t) {
+                .dw0.size = this_len,
+                .dw0.length = 0,
+                .dw0.suc_eof = 0,
+                .dw0.owner = 1,
+                .buffer = data_buf,
+                .next = &desc[n+1]
+            };
+            eof_size -= this_len;
+            data_buf += this_len;
+            n++;
+        }
     }
-    desc[n-1].next = NULL;
+    desc[n-1].next = desc_head;
 }

 void adc_hal_digi_start(adc_hal_dma_ctx_t *hal, uint8_t *data_buf)
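The rewritten linker splits each per-EOF frame into eof_step chunks of at most DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED bytes and points the final descriptor back at the head, turning the list into a ring so the ISR no longer restarts the DMA. A host-side sketch of the chunking only (no real descriptors; the frame size is illustrative):

#include <stdint.h>
#include <stdio.h>

#define MAX_DESC_LEN  (4095 - 3)   // per-descriptor cap, kept 4-byte aligned

// Print the buffer lengths the rewritten linker would program for one EOF frame.
static void print_chunks(uint32_t per_eof_size)
{
    uint32_t eof_step = (per_eof_size + MAX_DESC_LEN - 1) / MAX_DESC_LEN;
    uint32_t remaining = per_eof_size;
    for (uint32_t i = 0; i < eof_step; i++) {
        uint32_t this_len = (remaining > MAX_DESC_LEN) ? MAX_DESC_LEN : remaining;
        printf("desc %u: %u bytes\n", (unsigned)i, (unsigned)this_len);
        remaining -= this_len;
    }
}

int main(void)
{
    print_chunks(8192);   // three descriptors: 4092, 4092 and 8 bytes
    return 0;
}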
@@ -265,7 +277,7 @@ void adc_hal_digi_start(adc_hal_dma_ctx_t *hal, uint8_t *data_buf)
     //reset the current descriptor address
     hal->cur_desc_ptr = &hal->desc_dummy_head;
-    adc_hal_digi_dma_link_descriptors(hal->rx_desc, data_buf, hal->eof_num * SOC_ADC_DIGI_DATA_BYTES_PER_CONV, hal->desc_max_num);
+    adc_hal_digi_dma_link_descriptors(hal->rx_desc, data_buf, hal->eof_num * SOC_ADC_DIGI_DATA_BYTES_PER_CONV, hal->eof_step, hal->eof_desc_num);

     //start DMA
     adc_dma_ll_rx_start(hal->dev, hal->dma_chan, (lldesc_t *)hal->rx_desc);
@@ -287,18 +299,45 @@ bool adc_hal_check_event(adc_hal_dma_ctx_t *hal, uint32_t mask)
 }
 #endif  //#if !SOC_GDMA_SUPPORTED

-adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, const intptr_t eof_desc_addr, dma_descriptor_t **cur_desc)
+adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, const intptr_t eof_desc_addr, uint8_t **buffer, uint32_t *len)
 {
     HAL_ASSERT(hal->cur_desc_ptr);
     if (!hal->cur_desc_ptr->next) {
         return ADC_HAL_DMA_DESC_NULL;
     }
     if ((intptr_t)hal->cur_desc_ptr == eof_desc_addr) {
         return ADC_HAL_DMA_DESC_WAITING;
     }

-    hal->cur_desc_ptr = hal->cur_desc_ptr->next;
-    *cur_desc = hal->cur_desc_ptr;
+    uint8_t *buffer_start = NULL;
+    uint32_t eof_len = 0;
+    dma_descriptor_t *eof_desc = hal->cur_desc_ptr;
+    //Find the eof list start
+    eof_desc = eof_desc->next;
+    eof_desc->dw0.owner = 1;
+    buffer_start = eof_desc->buffer;
+    eof_len += eof_desc->dw0.length;
+    if ((intptr_t)eof_desc == eof_desc_addr) {
+        goto valid;
+    }
+    //Find the eof list end
+    for (int i = 1; i < hal->eof_step; i++) {
+        eof_desc = eof_desc->next;
+        eof_desc->dw0.owner = 1;
+        eof_len += eof_desc->dw0.length;
+        if ((intptr_t)eof_desc == eof_desc_addr) {
+            goto valid;
+        }
+    }
+
+valid:
+    hal->cur_desc_ptr = eof_desc;
+    *buffer = buffer_start;
+    *len = eof_len;

     return ADC_HAL_DMA_DESC_VALID;
 }


@@ -55,7 +55,8 @@ typedef enum adc_hal_dma_desc_status_t {
  */
 typedef struct adc_hal_dma_config_t {
     void        *dev;           ///< DMA peripheral address
-    uint32_t    desc_max_num;   ///< Number of the descriptors linked once
+    uint32_t    eof_desc_num;   ///< Number of dma descriptors that is eof
+    uint32_t    eof_step;       ///< Number of linked descriptors that is one eof
     uint32_t    dma_chan;       ///< DMA channel to be used
     uint32_t    eof_num;        ///< Bytes between 2 in_suc_eof interrupts
 } adc_hal_dma_config_t;
@@ -73,7 +74,8 @@ typedef struct adc_hal_dma_ctx_t {
     /**< these need to be configured by `adc_hal_dma_config_t` via driver layer*/
     void        *dev;           ///< DMA address
-    uint32_t    desc_max_num;   ///< Number of the descriptors linked once
+    uint32_t    eof_desc_num;   ///< Number of dma descriptors that is eof
+    uint32_t    eof_step;       ///< Number of linked descriptors that is one eof
     uint32_t    dma_chan;       ///< DMA channel to be used
     uint32_t    eof_num;        ///< Words between 2 in_suc_eof interrupts
 } adc_hal_dma_ctx_t;
@@ -190,11 +192,12 @@ bool adc_hal_check_event(adc_hal_dma_ctx_t *hal, uint32_t mask);
  *
  * @param hal            Context of the HAL
  * @param eof_desc_addr  The last descriptor that is finished by HW. Should be got from DMA
- * @param[out] cur_desc  The descriptor with ADC reading result (from the 1st one to the last one (``eof_desc_addr``))
+ * @param[out] buffer    ADC reading result buffer
+ * @param[out] len       ADC reading result len
  *
  * @return See ``adc_hal_dma_desc_status_t``
  */
-adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, const intptr_t eof_desc_addr, dma_descriptor_t **cur_desc);
+adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, const intptr_t eof_desc_addr, uint8_t **buffer, uint32_t *len);

 /**
  * @brief Clear interrupt


@@ -36,6 +36,8 @@ ESP_STATIC_ASSERT(sizeof(dma_descriptor_t) == 12, "dma_descriptor_t should occup
 #define DMA_DESCRIPTOR_BUFFER_OWNER_CPU           (0)      /*!< DMA buffer is allowed to be accessed by CPU */
 #define DMA_DESCRIPTOR_BUFFER_OWNER_DMA           (1)      /*!< DMA buffer is allowed to be accessed by DMA engine */
 #define DMA_DESCRIPTOR_BUFFER_MAX_SIZE            (4095)   /*!< Maximum size of the buffer that can be attached to descriptor */
+#define DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED (4095-3) /*!< Maximum size of the buffer that can be attached to descriptor, and aligned to 4B */

 #ifdef __cplusplus
 }