Merge branch 'bugfix/i2s_tx_auto_clear_not_totally_clean_the_buf_v5.0' into 'release/v5.0'

i2s: fix tx incomplete auto clear (v5.0)

See merge request espressif/esp-idf!20570
This commit is contained in:
morris 2022-11-01 10:14:54 +08:00
commit 88482af17a
2 changed files with 11 additions and 12 deletions

View File

@@ -199,9 +199,6 @@ static bool IRAM_ATTR i2s_dma_tx_callback(gdma_channel_handle_t dma_chan, gdma_e
         if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
             xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &tmp);
             need_awoke |= tmp;
-            if (p_i2s->tx_desc_auto_clear) {
-                memset((void *) dummy, 0, p_i2s->tx->buf_size);
-            }
             if (p_i2s->i2s_queue) {
                 i2s_event.type = I2S_EVENT_TX_Q_OVF;
                 i2s_event.size = p_i2s->tx->buf_size;
@@ -209,6 +206,9 @@ static bool IRAM_ATTR i2s_dma_tx_callback(gdma_channel_handle_t dma_chan, gdma_e
                 need_awoke |= tmp;
             }
         }
+        if (p_i2s->tx_desc_auto_clear) {
+            memset((void *) (((lldesc_t *)finish_desc)->buf), 0, p_i2s->tx->buf_size);
+        }
         xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
         need_awoke |= tmp;
         if (p_i2s->i2s_queue) {
@@ -255,18 +255,18 @@ static void IRAM_ATTR i2s_intr_handler_default(void *arg)
         if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
             xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &tmp);
             need_awoke |= tmp;
-            // See if tx descriptor needs to be auto cleared:
-            // This will avoid any kind of noise that may get introduced due to transmission
-            // of previous data from tx descriptor on I2S line.
-            if (p_i2s->tx_desc_auto_clear == true) {
-                memset((void *) dummy, 0, p_i2s->tx->buf_size);
-            }
             if (p_i2s->i2s_queue) {
                 i2s_event.type = I2S_EVENT_TX_Q_OVF;
                 xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &tmp);
                 need_awoke |= tmp;
             }
         }
+        // See if tx descriptor needs to be auto cleared:
+        // This will avoid any kind of noise that may get introduced due to transmission
+        // of previous data from tx descriptor on I2S line.
+        if (p_i2s->tx_desc_auto_clear == true) {
+            memset((void *)(((lldesc_t *)finish_desc)->buf), 0, p_i2s->tx->buf_size);
+        }
         xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
         need_awoke |= tmp;
         if (p_i2s->i2s_queue) {

View File

@@ -527,12 +527,11 @@ static bool IRAM_ATTR i2s_dma_tx_callback(gdma_channel_handle_t dma_chan, gdma_e
                 user_need_yield |= handle->callbacks.on_send_q_ovf(handle, &evt, handle->user_data);
             }
         }
-        xQueueSendFromISR(handle->msg_queue, &(finish_desc->buf), &need_yield2);
         if (handle->dma.auto_clear) {
             uint8_t *sent_buf = (uint8_t *)finish_desc->buf;
             memset(sent_buf, 0, handle->dma.buf_size);
         }
+        xQueueSendFromISR(handle->msg_queue, &(finish_desc->buf), &need_yield2);
     return need_yield1 | need_yield2 | user_need_yield;
 }
@@ -607,12 +606,12 @@ static void IRAM_ATTR i2s_dma_tx_callback(void *arg)
                 user_need_yield |= handle->callbacks.on_send_q_ovf(handle, &evt, handle->user_data);
             }
         }
-        xQueueSendFromISR(handle->msg_queue, &(finish_desc->buf), &need_yield2);
         // Auto clear the dma buffer after data sent
         if (handle->dma.auto_clear) {
             uint8_t *buff = (uint8_t *)finish_desc->buf;
             memset(buff, 0, handle->dma.buf_size);
         }
+        xQueueSendFromISR(handle->msg_queue, &(finish_desc->buf), &need_yield2);
     }
     if (need_yield1 || need_yield2 || user_need_yield) {