cache: refactor cache_utils into cache_hal instead

wanlei 2023-06-13 11:56:14 +08:00 committed by wuzhenghui
parent 68e07d90ba
commit 1283619767
14 changed files with 378 additions and 98 deletions

View File

@@ -11,7 +11,11 @@ if(NOT CONFIG_HAL_WDT_USE_ROM_IMPL)
list(APPEND srcs "wdt_hal_iram.c")
endif()
if(NOT ${target} STREQUAL "esp32")
# We wrap the Cache ROM APIs as Cache HAL APIs for two reasons: 1. to place the code in internal RAM; 2. to provide unified APIs
# The ESP32 cache structure / ROM APIs are different, so it has a dedicated implementation, `cache_hal_esp32.c`.
if(${target} STREQUAL "esp32")
list(APPEND srcs "esp32/cache_hal_esp32.c")
else()
list(APPEND srcs "cache_hal.c")
endif()

View File

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -52,6 +52,10 @@
typedef struct {
uint32_t data_autoload_flag;
uint32_t inst_autoload_flag;
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
// There's no register indicating whether the cache is enabled on these chips, so a software flag is used to track this state.
volatile bool cache_enabled;
#endif
} cache_hal_context_t;
static cache_hal_context_t ctx;
@@ -75,6 +79,10 @@ void cache_hal_init(void)
cache_ll_l1_enable_bus(1, CACHE_LL_DEFAULT_DBUS_MASK);
cache_ll_l1_enable_bus(1, CACHE_LL_DEFAULT_IBUS_MASK);
#endif
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
#endif
}
void cache_hal_disable(cache_type_t type)
@@ -91,6 +99,10 @@ void cache_hal_disable(cache_type_t type)
Cache_Disable_DCache();
}
#endif
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 0;
#endif
}
void cache_hal_enable(cache_type_t type)
@@ -107,4 +119,57 @@ void cache_hal_enable(cache_type_t type)
Cache_Enable_DCache(ctx.data_autoload_flag);
}
#endif
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
#endif
}
void cache_hal_suspend(cache_type_t type)
{
#if SOC_SHARED_IDCACHE_SUPPORTED
Cache_Suspend_ICache();
#else
if (type == CACHE_TYPE_DATA) {
Cache_Suspend_DCache();
} else if (type == CACHE_TYPE_INSTRUCTION) {
Cache_Suspend_ICache();
} else {
Cache_Suspend_ICache();
Cache_Suspend_DCache();
}
#endif
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 0;
#endif
}
void cache_hal_resume(cache_type_t type)
{
#if SOC_SHARED_IDCACHE_SUPPORTED
Cache_Resume_ICache(ctx.inst_autoload_flag);
#else
if (type == CACHE_TYPE_DATA) {
Cache_Resume_DCache(ctx.data_autoload_flag);
} else if (type == CACHE_TYPE_INSTRUCTION) {
Cache_Resume_ICache(ctx.inst_autoload_flag);
} else {
Cache_Resume_ICache(ctx.inst_autoload_flag);
Cache_Resume_DCache(ctx.data_autoload_flag);
}
#endif
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
#endif
}
bool cache_hal_is_cache_enabled(cache_type_t type)
{
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
return ctx.cache_enabled;
#else
return cache_ll_l1_is_cache_enabled(0, type);
#endif
}

View File

@@ -0,0 +1,40 @@
/*
* SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include "hal/cache_ll.h"
#include "hal/cache_hal.h"
static uint32_t s_cache_status[2];
void cache_hal_suspend(cache_type_t type)
{
s_cache_status[0] = cache_ll_l1_get_enabled_bus(0);
cache_ll_l1_disable_cache(0);
#if !CONFIG_FREERTOS_UNICORE
s_cache_status[1] = cache_ll_l1_get_enabled_bus(1);
cache_ll_l1_disable_cache(1);
#endif
}
void cache_hal_resume(cache_type_t type)
{
cache_ll_l1_enable_cache(0);
cache_ll_l1_enable_bus(0, s_cache_status[0]);
#if !CONFIG_FREERTOS_UNICORE
cache_ll_l1_enable_cache(1);
cache_ll_l1_enable_bus(1, s_cache_status[1]);
#endif
}
bool cache_hal_is_cache_enabled(cache_type_t type)
{
bool result = cache_ll_l1_is_cache_enabled(0, CACHE_TYPE_ALL);
#if !CONFIG_FREERTOS_UNICORE
result = result && cache_ll_l1_is_cache_enabled(1, CACHE_TYPE_ALL);
#endif
return result;
}

View File

@@ -19,6 +19,66 @@
extern "C" {
#endif
/**
* @brief Enable a cache unit
*
* @param cache_id cache ID (when l1 cache is per core)
*/
__attribute__((always_inline))
static inline void cache_ll_l1_enable_cache(uint32_t cache_id)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
if (cache_id == 0) {
DPORT_REG_SET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE);
} else {
DPORT_REG_SET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE);
}
}
/**
* @brief Disable a cache unit
*
* @param cache_id cache ID (when l1 cache is per core)
*/
__attribute__((always_inline))
static inline void cache_ll_l1_disable_cache(uint32_t cache_id)
{
if (cache_id == 0) {
while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1){
;
}
DPORT_REG_CLR_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE);
} else {
while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1){
;
}
DPORT_REG_CLR_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE);
}
}
/**
* @brief Check whether the cache is enabled
*
* @param cache_id cache ID (when l1 cache is per core)
* @param type see `cache_type_t`
* @return true if enabled, false otherwise
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
(void) type; //On ESP32 the cache is shared between instructions and data, so the type is ignored
bool enabled;
if (cache_id == 0) {
enabled = DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE);
} else {
enabled = DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE);
}
return enabled;
}
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range
*

View File

@@ -8,6 +8,7 @@
#pragma once
#include <stdbool.h>
#include "soc/extmem_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
@@ -35,6 +36,21 @@ extern "C" {
#define CACHE_LL_L1_ILG_EVENT_PRELOAD_OP_FAULT (1<<1)
#define CACHE_LL_L1_ILG_EVENT_SYNC_OP_FAULT (1<<0)
/**
* @brief Check whether the cache is enabled
*
* @param cache_id cache ID (when l1 cache is per core)
* @param type see `cache_type_t`
* @return true if enabled, false otherwise
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0);
(void) type; // On C2 there's only ICache
return REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE);
}
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range
*

View File

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -8,6 +8,7 @@
#pragma once
#include <stdbool.h>
#include "soc/extmem_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
@@ -35,6 +36,21 @@ extern "C" {
#define CACHE_LL_L1_ILG_EVENT_SYNC_OP_FAULT (1<<0)
/**
* @brief Check whether the cache is enabled
*
* @param cache_id cache ID (when l1 cache is per core)
* @param type see `cache_type_t`
* @return true if enabled, false otherwise
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0);
(void) type; // On C3 there's only ICache
return REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE);
}
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range
*

View File

@@ -16,7 +16,7 @@
#ifdef __cplusplus
extern "C" {
#endif
#define CACHE_LL_ENABLE_DISABLE_STATE_SW 1 //There's no register indicating the cache enable/disable state, so it has to be tracked in software.
#define CACHE_LL_DEFAULT_IBUS_MASK CACHE_BUS_IBUS0
#define CACHE_LL_DEFAULT_DBUS_MASK CACHE_BUS_DBUS0

View File

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -22,6 +22,29 @@ extern "C" {
#define CACHE_LL_DEFAULT_IBUS_MASK CACHE_BUS_IBUS0
#define CACHE_LL_DEFAULT_DBUS_MASK CACHE_BUS_IBUS2
/**
* @brief Check whether the cache is enabled
*
* @param cache_id cache ID (when l1 cache is per core)
* @param type see `cache_type_t`
* @return true if enabled, false otherwise
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0);
bool enabled;
if (type == CACHE_TYPE_INSTRUCTION) {
enabled = REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE);
} else if (type == CACHE_TYPE_DATA) {
enabled = REG_GET_BIT(EXTMEM_PRO_DCACHE_CTRL_REG, EXTMEM_PRO_DCACHE_ENABLE);
} else {
enabled = REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE);
enabled = enabled && REG_GET_BIT(EXTMEM_PRO_DCACHE_CTRL_REG, EXTMEM_PRO_DCACHE_ENABLE);
}
return enabled;
}
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range

View File

@@ -8,6 +8,7 @@
#pragma once
#include <stdbool.h>
#include "soc/extmem_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
@@ -37,6 +38,29 @@ extern "C" {
#define CACHE_LL_L1_ILG_EVENT_ICACHE_PRELOAD_OP_FAULT (1<<1)
#define CACHE_LL_L1_ILG_EVENT_ICACHE_SYNC_OP_FAULT (1<<0)
/**
* @brief Check whether the cache is enabled
*
* @param cache_id cache ID (when l1 cache is per core)
* @param type see `cache_type_t`
* @return true if enabled, false otherwise
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
bool enabled;
if (type == CACHE_TYPE_INSTRUCTION) {
enabled = REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE);
} else if (type == CACHE_TYPE_DATA) {
enabled = REG_GET_BIT(EXTMEM_DCACHE_CTRL_REG, EXTMEM_DCACHE_ENABLE);
} else {
enabled = REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE);
enabled = enabled && REG_GET_BIT(EXTMEM_DCACHE_CTRL_REG, EXTMEM_DCACHE_ENABLE);
}
return enabled;
}
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range

View File

@@ -1,12 +1,13 @@
/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <stdbool.h>
#include "hal/cache_types.h"
#ifdef __cplusplus
@@ -36,6 +37,86 @@ void cache_hal_disable(cache_type_t type);
*/
void cache_hal_enable(cache_type_t type);
/**
* @brief Suspend cache
*
* Suspend the ICache or DCache or both. This suspends CPU access to the cache for a while, without invalidating its contents.
*
* @param type see `cache_type_t`
*/
void cache_hal_suspend(cache_type_t type);
/**
* @brief Resume cache
*
* Resume the ICache or DCache or both.
*
* @param type see `cache_type_t`
*/
void cache_hal_resume(cache_type_t type);
/**
* @brief Check if corresponding cache is enabled or not
*
* @param type see `cache_type_t`
*
* @return true: enabled; false: disabled
*/
bool cache_hal_is_cache_enabled(cache_type_t type);
/**
* @brief Invalidate a cache-supported address region
*
* Invalidate the cache items covering the region, for either the ICache or the DCache.
*
* @param vaddr Start address of the region to be invalidated
* @param size Size of the region to be invalidated
*/
void cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size);
#if SOC_CACHE_WRITEBACK_SUPPORTED
/**
* @brief Write back a cache-supported address region
*
* Write the DCache items covering the region back to external memory.
*
* @param vaddr Start address of the region to writeback
* @param size Size of the region to writeback
*/
void cache_hal_writeback_addr(uint32_t vaddr, uint32_t size);
#endif //#if SOC_CACHE_WRITEBACK_SUPPORTED
#if SOC_CACHE_FREEZE_SUPPORTED
/**
* @brief Freeze cache
*
* Freeze the cache. CPU access to the cache is suspended until the cache is unfrozen.
*
* @param type see `cache_type_t`
*/
void cache_hal_freeze(cache_type_t type);
/**
* @brief Unfreeze cache
*
* Unfreeze the cache and restore CPU access to it.
*
* @param type see `cache_type_t`
*/
void cache_hal_unfreeze(cache_type_t type);
#endif //#if SOC_CACHE_FREEZE_SUPPORTED
/**
* @brief Get cache line size, in bytes
*
* @param type see `cache_type_t`
*
* @return cache line size, in bytes
*/
uint32_t cache_hal_get_cache_line_size(cache_type_t type);
#ifdef __cplusplus
}
#endif
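For illustration, a minimal sketch of how a caller might use the new HAL API above, assuming a target where CACHE_TYPE_ALL is accepted; the helper names are hypothetical, and real callers (such as the spi_flash code later in this diff) must run from IRAM, since flash cannot be fetched through the cache while it is suspended:

#include <assert.h>
#include "hal/cache_hal.h"

// Hypothetical stand-in for work that must not go through the cache.
static void do_external_flash_operation(void)
{
}

static void do_protected_flash_access(void)
{
    // Stop CPU access to the cache; contents are kept, nothing is invalidated.
    cache_hal_suspend(CACHE_TYPE_ALL);
    do_external_flash_operation();
    cache_hal_resume(CACHE_TYPE_ALL);

    // After resuming, the cache should report itself enabled again.
    assert(cache_hal_is_cache_enabled(CACHE_TYPE_ALL));
}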

View File

@@ -2,7 +2,9 @@
archive: libhal.a
entries:
mmu_hal (noflash)
if IDF_TARGET_ESP32 = n:
if IDF_TARGET_ESP32 = y:
cache_hal_esp32 (noflash)
else:
cache_hal (noflash)
spi_hal_iram (noflash)
spi_slave_hal_iram (noflash)

View File

@@ -38,6 +38,8 @@
#include "soc/ext_mem_defs.h"
#endif
#include "esp_rom_spiflash.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include <soc/soc.h>
#include "sdkconfig.h"
#ifndef CONFIG_FREERTOS_UNICORE
@@ -52,22 +54,11 @@
static __attribute__((unused)) const char *TAG = "cache";
#define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid
#define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )
#define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))
#define DPORT_CACHE_GET_VAL(cpuid) (cpuid == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP)
#define DPORT_CACHE_GET_MASK(cpuid) (cpuid == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP)
static void spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
static void spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);
// Used only by the ROM implementation in IDF; with the HAL implementation the cache status is held by the HAL, so this is unused.
static uint32_t s_flash_op_cache_state[2];
#ifndef CONFIG_FREERTOS_UNICORE
@@ -221,7 +212,7 @@ void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
s_flash_op_cpu = -1;
#endif
// Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
// Re-enable cache. After this, cache (flash and external RAM) should work again.
spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
#if SOC_IDCACHE_PER_CORE
//only needed if cache(s) is per core
@@ -341,6 +332,19 @@ void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
#endif // CONFIG_FREERTOS_UNICORE
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
uint32_t cache_value = cache_ll_l1_get_enabled_bus(cpuid);
// Re-enable cache on this CPU
spi_flash_restore_cache(cpuid, cache_value);
#else
spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}
/**
* The following two functions are replacements for Cache_Read_Disable and Cache_Read_Enable
* function in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
@@ -348,77 +352,17 @@ void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
*/
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
uint32_t ret = 0;
const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
if (cpuid == 0) {
ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
;
}
DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
}
#if !CONFIG_FREERTOS_UNICORE
else {
ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
;
}
DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
}
#endif
*saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
*saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
uint32_t icache_state, dcache_state;
icache_state = Cache_Suspend_ICache() << 16;
dcache_state = Cache_Suspend_DCache();
*saved_state = icache_state | dcache_state;
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2
uint32_t icache_state;
icache_state = Cache_Suspend_ICache() << 16;
*saved_state = icache_state;
#endif
cache_hal_suspend(CACHE_TYPE_ALL);
}
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
if (cpuid == 0) {
DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
}
#if !CONFIG_FREERTOS_UNICORE
else {
DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
}
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
Cache_Resume_DCache(saved_state & 0xffff);
Cache_Resume_ICache(saved_state >> 16);
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2
Cache_Resume_ICache(saved_state >> 16);
#endif
cache_hal_resume(CACHE_TYPE_ALL);
}
IRAM_ATTR bool spi_flash_cache_enabled(void)
bool IRAM_ATTR spi_flash_cache_enabled(void)
{
#if CONFIG_IDF_TARGET_ESP32
bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
#if portNUM_PROCESSORS == 2
result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
#elif CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2
bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
#endif
return result;
return cache_hal_is_cache_enabled(CACHE_TYPE_ALL);
}
#if CONFIG_IDF_TARGET_ESP32S2
@@ -961,20 +905,7 @@ esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable)
}
return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
cache_value &= DPORT_CACHE_GET_MASK(cpuid);
// Re-enable cache on this CPU
spi_flash_restore_cache(cpuid, cache_value);
#else
spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}
#endif // CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32C2
#if CONFIG_IDF_TARGET_ESP32S3
/*protect cache operation*/

View File

@@ -6,3 +6,16 @@ set(EXTRA_COMPONENT_DIRS "$ENV{IDF_PATH}/components/spi_flash/test_apps/componen
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
project(test_esp_flash_stress)
if(CONFIG_COMPILER_DUMP_RTL_FILES)
add_custom_target(check_test_app_sections ALL
COMMAND ${PYTHON} $ENV{IDF_PATH}/tools/ci/check_callgraph.py
--rtl-dir ${CMAKE_BINARY_DIR}/esp-idf/driver/
--elf-file ${CMAKE_BINARY_DIR}/mspi_test.elf
find-refs
--from-sections=.iram0.text
--to-sections=.flash.text,.flash.rodata
--exit-code
DEPENDS ${elf}
)
endif()
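For context, the check above fails the build (via --exit-code) when anything placed in .iram0.text references .flash.text or .flash.rodata. A hypothetical example of the pattern it is meant to catch (function names are illustrative only):

#include "esp_attr.h"

// No IRAM_ATTR, so this function is placed in .flash.text.
static void helper_in_flash(void)
{
}

// IRAM_ATTR places this function in .iram0.text; calling helper_in_flash()
// creates an .iram0.text -> .flash.text reference, which the
// check_callgraph.py invocation above reports.
void IRAM_ATTR flash_safe_routine(void)
{
    helper_in_flash();
}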

View File

@@ -0,0 +1,5 @@
# This config merges the freertos_flash and no_optimization UT configs together.
CONFIG_ESP_SYSTEM_MEMPROT_FEATURE=n
CONFIG_FREERTOS_PLACE_FUNCTIONS_INTO_FLASH=y
CONFIG_COMPILER_OPTIMIZATION_NONE=y
CONFIG_COMPILER_DUMP_RTL_FILES=y