Merge branch 'feat/cache_s31' into 'master'

cache: driver support and panic support on s31

Closes IDF-14650, IDF-14651, and IDFCI-10314

See merge request espressif/esp-idf!47156
This commit is contained in:
Armando (Dou Yiwen)
2026-04-08 02:06:44 +00:00
6 changed files with 266 additions and 308 deletions
@@ -1,4 +1,3 @@
menu "Cache config"
config CACHE_L1_ICACHE_SIZE
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -23,14 +23,12 @@ static const char *TAG = "CACHE_ERR";
const char cache_error_msg[] = "Cache access error";
// TODO: [ESP32S31] IDF-14650
void esp_cache_err_get_panic_info(esp_cache_err_info_t *err_info)
{
if (err_info == NULL) {
return;
}
uint32_t access_err_status = cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK) | cache_ll_l2_get_access_error_intr_status(0, CACHE_LL_L2_ACCESS_EVENT_MASK);
const uint32_t access_err_status = cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
/* Return the error string if a cache error is active */
err_info->err_str = access_err_status ? cache_error_msg : NULL;
@@ -38,8 +36,12 @@ void esp_cache_err_get_panic_info(esp_cache_err_info_t *err_info)
bool esp_cache_err_has_active_err(void)
{
bool has_active_err = cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK) | cache_ll_l2_get_access_error_intr_status(0, CACHE_LL_L2_ACCESS_EVENT_MASK);
return has_active_err;
return cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
}
/**
 * @brief Clear any active cache access error interrupt status
 *
 * Clears every bit in the L1 access-error event mask on cache 0, so that a
 * previously reported cache access error is no longer flagged as active.
 */
void esp_cache_err_clear_active_err(void)
{
    cache_ll_l1_clear_access_error_intr(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
}
void esp_cache_err_int_init(void)
@@ -65,23 +67,22 @@ void esp_cache_err_int_init(void)
esprv_int_set_priority(ETS_CACHEERR_INUM, SOC_INTERRUPT_LEVEL_MEDIUM);
ESP_DRAM_LOGV(TAG, "access error intr clr & ena mask is: 0x%x", CACHE_LL_L1_ACCESS_EVENT_MASK);
/**
* Here we
* 1. enable the cache fail tracer to take cache error interrupt into effect.
* 2. clear potential cache error interrupt raw bits
* 3. enable cache error interrupt en bits
*/
cache_ll_l1_enable_fail_tracer(0, true);
/* On the hardware side, start by clearing all the bits responsible for cache access error */
cache_ll_l1_clear_access_error_intr(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
cache_ll_l2_clear_access_error_intr(0, CACHE_LL_L2_ACCESS_EVENT_MASK);
/* Then enable cache access error interrupts. */
cache_ll_l1_enable_access_error_intr(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
cache_ll_l2_enable_access_error_intr(0, CACHE_LL_L2_ACCESS_EVENT_MASK);
/* Enable the interrupts for cache error. */
ESP_INTR_ENABLE(ETS_CACHEERR_INUM);
}
void esp_cache_err_clear_active_err(void)
{
}
int esp_cache_err_get_cpuid(void)
{
if (cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_CORE0_EVENT_MASK)) {
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2021-2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -120,11 +120,17 @@ TEST_CASE_MULTIPLE_STAGES("invalid access to cache raises panic (APP CPU)", "[ms
#endif // !TEMPORARY_DISABLED_FOR_TARGETS(ESP32S2)
#if MSPI_LL_AXI_DISABLE_SUPPORTED
#if CONFIG_IDF_TARGET_ESP32P4
#define AXI_RESET_REASON ESP_RST_WDT
#elif CONFIG_IDF_TARGET_ESP32S31
// Aligned with the ROM implementation of the reset reason
#define AXI_RESET_REASON 5
#endif
static void reset_after_disable_axi(void)
{
//For now we only support AXI disabling LL APIs, so the reset reason will be `ESP_RST_WDT`
//For now we only support AXI disabling LL APIs, so the reset reason will be AXI_RESET_REASON
//This will be updated when AXI disabling methods are fully supported
TEST_ASSERT_EQUAL(ESP_RST_WDT, esp_reset_reason());
TEST_ASSERT_EQUAL(AXI_RESET_REASON, esp_reset_reason());
}
static void NOINLINE_ATTR IRAM_ATTR s_invalid_axi_access(void)
+242 -279
View File
@@ -20,23 +20,21 @@
#ifdef __cplusplus
extern "C" {
#endif
// TODO: [ESP32S31] IDF-14651
/**
* Cache capabilities
*/
#define CACHE_LL_ENABLE_DISABLE_STATE_SW 1 //There's no register indicating cache enable/disable state, we need to use software way for this state.
#define CACHE_LL_EXT_MEM_VIA_L2CACHE 0
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_ID_ALL 2 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
#define CACHE_LL_CACHE_AUTOLOAD (1<<0)
#define CACHE_LL_DEFAULT_IBUS_MASK (CACHE_BUS_IBUS0 | CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2)
#define CACHE_LL_DEFAULT_DBUS_MASK (CACHE_BUS_DBUS0 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)
#define CACHE_LL_DEFAULT_IBUS_MASK (CACHE_BUS_IBUS0 | CACHE_BUS_IBUS1)
#define CACHE_LL_DEFAULT_DBUS_MASK (CACHE_BUS_DBUS0)
#define CACHE_LL_L1_ACCESS_EVENT_MASK (0x1f)
#define CACHE_LL_L2_ACCESS_EVENT_MASK (1<<6)
@@ -77,7 +75,6 @@ static inline void cache_ll_clk_init(void)
__attribute__((always_inline))
static inline bool cache_ll_l1_is_icache_autoload_enabled(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
bool enabled = false;
if (cache_id == 0) {
@@ -101,7 +98,6 @@ static inline bool cache_ll_l1_is_icache_autoload_enabled(uint32_t cache_id)
__attribute__((always_inline))
static inline bool cache_ll_l1_is_dcache_autoload_enabled(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
bool enabled = false;
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
enabled = REG_GET_BIT(CACHE_L1_DCACHE_AUTOLOAD_CTRL_REG, CACHE_L1_DCACHE_AUTOLOAD_ENA);
@@ -109,24 +105,6 @@ static inline bool cache_ll_l1_is_dcache_autoload_enabled(uint32_t cache_id)
return enabled;
}
/**
* @brief Check if L2 Cache auto preload is enabled or not
*
* @param cache_id id of the cache in this type and level
*
* @return true: enabled; false: disabled
*/
__attribute__((always_inline))
static inline bool cache_ll_l2_is_cache_autoload_enabled(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
bool enabled = false;
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
enabled = REG_GET_BIT(CACHE_L2_CACHE_AUTOLOAD_CTRL_REG, CACHE_L2_CACHE_AUTOLOAD_ENA);
}
return enabled;
}
/**
* @brief Check if Cache auto preload is enabled or not.
*
@@ -139,12 +117,9 @@ static inline bool cache_ll_l2_is_cache_autoload_enabled(uint32_t cache_id)
__attribute__((always_inline))
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
bool enabled = false;
if (cache_level == 2) {
enabled = cache_ll_l2_is_cache_autoload_enabled(cache_id);
} else if (cache_level == 1) {
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
enabled = cache_ll_l1_is_icache_autoload_enabled(cache_id);
@@ -173,7 +148,6 @@ static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cach
__attribute__((always_inline))
static inline void cache_ll_l1_disable_icache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0) {
Cache_Disable_L1_CORE0_ICache();
} else if (cache_id == 1) {
@@ -192,23 +166,11 @@ static inline void cache_ll_l1_disable_icache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_l1_disable_dcache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Disable_L1_DCache();
}
}
/**
* @brief Disable L2 Cache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l2_disable_cache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
}
/**
* @brief Disable Cache
*
@@ -219,9 +181,6 @@ static inline void cache_ll_l2_disable_cache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
@@ -236,8 +195,6 @@ static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t typ
cache_ll_l1_disable_dcache(cache_id);
break;
}
} else {
cache_ll_l2_disable_cache(cache_id);
}
}
@@ -253,7 +210,6 @@ static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t typ
__attribute__((always_inline))
static inline void cache_ll_l1_enable_icache(uint32_t cache_id, bool inst_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0) {
Cache_Enable_L1_CORE0_ICache(inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == 1) {
@@ -273,24 +229,11 @@ static inline void cache_ll_l1_enable_icache(uint32_t cache_id, bool inst_autolo
__attribute__((always_inline))
static inline void cache_ll_l1_enable_dcache(uint32_t cache_id, bool data_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Enable_L1_DCache(data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
* @brief Enable L2 Cache
*
* @param cache_id id of the cache in this type and level
* @param inst_autoload_en autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l2_enable_cache(uint32_t cache_id, bool autoload_en)
{
// TODO: [ESP32S31] IDF-14651
}
/**
* @brief Enable Cache
*
@@ -303,12 +246,9 @@ static inline void cache_ll_l2_enable_cache(uint32_t cache_id, bool autoload_en)
__attribute__((always_inline))
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 2) {
cache_ll_l2_enable_cache(cache_id, inst_autoload_en);
} else {
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_enable_icache(cache_id, inst_autoload_en);
@@ -336,7 +276,6 @@ static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type
__attribute__((always_inline))
static inline void cache_ll_l1_suspend_icache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0) {
Cache_Suspend_L1_CORE0_ICache();
} else if (cache_id == 1) {
@@ -355,23 +294,11 @@ static inline void cache_ll_l1_suspend_icache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_l1_suspend_dcache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Suspend_L1_DCache();
}
}
/**
* @brief Suspend L2 Cache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l2_suspend_cache(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
}
/**
* @brief Suspend Cache
*
@@ -382,9 +309,6 @@ static inline void cache_ll_l2_suspend_cache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
@@ -399,8 +323,6 @@ static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t typ
cache_ll_l1_suspend_dcache(cache_id);
break;
}
} else {
cache_ll_l2_suspend_cache(cache_id);
}
}
@@ -416,7 +338,6 @@ static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t typ
__attribute__((always_inline))
static inline void cache_ll_l1_resume_icache(uint32_t cache_id, bool inst_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0) {
Cache_Resume_L1_CORE0_ICache(inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == 1) {
@@ -436,24 +357,11 @@ static inline void cache_ll_l1_resume_icache(uint32_t cache_id, bool inst_autolo
__attribute__((always_inline))
static inline void cache_ll_l1_resume_dcache(uint32_t cache_id, bool data_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Resume_L1_DCache(data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
* @brief Resume L2 Cache
*
* @param cache_id id of the cache in this type and level
* @param inst_autoload_en autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l2_resume_cache(uint32_t cache_id, bool autoload_en)
{
// TODO: [ESP32S31] IDF-14651
}
/**
* @brief Resume Cache
*
@@ -466,12 +374,7 @@ static inline void cache_ll_l2_resume_cache(uint32_t cache_id, bool autoload_en)
__attribute__((always_inline))
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
// TODO: [ESP32S31] IDF-14651
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 2) {
cache_ll_l2_resume_cache(cache_id, inst_autoload_en);
} else {
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_resume_icache(cache_id, inst_autoload_en);
@@ -501,7 +404,6 @@ static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_icache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0) {
Cache_Invalidate_Addr(CACHE_MAP_L1_ICACHE_0, vaddr, size);
} else if (cache_id == 1) {
@@ -521,28 +423,11 @@ static inline void cache_ll_l1_invalidate_icache_addr(uint32_t cache_id, uint32_
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_dcache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE, vaddr, size);
}
}
/**
* @brief Invalidate L2 Cache addr
*
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be invalidated
* @param size size of the region to be invalidated
*/
__attribute__((always_inline))
static inline void cache_ll_l2_invalidate_cache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
// Cache_Invalidate_Addr(CACHE_MAP_L2_CACHE, vaddr, size);
}
}
/**
* @brief Invalidate cache supported addr
*
@@ -592,21 +477,6 @@ static inline void cache_ll_l1_writeback_dcache_addr(uint32_t cache_id, uint32_t
}
}
/**
* @brief Writeback L2 Cache addr
*
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be written back
* @param size size of the region to be written back
*/
__attribute__((always_inline))
static inline void cache_ll_l2_writeback_cache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
// Cache_WriteBack_Addr(CACHE_MAP_L2_CACHE, vaddr, size);
}
}
/**
* @brief Writeback cache supported addr
*
@@ -621,7 +491,6 @@ static inline void cache_ll_l2_writeback_cache_addr(uint32_t cache_id, uint32_t
__attribute__((always_inline))
static inline void cache_ll_writeback_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
// TODO: [ESP32S31] IDF-14651
if (cache_level == 1 || cache_level == CACHE_LL_LEVEL_ALL) {
switch (type) {
case CACHE_TYPE_DATA:
@@ -646,20 +515,6 @@ static inline void cache_ll_l1_writeback_dcache_all(uint32_t cache_id)
}
}
/**
* @brief Writeback L2 Cache all
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l2_writeback_cache_all(uint32_t cache_id)
{
// TODO: [ESP32S31] IDF-14651
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
// Cache_WriteBack_All(CACHE_MAP_L2_CACHE);
}
}
/**
* @brief Writeback all
*
@@ -725,7 +580,7 @@ static inline void cache_ll_l1_freeze_dcache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
HAL_ASSERT(cache_level == 1 || cache_level == 2);
HAL_ASSERT(cache_level == 1);
if (cache_level == 1) {
switch (type) {
@@ -740,8 +595,6 @@ static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type
cache_ll_l1_freeze_dcache(cache_id);
break;
}
} else {
HAL_ASSERT(0);
}
}
@@ -789,11 +642,7 @@ static inline void cache_ll_l1_unfreeze_dcache(uint32_t cache_id)
__attribute__((always_inline))
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 2) {
HAL_ASSERT(0);
} else {
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_unfreeze_icache(cache_id);
@@ -810,6 +659,154 @@ static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t ty
}
}
/*------------------------------------------------------------------------------
* Cache Preload
*----------------------------------------------------------------------------*/
/**
 * @brief Start L1 ICache manual preload
 *
 * Kicks off a manual preload of the given region on the selected core(s) and
 * returns immediately. Use cache_ll_l1_icache_preload_wait_done() to wait for
 * completion.
 *
 * @param cache_id  id of the cache in this type and level (0: Core0, 1: Core1, CACHE_LL_ID_ALL: both)
 * @param vaddr     start virtual address of the preload region
 * @param size      size of the preload region in bytes
 * @param ascending true: ascending (positive) order; false: descending (negative) order
 */
__attribute__((always_inline))
static inline void cache_ll_l1_icache_preload(uint32_t cache_id, uint32_t vaddr, uint32_t size, bool ascending)
{
    const uint32_t order = ascending ? 0 : 1;
    const bool preload_core0 = (cache_id == 0) || (cache_id == CACHE_LL_ID_ALL);
    const bool preload_core1 = (cache_id == 1) || (cache_id == CACHE_LL_ID_ALL);

    if (preload_core0) {
        Cache_Start_L1_CORE0_ICache_Preload(vaddr, size, order);
    }
    if (preload_core1) {
        Cache_Start_L1_CORE1_ICache_Preload(vaddr, size, order);
    }
}
/**
 * @brief Wait until L1 ICache manual preload is done
 *
 * Busy-polls the per-core preload-done flags for the selected core(s).
 *
 * @param cache_id id of the cache in this type and level (0: Core0, 1: Core1, CACHE_LL_ID_ALL: both)
 */
__attribute__((always_inline))
static inline void cache_ll_l1_icache_preload_wait_done(uint32_t cache_id)
{
    const bool wait_core0 = (cache_id == 0) || (cache_id == CACHE_LL_ID_ALL);
    const bool wait_core1 = (cache_id == 1) || (cache_id == CACHE_LL_ID_ALL);

    while (wait_core0 && (Cache_L1_CORE0_ICache_Preload_Done() == 0)) {
    }
    while (wait_core1 && (Cache_L1_CORE1_ICache_Preload_Done() == 0)) {
    }
}
/**
 * @brief Start L1 DCache manual preload
 *
 * Kicks off a manual preload of the given region and returns immediately.
 * Use cache_ll_l1_dcache_preload_wait_done() to wait for completion.
 *
 * @param cache_id  id of the cache in this type and level (0 or CACHE_LL_ID_ALL)
 * @param vaddr     start virtual address of the preload region
 * @param size      size of the preload region in bytes
 * @param ascending true: ascending (positive) order; false: descending (negative) order
 */
__attribute__((always_inline))
static inline void cache_ll_l1_dcache_preload(uint32_t cache_id, uint32_t vaddr, uint32_t size, bool ascending)
{
    const uint32_t order = ascending ? 0 : 1;
    // The L1 DCache is shared, so only id 0 (or ALL) selects it
    if ((cache_id == 0) || (cache_id == CACHE_LL_ID_ALL)) {
        Cache_Start_L1_DCache_Preload(vaddr, size, order);
    }
}
/**
 * @brief Wait until L1 DCache manual preload is done
 *
 * Busy-polls the DCache preload-done flag.
 *
 * @param cache_id id of the cache in this type and level (0 or CACHE_LL_ID_ALL)
 */
__attribute__((always_inline))
static inline void cache_ll_l1_dcache_preload_wait_done(uint32_t cache_id)
{
    const bool wait = (cache_id == 0) || (cache_id == CACHE_LL_ID_ALL);
    while (wait && (Cache_L1_DCache_Preload_Done() == 0)) {
    }
}
/**
 * @brief Set the cache preload strategy
 *
 * Writes the `undef_op` field of the L1 ICache/DCache control registers.
 * NOTE(review): the original "(no-op)" brief was stale — this function does
 * program hardware registers.
 *
 * @param cache_level level of the cache (1 or CACHE_LL_LEVEL_ALL)
 * @param type        see cache_type_t
 * @param cache_id    id of the cache in this type and level (unused: the
 *                    control registers are shared, not per-core)
 * @param strategy    preload strategy value written to the `undef_op` field
 */
__attribute__((always_inline))
static inline void cache_ll_preload_set_strategy(uint32_t cache_level, cache_type_t type, uint32_t cache_id, cache_ll_preload_strategy_t strategy)
{
    if (cache_level == 1 || cache_level == CACHE_LL_LEVEL_ALL) {
        switch (type) {
        case CACHE_TYPE_INSTRUCTION:
            CACHE.l1_icache_ctrl.l1_icache_undef_op = strategy;
            break;
        case CACHE_TYPE_DATA:
            CACHE.l1_dcache_ctrl.l1_dcache_undef_op = strategy;
            break;
        case CACHE_TYPE_ALL:
        default:
            // CACHE_TYPE_ALL (and any unknown type) applies to both caches
            CACHE.l1_icache_ctrl.l1_icache_undef_op = strategy;
            CACHE.l1_dcache_ctrl.l1_dcache_undef_op = strategy;
            break;
        }
    }
}
/**
 * @brief Preload cache
 *
 * Dispatches a manual preload to the L1 ICache and/or DCache, depending on
 * the requested cache type. Only level 1 caches exist on this target.
 *
 * @param cache_level level of the cache (1 or CACHE_LL_LEVEL_ALL)
 * @param type        see cache_type_t
 * @param cache_id    id of the cache in this type and level
 * @param vaddr       start virtual address of the preload region
 * @param size        size of the preload region in bytes
 * @param ascending   true: ascending order; false: descending order
 */
__attribute__((always_inline))
static inline void cache_ll_preload(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size, bool ascending)
{
    if (cache_level == 1 || cache_level == CACHE_LL_LEVEL_ALL) {
        // CACHE_TYPE_ALL (and any unknown type) targets both caches
        const bool preload_inst = (type != CACHE_TYPE_DATA);
        const bool preload_data = (type != CACHE_TYPE_INSTRUCTION);

        if (preload_inst) {
            cache_ll_l1_icache_preload(cache_id, vaddr, size, ascending);
        }
        if (preload_data) {
            cache_ll_l1_dcache_preload(cache_id, vaddr, size, ascending);
        }
    }
}
/**
 * @brief Wait until cache preload is done
 *
 * Waits on the L1 ICache and/or DCache preload-done flags, depending on the
 * requested cache type. Only level 1 caches exist on this target.
 *
 * @param cache_level level of the cache (1 or CACHE_LL_LEVEL_ALL)
 * @param type        see cache_type_t
 * @param cache_id    id of the cache in this type and level
 */
__attribute__((always_inline))
static inline void cache_ll_preload_wait_done(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
    if (cache_level == 1 || cache_level == CACHE_LL_LEVEL_ALL) {
        // CACHE_TYPE_ALL (and any unknown type) waits on both caches
        const bool wait_inst = (type != CACHE_TYPE_DATA);
        const bool wait_data = (type != CACHE_TYPE_INSTRUCTION);

        if (wait_inst) {
            cache_ll_l1_icache_preload_wait_done(cache_id);
        }
        if (wait_data) {
            cache_ll_l1_dcache_preload_wait_done(cache_id);
        }
    }
}
/*------------------------------------------------------------------------------
* Cache Line Size
*----------------------------------------------------------------------------*/
@@ -851,47 +848,6 @@ static inline uint32_t cache_ll_l1_dcache_get_line_size(uint32_t cache_id)
return size;
}
/**
* @brief Get L2 Cache line size, in bytes
*
* @param cache_id id of the cache in this type and level
*
* @return L2 Cache line size, in bytes
*/
__attribute__((always_inline))
static inline uint32_t cache_ll_l2_cache_get_line_size(uint32_t cache_id)
{
return 0;
}
/**
* @brief Set the preload strategy (no-op)
*/
__attribute__((always_inline))
static inline void cache_ll_preload_set_strategy(uint32_t cache_level, cache_type_t type, uint32_t cache_id, cache_ll_preload_strategy_t strategy)
{
(void)cache_level;
(void)type;
(void)cache_id;
(void)strategy;
}
/**
* @brief Preload cache
*/
__attribute__((always_inline))
static inline void cache_ll_preload(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size, bool ascending)
{
}
/**
* @brief Wait until cache preload is done
*/
__attribute__((always_inline))
static inline void cache_ll_preload_wait_done(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
}
/**
* @brief Get Cache line size, in bytes
*
@@ -904,12 +860,10 @@ static inline void cache_ll_preload_wait_done(uint32_t cache_level, cache_type_t
__attribute__((always_inline))
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
HAL_ASSERT(cache_level == 1 || cache_level == 2);
HAL_ASSERT(cache_level == 1);
uint32_t size = 0;
if (cache_level == 2) {
size = cache_ll_l2_cache_get_line_size(cache_id);
} else {
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
size = cache_ll_l1_icache_get_line_size(cache_id);
@@ -951,7 +905,7 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
mask = (cache_bus_mask_t)(mask | CACHE_BUS_DBUS0);
} else if (vaddr_start >= SOC_DRAM_PSRAM_ADDRESS_LOW && vaddr_end < SOC_DRAM_PSRAM_ADDRESS_HIGH) {
mask = (cache_bus_mask_t)(mask | CACHE_BUS_IBUS1);
mask = (cache_bus_mask_t)(mask | CACHE_BUS_DBUS1);
mask = (cache_bus_mask_t)(mask | CACHE_BUS_DBUS0);
} else {
HAL_ASSERT(0);
}
@@ -960,41 +914,91 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
}
/**
* Enable the L1 Cache Buses
* Enable the Cache Buses
*
* @param cache_id cache ID (when l1 cache is per core)
* @param bus_id bus ID
* @param mask To know which buses should be enabled
*/
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
#endif
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
{
//not used, for compatibility
HAL_ASSERT((mask & (CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
uint32_t ibus_mask = 0;
if (bus_id == 0) {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
} else {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
}
REG_CLR_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
uint32_t dbus_mask = 0;
if (bus_id == 1) {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
} else {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
}
REG_CLR_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
}
/**
* Returns enabled buses for a given core
*
* @param cache_id cache ID (when l1 cache is per core)
*
* @return State of enabled buses
*/
__attribute__((always_inline))
static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
{
cache_bus_mask_t mask = (cache_bus_mask_t)0;
uint32_t ibus_mask = REG_READ(CACHE_L1_ICACHE_CTRL_REG);
if (cache_id == 0) {
mask = (cache_bus_mask_t)(mask | ((!(ibus_mask & CACHE_L1_ICACHE_SHUT_IBUS0)) ? CACHE_BUS_IBUS0 : 0));
} else if (cache_id == 1) {
mask = (cache_bus_mask_t)(mask | ((!(ibus_mask & CACHE_L1_ICACHE_SHUT_IBUS1)) ? CACHE_BUS_IBUS0 : 0));
}
uint32_t dbus_mask = REG_READ(CACHE_L1_DCACHE_CTRL_REG);
if (cache_id == 0) {
mask = (cache_bus_mask_t)(mask | ((!(dbus_mask & CACHE_L1_DCACHE_SHUT_DBUS0)) ? CACHE_BUS_DBUS0 : 0));
} else if (cache_id == 1) {
mask = (cache_bus_mask_t)(mask | ((!(dbus_mask & CACHE_L1_DCACHE_SHUT_DBUS1)) ? CACHE_BUS_DBUS0 : 0));
}
return mask;
}
/**
* Disable the Cache Buses
*
* @param cache_id cache ID (when l1 cache is per core)
* @param bus_id bus ID
* @param mask To know which buses should be disabled
*/
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
{
//not used, for compatibility
}
//On esp32s31, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
/**
* @brief Get the buses of a particular cache that are mapped to a virtual address range
*
* @param cache_id cache ID
* @param vaddr_start virtual address start
* @param len vaddr length
*/
__attribute__((always_inline))
static inline cache_bus_mask_t cache_ll_l2_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
//not used, for compatibility
return CACHE_BUS_IBUS0;
uint32_t ibus_mask = 0;
if (bus_id == 0) {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
} else {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
}
REG_SET_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
uint32_t dbus_mask = 0;
if (bus_id == 1) {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
} else {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
}
REG_SET_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
}
/**
@@ -1013,20 +1017,27 @@ static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32
bool valid = false;
uint32_t vaddr_end = vaddr_start + len - 1;
if (vaddr_start >= SOC_IRAM0_ADDRESS_LOW && vaddr_end < SOC_IRAM0_ADDRESS_HIGH) {
if (((vaddr_start >= SOC_DRAM_FLASH_ADDRESS_LOW) && (vaddr_end < SOC_DRAM_FLASH_ADDRESS_HIGH)) ||
((vaddr_start >= SOC_DRAM_PSRAM_ADDRESS_LOW) && (vaddr_end < SOC_DRAM_PSRAM_ADDRESS_HIGH))) {
*out_level = 1;
*out_id = CACHE_LL_ID_ALL;
valid = true;
} else if (vaddr_start >= SOC_DRAM_FLASH_ADDRESS_LOW && vaddr_end < SOC_DRAM_PSRAM_ADDRESS_HIGH) {
//PSRAM vaddr is right after the FLASH vaddr
*out_level = 2;
*out_id = CACHE_LL_ID_ALL;
valid = true;
}
return valid;
}
/**
 * Enable the Cache fail tracer
 *
 * The fail tracer must be enabled for the cache access-error interrupt to
 * take effect (it is turned on during cache error interrupt init).
 *
 * @param cache_id cache ID (unused: the trace-enable bit is shared)
 * @param en enable / disable
 */
static inline void cache_ll_l1_enable_fail_tracer(uint32_t cache_id, bool en)
{
    CACHE.trace_ena.l1_cache_trace_ena = en;
}
/*------------------------------------------------------------------------------
* Interrupt
*----------------------------------------------------------------------------*/
@@ -1065,54 +1076,6 @@ static inline uint32_t cache_ll_l1_get_access_error_intr_status(uint32_t cache_i
return CACHE.l1_cache_acs_fail_int_st.val & mask;
}
/**
* @brief Enable L2 Cache access error interrupt
*
* @param cache_id Cache ID
* @param mask Interrupt mask
*/
static inline void cache_ll_l2_enable_access_error_intr(uint32_t cache_id, uint32_t mask)
{
CACHE.l2_cache_acs_fail_int_ena.val |= mask;
}
/**
* @brief Clear L2 Cache access error interrupt status
*
* @param cache_id Cache ID
* @param mask Interrupt mask
*/
static inline void cache_ll_l2_clear_access_error_intr(uint32_t cache_id, uint32_t mask)
{
CACHE.l2_cache_acs_fail_int_clr.val = mask;
}
/**
* @brief Get L2 Cache access error interrupt status
*
* @param cache_id Cache ID
* @param mask Interrupt mask
*
* @return Status mask
*/
static inline uint32_t cache_ll_l2_get_access_error_intr_status(uint32_t cache_id, uint32_t mask)
{
return CACHE.l2_cache_acs_fail_int_st.val & mask;
}
/**
* Returns enabled buses for a given core
*
* @param cache_id cache ID (when l1 cache is per core)
*
* @return State of enabled buses
*/
__attribute__((always_inline))
static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
{
return CACHE_BUS_IBUS0;
}
#ifdef __cplusplus
}
#endif
@@ -131,10 +131,6 @@ config SOC_ADC_PERIPH_NUM
int
default 2
config SOC_SHARED_IDCACHE_SUPPORTED
bool
default y
config SOC_CACHE_WRITEBACK_SUPPORTED
bool
default y
@@ -143,10 +139,6 @@ config SOC_CACHE_FREEZE_SUPPORTED
bool
default y
config SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
bool
default n
config SOC_CPU_CORES_NUM
int
default 2
@@ -111,11 +111,8 @@
#define SOC_ADC_PERIPH_NUM (2)
/*-------------------------- CACHE CAPS --------------------------------------*/
// TODO: [ESP32S31] IDF-14651
#define SOC_SHARED_IDCACHE_SUPPORTED 1 //Shared Cache for both instructions and data
#define SOC_CACHE_WRITEBACK_SUPPORTED 1
#define SOC_CACHE_FREEZE_SUPPORTED 1
#define SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE 0
/*-------------------------- CPU CAPS ----------------------------------------*/
#define SOC_CPU_CORES_NUM (2U)