feat(dma2d): add support for esp32s31

This commit is contained in:
Song Ruo Jing
2026-03-31 15:17:23 +08:00
parent 17f8e9c6d5
commit db8b921a13
14 changed files with 1136 additions and 197 deletions
+5 -2
View File
@@ -378,7 +378,6 @@ esp_err_t dma2d_acquire_pool(const dma2d_pool_config_t *config, dma2d_pool_handl
pre_alloc_group->rx_channel_reserved_mask = dma2d_rx_channel_reserved_mask[group_id];
pre_alloc_group->tx_periph_m2m_free_id_mask = DMA2D_LL_TX_CHANNEL_PERIPH_M2M_AVAILABLE_ID_MASK;
pre_alloc_group->rx_periph_m2m_free_id_mask = DMA2D_LL_RX_CHANNEL_PERIPH_M2M_AVAILABLE_ID_MASK;
pre_alloc_group->intr_priority = -1;
for (int i = 0; i < DMA2D_LL_GET(TX_CHANS_PER_INST); i++) {
pre_alloc_group->tx_chans[i] = &pre_alloc_tx_channels[i];
dma2d_tx_channel_t *tx_chan = pre_alloc_group->tx_chans[i];
@@ -713,7 +712,11 @@ esp_err_t dma2d_set_desc_addr(dma2d_channel_handle_t dma2d_chan, intptr_t desc_b
esp_err_t ret = ESP_OK;
ESP_GOTO_ON_FALSE_ISR(dma2d_chan && desc_base_addr, ESP_ERR_INVALID_ARG, err, TAG, "invalid argument");
// 2D-DMA descriptor addr needs 8-byte alignment and not in SPM (addr not in SPM is IDF restriction)
ESP_GOTO_ON_FALSE_ISR((desc_base_addr & 0x7) == 0 && !esp_ptr_in_spm((void *)desc_base_addr), ESP_ERR_INVALID_ARG, err, TAG, "invalid argument");
bool addr_in_spm = false;
#if SOC_MEM_SPM_SUPPORTED
addr_in_spm = esp_ptr_in_spm((void *)desc_base_addr);
#endif
ESP_GOTO_ON_FALSE_ISR((desc_base_addr & 0x7) == 0 && !addr_in_spm, ESP_ERR_INVALID_ARG, err, TAG, "invalid argument");
// When flash encryption is enabled, the descriptor must be in internal RAM because descriptor size is not 16-byte aligned, which breaks flash encryption alignment restriction
ESP_GOTO_ON_FALSE_ISR(!esp_efuse_is_flash_encryption_enabled() || esp_ptr_internal((void *)desc_base_addr), ESP_ERR_INVALID_ARG, err, TAG, "invalid argument");
@@ -1,2 +1,2 @@
| Supported Targets | ESP32-P4 |
| ----------------- | -------- |
| Supported Targets | ESP32-P4 | ESP32-S31 |
| ----------------- | -------- | --------- |
@@ -28,5 +28,6 @@ def test_dma2d(dut: Dut) -> None:
indirect=True,
)
@idf_parametrize('target', soc_filtered_targets('SOC_DMA2D_SUPPORTED == 1'), indirect=['target'])
@pytest.mark.temp_skip_ci(targets=['esp32s31'], reason='TODO: IDFCI-10377 no runner yet')
def test_dma2d_flash_encryption(dut: Dut) -> None:
    """Run all registered single-board DMA2D test cases on the DUT."""
    dut.run_all_single_board_cases()
@@ -0,0 +1,3 @@
CONFIG_SPIRAM=y
CONFIG_SPIRAM_MODE_OCT=y
CONFIG_SPIRAM_SPEED_250M=y
+2
View File
@@ -10,6 +10,8 @@
/**
 * @brief Initialize the 2D-DMA HAL context and apply the default memory power policy
 *
 * @param hal HAL context to initialize
 * @param group_id 2D-DMA group (instance) ID
 */
void dma2d_hal_init(dma2d_hal_context_t *hal, int group_id)
{
    hal->dev = DMA2D_LL_GET_HW(group_id);
    // Hand DMA2D memory power control over to the PMU, and select shut-down
    // as the behavior for the low power stage
    dma2d_ll_mem_power_by_pmu(hal->dev);
    dma2d_ll_mem_set_low_power_mode(hal->dev, DMA2D_LL_MEM_LP_MODE_SHUT_DOWN);
}
void dma2d_hal_tx_reset_channel(dma2d_hal_context_t *hal, uint32_t channel)
@@ -10,6 +10,7 @@
const dma2d_signal_conn_t dma2d_periph_signals = {
.groups = {
[0] = {
.module_name = "dma2d0",
.tx_irq_id = {
[0] = ETS_DMA2D_OUT_CH0_INTR_SOURCE,
[1] = ETS_DMA2D_OUT_CH1_INTR_SOURCE,
@@ -35,10 +35,6 @@
#define DMA2D_LL_RX_CHANS_PER_INST 2
#endif
#ifdef __cplusplus
extern "C" {
#endif
// 2D-DMA interrupts
#define DMA2D_LL_RX_EVENT_MASK (0x3FFF)
#define DMA2D_LL_TX_EVENT_MASK (0x1FFF)
@@ -92,6 +88,14 @@ extern "C" {
#define DMA2D_LL_DESC_ALIGNMENT 8 // Descriptor must be aligned to 8 bytes
#ifdef __cplusplus
extern "C" {
#endif
/// 2D-DMA memory low power modes (this target only supports shutting the memory down)
typedef enum {
    DMA2D_LL_MEM_LP_MODE_SHUT_DOWN, // power down memory during low power stage
} dma2d_ll_mem_lp_mode_t;
///////////////////////////////////// Common /////////////////////////////////////////
/**
* @brief Enable the bus clock for 2D-DMA module
@@ -141,6 +145,48 @@ static inline bool dma2d_ll_is_bus_clock_enabled(int group_id)
return HP_SYS_CLKRST.soc_clk_ctrl1.reg_dma2d_sys_clk_en;
}
/**
 * @brief Force power on the DMA2D memory block, regardless of the outside PMU logic
 *
 * @param dev Peripheral instance address
 */
static inline void dma2d_ll_mem_force_power_on(dma2d_dev_t *dev)
{
    // No-op on this target; kept so that the cross-target LL API stays uniform
    (void)dev;
}

/**
 * @brief Force the DMA2D memory block into low power mode, regardless of the outside PMU logic
 *
 * @param dev Peripheral instance address
 */
static inline void dma2d_ll_mem_force_low_power(dma2d_dev_t *dev)
{
    // No-op on this target; kept so that the cross-target LL API stays uniform
    (void)dev;
}

/**
 * @brief Power control the DMA2D memory block by the outside PMU logic
 *
 * @param dev Peripheral instance address
 */
static inline void dma2d_ll_mem_power_by_pmu(dma2d_dev_t *dev)
{
    // No-op on this target; kept so that the cross-target LL API stays uniform
    (void)dev;
}

/**
 * @brief Set low power mode for DMA2D memory block
 *
 * @param dev Peripheral instance address
 * @param mode DMA2D memory low power mode in low power stage
 */
static inline void dma2d_ll_mem_set_low_power_mode(dma2d_dev_t *dev, dma2d_ll_mem_lp_mode_t mode)
{
    (void)dev;
    // Only SHUT_DOWN exists in this target's dma2d_ll_mem_lp_mode_t, so any other
    // value is a programming error
    HAL_ASSERT(mode == DMA2D_LL_MEM_LP_MODE_SHUT_DOWN);
}
/**
* @brief Enable 2D-DMA module
*/
@@ -0,0 +1,27 @@
/*
* SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include "hal/dma2d_periph.h"
#include "soc/interrupts.h"
/**
 * 2D-DMA peripheral signal table: maps each channel of the single 2D-DMA group
 * on this target (4 TX/OUT channels, 3 RX/IN channels) to its interrupt source.
 */
const dma2d_signal_conn_t dma2d_periph_signals = {
    .groups = {
        [0] = {
            .module_name = "dma2d0",
            .tx_irq_id = {
                [0] = ETS_DMA2D_OUT_CH0_INTR_SOURCE,
                [1] = ETS_DMA2D_OUT_CH1_INTR_SOURCE,
                [2] = ETS_DMA2D_OUT_CH2_INTR_SOURCE,
                [3] = ETS_DMA2D_OUT_CH3_INTR_SOURCE,
            },
            .rx_irq_id = {
                [0] = ETS_DMA2D_IN_CH0_INTR_SOURCE,
                [1] = ETS_DMA2D_IN_CH1_INTR_SOURCE,
                [2] = ETS_DMA2D_IN_CH2_INTR_SOURCE,
            }
        }
    }
};
@@ -0,0 +1,935 @@
/*
* SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <stddef.h> /* Required for NULL constant */
#include <stdint.h>
#include <stdbool.h>
#include "hal/dma2d_types.h"
#include "soc/dma2d_channel.h"
#include "soc/dma2d_struct.h"
#include "hal/misc.h"
#include "hal/assert.h"
#include "hal/config.h"
#include "soc/soc.h"
#include "soc/hp_sys_clkrst_struct.h"
#include "soc/hp_system_struct.h"
#define DMA2D_LL_INST_NUM 1
#define DMA2D_LL_GET_HW(id) (((id) == 0) ? (&DMA2D) : NULL)
#define DMA2D_LL_GET(_attr) DMA2D_LL_ ## _attr
#define DMA2D_LL_TX_CHANS_PER_INST 4
#define DMA2D_LL_RX_CHANS_PER_INST 3
#define DMA2D_LL_RX_EVENT_MASK (0x3FFF)
#define DMA2D_LL_TX_EVENT_MASK (0x1FFF)
#define DMA2D_LL_EVENT_RX_DESC_TASK_OVF (1<<13)
#define DMA2D_LL_EVENT_RX_FIFO_REORDER_UDF (1<<12)
#define DMA2D_LL_EVENT_RX_FIFO_REORDER_OVF (1<<11)
#define DMA2D_LL_EVENT_RX_DESC_EMPTY (1<<10)
#define DMA2D_LL_EVENT_RX_FIFO_L3_UDF (1<<9)
#define DMA2D_LL_EVENT_RX_FIFO_L3_OVF (1<<8)
#define DMA2D_LL_EVENT_RX_FIFO_L2_UDF (1<<7)
#define DMA2D_LL_EVENT_RX_FIFO_L2_OVF (1<<6)
#define DMA2D_LL_EVENT_RX_FIFO_L1_UDF (1<<5)
#define DMA2D_LL_EVENT_RX_FIFO_L1_OVF (1<<4)
#define DMA2D_LL_EVENT_RX_DESC_ERROR (1<<3)
#define DMA2D_LL_EVENT_RX_ERR_EOF (1<<2)
#define DMA2D_LL_EVENT_RX_SUC_EOF (1<<1)
#define DMA2D_LL_EVENT_RX_DONE (1<<0)
#define DMA2D_LL_EVENT_TX_DESC_TASK_OVF (1<<12)
#define DMA2D_LL_EVENT_TX_FIFO_REORDER_UDF (1<<11)
#define DMA2D_LL_EVENT_TX_FIFO_REORDER_OVF (1<<10)
#define DMA2D_LL_EVENT_TX_FIFO_L3_UDF (1<<9)
#define DMA2D_LL_EVENT_TX_FIFO_L3_OVF (1<<8)
#define DMA2D_LL_EVENT_TX_FIFO_L2_UDF (1<<7)
#define DMA2D_LL_EVENT_TX_FIFO_L2_OVF (1<<6)
#define DMA2D_LL_EVENT_TX_FIFO_L1_UDF (1<<5)
#define DMA2D_LL_EVENT_TX_FIFO_L1_OVF (1<<4)
#define DMA2D_LL_EVENT_TX_TOTAL_EOF (1<<3)
#define DMA2D_LL_EVENT_TX_DESC_ERROR (1<<2)
#define DMA2D_LL_EVENT_TX_EOF (1<<1)
#define DMA2D_LL_EVENT_TX_DONE (1<<0)
#define DMA2D_LL_TX_CHANNEL_SUPPORT_RO_MASK (0U | BIT0)
#define DMA2D_LL_TX_CHANNEL_SUPPORT_CSC_MASK (0U | BIT0 | BIT1 | BIT2 | BIT3)
#define DMA2D_LL_RX_CHANNEL_SUPPORT_RO_MASK (0U | BIT0)
#define DMA2D_LL_RX_CHANNEL_SUPPORT_CSC_MASK (0U | BIT0)
#define DMA2D_LL_TX_CHANNEL_PERIPH_M2M_AVAILABLE_ID_MASK (0xF0)
#define DMA2D_LL_RX_CHANNEL_PERIPH_M2M_AVAILABLE_ID_MASK (0xF8)
#define DMA2D_LL_CHANNEL_PERIPH_NO_CHOICE (7)
#define DMA2D_LL_CHANNEL_PERIPH_SEL_BIT_WIDTH (3)
#define DMA2D_LL_DESC_ALIGNMENT 8
#ifdef __cplusplus
extern "C" {
#endif
/// 2D-DMA memory low power modes
// NOTE(review): enumerator values are written directly to the hardware LP mode
// register field (see dma2d_ll_mem_set_low_power_mode) — ordering is assumed to
// match the hardware encoding; verify against the TRM
typedef enum {
    DMA2D_LL_MEM_LP_MODE_DEEP_SLEEP, // memory will enter deep sleep during low power stage, keep memory data
    DMA2D_LL_MEM_LP_MODE_LIGHT_SLEEP, // memory will enter light sleep during low power stage, keep memory data
    DMA2D_LL_MEM_LP_MODE_SHUT_DOWN, // memory will be powered down during low power stage
    DMA2D_LL_MEM_LP_MODE_DISABLE, // disable the low power stage
} dma2d_ll_mem_lp_mode_t;

///////////////////////////////////// Common /////////////////////////////////////////

/**
 * @brief Enable the bus clock for 2D-DMA module
 */
static inline void dma2d_ll_enable_bus_clock(int group_id, bool enable)
{
    (void)group_id; // single instance on this target
    HP_SYS_CLKRST.dma2d_ctrl0.reg_dma2d_sys_clk_en = enable;
}

/**
 * @brief Reset the 2D-DMA module
 */
static inline void dma2d_ll_reset_register(int group_id)
{
    (void)group_id; // single instance on this target
    // Pulse the reset bit: assert then release
    HP_SYS_CLKRST.dma2d_ctrl0.reg_dma2d_rst_en = 1;
    HP_SYS_CLKRST.dma2d_ctrl0.reg_dma2d_rst_en = 0;
}

/**
 * @brief Check if the bus clock is enabled for the DMA module
 */
__attribute__((always_inline))
static inline bool dma2d_ll_is_bus_clock_enabled(int group_id)
{
    (void) group_id;
    return HP_SYS_CLKRST.dma2d_ctrl0.reg_dma2d_sys_clk_en;
}
/**
 * @brief Force power on the DMA2D memory block, regardless of the outside PMU logic
 *
 * @param dev Peripheral instance address (unused — memory power control lives in HP_SYSTEM)
 */
static inline void dma2d_ll_mem_force_power_on(dma2d_dev_t *dev)
{
    (void)dev; // silence -Wunused-parameter, consistent with the file's other helpers
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_force_ctrl = 1; // take manual control from the PMU
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_lp_en = 0;      // keep memory powered
}
/**
 * @brief Force the DMA2D memory block into low power mode, regardless of the outside PMU logic
 *
 * @param dev Peripheral instance address (unused — memory power control lives in HP_SYSTEM)
 */
static inline void dma2d_ll_mem_force_low_power(dma2d_dev_t *dev)
{
    (void)dev; // silence -Wunused-parameter, consistent with the file's other helpers
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_force_ctrl = 1; // take manual control from the PMU
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_lp_en = 1;      // enter low power mode now
}
/**
 * @brief Power control the DMA2D memory block by the outside PMU logic
 *
 * @param dev Peripheral instance address (unused — memory power control lives in HP_SYSTEM)
 */
static inline void dma2d_ll_mem_power_by_pmu(dma2d_dev_t *dev)
{
    (void)dev; // silence -Wunused-parameter, consistent with the file's other helpers
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_force_ctrl = 0; // hand control back to the PMU
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_lp_en = 0;
}
/**
 * @brief Set low power mode for DMA2D memory block
 *
 * @param dev Peripheral instance address (unused — memory power control lives in HP_SYSTEM)
 * @param mode DMA2D memory low power mode in low power stage
 */
static inline void dma2d_ll_mem_set_low_power_mode(dma2d_dev_t *dev, dma2d_ll_mem_lp_mode_t mode)
{
    (void)dev; // silence -Wunused-parameter, consistent with the file's other helpers
    // NOTE(review): enum value is written to the register field directly — assumes
    // dma2d_ll_mem_lp_mode_t ordering matches the hardware encoding; verify against the TRM
    HP_SYSTEM.sys_dma2d_mem_lp_ctrl.sys_2ddma_mem_lp_mode = mode;
}
/**
 * @brief Enable 2D-DMA module
 *
 * @param dev Peripheral instance address
 * @param enable True to enable the module clock, false to disable
 */
static inline void dma2d_ll_hw_enable(dma2d_dev_t *dev, bool enable)
{
    dev->rst_conf.clk_en = enable;
    // Pulse-reset the AXI master read and write datapaths
    dev->rst_conf.axim_rd_rst = 1;
    dev->rst_conf.axim_rd_rst = 0;
    dev->rst_conf.axim_wr_rst = 1;
    dev->rst_conf.axim_wr_rst = 0;
    // Clear both arbiter configuration registers back to their default (all-zero) state
    dev->out_arb_config.val = 0;
    dev->in_arb_config.val = 0;
}
/**
 * @brief Helper function to get the register field value for different pixel bytes scramble order
 *
 * @param order Byte scramble order
 * @return Hardware selector value for the scramble register field; aborts on an unknown order
 */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_get_scramble_order_sel(dma2d_scramble_order_t order)
{
    switch (order) {
    case DMA2D_SCRAMBLE_ORDER_BYTE2_1_0:
        return 0;
    case DMA2D_SCRAMBLE_ORDER_BYTE2_0_1:
        return 1;
    case DMA2D_SCRAMBLE_ORDER_BYTE1_0_2:
        return 2;
    case DMA2D_SCRAMBLE_ORDER_BYTE1_2_0:
        return 3;
    case DMA2D_SCRAMBLE_ORDER_BYTE0_2_1:
        return 4;
    case DMA2D_SCRAMBLE_ORDER_BYTE0_1_2:
        return 5;
    default:
        // Unknown scramble order — programming error
        abort();
    }
}
/////////////////////////////////////// RX ///////////////////////////////////////////

/** @brief Get the RX channel interrupt status, masked to the valid RX event bits */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_get_interrupt_status(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_int_st.val & DMA2D_LL_RX_EVENT_MASK;
}

/** @brief Enable or disable the RX channel interrupts selected by mask (other bits untouched) */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_interrupt(dma2d_dev_t *dev, uint32_t channel, uint32_t mask, bool enable)
{
    if (enable) {
        dev->in_channel[channel].in_int_ena.val = dev->in_channel[channel].in_int_ena.val | (mask & DMA2D_LL_RX_EVENT_MASK);
    } else {
        dev->in_channel[channel].in_int_ena.val = dev->in_channel[channel].in_int_ena.val & ~(mask & DMA2D_LL_RX_EVENT_MASK);
    }
}

/** @brief Clear the RX channel interrupt status bits selected by mask */
__attribute__((always_inline))
static inline void dma2d_ll_rx_clear_interrupt_status(dma2d_dev_t *dev, uint32_t channel, uint32_t mask)
{
    dev->in_channel[channel].in_int_clr.val = (mask & DMA2D_LL_RX_EVENT_MASK);
}

/** @brief Get the address of the RX channel interrupt status register (for direct ISR access) */
static inline volatile void *dma2d_ll_rx_get_interrupt_status_reg(dma2d_dev_t *dev, uint32_t channel)
{
    return (volatile void *)(&dev->in_channel[channel].in_int_st);
}

/** @brief Enable or disable descriptor owner-bit checking on the RX channel */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_owner_check(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->in_channel[channel].in_conf0.in_check_owner_chn = enable;
}

/** @brief Enable or disable page-boundary wrap on the RX channel */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_page_bound_wrap(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->in_channel[channel].in_conf0.in_page_bound_en_chn = enable;
}

/**
 * @brief Set the RX channel memory data burst length
 *
 * @param length Burst length in bytes; must be one of 8/16/32/64/128, aborts otherwise
 */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_data_burst_length(dma2d_dev_t *dev, uint32_t channel, uint32_t length)
{
    uint32_t sel;
    // Map the byte length to the hardware selector encoding (8 -> 0 ... 128 -> 4)
    switch (length) {
    case 8:
        sel = 0;
        break;
    case 16:
        sel = 1;
        break;
    case 32:
        sel = 2;
        break;
    case 64:
        sel = 3;
        break;
    case 128:
        sel = 4;
        break;
    default:
        abort();
    }
    dev->in_channel[channel].in_conf0.in_mem_burst_length_chn = sel;
}

/** @brief Enable or disable burst reading of descriptors on the RX channel */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_descriptor_burst(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->in_channel[channel].in_conf0.indscr_burst_en_chn = enable;
}

/** @brief Reset the RX channel by pulsing its reset bit */
__attribute__((always_inline))
static inline void dma2d_ll_rx_reset_channel(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_conf0.in_rst_chn = 1;
    dev->in_channel[channel].in_conf0.in_rst_chn = 0;
}

/** @brief Check whether the RX channel is currently safe to reset */
__attribute__((always_inline))
static inline bool dma2d_ll_rx_is_reset_avail(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_state.in_reset_avail_chn;
}

/** @brief Abort (disable command issuing of) the RX channel */
__attribute__((always_inline))
static inline void dma2d_ll_rx_abort(dma2d_dev_t *dev, uint32_t channel, bool disable)
{
    dev->in_channel[channel].in_conf0.in_cmd_disable_chn = disable;
}

/** @brief Enable or disable the RX channel descriptor port mode */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_dscr_port(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->in_channel[channel].in_conf0.in_dscr_port_en_chn = enable;
}
/**
 * @brief Set the RX channel macro block size
 *
 * @param size Macro block size; aborts on an unknown value
 */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_macro_block_size(dma2d_dev_t *dev, uint32_t channel, dma2d_macro_block_size_t size)
{
    uint32_t sel;
    // Map the enum to the hardware selector encoding (NONE -> 3, 8x8 -> 0, 8x16 -> 1, 16x16 -> 2)
    switch (size) {
    case DMA2D_MACRO_BLOCK_SIZE_NONE:
        sel = 3;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_8_8:
        sel = 0;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_8_16:
        sel = 1;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_16_16:
        sel = 2;
        break;
    default:
        abort();
    }
    dev->in_channel[channel].in_conf0.in_macro_block_size_chn = sel;
}

/** @brief Pop one word from the RX channel FIFO and return it (pop first, then read) */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_pop_data(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_pop.infifo_pop_chn = 1;
    return dev->in_channel[channel].in_pop.infifo_rdata_chn;
}

/** @brief Set the base address of the RX channel descriptor chain */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_desc_addr(dma2d_dev_t *dev, uint32_t channel, uint32_t addr)
{
    dev->in_channel[channel].in_link_addr.inlink_addr_chn = addr;
}

/** @brief Start the RX channel descriptor fetch/transfer */
__attribute__((always_inline))
static inline void dma2d_ll_rx_start(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_link_conf.inlink_start_chn = 1;
}

/** @brief Stop the RX channel */
__attribute__((always_inline))
static inline void dma2d_ll_rx_stop(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_link_conf.inlink_stop_chn = 1;
}

/** @brief Restart the RX channel from the descriptor following the last one */
__attribute__((always_inline))
static inline void dma2d_ll_rx_restart(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_link_conf.inlink_restart_chn = 1;
}

/** @brief Set the owner value that hardware writes back to completed RX descriptors */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_auto_return_owner(dma2d_dev_t *dev, uint32_t channel, int owner)
{
    dev->in_channel[channel].in_link_conf.inlink_auto_ret_chn = owner;
}

/** @brief Check whether the RX descriptor state machine is parked (idle) */
__attribute__((always_inline))
static inline bool dma2d_ll_rx_is_desc_fsm_idle(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_link_conf.inlink_park_chn;
}

/** @brief Check whether the RX channel state machine is idle (state == 0) */
__attribute__((always_inline))
static inline bool dma2d_ll_rx_is_fsm_idle(dma2d_dev_t *dev, uint32_t channel)
{
    return (dev->in_channel[channel].in_state.in_state_chn == 0);
}

/** @brief Get the descriptor address at the last successful RX EOF event */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_get_success_eof_desc_addr(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_suc_eof_des_addr.val;
}

/** @brief Get the descriptor address at the last RX error-EOF event */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_get_error_eof_desc_addr(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_err_eof_des_addr.val;
}

/** @brief Get the address of the currently prefetched RX descriptor */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_get_prefetched_desc_addr(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_dscr.val;
}

/** @brief Connect the RX channel to a peripheral; enables memory-transfer mode for M2M */
__attribute__((always_inline))
static inline void dma2d_ll_rx_connect_to_periph(dma2d_dev_t *dev, uint32_t channel, dma2d_trigger_peripheral_t periph, int periph_id)
{
    dev->in_channel[channel].in_peri_sel.in_peri_sel_chn = periph_id;
    dev->in_channel[channel].in_conf0.in_mem_trans_en_chn = (periph == DMA2D_TRIG_PERIPH_M2M);
}

/** @brief Disconnect the RX channel from any peripheral and leave memory-transfer mode */
__attribute__((always_inline))
static inline void dma2d_ll_rx_disconnect_from_periph(dma2d_dev_t *dev, uint32_t channel)
{
    dev->in_channel[channel].in_peri_sel.in_peri_sel_chn = DMA2D_LL_CHANNEL_PERIPH_NO_CHOICE;
    dev->in_channel[channel].in_conf0.in_mem_trans_en_chn = false;
}

/** @brief Enable or disable the RX channel reorder function */
__attribute__((always_inline))
static inline void dma2d_ll_rx_enable_reorder(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->in_channel[channel].in_conf0.in_reorder_en_chn = enable;
}
/**
 * @brief Configure color space conversion (CSC) on the RX channel
 *
 * Selects the input/output path and, when matrix processing is enabled, loads
 * the 3x4 YUV-to-RGB coefficient table into the channel's color parameter registers.
 *
 * @param csc_sel RX CSC option; aborts on an unknown value
 */
__attribute__((always_inline))
static inline void dma2d_ll_rx_configure_color_space_conv(dma2d_dev_t *dev, uint32_t channel, dma2d_csc_rx_option_t csc_sel)
{
    // Only channel 0 has CSC capability on this target (see DMA2D_LL_RX_CHANNEL_SUPPORT_CSC_MASK)
    HAL_ASSERT(channel == 0);
    uint32_t input_sel = 7; // 7 = CSC bypass
    bool proc_en = false;   // whether the 3-byte matrix processing step is enabled
    int (*table)[4] = NULL; // coefficient table to load when proc_en is set
    uint32_t output_sel = 1;
    const int color_space_conv_param_yuv2rgb_bt601_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_YUV2RGB_BT601;
    const int color_space_conv_param_yuv2rgb_bt709_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_YUV2RGB_BT709;
    switch (csc_sel) {
    case DMA2D_CSC_RX_NONE:
        input_sel = 7;
        break;
    case DMA2D_CSC_RX_SCRAMBLE:
        input_sel = 1;
        proc_en = false;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV422_TO_YUV444:
    case DMA2D_CSC_RX_YUV420_TO_YUV444:
        input_sel = 0;
        proc_en = false;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV444_TO_YUV422:
        input_sel = 0;
        proc_en = false;
        output_sel = 2;
        break;
    case DMA2D_CSC_RX_YUV444_TO_YUV420:
    case DMA2D_CSC_RX_YUV422_TO_YUV420:
        input_sel = 0;
        proc_en = false;
        output_sel = 3;
        break;
    case DMA2D_CSC_RX_YUV420_TO_RGB888_601:
    case DMA2D_CSC_RX_YUV422_TO_RGB888_601:
        input_sel = 0;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV420_TO_RGB565_601:
    case DMA2D_CSC_RX_YUV422_TO_RGB565_601:
        input_sel = 0;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
        output_sel = 0;
        break;
    case DMA2D_CSC_RX_YUV420_TO_RGB888_709:
    case DMA2D_CSC_RX_YUV422_TO_RGB888_709:
        input_sel = 0;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV420_TO_RGB565_709:
    case DMA2D_CSC_RX_YUV422_TO_RGB565_709:
        input_sel = 0;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
        output_sel = 0;
        break;
    case DMA2D_CSC_RX_YUV444_TO_RGB888_601:
        input_sel = 2;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV444_TO_RGB565_601:
        input_sel = 2;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
        output_sel = 0;
        break;
    case DMA2D_CSC_RX_YUV444_TO_RGB888_709:
        input_sel = 2;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
        output_sel = 1;
        break;
    case DMA2D_CSC_RX_YUV444_TO_RGB565_709:
        input_sel = 2;
        proc_en = true;
        table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
        output_sel = 0;
        break;
    default:
        abort();
    }
    dev->in_channel[channel].in_color_convert.in_color_input_sel_chn = input_sel;
    dev->in_channel[channel].in_color_convert.in_color_3b_proc_en_chn = proc_en;
    dev->in_channel[channel].in_color_convert.in_color_output_sel_chn = output_sel;
    if (proc_en) {
        HAL_ASSERT(table);
        // Stage the packed register image locally, then write it out word-by-word
        typeof(dev->in_channel[channel].in_color_param_group) color_param_group;
        color_param_group.param_h.a = table[0][0];
        color_param_group.param_h.b = table[0][1];
        color_param_group.param_h.c = table[0][2];
        color_param_group.param_h.d = table[0][3];
        color_param_group.param_m.a = table[1][0];
        color_param_group.param_m.b = table[1][1];
        color_param_group.param_m.c = table[1][2];
        color_param_group.param_m.d = table[1][3];
        color_param_group.param_l.a = table[2][0];
        color_param_group.param_l.b = table[2][1];
        color_param_group.param_l.c = table[2][2];
        color_param_group.param_l.d = table[2][3];
        dev->in_channel[channel].in_color_param_group.param_h.val[0] = color_param_group.param_h.val[0];
        dev->in_channel[channel].in_color_param_group.param_h.val[1] = color_param_group.param_h.val[1];
        dev->in_channel[channel].in_color_param_group.param_m.val[0] = color_param_group.param_m.val[0];
        dev->in_channel[channel].in_color_param_group.param_m.val[1] = color_param_group.param_m.val[1];
        dev->in_channel[channel].in_color_param_group.param_l.val[0] = color_param_group.param_l.val[0];
        dev->in_channel[channel].in_color_param_group.param_l.val[1] = color_param_group.param_l.val[1];
    }
}

/** @brief Set the pre-CSC byte scramble order on the RX channel (channel 0 only) */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_csc_pre_scramble(dma2d_dev_t *dev, uint32_t channel, dma2d_scramble_order_t order)
{
    HAL_ASSERT(channel == 0);
    dev->in_channel[channel].in_scramble.in_scramble_sel_pre_chn = dma2d_ll_get_scramble_order_sel(order);
}

/** @brief Set the post-CSC byte scramble order on the RX channel (channel 0 only) */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_csc_post_scramble(dma2d_dev_t *dev, uint32_t channel, dma2d_scramble_order_t order)
{
    HAL_ASSERT(channel == 0);
    dev->in_channel[channel].in_scramble.in_scramble_sel_post_chn = dma2d_ll_get_scramble_order_sel(order);
}
/** @brief Enable or disable weighted arbitration among the RX channels */
static inline void dma2d_ll_rx_enable_arb_weight(dma2d_dev_t *dev, bool enable)
{
    dev->in_arb_config.in_weight_en = enable;
}

/** @brief Set the RX arbitration timeout counter value */
static inline void dma2d_ll_rx_set_arb_timeout(dma2d_dev_t *dev, uint32_t timeout_num)
{
    HAL_FORCE_MODIFY_U32_REG_FIELD(dev->in_arb_config, in_arb_timeout_num, timeout_num);
}

/** @brief Set the RX channel's arbitration token number */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_arb_token_num(dma2d_dev_t *dev, uint32_t channel, uint32_t token_num)
{
    dev->in_channel[channel].in_arb.in_arb_token_num_chn = token_num;
}

/** @brief Get the RX channel's arbitration token number */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_rx_get_arb_token_num(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->in_channel[channel].in_arb.in_arb_token_num_chn;
}

/** @brief Set the RX channel's arbitration priority */
__attribute__((always_inline))
static inline void dma2d_ll_rx_set_priority(dma2d_dev_t *dev, uint32_t channel, uint32_t priority)
{
    dev->in_channel[channel].in_arb.in_arb_priority_chn = priority;
}
/////////////////////////////////////// TX ///////////////////////////////////////////

/** @brief Get the TX channel interrupt status, masked to the valid TX event bits */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_tx_get_interrupt_status(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->out_channel[channel].out_int_st.val & DMA2D_LL_TX_EVENT_MASK;
}

/** @brief Enable or disable the TX channel interrupts selected by mask (other bits untouched) */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_interrupt(dma2d_dev_t *dev, uint32_t channel, uint32_t mask, bool enable)
{
    if (enable) {
        dev->out_channel[channel].out_int_ena.val = dev->out_channel[channel].out_int_ena.val | (mask & DMA2D_LL_TX_EVENT_MASK);
    } else {
        dev->out_channel[channel].out_int_ena.val = dev->out_channel[channel].out_int_ena.val & ~(mask & DMA2D_LL_TX_EVENT_MASK);
    }
}

/** @brief Clear the TX channel interrupt status bits selected by mask */
__attribute__((always_inline))
static inline void dma2d_ll_tx_clear_interrupt_status(dma2d_dev_t *dev, uint32_t channel, uint32_t mask)
{
    dev->out_channel[channel].out_int_clr.val = (mask & DMA2D_LL_TX_EVENT_MASK);
}

/** @brief Get the address of the TX channel interrupt status register (for direct ISR access) */
static inline volatile void *dma2d_ll_tx_get_interrupt_status_reg(dma2d_dev_t *dev, uint32_t channel)
{
    return (volatile void *)(&dev->out_channel[channel].out_int_st);
}

/** @brief Enable or disable descriptor owner-bit checking on the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_owner_check(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_check_owner_chn = enable;
}

/** @brief Set the TX channel EOF mode flag */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_eof_mode(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_eof_mode_chn = enable;
}

/** @brief Enable or disable automatic descriptor write-back on the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_auto_write_back(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_auto_wrback_chn = enable;
}

/** @brief Enable or disable page-boundary wrap on the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_page_bound_wrap(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_page_bound_en_chn = enable;
}

/**
 * @brief Set the TX channel memory data burst length
 *
 * @param length Burst length in bytes; must be one of 8/16/32/64/128, aborts otherwise
 */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_data_burst_length(dma2d_dev_t *dev, uint32_t channel, uint32_t length)
{
    uint32_t sel;
    // Map the byte length to the hardware selector encoding (8 -> 0 ... 128 -> 4)
    switch (length) {
    case 8:
        sel = 0;
        break;
    case 16:
        sel = 1;
        break;
    case 32:
        sel = 2;
        break;
    case 64:
        sel = 3;
        break;
    case 128:
        sel = 4;
        break;
    default:
        abort();
    }
    dev->out_channel[channel].out_conf0.out_mem_burst_length_chn = sel;
}

/** @brief Enable or disable burst reading of descriptors on the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_descriptor_burst(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.outdscr_burst_en_chn = enable;
}

/** @brief Reset the TX channel by pulsing its reset bit */
__attribute__((always_inline))
static inline void dma2d_ll_tx_reset_channel(dma2d_dev_t *dev, uint32_t channel)
{
    dev->out_channel[channel].out_conf0.out_rst_chn = 1;
    dev->out_channel[channel].out_conf0.out_rst_chn = 0;
}

/** @brief Check whether the TX channel is currently safe to reset */
__attribute__((always_inline))
static inline bool dma2d_ll_tx_is_reset_avail(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->out_channel[channel].out_state.out_reset_avail_chn;
}

/** @brief Abort (disable command issuing of) the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_abort(dma2d_dev_t *dev, uint32_t channel, bool disable)
{
    dev->out_channel[channel].out_conf0.out_cmd_disable_chn = disable;
}

/** @brief Enable or disable the TX channel descriptor port mode */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_dscr_port(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_dscr_port_en_chn = enable;
}

/** @brief Set the TX channel descriptor-port block size (horizontal and vertical) */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_dscr_port_block_size(dma2d_dev_t *dev, uint32_t channel, uint32_t blk_h, uint32_t blk_v)
{
    dev->out_channel[channel].out_dscr_port_blk.out_dscr_port_blk_h_chn = blk_h;
    dev->out_channel[channel].out_dscr_port_blk.out_dscr_port_blk_v_chn = blk_v;
}
/**
 * @brief Set the TX channel macro block size
 *
 * @param size Macro block size; aborts on an unknown value
 */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_macro_block_size(dma2d_dev_t *dev, uint32_t channel, dma2d_macro_block_size_t size)
{
    uint32_t sel;
    // Map the enum to the hardware selector encoding (NONE -> 3, 8x8 -> 0, 8x16 -> 1, 16x16 -> 2)
    switch (size) {
    case DMA2D_MACRO_BLOCK_SIZE_NONE:
        sel = 3;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_8_8:
        sel = 0;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_8_16:
        sel = 1;
        break;
    case DMA2D_MACRO_BLOCK_SIZE_16_16:
        sel = 2;
        break;
    default:
        abort();
    }
    dev->out_channel[channel].out_conf0.out_macro_block_size_chn = sel;
}

/** @brief Push one word of data into the TX channel FIFO (write data first, then push) */
__attribute__((always_inline))
static inline void dma2d_ll_tx_push_data(dma2d_dev_t *dev, uint32_t channel, uint32_t data)
{
    dev->out_channel[channel].out_push.outfifo_wdata_chn = data;
    dev->out_channel[channel].out_push.outfifo_push_chn = 1;
}

/** @brief Set the base address of the TX channel descriptor chain */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_desc_addr(dma2d_dev_t *dev, uint32_t channel, uint32_t addr)
{
    dev->out_channel[channel].out_link_addr.outlink_addr_chn = addr;
}

/** @brief Start the TX channel descriptor fetch/transfer */
__attribute__((always_inline))
static inline void dma2d_ll_tx_start(dma2d_dev_t *dev, uint32_t channel)
{
    dev->out_channel[channel].out_link_conf.outlink_start_chn = 1;
}

/** @brief Stop the TX channel */
__attribute__((always_inline))
static inline void dma2d_ll_tx_stop(dma2d_dev_t *dev, uint32_t channel)
{
    dev->out_channel[channel].out_link_conf.outlink_stop_chn = 1;
}

/** @brief Restart the TX channel from the descriptor following the last one */
__attribute__((always_inline))
static inline void dma2d_ll_tx_restart(dma2d_dev_t *dev, uint32_t channel)
{
    dev->out_channel[channel].out_link_conf.outlink_restart_chn = 1;
}

/** @brief Check whether the TX descriptor state machine is parked (idle) */
__attribute__((always_inline))
static inline bool dma2d_ll_tx_is_desc_fsm_idle(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->out_channel[channel].out_link_conf.outlink_park_chn;
}

/** @brief Check whether the TX channel state machine is idle (state == 0) */
__attribute__((always_inline))
static inline bool dma2d_ll_tx_is_fsm_idle(dma2d_dev_t *dev, uint32_t channel)
{
    return (dev->out_channel[channel].out_state.out_state_chn == 0);
}

/** @brief Get the descriptor address at the last TX EOF event */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_tx_get_eof_desc_addr(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->out_channel[channel].out_eof_des_addr.val;
}

/** @brief Get the address of the currently prefetched TX descriptor */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_tx_get_prefetched_desc_addr(dma2d_dev_t *dev, uint32_t channel)
{
    return dev->out_channel[channel].out_dscr.val;
}

/** @brief Connect the TX channel to a peripheral (TX side has no M2M enable bit, periph unused) */
__attribute__((always_inline))
static inline void dma2d_ll_tx_connect_to_periph(dma2d_dev_t *dev, uint32_t channel, dma2d_trigger_peripheral_t periph, int periph_id)
{
    (void)periph;
    dev->out_channel[channel].out_peri_sel.out_peri_sel_chn = periph_id;
}

/** @brief Disconnect the TX channel from any peripheral */
__attribute__((always_inline))
static inline void dma2d_ll_tx_disconnect_from_periph(dma2d_dev_t *dev, uint32_t channel)
{
    dev->out_channel[channel].out_peri_sel.out_peri_sel_chn = DMA2D_LL_CHANNEL_PERIPH_NO_CHOICE;
}

/** @brief Enable or disable the TX channel reorder function */
__attribute__((always_inline))
static inline void dma2d_ll_tx_enable_reorder(dma2d_dev_t *dev, uint32_t channel, bool enable)
{
    dev->out_channel[channel].out_conf0.out_reorder_en_chn = enable;
}
__attribute__((always_inline))
static inline void dma2d_ll_tx_configure_color_space_conv(dma2d_dev_t *dev, uint32_t channel, dma2d_csc_tx_option_t csc_sel)
{
uint32_t input_sel = 7;
bool proc_en = false;
int (*table)[4] = NULL;
uint32_t output_sel = 2;
const int color_space_conv_param_rgb2yuv_bt601_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_RGB2YUV_BT601;
const int color_space_conv_param_rgb2yuv_bt709_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_RGB2YUV_BT709;
const int color_space_conv_param_yuv2rgb_bt601_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_YUV2RGB_BT601;
const int color_space_conv_param_yuv2rgb_bt709_table[3][4] = DMA2D_COLOR_SPACE_CONV_PARAM_YUV2RGB_BT709;
switch (csc_sel) {
case DMA2D_CSC_TX_NONE:
input_sel = 7;
break;
case DMA2D_CSC_TX_SCRAMBLE:
input_sel = 2;
proc_en = false;
output_sel = 2;
break;
case DMA2D_CSC_TX_RGB888_TO_RGB565:
input_sel = 3;
proc_en = false;
output_sel = 0;
break;
case DMA2D_CSC_TX_RGB565_TO_RGB888:
input_sel = 0;
proc_en = false;
output_sel = 2;
break;
case DMA2D_CSC_TX_RGB888_TO_YUV444_601:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_rgb2yuv_bt601_table;
output_sel = 2;
break;
case DMA2D_CSC_TX_RGB888_TO_YUV444_709:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_rgb2yuv_bt709_table;
output_sel = 2;
break;
case DMA2D_CSC_TX_RGB888_TO_YUV422_601:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_rgb2yuv_bt601_table;
output_sel = 1;
break;
case DMA2D_CSC_TX_RGB888_TO_YUV422_709:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_rgb2yuv_bt709_table;
output_sel = 1;
break;
case DMA2D_CSC_TX_YUV444_TO_RGB888_601:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
output_sel = 2;
break;
case DMA2D_CSC_TX_YUV444_TO_RGB888_709:
input_sel = 3;
proc_en = true;
table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
output_sel = 2;
break;
case DMA2D_CSC_TX_YUV422_TO_RGB888_601:
input_sel = 1;
proc_en = true;
table = (int (*)[4])color_space_conv_param_yuv2rgb_bt601_table;
output_sel = 2;
break;
case DMA2D_CSC_TX_YUV422_TO_RGB888_709:
input_sel = 1;
proc_en = true;
table = (int (*)[4])color_space_conv_param_yuv2rgb_bt709_table;
output_sel = 2;
break;
default:
abort();
}
dev->out_channel[channel].out_color_convert.out_color_input_sel_chn = input_sel;
dev->out_channel[channel].out_color_convert.out_color_3b_proc_en_chn = proc_en;
dev->out_channel[channel].out_color_convert.out_color_output_sel_chn = output_sel;
if (proc_en) {
HAL_ASSERT(table);
typeof(dev->out_channel[channel].out_color_param_group) color_param_group;
color_param_group.param_h.a = table[0][0];
color_param_group.param_h.b = table[0][1];
color_param_group.param_h.c = table[0][2];
color_param_group.param_h.d = table[0][3];
color_param_group.param_m.a = table[1][0];
color_param_group.param_m.b = table[1][1];
color_param_group.param_m.c = table[1][2];
color_param_group.param_m.d = table[1][3];
color_param_group.param_l.a = table[2][0];
color_param_group.param_l.b = table[2][1];
color_param_group.param_l.c = table[2][2];
color_param_group.param_l.d = table[2][3];
dev->out_channel[channel].out_color_param_group.param_h.val[0] = color_param_group.param_h.val[0];
dev->out_channel[channel].out_color_param_group.param_h.val[1] = color_param_group.param_h.val[1];
dev->out_channel[channel].out_color_param_group.param_m.val[0] = color_param_group.param_m.val[0];
dev->out_channel[channel].out_color_param_group.param_m.val[1] = color_param_group.param_m.val[1];
dev->out_channel[channel].out_color_param_group.param_l.val[0] = color_param_group.param_l.val[0];
dev->out_channel[channel].out_color_param_group.param_l.val[1] = color_param_group.param_l.val[1];
}
}
/**
 * @brief Select the data scramble order applied before TX color space conversion.
 *
 * @param dev Peripheral register base address
 * @param channel TX channel number
 * @param order Desired scramble order; translated to the hardware selector
 *              value by dma2d_ll_get_scramble_order_sel()
 */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_csc_pre_scramble(dma2d_dev_t *dev, uint32_t channel, dma2d_scramble_order_t order)
{
    volatile dma2d_out_chn_reg_t *out_ch = &dev->out_channel[channel];
    out_ch->out_scramble.out_scramble_sel_pre_chn = dma2d_ll_get_scramble_order_sel(order);
}
/**
 * @brief Enable or disable weight-based arbitration for the TX channels.
 *
 * @param dev Peripheral register base address
 * @param enable true to arbitrate by per-channel weight, false to disable
 */
static inline void dma2d_ll_tx_enable_arb_weight(dma2d_dev_t *dev, bool enable)
{
    dev->out_arb_config.out_weight_en = enable ? 1 : 0;
}
// Set the TX arbiter timeout threshold shared by all TX channels.
// NOTE(review): units (likely bus clock cycles) and whether 0 disables the
// timeout are not visible here - confirm against the TRM.
// The HAL macro forces the field update to be done as a full 32-bit
// read-modify-write of out_arb_config.
static inline void dma2d_ll_tx_set_arb_timeout(dma2d_dev_t *dev, uint32_t timeout_num)
{
HAL_FORCE_MODIFY_U32_REG_FIELD(dev->out_arb_config, out_arb_timeout_num, timeout_num);
}
/**
 * @brief Program the arbitration token number of one TX channel.
 *
 * @param dev Peripheral register base address
 * @param channel TX channel number
 * @param token_num Token count granted to the channel by the arbiter
 */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_arb_token_num(dma2d_dev_t *dev, uint32_t channel, uint32_t token_num)
{
    volatile dma2d_out_chn_reg_t *out_ch = &dev->out_channel[channel];
    out_ch->out_arb.out_arb_token_num_chn = token_num;
}
/**
 * @brief Read back the arbitration token number of one TX channel.
 *
 * @param dev Peripheral register base address
 * @param channel TX channel number
 * @return Token count currently configured for the channel
 */
__attribute__((always_inline))
static inline uint32_t dma2d_ll_tx_get_arb_token_num(dma2d_dev_t *dev, uint32_t channel)
{
    uint32_t token_num = dev->out_channel[channel].out_arb.out_arb_token_num_chn;
    return token_num;
}
/**
 * @brief Set the arbitration priority of one TX channel.
 *
 * @param dev Peripheral register base address
 * @param channel TX channel number
 * @param priority Arbitration priority value for the channel
 */
__attribute__((always_inline))
static inline void dma2d_ll_tx_set_priority(dma2d_dev_t *dev, uint32_t channel, uint32_t priority)
{
    volatile dma2d_out_chn_reg_t *out_ch = &dev->out_channel[channel];
    out_ch->out_arb.out_arb_priority_chn = priority;
}
#ifdef __cplusplus
}
#endif
@@ -20,6 +20,7 @@ extern "C" {
#if SOC_HAS(DMA2D)
typedef struct {
struct {
const char *module_name; // module name
const int tx_irq_id[DMA2D_LL_GET(TX_CHANS_PER_INST)];
const int rx_irq_id[DMA2D_LL_GET(RX_CHANS_PER_INST)];
} groups[DMA2D_LL_GET(INST_NUM)];
@@ -27,6 +27,10 @@ config SOC_LP_AHB_GDMA_SUPPORTED
bool
default y
config SOC_DMA2D_SUPPORTED
bool
default y
config SOC_GPTIMER_SUPPORTED
bool
default y
@@ -0,0 +1,19 @@
/*
* SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
#pragma once
// The following macros are matched with the 2D-DMA peri_sel field peripheral selection ID
#define SOC_DMA2D_TRIG_PERIPH_JPEG_RX (0)
#define SOC_DMA2D_TRIG_PERIPH_PPA_SRM_RX (1)
#define SOC_DMA2D_TRIG_PERIPH_PPA_BLEND_RX (2)
#define SOC_DMA2D_TRIG_PERIPH_M2M_RX (-1) // Any value of 3 ~ 7, TX and RX do not have to use same ID value for M2M
#define SOC_DMA2D_TRIG_PERIPH_JPEG_TX (0)
#define SOC_DMA2D_TRIG_PERIPH_PPA_SRM_TX (1)
#define SOC_DMA2D_TRIG_PERIPH_PPA_BLEND_BG_TX (2)
#define SOC_DMA2D_TRIG_PERIPH_PPA_BLEND_FG_TX (3)
#define SOC_DMA2D_TRIG_PERIPH_M2M_TX (-1) // Any value of 4 ~ 7, TX and RX do not have to use same ID value for M2M
@@ -32,7 +32,7 @@
#define SOC_AHB_GDMA_SUPPORTED 1
#define SOC_AXI_GDMA_SUPPORTED 1
#define SOC_LP_AHB_GDMA_SUPPORTED 1
// #define SOC_DMA2D_SUPPORTED 1 // TODO: [ESP32S31] IDF-14762
#define SOC_DMA2D_SUPPORTED 1
#define SOC_GPTIMER_SUPPORTED 1
// #define SOC_PCNT_SUPPORTED 1 // TODO: [ESP32S31] IDF-14699
// #define SOC_LCDCAM_SUPPORTED 1 // TODO: [ESP32S31] IDF-14722
@@ -1,5 +1,5 @@
/**
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
@@ -1856,194 +1856,90 @@ typedef union {
} dma2d_in_peri_sel_chn_reg_t;
/** Type of in/out_color_param_h/m/l_chn register
 * Configures the rx/tx color convert parameter of channel n
 *
 * One row of the 3x4 color-space-conversion coefficient matrix, packed into
 * two 32-bit words: val[0] carries fields a/b, val[1] carries fields c/d.
 * The LL layer copies the yuv2rgb / rgb2yuv table entries into a, b, c, d.
 * NOTE(review): the fixed-point format and signedness of the 10/11/10/18-bit
 * fields are not visible here - confirm against the TRM before reuse.
 */
typedef union {
struct {
/* word 0: coefficients a (10-bit) and b (11-bit) */
struct {
uint32_t a : 10;
uint32_t b : 11;
uint32_t reserved21 : 11;
};
/* word 1: coefficient c (10-bit) and offset-sized field d (18-bit) */
struct {
uint32_t c : 10;
uint32_t d : 18;
uint32_t reserved60 : 4;
};
};
uint32_t val[2];
} dma2d_color_param_reg_t;
/* Flattened per-channel register banks as rendered here: TX (out) channels
 * 0-3, then RX (in) channels 0-2, ending with the color-conversion parameter
 * group registers param_h/m/l.
 * NOTE(review): the typedef name describes only the trailing param_h/m/l
 * group; the leading per-channel fields look like leftover content from the
 * old flat register map - verify against the generated register header.
 * Field order and every reserved_* pad define hardware offsets; do not
 * reorder or resize anything in this struct. */
typedef struct {
/* ---- TX channel 0 (full feature set: power-down conf, color convert, scramble, CSC params) ---- */
volatile dma2d_out_conf0_chn_reg_t out_conf0_ch0;
volatile dma2d_out_int_raw_chn_reg_t out_int_raw_ch0;
volatile dma2d_out_int_ena_chn_reg_t out_int_ena_ch0;
volatile dma2d_out_int_st_chn_reg_t out_int_st_ch0;
volatile dma2d_out_int_clr_chn_reg_t out_int_clr_ch0;
volatile dma2d_outfifo_status_chn_reg_t outfifo_status_ch0;
volatile dma2d_out_push_chn_reg_t out_push_ch0;
volatile dma2d_out_link_conf_chn_reg_t out_link_conf_ch0;
volatile dma2d_out_link_addr_chn_reg_t out_link_addr_ch0;
volatile dma2d_out_state_chn_reg_t out_state_ch0;
volatile dma2d_out_eof_des_addr_chn_reg_t out_eof_des_addr_ch0;
volatile dma2d_out_dscr_chn_reg_t out_dscr_ch0;
volatile dma2d_out_dscr_bf0_chn_reg_t out_dscr_bf0_ch0;
volatile dma2d_out_dscr_bf1_chn_reg_t out_dscr_bf1_ch0;
volatile dma2d_out_peri_sel_chn_reg_t out_peri_sel_ch0;
volatile dma2d_out_arb_chn_reg_t out_arb_ch0;
volatile dma2d_out_ro_status_chn_reg_t out_ro_status_ch0;
volatile dma2d_out_ro_pd_conf_chn_reg_t out_ro_pd_conf_ch0;
volatile dma2d_out_color_convert_chn_reg_t out_color_convert_ch0;
volatile dma2d_out_scramble_chn_reg_t out_scramble_ch0;
volatile dma2d_out_color_param0_chn_reg_t out_color_param0_ch0;
volatile dma2d_out_color_param1_chn_reg_t out_color_param1_ch0;
volatile dma2d_out_color_param2_chn_reg_t out_color_param2_ch0;
volatile dma2d_out_color_param3_chn_reg_t out_color_param3_ch0;
volatile dma2d_out_color_param4_chn_reg_t out_color_param4_ch0;
volatile dma2d_out_color_param5_chn_reg_t out_color_param5_ch0;
volatile dma2d_out_etm_conf_chn_reg_t out_etm_conf_ch0;
volatile dma2d_out_dscr_port_blk_chn_reg_t out_dscr_port_blk_ch0;
/* pad to the start of the TX channel 1 bank */
uint32_t reserved_070[36];
/* ---- TX channel 1 (no ro_pd_conf register; its slot is reserved_144) ---- */
volatile dma2d_out_conf0_chn_reg_t out_conf0_ch1;
volatile dma2d_out_int_raw_chn_reg_t out_int_raw_ch1;
volatile dma2d_out_int_ena_chn_reg_t out_int_ena_ch1;
volatile dma2d_out_int_st_chn_reg_t out_int_st_ch1;
volatile dma2d_out_int_clr_chn_reg_t out_int_clr_ch1;
volatile dma2d_outfifo_status_chn_reg_t outfifo_status_ch1;
volatile dma2d_out_push_chn_reg_t out_push_ch1;
volatile dma2d_out_link_conf_chn_reg_t out_link_conf_ch1;
volatile dma2d_out_link_addr_chn_reg_t out_link_addr_ch1;
volatile dma2d_out_state_chn_reg_t out_state_ch1;
volatile dma2d_out_eof_des_addr_chn_reg_t out_eof_des_addr_ch1;
volatile dma2d_out_dscr_chn_reg_t out_dscr_ch1;
volatile dma2d_out_dscr_bf0_chn_reg_t out_dscr_bf0_ch1;
volatile dma2d_out_dscr_bf1_chn_reg_t out_dscr_bf1_ch1;
volatile dma2d_out_peri_sel_chn_reg_t out_peri_sel_ch1;
volatile dma2d_out_arb_chn_reg_t out_arb_ch1;
volatile dma2d_out_ro_status_chn_reg_t out_ro_status_ch1;
uint32_t reserved_144;
volatile dma2d_out_color_convert_chn_reg_t out_color_convert_ch1;
volatile dma2d_out_scramble_chn_reg_t out_scramble_ch1;
volatile dma2d_out_color_param0_chn_reg_t out_color_param0_ch1;
volatile dma2d_out_color_param1_chn_reg_t out_color_param1_ch1;
volatile dma2d_out_color_param2_chn_reg_t out_color_param2_ch1;
volatile dma2d_out_color_param3_chn_reg_t out_color_param3_ch1;
volatile dma2d_out_color_param4_chn_reg_t out_color_param4_ch1;
volatile dma2d_out_color_param5_chn_reg_t out_color_param5_ch1;
volatile dma2d_out_etm_conf_chn_reg_t out_etm_conf_ch1;
volatile dma2d_out_dscr_port_blk_chn_reg_t out_dscr_port_blk_ch1;
/* pad to the start of the TX channel 2 bank */
uint32_t reserved_170[36];
/* ---- TX channel 2 (same layout as channel 1) ---- */
volatile dma2d_out_conf0_chn_reg_t out_conf0_ch2;
volatile dma2d_out_int_raw_chn_reg_t out_int_raw_ch2;
volatile dma2d_out_int_ena_chn_reg_t out_int_ena_ch2;
volatile dma2d_out_int_st_chn_reg_t out_int_st_ch2;
volatile dma2d_out_int_clr_chn_reg_t out_int_clr_ch2;
volatile dma2d_outfifo_status_chn_reg_t outfifo_status_ch2;
volatile dma2d_out_push_chn_reg_t out_push_ch2;
volatile dma2d_out_link_conf_chn_reg_t out_link_conf_ch2;
volatile dma2d_out_link_addr_chn_reg_t out_link_addr_ch2;
volatile dma2d_out_state_chn_reg_t out_state_ch2;
volatile dma2d_out_eof_des_addr_chn_reg_t out_eof_des_addr_ch2;
volatile dma2d_out_dscr_chn_reg_t out_dscr_ch2;
volatile dma2d_out_dscr_bf0_chn_reg_t out_dscr_bf0_ch2;
volatile dma2d_out_dscr_bf1_chn_reg_t out_dscr_bf1_ch2;
volatile dma2d_out_peri_sel_chn_reg_t out_peri_sel_ch2;
volatile dma2d_out_arb_chn_reg_t out_arb_ch2;
volatile dma2d_out_ro_status_chn_reg_t out_ro_status_ch2;
uint32_t reserved_244;
volatile dma2d_out_color_convert_chn_reg_t out_color_convert_ch2;
volatile dma2d_out_scramble_chn_reg_t out_scramble_ch2;
volatile dma2d_out_color_param0_chn_reg_t out_color_param0_ch2;
volatile dma2d_out_color_param1_chn_reg_t out_color_param1_ch2;
volatile dma2d_out_color_param2_chn_reg_t out_color_param2_ch2;
volatile dma2d_out_color_param3_chn_reg_t out_color_param3_ch2;
volatile dma2d_out_color_param4_chn_reg_t out_color_param4_ch2;
volatile dma2d_out_color_param5_chn_reg_t out_color_param5_ch2;
volatile dma2d_out_etm_conf_chn_reg_t out_etm_conf_ch2;
volatile dma2d_out_dscr_port_blk_chn_reg_t out_dscr_port_blk_ch2;
/* pad to the start of the TX channel 3 bank */
uint32_t reserved_270[36];
/* ---- TX channel 3 (same layout as channel 1; larger trailing pad) ---- */
volatile dma2d_out_conf0_chn_reg_t out_conf0_ch3;
volatile dma2d_out_int_raw_chn_reg_t out_int_raw_ch3;
volatile dma2d_out_int_ena_chn_reg_t out_int_ena_ch3;
volatile dma2d_out_int_st_chn_reg_t out_int_st_ch3;
volatile dma2d_out_int_clr_chn_reg_t out_int_clr_ch3;
volatile dma2d_outfifo_status_chn_reg_t outfifo_status_ch3;
volatile dma2d_out_push_chn_reg_t out_push_ch3;
volatile dma2d_out_link_conf_chn_reg_t out_link_conf_ch3;
volatile dma2d_out_link_addr_chn_reg_t out_link_addr_ch3;
volatile dma2d_out_state_chn_reg_t out_state_ch3;
volatile dma2d_out_eof_des_addr_chn_reg_t out_eof_des_addr_ch3;
volatile dma2d_out_dscr_chn_reg_t out_dscr_ch3;
volatile dma2d_out_dscr_bf0_chn_reg_t out_dscr_bf0_ch3;
volatile dma2d_out_dscr_bf1_chn_reg_t out_dscr_bf1_ch3;
volatile dma2d_out_peri_sel_chn_reg_t out_peri_sel_ch3;
volatile dma2d_out_arb_chn_reg_t out_arb_ch3;
volatile dma2d_out_ro_status_chn_reg_t out_ro_status_ch3;
uint32_t reserved_344;
volatile dma2d_out_color_convert_chn_reg_t out_color_convert_ch3;
volatile dma2d_out_scramble_chn_reg_t out_scramble_ch3;
volatile dma2d_out_color_param0_chn_reg_t out_color_param0_ch3;
volatile dma2d_out_color_param1_chn_reg_t out_color_param1_ch3;
volatile dma2d_out_color_param2_chn_reg_t out_color_param2_ch3;
volatile dma2d_out_color_param3_chn_reg_t out_color_param3_ch3;
volatile dma2d_out_color_param4_chn_reg_t out_color_param4_ch3;
volatile dma2d_out_color_param5_chn_reg_t out_color_param5_ch3;
volatile dma2d_out_etm_conf_chn_reg_t out_etm_conf_ch3;
volatile dma2d_out_dscr_port_blk_chn_reg_t out_dscr_port_blk_ch3;
/* pad to the start of the RX channel 0 bank */
uint32_t reserved_370[100];
/* ---- RX channel 0 (full feature set incl. power-down conf, color convert, scramble, CSC params) ---- */
volatile dma2d_in_conf0_chn_reg_t in_conf0_ch0;
volatile dma2d_in_int_raw_chn_reg_t in_int_raw_ch0;
volatile dma2d_in_int_ena_chn_reg_t in_int_ena_ch0;
volatile dma2d_in_int_st_chn_reg_t in_int_st_ch0;
volatile dma2d_in_int_clr_chn_reg_t in_int_clr_ch0;
volatile dma2d_infifo_status_chn_reg_t infifo_status_ch0;
volatile dma2d_in_pop_chn_reg_t in_pop_ch0;
volatile dma2d_in_link_conf_chn_reg_t in_link_conf_ch0;
volatile dma2d_in_link_addr_chn_reg_t in_link_addr_ch0;
volatile dma2d_in_state_chn_reg_t in_state_ch0;
volatile dma2d_in_suc_eof_des_addr_chn_reg_t in_suc_eof_des_addr_ch0;
volatile dma2d_in_err_eof_des_addr_chn_reg_t in_err_eof_des_addr_ch0;
volatile dma2d_in_dscr_chn_reg_t in_dscr_ch0;
volatile dma2d_in_dscr_bf0_chn_reg_t in_dscr_bf0_ch0;
volatile dma2d_in_dscr_bf1_chn_reg_t in_dscr_bf1_ch0;
volatile dma2d_in_peri_sel_chn_reg_t in_peri_sel_ch0;
volatile dma2d_in_arb_chn_reg_t in_arb_ch0;
volatile dma2d_in_ro_status_chn_reg_t in_ro_status_ch0;
volatile dma2d_in_ro_pd_conf_chn_reg_t in_ro_pd_conf_ch0;
volatile dma2d_in_color_convert_chn_reg_t in_color_convert_ch0;
volatile dma2d_in_scramble_chn_reg_t in_scramble_ch0;
volatile dma2d_in_color_param0_chn_reg_t in_color_param0_ch0;
volatile dma2d_in_color_param1_chn_reg_t in_color_param1_ch0;
volatile dma2d_in_color_param2_chn_reg_t in_color_param2_ch0;
volatile dma2d_in_color_param3_chn_reg_t in_color_param3_ch0;
volatile dma2d_in_color_param4_chn_reg_t in_color_param4_ch0;
volatile dma2d_in_color_param5_chn_reg_t in_color_param5_ch0;
volatile dma2d_in_etm_conf_chn_reg_t in_etm_conf_ch0;
/* pad to the start of the RX channel 1 bank */
uint32_t reserved_570[36];
/* ---- RX channel 1 (lacks the pd/color-convert/scramble/param registers of ch0) ---- */
volatile dma2d_in_conf0_chn_reg_t in_conf0_ch1;
volatile dma2d_in_int_raw_chn_reg_t in_int_raw_ch1;
volatile dma2d_in_int_ena_chn_reg_t in_int_ena_ch1;
volatile dma2d_in_int_st_chn_reg_t in_int_st_ch1;
volatile dma2d_in_int_clr_chn_reg_t in_int_clr_ch1;
volatile dma2d_infifo_status_chn_reg_t infifo_status_ch1;
volatile dma2d_in_pop_chn_reg_t in_pop_ch1;
volatile dma2d_in_link_conf_chn_reg_t in_link_conf_ch1;
volatile dma2d_in_link_addr_chn_reg_t in_link_addr_ch1;
volatile dma2d_in_state_chn_reg_t in_state_ch1;
volatile dma2d_in_suc_eof_des_addr_chn_reg_t in_suc_eof_des_addr_ch1;
volatile dma2d_in_err_eof_des_addr_chn_reg_t in_err_eof_des_addr_ch1;
volatile dma2d_in_dscr_chn_reg_t in_dscr_ch1;
volatile dma2d_in_dscr_bf0_chn_reg_t in_dscr_bf0_ch1;
volatile dma2d_in_dscr_bf1_chn_reg_t in_dscr_bf1_ch1;
volatile dma2d_in_peri_sel_chn_reg_t in_peri_sel_ch1;
volatile dma2d_in_arb_chn_reg_t in_arb_ch1;
volatile dma2d_in_ro_status_chn_reg_t in_ro_status_ch1;
uint32_t reserved_648[9];
volatile dma2d_in_etm_conf_chn_reg_t in_etm_conf_ch1;
/* pad to the start of the RX channel 2 bank */
uint32_t reserved_670[36];
/* ---- RX channel 2 (same layout as RX channel 1) ---- */
volatile dma2d_in_conf0_chn_reg_t in_conf0_ch2;
volatile dma2d_in_int_raw_chn_reg_t in_int_raw_ch2;
volatile dma2d_in_int_ena_chn_reg_t in_int_ena_ch2;
volatile dma2d_in_int_st_chn_reg_t in_int_st_ch2;
volatile dma2d_in_int_clr_chn_reg_t in_int_clr_ch2;
volatile dma2d_infifo_status_chn_reg_t infifo_status_ch2;
volatile dma2d_in_pop_chn_reg_t in_pop_ch2;
volatile dma2d_in_link_conf_chn_reg_t in_link_conf_ch2;
volatile dma2d_in_link_addr_chn_reg_t in_link_addr_ch2;
volatile dma2d_in_state_chn_reg_t in_state_ch2;
volatile dma2d_in_suc_eof_des_addr_chn_reg_t in_suc_eof_des_addr_ch2;
volatile dma2d_in_err_eof_des_addr_chn_reg_t in_err_eof_des_addr_ch2;
volatile dma2d_in_dscr_chn_reg_t in_dscr_ch2;
volatile dma2d_in_dscr_bf0_chn_reg_t in_dscr_bf0_ch2;
volatile dma2d_in_dscr_bf1_chn_reg_t in_dscr_bf1_ch2;
volatile dma2d_in_peri_sel_chn_reg_t in_peri_sel_ch2;
volatile dma2d_in_arb_chn_reg_t in_arb_ch2;
volatile dma2d_in_ro_status_chn_reg_t in_ro_status_ch2;
uint32_t reserved_748[9];
volatile dma2d_in_etm_conf_chn_reg_t in_etm_conf_ch2;
uint32_t reserved_770[164];
/* ---- Color-conversion parameter group: high/middle/low rows of the CSC matrix ---- */
volatile dma2d_color_param_reg_t param_h;
volatile dma2d_color_param_reg_t param_m;
volatile dma2d_color_param_reg_t param_l;
} dma2d_color_param_group_chn_reg_t;
/* Register bank of one TX (out) 2D-DMA channel, accessed via
 * dma2d_dev_t::out_channel[].  Members marked "only exist on channel0" are
 * reserved address space on the other channels per the register map notes;
 * reserved_out pads the bank so consecutive array elements land on the
 * hardware channel stride.  Field order defines offsets - do not reorder. */
typedef struct {
volatile dma2d_out_conf0_chn_reg_t out_conf0;
volatile dma2d_out_int_raw_chn_reg_t out_int_raw;
volatile dma2d_out_int_ena_chn_reg_t out_int_ena;
volatile dma2d_out_int_st_chn_reg_t out_int_st;
volatile dma2d_out_int_clr_chn_reg_t out_int_clr;
volatile dma2d_outfifo_status_chn_reg_t outfifo_status;
volatile dma2d_out_push_chn_reg_t out_push;
volatile dma2d_out_link_conf_chn_reg_t out_link_conf;
volatile dma2d_out_link_addr_chn_reg_t out_link_addr;
volatile dma2d_out_state_chn_reg_t out_state;
volatile dma2d_out_eof_des_addr_chn_reg_t out_eof_des_addr;
volatile dma2d_out_dscr_chn_reg_t out_dscr;
volatile dma2d_out_dscr_bf0_chn_reg_t out_dscr_bf0;
volatile dma2d_out_dscr_bf1_chn_reg_t out_dscr_bf1;
volatile dma2d_out_peri_sel_chn_reg_t out_peri_sel;
volatile dma2d_out_arb_chn_reg_t out_arb;
volatile dma2d_out_ro_status_chn_reg_t out_ro_status;
volatile dma2d_out_ro_pd_conf_chn_reg_t out_ro_pd_conf; /* only exist on channel0 */
volatile dma2d_out_color_convert_chn_reg_t out_color_convert;
volatile dma2d_out_scramble_chn_reg_t out_scramble;
/* CSC coefficient rows param_h/m/l, written by the LL color-convert setup */
volatile dma2d_color_param_group_chn_reg_t out_color_param_group;
volatile dma2d_out_etm_conf_chn_reg_t out_etm_conf;
volatile dma2d_out_dscr_port_blk_chn_reg_t out_dscr_port_blk;
uint32_t reserved_out[36];
} dma2d_out_chn_reg_t;
/* Register bank of one RX (in) 2D-DMA channel, accessed via
 * dma2d_dev_t::in_channel[].  Members marked "only exist on channel0" are
 * reserved address space on channels 1..n per the register map notes;
 * reserved_in pads the bank to the hardware channel stride.
 * Field order defines offsets - do not reorder. */
typedef struct {
volatile dma2d_in_conf0_chn_reg_t in_conf0;
volatile dma2d_in_int_raw_chn_reg_t in_int_raw;
volatile dma2d_in_int_ena_chn_reg_t in_int_ena;
volatile dma2d_in_int_st_chn_reg_t in_int_st;
volatile dma2d_in_int_clr_chn_reg_t in_int_clr;
volatile dma2d_infifo_status_chn_reg_t infifo_status;
volatile dma2d_in_pop_chn_reg_t in_pop;
volatile dma2d_in_link_conf_chn_reg_t in_link_conf;
volatile dma2d_in_link_addr_chn_reg_t in_link_addr;
volatile dma2d_in_state_chn_reg_t in_state;
volatile dma2d_in_suc_eof_des_addr_chn_reg_t in_suc_eof_des_addr;
volatile dma2d_in_err_eof_des_addr_chn_reg_t in_err_eof_des_addr;
volatile dma2d_in_dscr_chn_reg_t in_dscr;
volatile dma2d_in_dscr_bf0_chn_reg_t in_dscr_bf0;
volatile dma2d_in_dscr_bf1_chn_reg_t in_dscr_bf1;
volatile dma2d_in_peri_sel_chn_reg_t in_peri_sel;
volatile dma2d_in_arb_chn_reg_t in_arb;
volatile dma2d_in_ro_status_chn_reg_t in_ro_status;
volatile dma2d_in_ro_pd_conf_chn_reg_t in_ro_pd_conf; /* only exist on channel0 */
volatile dma2d_in_color_convert_chn_reg_t in_color_convert; /* only exist on channel0 */
volatile dma2d_in_scramble_chn_reg_t in_scramble; /* only exist on channel0 */
volatile dma2d_color_param_group_chn_reg_t in_color_param_group; /* only exist on channel0 */
volatile dma2d_in_etm_conf_chn_reg_t in_etm_conf;
uint32_t reserved_in[36];
} dma2d_in_chn_reg_t;
typedef struct dma2d_dev_t {
volatile dma2d_out_chn_reg_t out_channel[4];
uint32_t reserved_400[64];
volatile dma2d_in_chn_reg_t in_channel[3];
uint32_t reserved_800[128];
volatile dma2d_axi_err_reg_t axi_err;
volatile dma2d_rst_conf_reg_t rst_conf;
volatile dma2d_intr_mem_start_addr_reg_t intr_mem_start_addr;
@@ -2058,6 +1954,7 @@ typedef struct {
volatile dma2d_date_reg_t date;
} dma2d_dev_t;
extern dma2d_dev_t DMA2D;
#ifndef __cplusplus
_Static_assert(sizeof(dma2d_dev_t) == 0xa30, "Invalid size of dma2d_dev_t structure");