Merge branch 'dev/ble-log-202603_v5.2' into 'release/v5.2'

dev: BLE Log 202603 (5.2)

See merge request espressif/esp-idf!47273
This commit is contained in:
Island
2026-04-08 10:32:35 +08:00
80 changed files with 7372 additions and 1771 deletions
+12 -14
View File
@@ -32,6 +32,7 @@ set(common_include_dirs
common/btc/profile/esp/include
common/hci_log/include
common/ble_log/include
common/ble_log/deprecated/include
)
set(ble_mesh_include_dirs
@@ -152,8 +153,7 @@ if(CONFIG_BT_ENABLED)
"common/osi/osi.c"
"common/osi/semaphore.c"
"porting/mem/bt_osi_mem.c"
"common/ble_log/ble_log_spi_out.c"
"common/ble_log/ble_log_uhci_out.c"
"common/ble_log/deprecated/ble_log_spi_out.c"
)
# BLE Log Module
@@ -1004,6 +1004,16 @@ idf_component_register(SRCS "${srcs}"
PRIV_REQUIRES "${bt_priv_requires}"
LDFRAGMENTS "${ldscripts}")
# UART redir wrap flags — needed whenever BLE Log uses UART DMA on port 0,
# regardless of whether BLE controller is enabled.
if(DEFINED CONFIG_BLE_LOG_PRPH_UART_DMA_PORT)
if(CONFIG_BLE_LOG_PRPH_UART_DMA_PORT EQUAL 0)
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_tx_chars")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes_with_break")
endif()
endif()
if(CONFIG_BLE_COMPRESSED_LOG_ENABLE)
if(LOG_COMPRESSION_TARGET)
add_dependencies(${COMPONENT_LIB} ${LOG_COMPRESSION_TARGET})
@@ -1037,18 +1047,6 @@ if(CONFIG_BT_ENABLED)
if(CONFIG_BT_LE_CONTROLLER_LOG_WRAP_PANIC_HANDLER_ENABLE)
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=esp_panic_handler")
endif()
if(DEFINED CONFIG_BLE_LOG_PRPH_UART_DMA_PORT)
if(CONFIG_BLE_LOG_PRPH_UART_DMA_PORT EQUAL 0)
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_tx_chars")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes_with_break")
endif()
endif()
if(CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED)
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_tx_chars")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes")
target_link_libraries(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=uart_write_bytes_with_break")
endif()
if(CONFIG_IDF_TARGET_ESP32C6)
add_prebuilt_library(libble_app "controller/lib_${target}/${target}-bt-lib/esp32c6/libble_app.a"
REQUIRES esp_phy)
+165 -247
View File
@@ -35,256 +35,174 @@ choice BT_SMP_CRYPTO_STACK
endchoice
menu "BLE Log"
menu "BT Logs"
menu "Log Sources"
menuconfig BT_LE_CONTROLLER_LOG_ENABLED
depends on SOC_ESP_NIMBLE_CONTROLLER
bool "Enable Controller logs"
default n
choice BT_LE_CONTROLLER_LOG_OUTPUT_MODE
depends on BT_LE_CONTROLLER_LOG_ENABLED
prompt "Controller log output mode"
default BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2 if BLE_LOG_ENABLED
default BT_LE_CTRL_LEGACY_LOG_MODE_ENABLED
config BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
depends on BLE_LOG_ENABLED
bool "BLE Log v2 mode"
help
Utilize BLE Log v2 for controller log
config BT_LE_CTRL_LEGACY_LOG_MODE_ENABLED
depends on !BLE_LOG_ENABLED
bool "Legacy log mode (Deprecated)"
endchoice
if BT_LE_CONTROLLER_LOG_ENABLED && BT_LE_CTRL_LEGACY_LOG_MODE_ENABLED
config BT_LE_CONTROLLER_LOG_CTRL_ENABLED
bool "Enable controller link layer logs"
default y
config BT_LE_CONTROLLER_LOG_HCI_ENABLED
bool "Enable controller HCI logs"
default y
config BT_LE_CONTROLLER_LOG_DUMP_ONLY
bool "Controller log dump mode only"
default y
help
Only operate in dump mode. Logs are cached internally only,
not output asynchronously.
config BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
bool "Output controller logs to SPI bus (Experimental)"
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
select BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Output ble controller logs to SPI bus
config BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
bool "Store controller logs to flash (Experimental)"
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
default n
help
Store ble controller logs to flash memory.
config BT_LE_CONTROLLER_LOG_PARTITION_SIZE
int "Controller log partition size (multiples of 4K)"
depends on BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
default 65536
help
The size of the BLE controller log partition shall be a multiple of 4K.
The name of log partition shall be "bt_ctrl_log".
The partition type shall be ESP_PARTITION_TYPE_DATA.
The partition sub_type shall be ESP_PARTITION_SUBTYPE_ANY.
config BT_LE_LOG_CTRL_BUF1_SIZE
int "First controller log buffer size"
default 4096
help
Configure the size of the first BLE controller LOG buffer.
config BT_LE_LOG_CTRL_BUF2_SIZE
int "Second controller log buffer size"
default 1024
help
Configure the size of the second BLE controller LOG buffer.
config BT_LE_LOG_HCI_BUF_SIZE
int "HCI log buffer size"
default 4096
help
Configure the size of the BLE HCI LOG buffer.
endif
config BT_LE_CONTROLLER_LOG_OUTPUT_LEVEL
int "Controller log output level"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 5
default 2 if BT_LOG_CRITICAL_ONLY_CTRL
default 1
help
The output level of controller log.
config BT_LE_CONTROLLER_LOG_MOD_OUTPUT_SWITCH
hex "Controller log module output switch"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 0xFFFFFFFF
default 0xFFFFFFFF
help
Bitmask to enable/disable logging for individual controller
modules. 0xFFFFFFFF enables all modules.
config BT_LE_CONTROLLER_LOG_WRAP_PANIC_HANDLER_ENABLE
bool "Enable wrap panic handler"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Wrap esp_panic_handler to get controller logs when PC pointer exception crashes.
config BT_LE_CONTROLLER_LOG_TASK_WDT_USER_HANDLER_ENABLE
bool "Enable esp_task_wdt_isr_user_handler implementation"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Implement esp_task_wdt_isr_user_handler to get controller logs when task wdt issue is triggered.
endmenu
menuconfig BT_LOG_CRITICAL_ONLY
bool "Enable bandwidth-optimized log mode (critical logs only)"
default n
depends on !BT_STACK_NO_LOG
select BLE_LOG_ENABLED
select BLE_COMPRESSED_LOG_ENABLE
help
Enable bandwidth-optimized logging for the BLE Log Async Output
system. When enabled, only high-severity logs are captured and
log encoding is applied to reduce UART/SPI DMA bandwidth usage.
Each stack component below can be independently enabled.
Requires a DMA-backed output peripheral — configure in:
BLE Log Module → BLE Log peripheral choice.
if BT_LOG_CRITICAL_ONLY
config BT_LOG_CRITICAL_ONLY_CTRL
bool "Controller: bandwidth-optimized logging"
depends on SOC_ESP_NIMBLE_CONTROLLER || BT_CTRL_RUN_IN_FLASH_ONLY
select BT_LE_CONTROLLER_LOG_ENABLED if SOC_ESP_NIMBLE_CONTROLLER
select BT_CTRL_LE_LOG_EN if !SOC_ESP_NIMBLE_CONTROLLER
default y
help
Enable controller log output via the async transport with
a reduced output level for bandwidth optimization.
The controller log level defaults to 2 when active.
config BT_LOG_CRITICAL_ONLY_HOST
bool "Host: bandwidth-optimized logging"
select BLE_HOST_COMPRESSED_LOG_ENABLE if BT_BLUEDROID_ENABLED
default y
help
Enable host stack log encoding via the async transport.
For Bluedroid, the per-level compression options control
which severity levels are encoded — configure in:
Settings of BLE Log Compression → BLE Host log compression.
config BT_LOG_CRITICAL_ONLY_MESH
bool "Mesh: bandwidth-optimized logging"
depends on SOC_BLE_MESH_SUPPORTED && BLE_MESH
select BLE_MESH_COMPRESSED_LOG_ENABLE
default y
help
Enable mesh log encoding via the async transport.
The per-level compression options are configured in:
Settings of BLE Log Compression → BLE Mesh log compression.
endif # BT_LOG_CRITICAL_ONLY
source "$IDF_PATH/components/bt/common/ble_log/Kconfig.in"
endmenu
config BT_BLE_LOG_SPI_OUT_ENABLED
bool "Output ble logs to SPI bus (Experimental)"
default n
help
Output ble logs to SPI bus
config BT_BLE_LOG_SPI_OUT_UL_TASK_BUF_SIZE
int "SPI transaction buffer size for upper layer task logs"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default 512
help
SPI transaction buffer size for upper layer task logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HCI_ENABLED
bool "Enable HCI log output to SPI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Enable logging of HCI packets to the SPI bus when BLE SPI log output is enabled.
config BT_BLE_LOG_SPI_OUT_HCI_BUF_SIZE
int "SPI transaction buffer size for HCI logs"
depends on BT_BLE_LOG_SPI_OUT_HCI_ENABLED
default 1024
help
SPI transaction buffer size for HCI logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HCI_TASK_CNT
int "HCI task count"
depends on BT_BLE_LOG_SPI_OUT_HCI_ENABLED
default 1
help
HCI task count
config BT_BLE_LOG_SPI_OUT_HOST_ENABLED
bool "Enable Host log output to SPI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
This configuration applies to the logs of both Bluedroid Host and NimBLE Host.
When BLE SPI log output is enabled, this option allows host logs to be transmitted via SPI.
config BT_BLE_LOG_SPI_OUT_HOST_BUF_SIZE
int "SPI transaction buffer size for host logs"
depends on BT_BLE_LOG_SPI_OUT_HOST_ENABLED
default 1024
help
SPI transaction buffer size for host logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HOST_TASK_CNT
int "Host task count"
depends on BT_BLE_LOG_SPI_OUT_HOST_ENABLED
default 2
help
Host task count.
config BT_BLE_LOG_SPI_OUT_LL_ENABLED
bool "Enable Controller log output to SPI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
depends on BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
default n
help
Enable controller log output to SPI bus.
config BT_BLE_LOG_SPI_OUT_LL_TASK_BUF_SIZE
int "SPI transaction buffer size for lower layer task logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 1024
help
SPI transaction buffer size for lower layer task logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LL_ISR_BUF_SIZE
int "SPI transaction buffer size for lower layer ISR logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 512
help
SPI transaction buffer size for lower layer ISR logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LL_HCI_BUF_SIZE
int "SPI transaction buffer size for lower layer HCI logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 512
help
SPI transaction buffer size for lower layer HCI logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_MOSI_IO_NUM
int "GPIO number of SPI MOSI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default 0
help
GPIO number of SPI MOSI
config BT_BLE_LOG_SPI_OUT_SCLK_IO_NUM
int "GPIO number of SPI SCLK"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default 1
help
GPIO number of SPI SCLK
config BT_BLE_LOG_SPI_OUT_CS_IO_NUM
int "GPIO number of SPI CS"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default 2
help
GPIO number of SPI CS
config BT_BLE_LOG_SPI_OUT_TS_SYNC_ENABLED
bool "Enable ble log & logic analyzer log time sync"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default y
help
Enable ble log & logic analyzer log time sync
config BT_BLE_LOG_SPI_OUT_SYNC_IO_NUM
int "GPIO number of SYNC IO"
depends on BT_BLE_LOG_SPI_OUT_TS_SYNC_ENABLED
default 3
help
GPIO number of SYNC IO
config BT_BLE_LOG_SPI_OUT_FLUSH_TIMER_ENABLED
bool "Enable periodic buffer flush out"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Enable periodic buffer flush out
Not recommended when SPI receiver is unavailable
config BT_BLE_LOG_SPI_OUT_FLUSH_TIMEOUT
int "Buffer flush out period in unit of ms"
depends on BT_BLE_LOG_SPI_OUT_FLUSH_TIMER_ENABLED
default 1000
help
Buffer flush out period in unit of ms
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
bool "Enable LE Audio log output to SPI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Enable LE Audio log output to SPI
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_BUF_SIZE
int "SPI transaction buffer size for LE Audio logs"
depends on BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
default 1024
help
SPI transaction buffer size for LE Audio logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_TASK_CNT
int "LE audio task count"
depends on BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
default 1
help
LE audio task count
config BT_BLE_LOG_SPI_OUT_MESH_ENABLED
bool "Enable BLE mesh log output to SPI"
depends on BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Enable BLE mesh log output to SPI
config BT_BLE_LOG_SPI_OUT_MESH_BUF_SIZE
int "SPI transaction buffer size for BLE mesh logs"
depends on BT_BLE_LOG_SPI_OUT_MESH_ENABLED
default 1024
help
SPI transaction buffer size for BLE mesh logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_MESH_TASK_CNT
int "Mesh task count"
depends on BT_BLE_LOG_SPI_OUT_MESH_ENABLED
default 3
help
Mesh task count
config BT_BLE_LOG_UHCI_OUT_ENABLED
bool "Output ble logs via UHCI (UART DMA) driver (Experimental)"
default n
help
Output ble logs via UHCI (UART DMA) driver
On enable, BT_BLE_LOG_UHCI_OUT_UART_PORT would be reinited with
BT_BLE_LOG_UHCI_OUT_UART_BAUD_RATE as new baud rate and
BT_BLE_LOG_UHCI_OUT_UART_IO_NUM_TX as new UART Tx IO
config BT_BLE_LOG_UHCI_OUT_UART_PORT
int "UART port connected to UHCI controller"
depends on BT_BLE_LOG_UHCI_OUT_ENABLED
default 0
help
UART port connected to UHCI controller
If UART port 0 is selected, UART VFS Driver, UART ROM Driver
and UART Driver output would be redirected to BLE Log UHCI Out
to solve UART Tx FIFO multi-task access issue
config BT_BLE_LOG_UHCI_OUT_LL_TASK_BUF_SIZE
int "UHCI transaction buffer size for lower layer task logs"
depends on BT_BLE_LOG_UHCI_OUT_ENABLED
default 1024
help
UHCI transaction buffer size for lower layer task logs
config BT_BLE_LOG_UHCI_OUT_LL_ISR_BUF_SIZE
int "UHCI transaction buffer size for lower layer ISR logs"
depends on BT_BLE_LOG_UHCI_OUT_ENABLED
default 1024
help
UHCI transaction buffer size for lower layer ISR logs
config BT_BLE_LOG_UHCI_OUT_LL_HCI_BUF_SIZE
int "UHCI transaction buffer size for lower layer HCI logs"
depends on BT_BLE_LOG_UHCI_OUT_ENABLED
default 1024
help
UHCI transaction buffer size for lower layer HCI logs
config BT_BLE_LOG_UHCI_OUT_UART_NEED_INIT
bool "Enable to init UART port"
depends on BT_BLE_LOG_UHCI_OUT_ENABLED
default y
help
Enable to init UART port
config BT_BLE_LOG_UHCI_OUT_UART_BAUD_RATE
int "Baud rate for BT_BLE_LOG_UHCI_OUT_UART_PORT"
depends on BT_BLE_LOG_UHCI_OUT_UART_NEED_INIT
default 3000000
help
Baud rate for BT_BLE_LOG_UHCI_OUT_UART_PORT
config BT_BLE_LOG_UHCI_OUT_UART_IO_NUM_TX
int "IO number for UART TX port"
depends on BT_BLE_LOG_UHCI_OUT_UART_NEED_INIT
default 0
help
IO number for UART TX port
config BT_LE_USED_MEM_STATISTICS_ENABLED
bool "Enable used memory statistics"
default n
+55 -43
View File
@@ -1,8 +1,8 @@
config BLE_LOG_ENABLED
bool "Enable BLE Log Module (Experimental)"
bool "Enable BT Log Async Output (Dev Only)"
default n
help
Enable BLE Log Module
Enable BT Log Async Output
if BLE_LOG_ENABLED
config BLE_LOG_TASK_STACK_SIZE
@@ -13,15 +13,22 @@ if BLE_LOG_ENABLED
help
Stack size for BLE Log Task
config BLE_LOG_LBM_TRANS_SIZE
int "Buffer size for each peripheral transport"
default 512
config BLE_LOG_LBM_TRANS_BUF_SIZE
int "Total buffer memory per common LBM (bytes)"
default 2048
help
There're 2 log buffer managers (LBMs) with compare-and-swap
(CAS) protection, 1 LBM with FreeRTOS mutex protection, 1 LBM
without protection for critical section. Each LBM is managing
2 ping-pong buffers, which means there will be 4 * 2 *
BLE_LOG_LBM_TRANS_SIZE bytes buffer allocated
Total buffer memory allocated for each common pool log buffer
manager (LBM). This memory is divided equally among internal
transport buffers. Must be a multiple of BLE_LOG_TRANS_BUF_CNT
(currently 4).
The common pool contains:
- BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT atomic LBMs (task context)
- BLE_LOG_LBM_ATOMIC_LOCK_ISR_CNT atomic LBMs (ISR context)
- 2 spinlock-protected LBMs (one for task, one for ISR fallback)
Total common pool memory:
(ATOMIC_TASK_CNT + ATOMIC_ISR_CNT + 2) * BLE_LOG_LBM_TRANS_BUF_SIZE
config BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT
int "Count of log buffer managers with atomic lock protection for task context"
@@ -47,7 +54,6 @@ if BLE_LOG_ENABLED
depends on SOC_ESP_NIMBLE_CONTROLLER
default y
select BT_LE_CONTROLLER_LOG_ENABLED
select BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
select BLE_LOG_LL_ENABLED
help
Current BLE Controller is ESP BLE Controller
@@ -71,14 +77,21 @@ if BLE_LOG_ENABLED
Enable BLE Log for Link Layer
if BLE_LOG_LL_ENABLED
config BLE_LOG_LBM_LL_TRANS_SIZE
int "Buffer size for each peripheral transport of Link Layer LBM"
default 1024
config BLE_LOG_LBM_LL_TRANS_BUF_SIZE
int "Total buffer memory per Link Layer LBM (bytes)"
default 2048
help
There're 2 Link Layer dedicated log buffer managers (LBMs) with
compare-and-swap (CAS) protection. Each LBM is managing 2 ping-
pong buffers, which means there will be additional 2 * 2 *
BLE_LOG_LBM_LL_TRANS_SIZE bytes buffer allocated
Total buffer memory allocated for each Link Layer dedicated
log buffer manager (LBM). This memory is divided equally among
internal transport buffers. Must be a multiple of
BLE_LOG_TRANS_BUF_CNT (currently 4).
There are 2 Link Layer LBMs without lock protection (each is
accessed from a single context only):
- LL task LBM (Link Layer task context logs)
- LL HCI LBM (Link Layer HCI context logs)
Total LL pool memory: 2 * BLE_LOG_LBM_LL_TRANS_BUF_SIZE
config BLE_LOG_LL_HCI_LOG_PAYLOAD_LEN_LIMIT_ENABLED
bool "Enable LL HCI Log Payload Length Limit"
@@ -104,30 +117,6 @@ if BLE_LOG_ENABLED
help
Enable BLE Host side HCI Logging
config BLE_LOG_PAYLOAD_CHECKSUM_ENABLED
bool "Enable payload checksum for BLE Log data integrity check"
default y
help
Checksum is the default method for BLE Log data integrity check,
but for targets with slow CPU speed, it may cause significant system
performance decrease; a compromise could be made to balance the
realtime performance and log data integrity, which is calculating the
checksum of frame head and payload all together by default, or only
calculate the checksum of frame head to minimize performance decrease
config BLE_LOG_XOR_CHECKSUM_ENABLED
bool "Enable XOR checksum for BLE Log payload integrity check"
default y
help
XOR checksum is introduced for integrity check performance optimization.
config BLE_LOG_ENH_STAT_ENABLED
bool "Enable enhanced statistics for BLE Log"
default n
help
Enable enhanced statistics for written/lost frame/bytes count, which may
cost additional ~100kB memory
config BLE_LOG_TS_ENABLED
bool "Enable BLE Log Timestamp Synchronization (TS)"
default n
@@ -177,6 +166,7 @@ if BLE_LOG_ENABLED
choice BLE_LOG_PRPH_CHOICE
prompt "BLE Log peripheral choice"
default BLE_LOG_PRPH_UART_DMA if SOC_UHCI_SUPPORTED
default BLE_LOG_PRPH_DUMMY
help
Choose BLE Log peripheral
@@ -228,7 +218,7 @@ if BLE_LOG_ENABLED
config BLE_LOG_PRPH_UART_DMA_BAUD_RATE
int "Baud rate of UART port for UART DMA transport"
default 921600
default 3000000
help
Determine the baud rate of UART port
@@ -242,4 +232,26 @@ if BLE_LOG_ENABLED
menu "Settings of BLE Log Compression"
source "$IDF_PATH/components/bt/common/ble_log/extension/log_compression/Kconfig.in"
endmenu
# Deprecated options -- retained for backward sdkconfig compatibility.
# These symbols have no prompt so they never appear in menuconfig.
# NOTE(review): the defaults here appear to feed the new *_TRANS_BUF_SIZE
# scheme (4 transport buffers per LBM; 4 x 512 = 2048, matching the new
# BLE_LOG_LBM_TRANS_BUF_SIZE default).
config BLE_LOG_LBM_TRANS_SIZE
int
default 512
# NOTE(review): enhanced statistics are now always enabled (see README),
# hence the default changed from the legacy 'n' to 'y'.
config BLE_LOG_ENH_STAT_ENABLED
bool
default y
# Payload checksum is likewise always enabled now; 'y' preserves the
# legacy default.
config BLE_LOG_PAYLOAD_CHECKSUM_ENABLED
bool
default y
# NOTE(review): the legacy default for this symbol was 1024, not 512.
# 512 here yields the new 2048-byte LL pool default (4 x 512), so it may
# be intentional -- confirm it is not a copy-paste of the common-pool value.
config BLE_LOG_LBM_LL_TRANS_SIZE
int
default 512
endif
menu "Legacy SPI Log Output (Deprecated - use BT Log Async Output instead)"
source "$IDF_PATH/components/bt/common/ble_log/deprecated/Kconfig.in"
endmenu
+76 -40
View File
@@ -1,6 +1,6 @@
# BLE Log Module
A high-performance, modular Bluetooth logging system that provides real-time log capture and transmission capabilities for the ESP-IDF Bluetooth stack.
A high-performance, modular Bluetooth logging system that provides real-time log capture and asynchronous transmission capabilities for the ESP-IDF Bluetooth stack.
## Table of Contents
@@ -32,18 +32,21 @@ The BLE Log module is an efficient logging system specifically designed for the
### Core Functionality
- **Multi-source Log Collection**: Supports multiple log sources including Link Layer, Host, HCI, etc.
- **Multi-source Log Collection**: Supports multiple log sources including Link Layer, Host, HCI, UART redirection, etc.
- **High Concurrency Processing**: Uses atomic and spin lock mechanisms for multi-task concurrent writing
- **Real-time Transmission**: Asynchronous transmission mechanism based on FreeRTOS tasks
- **Data Integrity**: Configurable checksum mechanism ensures data integrity
- **Memory Optimization**: Ping-pong buffer design minimizes memory usage
- **Data Integrity**: Checksum mechanism ensures data integrity (always enabled)
- **Multi-buffer Transport**: Each LBM manages multiple transport buffers (default 4) for improved throughput over the legacy ping-pong design
- **Cross-pool Buffer Fallback**: LBM acquire attempts all atomic LBMs before falling back to spinlock LBMs, improving buffer availability under contention
### Advanced Features
- **UART Redirection**: When using UART DMA on PORT 0, UART output (including `esp_rom_printf`) is transparently redirected through the async log pipeline
- **Timestamp Synchronization**: Supports timestamp synchronization with external devices (optional)
- **Enhanced Statistics**: Detailed logging statistics including loss rate analysis (optional)
- **Enhanced Statistics**: Detailed logging statistics including written/lost frame and byte counts (always enabled)
- **Buffer Utilization Reporting**: Per-LBM buffer utilization and inflight peak tracking for diagnostics
- **Link Layer Integration**: Deep integration with ESP-IDF Bluetooth Link Layer
- **Multiple Transmission Methods**: Supports SPI DMA, UART DMA, and Dummy transmission
- **Multiple Transmission Methods**: Supports SPI Master DMA, UART DMA, and Dummy transmission
### Performance Features
@@ -58,7 +61,7 @@ The BLE Log module is an efficient logging system specifically designed for the
Enable the BLE Log module in `menuconfig`:
```
Component config → Bluetooth → Enable BLE Log Module (Experimental)
Component config → Bluetooth → Enable BT Log Async Output (Dev Only)
```
### 2. Basic Configuration
@@ -102,8 +105,8 @@ void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr,
| Configuration | Default | Description |
|---------------|---------|-------------|
| `CONFIG_BLE_LOG_ENABLED` | n | Enable BLE Log module |
| `CONFIG_BLE_LOG_LBM_TRANS_SIZE` | 512 | Size of each transport buffer |
| `CONFIG_BLE_LOG_ENABLED` | n | Enable BT Log Async Output |
| `CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE` | 2048 | Total buffer memory per common LBM (bytes). Divided equally among `BLE_LOG_TRANS_BUF_CNT` (4) internal transport buffers. |
| `CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT` | 2 | Number of atomic lock LBMs for task context |
| `CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_ISR_CNT` | 1 | Number of atomic lock LBMs for ISR context |
@@ -112,23 +115,31 @@ void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr,
| Configuration | Default | Description |
|---------------|---------|-------------|
| `CONFIG_BLE_LOG_LL_ENABLED` | y | Enable Link Layer logging |
| `CONFIG_BLE_LOG_LBM_LL_TRANS_SIZE` | 1024 | Link Layer transport buffer size |
| `CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE` | 2048 | Total buffer memory per Link Layer LBM (bytes). Divided equally among `BLE_LOG_TRANS_BUF_CNT` (4) internal transport buffers. |
### Advanced Features
### Other Features
| Configuration | Default | Description |
|---------------|---------|-------------|
| `CONFIG_BLE_LOG_PAYLOAD_CHECKSUM_ENABLED` | y | Enable payload checksum |
| `CONFIG_BLE_LOG_ENH_STAT_ENABLED` | n | Enable enhanced statistics |
| `CONFIG_BLE_LOG_TS_ENABLED` | n | Enable timestamp synchronization |
| `CONFIG_BLE_LOG_HOST_HCI_LOG_ENABLED` | n | Enable BLE Host side HCI logging |
> **Note**: Payload checksum and enhanced statistics are now always enabled and no longer have separate Kconfig options.
### Transport Method Configuration
| Transport | Configuration | Description |
|-----------|---------------|-------------|
| Dummy | `CONFIG_BLE_LOG_PRPH_DUMMY` | Debug dummy transport |
| Dummy | `CONFIG_BLE_LOG_PRPH_DUMMY` | Debug dummy transport (default unless `SOC_UHCI_SUPPORTED`) |
| SPI Master DMA | `CONFIG_BLE_LOG_PRPH_SPI_MASTER_DMA` | SPI DMA transport |
| UART DMA | `CONFIG_BLE_LOG_PRPH_UART_DMA` | UART DMA transport |
| UART DMA | `CONFIG_BLE_LOG_PRPH_UART_DMA` | UART DMA transport (default when `SOC_UHCI_SUPPORTED`). Default baud rate: 3000000. |
### Deprecated / Removed
| Module | Status | Notes |
|--------|--------|-------|
| Legacy SPI Log Output | Deprecated | Moved to `deprecated/` directory. Use BT Log Async Output instead. A separate Kconfig menu "Legacy SPI Log Output" is available for backward compatibility. |
| UHCI Out | Removed | The standalone UHCI Out module (`ble_log_uhci_out.c`) has been removed. UART DMA transport under the main BLE Log peripheral interface replaces it. |
## API Reference
@@ -187,10 +198,19 @@ typedef enum {
BLE_LOG_SRC_HOST, // Host layer logs
BLE_LOG_SRC_HCI, // HCI layer logs
BLE_LOG_SRC_ENCODE, // Encoding layer logs
BLE_LOG_SRC_REDIR, // UART redirection (PORT 0 only)
BLE_LOG_SRC_MAX,
} ble_log_src_t;
```
### HCI Log Macro
```c
#define ble_log_write_hci(direction, data, len)
```
Writes an HCI packet with direction encoding. `direction` is `BLE_LOG_HCI_DOWNSTREAM` (0) or `BLE_LOG_HCI_UPSTREAM` (1). Direction is encoded in the MSB of the first byte (HCI type).
### Link Layer API (Conditional Compilation)
#### `void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr, uint32_t len_append, const uint8_t *addr_append, uint32_t flag)`
@@ -213,6 +233,7 @@ enum {
BLE_LOG_LL_FLAG_ISR,
BLE_LOG_LL_FLAG_HCI,
BLE_LOG_LL_FLAG_RAW,
BLE_LOG_LL_FLAG_OMDATA,
BLE_LOG_LL_FLAG_HCI_UPSTREAM,
};
```
@@ -326,31 +347,37 @@ void example_performance_test() {
### Memory Usage Estimation
Each LBM's total buffer memory is configured directly via Kconfig. The configured value is divided equally among `BLE_LOG_TRANS_BUF_CNT` (currently 4) internal transport buffers.
Memory usage under default configuration:
```
Total Buffers = (Atomic Task LBMs + Atomic ISR LBMs + Spin LBMs) × 2 × Transport Buffer Size
Default Config = (2 + 1 + 2) × 2 × 512 = 5120 bytes
Common Pool:
LBM count = Atomic Task (2) + Atomic ISR (1) + Spinlock (2) = 5
Total = 5 × BLE_LOG_LBM_TRANS_BUF_SIZE = 5 × 2048 = 10240 bytes
Additional when Link Layer enabled:
LL Buffers = 2 × 2 × 1024 = 4096 bytes
Link Layer Pool (when CONFIG_BLE_LOG_LL_ENABLED):
LBM count = 2 (LL task + LL HCI)
Total = 2 × BLE_LOG_LBM_LL_TRANS_BUF_SIZE = 2 × 2048 = 4096 bytes
Additional when Enhanced Statistics enabled:
Statistics Data = Log Source Count × sizeof(ble_log_stat_mgr_t) = 8 × 40 = 320 bytes
Statistics (always enabled):
Total = BLE_LOG_SRC_MAX × sizeof(ble_log_stat_mgr_t) = 9 × 20 = 180 bytes
UART Redirect (when UART DMA on PORT 0):
Additional BLE_LOG_TRANS_BUF_CNT (4) transport buffers
```
### Performance Optimization Recommendations
1. **Adjust LBM Count**: Adjust atomic lock LBM count based on concurrency requirements
2. **Buffer Size**: Adjust transport buffer size based on log volume
3. **Transport Method**: Choose optimal transport method based on hardware (SPI DMA typically has best performance)
4. **Checksum**: Consider disabling payload checksum when performance requirements are extremely high
2. **Buffer Size**: Adjust total buffer memory per LBM based on log volume; must be a multiple of `BLE_LOG_TRANS_BUF_CNT` (4)
3. **Transport Method**: Choose optimal transport method based on hardware (UART DMA is default on supported SoCs)
### Real-time Considerations
- Critical code paths are marked with `BLE_LOG_IRAM_ATTR` and run in IRAM
- Atomic operations avoid lock contention
- Ping-pong buffers ensure continuous writing
- Multi-buffer transport ensures continuous writing even when some buffers are in-flight
## Troubleshooting
@@ -388,13 +415,11 @@ if (!initialized) {
**Solutions**:
```c
// Enable enhanced statistics to check loss rate
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
// Statistics will be automatically included in logs
#endif
// Enhanced statistics are always enabled — check written/lost frame
// and byte counts in the log stream output
// Adjust buffer size
// CONFIG_BLE_LOG_LBM_TRANS_SIZE=1024
// Increase total buffer memory per LBM
// CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE=4096
// Increase atomic lock LBM count
// CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT=4
@@ -405,17 +430,16 @@ if (!initialized) {
**Symptoms**: System response becomes slow
**Possible Causes**:
- Checksum calculation overhead
- Transmission bottleneck
- Lock contention
**Solutions**:
```c
// Disable payload checksum
// CONFIG_BLE_LOG_PAYLOAD_CHECKSUM_ENABLED=n
// Use faster transmission method
// CONFIG_BLE_LOG_PRPH_SPI_MASTER_DMA=y
// CONFIG_BLE_LOG_PRPH_UART_DMA=y (default on SOC_UHCI_SUPPORTED targets)
// Increase baud rate (default is now 3000000)
// CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE=3000000
// Adjust task priority
#define BLE_LOG_TASK_PRIO configMAX_PRIORITIES-3
@@ -431,12 +455,11 @@ if (!initialized) {
ble_log_dump_to_console();
```
#### 2. Enable Enhanced Statistics
#### 2. Check Enhanced Statistics
```c
// Enable in menuconfig
// CONFIG_BLE_LOG_ENH_STAT_ENABLED=y
// Statistics will be automatically output to logs
// Enhanced statistics are always enabled
// Written/lost frame and byte counts are automatically output to logs
```
#### 3. Monitor Memory Usage
@@ -448,3 +471,16 @@ void monitor_memory() {
printf("Free heap after init: %d\n", esp_get_free_heap_size());
}
```
## Important Notes
### Buffer Size Constraints
- `CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE` and `CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE` must be multiples of `BLE_LOG_TRANS_BUF_CNT` (currently 4)
- The per-buffer size (total ÷ 4) must be at least large enough to hold one frame overhead (`BLE_LOG_FRAME_OVERHEAD`)
- `BLE_LOG_TRANS_BUF_CNT` must be a power of 2
### Migration from Legacy Modules
- **UHCI Out**: The standalone `ble_log_uhci_out` module has been removed. Use the UART DMA peripheral transport (`CONFIG_BLE_LOG_PRPH_UART_DMA`) instead.
- **SPI Out**: The legacy SPI log output has been moved to `deprecated/`. A separate Kconfig menu "Legacy SPI Log Output (Deprecated)" is available for backward compatibility, but new projects should use BT Log Async Output with the SPI Master DMA peripheral transport.
@@ -1,795 +0,0 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include "ble_log/ble_log_uhci_out.h"
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
// Private includes
#include "esp_bt.h"
// sdkconfig defines
#define UHCI_OUT_LL_TASK_BUF_SIZE CONFIG_BT_BLE_LOG_UHCI_OUT_LL_TASK_BUF_SIZE
#define UHCI_OUT_LL_ISR_BUF_SIZE CONFIG_BT_BLE_LOG_UHCI_OUT_LL_ISR_BUF_SIZE
#define UHCI_OUT_LL_HCI_BUF_SIZE CONFIG_BT_BLE_LOG_UHCI_OUT_LL_HCI_BUF_SIZE
#define UHCI_OUT_UART_PORT CONFIG_BT_BLE_LOG_UHCI_OUT_UART_PORT
#define UHCI_OUT_UART_NEED_INIT CONFIG_BT_BLE_LOG_UHCI_OUT_UART_NEED_INIT
#if UHCI_OUT_UART_NEED_INIT
#define UHCI_OUT_UART_BAUD_RATE CONFIG_BT_BLE_LOG_UHCI_OUT_UART_BAUD_RATE
#define UHCI_OUT_UART_IO_NUM_TX CONFIG_BT_BLE_LOG_UHCI_OUT_UART_IO_NUM_TX
#endif // UHCI_OUT_UART_NEED_INIT
// Private defines
#define UHCI_OUT_MAX_TRANSFER_SIZE (10240)
#define UHCI_OUT_MALLOC(size) heap_caps_malloc(size, MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT)
#define UHCI_OUT_FLUSH_TIMEOUT_MS (100)
#define UHCI_OUT_FLUSH_TIMEOUT_US (UHCI_OUT_FLUSH_TIMEOUT_MS * 1000)
#define UHCI_OUT_USER_BUF_SIZE (512)
#define UHCI_OUT_UART_PORT0 (0)
#define UHCI_OUT_UART_PORT1 (1)
#define UHCI_OUT_UART_DRIVER_RX_BUF_SIZE (32)
// Queue size defines
#define UHCI_OUT_PING_PONG_BUF_CNT (2)
#define UHCI_OUT_USER_QUEUE_SIZE (UHCI_OUT_PING_PONG_BUF_CNT)
#define UHCI_OUT_LL_QUEUE_SIZE (3 * UHCI_OUT_PING_PONG_BUF_CNT)
#define UHCI_OUT_QUEUE_SIZE (UHCI_OUT_USER_QUEUE_SIZE + UHCI_OUT_LL_QUEUE_SIZE)
#if CONFIG_SOC_ESP_NIMBLE_CONTROLLER
#include "os/os_mbuf.h"
#endif /* CONFIG_SOC_ESP_NIMBLE_CONTROLLER */
// Private typedefs
// Transaction control block: header plus flexible DMA buffer, allocated as a
// single chunk so the tx-done callback can recover the header from the
// buffer address (see uhci_out_tx_done_cb).
typedef struct {
    // This flag is for multithreading, must be a word, do not modify
    volatile uint32_t flag;
    uint16_t buf_size;   // capacity of buffer[] in bytes
    uint16_t length;     // bytes currently written into buffer[]
    uint8_t buffer[0];   // flexible array member: DMA payload area
} uhci_out_trans_cb_t;
// Per-stream log control block with two ping-pong transaction buffers
typedef struct {
    uhci_out_trans_cb_t *trans_cb[2];  // ping-pong transaction buffers
    uint8_t trans_cb_idx;              // index of the buffer currently filled
    uint8_t type;                      // (type << 4) | subtype, set in uhci_out_log_cb_init
    uint16_t lost_frame_cnt;           // frames dropped since last loss report
    uint32_t lost_bytes_cnt;           // bytes dropped since last loss report
    uint32_t frame_sn;                 // running frame sequence number
} uhci_out_log_cb_t;
// On-wire frame head preceding every log payload
typedef struct {
    uint16_t length;    // payload length in bytes
    uint8_t source;     // BLE_LOG_UHCI_OUT_SOURCE_* identifier
    uint8_t type;       // owning log control block type/subtype
    uint16_t frame_sn;  // 16-bit sequence number for host-side loss detection
} __attribute__((packed)) frame_head_t;
// Payload of a synthetic loss-report frame (source BLE_LOG_UHCI_OUT_SOURCE_LOSS)
typedef struct {
    uint8_t type;             // type of the stream that lost data
    uint16_t lost_frame_cnt;  // frames dropped
    uint32_t lost_bytes_cnt;  // bytes dropped
} __attribute__((packed)) loss_payload_t;
// Private enums
// Transaction buffer life-cycle states (stored in uhci_out_trans_cb_t.flag)
enum {
    TRANS_CB_FLAG_AVAILABLE = 0,  // idle, writable
    TRANS_CB_FLAG_NEED_QUEUE,     // full, waiting to be queued for DMA
    TRANS_CB_FLAG_IN_QUEUE,       // owned by the UHCI driver until tx done
};
// Log control block owner types (high nibble of uhci_out_log_cb_t.type)
enum {
    LOG_CB_TYPE_USER = 0,
    LOG_CB_TYPE_LL,
};
// Controller (lower layer) stream subtypes (low nibble of type)
enum {
    LOG_CB_LL_SUBTYPE_TASK = 0,
    LOG_CB_LL_SUBTYPE_ISR,
    LOG_CB_LL_SUBTYPE_HCI
};
// Bit positions of the `flag` argument of ble_log_uhci_out_ll_write()
enum {
    LL_LOG_FLAG_CONTINUE = 0,
    LL_LOG_FLAG_END,
    LL_LOG_FLAG_TASK,
    LL_LOG_FLAG_ISR,
    LL_LOG_FLAG_HCI,
    LL_LOG_FLAG_RAW,
    LL_LOG_FLAG_OMDATA,
    LL_LOG_FLAG_HCI_UPSTREAM,
};
// Bit positions of ll_ev_flags, handled in ble_log_uhci_out_ll_log_ev_proc()
enum {
    LL_EV_FLAG_ISR_APPEND = 0,  // ISR-stream buffer needs queueing from task context
    LL_EV_FLAG_FLUSH_LOG,       // idle-timeout flush requested
};
// Private variables
static bool uhci_out_inited = false;                 // module-level init flag
static uhci_controller_handle_t uhci_handle = NULL;  // UHCI (UART DMA) controller handle
static bool user_log_inited = false;
static SemaphoreHandle_t user_log_mutex = NULL;      // guards user_log_cb
static uhci_out_log_cb_t *user_log_cb = NULL;
static uint32_t user_last_write_ts = 0;              // ms, for idle-flush timing
static bool ll_log_inited = false;
static uhci_out_log_cb_t *ll_task_log_cb = NULL;
static uhci_out_log_cb_t *ll_isr_log_cb = NULL;
static uhci_out_log_cb_t *ll_hci_log_cb = NULL;
static uint32_t ll_ev_flags = 0;                     // LL_EV_FLAG_* bits
static uint32_t ll_last_write_ts = 0;                // ms, for idle-flush timing
static esp_timer_handle_t flush_timer = NULL;        // one-shot, self re-arming
// Private function declarations
extern void esp_panic_handler_feed_wdts(void);
static int uhci_out_init_trans(uhci_out_trans_cb_t **trans_cb, uint16_t buf_size);
static void uhci_out_deinit_trans(uhci_out_trans_cb_t **trans_cb);
static bool uhci_out_tx_done_cb(uhci_controller_handle_t uhci_ctrl,
const uhci_tx_done_event_data_t *edata, void *user_ctx);
static inline void uhci_out_append_trans(uhci_out_trans_cb_t *trans_cb);
static int uhci_out_log_cb_init(uhci_out_log_cb_t **log_cb, uint16_t buf_size, uint8_t type, uint8_t idx);
static void uhci_out_log_cb_deinit(uhci_out_log_cb_t **log_cb);
static inline bool uhci_out_log_cb_check_trans(uhci_out_log_cb_t *log_cb, uint16_t len, bool *need_append);
static inline void uhci_out_log_cb_append_trans(uhci_out_log_cb_t *log_cb);
static inline void uhci_out_log_cb_flush_trans(uhci_out_log_cb_t *log_cb);
static bool uhci_out_log_cb_write(uhci_out_log_cb_t *log_cb, const uint8_t *addr, uint16_t len,
const uint8_t *addr_append, uint16_t len_append, uint8_t source, bool omdata);
static void uhci_out_log_cb_write_loss(uhci_out_log_cb_t *log_cb);
static void uhci_out_log_cb_dump(uhci_out_log_cb_t *log_cb);
static void esp_timer_cb_log_flush(void);
static void uhci_out_user_write_str(const uint8_t *src, uint16_t len);
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
static void uhci_out_user_write_char(char c);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
static int uhci_out_user_log_init(void);
static void uhci_out_user_log_deinit(void);
static int uhci_out_ll_log_init(void);
static void uhci_out_ll_log_deinit(void);
static void uhci_out_ll_log_flush(void);
#if defined(CONFIG_IDF_TARGET_ESP32H2) || defined(CONFIG_IDF_TARGET_ESP32C6) || defined(CONFIG_IDF_TARGET_ESP32C5) ||\
defined(CONFIG_IDF_TARGET_ESP32C61) || defined(CONFIG_IDF_TARGET_ESP32H21)
extern void r_ble_log_simple_put_ev(void);
#define UHCI_OUT_LL_PUT_EV r_ble_log_simple_put_ev()
#elif defined(CONFIG_IDF_TARGET_ESP32C2)
extern void ble_log_simple_put_ev(void);
#define UHCI_OUT_LL_PUT_EV ble_log_simple_put_ev()
#else
#define UHCI_OUT_LL_PUT_EV
#endif
// Private macros
#define UHCI_OUT_FRAME_HEAD_LEN (sizeof(frame_head_t))
#define UHCI_OUT_FRAME_TAIL_LEN (sizeof(uint32_t))
#define UHCI_OUT_FRAME_OVERHEAD (UHCI_OUT_FRAME_HEAD_LEN + UHCI_OUT_FRAME_TAIL_LEN)
#define UHCI_OUT_GET_FRAME_SN(VAR) __atomic_fetch_add(VAR, 1, __ATOMIC_RELAXED)
// Private functions
// Allocate and zero-initialize one transaction control block with a trailing
// log buffer of `buf_size` bytes. On failure *trans_cb is set to NULL and -1
// is returned; on success *trans_cb points at the new block and 0 is returned.
static int uhci_out_init_trans(uhci_out_trans_cb_t **trans_cb, uint16_t buf_size)
{
    // Header and DMA buffer live in one internal-RAM allocation
    uhci_out_trans_cb_t *cb = (uhci_out_trans_cb_t *)UHCI_OUT_MALLOC(sizeof(uhci_out_trans_cb_t) + buf_size);
    *trans_cb = cb;
    if (cb == NULL) {
        return -1;
    }
    // Zero only the header; buffer contents are overwritten before transmit
    memset(cb, 0, sizeof(uhci_out_trans_cb_t));
    cb->buf_size = buf_size;
    return 0;
}
// Free a transaction control block and clear the caller's pointer.
// Safe to call with *trans_cb == NULL.
static void uhci_out_deinit_trans(uhci_out_trans_cb_t **trans_cb)
{
    uhci_out_trans_cb_t *cb = *trans_cb;
    if (cb == NULL) {
        return;
    }
    free(cb);
    *trans_cb = NULL;
}
// UHCI tx-done callback (ISR context): recycle the finished transaction.
// The DMA buffer sits immediately after its uhci_out_trans_cb_t header, so
// the owning control block is recovered by stepping back from the buffer
// address (container-of pattern over the flexible array member).
IRAM_ATTR static bool uhci_out_tx_done_cb(uhci_controller_handle_t uhci_ctrl,
                                          const uhci_tx_done_event_data_t *edata, void *user_ctx)
{
    uint8_t *dma_buf = (uint8_t *)edata->buffer;
    uhci_out_trans_cb_t *cb = (uhci_out_trans_cb_t *)(dma_buf - sizeof(uhci_out_trans_cb_t));
    cb->length = 0;
    cb->flag = TRANS_CB_FLAG_AVAILABLE;
    return true;
}
// Submit one pending transaction buffer to the UHCI driver for DMA output.
// Only buffers in the NEED_QUEUE state with non-zero content are submitted;
// on submission failure the buffer is recycled immediately.
IRAM_ATTR static inline void uhci_out_append_trans(uhci_out_trans_cb_t *trans_cb)
{
    if ((trans_cb->flag != TRANS_CB_FLAG_NEED_QUEUE) || !trans_cb->length) {
        return;
    }
    // Note: If task yield after transmission but before flag set
    // flag might be reset in tx done ISR before flag set, leading to buffer access failure
    trans_cb->flag = TRANS_CB_FLAG_IN_QUEUE;
    if (uhci_transmit(uhci_handle, trans_cb->buffer, trans_cb->length) == ESP_OK) {
        return;
    }
    // Queueing failed: recycle the buffer so writers can reuse it
    trans_cb->length = 0;
    trans_cb->flag = TRANS_CB_FLAG_AVAILABLE;
}
// Allocate a log control block and its two ping-pong transaction buffers of
// `buf_size` bytes each. `type` goes in the high nibble and `idx` (subtype)
// in the low nibble of the control block's type field. Returns 0 on success;
// on failure everything allocated so far is released and -1 is returned.
static int uhci_out_log_cb_init(uhci_out_log_cb_t **log_cb, uint16_t buf_size, uint8_t type, uint8_t idx)
{
    uhci_out_log_cb_t *cb = (uhci_out_log_cb_t *)UHCI_OUT_MALLOC(sizeof(uhci_out_log_cb_t));
    *log_cb = cb;
    if (cb == NULL) {
        return -1;
    }
    memset(cb, 0, sizeof(uhci_out_log_cb_t));
    // Allocate both ping-pong buffers; roll everything back on any failure
    bool ok = true;
    for (uint8_t i = 0; i < 2; i++) {
        if (uhci_out_init_trans(&cb->trans_cb[i], buf_size) != 0) {
            ok = false;
        }
    }
    if (!ok) {
        uhci_out_log_cb_deinit(log_cb);
        return -1;
    }
    // High nibble: log type; low nibble: sub-type index
    cb->type = (type << 4) | idx;
    return 0;
}
// Release a log control block and any transaction buffers it owns, then
// clear the caller's pointer. Tolerates NULL and partially-built blocks.
static void uhci_out_log_cb_deinit(uhci_out_log_cb_t **log_cb)
{
    uhci_out_log_cb_t *cb = *log_cb;
    if (cb == NULL) {
        return;
    }
    for (uint8_t i = 0; i < 2; i++) {
        if (cb->trans_cb[i] != NULL) {
            uhci_out_deinit_trans(&cb->trans_cb[i]);
        }
    }
    free(cb);
    *log_cb = NULL;
}
// Check whether the currently selected transaction buffer can accept `len`
// more bytes. Returns true when there is room (the caller may then write).
// While searching it may mark a full buffer as pending and flip to the other
// ping-pong buffer, in which case *need_append is set so the caller queues
// the pending buffer afterwards. When the loop exhausts both buffers it
// falls through into the `failed:` loss accounting below.
IRAM_ATTR static inline bool uhci_out_log_cb_check_trans(uhci_out_log_cb_t *log_cb, uint16_t len, bool *need_append)
{
    uhci_out_trans_cb_t *trans_cb;
    *need_append = false;
    for (uint8_t i = 0; i < 2; i++) {
        trans_cb = log_cb->trans_cb[log_cb->trans_cb_idx];
        // A frame larger than a whole buffer can never fit — drop it
        if (len > trans_cb->buf_size) {
            goto failed;
        }
        if (trans_cb->flag == TRANS_CB_FLAG_AVAILABLE) {
            if ((trans_cb->buf_size - trans_cb->length) >= len) {
                return true;
            } else {
                // Buffer too full: hand it off for queueing, try the other one
                trans_cb->flag = TRANS_CB_FLAG_NEED_QUEUE;
                *need_append = true;
            }
        }
        log_cb->trans_cb_idx = !(log_cb->trans_cb_idx);
    }
    // Both ping-pong buffers unavailable: account the frame as lost
failed:
    log_cb->lost_bytes_cnt += len;
    log_cb->lost_frame_cnt++;
    return false;
}
// CRITICAL: Shall not be called from ISR!
// Queue any pending (NEED_QUEUE) buffers of this stream for DMA output,
// starting with the buffer that is NOT currently being filled so older data
// leaves first.
IRAM_ATTR static inline void uhci_out_log_cb_append_trans(uhci_out_log_cb_t *log_cb)
{
    uint8_t idx = !log_cb->trans_cb_idx;
    for (uint8_t i = 0; i < 2; i++) {
        uhci_out_trans_cb_t *cb = log_cb->trans_cb[idx];
        if (cb->flag == TRANS_CB_FLAG_NEED_QUEUE) {
            uhci_out_append_trans(cb);
        }
        idx = !idx;
    }
}
// Mark every idle, non-empty buffer of this stream as pending so that a
// subsequent append call pushes partially-filled data out.
IRAM_ATTR static inline void uhci_out_log_cb_flush_trans(uhci_out_log_cb_t *log_cb)
{
    for (uint8_t i = 0; i < 2; i++) {
        uhci_out_trans_cb_t *cb = log_cb->trans_cb[i];
        if ((cb->flag == TRANS_CB_FLAG_AVAILABLE) && cb->length) {
            cb->flag = TRANS_CB_FLAG_NEED_QUEUE;
        }
    }
}
// Return value: Need append
// Serialize one framed log entry (head + payload [+ appended payload] + tail)
// into the current transaction buffer. The caller must have verified free
// space via uhci_out_log_cb_check_trans() first. Returns true when the buffer
// became (nearly) full and must be queued by the caller.
IRAM_ATTR static bool uhci_out_log_cb_write(uhci_out_log_cb_t *log_cb, const uint8_t *addr, uint16_t len,
                                            const uint8_t *addr_append, uint16_t len_append, uint8_t source, bool omdata)
{
    uhci_out_trans_cb_t *trans_cb = log_cb->trans_cb[log_cb->trans_cb_idx];
    uint8_t *buf = trans_cb->buffer + trans_cb->length;
    uint16_t total_length = len + len_append;
    // Frame head carries payload length, source, stream type and a 16-bit
    // sequence number (atomically incremented, used for loss detection)
    frame_head_t head = {
        .length = total_length,
        .source = source,
        .type = log_cb->type,
        .frame_sn = UHCI_OUT_GET_FRAME_SN(&(log_cb->frame_sn)) & 0xFFFF,
    };
    memcpy(buf, (const uint8_t *)&head, UHCI_OUT_FRAME_HEAD_LEN);
    memcpy(buf + UHCI_OUT_FRAME_HEAD_LEN, addr, len);
    if (len_append && addr_append) {
#if CONFIG_SOC_ESP_NIMBLE_CONTROLLER
        // When omdata is set, the appended payload is an os_mbuf chain
        // rather than flat memory
        if (omdata) {
            os_mbuf_copydata((struct os_mbuf *)addr_append, 0,
                             len_append, buf + UHCI_OUT_FRAME_HEAD_LEN + len);
        }
        else
#endif /* CONFIG_SOC_ESP_NIMBLE_CONTROLLER */
        {
            memcpy(buf + UHCI_OUT_FRAME_HEAD_LEN + len, addr_append, len_append);
        }
    }
    // Frame tail: simple byte-sum checksum over head + payload
    uint32_t checksum = 0;
    for (int i = 0; i < UHCI_OUT_FRAME_HEAD_LEN + total_length; i++) {
        checksum += buf[i];
    }
    memcpy(buf + UHCI_OUT_FRAME_HEAD_LEN + total_length, &checksum, UHCI_OUT_FRAME_TAIL_LEN);
    trans_cb->length += total_length + UHCI_OUT_FRAME_OVERHEAD;
    // Ask the caller to queue the buffer once no further frame could fit
    if ((trans_cb->buf_size - trans_cb->length) <= UHCI_OUT_FRAME_OVERHEAD) {
        trans_cb->flag = TRANS_CB_FLAG_NEED_QUEUE;
        return true;
    }
    return false;
}
// Emit a synthetic loss-report frame summarizing data dropped on this stream
// since the last report, then reset the loss counters. A no-op when nothing
// was lost or when no buffer currently has room for the report.
IRAM_ATTR static void uhci_out_log_cb_write_loss(uhci_out_log_cb_t *log_cb)
{
    if (!log_cb->lost_bytes_cnt || !log_cb->lost_frame_cnt) {
        return;
    }
    bool need_append;
    uint16_t frame_len = sizeof(loss_payload_t) + UHCI_OUT_FRAME_OVERHEAD;
    if (!uhci_out_log_cb_check_trans(log_cb, frame_len, &need_append)) {
        return;
    }
    loss_payload_t payload = {
        .type = log_cb->type,
        .lost_frame_cnt = log_cb->lost_frame_cnt,
        .lost_bytes_cnt = log_cb->lost_bytes_cnt,
    };
    uhci_out_log_cb_write(log_cb, (const uint8_t *)&payload, sizeof(loss_payload_t),
                          NULL, 0, BLE_LOG_UHCI_OUT_SOURCE_LOSS, false);
    log_cb->lost_frame_cnt = 0;
    log_cb->lost_bytes_cnt = 0;
}
// Panic-path helper: hex-dump both transaction buffers of a stream to the
// console, oldest buffer first, feeding the watchdogs as it goes.
static void uhci_out_log_cb_dump(uhci_out_log_cb_t *log_cb)
{
    for (uint8_t i = 0; i < 2; i++) {
        // Dump the last transaction before dumping the current transaction
        log_cb->trans_cb_idx = !(log_cb->trans_cb_idx);
        uhci_out_trans_cb_t *cb = log_cb->trans_cb[log_cb->trans_cb_idx];
        const uint8_t *data = cb->buffer;
        for (uint16_t j = 0; j < cb->buf_size; j++) {
            esp_rom_printf("%02x ", data[j]);
            // Feed watchdogs periodically to avoid wdts timeout
            if ((j % 100) == 0) {
                esp_panic_handler_feed_wdts();
            }
        }
    }
}
// One-shot flush timer callback: when a stream has been idle longer than
// UHCI_OUT_FLUSH_TIMEOUT_MS, push its partially-filled buffers out, then
// re-arm the timer.
// NOTE(review): declared void(void) but registered via a cast to
// esp_timer_cb_t (void(*)(void*)) in ble_log_uhci_out_init() — confirm this
// is benign on all target ABIs.
static void esp_timer_cb_log_flush(void)
{
    uint32_t os_ts = pdTICKS_TO_MS(xTaskGetTickCount());
    // User stream: flush directly under the user log mutex
    if ((os_ts - user_last_write_ts) > UHCI_OUT_FLUSH_TIMEOUT_MS) {
        xSemaphoreTake(user_log_mutex, portMAX_DELAY);
        uhci_out_log_cb_flush_trans(user_log_cb);
        uhci_out_log_cb_append_trans(user_log_cb);
        xSemaphoreGive(user_log_mutex);
    }
    // Controller streams must be flushed from controller task context:
    // only raise the event flag and notify the controller
    if ((esp_bt_controller_get_status() >= ESP_BT_CONTROLLER_STATUS_INITED) &&
        ((os_ts - ll_last_write_ts) > UHCI_OUT_FLUSH_TIMEOUT_MS)) {
        ll_ev_flags |= BIT(LL_EV_FLAG_FLUSH_LOG);
        UHCI_OUT_LL_PUT_EV;
    }
    // The timer is one-shot and re-arms itself here
    esp_timer_start_once(flush_timer, UHCI_OUT_FLUSH_TIMEOUT_US);
}
// Append a user/console byte string to the user log stream (no framing is
// added here; user data is streamed as-is into the transaction buffer).
// Drops the write silently when the user path is uninitialized or the
// input is empty.
static void uhci_out_user_write_str(const uint8_t *src, uint16_t len)
{
    if (!user_log_inited || !src || !len) {
        return;
    }
    xSemaphoreTake(user_log_mutex, portMAX_DELAY);
    bool need_append;
    bool has_room = uhci_out_log_cb_check_trans(user_log_cb, len, &need_append);
    if (has_room) {
        uhci_out_trans_cb_t *cb = user_log_cb->trans_cb[user_log_cb->trans_cb_idx];
        memcpy(cb->buffer + cb->length, (const uint8_t *)src, len);
        cb->length += len;
    }
    if (need_append) {
        uhci_out_log_cb_append_trans(user_log_cb);
    }
    // Record the write time for the idle-flush timer
    user_last_write_ts = pdTICKS_TO_MS(xTaskGetTickCount());
    xSemaphoreGive(user_log_mutex);
}
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
// Single-character sink installed via esp_rom_install_channel_putc() to
// funnel esp_rom_printf output into the user log stream (port 0 only).
static void uhci_out_user_write_char(char c)
{
    uhci_out_user_write_str((const uint8_t *)&c, 1);
}
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
// Initialize the user log path: mutex plus one log control block.
// Idempotent; returns 0 on success, -1 on failure (with full rollback).
static int uhci_out_user_log_init(void)
{
    if (user_log_inited) {
        return 0;
    }
    user_log_mutex = xSemaphoreCreateMutex();
    if (user_log_mutex &&
        (uhci_out_log_cb_init(&user_log_cb, UHCI_OUT_USER_BUF_SIZE, LOG_CB_TYPE_USER, 0) == 0)) {
        user_log_inited = true;
        return 0;
    }
    // Mutex or control block allocation failed: undo everything
    uhci_out_user_log_deinit();
    return -1;
}
// Tear down the user log path. Serializes with in-flight writers via the
// mutex before releasing the control block, then deletes the mutex itself.
static void uhci_out_user_log_deinit(void)
{
    user_log_inited = false;
    if (user_log_mutex == NULL) {
        return;
    }
    xSemaphoreTake(user_log_mutex, portMAX_DELAY);
    uhci_out_log_cb_deinit(&user_log_cb);
    xSemaphoreGive(user_log_mutex);
    vSemaphoreDelete(user_log_mutex);
    user_log_mutex = NULL;
}
static int uhci_out_ll_log_init(void)
{
if (ll_log_inited) {
return 0;
}
if (uhci_out_log_cb_init(&ll_task_log_cb, UHCI_OUT_LL_TASK_BUF_SIZE,
LOG_CB_TYPE_LL, LOG_CB_LL_SUBTYPE_TASK) != 0) {
goto failed;
}
if (uhci_out_log_cb_init(&ll_isr_log_cb, UHCI_OUT_LL_ISR_BUF_SIZE,
LOG_CB_TYPE_LL, LOG_CB_LL_SUBTYPE_ISR) != 0) {
goto failed;
}
if (uhci_out_log_cb_init(&ll_hci_log_cb, UHCI_OUT_LL_HCI_BUF_SIZE,
LOG_CB_TYPE_LL, LOG_CB_LL_SUBTYPE_HCI) != 0) {
goto failed;
}
ll_log_inited = true;
return 0;
failed:
uhci_out_ll_log_deinit();
return -1;
}
// Release all controller log streams. Tolerates a partially-initialized
// state — each deinit call handles a NULL control block.
static void uhci_out_ll_log_deinit(void)
{
    // Clear the flag first so concurrent writers bail out early
    ll_log_inited = false;
    uhci_out_log_cb_deinit(&ll_hci_log_cb);
    uhci_out_log_cb_deinit(&ll_isr_log_cb);
    uhci_out_log_cb_deinit(&ll_task_log_cb);
}
// Flush all controller log streams: emit pending loss reports, mark
// partially-filled buffers pending, and queue them for DMA. Runs in
// controller task context (invoked from ble_log_uhci_out_ll_log_ev_proc).
static void uhci_out_ll_log_flush(void)
{
    if (!ll_log_inited) {
        return;
    }
    // Task & HCI streams are only written from task context — no lock needed
    uhci_out_log_cb_write_loss(ll_task_log_cb);
    uhci_out_log_cb_write_loss(ll_hci_log_cb);
    uhci_out_log_cb_flush_trans(ll_task_log_cb);
    uhci_out_log_cb_flush_trans(ll_hci_log_cb);
    // The ISR stream may be written from interrupt context concurrently,
    // so guard its flush with a critical section
    portMUX_TYPE spinlock = portMUX_INITIALIZER_UNLOCKED;
    portENTER_CRITICAL_SAFE(&spinlock);
    uhci_out_log_cb_write_loss(ll_isr_log_cb);
    uhci_out_log_cb_flush_trans(ll_isr_log_cb);
    portEXIT_CRITICAL_SAFE(&spinlock);
    // Queue all pending buffers for DMA transmission (task context only)
    uhci_out_log_cb_append_trans(ll_task_log_cb);
    uhci_out_log_cb_append_trans(ll_hci_log_cb);
    uhci_out_log_cb_append_trans(ll_isr_log_cb);
}
// Public functions
// Initialize the BLE Log UHCI output transport: optional UART configuration,
// UHCI (UART DMA) controller creation, user & controller log streams, the
// self re-arming flush timer and — on UART port 0 — console redirection.
// Idempotent. Returns 0 on success, -1 on failure (full rollback via
// ble_log_uhci_out_deinit()).
int ble_log_uhci_out_init(void)
{
    // Avoid double init
    if (uhci_out_inited) {
        return 0;
    }
#if UHCI_OUT_UART_NEED_INIT
    uart_config_t uart_config = {
        .baud_rate = UHCI_OUT_UART_BAUD_RATE,
        .data_bits = UART_DATA_8_BITS,
        .parity = UART_PARITY_DISABLE,
        .stop_bits = UART_STOP_BITS_1,
        .flow_ctrl = UART_HW_FLOWCTRL_CTS_RTS,
        .rx_flow_ctrl_thresh = 122,
    };
    // Configure UART parameters
    uart_param_config(UHCI_OUT_UART_PORT, &uart_config);
    // TX-only usage: RX/RTS/CTS pins are left unassigned (-1)
    uart_set_pin(UHCI_OUT_UART_PORT, UHCI_OUT_UART_IO_NUM_TX, -1, -1, -1);
#endif // UHCI_OUT_UART_NEED_INIT
    uhci_controller_config_t uhci_config = {
        .uart_port = UHCI_OUT_UART_PORT,
        .tx_trans_queue_depth = UHCI_OUT_QUEUE_SIZE,
        .max_receive_internal_mem = 1024,
        .max_transmit_size = UHCI_OUT_MAX_TRANSFER_SIZE,
        .dma_burst_size = 32,
        .rx_eof_flags.idle_eof = 1,
    };
    if (uhci_new_controller(&uhci_config, &uhci_handle) != ESP_OK) {
        goto failed;
    }
    // The tx-done callback recycles transaction buffers from ISR context
    uhci_event_callbacks_t uhci_cbs = {
        .on_tx_trans_done = uhci_out_tx_done_cb,
    };
    uhci_register_event_callbacks(uhci_handle, &uhci_cbs, NULL);
    if (uhci_out_user_log_init() != 0) {
        goto failed;
    }
    if (uhci_out_ll_log_init() != 0) {
        goto failed;
    }
    esp_timer_create_args_t timer_args = {
        .callback = (esp_timer_cb_t)esp_timer_cb_log_flush,
        .dispatch_method = ESP_TIMER_TASK
    };
    if (esp_timer_create(&timer_args, &flush_timer) != ESP_OK) {
        goto failed;
    }
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    // Install UART Driver if not installed
    if (!uart_is_driver_installed(UHCI_OUT_UART_PORT0)) {
        uart_driver_install(UHCI_OUT_UART_PORT0, UHCI_OUT_UART_DRIVER_RX_BUF_SIZE, 0, 0, NULL, 0);
    }
    // Redirect UART VFS Driver to UART Driver
    esp_vfs_dev_uart_use_driver(UHCI_OUT_UART_PORT0);
    // Redirect esp_rom_printf to BLE Log UHCI Out
    esp_rom_install_channel_putc(1, uhci_out_user_write_char);
    esp_rom_install_channel_putc(2, NULL);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    uhci_out_inited = true;
    // Kick off the self re-arming one-shot flush timer
    esp_timer_start_once(flush_timer, UHCI_OUT_FLUSH_TIMEOUT_US);
    return 0;
failed:
    ble_log_uhci_out_deinit();
    return -1;
}
// Tear down the UHCI output transport. Safe to call on a partially
// initialized module (it doubles as the init failure path).
void ble_log_uhci_out_deinit(void)
{
    // Flip the flag first so public entry points become no-ops
    uhci_out_inited = false;
    if (flush_timer) {
        esp_timer_stop(flush_timer);
        esp_timer_delete(flush_timer);
        flush_timer = NULL;
    }
    if (uhci_handle) {
        // Drain all queued DMA transactions before deleting the controller
        uhci_wait_all_tx_transaction_done(uhci_handle, portMAX_DELAY);
        uhci_del_controller(uhci_handle);
        uhci_handle = NULL;
    }
    uhci_out_ll_log_deinit();
    uhci_out_user_log_deinit();
}
// Controller log entry point. Frames addr[0..len) plus an optional appended
// payload (flat memory, or an os_mbuf chain when LL_LOG_FLAG_OMDATA is set)
// into the stream selected by `flag` (ISR / HCI / task). RAW-flagged logs are
// additionally hex-dumped straight to the console (assert path).
IRAM_ATTR void ble_log_uhci_out_ll_write(uint32_t len, const uint8_t *addr, uint32_t len_append,
                                         const uint8_t *addr_append, uint32_t flag)
{
    // Raw logs will come in case of assert, shall be printed to console directly
    if (flag & BIT(LL_LOG_FLAG_RAW)) {
        if (len && addr) {
            for (uint32_t i = 0; i < len; i++) { esp_rom_printf("%02x ", addr[i]); }
        }
        if (len_append && addr_append) {
            for (uint32_t i = 0; i < len_append; i++) { esp_rom_printf("%02x ", addr_append[i]); }
        }
        if (flag & BIT(LL_LOG_FLAG_END)) { esp_rom_printf("\n"); }
    }
    if (!ll_log_inited) {
        return;
    }
    // Select the target stream and frame source from the flag bits
    bool in_isr = false;
    uint8_t source;
    uhci_out_log_cb_t *log_cb;
    if (flag & BIT(LL_LOG_FLAG_ISR)) {
        log_cb = ll_isr_log_cb;
        source = BLE_LOG_UHCI_OUT_SOURCE_ESP_ISR;
        in_isr = true;
    } else if (flag & BIT(LL_LOG_FLAG_HCI)) {
        log_cb = ll_hci_log_cb;
        source = BLE_LOG_UHCI_OUT_SOURCE_LL_HCI;
    } else {
        log_cb = ll_task_log_cb;
        source = BLE_LOG_UHCI_OUT_SOURCE_ESP;
    }
    bool omdata = flag & BIT(LL_LOG_FLAG_OMDATA);
    bool need_append;
    uint16_t frame_len = len + len_append + UHCI_OUT_FRAME_OVERHEAD;
    // need_append is always assigned by check_trans before it is read below
    if (uhci_out_log_cb_check_trans(log_cb, frame_len, &need_append)) {
        need_append |= uhci_out_log_cb_write(log_cb, addr, len, addr_append,
                                             len_append, source, omdata);
    }
    // Use the ISR-safe tick getter when called from interrupt context
    ll_last_write_ts = in_isr?\
                       pdTICKS_TO_MS(xTaskGetTickCountFromISR()):\
                       pdTICKS_TO_MS(xTaskGetTickCount());
    if (need_append) {
        if (in_isr) {
            // Queueing is not ISR-safe: defer to controller task via event flag
            ll_ev_flags |= BIT(LL_EV_FLAG_ISR_APPEND);
            UHCI_OUT_LL_PUT_EV;
        } else {
            uhci_out_log_cb_append_trans(log_cb);
        }
    }
}
// Controller-task handler for events raised by the log writers and the
// flush timer. Each event flag is processed and acknowledged individually.
IRAM_ATTR void ble_log_uhci_out_ll_log_ev_proc(void)
{
    if (!ll_log_inited) {
        return;
    }
    if (ll_ev_flags & BIT(LL_EV_FLAG_ISR_APPEND)) {
        // ISR-context writer requested queueing of its filled buffer
        uhci_out_log_cb_append_trans(ll_isr_log_cb);
        ll_ev_flags &= ~BIT(LL_EV_FLAG_ISR_APPEND);
    }
    if (ll_ev_flags & BIT(LL_EV_FLAG_FLUSH_LOG)) {
        // Idle-timeout flush requested by the flush timer callback
        uhci_out_ll_log_flush();
        ll_ev_flags &= ~BIT(LL_EV_FLAG_FLUSH_LOG);
    }
    // Fix: the previous blanket `ll_ev_flags = 0;` here could discard flags
    // raised from ISR context between the per-bit acknowledgements above and
    // the reset, silently dropping pending events. The per-bit clears already
    // acknowledge every handled event, so the blanket reset is removed.
}
// Redirect UART Driver to BLE Log UHCI Out
int __real_uart_tx_chars(uart_port_t uart_num, const char *buffer, uint32_t len);
// Linker --wrap hook: when UHCI owns UART port 0, driver writes are diverted
// into the user log stream; on any other port the real driver call is used.
int __wrap_uart_tx_chars(uart_port_t uart_num, const char *buffer, uint32_t len)
{
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    uhci_out_user_write_str((const uint8_t *)buffer, len);
    return 0;
#else
    return __real_uart_tx_chars(uart_num, buffer, len);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
}
int __real_uart_write_bytes(uart_port_t uart_num, const void *src, size_t size);
// Linker --wrap hook: divert uart_write_bytes() into the user log stream
// when UHCI owns UART port 0; otherwise pass through to the real driver.
int __wrap_uart_write_bytes(uart_port_t uart_num, const void *src, size_t size)
{
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    uhci_out_user_write_str((const uint8_t *)src, size);
    return 0;
#else
    return __real_uart_write_bytes(uart_num, src, size);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
}
int __real_uart_write_bytes_with_break(uart_port_t uart_num, const void *src, size_t size, int brk_len);
// Linker --wrap hook: on port 0 a break condition cannot be generated through
// the log stream, so the data is forwarded via the plain wrapped write and
// brk_len is ignored; other ports pass through to the real driver.
int __wrap_uart_write_bytes_with_break(uart_port_t uart_num, const void *src, size_t size, int brk_len)
{
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    return __wrap_uart_write_bytes(uart_num, src, size);
#else
    return __real_uart_write_bytes_with_break(uart_num, src, size, brk_len);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
}
// Panic-path dump: hex-dump all controller log buffers to the console.
// On port 0 the ROM console printf is restored first (UHCI owns the UART),
// and the UHCI putc hook is re-installed once the dump completes.
void ble_log_uhci_out_dump_all(void)
{
    if (!uhci_out_inited) {
        return;
    }
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    // Wait for in-flight UART output, then point esp_rom_printf back at UART
    esp_rom_uart_tx_wait_idle(UHCI_OUT_UART_PORT0);
    esp_rom_install_uart_printf();
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    portMUX_TYPE spinlock = portMUX_INITIALIZER_UNLOCKED;
    portENTER_CRITICAL_SAFE(&spinlock);
    if (ll_log_inited) {
        esp_rom_printf("[DUMP_START:\n");
        uhci_out_log_cb_dump(ll_isr_log_cb);
        uhci_out_log_cb_dump(ll_task_log_cb);
        uhci_out_log_cb_dump(ll_hci_log_cb);
        esp_rom_printf("\n:DUMP_END]\n\n");
    }
    portEXIT_CRITICAL_SAFE(&spinlock);
#if UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
    esp_rom_install_channel_putc(1, uhci_out_user_write_char);
#endif // UHCI_OUT_UART_PORT == UHCI_OUT_UART_PORT0
}
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
@@ -0,0 +1,176 @@
config BT_BLE_LOG_SPI_OUT_ENABLED
bool "Output ble logs to SPI bus (Experimental)"
depends on !BLE_LOG_ENABLED
default n
help
Output ble logs to SPI bus
if BT_BLE_LOG_SPI_OUT_ENABLED
config BT_BLE_LOG_SPI_OUT_UL_TASK_BUF_SIZE
int "SPI transaction buffer size for upper layer task logs"
default 512
help
SPI transaction buffer size for upper layer task logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HCI_ENABLED
bool "Enable HCI log output to SPI"
default n
help
Enable logging of HCI packets to the SPI bus when BLE SPI log output is enabled.
config BT_BLE_LOG_SPI_OUT_HCI_BUF_SIZE
int "SPI transaction buffer size for HCI logs"
depends on BT_BLE_LOG_SPI_OUT_HCI_ENABLED
default 1024
help
SPI transaction buffer size for HCI logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HCI_TASK_CNT
int "HCI task count"
depends on BT_BLE_LOG_SPI_OUT_HCI_ENABLED
default 1
help
HCI task count
config BT_BLE_LOG_SPI_OUT_HOST_ENABLED
bool "Enable Host log output to SPI"
default n
help
This configuration applies to the logs of both Bluedroid Host and NimBLE Host.
When BLE SPI log output is enabled, this option allows host logs to be transmitted via SPI.
config BT_BLE_LOG_SPI_OUT_HOST_BUF_SIZE
int "SPI transaction buffer size for host logs"
depends on BT_BLE_LOG_SPI_OUT_HOST_ENABLED
default 1024
help
SPI transaction buffer size for host logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_HOST_TASK_CNT
int "Host task count"
depends on BT_BLE_LOG_SPI_OUT_HOST_ENABLED
default 2
help
Host task count.
config BT_BLE_LOG_SPI_OUT_LL_ENABLED
bool "Enable Controller log output to SPI"
depends on BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
default n
help
Enable controller log output to SPI bus.
config BT_BLE_LOG_SPI_OUT_LL_TASK_BUF_SIZE
int "SPI transaction buffer size for lower layer task logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 1024
help
SPI transaction buffer size for lower layer task logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LL_ISR_BUF_SIZE
int "SPI transaction buffer size for lower layer ISR logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 512
help
SPI transaction buffer size for lower layer ISR logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LL_HCI_BUF_SIZE
int "SPI transaction buffer size for lower layer HCI logs"
depends on BT_BLE_LOG_SPI_OUT_LL_ENABLED
default 512
help
SPI transaction buffer size for lower layer HCI logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_MOSI_IO_NUM
int "GPIO number of SPI MOSI"
default 0
help
GPIO number of SPI MOSI
config BT_BLE_LOG_SPI_OUT_SCLK_IO_NUM
int "GPIO number of SPI SCLK"
default 1
help
GPIO number of SPI SCLK
config BT_BLE_LOG_SPI_OUT_CS_IO_NUM
int "GPIO number of SPI CS"
default 2
help
GPIO number of SPI CS
config BT_BLE_LOG_SPI_OUT_TS_SYNC_ENABLED
bool "Enable ble log & logic analyzer log time sync"
default y
help
Enable ble log & logic analyzer log time sync
config BT_BLE_LOG_SPI_OUT_SYNC_IO_NUM
int "GPIO number of SYNC IO"
depends on BT_BLE_LOG_SPI_OUT_TS_SYNC_ENABLED
default 3
help
GPIO number of SYNC IO
config BT_BLE_LOG_SPI_OUT_FLUSH_TIMER_ENABLED
bool "Enable periodic buffer flush out"
default n
help
Enable periodic buffer flush out
Not recommended when SPI receiver is unavailable
config BT_BLE_LOG_SPI_OUT_FLUSH_TIMEOUT
int "Buffer flush out period in unit of ms"
depends on BT_BLE_LOG_SPI_OUT_FLUSH_TIMER_ENABLED
default 1000
help
Buffer flush out period in unit of ms
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
bool "Enable LE Audio log output to SPI"
default n
help
Enable LE Audio log output to SPI
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_BUF_SIZE
int "SPI transaction buffer size for LE Audio logs"
depends on BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
default 1024
help
SPI transaction buffer size for LE Audio logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_LE_AUDIO_TASK_CNT
int "LE audio task count"
depends on BT_BLE_LOG_SPI_OUT_LE_AUDIO_ENABLED
default 1
help
LE audio task count
config BT_BLE_LOG_SPI_OUT_MESH_ENABLED
bool "Enable BLE mesh log output to SPI"
default n
help
Enable BLE mesh log output to SPI
config BT_BLE_LOG_SPI_OUT_MESH_BUF_SIZE
int "SPI transaction buffer size for BLE mesh logs"
depends on BT_BLE_LOG_SPI_OUT_MESH_ENABLED
default 1024
help
SPI transaction buffer size for BLE mesh logs.
There will be 2 SPI DMA buffers with the same size.
config BT_BLE_LOG_SPI_OUT_MESH_TASK_CNT
int "Mesh task count"
depends on BT_BLE_LOG_SPI_OUT_MESH_ENABLED
default 3
help
Mesh task count
endif
@@ -35,6 +35,9 @@ typedef enum {
BLE_LOG_SRC_HCI,
BLE_LOG_SRC_ENCODE,
/* UART redirection (PORT 0 only) */
BLE_LOG_SRC_REDIR,
BLE_LOG_SRC_MAX,
} ble_log_src_t;
@@ -1,35 +0,0 @@
/*
 * SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */
/* BLE Log UHCI output transport: frames BLE controller and user logs and
 * ships them over UART via the UHCI (UART DMA) controller. */
/* Fix: the include guard was `__BT_SPI_OUT_H__`, copied from the SPI-out
 * header — including both headers would make the second one expand to
 * nothing. Renamed to a guard unique to this file, matching the #endif. */
#ifndef __BLE_LOG_UHCI_OUT_H__
#define __BLE_LOG_UHCI_OUT_H__
#include <stdint.h>   /* uint8_t / uint32_t used in the API below */
#include <stdarg.h>
#include <string.h>
#include "esp_private/uhci.h"
#include "driver/uart.h"
#include "esp_vfs_dev.h"
#include "esp_rom_uart.h"
#include "esp_timer.h"
#include "freertos/semphr.h"
// Public enums
// Source identifiers carried in each log frame head
enum {
    BLE_LOG_UHCI_OUT_SOURCE_ESP = 0,      // controller task-context logs
    BLE_LOG_UHCI_OUT_SOURCE_ESP_ISR = 6,  // controller ISR-context logs
    BLE_LOG_UHCI_OUT_SOURCE_LL_HCI = 8,   // link-layer HCI logs
    BLE_LOG_UHCI_OUT_SOURCE_USER = 0x10,  // user/application logs
    BLE_LOG_UHCI_OUT_SOURCE_LOSS = 0xFF,  // synthetic loss-report frames
};
// Public functions
// Initialize the transport; idempotent. Returns 0 on success, -1 on failure.
int ble_log_uhci_out_init(void);
// Tear down the transport; safe on a partially-initialized module.
void ble_log_uhci_out_deinit(void);
// Write one controller log entry; `flag` selects stream and payload form.
void ble_log_uhci_out_ll_write(uint32_t len, const uint8_t *addr, uint32_t len_append,
                               const uint8_t *addr_append, uint32_t flag);
// Process pending log events in controller task context.
void ble_log_uhci_out_ll_log_ev_proc(void);
// Panic-path dump of all controller log buffers to the console.
void ble_log_uhci_out_dump_all(void);
#endif // __BLE_LOG_UHCI_OUT_H__
+1 -1
View File
@@ -46,7 +46,7 @@ bool ble_log_init(void)
}
/* Initialize BLE Log peripheral interface */
if (!ble_log_prph_init(BLE_LOG_LBM_CNT)) {
if (!ble_log_prph_init(BLE_LOG_TRANS_TOTAL_CNT)) {
goto exit;
}
+254 -83
View File
@@ -24,23 +24,34 @@ BLE_LOG_STATIC ble_log_lbm_ctx_t *lbm_ctx = NULL;
BLE_LOG_STATIC ble_log_stat_mgr_t *stat_mgr_ctx[BLE_LOG_SRC_MAX] = {0};
/* PRIVATE FUNCTION DECLARATION */
BLE_LOG_STATIC ble_log_lbm_t *ble_log_lbm_acquire(void);
BLE_LOG_STATIC
bool ble_log_lbm_acquire_trans(size_t log_len, ble_log_lbm_t **out_lbm,
ble_log_prph_trans_t ***out_trans);
BLE_LOG_STATIC void ble_log_lbm_release(ble_log_lbm_t *lbm);
BLE_LOG_STATIC
ble_log_prph_trans_t **ble_log_lbm_get_trans(ble_log_lbm_t *lbm, size_t log_len);
BLE_LOG_STATIC
void ble_log_lbm_write_trans(ble_log_prph_trans_t **trans, ble_log_src_t src_code,
const uint8_t *addr, uint16_t len,
const uint8_t *addr_append, uint16_t len_append, bool omdata);
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
#if BLE_LOG_UART_REDIR_ENABLED
BLE_LOG_STATIC
void ble_log_lbm_stream_seal(ble_log_prph_trans_t **trans, ble_log_src_t src_code);
#endif /* BLE_LOG_UART_REDIR_ENABLED */
BLE_LOG_STATIC void ble_log_stat_mgr_update(ble_log_src_t src_code, uint32_t len, bool lost);
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
/* ------------------------- */
/* PRIVATE INTERFACE */
/* ------------------------- */
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
ble_log_lbm_t *ble_log_lbm_acquire(void)
bool ble_log_lbm_acquire_trans(size_t log_len, ble_log_lbm_t **out_lbm,
ble_log_prph_trans_t ***out_trans)
{
ble_log_lbm_t *lbm = NULL;
*out_lbm = NULL;
*out_trans = NULL;
ble_log_lbm_t *lbm;
ble_log_prph_trans_t **trans;
ble_log_lbm_t *atomic_pool;
ble_log_lbm_t *spin_lbm;
int atomic_pool_size;
@@ -55,18 +66,32 @@ ble_log_lbm_t *ble_log_lbm_acquire(void)
atomic_pool_size = BLE_LOG_LBM_ATOMIC_TASK_CNT;
}
/* Try to acquire atomic LBM first */
/* Try each atomic LBM: acquire lock, check buffer, fallback on failure */
for (int i = 0; i < atomic_pool_size; i++) {
lbm = &atomic_pool[i];
if (ble_log_cas_acquire(&(lbm->atomic_lock))) {
return lbm;
trans = ble_log_lbm_get_trans(lbm, log_len);
if (trans) {
*out_lbm = lbm;
*out_trans = trans;
return true;
}
ble_log_cas_release(&(lbm->atomic_lock));
}
}
/* Fallback to spinlock LBM */
/* Last resort: spinlock LBM */
lbm = spin_lbm;
BLE_LOG_ACQUIRE_SPIN_LOCK(&(lbm->spin_lock));
return lbm;
trans = ble_log_lbm_get_trans(lbm, log_len);
if (trans) {
*out_lbm = lbm;
*out_trans = trans;
return true;
}
BLE_LOG_RELEASE_SPIN_LOCK(&(lbm->spin_lock));
return false;
}
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
@@ -119,22 +144,13 @@ void ble_log_lbm_write_trans(ble_log_prph_trans_t **trans, ble_log_src_t src_cod
}
/* Data integrity check */
#if CONFIG_BLE_LOG_PAYLOAD_CHECKSUM_ENABLED
uint32_t checksum = ble_log_fast_checksum((const uint8_t *)buf, BLE_LOG_FRAME_HEAD_LEN + payload_len);
#else /* !CONFIG_BLE_LOG_PAYLOAD_CHECKSUM_ENABLED */
/* Note:
* Minimum data integrity check is still required for log parsing reliability,
* which can be achieved by validating the checksum of frame head only */
uint32_t checksum = ble_log_fast_checksum((const uint8_t *)buf, BLE_LOG_FRAME_HEAD_LEN);
#endif /* CONFIG_BLE_LOG_PAYLOAD_CHECKSUM_ENABLED */
BLE_LOG_MEMCPY(buf + BLE_LOG_FRAME_HEAD_LEN + payload_len, &checksum, BLE_LOG_FRAME_TAIL_LEN);
/* Update peripheral transport */
(*trans)->pos += payload_len + BLE_LOG_FRAME_OVERHEAD;
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
ble_log_stat_mgr_update(src_code, payload_len, false);
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
/* Queue trans if full */
if (BLE_LOG_TRANS_FREE_SPACE((*trans)) <= BLE_LOG_FRAME_OVERHEAD) {
@@ -142,24 +158,50 @@ void ble_log_lbm_write_trans(ble_log_prph_trans_t **trans, ble_log_src_t src_cod
}
}
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
#if BLE_LOG_UART_REDIR_ENABLED
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
void ble_log_lbm_stream_seal(ble_log_prph_trans_t **trans, ble_log_src_t src_code)
{
if ((*trans)->pos <= BLE_LOG_FRAME_HEAD_LEN) {
return;
}
uint16_t payload_len = (*trans)->pos - BLE_LOG_FRAME_HEAD_LEN;
ble_log_stat_mgr_t *stat_mgr = stat_mgr_ctx[src_code];
uint32_t frame_sn = BLE_LOG_GET_FRAME_SN(&(stat_mgr->frame_sn));
ble_log_frame_head_t frame_head = {
.length = payload_len,
.frame_meta = BLE_LOG_MAKE_FRAME_META(src_code, frame_sn),
};
BLE_LOG_MEMCPY((*trans)->buf, &frame_head, BLE_LOG_FRAME_HEAD_LEN);
uint32_t checksum = ble_log_fast_checksum((*trans)->buf, (*trans)->pos);
BLE_LOG_MEMCPY((*trans)->buf + (*trans)->pos, &checksum, BLE_LOG_FRAME_TAIL_LEN);
(*trans)->pos += BLE_LOG_FRAME_TAIL_LEN;
ble_log_stat_mgr_update(src_code, payload_len, false);
ble_log_rt_queue_trans(trans);
}
#endif /* BLE_LOG_UART_REDIR_ENABLED */
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
void ble_log_stat_mgr_update(ble_log_src_t src_code, uint32_t len, bool lost)
{
/* Get statistic manager by source code */
ble_log_stat_mgr_t *stat_mgr = stat_mgr_ctx[src_code];
/* Update statistics */
/* Update aligned counters */
uint32_t bytes_cnt = len + BLE_LOG_FRAME_OVERHEAD;
if (lost) {
stat_mgr->enh_stat.lost_frame_cnt++;
stat_mgr->enh_stat.lost_bytes_cnt += bytes_cnt;
BLE_LOG_GET_FRAME_SN(&(stat_mgr->frame_sn)); /* consume SN for loss detection */
stat_mgr->lost_frame_cnt++;
stat_mgr->lost_bytes_cnt += bytes_cnt;
} else {
stat_mgr->enh_stat.written_frame_cnt++;
stat_mgr->enh_stat.written_bytes_cnt += bytes_cnt;
stat_mgr->written_frame_cnt++;
stat_mgr->written_bytes_cnt += bytes_cnt;
}
}
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
/* -------------------------- */
/* INTERNAL INTERFACE */
@@ -182,11 +224,12 @@ bool ble_log_lbm_init(void)
ble_log_lbm_t *lbm;
for (int i = 0; i < BLE_LOG_LBM_COMMON_CNT; i++) {
lbm = &(lbm_ctx->lbm_common_pool[i]);
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
if (!ble_log_prph_trans_init(&(lbm->trans[j]),
CONFIG_BLE_LOG_LBM_TRANS_SIZE)) {
BLE_LOG_TRANS_SIZE)) {
goto exit;
}
lbm->trans[j]->owner = (void *)lbm;
}
}
@@ -204,11 +247,12 @@ bool ble_log_lbm_init(void)
#if CONFIG_BLE_LOG_LL_ENABLED
for (int i = 0; i < BLE_LOG_LBM_LL_MAX; i++) {
lbm = &(lbm_ctx->lbm_ll_pool[i]);
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
if (!ble_log_prph_trans_init(&(lbm->trans[j]),
CONFIG_BLE_LOG_LBM_LL_TRANS_SIZE)) {
BLE_LOG_TRANS_LL_SIZE)) {
goto exit;
}
lbm->trans[j]->owner = (void *)lbm;
}
}
@@ -225,11 +269,6 @@ bool ble_log_lbm_init(void)
goto exit;
}
BLE_LOG_MEMSET(stat_mgr_ctx[i], 0, sizeof(ble_log_stat_mgr_t));
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
stat_mgr_ctx[i]->enh_stat.int_src_code = BLE_LOG_INT_SRC_ENH_STAT;
stat_mgr_ctx[i]->enh_stat.src_code = i;
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
}
/* Initialization done */
@@ -270,7 +309,7 @@ void ble_log_lbm_deinit(void)
ble_log_lbm_t *lbm;
for (int i = 0; i < BLE_LOG_LBM_CNT; i++) {
lbm = &(lbm_ctx->lbm_pool[i]);
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
ble_log_prph_trans_deinit(&(lbm->trans[j]));
}
}
@@ -281,18 +320,14 @@ void ble_log_lbm_deinit(void)
}
}
/* Note:
* The function below should be private, but when UART redirection is required,
* it would be a waste to implement get transport function again, thus
* make it available internally */
BLE_LOG_IRAM_ATTR
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
ble_log_prph_trans_t **ble_log_lbm_get_trans(ble_log_lbm_t *lbm, size_t log_len)
{
/* Check if available buffer can contain incoming log */
ble_log_prph_trans_t **trans;
for (int i = 0; i < BLE_LOG_TRANS_PING_PONG_BUF_CNT; i++) {
for (int i = 0; i < BLE_LOG_TRANS_BUF_CNT; i++) {
trans = &(lbm->trans[lbm->trans_idx]);
if (!(*trans)->prph_owned) {
if (!__atomic_load_n(&(*trans)->prph_owned, __ATOMIC_ACQUIRE)) {
/* Return if there's enough free space in current transport */
if (BLE_LOG_TRANS_FREE_SPACE((*trans)) >= (log_len + BLE_LOG_FRAME_OVERHEAD)) {
return trans;
@@ -305,14 +340,13 @@ ble_log_prph_trans_t **ble_log_lbm_get_trans(ble_log_lbm_t *lbm, size_t log_len)
}
/* Current transport unavailable, switch to the other */
lbm->trans_idx = !lbm->trans_idx;
lbm->trans_idx = (lbm->trans_idx + 1) & (BLE_LOG_TRANS_BUF_CNT - 1);
}
/* Both ping-pong buffers are unavailable */
/* All buffers are unavailable */
return NULL;
}
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
void ble_log_write_enh_stat(void)
{
BLE_LOG_REF_COUNT_ACQUIRE(&lbm_ref_count);
@@ -320,15 +354,152 @@ void ble_log_write_enh_stat(void)
goto deref;
}
/* Snapshot all sources under one critical section so the set of
* counters is mutually consistent, then write outside the lock. */
ble_log_enh_stat_t snapshots[BLE_LOG_SRC_MAX];
for (int i = 0; i < BLE_LOG_SRC_MAX; i++) {
ble_log_enh_stat_t *enh_stat = &(stat_mgr_ctx[i]->enh_stat);
ble_log_write_hex(BLE_LOG_SRC_INTERNAL, (const uint8_t *)enh_stat, sizeof(ble_log_enh_stat_t));
snapshots[i].int_src_code = BLE_LOG_INT_SRC_ENH_STAT;
snapshots[i].src_code = i;
}
BLE_LOG_ENTER_CRITICAL();
for (int i = 0; i < BLE_LOG_SRC_MAX; i++) {
BLE_LOG_MEMCPY(&snapshots[i].written_frame_cnt,
&stat_mgr_ctx[i]->written_frame_cnt,
4 * sizeof(uint32_t));
}
BLE_LOG_EXIT_CRITICAL();
for (int i = 0; i < BLE_LOG_SRC_MAX; i++) {
ble_log_write_hex(BLE_LOG_SRC_INTERNAL, (const uint8_t *)&snapshots[i], sizeof(ble_log_enh_stat_t));
}
deref:
BLE_LOG_REF_COUNT_RELEASE(&lbm_ref_count);
}
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
#if BLE_LOG_UART_REDIR_ENABLED
/* ------------------------------------------------- */
/* STREAM WRITE INTERFACE */
/* */
/* Stream mode appends raw data into a transport */
/* buffer with deferred frame encapsulation: */
/* - Header space is reserved on first write */
/* - Data is memcpy'd after the reserved header */
/* - Header and checksum are filled in at seal */
/* */
/* get_trans(lbm, 0) reuse safety: */
/* */
/* get_trans auto-queues a buffer raw (no seal) when */
/* free_space < log_len + FRAME_OVERHEAD. With */
/* log_len = 0, this triggers at free_space < 10. */
/* */
/* To prevent unsealed stream data from being sent */
/* raw, stream_write auto-seals at free_space <= 10. */
/* This guarantees that any unsealed stream buffer */
/* seen by get_trans always has free_space > 10, */
/* so get_trans returns it directly without queuing. */
/* ------------------------------------------------- */
BLE_LOG_IRAM_ATTR
void ble_log_lbm_stream_write(ble_log_lbm_t *lbm, ble_log_src_t src_code,
const uint8_t *data, size_t len)
{
while (len > 0) {
ble_log_prph_trans_t **trans = ble_log_lbm_get_trans(lbm, 0);
if (!trans) {
ble_log_stat_mgr_update(src_code, len, true);
return;
}
if ((*trans)->pos == 0) {
(*trans)->pos = BLE_LOG_FRAME_HEAD_LEN;
}
uint16_t available = BLE_LOG_TRANS_FREE_SPACE((*trans));
if (available <= BLE_LOG_FRAME_TAIL_LEN) {
ble_log_lbm_stream_seal(trans, src_code);
continue;
}
available -= BLE_LOG_FRAME_TAIL_LEN;
size_t to_write = (len < available) ? len : available;
BLE_LOG_MEMCPY((*trans)->buf + (*trans)->pos, data, to_write);
(*trans)->pos += to_write;
data += to_write;
len -= to_write;
if (BLE_LOG_TRANS_FREE_SPACE((*trans)) <= BLE_LOG_FRAME_OVERHEAD) {
ble_log_lbm_stream_seal(trans, src_code);
}
}
}
BLE_LOG_IRAM_ATTR
void ble_log_lbm_stream_flush(ble_log_lbm_t *lbm, ble_log_src_t src_code)
{
int trans_idx = lbm->trans_idx;
for (int i = 0; i < BLE_LOG_TRANS_BUF_CNT; i++) {
ble_log_prph_trans_t **trans = &(lbm->trans[trans_idx]);
if (!__atomic_load_n(&(*trans)->prph_owned, __ATOMIC_ACQUIRE) &&
(*trans)->pos > BLE_LOG_FRAME_HEAD_LEN) {
ble_log_lbm_stream_seal(trans, src_code);
}
trans_idx = (trans_idx + 1) & (BLE_LOG_TRANS_BUF_CNT - 1);
}
}
#endif /* BLE_LOG_UART_REDIR_ENABLED */
BLE_LOG_STATIC void ble_log_emit_buf_util(ble_log_lbm_t *lbm, uint8_t lbm_id)
{
ble_log_buf_util_t util = {
.int_src_code = BLE_LOG_INT_SRC_BUF_UTIL,
.lbm_id = lbm_id,
.trans_cnt = BLE_LOG_TRANS_BUF_CNT,
.inflight_peak = (uint8_t)__atomic_load_n(
&lbm->trans_inflight_peak, __ATOMIC_RELAXED),
};
ble_log_write_hex(BLE_LOG_SRC_INTERNAL,
(const uint8_t *)&util, sizeof(ble_log_buf_util_t));
}
void ble_log_write_buf_util(void)
{
BLE_LOG_REF_COUNT_ACQUIRE(&lbm_ref_count);
if (!lbm_enabled) {
goto deref;
}
ble_log_emit_buf_util(&lbm_ctx->spin_task,
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_COMMON_TASK, 0));
for (int i = 0; i < BLE_LOG_LBM_ATOMIC_TASK_CNT; i++) {
ble_log_emit_buf_util(&lbm_ctx->atomic_pool_task[i],
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_COMMON_TASK, 1 + i));
}
ble_log_emit_buf_util(&lbm_ctx->spin_isr,
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_COMMON_ISR, 0));
for (int i = 0; i < BLE_LOG_LBM_ATOMIC_ISR_CNT; i++) {
ble_log_emit_buf_util(&lbm_ctx->atomic_pool_isr[i],
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_COMMON_ISR, 1 + i));
}
#if CONFIG_BLE_LOG_LL_ENABLED
ble_log_emit_buf_util(&lbm_ctx->lbm_ll_task,
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_LL, 0));
ble_log_emit_buf_util(&lbm_ctx->lbm_ll_hci,
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_LL, 1));
#endif
#if BLE_LOG_UART_REDIR_ENABLED
ble_log_lbm_t *redir_lbm = ble_log_prph_get_redir_lbm();
if (redir_lbm) {
ble_log_emit_buf_util(redir_lbm,
BLE_LOG_BUF_UTIL_MAKE_ID(BLE_LOG_BUF_UTIL_POOL_REDIR, 0));
}
#endif
deref:
BLE_LOG_REF_COUNT_RELEASE(&lbm_ref_count);
}
/* ------------------------ */
/* PUBLIC INTERFACE */
@@ -349,10 +520,9 @@ void ble_log_flush(void)
goto deref;
}
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
/* Write enhanced statistics before module disable */
ble_log_write_enh_stat();
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
ble_log_write_buf_util();
/* Write BLE Log flush log */
ble_log_info_t ble_log_info = {
@@ -378,12 +548,13 @@ void ble_log_flush(void)
for (int i = 0; i < BLE_LOG_LBM_CNT; i++) {
lbm = &(lbm_ctx->lbm_pool[i]);
int trans_idx = lbm->trans_idx;
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
trans = &(lbm->trans[trans_idx]);
if (!(*trans)->prph_owned && (*trans)->pos) {
if (!__atomic_load_n(&(*trans)->prph_owned, __ATOMIC_ACQUIRE) &&
(*trans)->pos) {
ble_log_rt_queue_trans(trans);
}
trans_idx = !trans_idx;
trans_idx = (trans_idx + 1) & (BLE_LOG_TRANS_BUF_CNT - 1);
}
}
@@ -394,9 +565,9 @@ void ble_log_flush(void)
in_progress = false;
for (int i = 0; i < BLE_LOG_LBM_CNT; i++) {
lbm = &(lbm_ctx->lbm_pool[i]);
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
trans = &(lbm->trans[j]);
in_progress |= (*trans)->prph_owned;
in_progress |= __atomic_load_n(&(*trans)->prph_owned, __ATOMIC_ACQUIRE);
}
}
if (in_progress) {
@@ -408,13 +579,15 @@ void ble_log_flush(void)
/* Reset statistics manager after all operations complete */
for (int i = 0; i < BLE_LOG_SRC_MAX; i++) {
BLE_LOG_MEMSET(stat_mgr_ctx[i], 0, sizeof(ble_log_stat_mgr_t));
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
/* Reinitialize enhanced statistics fields */
stat_mgr_ctx[i]->enh_stat.int_src_code = BLE_LOG_INT_SRC_ENH_STAT;
stat_mgr_ctx[i]->enh_stat.src_code = i;
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
}
for (int i = 0; i < BLE_LOG_LBM_CNT; i++) {
lbm = &(lbm_ctx->lbm_pool[i]);
__atomic_store_n(&lbm->trans_inflight, 0, __ATOMIC_RELAXED);
__atomic_store_n(&lbm->trans_inflight_peak, 0, __ATOMIC_RELAXED);
}
ble_log_prph_reset_util_counters();
/* Resume enable status */
lbm_enabled = lbm_enabled_copy;
@@ -430,12 +603,11 @@ bool ble_log_write_hex(ble_log_src_t src_code, const uint8_t *addr, size_t len)
goto exit;
}
/* Get transport */
/* Get transport from the best available pool */
size_t payload_len = len + sizeof(uint32_t);
ble_log_lbm_t *lbm = ble_log_lbm_acquire();
ble_log_prph_trans_t **trans = ble_log_lbm_get_trans(lbm, payload_len);
if (!trans) {
ble_log_lbm_release(lbm);
ble_log_lbm_t *lbm;
ble_log_prph_trans_t **trans;
if (!ble_log_lbm_acquire_trans(payload_len, &lbm, &trans)) {
goto failed;
}
@@ -452,11 +624,9 @@ bool ble_log_write_hex(ble_log_src_t src_code, const uint8_t *addr, size_t len)
return true;
failed:
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
if (lbm_inited) {
ble_log_stat_mgr_update(src_code, payload_len, true);
}
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
exit:
BLE_LOG_REF_COUNT_RELEASE(&lbm_ref_count);
return false;
@@ -487,14 +657,19 @@ void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr,
}
bool omdata = flag & BIT(BLE_LOG_LL_FLAG_OMDATA);
/* Determine LBM by flag */
/* Determine LBM and get transport */
ble_log_lbm_t *lbm;
if (BLE_LOG_IN_ISR()) {
/* Reuse common LBM acquire logic */
lbm = ble_log_lbm_acquire();
ble_log_prph_trans_t **trans;
size_t payload_len;
if (BLE_LOG_IN_ISR()) {
/* os_mbuf_copydata is in flash and not safe to call from ISR */
omdata = false;
payload_len = len + len_append;
if (!ble_log_lbm_acquire_trans(payload_len, &lbm, &trans)) {
goto failed;
}
} else {
if (use_ll_task) {
lbm = &(lbm_ctx->lbm_ll_task);
@@ -506,14 +681,12 @@ void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr,
}
#endif /* CONFIG_BLE_LOG_LL_HCI_LOG_PAYLOAD_LEN_LIMIT_ENABLED */
}
}
/* Get transport */
size_t payload_len = len + len_append;
ble_log_prph_trans_t **trans = ble_log_lbm_get_trans(lbm, payload_len);
if (!trans) {
ble_log_lbm_release(lbm);
goto failed;
payload_len = len + len_append;
trans = ble_log_lbm_get_trans(lbm, payload_len);
if (!trans) {
/* LL pools use LOCK_NONE, release is no-op */
goto failed;
}
}
/* Write transport */
@@ -524,11 +697,9 @@ void ble_log_write_hex_ll(uint32_t len, const uint8_t *addr,
return;
failed:
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
if (lbm_inited) {
ble_log_stat_mgr_update(src_code, payload_len, true);
}
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
exit:
BLE_LOG_REF_COUNT_RELEASE(&lbm_ref_count);
return;
@@ -550,7 +721,7 @@ void ble_log_dump_to_console(void)
for (int i = 0; i < BLE_LOG_LBM_CNT; i++) {
lbm = &(lbm_ctx->lbm_pool[i]);
trans_idx = lbm->trans_idx;
for (int j = 0; j < BLE_LOG_TRANS_PING_PONG_BUF_CNT; j++) {
for (int j = 0; j < BLE_LOG_TRANS_BUF_CNT; j++) {
trans = lbm->trans[trans_idx];
BLE_LOG_FEED_WDT();
@@ -560,7 +731,7 @@ void ble_log_dump_to_console(void)
BLE_LOG_FEED_WDT();
}
}
trans_idx = !trans_idx;
trans_idx = (trans_idx + 1) & (BLE_LOG_TRANS_BUF_CNT - 1);
}
}
BLE_LOG_CONSOLE("\n:BLE_LOG_DUMP_END]\n\n");
+17 -5
View File
@@ -69,9 +69,8 @@ BLE_LOG_IRAM_ATTR BLE_LOG_STATIC void ble_log_rt_task(void *pvParameters)
ble_log_rt_ts_trigger(NULL);
#endif /* CONFIG_BLE_LOG_TS_TRIGGER_TASK_EVENT */
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
ble_log_write_enh_stat();
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
ble_log_write_buf_util();
}
}
@@ -102,7 +101,7 @@ bool ble_log_rt_init(void)
/* CRITICAL:
* Queue must be initialized before creating task */
rt_queue_handle = xQueueCreate(BLE_LOG_LBM_CNT, sizeof(ble_log_prph_trans_t *));
rt_queue_handle = xQueueCreate(BLE_LOG_TRANS_TOTAL_CNT, sizeof(ble_log_prph_trans_t *));
if (!rt_queue_handle) {
goto exit;
}
@@ -172,9 +171,22 @@ void ble_log_rt_deinit(void)
BLE_LOG_IRAM_ATTR void ble_log_rt_queue_trans(ble_log_prph_trans_t **trans)
{
(*trans)->prph_owned = true;
__atomic_store_n(&(*trans)->prph_owned, true, __ATOMIC_RELAXED);
ble_log_lbm_t *lbm = (ble_log_lbm_t *)(*trans)->owner;
uint32_t inflight = __atomic_add_fetch(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
uint32_t peak = __atomic_load_n(&lbm->trans_inflight_peak, __ATOMIC_RELAXED);
while (inflight > peak) {
if (__atomic_compare_exchange_n(&lbm->trans_inflight_peak, &peak, inflight,
true, __ATOMIC_RELAXED, __ATOMIC_RELAXED)) {
break;
}
}
if (BLE_LOG_IN_ISR()) {
xQueueSendFromISR(rt_queue_handle, trans, NULL);
BaseType_t woken = pdFALSE;
xQueueSendFromISR(rt_queue_handle, trans, &woken);
portYIELD_FROM_ISR(woken);
} else {
xQueueSend(rt_queue_handle, trans, portMAX_DELAY);
}
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -17,7 +17,6 @@ portMUX_TYPE ble_log_spin_lock = portMUX_INITIALIZER_UNLOCKED;
#endif /* !UNIT_TEST */
/* INTERNAL INTERFACE */
#if CONFIG_BLE_LOG_XOR_CHECKSUM_ENABLED
#include "esp_compiler.h"
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC BLE_LOG_INLINE
@@ -69,35 +68,3 @@ uint32_t ble_log_fast_checksum(const uint8_t *data, size_t len)
/* Step 6: Rotate the final result */
return ror32(checksum, start_offset_shift);
}
#else /* !CONFIG_BLE_LOG_XOR_CHECKSUM_ENABLED */
BLE_LOG_IRAM_ATTR
uint32_t ble_log_fast_checksum(const uint8_t *data, size_t len)
{
uint32_t sum = 0;
size_t i = 0;
/* Step 1: Sum up until 4-byte aligned */
while (((uintptr_t)(data + i) & 0x3) && (i < len)) {
sum += data[i++];
}
/* Step 2: Sum up 4-byte aligned blocks */
const uint32_t *p32 = (const uint32_t *)(data + i);
size_t blocks = (len - i) / 4;
for (size_t b = 0; b < blocks; b++) {
uint32_t v = p32[b];
sum += (v & 0xFF)
+ ((v >> 8) & 0xFF)
+ ((v >> 16) & 0xFF)
+ ((v >> 24) & 0xFF);
}
i += blocks * 4;
/* Step 3: Sum up remaining bytes */
while (i < len) {
sum += data[i++];
}
return sum;
}
#endif /* CONFIG_BLE_LOG_XOR_CHECKSUM_ENABLED */
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -16,6 +16,12 @@
#include "ble_log_prph.h"
#include "freertos/FreeRTOS.h"
#if defined(CONFIG_BLE_LOG_PRPH_UART_DMA) && (CONFIG_BLE_LOG_PRPH_UART_DMA_PORT == 0)
#define BLE_LOG_UART_REDIR_ENABLED (1)
#else
#define BLE_LOG_UART_REDIR_ENABLED (0)
#endif
#include "freertos/semphr.h"
/* ------------------------- */
@@ -43,7 +49,7 @@ typedef enum {
typedef struct {
int trans_idx;
ble_log_prph_trans_t *trans[BLE_LOG_TRANS_PING_PONG_BUF_CNT];
ble_log_prph_trans_t *trans[BLE_LOG_TRANS_BUF_CNT];
ble_log_lbm_lock_t lock_type;
union {
/* BLE_LOG_LBM_LOCK_NONE */
@@ -55,6 +61,8 @@ typedef struct {
/* BLE_LOG_LBM_LOCK_MUTEX */
SemaphoreHandle_t mutex;
};
uint32_t trans_inflight;
uint32_t trans_inflight_peak;
} ble_log_lbm_t;
/* --------------------------------------- */
@@ -80,7 +88,19 @@ enum {
BLE_LOG_LBM_ATOMIC_ISR_CNT)
#define BLE_LOG_LBM_COMMON_CNT (BLE_LOG_LBM_ATOMIC_CNT + BLE_LOG_LBM_SPIN_MAX)
#define BLE_LOG_LBM_CNT (BLE_LOG_LBM_COMMON_CNT + BLE_LOG_LBM_LL_MAX)
#define BLE_LOG_TRANS_CNT (BLE_LOG_LBM_CNT * BLE_LOG_TRANS_PING_PONG_BUF_CNT)
/* Derived per-buffer size from user-configured total-per-LBM budget */
#define BLE_LOG_TRANS_SIZE (CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE / BLE_LOG_TRANS_BUF_CNT)
#define BLE_LOG_TRANS_LL_SIZE (CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE / BLE_LOG_TRANS_BUF_CNT)
/* Unified queue depth derivation */
#define BLE_LOG_TRANS_POOL_CNT (BLE_LOG_LBM_CNT * BLE_LOG_TRANS_BUF_CNT)
#if BLE_LOG_UART_REDIR_ENABLED
#define BLE_LOG_TRANS_REDIR_CNT BLE_LOG_TRANS_BUF_CNT
#else
#define BLE_LOG_TRANS_REDIR_CNT (0)
#endif
#define BLE_LOG_TRANS_TOTAL_CNT (BLE_LOG_TRANS_POOL_CNT + BLE_LOG_TRANS_REDIR_CNT)
/* ------------------------------------------ */
/* Log Buffer Manager Context Defines */
@@ -121,10 +141,30 @@ typedef struct {
};
} ble_log_lbm_ctx_t;
/* -------------------------------------------- */
/* Buffer Utilization Reporting Defines */
/* -------------------------------------------- */
typedef enum {
BLE_LOG_BUF_UTIL_POOL_COMMON_TASK = 0,
BLE_LOG_BUF_UTIL_POOL_COMMON_ISR = 1,
BLE_LOG_BUF_UTIL_POOL_LL = 2,
BLE_LOG_BUF_UTIL_POOL_REDIR = 3,
} ble_log_buf_util_pool_t;
typedef struct {
uint8_t int_src_code;
uint8_t lbm_id;
uint8_t trans_cnt;
uint8_t inflight_peak;
} __attribute__((packed)) ble_log_buf_util_t;
#define BLE_LOG_BUF_UTIL_MAKE_ID(pool, idx) (((pool) << 4) | ((idx) & 0x0F))
#define BLE_LOG_BUF_UTIL_GET_POOL(id) (((id) >> 4) & 0x0F)
#define BLE_LOG_BUF_UTIL_GET_INDEX(id) ((id) & 0x0F)
/* ---------------------------------------- */
/* Enhanced Statistics Data Defines */
/* ---------------------------------------- */
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
typedef struct {
uint8_t int_src_code;
uint8_t src_code;
@@ -133,16 +173,22 @@ typedef struct {
uint32_t written_bytes_cnt;
uint32_t lost_bytes_cnt;
} __attribute__((packed)) ble_log_enh_stat_t;
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
/* -------------------------------------- */
/* Log Statistics Manager Context */
/* -------------------------------------- */
typedef struct {
uint32_t frame_sn;
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
ble_log_enh_stat_t enh_stat;
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
/* Aligned live counters — updated by ble_log_stat_mgr_update(),
* snapshot by ble_log_write_enh_stat(). Natural 4-byte alignment
* ensures each load/store compiles to a single l32i/s32i on
* Xtensa/RISC-V, so individual field access is atomic without locks.
* The packed ble_log_enh_stat_t wire format is built on the stack
* only when serializing to UART. */
uint32_t written_frame_cnt;
uint32_t lost_frame_cnt;
uint32_t written_bytes_cnt;
uint32_t lost_bytes_cnt;
} ble_log_stat_mgr_t;
#define BLE_LOG_GET_FRAME_SN(VAR) __atomic_fetch_add(VAR, 1, __ATOMIC_RELAXED)
@@ -163,15 +209,39 @@ enum {
};
#endif /* CONFIG_BLE_LOG_LL_ENABLED */
/* ------------------------------- */
/* Compile-Time Guards */
/* ------------------------------- */
_Static_assert(CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE % BLE_LOG_TRANS_BUF_CNT == 0,
"Common LBM total buffer size must be a multiple of BLE_LOG_TRANS_BUF_CNT (4)");
#if CONFIG_BLE_LOG_LL_ENABLED
_Static_assert(CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE % BLE_LOG_TRANS_BUF_CNT == 0,
"LL LBM total buffer size must be a multiple of BLE_LOG_TRANS_BUF_CNT (4)");
#endif
_Static_assert(CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE / BLE_LOG_TRANS_BUF_CNT >= BLE_LOG_FRAME_OVERHEAD,
"Common LBM per-buffer size too small for a single frame");
_Static_assert((BLE_LOG_TRANS_BUF_CNT & (BLE_LOG_TRANS_BUF_CNT - 1)) == 0,
"BLE_LOG_TRANS_BUF_CNT must be a power of 2");
_Static_assert(1 + BLE_LOG_LBM_ATOMIC_TASK_CNT <= 16,
"Common task pool exceeds lbm_id 4-bit index limit (max 15)");
_Static_assert(1 + BLE_LOG_LBM_ATOMIC_ISR_CNT <= 16,
"Common ISR pool exceeds lbm_id 4-bit index limit (max 15)");
_Static_assert(BLE_LOG_TRANS_BUF_CNT <= 255,
"BLE_LOG_TRANS_BUF_CNT must fit in uint8_t for ble_log_buf_util_t");
/* --------------------------- */
/* Internal Interfaces */
/* --------------------------- */
bool ble_log_lbm_init(void);
void ble_log_lbm_deinit(void);
ble_log_prph_trans_t **ble_log_lbm_get_trans(ble_log_lbm_t *lbm, size_t log_len);
void ble_log_lbm_enable(bool enable);
#if CONFIG_BLE_LOG_ENH_STAT_ENABLED
void ble_log_write_enh_stat(void);
#endif /* CONFIG_BLE_LOG_ENH_STAT_ENABLED */
void ble_log_write_buf_util(void);
#if BLE_LOG_UART_REDIR_ENABLED
void ble_log_lbm_stream_write(ble_log_lbm_t *lbm, ble_log_src_t src_code,
const uint8_t *data, size_t len);
void ble_log_lbm_stream_flush(ble_log_lbm_t *lbm, ble_log_src_t src_code);
ble_log_lbm_t *ble_log_prph_get_redir_lbm(void);
#endif
#endif /* __BLE_LOG_LBM_H__ */
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -15,17 +15,20 @@
/* TYPEDEF */
typedef struct {
volatile bool prph_owned;
bool prph_owned;
uint8_t *buf;
uint16_t size;
uint16_t pos;
/* Peripheral implementation specific context */
void *ctx;
/* Opaque back-reference to owning LBM, set once at init */
void *owner;
} ble_log_prph_trans_t;
#define BLE_LOG_TRANS_FREE_SPACE(trans) (trans->size - trans->pos)
#define BLE_LOG_TRANS_PING_PONG_BUF_CNT (2)
#define BLE_LOG_TRANS_BUF_CNT (4)
/* INTERFACE */
bool ble_log_prph_init(size_t trans_cnt);
@@ -33,5 +36,6 @@ void ble_log_prph_deinit(void);
bool ble_log_prph_trans_init(ble_log_prph_trans_t **trans, size_t trans_size);
void ble_log_prph_trans_deinit(ble_log_prph_trans_t **trans);
void ble_log_prph_send_trans(ble_log_prph_trans_t *trans);
void ble_log_prph_reset_util_counters(void);
#endif /* __BLE_LOG_PRPH_H__ */
@@ -130,7 +130,7 @@ bool ble_log_cas_acquire(volatile bool *cas_lock);
void ble_log_cas_release(volatile bool *cas_lock);
#endif /* UNIT_TEST */
#define BLE_LOG_VERSION (3)
#define BLE_LOG_VERSION (4)
/* TYPEDEF */
typedef enum {
@@ -139,6 +139,7 @@ typedef enum {
BLE_LOG_INT_SRC_ENH_STAT,
BLE_LOG_INT_SRC_INFO,
BLE_LOG_INT_SRC_FLUSH,
BLE_LOG_INT_SRC_BUF_UTIL,
BLE_LOG_INT_SRC_MAX,
} ble_log_int_src_t;
@@ -9,6 +9,7 @@
/* INCLUDE */
#include "ble_log_prph_dummy.h"
#include "ble_log_lbm.h"
/* INTERFACE */
bool ble_log_prph_init(size_t trans_cnt)
@@ -80,7 +81,16 @@ void ble_log_prph_trans_deinit(ble_log_prph_trans_t **trans)
*trans = NULL;
}
/* Dummy transport has no DMA/hardware -- recycle the buffer immediately
* so that ble_log_lbm_get_trans() can reuse it and ble_log_flush() does
* not hang waiting for prph_owned to clear. Real peripherals (UART DMA,
* SPI DMA) do the same work inside their asynchronous tx_done callbacks. */
void ble_log_prph_send_trans(ble_log_prph_trans_t *trans)
{
(void)trans;
trans->pos = 0;
ble_log_lbm_t *lbm = (ble_log_lbm_t *)trans->owner;
__atomic_fetch_sub(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
__atomic_store_n(&trans->prph_owned, false, __ATOMIC_RELEASE);
}
void ble_log_prph_reset_util_counters(void) {}
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -9,6 +9,7 @@
/* INCLUDE */
#include "ble_log_prph_spi_master_dma.h"
#include "ble_log_lbm.h"
#include "esp_timer.h"
@@ -35,7 +36,9 @@ BLE_LOG_IRAM_ATTR BLE_LOG_STATIC void spi_master_dma_tx_done_cb(spi_transaction_
/* Recycle transport */
ble_log_prph_trans_t *trans = (ble_log_prph_trans_t *)(spi_trans->user);
trans->pos = 0;
trans->prph_owned = false;
ble_log_lbm_t *lbm = (ble_log_lbm_t *)trans->owner;
__atomic_fetch_sub(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
__atomic_store_n(&trans->prph_owned, false, __ATOMIC_RELEASE);
}
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC void spi_master_dma_pre_tx_cb(spi_transaction_t *spi_trans)
@@ -179,6 +182,10 @@ BLE_LOG_IRAM_ATTR void ble_log_prph_send_trans(ble_log_prph_trans_t *trans)
spi_trans->length = (trans->pos << 3);
spi_trans->rxlength = 0;
if (spi_device_queue_trans(dev_handle, spi_trans, 0) != ESP_OK) {
trans->prph_owned = false;
ble_log_lbm_t *lbm = (ble_log_lbm_t *)trans->owner;
__atomic_fetch_sub(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
__atomic_store_n(&trans->prph_owned, false, __ATOMIC_RELEASE);
}
}
void ble_log_prph_reset_util_counters(void) {}
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2025-2026 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -10,11 +10,10 @@
/* INCLUDE */
#include "ble_log_prph_uart_dma.h"
#include "ble_log.h"
#include "ble_log_lbm.h"
#if BLE_LOG_PRPH_UART_DMA_REDIR
#include "ble_log.h"
#include "ble_log_rt.h"
#include "ble_log_lbm.h"
#include "esp_timer.h"
#include "driver/uart.h"
@@ -27,7 +26,7 @@
#define BLE_LOG_UART_DMA_BURST_SIZE (32)
#if BLE_LOG_PRPH_UART_DMA_REDIR
#define BLE_LOG_UART_REDIR_BUF_SIZE (512)
#define BLE_LOG_UART_REDIR_FLUSH_TIMEOUT (100)
#define BLE_LOG_UART_REDIR_FLUSH_PERIOD_US (1000 * 1000)
#endif /* BLE_LOG_PRPH_UART_DMA_REDIR */
/* VARIABLE */
@@ -36,7 +35,6 @@ BLE_LOG_STATIC uhci_controller_handle_t dev_handle = NULL;
#if BLE_LOG_PRPH_UART_DMA_REDIR
BLE_LOG_STATIC bool uart_driver_inited = false;
BLE_LOG_STATIC ble_log_lbm_t *redir_lbm = NULL;
BLE_LOG_STATIC uint32_t redir_last_write_ts = 0;
BLE_LOG_STATIC esp_timer_handle_t redir_flush_timer = NULL;
#endif /* BLE_LOG_PRPH_UART_DMA_REDIR */
@@ -58,24 +56,26 @@ BLE_LOG_IRAM_ATTR BLE_LOG_STATIC bool uart_dma_tx_done_cb(
);
ble_log_prph_trans_t *trans = uart_trans_ctx->trans;
trans->pos = 0;
trans->prph_owned = false;
ble_log_lbm_t *lbm = (ble_log_lbm_t *)trans->owner;
__atomic_fetch_sub(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
__atomic_store_n(&trans->prph_owned, false, __ATOMIC_RELEASE);
return true;
}
#if BLE_LOG_PRPH_UART_DMA_REDIR
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC void esp_timer_cb_flush_log(void)
BLE_LOG_IRAM_ATTR BLE_LOG_STATIC void esp_timer_cb_flush_log(void *arg)
{
uint32_t os_ts = pdTICKS_TO_MS(xTaskGetTickCount());
if ((os_ts - redir_last_write_ts) > BLE_LOG_UART_REDIR_FLUSH_TIMEOUT) {
xSemaphoreTake(redir_lbm->mutex, portMAX_DELAY);
int trans_idx = redir_lbm->trans_idx;
for (int i = 0; i < BLE_LOG_TRANS_PING_PONG_BUF_CNT; i++) {
ble_log_prph_trans_t **trans = &(redir_lbm->trans[trans_idx]);
if (!(*trans)->prph_owned && (*trans)->pos) {
ble_log_rt_queue_trans(trans);
}
trans_idx = !trans_idx;
}
(void)arg;
if (!prph_inited) {
return;
}
/* Non-blocking trylock: skip if mutex is held by a writer.
* The periodic timer will retry on the next tick.
* stream_flush is a no-op when buffer is empty. */
if (xSemaphoreTake(redir_lbm->mutex, 0) == pdTRUE) {
ble_log_lbm_stream_flush(redir_lbm, BLE_LOG_SRC_REDIR);
xSemaphoreGive(redir_lbm->mutex);
}
}
@@ -127,16 +127,18 @@ bool ble_log_prph_init(size_t trans_cnt)
goto exit;
}
BLE_LOG_MEMSET(redir_lbm, 0, sizeof(ble_log_lbm_t));
redir_lbm->lock_type = BLE_LOG_LBM_LOCK_MUTEX;
/* Transport initialization */
for (int i = 0; i < BLE_LOG_TRANS_PING_PONG_BUF_CNT; i++) {
for (int i = 0; i < BLE_LOG_TRANS_BUF_CNT; i++) {
if (!ble_log_prph_trans_init(&(redir_lbm->trans[i]),
BLE_LOG_UART_REDIR_BUF_SIZE)) {
goto exit;
}
redir_lbm->trans[i]->owner = (void *)redir_lbm;
}
/* Mutex initilaization */
/* Mutex initialization */
redir_lbm->mutex = xSemaphoreCreateMutex();
if (!redir_lbm->mutex) {
goto exit;
@@ -151,7 +153,7 @@ bool ble_log_prph_init(size_t trans_cnt)
/* Initialize periodic flush timer */
esp_timer_create_args_t timer_args = {
.callback = (esp_timer_cb_t)esp_timer_cb_flush_log,
.callback = esp_timer_cb_flush_log,
.dispatch_method = ESP_TIMER_TASK,
};
if (esp_timer_create(&timer_args, &redir_flush_timer) != ESP_OK) {
@@ -161,7 +163,7 @@ bool ble_log_prph_init(size_t trans_cnt)
prph_inited = true;
#if BLE_LOG_PRPH_UART_DMA_REDIR
esp_timer_start_periodic(redir_flush_timer, BLE_LOG_UART_REDIR_FLUSH_TIMEOUT);
esp_timer_start_periodic(redir_flush_timer, BLE_LOG_UART_REDIR_FLUSH_PERIOD_US);
#endif /* BLE_LOG_PRPH_UART_DMA_REDIR */
return true;
@@ -190,15 +192,15 @@ void ble_log_prph_deinit(void)
/* Release redirection LBM */
if (redir_lbm) {
/* Release mutex */
if (redir_lbm->mutex) {
xSemaphoreTake(redir_lbm->mutex, portMAX_DELAY);
ble_log_lbm_stream_flush(redir_lbm, BLE_LOG_SRC_REDIR);
xSemaphoreGive(redir_lbm->mutex);
vSemaphoreDelete(redir_lbm->mutex);
}
/* Release transport */
for (int i = 0; i < BLE_LOG_TRANS_PING_PONG_BUF_CNT; i++) {
for (int i = 0; i < BLE_LOG_TRANS_BUF_CNT; i++) {
ble_log_prph_trans_deinit(&(redir_lbm->trans[i]));
}
@@ -272,7 +274,9 @@ void ble_log_prph_trans_deinit(ble_log_prph_trans_t **trans)
BLE_LOG_IRAM_ATTR void ble_log_prph_send_trans(ble_log_prph_trans_t *trans)
{
if (uhci_transmit(dev_handle, trans->buf, trans->pos) != ESP_OK) {
trans->prph_owned = false;
ble_log_lbm_t *lbm = (ble_log_lbm_t *)trans->owner;
__atomic_fetch_sub(&lbm->trans_inflight, 1, __ATOMIC_RELAXED);
__atomic_store_n(&trans->prph_owned, false, __ATOMIC_RELEASE);
}
}
@@ -282,17 +286,8 @@ BLE_LOG_IRAM_ATTR BLE_LOG_STATIC
void ble_log_redir_uart_tx_chars(const char *src, size_t len)
{
xSemaphoreTake(redir_lbm->mutex, portMAX_DELAY);
ble_log_prph_trans_t **trans = ble_log_lbm_get_trans(redir_lbm, len);
if (trans) {
uint8_t *buf = (*trans)->buf + (*trans)->pos;
BLE_LOG_MEMCPY(buf, src, len);
(*trans)->pos += len;
redir_last_write_ts = pdTICKS_TO_MS(xTaskGetTickCount());
if (BLE_LOG_TRANS_FREE_SPACE((*trans)) <= BLE_LOG_FRAME_OVERHEAD) {
ble_log_rt_queue_trans(trans);
}
}
ble_log_lbm_stream_write(redir_lbm, BLE_LOG_SRC_REDIR,
(const uint8_t *)src, len);
xSemaphoreGive(redir_lbm->mutex);
}
@@ -326,4 +321,19 @@ int __wrap_uart_write_bytes_with_break(uart_port_t uart_num, const void *src, si
return __wrap_uart_write_bytes(uart_num, src, size);
}
}
ble_log_lbm_t *ble_log_prph_get_redir_lbm(void)
{
return redir_lbm;
}
#endif /* BLE_LOG_PRPH_UART_DMA_REDIR */
void ble_log_prph_reset_util_counters(void)
{
#if BLE_LOG_PRPH_UART_DMA_REDIR
if (redir_lbm) {
__atomic_store_n(&redir_lbm->trans_inflight, 0, __ATOMIC_RELAXED);
__atomic_store_n(&redir_lbm->trans_inflight_peak, 0, __ATOMIC_RELAXED);
}
#endif
}
@@ -282,104 +282,6 @@ config BT_LE_CONTROLLER_TASK_STACK_SIZE
This configures stack size of NimBLE controller task
menu "Controller debug features"
menuconfig BT_LE_CONTROLLER_LOG_ENABLED
bool "Controller log enable"
default n
help
Enable controller log
config BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
bool "Utilize BLE Log v2 for controller log"
depends on BLE_LOG_ENABLED
default y
help
Utilize BLE Log v2 for controller log
if !BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
config BT_LE_CONTROLLER_LOG_CTRL_ENABLED
bool "enable controller log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable controller log module
config BT_LE_CONTROLLER_LOG_HCI_ENABLED
bool "enable HCI log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable hci log module
config BT_LE_CONTROLLER_LOG_DUMP_ONLY
bool "Controller log dump mode only"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Only operate in dump mode
config BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
bool "Output ble controller logs to SPI bus (Experimental)"
depends on BT_LE_CONTROLLER_LOG_ENABLED
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
select BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Output ble controller logs to SPI bus
config BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
bool "Store ble controller logs to flash(Experimental)"
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Store ble controller logs to flash memory.
config BT_LE_CONTROLLER_LOG_PARTITION_SIZE
int "size of ble controller log partition(Multiples of 4K)"
depends on BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
default 65536
help
The size of ble controller log partition shall be a multiples of 4K.
The name of log partition shall be "bt_ctrl_log".
The partition type shall be ESP_PARTITION_TYPE_DATA.
The partition sub_type shall be ESP_PARTITION_SUBTYPE_ANY.
config BT_LE_LOG_CTRL_BUF1_SIZE
int "size of the first BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the first BLE controller LOG buffer.
config BT_LE_LOG_CTRL_BUF2_SIZE
int "size of the second BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 1024
help
Configure the size of the second BLE controller LOG buffer.
config BT_LE_LOG_HCI_BUF_SIZE
int "size of the BLE HCI LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the BLE HCI LOG buffer.
endif
config BT_LE_CONTROLLER_LOG_WRAP_PANIC_HANDLER_ENABLE
bool "Enable wrap panic handler"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Wrap esp_panic_handler to get controller logs when PC pointer exception crashes.
config BT_LE_CONTROLLER_LOG_TASK_WDT_USER_HANDLER_ENABLE
bool "Enable esp_task_wdt_isr_user_handler implementation"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Implement esp_task_wdt_isr_user_handler to get controller logs when task wdt issue is triggered.
config BT_LE_MEM_CHECK_ENABLED
bool "Enable memory allocation check"
default n
@@ -622,6 +622,7 @@ menu "Controller debug log Options (Experimental)"
depends on BT_CTRL_LE_LOG_EN
int "The level of BLE log"
range 0 5
default 2 if BT_LOG_CRITICAL_ONLY_CTRL
default 1
config BT_CTRL_LE_LOG_BUF1_SIZE
-124
View File
@@ -339,130 +339,6 @@ config BT_LE_CONTROLLER_TASK_STACK_SIZE
This configures stack size of NimBLE controller task
menu "Controller debug features"
menuconfig BT_LE_CONTROLLER_LOG_ENABLED
bool "Controller log enable"
default n
help
Enable controller log
config BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
bool "Utilize BLE Log v2 for controller log"
depends on BLE_LOG_ENABLED
default y
help
Utilize BLE Log v2 for controller log
if !BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
config BT_LE_CONTROLLER_LOG_CTRL_ENABLED
bool "enable controller log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable controller log module
config BT_LE_CONTROLLER_LOG_HCI_ENABLED
bool "enable HCI log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable hci log module
config BT_LE_CONTROLLER_LOG_DUMP_ONLY
bool "Controller log dump mode only"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Only operate in dump mode
config BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
bool "Output ble controller logs to SPI bus (Experimental)"
depends on BT_LE_CONTROLLER_LOG_ENABLED
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
select BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Output ble controller logs to SPI bus
config BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
bool "Output ble controller logs via UART DMA (Experimental)"
depends on BT_LE_CONTROLLER_LOG_ENABLED
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
depends on !BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
select BT_BLE_LOG_UHCI_OUT_ENABLED
default y
help
Output ble controller logs via UART DMA
config BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
bool "Store ble controller logs to flash(Experimental)"
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Store ble controller logs to flash memory.
config BT_LE_CONTROLLER_LOG_PARTITION_SIZE
int "size of ble controller log partition(Multiples of 4K)"
depends on BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
default 65536
help
The size of ble controller log partition shall be a multiple of 4K.
The name of log partition shall be "bt_ctrl_log".
The partition type shall be ESP_PARTITION_TYPE_DATA.
The partition sub_type shall be ESP_PARTITION_SUBTYPE_ANY.
config BT_LE_LOG_CTRL_BUF1_SIZE
int "size of the first BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the first BLE controller LOG buffer.
config BT_LE_LOG_CTRL_BUF2_SIZE
int "size of the second BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 1024
help
Configure the size of the second BLE controller LOG buffer.
config BT_LE_LOG_HCI_BUF_SIZE
int "size of the BLE HCI LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the BLE HCI LOG buffer.
endif
config BT_LE_CONTROLLER_LOG_WRAP_PANIC_HANDLER_ENABLE
bool "Enable wrap panic handler"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Wrap esp_panic_handler to get controller logs when PC pointer exception crashes.
config BT_LE_CONTROLLER_LOG_TASK_WDT_USER_HANDLER_ENABLE
bool "Enable esp_task_wdt_isr_user_handler implementation"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Implement esp_task_wdt_isr_user_handler to get controller logs when task wdt issue is triggered.
config BT_LE_CONTROLLER_LOG_OUTPUT_LEVEL
int "The output level of controller log"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 5
default 1
help
The output level of controller log.
config BT_LE_CONTROLLER_LOG_MOD_OUTPUT_SWITCH
hex "The switch of module log output"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 0xFFFFFFFF
default 0xFFFFFFFF
help
The switch of module log output, this is an unsigned 32-bit hexadecimal value.
config BT_LE_ERROR_SIM_ENABLED
bool "Enable controller features for internal testing"
default n
+5 -35
View File
@@ -69,10 +69,6 @@
#if CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#include "ble_log/ble_log_spi_out.h"
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#include "ble_log/ble_log_uhci_out.h"
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#endif /* CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2 */
/* Macro definition
@@ -226,9 +222,9 @@ static int esp_ecc_gen_dh_key(const uint8_t *peer_pub_key_x, const uint8_t *peer
const uint8_t *our_priv_key, uint8_t *out_dhkey);
#if CONFIG_BT_LE_CONTROLLER_LOG_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, uint32_t len_append, const uint8_t *addr_append, uint32_t flag);
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
static void esp_bt_ctrl_log_partition_get_and_erase_first_block(void);
#endif // CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
@@ -302,20 +298,10 @@ esp_err_t esp_bt_controller_log_init(void)
}
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
if (ble_log_uhci_out_init() != 0) {
goto uhci_out_init_failed;
}
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
if (r_ble_log_init_simple(ble_log_spi_out_ll_write, ble_log_spi_out_ll_log_ev_proc) != 0) {
goto log_init_failed;
}
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
if (r_ble_log_init_simple(ble_log_uhci_out_ll_write, ble_log_uhci_out_ll_log_ev_proc) != 0) {
goto log_init_failed;
}
#else
uint8_t buffers = 0;
#if CONFIG_BT_LE_CONTROLLER_LOG_CTRL_ENABLED
@@ -346,8 +332,6 @@ esp_err_t esp_bt_controller_log_init(void)
ctrl_level_init_failed:
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
r_ble_log_deinit_simple();
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
r_ble_log_deinit_simple();
#else
r_ble_log_deinit_async();
#endif
@@ -356,10 +340,6 @@ log_init_failed:
ble_log_spi_out_deinit();
spi_out_init_failed:
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_deinit();
uhci_out_init_failed:
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
return ESP_FAIL;
}
@@ -369,14 +349,8 @@ void esp_bt_controller_log_deinit(void)
ble_log_spi_out_deinit();
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_deinit();
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
r_ble_log_deinit_simple();
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
r_ble_log_deinit_simple();
#else
r_ble_log_deinit_async();
#endif
@@ -1611,7 +1585,7 @@ void esp_ble_controller_log_dump_all(bool output)
ble_log_dump_to_console();
}
#else /* !CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2 */
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, uint32_t len_append, const uint8_t *addr_append, uint32_t flag)
{
bool end = (flag & BIT(BLE_LOG_INTERFACE_FLAG_END));
@@ -1633,7 +1607,7 @@ static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, u
portEXIT_CRITICAL_SAFE(&spinlock);
#endif // CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
}
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
void esp_ble_controller_log_dump_all(bool output)
{
@@ -1641,13 +1615,9 @@ void esp_ble_controller_log_dump_all(bool output)
ble_log_spi_out_dump_all();
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_dump_all();
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
esp_bt_read_ctrl_log_from_flash(output);
#elif !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#elif !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
portMUX_TYPE spinlock = portMUX_INITIALIZER_UNLOCKED;
portENTER_CRITICAL_SAFE(&spinlock);
esp_panic_handler_feed_wdts();
-124
View File
@@ -333,130 +333,6 @@ config BT_LE_CONTROLLER_TASK_STACK_SIZE
This configures stack size of NimBLE controller task
menu "Controller debug features"
menuconfig BT_LE_CONTROLLER_LOG_ENABLED
bool "Controller log enable"
default n
help
Enable controller log
config BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
bool "Utilize BLE Log v2 for controller log"
depends on BLE_LOG_ENABLED
default y
help
Utilize BLE Log v2 for controller log
if !BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
config BT_LE_CONTROLLER_LOG_CTRL_ENABLED
bool "enable controller log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable controller log module
config BT_LE_CONTROLLER_LOG_HCI_ENABLED
bool "enable HCI log module"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Enable hci log module
config BT_LE_CONTROLLER_LOG_DUMP_ONLY
bool "Controller log dump mode only"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default y
help
Only operate in dump mode
config BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
bool "Output ble controller logs to SPI bus (Experimental)"
depends on BT_LE_CONTROLLER_LOG_ENABLED
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
select BT_BLE_LOG_SPI_OUT_ENABLED
default n
help
Output ble controller logs to SPI bus
config BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
bool "Output ble controller logs via UART DMA (Experimental)"
depends on BT_LE_CONTROLLER_LOG_ENABLED
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
depends on !BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
select BT_BLE_LOG_UHCI_OUT_ENABLED
default y
help
Output ble controller logs via UART DMA
config BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
bool "Store ble controller logs to flash(Experimental)"
depends on !BT_LE_CONTROLLER_LOG_DUMP_ONLY
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Store ble controller logs to flash memory.
config BT_LE_CONTROLLER_LOG_PARTITION_SIZE
int "size of ble controller log partition(Multiples of 4K)"
depends on BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
default 65536
help
The size of ble controller log partition shall be a multiple of 4K.
The name of log partition shall be "bt_ctrl_log".
The partition type shall be ESP_PARTITION_TYPE_DATA.
The partition sub_type shall be ESP_PARTITION_SUBTYPE_ANY.
config BT_LE_LOG_CTRL_BUF1_SIZE
int "size of the first BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the first BLE controller LOG buffer.
config BT_LE_LOG_CTRL_BUF2_SIZE
int "size of the second BLE controller LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 1024
help
Configure the size of the second BLE controller LOG buffer.
config BT_LE_LOG_HCI_BUF_SIZE
int "size of the BLE HCI LOG buffer"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default 4096
help
Configure the size of the BLE HCI LOG buffer.
endif
config BT_LE_CONTROLLER_LOG_WRAP_PANIC_HANDLER_ENABLE
bool "Enable wrap panic handler"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Wrap esp_panic_handler to get controller logs when PC pointer exception crashes.
config BT_LE_CONTROLLER_LOG_TASK_WDT_USER_HANDLER_ENABLE
bool "Enable esp_task_wdt_isr_user_handler implementation"
depends on BT_LE_CONTROLLER_LOG_ENABLED
default n
help
Implement esp_task_wdt_isr_user_handler to get controller logs when task wdt issue is triggered.
config BT_LE_CONTROLLER_LOG_OUTPUT_LEVEL
int "The output level of controller log"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 5
default 1
help
The output level of controller log.
config BT_LE_CONTROLLER_LOG_MOD_OUTPUT_SWITCH
hex "The switch of module log output"
depends on BT_LE_CONTROLLER_LOG_ENABLED
range 0 0xFFFFFFFF
default 0xFFFFFFFF
help
The switch of module log output, this is an unsigned 32-bit hexadecimal value.
config BT_LE_ERROR_SIM_ENABLED
bool "Enable controller features for internal testing"
default n
+5 -35
View File
@@ -62,10 +62,6 @@
#if CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#include "ble_log/ble_log_spi_out.h"
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#include "ble_log/ble_log_uhci_out.h"
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#endif /* CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2 */
/* Macro definition
@@ -218,9 +214,9 @@ static int esp_ecc_gen_dh_key(const uint8_t *peer_pub_key_x, const uint8_t *peer
const uint8_t *our_priv_key, uint8_t *out_dhkey);
#if CONFIG_BT_LE_CONTROLLER_LOG_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, uint32_t len_append, const uint8_t *addr_append, uint32_t flag);
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
static void esp_bt_ctrl_log_partition_get_and_erase_first_block(void);
#endif // CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
@@ -293,20 +289,10 @@ esp_err_t esp_bt_controller_log_init(void)
}
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
if (ble_log_uhci_out_init() != 0) {
goto uhci_out_init_failed;
}
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
if (r_ble_log_init_simple(ble_log_spi_out_ll_write, ble_log_spi_out_ll_log_ev_proc) != 0) {
goto log_init_failed;
}
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
if (r_ble_log_init_simple(ble_log_uhci_out_ll_write, ble_log_uhci_out_ll_log_ev_proc) != 0) {
goto log_init_failed;
}
#else
uint8_t buffers = 0;
#if CONFIG_BT_LE_CONTROLLER_LOG_CTRL_ENABLED
@@ -337,8 +323,6 @@ esp_err_t esp_bt_controller_log_init(void)
ctrl_level_init_failed:
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
r_ble_log_deinit_simple();
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
r_ble_log_deinit_simple();
#else
r_ble_log_deinit_async();
#endif
@@ -347,10 +331,6 @@ log_init_failed:
ble_log_spi_out_deinit();
spi_out_init_failed:
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_deinit();
uhci_out_init_failed:
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
return ESP_FAIL;
}
@@ -360,14 +340,8 @@ void esp_bt_controller_log_deinit(void)
ble_log_spi_out_deinit();
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_deinit();
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
r_ble_log_deinit_simple();
#elif CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
r_ble_log_deinit_simple();
#else
r_ble_log_deinit_async();
#endif
@@ -1564,7 +1538,7 @@ void esp_ble_controller_log_dump_all(bool output)
ble_log_dump_to_console();
}
#else /* !CONFIG_BT_LE_CONTROLLER_LOG_MODE_BLE_LOG_V2 */
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#if !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, uint32_t len_append, const uint8_t *addr_append, uint32_t flag)
{
bool end = (flag & BIT(BLE_LOG_INTERFACE_FLAG_END));
@@ -1586,7 +1560,7 @@ static void esp_bt_controller_log_interface(uint32_t len, const uint8_t *addr, u
portEXIT_CRITICAL_SAFE(&spinlock);
#endif // CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
}
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#endif // !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
void esp_ble_controller_log_dump_all(bool output)
{
@@ -1594,13 +1568,9 @@ void esp_ble_controller_log_dump_all(bool output)
ble_log_spi_out_dump_all();
#endif // CONFIG_BT_BLE_LOG_SPI_OUT_ENABLED
#if CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
ble_log_uhci_out_dump_all();
#endif // CONFIG_BT_BLE_LOG_UHCI_OUT_ENABLED
#if CONFIG_BT_LE_CONTROLLER_LOG_STORAGE_ENABLE
esp_bt_read_ctrl_log_from_flash(output);
#elif !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED && !CONFIG_BT_LE_CONTROLLER_LOG_UHCI_OUT_ENABLED
#elif !CONFIG_BT_LE_CONTROLLER_LOG_SPI_OUT_ENABLED
portMUX_TYPE spinlock = portMUX_INITIALIZER_UNLOCKED;
portENTER_CRITICAL_SAFE(&spinlock);
esp_panic_handler_feed_wdts();
+425
View File
@@ -0,0 +1,425 @@
# BLE Log Console
A Textual-based TUI tool for real-time capture, parsing, and display of BLE Log frames from UART DMA output. Designed for both Espressif internal developers and ESP-IDF customers.
**User Guide**: [English](docs/User-Guide-EN.md) | [中文](docs/User-Guide-CN.md)
## Table of Contents
- [Features](#features)
- [Prerequisites](#prerequisites)
- [Installation](#installation)
- [Usage](#usage)
- [Firmware Configuration](#firmware-configuration)
- [How It Works](#how-it-works)
- [Offline Analysis](#offline-analysis)
- [Keyboard Shortcuts](#keyboard-shortcuts)
- [Building Executable](#building-executable)
- [Architecture](#architecture)
- [Development](#development)
- [Troubleshooting](#troubleshooting)
## Features
- **Real-time frame parsing** with automatic checksum mode detection (XOR/Sum × Full/Header-only)
- **Frame sync state machine** with loss detection and recovery (SEARCHING → CONFIRMING → SYNCED → CONFIRMING_LOSS)
- **Internal frame decoding**: INIT_DONE (firmware version), INFO, ENH_STAT (per-source write/loss counters), FLUSH
- **UART redirect display**: When firmware uses UART PORT 0, redirected `ESP_LOG` output is decoded from `REDIR` frames and displayed as ASCII log lines
- **Dimmed internal logs**: Console-generated messages (sync, warnings, errors) are dimmed to visually separate from user application logs
- **Live status panel**: Sync state, RX bytes, transport speed (current + max), frame rate
- **Per-source frame loss warnings**: Real-time `[WARN]` notifications when firmware reports new frame loss, with source name (e.g., `LL_TASK`)
- **Per-source statistics view**: Press `d` to open a modal overlay showing written/lost frame and byte counts per source
- **Raw binary capture**: All received bytes are saved to a `.bin` file for [offline analysis](#offline-analysis)
- **Scrollable log view** with auto-scroll toggle
## Prerequisites
### 1. ESP-IDF Environment
BLE Log Console runs within the ESP-IDF Python environment. You must source `export.sh` before use:
```bash
cd <esp-idf-root>
. ./export.sh
```
This sets up the Python virtual environment at `~/.espressif/python_env/` which includes all required dependencies (`textual`, `pyserial`, `click`, etc.).
### 2. Firmware Configuration
The target ESP32 device must have the BLE Log module enabled and configured for UART DMA output. Configure via `idf.py menuconfig`:
```
Component config → Bluetooth → BT Logs → Enable BLE Log Module (Experimental) [y]
```
Then select the transport peripheral and UART settings:
```
Component config → Bluetooth → BT Logs → BLE Log Module
→ Peripheral Selection → UART DMA
→ UART DMA Configuration
→ UART Port Number (default: 0)
→ Baud Rate (default: 3000000)
→ TX GPIO Number (set to match your hardware)
```
#### Quick Setup: Critical-Log-Only Mode
The simplest way to enable BLE Log with UART DMA output:
```
Component config → Bluetooth → BT Logs → Enable critical-log-only mode [y]
```
This automatically enables the BLE Log Module, selects UART DMA as the default peripheral, and restricts each stack (Controller/Host/Mesh) to critical logs only.
#### Recommended Kconfig Options
| Kconfig Option | Recommended | Why |
|----------------|-------------|-----|
| `CONFIG_BT_LOG_CRITICAL_ONLY` | `y` | One-click setup — enables BLE Log + UART DMA + compression |
| `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE` | `3000000` | 3 Mbps — balances throughput and reliability |
| `CONFIG_BLE_LOG_LL_ENABLED` | `y` (auto) | Auto-enabled by ESP BLE Controller detection |
> **Note**: Payload checksum (XOR, full scope) and enhanced statistics are always enabled — no Kconfig options needed.
> **Note on UART PORT 0**: When `CONFIG_BLE_LOG_PRPH_UART_DMA_PORT=0`, the firmware automatically wraps `ESP_LOG` output in BLE Log frames (`BLE_LOG_SRC_REDIR`). The console decodes and displays these as regular ASCII log lines. See the [BLE Log module README](../../../components/bt/common/ble_log/README.md#uart-redirect-port-0) for details.
### 3. Hardware Connection
Connect the ESP32 UART TX pin to a USB-to-serial adapter:
```
ESP32 TX GPIO ──────── USB-Serial RX
ESP32 GND ──────── USB-Serial GND
```
Ensure your USB-serial adapter supports the configured baud rate (3 Mbps by default). Adapters based on CP2102N, CH343, or FT232H are recommended.
## Installation
No separate installation is needed. The `textual` and `textual-fspicker` packages are included in ESP-IDF's core requirements (`tools/requirements/requirements.core.txt`) and installed automatically by `./install.sh`.
Verify the dependency is available:
```bash
. ./export.sh
python -c "import textual; print(textual.__version__)"
```
## Usage
### Interactive Mode (Launch Screen)
Run `python console.py` with no arguments to open the **Launch Screen** — an interactive TUI where you can select the serial port, baud rate, and log directory before starting capture:
```bash
cd <esp-idf-root>
. ./export.sh
cd tools/bt/ble_log_console
python console.py
```
The Launch Screen lets you browse available ports and configure options without memorising CLI flags.
### Capture Mode (CLI)
Pass `--port` directly to skip the Launch Screen and start capture immediately:
```bash
cd <esp-idf-root>
. ./export.sh
cd tools/bt/ble_log_console
# Basic usage (--port is now optional; omit to use Launch Screen)
python console.py --port /dev/ttyUSB0
# With custom baud rate
python console.py --port /dev/ttyUSB0 --baudrate 2000000
# With custom log directory
python console.py --port /dev/ttyUSB0 --log-dir /tmp/my_captures
# With custom output file (deprecated — prefer --log-dir)
python console.py --port /dev/ttyUSB0 --output /tmp/my_capture.bin
# Short form
python console.py -p /dev/ttyUSB0 -b 3000000 -d /tmp/my_captures
```
#### Options
| Option | Short | Default | Description |
|--------|-------|---------|-------------|
| `--port` | `-p` | (optional) | UART port device path (e.g., `/dev/ttyUSB0`, `COM3`). Omit to use Launch Screen. |
| `--baudrate` | `-b` | `3000000` | Baud rate — must match `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE` |
| `--log-dir` | `-d` | current working directory | Directory where capture `.bin` files are saved |
| `--output` | `-o` | auto-generated | *(Deprecated)* Explicit output file path — use `--log-dir` instead |
When `--log-dir` is not specified, capture files are saved to the **current working directory** with a timestamp-based filename:
```
<cwd>/ble_log_YYYYMMDD_HHMMSS.bin
```
### List Saved Captures (`ls`)
List all previously saved `.bin` capture files, sorted by most recent first:
```bash
python console.py ls
```
Example output:
```
Captures in /tmp/ble_log_console:
2026-03-17 14:30:25 2.3 MB ble_log_20260317_143025.bin
2026-03-17 10:15:03 512.0 KB ble_log_20260317_101503.bin
2026-03-16 18:42:11 1.1 MB ble_log_20260316_184211.bin
```
These `.bin` files contain raw binary data exactly as received from UART, suitable for [offline analysis](#offline-analysis).
## Firmware Configuration
### Checksum Mode Detection
The console automatically detects the firmware's checksum mode by probing all 4 combinations during the SEARCHING phase:
| Firmware Config | Console Detects |
|-----------------|-----------------|
| XOR checksum + Full scope | `XOR / Header+Payload` |
| XOR checksum + Header-only scope | `XOR / Header` |
| Sum checksum + Full scope | `Sum / Header+Payload` |
| Sum checksum + Header-only scope | `Sum / Header` |
The detected mode is logged in the log view after sync is achieved (3 consecutive valid frames).
### Enhanced Statistics (ENH_STAT)
The firmware periodically emits `INTERNAL` frames containing per-source write/loss counters (enhanced statistics is always enabled). The console decodes these and uses them as the authoritative source of frame and byte loss. Loss counters are baselined on the first ENH_STAT received per source, so the console only shows loss since it started.
When new frame loss is detected in an ENH_STAT report, a `[WARN]` notification is displayed in the log view with the source name and incremental loss count. Press `d` at any time to view a per-source breakdown of written and lost frames/bytes.
## How It Works
### Sync State Machine
```
frame valid
┌──────────┐ ──────────────▶ ┌────────────────┐
│ SEARCHING │ │ CONFIRMING_SYNC │ ──(N frames)──▶ SYNCED
└──────────┘ ◀────────────── └────────────────┘
frame invalid
frame invalid
┌────────┐ ──────────────▶ ┌─────────────────┐
│ SYNCED │ │ CONFIRMING_LOSS │ ──(M+1 fails)──▶ SEARCHING
└────────┘ ◀────────────── └─────────────────┘
frame valid
```
- **N** = 3 (sync confirmation threshold)
- **M** = 3 (loss tolerance — consecutive failures before resync)
### Frame Format
The console parses the standard BLE Log frame format:
```
[payload_len: 2B LE][frame_meta: 4B LE][payload: variable][checksum: 4B LE]
└── Header (6B) ──┘ └── Tail (4B) ──┘
```
- `frame_meta` = `source_code[7:0] | frame_sn[31:8]`
- For most sources, payload starts with 4-byte `os_ts` (OS timestamp in ms)
- For `REDIR` source (code 8), payload is raw ASCII (no `os_ts` prefix)
### REDIR Frame Decoding
When the firmware uses UART PORT 0, `ESP_LOG` output is wrapped in frames with source code `REDIR` (8). The console:
1. Extracts the raw ASCII payload from each REDIR frame
2. Buffers partial lines across frames (a single log line may span multiple frames due to batch sealing)
3. Emits complete lines to the log view on each `\n` boundary
## Offline Analysis
### Raw Binary Capture
Every byte received from UART is saved to the output `.bin` file **before** parsing. This ensures the capture is complete and unmodified, regardless of parser state or sync loss.
Use `python console.py ls` to find saved captures.
### Parsing with BLE Log Analyzer
The saved `.bin` files can be parsed offline using the **BLE Log Analyzer**'s `ble_log_parser_v2` module for detailed analysis:
- Frame-by-frame decoding with source filtering
- HCI log extraction and conversion to btsnoop format (for Wireshark)
- Timestamp reconstruction and event correlation
- Link Layer log decoding
The binary format is identical whether captured by BLE Log Console, a logic analyzer, or any other tool — the parser reads the same frame structure documented above.
> **Tip**: The `bt_hci_to_btsnoop` tool at `tools/bt/bt_hci_to_btsnoop/` can convert extracted HCI logs to btsnoop format for analysis in Wireshark.
## Keyboard Shortcuts
All shortcuts are **case-insensitive** (e.g., `Q` and `q` both quit).
| Key | Action |
|-----|--------|
| `q` | Quit the application |
| `Ctrl+C` | Quit the application |
| `c` | Clear the log view |
| `s` | Toggle auto-scroll (on by default) |
| `d` | Show per-source frame statistics (press `Escape` to return) |
| `h` | Show keyboard shortcuts (press `Escape` to return) |
| `r` | Reset chip via DTR/RTS toggle |
## Building Executable
To distribute BLE Log Console as a standalone single-file executable (no Python installation required on the target machine), use the provided `build_exe.py` script with [PyInstaller](https://pyinstaller.org/):
```bash
pip install pyinstaller
cd tools/bt/ble_log_console
python build_exe.py
```
The executable is written to `dist/ble_log_console` (Linux/macOS) or `dist\ble_log_console.exe` (Windows). Copy it to any machine and run it directly — no ESP-IDF environment needed.
> **Note**: Build the executable on the same OS/architecture as the target machine. PyInstaller does not cross-compile.
## Architecture
```
console.py (Click CLI)
BLELogApp (Textual App)
├── Backend Worker (thread)
│ │
│ ├── UART Transport ── open_serial() ── raw .bin file
│ │
│ ├── FrameParser ── sync state machine + checksum auto-detection
│ │ │
│ │ └── ParsedFrame { source_code, frame_sn, payload, os_ts_ms }
│ │
│ ├── InternalDecoder ── decode INIT_DONE, INFO, ENH_STAT, FLUSH
│ │
│ └── StatsAccumulator ── RX bytes, BPS, FPS, firmware-reported loss
└── Frontend (Textual widgets)
├── LogView ── scrollable RichLog with styled output
├── StatusPanel ── fixed bottom bar with live stats
├── StatsScreen ── modal overlay for per-source statistics (d key)
└── ShortcutScreen ── modal overlay for keyboard shortcuts (h key)
```
### Source Layout
```
src/
__init__.py # Python 3.10 guard + textual dependency check
app.py # Textual App — wires backend worker to frontend
backend/
models.py # Enums, dataclasses, Textual Message types
checksum.py # XOR and Sum checksum (matches firmware impl)
frame_parser.py # Sync state machine with checksum auto-detection
internal_decoder.py # INTERNAL frame payload decoder
stats/ # Statistics sub-package
__init__.py # Re-exports StatsAccumulator
accumulator.py # Thin composition of sub-modules
transport.py # RX bytes, BPS, FPS tracking
firmware_loss.py # ENH_STAT loss delta tracking
firmware_written.py # ENH_STAT write tracking
sn_gap.py # SN gap detection
peak_burst.py # 1ms window peak write burst
traffic_spike.py # Wire saturation detection
uart_transport.py # Serial port helpers, file I/O
frontend/
log_view.py # RichLog wrapper with color-coded write methods
shortcut_screen.py # Modal screen for keyboard shortcuts
stats_screen.py # Modal screen for per-source statistics
status_panel.py # Reactive status bar (sync, speed, help hint)
tests/
helpers.py # Synthetic frame builder for tests
test_checksum.py # Checksum algorithm tests
test_frame_parser.py # State machine + auto-detection tests
test_internal_decoder.py # Internal frame decoding tests
test_stats.py # Stats accumulator and firmware loss tests
```
## Development
### Running Tests
```bash
cd <esp-idf-root>
. ./export.sh
cd tools/bt/ble_log_console
python -m pytest tests/ -v
```
### Linting & Formatting
```bash
python -m ruff format src/ tests/
python -m ruff check --fix src/ tests/
```
### Type Checking
```bash
python -m mypy src/backend/
```
## Troubleshooting
### "UART port not found"
- Check the device is connected: `ls /dev/ttyUSB*` (Linux) or `ls /dev/tty.usb*` (macOS)
- Ensure you have permission: `sudo usermod -aG dialout $USER` (Linux, then re-login)
- On WSL, USB devices need [usbipd-win](https://github.com/dorssel/usbipd-win) to pass through
### Sync stays in SEARCHING
- **Baud rate mismatch**: Ensure `--baudrate` matches `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE`
- **Wrong port**: Verify you're connected to the correct UART TX pin
- **Firmware not running**: Check the device has booted and BLE Log is initialized
- **Signal integrity**: At 3 Mbps, use short cables and ensure solid GND connection
### No ESP_LOG output
When using UART PORT 0, `ESP_LOG` is redirected through BLE Log frames. If you don't see log lines:
- Ensure the firmware has `CONFIG_BLE_LOG_PRPH_UART_DMA_PORT=0`
- The console automatically decodes REDIR frames — no extra configuration needed
- Logs are flushed by a 1-second periodic timer, so there may be a short delay
### High frame loss
- Press `d` to view per-source loss counters (enhanced statistics is always enabled)
- Increase buffer sizes: `CONFIG_BLE_LOG_LBM_TRANS_SIZE`, `CONFIG_BLE_LOG_LBM_LL_TRANS_SIZE`
- Add more LBMs: `CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT`
- Increase baud rate if your adapter supports higher speeds
### Import errors
```
ModuleNotFoundError: No module named 'textual'
```
Re-run the ESP-IDF installer:
```bash
cd <esp-idf-root>
./install.sh
. ./export.sh
```
+242
View File
@@ -0,0 +1,242 @@
# BLE Log Console 用户指南
## 简介
BLE Log Console 是一个基于终端的实时 BLE 日志捕获与解析工具。它通过 UART DMA 接收 ESP 芯片固件发出的 BLE Log 帧,实时解析并展示在终端界面中,同时将原始二进制数据保存到文件供离线分析。
## 准备工作
### 1. 固件配置
`idf.py menuconfig` 中启用 BLE Log 模块:
**最简配置(推荐):**
```
Component config → Bluetooth → BT Logs → Enable critical-log-only mode [y]
```
勾选即可一键启用 BLE Log 模块 + UART DMA 输出 + 仅关键日志模式。
**手动配置:**
```
Component config → Bluetooth → BT Logs → Enable BT Log Async Output (Dev Only) [y]
```
UART DMA 传输、3 Mbps 波特率、PORT 0 均为默认值,大多数情况下无需额外配置。
> **关于 UART PORT 0**:当配置为 PORT 0 时,固件会自动将 `ESP_LOG` 输出包装为 BLE Log 帧(`REDIR` source),Console 会自动解码并显示为普通日志行。
### 2. 硬件连接
**PORT 0(推荐):** 直接通过 USB 线连接开发板的 UART 口即可,无需额外接线。
**其他 PORT** 需要将指定的 TX GPIO 连接到外部 USB 串口适配器:
```
ESP32 TX GPIO ──────── USB 串口适配器 RX
ESP32 GND ──────── USB 串口适配器 GND
```
确保 USB 串口适配器支持所配置的波特率(默认 3 Mbps)。推荐使用 CP2102N、CH343 或 FT232H 芯片的适配器。
## 启动
### 快速启动(推荐)
使用自带的启动脚本 -- 自动激活 ESP-IDF 环境并安装依赖,可在任意目录下运行:
```bash
# Linux / macOS
<esp-idf 根目录>/tools/bt/ble_log_console/run.sh
# Windows
<esp-idf 根目录>\tools\bt\ble_log_console\run.bat
```
不带参数启动时,工具会打开 **启动界面(Launch Screen)** -- 一个交互式配置界面,可以:
- 从下拉列表中 **选择串口**(自动检测可用设备,支持 **Refresh** 按钮重新扫描)
- 从预设选项中 **选择波特率**(115200 至 3000000,默认 3000000)
- 通过文本输入或 **Browse** 文件选择器 **设置日志保存目录**
- 点击 **Connect** 开始捕获
传入 `--port` 可跳过启动界面,直接开始捕获:
```bash
# Linux / macOS
./run.sh -p /dev/ttyUSB0
./run.sh -p /dev/ttyUSB0 -b 3000000 -d /tmp/my_captures
# Windows
run.bat -p COM3
```
所有 CLI 选项均会转发给 `console.py`
### 手动启动
如果你希望自行管理 ESP-IDF 环境:
```bash
cd <esp-idf 根目录>
. ./export.sh
python -m pip install textual textual-fspicker # 安装额外依赖
cd tools/bt/ble_log_console
python console.py # 启动界面
python console.py -p /dev/ttyUSB0 # 直连捕获
python console.py -p /dev/ttyUSB0 -b 3000000 # 指定波特率
python console.py -p /dev/ttyUSB0 -d /tmp/captures # 指定日志目录
```
### CLI 选项
| 参数 | 缩写 | 默认值 | 说明 |
|------|------|--------|------|
| `--port` | `-p` | (可选) | 串口设备路径,如 `/dev/ttyUSB0`、`COM3`。省略时打开启动界面。 |
| `--baudrate` | `-b` | `3000000` | 波特率,须与固件 `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE` 一致 |
| `--log-dir` | `-d` | 当前工作目录 | 捕获文件保存目录 |
捕获文件保存到当前工作目录(或 `--log-dir` 指定的目录),文件名按时间戳自动生成:
```
ble_log_YYYYMMDD_HHMMSS.bin
```
## 界面说明
启动后,界面分为两个区域:
### 日志区域(上方)
滚动显示实时日志,包括:
- **`[INFO]`**(绿色):系统信息,如固件版本
- **`[WARN]`**(黄色):丢帧警告,格式为 `Frame loss [LL_TASK]: 5 frames, 200 bytes`,表示该 source 新增了丢帧
- **`[SYNC]`**(青色):同步状态变化
- 普通文本:UART redirect 输出的 `ESP_LOG` 日志(仅 PORT 0 时出现)
### 状态栏(下方)
固定显示在底部,实时更新:
```
Sync: SYNCED | Checksum: XOR / Header+Payload | Press h for help
RX: 1.2 MB Speed: 293.0 KB/s Max: 300.0 KB/s Rate: 3421 fps Lost: 12 frames, 480 B
```
- **Sync**: 同步状态(SEARCHING -> CONFIRMING -> SYNCED -> CONFIRMING_LOSS)
- **Checksum**: 自动检测到的校验模式(算法 / 范围)
- **RX**: 累计接收字节数
- **Speed / Max**: 当前/峰值传输速度(单位 KB/s 或 MB/s)
- **Rate**: 当前帧率
- **Lost**: 自 Console 启动以来的累计丢帧数和丢失字节数
## 快捷键
| 按键 | 功能 |
|------|------|
| `q` | 退出 |
| `Ctrl+C` | 退出 |
| `c` | 清屏(清除日志区域) |
| `s` | 切换自动滚动(默认开启) |
| `d` | 打开每个 Source 的帧统计详情(按 `Escape` 或 `d` 关闭) |
| `m` | 查看缓冲区利用率(按 `Escape` 或 `m` 关闭) |
| `h` | 显示快捷键帮助(按 `Escape` 关闭) |
| `r` | 通过 DTR/RTS 复位芯片 |
### 帧统计详情(`d` 键)
`d` 键会弹出一个覆盖层,包含两个表格,每秒自动刷新:
**固件计数器(自芯片启动以来)** -- 固件上报的每个 Source 的写入和缓冲区丢帧统计:
| Source | Written Frames | Written Bytes | Buffer Loss Frames | Buffer Loss Bytes |
|--------|---------------|---------------|-------------------|-------------------|
| LL_TASK | 12345 | 56.7 KB | 5 | 200 B |
| LL_HCI | 890 | 34.2 KB | - | - |
| HOST | 456 | 12.1 KB | - | - |
| HCI | 234 | 8.5 KB | 2 | 80 B |
**Console 测量(自 Console 启动以来)** -- Console 端测量的每个 Source 的接收速率和峰值突发:
| Source | Received Frames | Received Bytes | Avg Frames/s | Avg Bytes/s | Peak Frames/10ms | Peak Bytes/s |
|--------|----------------|---------------|-------------|------------|-----------------|-------------|
| LL_TASK | 12340 | 56.5 KB | 412 | 18.8 KB/s | 8 | 24.0 KB/s |
| LL_HCI | 890 | 34.2 KB | 30 | 1.1 KB/s | 3 | 3.6 KB/s |
有缓冲区丢帧的 Source 会在固件计数器表格中以红色高亮显示。
### 缓冲区利用率(`m` 键)
`m` 键会弹出一个覆盖层,展示固件上报的每个 LBM(Log Buffer Manager)缓冲区利用率:
| Pool | Idx | Name | Peak | Total | Util% |
|------|-----|------|------|-------|-------|
| COMMON_TASK | 0 | spin | 3 | 4 | 75% |
| COMMON_TASK | 1 | atomic[0] | 4 | 4 | 100% |
| COMMON_ISR | 0 | spin | 2 | 4 | 50% |
| LL | 0 | ll_task | 4 | 4 | 100% |
| LL | 1 | ll_hci | 2 | 4 | 50% |
- **Pool**: 缓冲池类别(COMMON_TASK、COMMON_ISR、LL、REDIR)
- **Idx**: 池内 LBM 索引
- **Name**: LBM 可读名称(spin、atomic[N]、ll_task、ll_hci、redir
- **Peak**: 同时在途的传输缓冲区峰值数量
- **Total**: 该 LBM 可用的传输缓冲区总数
- **Util%**: Peak / Total 的百分比;100%(红色高亮)表示所有缓冲区曾同时被占用,可能导致丢帧
此功能有助于诊断丢帧时哪个缓冲池资源不足。
## 打包为独立可执行文件
使用自带的构建脚本可将 BLE Log Console 打包为单文件可执行程序,目标机器无需安装 Python 或 ESP-IDF 环境:
```bash
# Linux / macOS
<esp-idf 根目录>/tools/bt/ble_log_console/build.sh
# Windows
<esp-idf 根目录>\tools\bt\ble_log_console\build.bat
```
脚本会自动激活 ESP-IDF 环境、安装 PyInstaller、构建可执行文件、将其放置在当前工作目录下,并清理中间产物。
输出:`./ble_log_console`(Linux/macOS)或 `.\ble_log_console.exe`(Windows)。
> **注意**:请在与目标机器相同的操作系统和架构上构建。PyInstaller 不支持交叉编译。
## 常见问题
### 状态一直停在 SEARCHING
- **波特率不匹配**:确认 `--baudrate` 与固件 `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE` 一致
- **串口错误**:确认连接的是正确的 UART TX 引脚
- **固件未运行**:确认设备已启动且 BLE Log 已初始化
- **信号质量**:3 Mbps 下请使用短线缆,确保 GND 连接可靠
### 出现 Buffer overflow warning
表示解析器内部缓冲区累积超过 8 KB 未能解析出有效帧。通常发生在:
- 设备启动初期,UART 上还没有有效的 BLE Log 帧数据
- 波特率不匹配导致接收到的全是乱码
如果只在启动时出现一次,属于正常现象;如果持续出现,请检查波特率和硬件连接。
### 丢帧严重
- 按 `d` 查看各 Source 的丢帧详情
- 按 `m` 查看缓冲区利用率 -- 100% 的池需要增加缓冲区
- 增大固件 buffer`CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE``CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE`
- 增加 LBM 数量:`CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT`
- 提高波特率(需适配器支持)
### 看不到 ESP_LOG 输出
- 确认固件配置了 `CONFIG_BLE_LOG_PRPH_UART_DMA_PORT=0`
- Console 会自动解码 REDIR 帧,无需额外配置
- 日志由 1 秒周期定时器刷新,可能有短暂延迟
+242
View File
@@ -0,0 +1,242 @@
# BLE Log Console User Guide
## Introduction
BLE Log Console is a terminal-based tool for real-time capture and parsing of BLE Log frames from ESP chip firmware via UART DMA. It displays parsed frames in an interactive TUI and saves the raw binary data to a file for offline analysis.
## Prerequisites
### 1. Firmware Configuration
Enable the BLE Log module in `idf.py menuconfig`:
**Quick setup (recommended):**
```
Component config → Bluetooth → BT Logs → Enable critical-log-only mode [y]
```
This enables the BLE Log module, selects UART DMA as the transport, and restricts each stack to critical logs only — all in one toggle.
**Manual configuration:**
```
Component config → Bluetooth → BT Logs → Enable BT Log Async Output (Dev Only) [y]
```
UART DMA transport, 3 Mbps baud rate, and PORT 0 are all enabled by default -- no further configuration needed in most cases.
> **About UART PORT 0**: When configured for PORT 0, the firmware automatically wraps `ESP_LOG` output in BLE Log frames (`REDIR` source). The console decodes and displays these as regular log lines.
### 2. Hardware Connection
**PORT 0 (recommended):** Simply connect the development board's UART port via USB — no additional wiring needed.
**Other PORTs:** Connect the designated TX GPIO to an external USB-to-serial adapter:
```
ESP32 TX GPIO ──────── USB-Serial RX
ESP32 GND ──────── USB-Serial GND
```
Ensure your USB-serial adapter supports the configured baud rate (3 Mbps by default). Adapters based on CP2102N, CH343, or FT232H are recommended.
## Getting Started
### Quick Start (Recommended)
Use the provided launcher script -- it automatically activates the ESP-IDF environment and installs dependencies. Can be run from any directory:
```bash
# Linux / macOS
<esp-idf-root>/tools/bt/ble_log_console/run.sh
# Windows
<esp-idf-root>\tools\bt\ble_log_console\run.bat
```
When launched without arguments, the tool opens a **Launch Screen** -- an interactive configuration interface where you can:
- **Select a serial port** from a dropdown of auto-detected devices (with a **Refresh** button to re-scan)
- **Choose a baud rate** from preset options (115200 to 3000000, default: 3000000)
- **Set the log directory** via text input or a **Browse** file picker
- Press **Connect** to start capture
To skip the Launch Screen and start capture directly, pass `--port`:
```bash
# Linux / macOS
./run.sh -p /dev/ttyUSB0
./run.sh -p /dev/ttyUSB0 -b 3000000 -d /tmp/my_captures
# Windows
run.bat -p COM3
```
All CLI options are forwarded to `console.py`.
### Manual Launch
If you prefer to manage the ESP-IDF environment yourself:
```bash
cd <esp-idf-root>
. ./export.sh
python -m pip install textual textual-fspicker # Install extra dependencies
cd tools/bt/ble_log_console
python console.py # Launch Screen
python console.py -p /dev/ttyUSB0 # Direct capture
python console.py -p /dev/ttyUSB0 -b 3000000 # Custom baud rate
python console.py -p /dev/ttyUSB0 -d /tmp/captures # Custom log directory
```
### CLI Options
| Option | Short | Default | Description |
|--------|-------|---------|-------------|
| `--port` | `-p` | (optional) | Serial port path, e.g., `/dev/ttyUSB0`, `COM3`. Omit to use Launch Screen. |
| `--baudrate` | `-b` | `3000000` | Baud rate -- must match `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE` |
| `--log-dir` | `-d` | current working directory | Directory where capture `.bin` files are saved |
Capture files are saved to the current working directory (or `--log-dir` if specified) with a timestamp-based filename:
```
ble_log_YYYYMMDD_HHMMSS.bin
```
## UI Overview
The interface has two areas:
### Log View (upper area)
A scrollable area displaying real-time logs:
- **`[INFO]`** (green): System information, e.g., firmware version
- **`[WARN]`** (yellow): Frame loss warnings, e.g., `Frame loss [LL_TASK]: 5 frames, 200 bytes`, indicating new frame loss on that source
- **`[SYNC]`** (cyan): Sync state transitions
- Plain text: `ESP_LOG` output via UART redirect (PORT 0 only)
### Status Panel (bottom bar)
Fixed at the bottom, updated in real time:
```
Sync: SYNCED | Checksum: XOR / Header+Payload | Press h for help
RX: 1.2 MB Speed: 293.0 KB/s Max: 300.0 KB/s Rate: 3421 fps Lost: 12 frames, 480 B
```
- **Sync**: Sync state (SEARCHING -> CONFIRMING -> SYNCED -> CONFIRMING_LOSS)
- **Checksum**: Auto-detected checksum mode (algorithm / scope)
- **RX**: Total received bytes
- **Speed / Max**: Current / peak transport speed (in KB/s or MB/s)
- **Rate**: Current frame rate
- **Lost**: Cumulative lost frames and bytes since console start
## Keyboard Shortcuts
| Key | Action |
|-----|--------|
| `q` | Quit |
| `Ctrl+C` | Quit |
| `c` | Clear the log view |
| `s` | Toggle auto-scroll (on by default) |
| `d` | Open per-source frame statistics (press `Escape` or `d` to close) |
| `m` | Show buffer utilization (press `Escape` or `m` to close) |
| `h` | Show keyboard shortcuts (press `Escape` to close) |
| `r` | Reset chip via DTR/RTS toggle |
### Frame Statistics Detail (`d` key)
Pressing `d` opens a modal overlay with two tables, refreshed every second:
**Firmware Counters (since chip init)** -- per-source write and buffer loss counts as reported by the firmware:
| Source | Written Frames | Written Bytes | Buffer Loss Frames | Buffer Loss Bytes |
|--------|---------------|---------------|-------------------|-------------------|
| LL_TASK | 12345 | 56.7 KB | 5 | 200 B |
| LL_HCI | 890 | 34.2 KB | - | - |
| HOST | 456 | 12.1 KB | - | - |
| HCI | 234 | 8.5 KB | 2 | 80 B |
**Console Measurements (since console start)** -- per-source receive rates and peak bursts measured by the console:
| Source | Received Frames | Received Bytes | Avg Frames/s | Avg Bytes/s | Peak Frames/10ms | Peak Bytes/s |
|--------|----------------|---------------|-------------|------------|-----------------|-------------|
| LL_TASK | 12340 | 56.5 KB | 412 | 18.8 KB/s | 8 | 24.0 KB/s |
| LL_HCI | 890 | 34.2 KB | 30 | 1.1 KB/s | 3 | 3.6 KB/s |
Sources with buffer loss are highlighted in red in the firmware table.
### Buffer Utilization (`m` key)
Pressing `m` opens a modal overlay showing per-LBM (Log Buffer Manager) buffer utilization as reported by the firmware:
| Pool | Idx | Name | Peak | Total | Util% |
|------|-----|------|------|-------|-------|
| COMMON_TASK | 0 | spin | 3 | 4 | 75% |
| COMMON_TASK | 1 | atomic[0] | 4 | 4 | 100% |
| COMMON_ISR | 0 | spin | 2 | 4 | 50% |
| LL | 0 | ll_task | 4 | 4 | 100% |
| LL | 1 | ll_hci | 2 | 4 | 50% |
- **Pool**: Buffer pool category (COMMON_TASK, COMMON_ISR, LL, REDIR)
- **Idx**: LBM index within the pool
- **Name**: Human-readable LBM name (spin, atomic[N], ll_task, ll_hci, redir)
- **Peak**: Maximum number of transport buffers in flight simultaneously
- **Total**: Total transport buffers available for this LBM
- **Util%**: Peak / Total as percentage; 100% (highlighted in red) means all buffers were in use simultaneously, indicating potential frame loss
This helps diagnose which buffer pool is under-provisioned when experiencing frame loss.
## Building Standalone Executable
Use the provided build script to package BLE Log Console as a single-file executable that requires no Python or ESP-IDF environment on the target machine:
```bash
# Linux / macOS
<esp-idf-root>/tools/bt/ble_log_console/build.sh
# Windows
<esp-idf-root>\tools\bt\ble_log_console\build.bat
```
The script automatically activates the ESP-IDF environment, installs PyInstaller, builds the executable, places it in the current working directory, and cleans up intermediate artifacts.
Output: `./ble_log_console` (Linux/macOS) or `.\ble_log_console.exe` (Windows).
> **Note**: Build on the same OS/architecture as the target machine. PyInstaller does not cross-compile.
## Troubleshooting
### Sync stays in SEARCHING
- **Baud rate mismatch**: Ensure `--baudrate` matches `CONFIG_BLE_LOG_PRPH_UART_DMA_BAUD_RATE`
- **Wrong port**: Verify you are connected to the correct UART TX pin
- **Firmware not running**: Confirm the device has booted and BLE Log is initialized
- **Signal quality**: At 3 Mbps, use short cables and ensure a solid GND connection
### Buffer overflow warning
This means the parser's internal buffer exceeded 8 KB without successfully parsing any frames. Common causes:
- Device is still booting and no valid BLE Log frames have been sent yet
- Baud rate mismatch causing all received data to be garbage
If it only appears once at startup, this is normal. If persistent, check the baud rate and hardware connection.
### High frame loss
- Press `d` to view per-source loss details
- Press `m` to check buffer utilization -- pools at 100% need more buffers
- Increase firmware buffers: `CONFIG_BLE_LOG_LBM_TRANS_BUF_SIZE`, `CONFIG_BLE_LOG_LBM_LL_TRANS_BUF_SIZE`
- Add more LBMs: `CONFIG_BLE_LOG_LBM_ATOMIC_LOCK_TASK_CNT`
- Increase baud rate (if your adapter supports it)
### No ESP_LOG output
- Confirm the firmware has `CONFIG_BLE_LOG_PRPH_UART_DMA_PORT=0`
- The console decodes REDIR frames automatically — no extra configuration needed
- Logs are flushed by a 1-second periodic timer, so there may be a short delay
+57
View File
@@ -0,0 +1,57 @@
@echo off
rem SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
rem SPDX-License-Identifier: Apache-2.0

rem Build a single-file BLE Log Console executable via PyInstaller.
rem The executable is placed in the caller's working directory.
rem All intermediate build artifacts are cleaned up automatically.

setlocal
set "SCRIPT_DIR=%~dp0"
rem %~dp0 carries a trailing backslash; strip it so path joins stay clean.
set "SCRIPT_DIR=%SCRIPT_DIR:~0,-1%"
rem Remember where the user invoked us so the final binary lands there.
set "CALLER_DIR=%cd%"

rem Derive IDF_PATH (three levels up from script directory)
for %%I in ("%SCRIPT_DIR%\..\..\..") do set "IDF_PATH=%%~fI"

echo Activating ESP-IDF environment ...
rem Output is discarded; only the exit code is checked.
call "%IDF_PATH%\export.bat" > nul 2>&1
if %errorlevel% neq 0 (
    echo ERROR: Failed to activate ESP-IDF environment.
    exit /b 1
)

echo Installing build dependencies ...
python -m pip install --quiet textual textual-fspicker pyinstaller
if %errorlevel% neq 0 (
    echo ERROR: Failed to install dependencies.
    exit /b 1
)

echo Building executable ...
rem build_exe.py refers to console.py by a relative path, so run it from the
rem tool directory; restore the caller's directory on every exit path.
cd /d "%SCRIPT_DIR%"
python build_exe.py
if %errorlevel% neq 0 (
    echo ERROR: Build failed.
    cd /d "%CALLER_DIR%"
    exit /b 1
)

set "EXE_NAME=ble_log_console.exe"
if exist "dist\%EXE_NAME%" (
    move /y "dist\%EXE_NAME%" "%CALLER_DIR%\%EXE_NAME%" > nul
    echo.
    echo Executable ready: %CALLER_DIR%\%EXE_NAME%
) else (
    echo ERROR: Build produced no executable.
    cd /d "%CALLER_DIR%"
    exit /b 1
)

rem Remove intermediate artifacts
if exist "%SCRIPT_DIR%\build" rd /s /q "%SCRIPT_DIR%\build"
if exist "%SCRIPT_DIR%\dist" rd /s /q "%SCRIPT_DIR%\dist"
del /q "%SCRIPT_DIR%\*.spec" 2>nul
cd /d "%CALLER_DIR%"
+41
View File
@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0

# Build a single-file BLE Log Console executable via PyInstaller.
# The executable is placed in the caller's working directory.
# All intermediate build artifacts are cleaned up automatically.

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
IDF_PATH="$(cd "$SCRIPT_DIR/../../.." && pwd)"  # three levels up from this script
CALLER_DIR="$(pwd)"
export IDF_PATH

echo "Activating ESP-IDF environment ..."
# shellcheck source=/dev/null
# Fix: export.sh output is suppressed, so under `set -e` a failed activation
# previously aborted the script with no diagnostic at all. Report it
# explicitly, matching build.bat's behavior.
if ! . "$IDF_PATH/export.sh" > /dev/null 2>&1; then
    echo "ERROR: Failed to activate ESP-IDF environment." >&2
    exit 1
fi

echo "Installing build dependencies ..."
python -m pip install --quiet textual textual-fspicker pyinstaller

echo "Building executable ..."
# build_exe.py refers to console.py by a relative path; run from the tool dir.
cd "$SCRIPT_DIR"
python build_exe.py

# Move executable to caller's directory and clean up
EXE_NAME="ble_log_console"
if [ -f "dist/$EXE_NAME" ]; then
    mv "dist/$EXE_NAME" "$CALLER_DIR/$EXE_NAME"
    echo ""
    echo "Executable ready: $CALLER_DIR/$EXE_NAME"
else
    echo "ERROR: Build produced no executable." >&2
    exit 1
fi

# Remove intermediate artifacts
rm -rf "$SCRIPT_DIR/build" "$SCRIPT_DIR/dist" "$SCRIPT_DIR"/*.spec
cd "$CALLER_DIR"
+62
View File
@@ -0,0 +1,62 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Build a single-file executable for BLE Log Console using PyInstaller.
Usage:
pip install pyinstaller
python build_exe.py
"""
import subprocess
import sys
def main() -> None:
    """Run PyInstaller to produce a single-file `ble_log_console` binary.

    Exits with PyInstaller's return code on failure; on success the
    executable is left at dist/ble_log_console.
    """
    # Modules PyInstaller's static analysis may miss (loaded dynamically).
    hidden_modules = [
        'textual',
        'textual.drivers',
        'textual.css',
        'textual_fspicker',
        'serial',
        'serial.tools',
        'serial.tools.list_ports',
        'serial.tools.list_ports_common',
        'serial.tools.list_ports_linux',
        'serial.tools.list_ports_windows',
        'serial.tools.list_ports_osx',
    ]
    cmd = [
        sys.executable,
        '-m',
        'PyInstaller',
        'console.py',
        '--onefile',
        '--name',
        'ble_log_console',
    ]
    for module in hidden_modules:
        cmd += ['--hidden-import', module]
    # Bundle non-code data files shipped inside these packages (CSS, etc.).
    for package in ('textual', 'textual_fspicker'):
        cmd += ['--collect-data', package]
    cmd.append('--noconfirm')

    print(f'Running: {" ".join(cmd)}')
    result = subprocess.run(cmd, check=False)
    if result.returncode != 0:
        print(f'\nBuild failed (exit code {result.returncode}).', file=sys.stderr)
        print('If you see hidden import errors, add the missing module to the cmd list above.', file=sys.stderr)
        sys.exit(result.returncode)
    print('\nBuild complete. Executable: dist/ble_log_console')


if __name__ == '__main__':
    main()
+7
View File
@@ -0,0 +1,7 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import sys
from pathlib import Path

# Make the tool's own directory importable (for the `src` package) no matter
# which directory the process was started from.
_TOOL_DIR = Path(__file__).parent
sys.path.insert(0, str(_TOOL_DIR))
+93
View File
@@ -0,0 +1,93 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""BLE Log Console entry point.
Usage:
python console.py # interactive setup
python console.py --port /dev/ttyUSB0 # direct connect
python console.py ls # list saved files
"""
from datetime import datetime
from pathlib import Path
from typing import Optional
import click
from src.app import BLELogApp
from src.backend.models import format_bytes
from src.backend.uart_transport import validate_uart_port
@click.group(invoke_without_command=True)
@click.option('--port', '-p', default=None, help='UART port. If omitted, shows interactive setup.')
@click.option('--baudrate', '-b', type=int, default=3_000_000, show_default=True, help='Baud rate')
@click.option(
    '--log-dir', '-d', type=click.Path(), default=None, help='Log save directory. Default: current working directory.'
)
@click.option(
    '--output',
    '-o',
    type=click.Path(),
    default=None,
    hidden=True,
    help='[Deprecated] Output binary file path. Use --log-dir instead.',
)
@click.pass_context
def cli(ctx: click.Context, port: Optional[str], baudrate: int, log_dir: Optional[str], output: Optional[str]) -> None:
    """BLE Log Console — real-time BLE log monitor."""
    # NOTE: the docstring above doubles as click's --help text; keep it short.
    # If a subcommand (e.g. `ls`) was invoked, click dispatches it — nothing to do here.
    if ctx.invoked_subcommand is not None:
        return
    # Resolve log directory: deprecated --output wins over --log-dir when both
    # are given; only the directory part of --output is kept (the filename is
    # discarded — captures are always named by timestamp).
    resolved_log_dir: Optional[Path] = None
    if output is not None:
        # Legacy --output: treat as full file path, use its parent as log_dir
        click.echo(
            'Warning: --output is deprecated and the filename is ignored. '
            'Use --log-dir instead. Saving to directory: ' + str(Path(output).parent),
            err=True,
        )
        resolved_log_dir = Path(output).parent
    elif log_dir is not None:
        resolved_log_dir = Path(log_dir)
    # Fail fast on an unusable serial port before launching the TUI.
    if port is not None:
        error = validate_uart_port(port)
        if error:
            raise click.BadParameter(error, param_hint="'--port'")
    # port=None makes the app open its interactive Launch Screen;
    # log_dir=None falls back to the current working directory inside the app.
    app = BLELogApp(
        port=port,
        baudrate=baudrate,
        log_dir=resolved_log_dir,
    )
    app.run()
@cli.command(name='ls')
@click.option(
    '--dir',
    '-d',
    'log_dir',
    type=click.Path(exists=True),
    default=None,
    help='Directory to list. Default: current directory.',
)
def list_files(log_dir: Optional[str]) -> None:
    """List saved binary capture files."""
    # Captures follow the ble_log_<timestamp>.bin naming scheme; newest first.
    target = Path(log_dir) if log_dir else Path.cwd()
    captures = sorted(target.glob('ble_log_*.bin'), key=lambda p: p.stat().st_mtime, reverse=True)
    if not captures:
        click.echo(f'No captures found in {target}')
        return
    click.echo(f'Captures in {target}:\n')
    for capture in captures:
        info = capture.stat()  # single stat() call per file
        stamp = datetime.fromtimestamp(info.st_mtime).strftime('%Y-%m-%d %H:%M:%S')
        human_size = format_bytes(info.st_size)
        click.echo(f' {stamp} {human_size:>10} {capture.name}')


if __name__ == '__main__':
    cli()
+5
View File
@@ -0,0 +1,5 @@
# Runtime dependencies (textual, pyserial, click) are managed by the ESP-IDF
# virtual environment via tools/requirements/requirements.core.txt, not here.
[tool.pytest.ini_options]
testpaths = ["tests"]
+31
View File
@@ -0,0 +1,31 @@
@echo off
rem SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
rem SPDX-License-Identifier: Apache-2.0

rem BLE Log Console launcher for Windows CMD.
rem Works from any directory. All arguments are forwarded to console.py.

setlocal
set "SCRIPT_DIR=%~dp0"
rem %~dp0 carries a trailing backslash; strip it so path joins stay clean.
set "SCRIPT_DIR=%SCRIPT_DIR:~0,-1%"

rem Derive IDF_PATH (three levels up from script directory)
for %%I in ("%SCRIPT_DIR%\..\..\..") do set "IDF_PATH=%%~fI"

echo Activating ESP-IDF environment ...
rem Output is discarded; only the exit code is checked.
call "%IDF_PATH%\export.bat" > nul 2>&1
if %errorlevel% neq 0 (
    echo ERROR: Failed to activate ESP-IDF environment.
    exit /b 1
)

echo Installing extra dependencies ...
rem Extra TUI runtime dependencies for the console.
python -m pip install --quiet textual textual-fspicker
if %errorlevel% neq 0 (
    echo ERROR: Failed to install dependencies.
    exit /b 1
)

rem Forward all CLI arguments and propagate console.py's exit code.
python "%SCRIPT_DIR%\console.py" %*
exit /b %errorlevel%
+22
View File
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0

# BLE Log Console launcher.
# Works from any directory: ./run.sh, or /full/path/to/run.sh
# All arguments are forwarded to console.py.

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
IDF_PATH="$(cd "$SCRIPT_DIR/../../.." && pwd)"  # three levels up from this script
export IDF_PATH

echo "Activating ESP-IDF environment ..."
# shellcheck source=/dev/null
# Fix: export.sh output is suppressed, so under `set -e` a failed activation
# previously aborted the script with no diagnostic at all. Report it
# explicitly, matching run.bat's behavior.
if ! . "$IDF_PATH/export.sh" > /dev/null 2>&1; then
    echo "ERROR: Failed to activate ESP-IDF environment." >&2
    exit 1
fi

echo "Installing extra dependencies ..."
python -m pip install --quiet textual textual-fspicker

# Replace the shell with console.py so signals and exit codes pass through.
exec python "$SCRIPT_DIR/console.py" "$@"
+18
View File
@@ -0,0 +1,18 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import sys

# The application source uses 3.10+ syntax (e.g. `X | None` unions), so bail
# out with a readable message instead of a SyntaxError on older interpreters.
_MIN_VERSION = (3, 10)
if sys.version_info < _MIN_VERSION:  # noqa: UP036 — runtime guard for users on old Python
    print(f'Error: Python 3.10 or later is required.\nCurrent version: {sys.version}')
    sys.exit(1)

# Probe for the one third-party dependency that is not part of the standard
# ESP-IDF environment, and point the user at the auto-setup launchers.
try:
    import textual  # noqa: F401
except ImportError:
    _HINT = (
        "Error: 'textual' package is not installed.\n"
        "Run 'run.sh' (Linux/macOS) or 'run.bat' (Windows) to launch with auto-setup."
    )
    print(_HINT)
    sys.exit(1)
+419
View File
@@ -0,0 +1,419 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Textual App wiring backend Worker to frontend widgets.
See Spec Section 6.
"""
import struct
import threading
import time
from datetime import datetime
from pathlib import Path
from typing import cast
import serial
from textual.app import App
from textual.app import ComposeResult
from textual.binding import Binding
from textual.message import Message
from src.backend.frame_parser import FrameParser
from src.backend.internal_decoder import decode_internal_frame
from src.backend.models import FRAME_OVERHEAD
from src.backend.models import LL_TS_OFFSET
from src.backend.models import BackendStopped
from src.backend.models import BleLogSource
from src.backend.models import BufUtilEntry
from src.backend.models import BufUtilResult
from src.backend.models import EnhStatResult
from src.backend.models import FrameLossDetected
from src.backend.models import FunnelSnapshot
from src.backend.models import InfoResult
from src.backend.models import InternalFrameDecoded
from src.backend.models import InternalSource
from src.backend.models import LaunchConfig
from src.backend.models import LogLine
from src.backend.models import LossType
from src.backend.models import ParsedFrame
from src.backend.models import SourcePeakWrite
from src.backend.models import StatsUpdated
from src.backend.models import SyncState
from src.backend.models import SyncStateChanged
from src.backend.models import TrafficSpikeDetected
from src.backend.models import has_os_ts
from src.backend.models import is_ll_source
from src.backend.models import resolve_source_name
from src.backend.stats import StatsAccumulator
from src.backend.uart_transport import UART_BLOCK_SIZE
from src.backend.uart_transport import open_serial
from src.frontend.launch_screen import LaunchScreen
from src.frontend.log_view import LogView
from src.frontend.shortcut_screen import ShortcutScreen
from src.frontend.stats_screen import BufUtilScreen
from src.frontend.stats_screen import StatsScreen
from src.frontend.status_panel import StatusPanel
STATS_INTERVAL = 0.25 # seconds
class BLELogApp(App):
    # Vertical stacking: LogView fills the screen with StatusPanel below
    # (compose() yields them in that order).
    CSS = """
    Screen {
        layout: vertical;
    }
    """

    # Each lowercase key has a hidden uppercase twin so shortcuts also fire
    # with Shift/Caps Lock held; ctrl+c uses priority=True so quitting stays
    # available regardless of focus.
    BINDINGS = [
        Binding('q', 'quit', 'Quit'),
        Binding('Q', 'quit', show=False),
        Binding('ctrl+c', 'quit', show=False, priority=True),
        Binding('c', 'clear_log', 'Clear'),
        Binding('C', 'clear_log', show=False),
        Binding('s', 'toggle_scroll', 'Auto-scroll'),
        Binding('S', 'toggle_scroll', show=False),
        Binding('d', 'dump_stats', 'Stats'),
        Binding('D', 'dump_stats', show=False),
        Binding('m', 'show_buf_util', 'BufUtil'),
        Binding('M', 'show_buf_util', show=False),
        Binding('h', 'show_help', 'Help'),
        Binding('H', 'show_help', show=False),
        Binding('r', 'reset_chip', 'Reset'),
        Binding('R', 'reset_chip', show=False),
    ]
    def __init__(
        self,
        port: str | None = None,
        baudrate: int = 3_000_000,
        log_dir: Path | None = None,
    ) -> None:
        """Store connection settings; no I/O happens until the app mounts.

        Args:
            port: UART device path. None → on_mount shows the Launch Screen.
            baudrate: UART baud rate (must match the firmware's configured rate).
            log_dir: Directory for capture files; defaults to the current
                working directory.
        """
        super().__init__()
        self._port = port
        self._baudrate = baudrate
        self._log_dir = log_dir or Path.cwd()
        self._output_path: Path | None = None
        self._serial: serial.Serial | None = None
        # All-time per-source chip write peak (updated from StatsUpdated messages)
        self._max_per_source_peak: dict[int, SourcePeakWrite] | None = None
        self._ll_max_per_source_peak: dict[int, SourcePeakWrite] | None = None
        # Console-side per-source received bytes (from StatsUpdated snapshots)
        self._per_source_rx_bytes: dict[int, int] | None = None
        self._funnel_snapshots: list[FunnelSnapshot] = []
        self._buf_util_snapshots: list[BufUtilEntry] = []
        # Wall-clock capture start (set when backend loop begins)
        self._capture_start_time: float = 0.0
        # Serializes access to self._serial between the backend worker's read
        # loop and anything touching the port from the UI thread.
        self._serial_lock = threading.Lock()
def compose(self) -> ComposeResult:
yield LogView()
yield StatusPanel()
def on_mount(self) -> None:
if self._port is not None:
self._resolve_output_path()
self.run_worker(self._backend_loop, thread=True, exclusive=True)
else:
self.push_screen(LaunchScreen(default_log_dir=self._log_dir), callback=self._on_launch_result)
    # Read-only accessors for the latest backend snapshots
    # (stored by the StatsUpdated handling path).
    @property
    def funnel_snapshots(self) -> list[FunnelSnapshot]:
        return self._funnel_snapshots

    @property
    def buf_util_snapshots(self) -> list[BufUtilEntry]:
        return self._buf_util_snapshots
def _on_launch_result(self, config: LaunchConfig | None) -> None:
"""Handle Launch Screen dismissal."""
if config is None:
self.exit()
return
self._port = config.port
self._baudrate = config.baudrate
self._log_dir = config.log_dir
self._resolve_output_path()
self.run_worker(self._backend_loop, thread=True, exclusive=True)
def _resolve_output_path(self) -> None:
"""Generate timestamped output file path in the log directory."""
self._log_dir.mkdir(parents=True, exist_ok=True)
ts = datetime.now().strftime('%Y%m%d_%H%M%S')
self._output_path = self._log_dir / f'ble_log_{ts}.bin'
    def _post(self, msg: Message) -> None:
        """Thread-safe message posting from backend worker.

        post_message must run on the app's event-loop thread; call_from_thread
        marshals the call over from the worker thread.
        """
        self.call_from_thread(self.post_message, msg)
def _emit_stats(self, stats: StatsAccumulator, parser: FrameParser, last_time: float) -> float:
"""Emit a stats snapshot if the interval has elapsed. Returns updated timestamp."""
now = time.perf_counter()
if now - last_time < STATS_INTERVAL:
return last_time
elapsed = now - last_time
snapshot = stats.snapshot(
elapsed,
sync_state=parser.sync_state,
checksum_mode=parser.checksum_mode,
)
funnel = stats.funnel_snapshot(elapsed)
buf_util = stats.buf_util_snapshot()
self._post(StatsUpdated(snapshot, funnel, buf_util))
return now
def _backend_loop(self) -> None:
    """Background worker: UART read -> parse -> stats -> messages.

    Runs on a worker thread (started via run_worker(thread=True)); every UI
    interaction goes through _post(), which marshals onto the app thread.
    Output files are opened lazily on first data so empty captures leave no
    files behind. Exits (and posts BackendStopped) on any serial error.
    """
    if self._port is None or self._output_path is None:
        self._post(LogLine('Backend started without port/output configuration'))
        self._post(BackendStopped('Configuration missing'))
        return
    parser = FrameParser()
    stats = StatsAccumulator()
    stats.set_wire_max(self._baudrate)
    redir_line_buf = ''  # partial REDIR text line awaiting its terminating '\n'
    prev_sync_state = SyncState.SEARCHING
    last_snapshot_time = time.perf_counter()
    try:
        self._serial = open_serial(self._port, self._baudrate)
    except Exception as e:
        self._post(LogLine(f'Failed to open UART: {e}'))
        self._post(BackendStopped(f'Failed to open UART: {e}'))
        return
    self._capture_start_time = time.perf_counter()
    ser = self._serial
    self._post(LogLine(f'Connected to {self._port} at {self._baudrate} baud'))
    # Lazy file handles — created on first data arrival
    output_file = None
    console_log_file = None
    console_log_path = self._output_path.with_name(self._output_path.stem + '_console.log')
    try:
        while True:
            # Lock serializes reads against the DTR/RTS toggling in action_reset_chip().
            with self._serial_lock:
                block = ser.read(UART_BLOCK_SIZE)
            if not block:
                # Idle: still emit periodic snapshots so the UI keeps refreshing.
                last_snapshot_time = self._emit_stats(stats, parser, last_snapshot_time)
                continue
            # 1. Save raw binary (lazy-open on first block)
            if output_file is None:
                output_file = open(self._output_path, 'wb')  # noqa: SIM115
                self._post(LogLine(f'Saving to {self._output_path}'))
            output_file.write(block)
            output_file.flush()
            # 2. Track bytes
            stats.record_bytes(len(block))
            # 3. Parse frames
            results = parser.feed(block)
            # 4. Check sync state transition
            if parser.sync_state != prev_sync_state:
                self._post(SyncStateChanged(parser.sync_state))
                prev_sync_state = parser.sync_state
            # 5. Process results
            for item in results:
                if isinstance(item, ParsedFrame):
                    frame_size = len(item.payload) + FRAME_OVERHEAD
                    if item.source_code != BleLogSource.INTERNAL:
                        stats.record_frame(frame_size, item.source_code, item.frame_sn)
                        stats.record_frame_traffic(frame_size, item.source_code)
                    else:
                        stats.record_frame()  # count frame for transport metrics, no SN tracking
                    # Timestamp accounting depends on the source's clock domain:
                    # os_ts payload prefix, LL lc_ts inside the payload, or host
                    # wall-clock for REDIR (which carries no timestamp at all).
                    if has_os_ts(item.source_code) and item.source_code != BleLogSource.INTERNAL:
                        stats.record_frame_ts(item.os_ts_ms, frame_size, item.source_code)
                    elif is_ll_source(item.source_code) and len(item.payload) >= 6:
                        (lc_ts_us,) = struct.unpack_from('<I', item.payload, LL_TS_OFFSET)
                        stats.record_ll_frame_ts(lc_ts_us, frame_size, item.source_code)
                    elif item.source_code == BleLogSource.REDIR:
                        wall_ms = int(time.perf_counter() * 1000) & 0xFFFFFFFF
                        stats.record_frame_wall_ts(wall_ms, frame_size, item.source_code)
                    # Decode internal frames
                    if item.source_code == BleLogSource.INTERNAL:
                        decoded = decode_internal_frame(item.payload)
                        if decoded:
                            int_src = decoded['int_src']
                            # Reject false INIT_DONE from misaligned data:
                            # real firmware always has version >= 1.
                            if int_src == InternalSource.INIT_DONE:
                                info = cast(InfoResult, decoded)
                                if info['version'] == 0:
                                    continue
                            self._post(InternalFrameDecoded(int_src, decoded))
                            if int_src in (InternalSource.INIT_DONE, InternalSource.INFO):
                                info = cast(InfoResult, decoded)
                                stats.set_firmware_version(info['version'])
                            if int_src == InternalSource.INIT_DONE:
                                stats.reset('init')
                            elif int_src == InternalSource.FLUSH:
                                stats.reset('flush')
                            elif int_src == InternalSource.ENH_STAT:
                                enh = cast(EnhStatResult, decoded)
                                # record_enh_stat returns only the NEW loss since the
                                # previous report, so the warning below is incremental.
                                new_frames, new_bytes = stats.record_enh_stat(
                                    src_code=enh['src_code'],
                                    written_frames=enh['written_frame_cnt'],
                                    lost_frames=enh['lost_frame_cnt'],
                                    written_bytes=enh['written_bytes_cnt'],
                                    lost_bytes=enh['lost_bytes_cnt'],
                                    baudrate=self._baudrate,
                                )
                                if new_frames > 0:
                                    source_name = resolve_source_name(enh['src_code'])
                                    self._post(
                                        FrameLossDetected(
                                            source_name,
                                            loss_type=LossType.BUFFER,
                                            lost_frames=new_frames,
                                            lost_bytes=new_bytes,
                                        )
                                    )
                            elif int_src == InternalSource.BUF_UTIL:
                                buf = cast(BufUtilResult, decoded)
                                stats.record_buf_util(
                                    lbm_id=buf['lbm_id'],
                                    trans_cnt=buf['trans_cnt'],
                                    inflight_peak=buf['inflight_peak'],
                                )
                    # Decode UART redirect frames (raw ASCII, no os_ts prefix).
                    # A single log line may span multiple frames due to
                    # batch sealing, so buffer partial lines until '\n'.
                    elif item.source_code == BleLogSource.REDIR:
                        payload_text = item.payload.decode('ascii', errors='replace')
                        # Write raw payload to console log (independent of line buffering)
                        if console_log_file is None:
                            console_log_file = open(console_log_path, 'w')  # noqa: SIM115
                        console_log_file.write(payload_text)
                        console_log_file.flush()
                        redir_line_buf += payload_text
                        while '\n' in redir_line_buf:
                            line, redir_line_buf = redir_line_buf.split('\n', 1)
                            if line:
                                self._post(LogLine(line))
                elif isinstance(item, str):
                    # Parser-produced text: ASCII lines found while unsynced, or warnings.
                    self._post(LogLine(item))
            # 6. Traffic spike detection
            spike = stats.check_traffic()
            if spike is not None:
                self._post(
                    TrafficSpikeDetected(
                        throughput_kbs=spike.throughput_kbs,
                        wire_max_kbs=spike.wire_max_kbs,
                        utilization_pct=spike.utilization_pct,
                        duration_ms=spike.duration_ms,
                        per_source=spike.per_source,
                    )
                )
            # 7. Periodic stats snapshot
            last_snapshot_time = self._emit_stats(stats, parser, last_snapshot_time)
    except Exception as e:
        self._post(LogLine(f'Error: {e}'))
    finally:
        ser.close()
        if output_file is not None:
            output_file.close()
        if console_log_file is not None:
            console_log_file.close()
        self._post(BackendStopped('Serial connection closed'))
# --- Message handlers ---
def on_sync_state_changed(self, msg: SyncStateChanged) -> None:
    """Announce a parser sync-state transition in the log view."""
    self.query_one(LogView).write_sync(f'State: {msg.state.value}')
def on_stats_updated(self, msg: StatsUpdated) -> None:
    """Refresh the status panel and cache snapshot data for the stats screens."""
    stats = msg.stats
    self.query_one(StatusPanel).stats = stats
    self._funnel_snapshots = msg.funnel_snapshots
    self._buf_util_snapshots = msg.buf_util_snapshots
    # Preserve all-time per-source peaks for the stats screen
    os_peak = stats.os_peak.max_per_source
    if os_peak is not None:
        self._max_per_source_peak = os_peak
    ll_peak = stats.ll_peak.max_per_source
    if ll_peak is not None:
        self._ll_max_per_source_peak = ll_peak
    rx_bytes = stats.per_source_rx_bytes
    if rx_bytes is not None:
        self._per_source_rx_bytes = rx_bytes
def on_internal_frame_decoded(self, msg: InternalFrameDecoded) -> None:
    """Surface notable internal firmware events (init / flush) in the log view."""
    if msg.int_src == InternalSource.INIT_DONE:
        info = cast(InfoResult, msg.payload)
        self.query_one(LogView).write_info(f'BLE Log v{info["version"]} initialized - statistics reset')
    elif msg.int_src == InternalSource.FLUSH:
        self.query_one(LogView).write_info('Firmware flush - SN counters reset')
def on_log_line(self, msg: LogLine) -> None:
    """Write a plain text line from the backend into the log view."""
    log_view = self.query_one(LogView)
    log_view.write_ascii(msg.text)
def on_frame_loss_detected(self, msg: FrameLossDetected) -> None:
    """Warn in the log view about lost frames (buffer or transport loss)."""
    log_view = self.query_one(LogView)
    log_view.write_warning(
        f'Frame loss [{msg.source_name}] ({msg.loss_type.value}): {msg.lost_frames} frames, {msg.lost_bytes} bytes'
    )
def on_backend_stopped(self, msg: BackendStopped) -> None:
    """Log why the backend worker exited and mark the UI as disconnected."""
    self.query_one(LogView).write_warning(f'Backend stopped: {msg.reason}')
    self.query_one(StatusPanel).disconnected = True
def on_traffic_spike_detected(self, msg: TrafficSpikeDetected) -> None:
    """Log a one-line spike summary with the top contributing sources."""
    ranked = sorted(msg.per_source.items(), key=lambda item: item[1], reverse=True)
    # Only name sources contributing at least 1% of the spike traffic.
    src_parts = ', '.join(f'{resolve_source_name(s)} {p:.0f}%' for s, p in ranked if p >= 1.0)
    util_str = 'saturated' if msg.utilization_pct >= 100.0 else f'{msg.utilization_pct:.0f}% wire'
    self.query_one(LogView).write_traffic(
        f'{msg.throughput_kbs:.0f} KB/s ({util_str}) over {msg.duration_ms:.0f}ms | {src_parts}'
    )
# --- Actions ---
def action_clear_log(self) -> None:
    """Clear all content from the log view."""
    log_view = self.query_one(LogView)
    log_view.clear()
def action_toggle_scroll(self) -> None:
    """Toggle auto-scroll of the log view on/off."""
    view = self.query_one(LogView)
    view.auto_scroll = not view.auto_scroll
def action_dump_stats(self) -> None:
    """Open the statistics screen for the current capture session."""
    screen = StatsScreen(start_time=self._capture_start_time)
    self.push_screen(screen)
def action_show_buf_util(self) -> None:
    """Open the buffer-utilization screen."""
    screen = BufUtilScreen()
    self.push_screen(screen)
def action_show_help(self) -> None:
    """Open the keyboard-shortcut help screen."""
    screen = ShortcutScreen()
    self.push_screen(screen)
def action_reset_chip(self) -> None:
    """Reset ESP32 via DTR/RTS toggle (same sequence as esptool).

    No-op when the serial port is absent or closed. Holds the serial lock so
    the toggle does not interleave with the backend thread's reads.
    """
    ser = self._serial
    if ser is None or not ser.is_open:
        return
    with self._serial_lock:
        ser.dtr = False
        ser.rts = True
        # 100 ms pulse on RTS (EN line) before releasing — matches esptool timing.
        time.sleep(0.1)
        ser.rts = False
    log_view = self.query_one(LogView)
    log_view.write_info('Chip reset triggered')
@@ -0,0 +1,2 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
@@ -0,0 +1,43 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Checksum implementations matching BLE Log firmware (ble_log_util.c).
Two algorithms:
- sum_checksum: byte-by-byte sum
- xor_checksum: 32-bit word XOR matching firmware ble_log_fast_checksum()
The firmware's ror32 alignment compensation makes the XOR checksum
alignment-independent: a simple word-by-word XOR produces the same result
regardless of the original buffer alignment.
"""
import struct
def sum_checksum(data: bytes) -> int:
    """Byte-wise sum of ``data``, truncated to 32 bits."""
    total = 0
    for byte in data:
        total += byte
    return total & 0xFFFFFFFF
def xor_checksum(data: bytes) -> int:
    """Compute XOR checksum matching firmware ble_log_fast_checksum().

    XORs consecutive 4-byte little-endian words. Partial last word is
    zero-padded. Alignment-independent due to firmware's ror32 compensation.
    """
    if not data:
        return 0
    # Zero-pad to a whole number of 32-bit words, then fold them with XOR.
    padded = data + b'\x00' * ((-len(data)) % 4)
    checksum = 0
    for (word,) in struct.iter_unpack('<I', padded):
        checksum ^= word
    return checksum & 0xFFFFFFFF
@@ -0,0 +1,272 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Frame sync state machine with checksum auto-detection.
Parses BLE Log module frames from a raw byte stream.
See Spec Sections 7, 8.
"""
from collections.abc import Callable
from src.backend.checksum import sum_checksum
from src.backend.checksum import xor_checksum
from src.backend.models import CHECKSUM_STRUCT
from src.backend.models import FRAME_HEADER_SIZE
from src.backend.models import FRAME_OVERHEAD
from src.backend.models import HEADER_STRUCT
from src.backend.models import MAX_FRAME_SIZE
from src.backend.models import MAX_REMAINDER_SIZE
from src.backend.models import ChecksumAlgorithm
from src.backend.models import ChecksumMode
from src.backend.models import ChecksumScope
from src.backend.models import ParsedFrame
from src.backend.models import SyncState
# (algorithm, scope, implementation) triple tried while searching for sync.
ChecksumProbe = tuple[ChecksumAlgorithm, ChecksumScope, Callable[[bytes], int]]
# Probe order matters: the first matching combination becomes the candidate mode.
_CHECKSUM_PROBES: list[ChecksumProbe] = [
    (ChecksumAlgorithm.XOR, ChecksumScope.FULL, xor_checksum),
    (ChecksumAlgorithm.XOR, ChecksumScope.HEADER_ONLY, xor_checksum),
    (ChecksumAlgorithm.SUM, ChecksumScope.FULL, sum_checksum),
    (ChecksumAlgorithm.SUM, ChecksumScope.HEADER_ONLY, sum_checksum),
]
SYNC_CONFIRM_THRESHOLD = 3  # N consecutive valid frames to confirm sync
LOSS_TOLERANCE = 3  # M consecutive failures before resync
class FrameParser:
    """Stateful frame parser with sync state machine and checksum auto-detection.

    States: SEARCHING (probe all checksum modes) -> CONFIRMING_SYNC (need
    SYNC_CONFIRM_THRESHOLD consecutive frames with the same mode) -> SYNCED
    (locked mode) -> CONFIRMING_LOSS (tolerate up to LOSS_TOLERANCE failures
    before falling back to SEARCHING).
    """

    def __init__(self) -> None:
        # Unconsumed bytes carried between feed() calls (possible partial frame).
        self._remained = b''
        self._sync_state = SyncState.SEARCHING
        # Locked (algorithm, scope) once confirmed; None while still probing.
        self._checksum_mode: ChecksumMode | None = None
        self._confirm_count = 0
        self._loss_count = 0
        # Printable bytes gathered while never-synced, assembled into text lines.
        self._ascii_buffer = ''
        self._ever_synced = False

    @property
    def sync_state(self) -> SyncState:
        """Current sync state of the parser."""
        return self._sync_state

    @property
    def checksum_mode(self) -> ChecksumMode | None:
        """Detected checksum mode, or None before the first candidate frame."""
        return self._checksum_mode

    def feed(self, data: bytes) -> list[ParsedFrame | str]:
        """Feed raw bytes into the parser.

        Returns a list of:
        - ParsedFrame for successfully parsed frames
        - str for ASCII log lines or warning messages
        """
        self._remained += data
        results: list[ParsedFrame | str] = []
        # Bounded buffer check (Review Correction #2)
        if len(self._remained) > MAX_REMAINDER_SIZE:
            self._remained = b''
            self._transition_to(SyncState.SEARCHING)
            results.append('[WARN] Buffer overflow — discarded remainder, resync')
            return results
        offset = 0
        buf = self._remained
        while offset < len(buf):
            if self._sync_state in (SyncState.SEARCHING, SyncState.CONFIRMING_SYNC):
                result = self._try_parse_with_probe(buf, offset)
                if result is not None:
                    frame, next_offset, mode = result
                    self._flush_ascii(results)
                    results.append(frame)
                    offset = next_offset
                    self._on_frame_found(mode)
                elif self._sync_state == SyncState.CONFIRMING_SYNC and self._might_be_incomplete_frame(buf, offset):
                    # Wait for more bytes rather than breaking a partial frame.
                    break
                elif (
                    self._sync_state == SyncState.SEARCHING
                    and self._ever_synced
                    and self._might_be_incomplete_frame(buf, offset)
                ):
                    break
                else:
                    # Only before the first-ever sync do stray bytes count as text.
                    if not self._ever_synced:
                        self._collect_ascii(buf[offset : offset + 1], results)
                    offset += 1
            else:
                # SYNCED or CONFIRMING_LOSS: use locked checksum mode
                result_locked = self._try_parse_locked(buf, offset)
                if result_locked is not None:
                    frame, next_offset = result_locked
                    self._flush_ascii(results)
                    results.append(frame)
                    offset = next_offset
                    self._on_frame_valid()
                else:
                    # Check if we might have incomplete data at the end
                    if self._might_be_incomplete_frame(buf, offset):
                        break
                    self._on_frame_invalid()
                    if self._sync_state == SyncState.SEARCHING:
                        # Full resync — reprocess from current offset
                        continue
                    # Silently discard — do NOT collect ASCII here.
                    # In CONFIRMING_LOSS, failed bytes are corrupt frame data,
                    # not readable text. Collecting them would leak binary
                    # payload bytes that happen to be printable (0x20-0x7E).
                    offset += 1
        # Save remainder
        self._remained = buf[offset:] if offset < len(buf) else b''
        self._flush_ascii(results)
        return results

    def _try_parse_at(
        self,
        buf: bytes,
        offset: int,
        checksum_fn: Callable[[bytes], int],
        scope: ChecksumScope,
    ) -> tuple[ParsedFrame, int] | None:
        """Try to parse a frame at the given offset with specific checksum params."""
        if offset + FRAME_HEADER_SIZE > len(buf):
            return None
        payload_len, frame_meta = HEADER_STRUCT.unpack_from(buf, offset)
        # Sanity checks
        if payload_len > MAX_FRAME_SIZE:
            return None
        if offset + FRAME_OVERHEAD + payload_len > len(buf):
            return None
        header = buf[offset : offset + FRAME_HEADER_SIZE]
        payload = buf[offset + FRAME_HEADER_SIZE : offset + FRAME_HEADER_SIZE + payload_len]
        checksum_offset = offset + FRAME_HEADER_SIZE + payload_len
        stored_checksum = CHECKSUM_STRUCT.unpack_from(buf, checksum_offset)[0]
        # Compute checksum
        if scope == ChecksumScope.FULL:
            checksum_data = header + payload
        else:
            checksum_data = header
        computed = checksum_fn(checksum_data)
        if computed != stored_checksum:
            return None
        # frame_meta packs the source code (low byte) and frame SN (upper 24 bits).
        source_code = frame_meta & 0xFF
        frame_sn = frame_meta >> 8
        # Extract os_ts from first 4 bytes of payload
        os_ts_ms = 0
        if payload_len >= 4:
            os_ts_ms = int.from_bytes(payload[:4], 'little')
        frame = ParsedFrame(
            source_code=source_code,
            frame_sn=frame_sn,
            payload=payload,
            os_ts_ms=os_ts_ms,
        )
        next_offset = offset + FRAME_OVERHEAD + payload_len
        return frame, next_offset

    def _try_parse_with_probe(self, buf: bytes, offset: int) -> tuple[ParsedFrame, int, ChecksumMode] | None:
        """Try all checksum combinations at the given offset (SEARCHING mode)."""
        for algo, scope, fn in _CHECKSUM_PROBES:
            result = self._try_parse_at(buf, offset, fn, scope)
            if result is not None:
                frame, next_offset = result
                mode = ChecksumMode(algo, scope)
                return frame, next_offset, mode
        return None

    def _try_parse_locked(self, buf: bytes, offset: int) -> tuple[ParsedFrame, int] | None:
        """Try to parse with the locked checksum mode."""
        if self._checksum_mode is None:
            return None
        fn = xor_checksum if self._checksum_mode.algorithm == ChecksumAlgorithm.XOR else sum_checksum
        return self._try_parse_at(buf, offset, fn, self._checksum_mode.scope)

    def _on_frame_found(self, mode: ChecksumMode) -> None:
        """Called when a frame is found during SEARCHING/CONFIRMING_SYNC."""
        if self._sync_state == SyncState.SEARCHING:
            self._checksum_mode = mode
            self._confirm_count = 1
            self._transition_to(SyncState.CONFIRMING_SYNC)
        elif self._sync_state == SyncState.CONFIRMING_SYNC:
            # Review Correction #3: verify same checksum mode
            if (
                self._checksum_mode is not None
                and mode.algorithm == self._checksum_mode.algorithm
                and mode.scope == self._checksum_mode.scope
            ):
                self._confirm_count += 1
                if self._confirm_count >= SYNC_CONFIRM_THRESHOLD:
                    self._transition_to(SyncState.SYNCED)
            else:
                # Mode mismatch — restart confirmation with new mode
                self._checksum_mode = mode
                self._confirm_count = 1

    def _on_frame_valid(self) -> None:
        """Called when a frame passes checksum in SYNCED/CONFIRMING_LOSS."""
        self._loss_count = 0
        if self._sync_state == SyncState.CONFIRMING_LOSS:
            self._transition_to(SyncState.SYNCED)

    def _on_frame_invalid(self) -> None:
        """Called when checksum fails in SYNCED/CONFIRMING_LOSS."""
        if self._sync_state == SyncState.SYNCED:
            self._loss_count = 1
            self._transition_to(SyncState.CONFIRMING_LOSS)
        elif self._sync_state == SyncState.CONFIRMING_LOSS:
            self._loss_count += 1
            if self._loss_count > LOSS_TOLERANCE:
                # Too many consecutive failures — drop the locked mode and resync.
                self._transition_to(SyncState.SEARCHING)
                self._checksum_mode = None
                self._confirm_count = 0
                self._loss_count = 0

    def _might_be_incomplete_frame(self, buf: bytes, offset: int) -> bool:
        """Check if remaining data could be a partial frame waiting for more data."""
        remaining = len(buf) - offset
        if remaining < FRAME_OVERHEAD:
            return True
        if remaining >= FRAME_HEADER_SIZE:
            payload_len, _ = HEADER_STRUCT.unpack_from(buf, offset)
            if payload_len <= MAX_FRAME_SIZE and remaining < FRAME_OVERHEAD + payload_len:
                return True
        return False

    def _transition_to(self, new_state: SyncState) -> None:
        # _ever_synced is sticky: once SYNCED is reached, stray bytes are never
        # again interpreted as ASCII text (see feed()).
        if new_state == SyncState.SYNCED:
            self._ever_synced = True
        self._sync_state = new_state

    def _collect_ascii(self, byte_data: bytes, results: list[ParsedFrame | str]) -> None:
        """Collect bytes for ASCII line assembly.

        Only printable ASCII (0x20-0x7E) and newline (0x0A) are collected.
        Carriage return (0x0D) and other control characters are silently
        dropped, which normalises \\r\\n line endings to \\n for display.
        """
        for b in byte_data:
            if 0x20 <= b <= 0x7E:
                self._ascii_buffer += chr(b)
            elif b == 0x0A:  # newline
                if self._ascii_buffer:
                    results.append(self._ascii_buffer)
                    self._ascii_buffer = ''

    def _flush_ascii(self, results: list[ParsedFrame | str]) -> None:
        """Flush any pending ASCII buffer."""
        if self._ascii_buffer:
            results.append(self._ascii_buffer)
            self._ascii_buffer = ''
@@ -0,0 +1,97 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Decode BLE_LOG_SRC_INTERNAL(0) frame payloads.
Payload format on wire: [4B os_ts][1B int_src_code][variable sub-payload]
See Spec Section 9.
"""
import struct
from src.backend.models import BufUtilResult
from src.backend.models import EnhStatResult
from src.backend.models import InfoResult
from src.backend.models import InternalDecoderResult
from src.backend.models import InternalSource
# Minimum payload size: 4B os_ts + 1B int_src_code
_MIN_PAYLOAD_SIZE = 5
# ble_log_info_t: [1B int_src_code][1B version] — used by INIT_DONE, INFO, FLUSH
_INFO_STRUCT = struct.Struct('<BB')
# ble_log_enh_stat_t: [1B int_src_code][1B src_code][4B written][4B lost][4B written_bytes][4B lost_bytes]
_ENH_STAT_STRUCT = struct.Struct('<BBIIII')
# ble_log_buf_util_t: [1B int_src_code][1B lbm_id][1B trans_cnt][1B inflight_peak]
# lbm_id packs pool (high nibble) and index (low nibble) — see decode below.
_BUF_UTIL_STRUCT = struct.Struct('<BBBB')
def decode_internal_frame(payload: bytes) -> InternalDecoderResult | None:
    """Decode an INTERNAL frame payload.

    Args:
        payload: Full frame payload including os_ts prefix.

    Returns:
        Typed dict with decoded fields, or None if the frame should be
        ignored (TS), carries an unknown source code, or is malformed.
    """
    if len(payload) < _MIN_PAYLOAD_SIZE:
        return None
    (os_ts_ms,) = struct.unpack_from('<I', payload, 0)
    sub_payload = payload[4:]  # starts at int_src_code
    try:
        int_src = InternalSource(sub_payload[0])
    except ValueError:
        # Unknown internal source code — malformed or newer firmware.
        return None
    if int_src == InternalSource.TS:
        return None  # Ignored per spec
    if int_src in (InternalSource.INIT_DONE, InternalSource.INFO, InternalSource.FLUSH):
        if len(sub_payload) < _INFO_STRUCT.size:
            return None
        _, version = _INFO_STRUCT.unpack_from(sub_payload, 0)
        return InfoResult(int_src=int_src, version=version, os_ts_ms=os_ts_ms)
    if int_src == InternalSource.ENH_STAT:
        if len(sub_payload) < _ENH_STAT_STRUCT.size:
            return None
        fields = _ENH_STAT_STRUCT.unpack_from(sub_payload, 0)
        return EnhStatResult(
            int_src=int_src,
            src_code=fields[1],
            written_frame_cnt=fields[2],
            lost_frame_cnt=fields[3],
            written_bytes_cnt=fields[4],
            lost_bytes_cnt=fields[5],
            os_ts_ms=os_ts_ms,
        )
    if int_src == InternalSource.BUF_UTIL:
        if len(sub_payload) < _BUF_UTIL_STRUCT.size:
            return None
        _, lbm_id, trans_cnt, inflight_peak = _BUF_UTIL_STRUCT.unpack_from(sub_payload, 0)
        # lbm_id nibbles: high = pool, low = index within pool.
        return BufUtilResult(
            int_src=int_src,
            lbm_id=lbm_id,
            pool=(lbm_id >> 4) & 0x0F,
            index=lbm_id & 0x0F,
            trans_cnt=trans_cnt,
            inflight_peak=inflight_peak,
            os_ts_ms=os_ts_ms,
        )
    return None
@@ -0,0 +1,401 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import struct
from dataclasses import dataclass
from dataclasses import field
from enum import Enum
from pathlib import Path
from typing import TypedDict
from textual.message import Message
# --- Frame constants (Spec Section 7) ---
FRAME_HEADER_SIZE = 6  # 2B payload_len + 4B frame_meta
FRAME_TAIL_SIZE = 4  # 4B checksum
FRAME_OVERHEAD = FRAME_HEADER_SIZE + FRAME_TAIL_SIZE  # 10
MAX_FRAME_SIZE = 2048  # Max payload_len sanity check
MAX_REMAINDER_SIZE = 131072  # 128KB bounded buffer for the parser's carry-over
HEADER_FMT = '<HI'  # payload_len (uint16), frame_meta (uint32)
CHECKSUM_FMT = '<I'  # checksum (uint32)
# Pre-compiled struct objects shared by the parser hot path.
HEADER_STRUCT = struct.Struct(HEADER_FMT)
CHECKSUM_STRUCT = struct.Struct(CHECKSUM_FMT)
# --- Formatting helpers ---
def format_bytes(cnt: int) -> str:
    """Format byte count as human-readable string (B / KB / MB)."""
    kib = 1024
    if cnt < kib:
        return f'{cnt} B'
    if cnt < kib * kib:
        return f'{cnt / kib:.1f} KB'
    return f'{cnt / kib / kib:.2f} MB'
def format_throughput(bytes_per_sec: float) -> str:
    """Format throughput as human-readable string with auto KB/s ↔ MB/s switching."""
    kb = bytes_per_sec / 1024
    return f'{kb:.1f} KB/s' if kb < 1024 else f'{kb / 1024:.2f} MB/s'
# --- Enums ---
class SyncState(str, Enum):
    """Frame parser sync states (str-valued so .value is display-ready)."""

    SEARCHING = 'SEARCHING'
    CONFIRMING_SYNC = 'CONFIRMING'
    SYNCED = 'SYNCED'
    CONFIRMING_LOSS = 'CONFIRMING_LOSS'
class ChecksumAlgorithm(str, Enum):
    """Checksum algorithm used by the firmware (auto-detected by the parser)."""

    XOR = 'XOR'
    SUM = 'Sum'
class ChecksumScope(str, Enum):
    """Bytes covered by the frame checksum (auto-detected by the parser)."""

    FULL = 'Header+Payload'
    HEADER_ONLY = 'Header'
class BleLogSource(int, Enum):
    """Known frame source codes.

    NOTE(review): values presumably mirror the firmware's ble_log.h source
    codes (the REDIR comment below cites it) — confirm against that header.
    """

    INTERNAL = 0
    CUSTOM = 1
    LL_TASK = 2
    LL_HCI = 3
    LL_ISR = 4
    HOST = 5
    HCI = 6
    ENCODE = 7
    REDIR = 8  # BLE_LOG_SRC_REDIR in firmware ble_log.h (UART PORT 0 only)
# Type alias for source code values (BleLogSource member or unknown firmware code).
SourceCode = int
# Sources written via ble_log_write_hex_ll() or stream_write -- no 4-byte os_ts prefix.
_NO_OS_TS_SOURCES: frozenset[int] = frozenset(
    {BleLogSource.LL_TASK, BleLogSource.LL_HCI, BleLogSource.LL_ISR, BleLogSource.REDIR}
)
# Link Layer sources carry an lc_ts timestamp inside the payload instead of os_ts.
_LL_SOURCES: frozenset[int] = frozenset({BleLogSource.LL_TASK, BleLogSource.LL_HCI, BleLogSource.LL_ISR})
LL_TS_OFFSET = 2  # lc_ts starts at payload[2:6]
LL_TS_SIZE = 4  # lc_ts is a 4-byte (uint32) field
def has_os_ts(source_code: int) -> bool:
    """Return True if frames from this source carry a valid os_ts prefix."""
    if source_code in _NO_OS_TS_SOURCES:
        return False
    return True
def is_ll_source(source_code: int) -> bool:
    """Return True if this is a Link Layer source with lc_ts timestamp."""
    if source_code in _LL_SOURCES:
        return True
    return False
def resolve_source_name(src_code: int) -> str:
    """Resolve source code to BleLogSource name, with fallback for unknown codes."""
    try:
        source = BleLogSource(src_code)
    except ValueError:
        # Codes from newer firmware get a generic SRC_<n> label.
        return f'SRC_{src_code}'
    return str(source.name)
class InternalSource(int, Enum):
    """Sub-source codes inside INTERNAL frames (Spec Section 9)."""

    INIT_DONE = 0
    TS = 1
    ENH_STAT = 2
    INFO = 3
    FLUSH = 4
    BUF_UTIL = 5
class BufUtilPool(int, Enum):
    """LBM buffer pool codes reported in BUF_UTIL frames (high nibble of lbm_id)."""

    COMMON_TASK = 0
    COMMON_ISR = 1
    LL = 2
    REDIR = 3
# --- Data classes ---
@dataclass(slots=True)
class ChecksumMode:
    """Detected checksum configuration: which algorithm over which bytes."""

    algorithm: ChecksumAlgorithm
    scope: ChecksumScope
@dataclass(slots=True)
class ParsedFrame:
    """A single checksum-verified frame extracted from the byte stream."""

    source_code: int
    frame_sn: int
    payload: bytes  # includes os_ts prefix for ble_log_write_hex() frames
    os_ts_ms: int  # extracted from first 4 bytes of payload; only valid when has_os_ts(source_code) is True
@dataclass(slots=True)
class SourcePeakWrite:
    """Peak write burst for a single source within a 1ms window."""

    peak_frames: int = 0  # max frame count in any 1ms window
    peak_bytes: int = 0  # total bytes in that same window
@dataclass(slots=True)
class SourceStats:
    """Console-side accumulated per-source statistics (resilient to firmware counter resets)."""

    written_frames: int = 0
    written_bytes: int = 0
    lost_frames: int = 0
    lost_bytes: int = 0
@dataclass(frozen=True)
class BufUtilEntry:
    """Single LBM buffer utilization snapshot."""

    lbm_id: int  # raw packed id: pool in high nibble, index in low nibble
    pool: int
    index: int
    trans_cnt: int
    inflight_peak: int
# --- Buffer utilization name resolution ---
# (pool, index) -> human-readable LBM name; misses fall back in resolve_lbm_name().
_LBM_NAMES: dict[tuple[int, int], str] = {
    (0, 0): 'spin',
    (1, 0): 'spin',
    (2, 0): 'll_task',
    (2, 1): 'll_hci',
    (3, 0): 'redir',
}
def resolve_pool_name(pool: int) -> str:
    """Resolve pool code to BufUtilPool name, with fallback for unknown codes."""
    try:
        known = BufUtilPool(pool)
    except ValueError:
        return f'POOL_{pool}'
    return known.name
def resolve_lbm_name(pool: int, index: int) -> str:
    """Resolve pool + index to human-readable LBM name."""
    named = _LBM_NAMES.get((pool, index))
    if named is not None:
        return named
    # Indices >= 1 in pools 0 and 1 use the atomic[] naming scheme.
    if index >= 1 and pool in (0, 1):
        return f'atomic[{index - 1}]'
    return f'lbm_{pool}_{index}'
@dataclass(slots=True)
class TransportSnapshot:
    """Snapshot of transport-layer metrics for the current stats interval."""

    rx_bytes: int = 0  # total bytes received so far
    bps: float = 0.0  # current receive byte rate
    max_bps: float = 0.0  # highest observed byte rate
    fps: float = 0.0  # current frame rate
@dataclass(slots=True)
class LossSnapshot:
    """Snapshot of firmware-reported cumulative loss."""

    total_frames: int = 0
    total_bytes: int = 0
@dataclass(slots=True)
class PeakBurstSnapshot:
    """Peak write burst metrics for a single clock domain (os_ts or lc_ts)."""

    per_source: dict[SourceCode, SourcePeakWrite] | None = None  # current interval
    max_per_source: dict[SourceCode, SourcePeakWrite] | None = None  # all-time
class LossType(str, Enum):
    """Where frames were lost: in firmware buffers or on the wire."""

    BUFFER = 'buffer'  # firmware buffer full, frame dropped
    TRANSPORT = 'transport'  # UART/link loss
@dataclass(frozen=True)
class FrameByteCount:
    """A (frames, bytes) pair."""

    frames: int
    bytes: int
@dataclass(frozen=True)
class ThroughputInfo:
    """Rate metrics (frames/s and bytes/s)."""

    throughput_fps: float  # current console receive rate (rolling 1s window)
    throughput_bps: float  # current console receive byte rate
    peak_write_frames: int  # raw frame count in densest burst window
    peak_write_bytes: int  # raw byte count in that burst window
    peak_window_ms: int  # burst window size in ms
@dataclass(frozen=True)
class FunnelSnapshot:
    """Per-source three-layer funnel snapshot.

    Tracks frames from production (firmware) through the firmware buffer
    (written) down to what the console actually received.
    """

    source: int  # SourceCode
    # Three-layer funnel
    produced: FrameByteCount  # Layer 0: written + buffer_loss
    written: FrameByteCount  # Layer 1: from ENH_STAT
    received: FrameByteCount  # Layer 2: console-side counting
    # Loss breakdown
    buffer_loss: FrameByteCount  # from ENH_STAT lost counts
    transport_loss: FrameByteCount  # max(0, written - received)
    # Rate
    throughput: ThroughputInfo
@dataclass(slots=True)
class LaunchConfig:
    """Configuration returned by the Launch Screen."""

    port: str  # serial port device name
    baudrate: int
    log_dir: Path  # directory for the timestamped capture files
@dataclass(slots=True)
class FrameStats:
    """Periodic stats snapshot with metrics grouped by dimension."""

    transport: TransportSnapshot = field(default_factory=TransportSnapshot)
    loss: LossSnapshot = field(default_factory=LossSnapshot)
    os_peak: PeakBurstSnapshot = field(default_factory=PeakBurstSnapshot)  # os_ts clock domain
    ll_peak: PeakBurstSnapshot = field(default_factory=PeakBurstSnapshot)  # lc_ts clock domain
    per_source_rx_bytes: dict[SourceCode, int] | None = None
    sync_state: SyncState = SyncState.SEARCHING
    # Checksum mode detected by the parser; None until sync is established.
    checksum_algorithm: ChecksumAlgorithm | None = None
    checksum_scope: ChecksumScope | None = None
# --- TypedDicts for internal decoder results ---
class InfoResult(TypedDict):
    """Decoded INIT_DONE / INFO / FLUSH internal frame (ble_log_info_t)."""

    int_src: InternalSource
    version: int
    os_ts_ms: int
class EnhStatResult(TypedDict):
    """Decoded ENH_STAT internal frame (ble_log_enh_stat_t) — cumulative counters."""

    int_src: InternalSource
    src_code: int
    written_frame_cnt: int
    lost_frame_cnt: int
    written_bytes_cnt: int
    lost_bytes_cnt: int
    os_ts_ms: int
class BufUtilResult(TypedDict):
    """Decoded BUF_UTIL internal frame (ble_log_buf_util_t)."""

    int_src: InternalSource
    lbm_id: int  # raw packed id; pool/index below are its unpacked nibbles
    pool: int
    index: int
    trans_cnt: int
    inflight_peak: int
    os_ts_ms: int
# Union of every decoded INTERNAL frame shape returned by decode_internal_frame().
InternalDecoderResult = InfoResult | EnhStatResult | BufUtilResult
# --- Textual Messages (backend -> frontend) ---
class SyncStateChanged(Message):
    """Backend -> UI: the frame parser changed sync state."""

    def __init__(self, state: SyncState) -> None:
        super().__init__()
        self.state = state
class StatsUpdated(Message):
    """Backend -> UI: periodic statistics snapshot."""

    def __init__(
        self,
        stats: FrameStats,
        funnel_snapshots: list[FunnelSnapshot] | None = None,
        buf_util_snapshots: list[BufUtilEntry] | None = None,
    ) -> None:
        super().__init__()
        self.stats = stats
        # Normalise None to an empty list so consumers can always iterate.
        self.funnel_snapshots = funnel_snapshots or []
        self.buf_util_snapshots = buf_util_snapshots or []
class InternalFrameDecoded(Message):
    """Backend -> UI: an INTERNAL frame was decoded."""

    def __init__(self, int_src: InternalSource, payload: InternalDecoderResult) -> None:
        super().__init__()
        self.int_src = int_src
        self.payload = payload
class LogLine(Message):
    """Backend -> UI: one plain text line for the log view."""

    def __init__(self, text: str) -> None:
        super().__init__()
        self.text = text
class FrameLossDetected(Message):
    """Backend -> UI: frames were lost (buffer or transport loss)."""

    def __init__(
        self,
        source_name: str,
        loss_type: LossType,
        lost_frames: int,
        lost_bytes: int,
        sn_range: tuple[int, int] | None = None,
    ) -> None:
        super().__init__()
        self.source_name = source_name
        self.loss_type = loss_type
        self.lost_frames = lost_frames
        self.lost_bytes = lost_bytes
        # Optional SN range of the gap; None when not derived from SN tracking.
        self.sn_range = sn_range
class BackendStopped(Message):
    """Backend -> UI: the capture worker exited (reason is display text)."""

    def __init__(self, reason: str = '') -> None:
        super().__init__()
        self.reason = reason
class TrafficSpikeDetected(Message):
    """Backend -> UI: sustained throughput exceeded the spike threshold."""

    def __init__(
        self,
        throughput_kbs: float,
        wire_max_kbs: float,
        utilization_pct: float,
        duration_ms: float,
        per_source: dict[int, float],  # source code -> share of spike traffic in %
    ) -> None:
        super().__init__()
        self.throughput_kbs = throughput_kbs
        self.wire_max_kbs = wire_max_kbs
        self.utilization_pct = utilization_pct
        self.duration_ms = duration_ms
        self.per_source = per_source
@@ -0,0 +1,38 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Stats package -- re-exports for backward-compatible imports."""
from src.backend.stats.accumulator import StatsAccumulator
from src.backend.stats.firmware_loss import FirmwareLossTracker
from src.backend.stats.firmware_written import FirmwareWrittenTracker
from src.backend.stats.peak_burst import WRITE_RATE_WINDOW_MS
from src.backend.stats.peak_burst import PeakBurstTracker
from src.backend.stats.sn_gap import REORDER_WINDOW
from src.backend.stats.sn_gap import SN_MAX
from src.backend.stats.sn_gap import SNGapTracker
from src.backend.stats.traffic_spike import TRAFFIC_ALERT_COOLDOWN_SEC
from src.backend.stats.traffic_spike import TRAFFIC_THRESHOLD_PCT
from src.backend.stats.traffic_spike import TRAFFIC_WINDOW_SEC
from src.backend.stats.traffic_spike import TrafficSpikeDetector
from src.backend.stats.traffic_spike import TrafficSpikeResult
from src.backend.stats.transport import UART_BITS_PER_BYTE
from src.backend.stats.transport import TransportMetrics
__all__ = [
'FirmwareLossTracker',
'FirmwareWrittenTracker',
'PeakBurstTracker',
'REORDER_WINDOW',
'SN_MAX',
'SNGapTracker',
'StatsAccumulator',
'TRAFFIC_ALERT_COOLDOWN_SEC',
'TRAFFIC_THRESHOLD_PCT',
'TRAFFIC_WINDOW_SEC',
'TrafficSpikeDetector',
'TrafficSpikeResult',
'TransportMetrics',
'UART_BITS_PER_BYTE',
'WRITE_RATE_WINDOW_MS',
]
@@ -0,0 +1,243 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Thin composition of stats sub-modules into a single accumulator."""
from __future__ import annotations
from src.backend.models import BleLogSource
from src.backend.models import BufUtilEntry
from src.backend.models import ChecksumMode
from src.backend.models import FrameByteCount
from src.backend.models import FrameStats
from src.backend.models import FunnelSnapshot
from src.backend.models import SourceCode
from src.backend.models import SyncState
from src.backend.models import ThroughputInfo
from src.backend.stats.buf_util import BufUtilTracker
from src.backend.stats.firmware_loss import FirmwareLossTracker
from src.backend.stats.firmware_written import FirmwareWrittenTracker
from src.backend.stats.peak_burst import PeakBurstTracker
from src.backend.stats.peak_burst import WRITE_RATE_WINDOW_MS
from src.backend.stats.sn_gap import SNGapTracker
from src.backend.stats.traffic_spike import TrafficSpikeDetector
from src.backend.stats.traffic_spike import TrafficSpikeResult
from src.backend.stats.transport import TransportMetrics
from src.backend.stats.transport import UART_BITS_PER_BYTE
# Shared zero-valued frame/byte counter.
# NOTE(review): _ZERO is not referenced anywhere in this module chunk —
# confirm it is used elsewhere before removing.
_ZERO = FrameByteCount(frames=0, bytes=0)
# Minimum firmware version that provides per-frame SNs usable for gap
# tracking (see StatsAccumulator.set_firmware_version).
_SN_PRODUCED_MIN_VERSION = 4
class StatsAccumulator:
    """Facade composing the per-concern stat trackers into one accumulator.

    Owns transport metrics, peak-burst trackers for three clock domains
    (chip OS ticks, link-layer time, host wall clock), firmware ENH_STAT
    written/loss trackers, SN-gap tracking, traffic-spike detection and
    buffer-utilization tracking, and builds combined snapshots from them.
    """

    def __init__(self) -> None:
        self._transport = TransportMetrics()
        # One burst tracker per timestamp domain; see record_frame_ts /
        # record_ll_frame_ts / record_frame_wall_ts below.
        self._os_burst = PeakBurstTracker()
        self._ll_burst = PeakBurstTracker()
        self._wall_burst = PeakBurstTracker()
        self._fw_loss = FirmwareLossTracker()
        self._fw_written = FirmwareWrittenTracker()
        self._sn_gap = SNGapTracker()
        self._traffic = TrafficSpikeDetector()
        self._buf_util = BufUtilTracker()
        # Console-side per-source receive counters (frames / payload bytes).
        self._per_source_received_frames: dict[SourceCode, int] = {}
        self._per_source_received_bytes: dict[SourceCode, int] = {}
        # Last raw ENH_STAT report per source, used by the torn-read guard
        # in record_enh_stat.
        self._enh_stat_prev: dict[SourceCode, tuple[int, int, int, int]] = {}
        # Total elapsed time accumulated across funnel_snapshot calls.
        self._total_elapsed: float = 0.0
        # Written totals as of the previous funnel snapshot; transport loss
        # compares received counts against these (one period behind) so
        # frames still in flight are not reported as lost.
        self._prev_written: dict[SourceCode, tuple[int, int]] = {}
        self._sn_gap_enabled = False  # disabled until firmware version >= 4 confirmed

    def set_firmware_version(self, version: int) -> None:
        # SN-gap tracking is only meaningful once the firmware emits
        # per-frame SNs (version >= _SN_PRODUCED_MIN_VERSION).
        self._sn_gap_enabled = version >= _SN_PRODUCED_MIN_VERSION

    def record_bytes(self, count: int) -> None:
        # Raw transport-layer byte count (pre-framing).
        self._transport.record_bytes(count)

    def record_frame(self, frame_size: int = 0, src_code: int = 0, frame_sn: int = -1) -> int:
        """Record a received frame. Returns confirmed SN gap count (0 if SN tracking disabled)."""
        self._transport.record_frame()
        gap = 0
        # Per-source accounting requires a valid source and SN; src_code 0
        # (INTERNAL) and SN-less frames only count toward transport totals.
        if frame_sn >= 0 and src_code > 0:
            if self._sn_gap_enabled:
                gap = self._sn_gap.record(src_code, frame_sn)
            self._per_source_received_frames[src_code] = self._per_source_received_frames.get(src_code, 0) + 1
            self._per_source_received_bytes[src_code] = self._per_source_received_bytes.get(src_code, 0) + frame_size
        return gap

    # -- Timestamp-based burst tracking ------------------------------------------
    def record_frame_ts(self, os_ts_ms: int, frame_size: int, src_code: SourceCode) -> None:
        # Chip OS-tick timestamp domain (milliseconds).
        self._os_burst.record(os_ts_ms, frame_size, src_code)

    def record_ll_frame_ts(self, lc_ts_us: int, frame_size: int, src_code: SourceCode) -> None:
        # Link-layer timestamps arrive in microseconds; burst tracker works in ms.
        self._ll_burst.record(lc_ts_us // 1000, frame_size, src_code)

    def record_frame_wall_ts(self, wall_ms: int, frame_size: int, src_code: SourceCode) -> None:
        """Record frame with wall-clock timestamp for sources without chip-side timestamps."""
        self._wall_burst.record(wall_ms, frame_size, src_code)

    # -- Traffic spike -----------------------------------------------------------
    def set_wire_max(self, baudrate: int) -> None:
        # Convert baud (bits incl. framing) to max payload bytes/sec.
        self._traffic.set_wire_max_bps(baudrate / UART_BITS_PER_BYTE)

    def record_frame_traffic(self, frame_size: int, src_code: SourceCode) -> None:
        self._traffic.record(frame_size, src_code)

    def check_traffic(self) -> TrafficSpikeResult | None:
        # Returns an alert only when a spike just ended (see TrafficSpikeDetector).
        return self._traffic.check()

    # -- Buffer utilization ------------------------------------------------------
    def record_buf_util(self, lbm_id: int, trans_cnt: int, inflight_peak: int) -> None:
        self._buf_util.record(lbm_id, trans_cnt, inflight_peak)

    def buf_util_snapshot(self) -> list[BufUtilEntry]:
        return self._buf_util.snapshot()  # type: ignore[no-any-return]

    # -- Firmware ENH_STAT -------------------------------------------------------
    def record_enh_stat(
        self,
        src_code: SourceCode,
        written_frames: int,
        lost_frames: int,
        written_bytes: int,
        lost_bytes: int,
        baudrate: int,
    ) -> tuple[int, int]:
        """Record firmware ENH_STAT report. Returns (loss_delta_frames, loss_delta_bytes).
        Torn-read guard: discards reports where byte deltas exceed 2s of wire
        capacity (non-atomic enh_stat_t reads under concurrent ISR/task updates).
        """
        prev = self._enh_stat_prev.get(src_code)
        if prev is not None:
            # Max plausible delta: two seconds of payload at the wire rate.
            max_bytes_delta = baudrate * 2 // UART_BITS_PER_BYTE
            d_written_bytes = written_bytes - prev[2]
            d_lost_bytes = lost_bytes - prev[3]
            if d_written_bytes > max_bytes_delta or d_lost_bytes > max_bytes_delta:
                # Update prev to avoid cascading discards on next report
                self._enh_stat_prev[src_code] = (written_frames, lost_frames, written_bytes, lost_bytes)
                return (0, 0)
        self._enh_stat_prev[src_code] = (written_frames, lost_frames, written_bytes, lost_bytes)
        self._fw_written.record(src_code, written_frames, written_bytes)
        return self._fw_loss.record(src_code, lost_frames, lost_bytes)  # type: ignore[no-any-return]

    # -- Reset -------------------------------------------------------------------
    def reset(self, reason: str) -> None:
        """Reset components by group.
        reason: "init" (INIT_DONE) or "flush" (FLUSH)
        """
        # SN-coupled: always full reset
        self._sn_gap.reset()
        if reason == 'init':
            # ENH_STAT-coupled: full reset
            self._fw_loss.reset()
            self._fw_written.reset()
            self._enh_stat_prev.clear()
            self._prev_written.clear()
            self._buf_util.reset()
        elif reason == 'flush':
            # ENH_STAT-coupled: reset baselines only
            self._fw_loss.reset_baselines()
            self._fw_written.reset_baselines()
            self._enh_stat_prev.clear()
        # Console-local: preserve (no action)

    # -- Snapshots ---------------------------------------------------------------
    def snapshot(
        self,
        elapsed_sec: float,
        sync_state: SyncState = SyncState.SEARCHING,
        checksum_mode: ChecksumMode | None = None,
    ) -> FrameStats:
        """Build the periodic FrameStats snapshot for the UI.

        Harvests the wall-clock burst tracker too (result unused here) so
        its current-period peaks roll into the all-time maxima on the same
        cadence as the OS/LL trackers.
        """
        self._wall_burst.harvest()
        return FrameStats(
            transport=self._transport.harvest(elapsed_sec),
            loss=self._fw_loss.totals(),
            os_peak=self._os_burst.harvest(),
            ll_peak=self._ll_burst.harvest(),
            per_source_rx_bytes=(dict(self._per_source_received_bytes) if self._per_source_received_bytes else None),
            sync_state=sync_state,
            checksum_algorithm=checksum_mode.algorithm if checksum_mode else None,
            checksum_scope=checksum_mode.scope if checksum_mode else None,
        )

    def funnel_snapshot(self, elapsed_sec: float = 0.0) -> list[FunnelSnapshot]:
        """Build per-source funnel snapshots from all component data."""
        written_totals = self._fw_written.totals()
        loss_totals = self._fw_loss.per_source_totals()
        os_max_peaks = self._os_burst.max_peaks()
        ll_max_peaks = self._ll_burst.max_peaks()
        wall_max_peaks = self._wall_burst.max_peaks()
        # Union of sources seen by firmware counters or console reception.
        sources: set[int] = set()
        sources.update(written_totals)
        sources.update(loss_totals)
        sources.update(self._per_source_received_frames)
        # Exclude INTERNAL (src_code=0): its transport_loss is inherently
        # unknowable — if INTERNAL frames are lost, the ENH_STAT data inside
        # them never arrives, making the written-vs-received comparison circular.
        sources.discard(BleLogSource.INTERNAL)
        self._total_elapsed += elapsed_sec
        result: list[FunnelSnapshot] = []
        for src in sorted(sources):
            w_frames, w_bytes = written_totals.get(src, (0, 0))
            l_frames, l_bytes = loss_totals.get(src, (0, 0))
            r_frames = self._per_source_received_frames.get(src, 0)
            r_bytes = self._per_source_received_bytes.get(src, 0)
            # produced = written + dropped-before-write (firmware buffer loss).
            produced = FrameByteCount(frames=w_frames + l_frames, bytes=w_bytes + l_bytes)
            written = FrameByteCount(frames=w_frames, bytes=w_bytes)
            received = FrameByteCount(frames=r_frames, bytes=r_bytes)
            buffer_loss = FrameByteCount(frames=l_frames, bytes=l_bytes)
            # Compare against the PREVIOUS period's written totals so frames
            # still in flight this period are not counted as transport loss.
            pw_frames, pw_bytes = self._prev_written.get(src, (0, 0))
            transport_loss = FrameByteCount(
                frames=max(0, pw_frames - r_frames),
                bytes=max(0, pw_bytes - r_bytes),
            )
            if self._total_elapsed > 0:
                tp_fps = r_frames / self._total_elapsed
                tp_bps = r_bytes / self._total_elapsed
            else:
                tp_fps = 0.0
                tp_bps = 0.0
            # First non-empty peak wins: OS ticks, then LL time, then wall clock.
            peak = os_max_peaks.get(src) or ll_max_peaks.get(src) or wall_max_peaks.get(src)
            if peak:
                peak_frames = peak.peak_frames
                peak_bytes = peak.peak_bytes
            else:
                peak_frames = 0
                peak_bytes = 0
            result.append(
                FunnelSnapshot(
                    source=src,
                    produced=produced,
                    written=written,
                    received=received,
                    buffer_loss=buffer_loss,
                    transport_loss=transport_loss,
                    throughput=ThroughputInfo(
                        throughput_fps=tp_fps,
                        throughput_bps=tp_bps,
                        peak_write_frames=peak_frames,
                        peak_write_bytes=peak_bytes,
                        peak_window_ms=WRITE_RATE_WINDOW_MS,
                    ),
                )
            )
        self._prev_written = dict(written_totals)
        return result
@@ -0,0 +1,28 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
from src.backend.models import BufUtilEntry
class BufUtilTracker:
    """Keeps the latest buffer-utilization report per LBM id."""

    def __init__(self) -> None:
        # Most recent report keyed by lbm_id; newer reports overwrite older.
        self._entries: dict[int, BufUtilEntry] = {}

    def record(self, lbm_id: int, trans_cnt: int, inflight_peak: int) -> None:
        """Store the latest utilization report for one log buffer.

        The lbm_id packs the pool number in the high nibble and the buffer
        index in the low nibble.
        """
        self._entries[lbm_id] = BufUtilEntry(
            lbm_id=lbm_id,
            pool=(lbm_id >> 4) & 0x0F,
            index=lbm_id & 0x0F,
            trans_cnt=trans_cnt,
            inflight_peak=inflight_peak,
        )

    def reset(self) -> None:
        """Drop all stored reports."""
        self._entries.clear()

    def snapshot(self) -> list[BufUtilEntry]:
        """Return all stored reports ordered by (pool, index)."""
        return sorted(self._entries.values(), key=lambda entry: (entry.pool, entry.index))
@@ -0,0 +1,72 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Firmware ENH_STAT loss tracking with incremental delta accumulation.
Resilient to firmware counter resets from ``ble_log_bench_reset_stat``.
"""
from src.backend.models import LossSnapshot
from src.backend.models import SourceCode
class FirmwareLossTracker:
    """Tracks per-source firmware-reported loss using incremental deltas."""

    def __init__(self) -> None:
        # Last raw counter values reported by firmware, per source.
        self._frames_prev: dict[SourceCode, int] = {}
        self._bytes_prev: dict[SourceCode, int] = {}
        # Monotonic accumulated totals, resilient to firmware counter resets.
        self._frames_accum: dict[SourceCode, int] = {}
        self._bytes_accum: dict[SourceCode, int] = {}

    def record(self, src_code: SourceCode, lost_frames: int, lost_bytes: int) -> tuple[int, int]:
        """Record firmware-reported loss.
        Returns (new_frames, new_bytes) delta since last report.
        On first report or counter reset, returns (0, 0) and suppresses alert.
        """
        if src_code not in self._frames_prev:
            # No baseline yet: fresh source, or baselines were just reset.
            self._frames_prev[src_code] = lost_frames
            self._bytes_prev[src_code] = lost_bytes
            if src_code not in self._frames_accum:
                # Truly first report: seed totals with the cumulative counters.
                self._frames_accum[src_code] = lost_frames
                self._bytes_accum[src_code] = lost_bytes
            return (0, 0)
        d_frames = lost_frames - self._frames_prev[src_code]
        d_bytes = lost_bytes - self._bytes_prev[src_code]
        self._frames_prev[src_code] = lost_frames
        self._bytes_prev[src_code] = lost_bytes
        if min(d_frames, d_bytes) < 0:
            # Counters went backwards (ble_log_bench_reset_stat): treat the
            # new values as a fresh post-reset accumulation, suppress alert.
            self._frames_accum[src_code] += max(0, lost_frames)
            self._bytes_accum[src_code] += max(0, lost_bytes)
            return (0, 0)
        self._frames_accum[src_code] += d_frames
        self._bytes_accum[src_code] += d_bytes
        return (d_frames, d_bytes)

    def reset(self) -> None:
        """Forget baselines and accumulated totals (chip INIT_DONE)."""
        self._frames_prev.clear()
        self._bytes_prev.clear()
        self._frames_accum.clear()
        self._bytes_accum.clear()

    def reset_baselines(self) -> None:
        """Forget baselines only (FLUSH); accumulated totals survive."""
        self._frames_prev.clear()
        self._bytes_prev.clear()

    def per_source_totals(self) -> dict[SourceCode, tuple[int, int]]:
        """Return per-source cumulative loss as {src: (frames, bytes)}."""
        return {src: (frames, self._bytes_accum[src]) for src, frames in self._frames_accum.items()}

    def totals(self) -> LossSnapshot:
        """Return cumulative loss across all sources."""
        return LossSnapshot(
            total_frames=sum(self._frames_accum.values()),
            total_bytes=sum(self._bytes_accum.values()),
        )
@@ -0,0 +1,51 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.models import SourceCode
class FirmwareWrittenTracker:
    """Tracks per-source firmware written counters using incremental deltas.

    Mirrors FirmwareLossTracker: resilient to firmware counter resets from
    ble_log_bench_reset_stat.
    """

    def __init__(self) -> None:
        # Last raw counters reported by firmware, per source.
        self._frames_prev: dict[SourceCode, int] = {}
        self._bytes_prev: dict[SourceCode, int] = {}
        # Monotonic accumulated totals across counter resets.
        self._frames_accum: dict[SourceCode, int] = {}
        self._bytes_accum: dict[SourceCode, int] = {}

    def record(self, src_code: SourceCode, written_frames: int, written_bytes: int) -> tuple[int, int]:
        """Record a written-counter report; return the (frames, bytes) delta.

        Returns (0, 0) on the first report for a source and after a
        detected counter reset.
        """
        if src_code not in self._frames_prev:
            # No baseline: new source, or baselines were just reset.
            self._frames_prev[src_code] = written_frames
            self._bytes_prev[src_code] = written_bytes
            if src_code not in self._frames_accum:
                # Truly first report: seed totals with the cumulative counters.
                self._frames_accum[src_code] = written_frames
                self._bytes_accum[src_code] = written_bytes
            return (0, 0)
        d_frames = written_frames - self._frames_prev[src_code]
        d_bytes = written_bytes - self._bytes_prev[src_code]
        self._frames_prev[src_code] = written_frames
        self._bytes_prev[src_code] = written_bytes
        if min(d_frames, d_bytes) < 0:
            # Counter reset: fold the fresh post-reset values into the totals.
            self._frames_accum[src_code] += max(0, written_frames)
            self._bytes_accum[src_code] += max(0, written_bytes)
            return (0, 0)
        self._frames_accum[src_code] += d_frames
        self._bytes_accum[src_code] += d_bytes
        return (d_frames, d_bytes)

    def totals(self) -> dict[SourceCode, tuple[int, int]]:
        """Return per-source cumulative written counts as {src: (frames, bytes)}."""
        return {src: (frames, self._bytes_accum[src]) for src, frames in self._frames_accum.items()}

    def reset(self) -> None:
        """Forget baselines and accumulated totals (chip INIT_DONE)."""
        self._frames_prev.clear()
        self._bytes_prev.clear()
        self._frames_accum.clear()
        self._bytes_accum.clear()

    def reset_baselines(self) -> None:
        """Forget baselines only (FLUSH); accumulated totals survive."""
        self._frames_prev.clear()
        self._bytes_prev.clear()
@@ -0,0 +1,105 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Sliding-window peak write burst tracker.
Tracks the densest burst of log writes within a configurable time window
over chip-side timestamps. A single instance handles one clock domain
(os_ts or lc_ts); the accumulator holds two instances.
The window uses millisecond-resolution timestamps. Because log writes
happen at microsecond frequency, many frames share the same ms timestamp.
Instead of computing an inaccurate fps, we count frames and bytes within
the densest window.
"""
from collections import deque
from src.backend.models import PeakBurstSnapshot
from src.backend.models import SourceCode
from src.backend.models import SourcePeakWrite
# Sliding window width in chip timestamp space (milliseconds).
WRITE_RATE_WINDOW_MS = 10
# Chip timestamps are unsigned 32-bit; these masks support wraparound-aware
# delta computation in _ts_delta_ms.
_UINT32_MAX = 0xFFFF_FFFF
_UINT32_HALF = _UINT32_MAX // 2
# Type alias for a single window entry: (ts_ms, frame_size, src_code)
_WindowEntry = tuple[int, int, SourceCode]
def _ts_delta_ms(newer: int, older: int) -> int:
"""Compute forward delta between two uint32 timestamps, handling wraparound."""
diff = (newer - older) & _UINT32_MAX
if diff > _UINT32_HALF:
return -1
return diff
def _window_peak(window: deque[_WindowEntry]) -> dict[SourceCode, SourcePeakWrite]:
    """Compute per-source frame/byte totals for the current window contents."""
    # Tally with plain counters first, then build the immutable results.
    frame_tally: dict[SourceCode, int] = {}
    byte_tally: dict[SourceCode, int] = {}
    for _, size, src in window:
        frame_tally[src] = frame_tally.get(src, 0) + 1
        byte_tally[src] = byte_tally.get(src, 0) + size
    return {
        src: SourcePeakWrite(peak_frames=frames, peak_bytes=byte_tally[src])
        for src, frames in frame_tally.items()
    }
class PeakBurstTracker:
    """Sliding-window peak frame burst over a timestamp stream."""

    def __init__(self, window_ms: int = WRITE_RATE_WINDOW_MS) -> None:
        self._window: deque[_WindowEntry] = deque()
        self._window_ms = window_ms
        # Best per-source burst seen since the last harvest().
        self._per_source_peak: dict[SourceCode, SourcePeakWrite] = {}
        # Best per-source burst seen since construction (never cleared).
        self._max_per_source_peak: dict[SourceCode, SourcePeakWrite] = {}

    def record(self, ts_ms: int, frame_size: int, src_code: SourceCode) -> None:
        """Record a frame timestamp for peak burst calculation."""
        newest: _WindowEntry = (ts_ms, frame_size, src_code)
        self._window.append(newest)
        # Evict entries older than the window. A negative delta means the
        # uint32 timestamp moved backwards (wrap/reset): restart the window
        # with only the newest entry.
        while len(self._window) > 1:
            age = _ts_delta_ms(ts_ms, self._window[0][0])
            if age < 0:
                self._window.clear()
                self._window.append(newest)
                break
            if age < self._window_ms:
                break
            self._window.popleft()
        # Fold the current window's densities into the period peaks.
        for src, burst in _window_peak(self._window).items():
            best = self._per_source_peak.get(src)
            if best is None or burst.peak_frames > best.peak_frames:
                self._per_source_peak[src] = burst

    def harvest(self) -> PeakBurstSnapshot:
        """Take current-period peaks, update all-time max, reset current period."""
        current = self._per_source_peak or None
        for src, burst in self._per_source_peak.items():
            best = self._max_per_source_peak.get(src)
            if best is None or burst.peak_frames > best.peak_frames:
                self._max_per_source_peak[src] = burst
        # Rebind (not clear) so `current` keeps the harvested dict alive.
        self._per_source_peak = {}
        all_time = dict(self._max_per_source_peak) or None
        return PeakBurstSnapshot(
            per_source=current,
            max_per_source=all_time,
        )

    def max_peaks(self) -> dict[SourceCode, SourcePeakWrite]:
        """Return all-time max peaks per source (non-destructive, no reset)."""
        return dict(self._max_per_source_peak)
@@ -0,0 +1,102 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Sliding receive window gap tracker for per-source frame sequence numbers.
Frames are only declared lost when the receive window advances past their SN
without them being received, tolerating out-of-order delivery up to
REORDER_WINDOW frames.
"""
from src.backend.models import SourceCode
SN_MAX = 1 << 24  # 24-bit SN space
REORDER_WINDOW = 256  # receive window size


class SNGapTracker:
    """Tracks per-source frame gaps using a sliding receive window."""

    def __init__(self) -> None:
        # Next SN expected to leave the window; everything below it is settled.
        self._window_base: dict[SourceCode, int] = {}
        # SNs at/above the window base that have arrived (possibly out of order).
        self._received: dict[SourceCode, set[int]] = {}
        # Cumulative confirmed-lost SN count per source.
        self._gap_accum: dict[SourceCode, int] = {}

    def record(self, src_code: SourceCode, frame_sn: int) -> int:
        """Record a received frame SN and return newly confirmed gap count.
        Returns the number of SNs confirmed lost by this call (0 for in-order
        or reordered frames within the window).
        """
        if src_code not in self._window_base:
            # First frame: establish baseline
            self._window_base[src_code] = (frame_sn + 1) % SN_MAX
            self._received[src_code] = set()
            self._gap_accum[src_code] = 0
            return 0
        dist = self._distance(frame_sn, self._window_base[src_code])
        if 0 <= dist < REORDER_WINDOW:
            # Within receive window: mark received, advance base
            self._received[src_code].add(frame_sn)
            return self._advance(src_code)
        if dist >= REORDER_WINDOW:
            # Beyond window: expire old slots as confirmed gaps
            new_base = (frame_sn - REORDER_WINDOW + 1) % SN_MAX
            gaps = self._expire_to(src_code, new_base)
            self._received[src_code].add(frame_sn)
            self._advance(src_code)
            return gaps
        if dist >= -REORDER_WINDOW:
            # Behind window within tolerance: late arrival, already handled
            # (either settled earlier or already counted as a confirmed gap).
            return 0
        # Far behind window: likely reset (FLUSH/INIT_DONE)
        self._window_base[src_code] = (frame_sn + 1) % SN_MAX
        self._received[src_code] = set()
        return 0

    def totals(self) -> dict[SourceCode, int]:
        """Return cumulative confirmed gap count per source."""
        return dict(self._gap_accum)

    def reset(self, src_code: SourceCode | None = None) -> None:
        """Reset tracker state.
        If src_code is None, resets all sources.
        Otherwise resets only the specified source.
        """
        if src_code is None:
            self._window_base.clear()
            self._received.clear()
            self._gap_accum.clear()
        else:
            self._window_base.pop(src_code, None)
            self._received.pop(src_code, None)
            self._gap_accum.pop(src_code, None)

    def _distance(self, sn: int, base: int) -> int:
        """Signed distance from base to sn in 24-bit SN space."""
        d = (sn - base) % SN_MAX
        return d if d < SN_MAX // 2 else d - SN_MAX

    def _advance(self, src_code: SourceCode) -> int:
        """Advance base past continuous received SNs (bounded by window size)."""
        while self._window_base[src_code] in self._received[src_code]:
            self._received[src_code].discard(self._window_base[src_code])
            self._window_base[src_code] = (self._window_base[src_code] + 1) % SN_MAX
        return 0

    def _expire_to(self, src_code: SourceCode, new_base: int) -> int:
        """Advance base to new_base, counting unreceived SNs as confirmed gaps.

        Runs in O(len(received)) instead of O(distance): the previous per-SN
        walk could iterate up to SN_MAX/2 (~8.4M) times on a corrupted or
        far-future SN, stalling the console on a single frame.
        """
        base = self._window_base[src_code]
        span = (new_base - base) % SN_MAX  # number of slots being expired
        received = self._received[src_code]
        # SNs inside [base, new_base) that actually arrived are not gaps.
        settled = {sn for sn in received if (sn - base) % SN_MAX < span}
        gaps = span - len(settled)
        received.difference_update(settled)
        self._window_base[src_code] = new_base
        self._gap_accum[src_code] += gaps
        return gaps
@@ -0,0 +1,94 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Real-time traffic spike detection using a sliding window over wall-clock time."""
import time
from collections import deque
from dataclasses import dataclass
from src.backend.models import SourceCode
TRAFFIC_WINDOW_SEC = 0.1 # 100ms detection window
TRAFFIC_THRESHOLD_PCT = 0.8 # 80% of wire max
TRAFFIC_ALERT_COOLDOWN_SEC = 2.0 # minimum interval between alerts
@dataclass(slots=True)
class TrafficSpikeResult:
throughput_kbs: float
wire_max_kbs: float
utilization_pct: float
duration_ms: float
per_source: dict[SourceCode, float]
class TrafficSpikeDetector:
"""Detects traffic spikes exceeding a percentage of theoretical wire capacity."""
def __init__(self) -> None:
self._wire_max_bps: float = 0.0
self._window: deque[tuple[float, int, SourceCode]] = deque()
self._spike_active = False
self._spike_start: float = 0.0
self._spike_peak_bps: float = 0.0
self._spike_per_source: dict[SourceCode, int] = {}
self._last_alert_time: float = 0.0
def set_wire_max_bps(self, wire_max_bps: float) -> None:
self._wire_max_bps = wire_max_bps
def record(self, frame_size: int, src_code: SourceCode) -> None:
self._window.append((time.perf_counter(), frame_size, src_code))
if self._spike_active:
self._spike_per_source[src_code] = self._spike_per_source.get(src_code, 0) + frame_size
def check(self) -> TrafficSpikeResult | None:
now = time.perf_counter()
window = self._window
cutoff = now - TRAFFIC_WINDOW_SEC
while window and window[0][0] < cutoff:
window.popleft()
if self._wire_max_bps <= 0:
return None
window_bytes = sum(b for _, b, _ in window)
throughput_bps = window_bytes / TRAFFIC_WINDOW_SEC
utilization = throughput_bps / self._wire_max_bps
if utilization >= TRAFFIC_THRESHOLD_PCT:
if not self._spike_active:
self._spike_active = True
self._spike_start = now
self._spike_peak_bps = 0.0
self._spike_per_source = {}
for _, b, src in window:
self._spike_per_source[src] = self._spike_per_source.get(src, 0) + b
if throughput_bps > self._spike_peak_bps:
self._spike_peak_bps = throughput_bps
return None
if not self._spike_active:
return None
self._spike_active = False
duration_ms = (now - self._spike_start) * 1000.0
if now - self._last_alert_time < TRAFFIC_ALERT_COOLDOWN_SEC:
return None
self._last_alert_time = now
spike_bps = self._spike_peak_bps
src_total = max(sum(self._spike_per_source.values()), 1)
src_pcts = {src: v / src_total * 100.0 for src, v in self._spike_per_source.items()}
return TrafficSpikeResult(
throughput_kbs=spike_bps / 1024.0,
wire_max_kbs=self._wire_max_bps / 1024.0,
utilization_pct=spike_bps / self._wire_max_bps * 100.0,
duration_ms=duration_ms,
per_source=src_pcts,
)
@@ -0,0 +1,46 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Transport-layer metrics: RX bytes, throughput (bps), frame rate (fps)."""
from src.backend.models import TransportSnapshot
UART_BITS_PER_BYTE = 10 # 8 data + 1 start + 1 stop
class TransportMetrics:
    """Tracks cumulative RX bytes and frame count with delta-based rate snapshots."""

    def __init__(self) -> None:
        self._rx_bytes = 0  # total bytes since start
        self._rx_bytes_snapshot = 0  # _rx_bytes value at last harvest
        self._frame_count = 0  # total frames since start
        self._frame_count_snapshot = 0  # _frame_count value at last harvest
        self._max_bps = 0.0  # highest bps ever reported by harvest()

    def record_bytes(self, count: int) -> None:
        """Add received bytes to the running total."""
        self._rx_bytes += count

    def record_frame(self) -> None:
        """Count one decoded frame."""
        self._frame_count += 1

    def harvest(self, elapsed_sec: float) -> TransportSnapshot:
        """Compute rates from deltas since last harvest, update max, and reset deltas."""
        if elapsed_sec > 0:
            # bps includes UART framing overhead (start/stop bits).
            bps = (self._rx_bytes - self._rx_bytes_snapshot) * UART_BITS_PER_BYTE / elapsed_sec
            fps = (self._frame_count - self._frame_count_snapshot) / elapsed_sec
        else:
            bps = 0.0
            fps = 0.0
        self._max_bps = max(self._max_bps, bps)
        self._rx_bytes_snapshot = self._rx_bytes
        self._frame_count_snapshot = self._frame_count
        return TransportSnapshot(
            rx_bytes=self._rx_bytes,
            bps=bps,
            max_bps=self._max_bps,
            fps=fps,
        )
@@ -0,0 +1,33 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""UART read loop with raw binary file writing.
See Spec Sections 6, 12.
"""
import serial
import serial.tools.list_ports
# Serial read timeout in seconds; keeps the read loop responsive.
UART_READ_TIMEOUT = 0.1
# Maximum bytes requested per read (50 KiB).
UART_BLOCK_SIZE = 50 * 1024
def list_serial_ports() -> list[str]:
    """Enumerate the device names of all serial ports visible on this host."""
    return [info.device for info in serial.tools.list_ports.comports()]
def validate_uart_port(port: str) -> str | None:
    """Validate port exists and is accessible. Returns error message or None if valid."""
    available = list_serial_ports()
    if port in available:
        return None
    return f"UART port '{port}' not found. Available: {available}"
def open_serial(port: str, baudrate: int) -> serial.Serial:
    """Open a serial port, preferring exclusive access.

    Falls back to a non-exclusive open when exclusive mode is rejected
    (ValueError) or fails (SerialException) on the current platform/port.
    """
    common = dict(baudrate=baudrate, timeout=UART_READ_TIMEOUT)
    try:
        return serial.Serial(port, exclusive=True, **common)
    except (ValueError, serial.SerialException):
        # Retry without the exclusive lock; a genuinely unavailable port
        # will raise again here and propagate to the caller.
        return serial.Serial(port, **common)
@@ -0,0 +1,2 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
@@ -0,0 +1,175 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Launch Screen — interactive setup for port, baud rate, and log directory.
Shown on startup when --port is not provided via CLI.
Dismissed with a LaunchConfig result on Connect, or None on quit.
"""
from pathlib import Path
from textual import on
from textual import work
from textual.app import ComposeResult
from textual.binding import Binding
from textual.containers import Center
from textual.containers import Vertical
from textual.screen import Screen
from textual.widgets import Button
from textual.widgets import Input
from textual.widgets import Label
from textual.widgets import Select
from textual_fspicker import SelectDirectory
from src.backend.models import LaunchConfig
from src.backend.uart_transport import list_serial_ports
# Baud rates offered in the launch screen dropdown.
BAUD_RATES: list[int] = [115200, 230400, 460800, 921600, 1500000, 2000000, 3000000]
# Default selection in the baud-rate dropdown.
DEFAULT_BAUD_RATE: int = 3000000
class LaunchScreen(Screen[LaunchConfig | None]):
    """Interactive setup screen for BLE Log Console.

    Collects serial port, baud rate and log directory; dismisses with a
    LaunchConfig on Connect, or None when the user quits.
    """

    DEFAULT_CSS = """
    LaunchScreen {
        align: center middle;
    }
    #launch-container {
        width: 60;
        max-height: 80%;
        background: $surface;
        padding: 1 2;
        border: thick $accent;
    }
    #launch-title {
        text-align: center;
        text-style: bold;
        margin-bottom: 1;
    }
    .field-label {
        margin-top: 1;
    }
    .field-row {
        height: auto;
        layout: horizontal;
    }
    #port-select {
        width: 1fr;
    }
    #refresh-btn {
        width: auto;
        min-width: 12;
    }
    #dir-input {
        width: 1fr;
    }
    #browse-btn {
        width: auto;
        min-width: 12;
    }
    #connect-btn {
        margin-top: 2;
    }
    #no-ports-label {
        color: $warning;
    }
    """

    # 'q'/'Q' and Ctrl+C all dismiss with None (quit).
    BINDINGS = [
        Binding('q', 'quit', 'Quit'),
        Binding('Q', 'quit', show=False),
        Binding('ctrl+c', 'quit', show=False, priority=True),
    ]

    def __init__(self, default_log_dir: Path | None = None) -> None:
        super().__init__()
        # Fall back to the current working directory when no default is given.
        self._default_log_dir = default_log_dir or Path.cwd()

    def compose(self) -> ComposeResult:
        """Build the setup form: port select, baud select, log dir, Connect."""
        ports = list_serial_ports()
        port_options = [(p, p) for p in ports]
        baud_options = [(str(b), b) for b in BAUD_RATES]
        with Vertical(id='launch-container'):
            yield Label('BLE Log Console Setup', id='launch-title')
            yield Label('Serial Port', classes='field-label')
            with Vertical(classes='field-row'):
                if port_options:
                    # Pre-select the first detected port.
                    yield Select(port_options, value=ports[0], id='port-select')
                else:
                    yield Select([], id='port-select', prompt='No ports detected')
                    yield Label('No serial ports detected', id='no-ports-label')
                yield Button('Refresh', id='refresh-btn')
            yield Label('Baud Rate', classes='field-label')
            yield Select(baud_options, value=DEFAULT_BAUD_RATE, id='baud-select')
            yield Label('Log Directory', classes='field-label')
            with Vertical(classes='field-row'):
                yield Input(str(self._default_log_dir), id='dir-input')
                yield Button('Browse...', id='browse-btn')
            with Center():
                yield Button('Connect', variant='primary', id='connect-btn')

    @on(Button.Pressed, '#refresh-btn')
    def refresh_ports(self) -> None:
        """Re-scan serial ports and update the Select widget."""
        ports = list_serial_ports()
        port_options = [(p, p) for p in ports]
        port_select = self.query_one('#port-select', Select)
        port_select.set_options(port_options)
        if ports:
            port_select.value = ports[0]

    @on(Button.Pressed, '#browse-btn')
    @work
    async def browse_directory(self) -> None:
        """Open a directory picker dialog."""
        # Start browsing from the currently entered directory when valid.
        current = self.query_one('#dir-input', Input).value
        start = Path(current) if current else Path.cwd()
        if not start.is_dir():
            start = Path.cwd()
        chosen = await self.app.push_screen_wait(SelectDirectory(location=start))
        if chosen is not None:
            self.query_one('#dir-input', Input).value = str(chosen)

    @on(Button.Pressed, '#connect-btn')
    def connect(self) -> None:
        """Validate and return config."""
        port_select = self.query_one('#port-select', Select)
        baud_select = self.query_one('#baud-select', Select)
        dir_input = self.query_one('#dir-input', Input)
        if port_select.value is Select.BLANK:
            self.notify('Please select a serial port', severity='error')
            return
        if baud_select.value is Select.BLANK:
            self.notify('Please select a baud rate', severity='error')
            return
        log_dir = Path(dir_input.value)
        config = LaunchConfig(
            port=str(port_select.value),
            baudrate=int(baud_select.value),  # type: ignore[arg-type] # guarded above
            log_dir=log_dir,
        )
        self.dismiss(config)

    def action_quit(self) -> None:
        """Dismiss without a config (user quit)."""
        self.dismiss(None)
@@ -0,0 +1,47 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Scrollable log view widget.
See Spec Section 11.
"""
from rich.text import Text
from textual.widgets import RichLog
class LogView(RichLog):
    """Scrollable console log pane with colored severity/category tag prefixes."""

    DEFAULT_CSS = """
    LogView {
        height: 1fr;
    }
    """

    def __init__(self) -> None:
        # Markup enabled for the tag prefixes; auto-scroll keeps the tail visible.
        super().__init__(highlight=False, markup=True, wrap=True, auto_scroll=True)

    def _write_tagged(self, tag: str, color: str, text: str) -> None:
        """Write one line prefixed with a dimmed, colored [TAG] marker."""
        line = Text.from_markup(f'[dim][{color}]\\[{tag}][/{color}] [/dim]')
        line.append(text)
        self.write(line)

    def write_info(self, text: str) -> None:
        """Log an informational message."""
        self._write_tagged('INFO', 'green', text)

    def write_warning(self, text: str) -> None:
        """Log a warning message."""
        self._write_tagged('WARN', 'yellow', text)

    def write_error(self, text: str) -> None:
        """Log an error message."""
        self._write_tagged('ERROR', 'red', text)

    def write_sync(self, text: str) -> None:
        """Log a sync-state message."""
        self._write_tagged('SYNC', 'cyan', text)

    def write_enh_stat(self, text: str) -> None:
        """Log a firmware ENH_STAT report line."""
        self._write_tagged('ENH_STAT', 'cyan', text)

    def write_traffic(self, text: str) -> None:
        """Log a traffic-spike alert line."""
        self._write_tagged('TRAFFIC', 'magenta', text)

    def write_ascii(self, text: str) -> None:
        """Write raw text without any markup interpretation."""
        self.write(Text(text))
@@ -0,0 +1,65 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Modal screen showing available keyboard shortcuts.
Pushed by the 'h' keybinding; dismissed by Escape or 'h' again.
"""
from rich.table import Table
from textual.app import ComposeResult
from textual.binding import Binding
from textual.screen import ModalScreen
from textual.widgets import Static
# (key, description) pairs rendered by _build_shortcut_table.
_SHORTCUTS = [
    ('q', 'Quit'),
    ('c', 'Clear log'),
    ('s', 'Toggle auto-scroll'),
    ('d', 'Frame statistics'),
    ('h', 'This help screen'),
    ('r', 'Reset chip'),
]
def _build_shortcut_table() -> Table:
    """Render the _SHORTCUTS key/action pairs as a two-column Rich table."""
    grid = Table(title='Keyboard Shortcuts', expand=True)
    grid.add_column('Key', style='cyan', no_wrap=True)
    grid.add_column('Action')
    for entry in _SHORTCUTS:
        grid.add_row(*entry)
    return grid
class ShortcutScreen(ModalScreen):
    """Modal overlay showing available keyboard shortcuts."""

    DEFAULT_CSS = """
    ShortcutScreen {
        align: center middle;
    }
    ShortcutScreen > Static {
        width: 60;
        max-height: 80%;
        background: $surface;
        padding: 1 2;
        border: thick $accent;
    }
    """

    # Escape or 'h'/'H' (the same key that opened it) closes the overlay.
    BINDINGS = [
        Binding('escape', 'dismiss', 'Close'),
        Binding('h', 'dismiss', 'Close'),
        Binding('H', 'dismiss', show=False),
    ]

    def compose(self) -> ComposeResult:
        """Yield the shortcut table plus a dismissal hint."""
        body = Static()
        body.update(_build_shortcut_table())
        yield body
        yield Static('[dim]Press Escape to return[/dim]')
@@ -0,0 +1,236 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Modal screen for per-source frame statistics display.
Pushed by the 'd' keybinding; dismissed by Escape or 'd' again.
Refreshes every second to show live throughput data.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from rich.table import Table
from rich.text import Text
from textual.app import ComposeResult
from textual.binding import Binding
from textual.containers import Vertical
from textual.screen import ModalScreen
from textual.widgets import Static
from src.backend.models import BufUtilEntry
from src.backend.models import FunnelSnapshot
from src.backend.models import format_bytes
from src.backend.models import format_throughput
from src.backend.models import resolve_lbm_name
from src.backend.models import resolve_pool_name
from src.backend.models import resolve_source_name
if TYPE_CHECKING:
from src.app import BLELogApp
REFRESH_INTERVAL_SEC = 1.0
def _fmt_frames(n: int) -> str:
return str(n) if n > 0 else '-'
def _fmt_loss_frames(n: int) -> Text:
    """Lost-frame count rendered in red, or a plain dash when zero."""
    return Text('-') if n == 0 else Text(str(n), style='red')
def _fmt_loss_bytes(n: int) -> Text:
    """Human-readable lost-byte count in red, or a plain dash when zero."""
    return Text('-') if n == 0 else Text(format_bytes(n), style='red')
def _build_firmware_table(snapshots: list[FunnelSnapshot]) -> Table:
    """Build the per-source firmware counter table (absolute since chip init)."""
    tbl = Table(title='Firmware Counters (since chip init)', expand=True)
    tbl.add_column('Source', style='cyan', no_wrap=True, min_width=12, max_width=16)
    tbl.add_column('Written\nFrames', justify='right', min_width=10, max_width=12)
    tbl.add_column('Written\nBytes', justify='right', min_width=10, max_width=12)
    tbl.add_column('Buffer Loss\nFrames', justify='right', min_width=12, max_width=14)
    tbl.add_column('Buffer Loss\nBytes', justify='right', min_width=12, max_width=14)
    for s in snapshots:
        written_bytes = format_bytes(s.written.bytes) if s.written.bytes > 0 else '-'
        tbl.add_row(
            resolve_source_name(s.source),
            _fmt_frames(s.written.frames),
            written_bytes,
            _fmt_loss_frames(s.buffer_loss.frames),
            _fmt_loss_bytes(s.buffer_loss.bytes),
        )
    return tbl
def _build_buf_util_table(entries: list[BufUtilEntry]) -> Table:
    """Build the buffer utilization table (peak in-flight vs. total buffers)."""
    tbl = Table(title='Buffer Utilization (since chip init)', expand=True)
    tbl.add_column('Pool', style='cyan', no_wrap=True, min_width=12, max_width=16)
    tbl.add_column('Idx', justify='right', min_width=4, max_width=6)
    tbl.add_column('Name', style='cyan', no_wrap=True, min_width=10, max_width=14)
    tbl.add_column('Peak', justify='right', min_width=6, max_width=8)
    tbl.add_column('Total', justify='right', min_width=6, max_width=8)
    tbl.add_column('Util%', justify='right', min_width=6, max_width=8)
    for e in entries:
        if e.trans_cnt <= 0:
            util_text = Text('-')
        else:
            # Highlight saturated pools (peak usage reached pool size) in red.
            util = e.inflight_peak / e.trans_cnt * 100
            util_text = Text(f'{util:.0f}%', style='red' if util >= 100 else '')
        tbl.add_row(
            resolve_pool_name(e.pool),
            str(e.index),
            resolve_lbm_name(e.pool, e.index),
            str(e.inflight_peak),
            str(e.trans_cnt),
            util_text,
        )
    return tbl
def _build_console_table(snapshots: list[FunnelSnapshot]) -> Table:
    """Build the per-source console-side throughput table (since console start)."""
    tbl = Table(title='Console Measurements (since console start)', expand=True)
    tbl.add_column('Source', style='cyan', no_wrap=True, min_width=12, max_width=16)
    tbl.add_column('Received\nFrames', justify='right', min_width=10, max_width=12)
    tbl.add_column('Received\nBytes', justify='right', min_width=10, max_width=12)
    tbl.add_column('Average\nFrames/s', justify='right', style='magenta', min_width=10, max_width=12)
    tbl.add_column('Average\nBytes/s', justify='right', style='magenta', min_width=10, max_width=12)
    tbl.add_column('Peak\nFrames/10ms', justify='right', style='magenta', min_width=12, max_width=14)
    tbl.add_column('Peak\nBytes/s', justify='right', style='magenta', min_width=12, max_width=14)
    for s in snapshots:
        tp = s.throughput
        peak_frames = tp.peak_write_frames
        peak_bytes = tp.peak_write_bytes
        window_ms = tp.peak_window_ms
        # Peak bytes/s is extrapolated from the peak measurement window.
        if peak_frames > 0 and window_ms > 0:
            peak_bps_cell = format_throughput(peak_bytes * 1000 / window_ms)
        else:
            peak_bps_cell = '-'
        tbl.add_row(
            resolve_source_name(s.source),
            _fmt_frames(s.received.frames),
            format_bytes(s.received.bytes) if s.received.bytes > 0 else '-',
            f'{tp.throughput_fps:.0f}' if tp.throughput_fps > 0 else '-',
            format_throughput(tp.throughput_bps) if tp.throughput_bps > 0 else '-',
            f'{peak_frames}' if peak_frames > 0 else '-',
            peak_bps_cell,
        )
    return tbl
class StatsScreen(ModalScreen):
    """Modal overlay showing per-source frame statistics with live refresh."""

    DEFAULT_CSS = """
    StatsScreen {
        align: center middle;
    }
    #stats-container {
        width: 90%;
        max-width: 140;
        height: auto;
        max-height: 80%;
        overflow-y: auto;
        background: $surface;
        padding: 1 2;
        border: thick $accent;
    }
    #stats-container > Static {
        height: auto;
    }
    """

    BINDINGS = [
        Binding('escape', 'dismiss', 'Close'),
        Binding('d', 'dismiss', 'Close'),
    ]

    def __init__(self, start_time: float) -> None:
        """Remember the console start time for later throughput context."""
        super().__init__()
        self._start_time = start_time

    def _get_app(self) -> BLELogApp:
        """Typed accessor for the owning application."""
        return self.app  # type: ignore[return-value]

    def compose(self) -> ComposeResult:
        """Lay out firmware/console table placeholders inside a scrollable box."""
        with Vertical(id='stats-container'):
            yield Static(id='firmware-table')
            yield Static(id='console-table')
            yield Static('[dim]Press Escape to return — refreshes every 1s[/dim]')

    def on_mount(self) -> None:
        """Render immediately, then keep refreshing on a fixed interval."""
        self._refresh_table()
        self.set_interval(REFRESH_INTERVAL_SEC, self._refresh_table)

    def _refresh_table(self) -> None:
        """Pull the latest snapshots from the app and re-render both tables."""
        snapshots = self._get_app().funnel_snapshots
        firmware_widget = self.query_one('#firmware-table', Static)
        console_widget = self.query_one('#console-table', Static)
        if snapshots:
            firmware_widget.update(_build_firmware_table(snapshots))
            console_widget.update(_build_console_table(snapshots))
        else:
            firmware_widget.update('No data received yet.\n\nPress Escape to return.')
            console_widget.update('')
class BufUtilScreen(ModalScreen):
    """Modal overlay showing buffer utilization, refreshed on an interval."""

    DEFAULT_CSS = """
    BufUtilScreen {
        align: center middle;
    }
    #buf-util-container {
        width: 80%;
        max-width: 100;
        height: auto;
        max-height: 60%;
        overflow-y: auto;
        background: $surface;
        padding: 1 2;
        border: thick $accent;
    }
    #buf-util-container > Static {
        height: auto;
    }
    """

    BINDINGS = [
        Binding('escape', 'dismiss', 'Close'),
        Binding('m', 'dismiss', 'Close'),
    ]

    def _get_app(self) -> BLELogApp:
        """Typed accessor for the owning application."""
        return self.app  # type: ignore[return-value]

    def compose(self) -> ComposeResult:
        """Lay out the table placeholder plus a dismissal hint."""
        with Vertical(id='buf-util-container'):
            yield Static(id='buf-util-table')
            yield Static('[dim]Press Escape to return -- refreshes every 1s[/dim]')

    def on_mount(self) -> None:
        """Render immediately, then keep refreshing on a fixed interval."""
        self._refresh_table()
        self.set_interval(REFRESH_INTERVAL_SEC, self._refresh_table)

    def _refresh_table(self) -> None:
        """Pull the latest buffer-utilization entries and re-render the table."""
        snapshot = self._get_app().buf_util_snapshots
        table_widget = self.query_one('#buf-util-table', Static)
        if snapshot:
            table_widget.update(_build_buf_util_table(snapshot))
        else:
            table_widget.update('No buffer utilization data yet.\n\nPress Escape to return.')
@@ -0,0 +1,66 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Status panel widget — docked to bottom, shows live stats.
See Spec Section 11.
"""
from rich.text import Text
from src.backend.models import format_bytes
from src.backend.models import format_throughput
from src.backend.models import FrameStats
from src.backend.models import SyncState
from src.backend.stats import UART_BITS_PER_BYTE
from textual.reactive import reactive
from textual.widget import Widget
def _format_speed(bps: float) -> str:
    """Format a UART line rate for display.

    Divides by UART_BITS_PER_BYTE — presumably converting raw bits/s into
    payload bytes/s before formatting; confirm against stats module.
    """
    byte_rate = bps / UART_BITS_PER_BYTE
    return format_throughput(byte_rate)  # type: ignore[no-any-return]
# Markup color for each sync state in the status line; unknown states fall
# back to 'white' (see StatusPanel.render).
_SYNC_COLORS = {
    SyncState.SEARCHING: 'yellow',
    SyncState.CONFIRMING_SYNC: 'cyan',
    SyncState.SYNCED: 'green',
    SyncState.CONFIRMING_LOSS: 'red',
}
class StatusPanel(Widget):
    """Two-line status bar docked to the bottom of the app (Spec Section 11)."""

    DEFAULT_CSS = """
    StatusPanel {
        dock: bottom;
        height: 3;
        border-top: solid $accent;
        padding: 0 1;
    }
    """

    # Live statistics pushed in by the app; re-render on every assignment.
    stats: reactive[FrameStats] = reactive(FrameStats)
    # Set when the backend stops; switches the panel to a disconnect banner.
    disconnected: reactive[bool] = reactive(False)

    def render(self) -> Text:
        """Render the two status lines from the current reactive state."""
        if self.disconnected:
            line1 = '[bold red]DISCONNECTED[/bold red]'
            line2 = 'Backend stopped — serial connection closed'
            return Text.from_markup(f'{line1}\n{line2}')

        st = self.stats
        sync_color = _SYNC_COLORS.get(st.sync_state, 'white')
        cksum_str = ''
        if st.checksum_algorithm and st.checksum_scope:
            cksum_str = f' | Checksum: {st.checksum_algorithm.value} / {st.checksum_scope.value}'
        line1 = f'Sync: [{sync_color}]{st.sync_state.value}[/{sync_color}]{cksum_str} | Press [bold]h[/bold] for help'

        transport = st.transport
        loss = st.loss
        # Loss turns red once any whole frame has been dropped.
        loss_style = 'red' if loss.total_frames > 0 else 'yellow'
        line2 = (
            f'RX: {format_bytes(transport.rx_bytes)} '
            f'Speed: {_format_speed(transport.bps)} '
            f'Max: {_format_speed(transport.max_bps)} '
            f'Rate: {transport.fps:.0f} fps '
            f'[{loss_style}]Lost: {loss.total_frames} frames, {format_bytes(loss.total_bytes)}[/{loss_style}]'
        )
        return Text.from_markup(f'{line1}\n{line2}')
@@ -0,0 +1,2 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
+40
View File
@@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import struct
from collections.abc import Callable
from src.backend.models import HEADER_FMT
def build_frame_header(payload_len: int, source_code: int, frame_sn: int) -> bytes:
    """Build a 6-byte BLE Log frame header.

    The metadata word carries the source code in its low byte and the
    sequence number in the bytes above it.
    """
    meta = (frame_sn << 8) | (source_code & 0xFF)
    return struct.pack(HEADER_FMT, payload_len, meta)
def build_frame(
    payload: bytes,
    source_code: int,
    frame_sn: int,
    checksum_fn: Callable[[bytes], int],
    checksum_scope_full: bool = True,
) -> bytes:
    """Build a complete BLE Log frame: header + payload + 4-byte LE checksum.

    Args:
        payload: Frame payload bytes (should include 4B os_ts prefix if applicable)
        source_code: BLE Log source code (0-7)
        frame_sn: 24-bit sequence number
        checksum_fn: Function(data: bytes) -> int
        checksum_scope_full: If True, checksum covers header+payload; else header only
    """
    header = build_frame_header(len(payload), source_code, frame_sn)
    covered = header + payload if checksum_scope_full else header
    trailer = struct.pack('<I', checksum_fn(covered))
    return header + payload + trailer
@@ -0,0 +1,57 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.buf_util import BufUtilTracker
class TestRecordAndSnapshot:
    """A single record() is fully reflected in snapshot()."""

    def test_single_entry(self) -> None:
        t = BufUtilTracker()
        t.record(lbm_id=0x00, trans_cnt=4, inflight_peak=3)
        result = t.snapshot()
        assert len(result) == 1
        entry = result[0]
        assert entry.lbm_id == 0x00
        assert entry.pool == 0
        assert entry.index == 0
        assert entry.trans_cnt == 4
        assert entry.inflight_peak == 3
class TestUpdateOverwrites:
    """Re-recording the same lbm_id replaces the stored entry, not appends."""

    def test_same_lbm_id_overwrites(self) -> None:
        t = BufUtilTracker()
        for peak in (1, 3):
            t.record(lbm_id=0x01, trans_cnt=4, inflight_peak=peak)
        result = t.snapshot()
        assert len(result) == 1
        assert result[0].inflight_peak == 3
class TestResetClears:
    """reset() discards every recorded entry."""

    def test_reset_empties_tracker(self) -> None:
        t = BufUtilTracker()
        t.record(lbm_id=0x00, trans_cnt=4, inflight_peak=2)
        t.record(lbm_id=0x10, trans_cnt=4, inflight_peak=1)
        t.reset()
        assert t.snapshot() == []
class TestMultipleLbms:
    """Distinct lbm_ids coexist and snapshot() orders them by (pool, index)."""

    def test_multiple_lbm_ids_coexist(self) -> None:
        t = BufUtilTracker()
        for peak, lbm in enumerate((0x00, 0x01, 0x10, 0x20), start=1):
            t.record(lbm_id=lbm, trans_cnt=4, inflight_peak=peak)
        assert len(t.snapshot()) == 4

    def test_snapshot_sorted_by_pool_then_index(self) -> None:
        t = BufUtilTracker()
        # Recorded deliberately out of order.
        t.record(lbm_id=0x21, trans_cnt=4, inflight_peak=2)
        t.record(lbm_id=0x00, trans_cnt=4, inflight_peak=1)
        t.record(lbm_id=0x20, trans_cnt=4, inflight_peak=4)
        t.record(lbm_id=0x10, trans_cnt=4, inflight_peak=3)
        ordering = [(e.pool, e.index) for e in t.snapshot()]
        assert ordering == [(0, 0), (1, 0), (2, 0), (2, 1)]
@@ -0,0 +1,96 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.checksum import sum_checksum
from src.backend.checksum import xor_checksum
class TestSumChecksum:
    """Byte-sum checksum: plain sum of all bytes, wrapping in u32."""

    def test_empty(self) -> None:
        assert sum_checksum(b'') == 0

    def test_single_byte(self) -> None:
        assert sum_checksum(b'\x42') == 0x42

    def test_multiple_bytes(self) -> None:
        # 0x01 + 0x02 + 0x03 + 0x04 == 0x0A
        assert sum_checksum(b'\x01\x02\x03\x04') == 0x0A

    def test_overflow_wraps_u32(self) -> None:
        # 256 * 0xFF = 65280 — well within u32, no wrap expected here.
        assert sum_checksum(b'\xff' * 256) == 65280
class TestXorChecksum:
    """XOR checksum: XOR of consecutive 4-byte little-endian words,
    zero-padding a partial trailing word. Includes a cross-check against
    the proven ble_log_parser_v2 reference algorithm."""

    def test_empty(self) -> None:
        assert xor_checksum(b'') == 0

    def test_single_word(self) -> None:
        # [0x01, 0x02, 0x03, 0x04] → LE word 0x04030201
        assert xor_checksum(b'\x01\x02\x03\x04') == 0x04030201

    def test_two_words(self) -> None:
        data = b'\x01\x02\x03\x04\x05\x06\x07\x08'
        # word1 = 0x04030201, word2 = 0x08070605
        expected = 0x04030201 ^ 0x08070605
        assert xor_checksum(data) == expected

    def test_unaligned_length(self) -> None:
        """XOR checksum handles non-4-byte-aligned data lengths correctly."""
        # 5 bytes: 1 full word + 1 trailing byte (zero-padded)
        data = b'\x01\x02\x03\x04\x05'
        # word0 = 0x04030201, word1 = 0x00000005 (padded)
        # XOR = 0x04030201 ^ 0x00000005 = 0x04030204
        assert xor_checksum(data) == 0x04030204

    def test_typical_frame_data_produces_valid_result(self) -> None:
        """Verify xor_checksum produces a valid u32 result on typical frame-sized data."""
        # A typical 6-byte header + 10-byte payload
        header = b'\x0a\x00\x00\x01\x00\x00'  # payload_len=10, src=0, sn=256
        payload = b'\x00\x00\x00\x00\x03\x03' + b'\x00' * 4
        data = header + payload
        result = xor_checksum(data)
        assert isinstance(result, int)
        assert 0 <= result < 0x100000000

    def test_matches_ble_log_parser_v2(self) -> None:
        """Verify our implementation matches the proven ble_log_parser_v2 approach.

        Both implementations should produce identical results: simple XOR of
        consecutive 4-byte LE words with zero-padding for partial last word.
        """
        import struct

        def reference_xor(data: bytes) -> int:
            """Reference: ble_log_parser_v2 _validate_xor logic."""
            body_len = len(data)
            if body_len == 0:
                return 0
            checksum_cal = 0
            # Walk the buffer one 4-byte LE word at a time.
            for i in range(0, body_len, 4):
                remaining = body_len - i
                if remaining >= 4:
                    (word,) = struct.unpack_from('<I', data, i)
                else:
                    # Partial trailing word: pad with zero bytes to 4.
                    last_chunk = data[i:]
                    padded_chunk = last_chunk + b'\x00' * (4 - remaining)
                    (word,) = struct.unpack('<I', padded_chunk)
                if i == 0:
                    checksum_cal = word
                else:
                    checksum_cal ^= word
            return checksum_cal & 0xFFFFFFFF

        # Test various data sizes
        test_vectors = [
            b'',
            b'\x01',
            b'\x01\x02\x03',
            b'\x01\x02\x03\x04',
            b'\x01\x02\x03\x04\x05',
            b'\xff' * 16,
            b'\x0a\x00\x00\x01\x00\x00' + b'\x00' * 10,  # typical frame
        ]
        for data in test_vectors:
            assert xor_checksum(data) == reference_xor(data), f'Mismatch for data length {len(data)}'
@@ -0,0 +1,76 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.firmware_loss import FirmwareLossTracker
class TestFirmwareLossTracker:
    """Delta tracking of absolute firmware loss counters, per source."""

    def test_first_report_zero_delta(self) -> None:
        tracker = FirmwareLossTracker()
        delta_f, delta_b = tracker.record(src_code=1, lost_frames=1000, lost_bytes=5000)
        # First report establishes the baseline — no delta yet.
        assert (delta_f, delta_b) == (0, 0)
        summary = tracker.totals()
        assert summary.total_frames == 1000
        assert summary.total_bytes == 5000
        assert tracker.per_source_totals()[1] == (1000, 5000)

    def test_incremental_delta(self) -> None:
        tracker = FirmwareLossTracker()
        tracker.record(1, 0, 0)
        assert tracker.record(1, 5, 200) == (5, 200)
        assert tracker.record(1, 8, 320) == (3, 120)

    def test_multi_source(self) -> None:
        tracker = FirmwareLossTracker()
        tracker.record(1, 100, 1000)
        tracker.record(2, 50, 500)
        tracker.record(1, 105, 1200)
        tracker.record(2, 52, 580)
        summary = tracker.totals()
        assert summary.total_frames == 157
        assert summary.total_bytes == 1780

    def test_counter_reset(self) -> None:
        # A counter going backwards means the chip rebooted: no negative delta.
        tracker = FirmwareLossTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 100, 4000)
        delta_f, _ = tracker.record(1, 30, 1200)
        assert delta_f == 0
        summary = tracker.totals()
        assert summary.total_frames == 130
        assert summary.total_bytes == 5200

    def test_normal_after_reset(self) -> None:
        tracker = FirmwareLossTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 100, 4000)
        tracker.record(1, 30, 1200)
        assert tracker.record(1, 50, 2000) == (20, 800)

    def test_reset_clears_everything(self) -> None:
        tracker = FirmwareLossTracker()
        tracker.record(1, 10, 100)
        tracker.reset()
        assert tracker.totals().total_frames == 0
        assert tracker.totals().total_bytes == 0

    def test_reset_baselines_preserves_accumulators(self) -> None:
        tracker = FirmwareLossTracker()
        tracker.record(1, 10, 100)
        d_frames, d_bytes = tracker.record(1, 15, 150)
        assert d_frames == 5
        tracker.reset_baselines()
        # Next report is treated as new baseline (no delta)
        d_frames, d_bytes = tracker.record(1, 20, 200)
        assert d_frames == 0  # baseline re-established
        # Accumulators preserved from before
        assert tracker.totals().total_frames == 15  # initial absolute + pre-reset delta
@@ -0,0 +1,86 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.firmware_written import FirmwareWrittenTracker
class TestFirmwareWrittenTracker:
    """Delta tracking of absolute firmware written counters, per source."""

    def test_first_report_zero_delta(self) -> None:
        tracker = FirmwareWrittenTracker()
        delta = tracker.record(src_code=1, written_frames=1000, written_bytes=5000)
        # First report establishes the baseline — no delta yet.
        assert delta == (0, 0)
        assert tracker.totals()[1] == (1000, 5000)

    def test_incremental_delta(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 0, 0)
        assert tracker.record(1, 5, 200) == (5, 200)
        assert tracker.record(1, 8, 320) == (3, 120)

    def test_multi_source(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 100, 1000)
        tracker.record(2, 50, 500)
        tracker.record(1, 105, 1200)
        tracker.record(2, 52, 580)
        summary = tracker.totals()
        assert summary[1] == (105, 1200)
        assert summary[2] == (52, 580)

    def test_counter_reset(self) -> None:
        # A counter going backwards means the chip rebooted: no negative delta.
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 100, 4000)
        assert tracker.record(1, 30, 1200) == (0, 0)
        assert tracker.totals()[1] == (130, 5200)

    def test_normal_after_reset(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 100, 4000)
        tracker.record(1, 30, 1200)
        assert tracker.record(1, 50, 2000) == (20, 800)

    def test_reset_clears_all(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 10, 100)
        tracker.record(1, 20, 200)
        tracker.reset()
        assert tracker.totals() == {}
        # First record after a full reset is a fresh baseline.
        assert tracker.record(1, 50, 500) == (0, 0)

    def test_reset_baselines_preserves_accum(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 100, 4000)
        tracker.reset_baselines()
        assert tracker.record(1, 30, 1200) == (0, 0)
        assert tracker.totals()[1] == (100, 4000)

    def test_reset_baselines_then_incremental(self) -> None:
        tracker = FirmwareWrittenTracker()
        tracker.record(1, 0, 0)
        tracker.record(1, 50, 2000)
        tracker.reset_baselines()
        tracker.record(1, 10, 400)
        assert tracker.record(1, 25, 1000) == (15, 600)
        assert tracker.totals()[1] == (65, 2600)
@@ -0,0 +1,237 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.checksum import sum_checksum
from src.backend.checksum import xor_checksum
from src.backend.frame_parser import FrameParser
from src.backend.models import ChecksumAlgorithm
from src.backend.models import ChecksumScope
from src.backend.models import SyncState
from tests.helpers import build_frame
def _make_sum_frame(payload: bytes, src: int, sn: int) -> bytes:
    """Frame whose SUM checksum covers header + payload."""
    frame: bytes = build_frame(payload, src, sn, sum_checksum, checksum_scope_full=True)
    return frame
def _make_xor_frame(payload: bytes, src: int, sn: int) -> bytes:
    """Frame whose XOR checksum covers header + payload."""
    frame: bytes = build_frame(payload, src, sn, xor_checksum, checksum_scope_full=True)
    return frame
class TestFrameParserStateTransitions:
    """Sync state machine: SEARCHING → CONFIRMING_SYNC → SYNCED → CONFIRMING_LOSS."""

    def test_initial_state_is_searching(self) -> None:
        assert FrameParser().sync_state == SyncState.SEARCHING

    def test_three_valid_frames_reach_synced(self) -> None:
        """N=3 consecutive valid frames should transition SEARCHING -> CONFIRMING -> SYNCED."""
        parser = FrameParser()
        stream = b''.join(_make_sum_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED

    def test_garbage_stays_searching(self) -> None:
        parser = FrameParser()
        parser.feed(b'\xde\xad\xbe\xef' * 100)
        assert parser.sync_state == SyncState.SEARCHING

    def test_mixed_garbage_then_valid_frames(self) -> None:
        parser = FrameParser()
        stream = b''.join(_make_sum_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        parser.feed(b'\xff' * 50 + stream)
        assert parser.sync_state == SyncState.SYNCED

    def test_checksum_failure_in_synced_triggers_confirming_loss(self) -> None:
        parser = FrameParser()
        payload = bytes(range(0xA0, 0xA8))
        parser.feed(b''.join(_make_sum_frame(payload, src=1, sn=i) for i in range(3)))
        assert parser.sync_state == SyncState.SYNCED
        # Flip the last checksum byte to corrupt the frame.
        damaged = bytearray(_make_sum_frame(payload, src=1, sn=99))
        damaged[-1] ^= 0xFF
        parser.feed(bytes(damaged))
        assert parser.sync_state == SyncState.CONFIRMING_LOSS

    def test_confirming_loss_recovers_to_synced(self) -> None:
        """After corrupt bytes, enough valid frames should re-establish SYNCED."""
        parser = FrameParser()
        payload = bytes(range(0xA0, 0xA8))
        parser.feed(b''.join(_make_sum_frame(payload, src=1, sn=i) for i in range(3)))
        assert parser.sync_state == SyncState.SYNCED
        recovery = b''.join(_make_sum_frame(payload, src=1, sn=i) for i in range(3, 6))
        parser.feed(b'\xfe' * 20 + recovery)
        assert parser.sync_state == SyncState.SYNCED

    def test_confirming_loss_to_searching_after_m_plus_1_failures(self) -> None:
        from src.backend.frame_parser import LOSS_TOLERANCE

        parser = FrameParser()
        payload = bytes(range(0xA0, 0xA8))
        parser.feed(b''.join(_make_sum_frame(payload, src=1, sn=i) for i in range(3)))
        assert parser.sync_state == SyncState.SYNCED
        # More garbage than the loss tolerance allows drops back to SEARCHING.
        parser.feed(b'\xfe' * (LOSS_TOLERANCE + 20))
        assert parser.sync_state == SyncState.SEARCHING

    def test_confirming_sync_rejects_mismatched_mode(self) -> None:
        """Review Correction #3: CONFIRMING_SYNC must verify same checksum mode."""
        parser = FrameParser()
        payload = b'\x00' * 8
        # Feed one SUM frame to enter CONFIRMING_SYNC
        parser.feed(_make_sum_frame(payload, src=1, sn=0))
        assert parser.sync_state == SyncState.CONFIRMING_SYNC
        # Feed an XOR frame — mode mismatch should restart confirmation
        parser.feed(_make_xor_frame(payload, src=1, sn=1))
        # Should still be in CONFIRMING_SYNC (restarted with new mode), not SYNCED
        assert parser.sync_state == SyncState.CONFIRMING_SYNC
class TestFrameParserOutput:
    """What feed() returns: parsed frames and extracted ASCII lines."""

    def test_parsed_frames_returned(self) -> None:
        parser = FrameParser()
        payload = b'\x00\x00\x00\x00\xaa\xbb'  # 4B os_ts + 2B data
        stream = b''.join(_make_sum_frame(payload, src=2, sn=i) for i in range(3))
        results = parser.feed(stream)
        frames = [item for item in results if hasattr(item, 'source_code')]
        assert len(frames) == 3
        assert all(frame.source_code == 2 for frame in frames)

    def test_ascii_lines_extracted_from_non_frame_data(self) -> None:
        parser = FrameParser()
        # In SEARCHING state, non-frame data should be collected as ASCII
        results = parser.feed(b'Hello world\n')
        text_items = [item for item in results if isinstance(item, str)]
        assert any('Hello world' in line for line in text_items)
def _make_sum_header_only_frame(payload: bytes, src: int, sn: int) -> bytes:
    """Frame whose SUM checksum covers the header only."""
    frame: bytes = build_frame(payload, src, sn, sum_checksum, checksum_scope_full=False)
    return frame
def _make_xor_header_only_frame(payload: bytes, src: int, sn: int) -> bytes:
    """Frame whose XOR checksum covers the header only."""
    frame: bytes = build_frame(payload, src, sn, xor_checksum, checksum_scope_full=False)
    return frame
class TestChecksumAutoDetection:
    """Parser must auto-detect all four checksum algorithm/scope combinations."""

    def test_detects_sum_full(self) -> None:
        parser = FrameParser()
        stream = b''.join(_make_sum_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED
        assert parser.checksum_mode is not None
        assert parser.checksum_mode.algorithm == ChecksumAlgorithm.SUM
        assert parser.checksum_mode.scope == ChecksumScope.FULL

    def test_detects_xor_full(self) -> None:
        parser = FrameParser()
        stream = b''.join(_make_xor_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED
        assert parser.checksum_mode is not None
        assert parser.checksum_mode.algorithm == ChecksumAlgorithm.XOR

    def test_detects_sum_header_only(self) -> None:
        parser = FrameParser()
        payload = b'\x01\x02\x03\x04\xaa\xbb\xcc\xdd'
        stream = b''.join(_make_sum_header_only_frame(payload, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED
        assert parser.checksum_mode is not None
        assert parser.checksum_mode.algorithm == ChecksumAlgorithm.SUM
        assert parser.checksum_mode.scope == ChecksumScope.HEADER_ONLY

    def test_detects_xor_header_only(self) -> None:
        parser = FrameParser()
        payload = b'\x01\x02\x03\x04\xaa\xbb\xcc\xdd'
        stream = b''.join(_make_xor_header_only_frame(payload, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED
        assert parser.checksum_mode is not None
        assert parser.checksum_mode.algorithm == ChecksumAlgorithm.XOR
        assert parser.checksum_mode.scope == ChecksumScope.HEADER_ONLY
class TestBoundedBuffer:
    """Remainder buffer must stay bounded and recover after overflow."""

    def test_remainder_buffer_bounded(self) -> None:
        parser = FrameParser()
        # Feed more than MAX_REMAINDER_SIZE of garbage
        parser.feed(b'\xfe' * (131072 + 1))
        # Buffer should have been reset, state should be SEARCHING
        assert parser.sync_state == SyncState.SEARCHING
        # Verify parser can still sync after overflow (buffer was cleared)
        stream = b''.join(_make_sum_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        parser.feed(stream)
        assert parser.sync_state == SyncState.SYNCED

    def test_buffer_overflow_emits_warning(self) -> None:
        """Review Correction #2: buffer overflow must log warning."""
        parser = FrameParser()
        results = parser.feed(b'\xfe' * (131072 + 1))
        warning_lines = [item for item in results if isinstance(item, str) and 'WARN' in item]
        assert len(warning_lines) >= 1
class TestFrameSplitAcrossChunks:
    """Partial frames split across feed() calls must still sync (Review #7)."""

    def test_frame_split_across_chunks(self) -> None:
        parser = FrameParser()
        stream = b''.join(_make_sum_frame(b'\x00' * 8, src=1, sn=i) for i in range(3))
        # Split in the middle of the second frame
        split_at = len(stream) // 2
        for chunk in (stream[:split_at], stream[split_at:]):
            parser.feed(chunk)
        assert parser.sync_state == SyncState.SYNCED
@@ -0,0 +1,107 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import struct
from src.backend.internal_decoder import decode_internal_frame
from src.backend.models import InternalSource
def _make_internal_payload(os_ts: int, int_src: int, sub_payload: bytes) -> bytes:
"""Build a full INTERNAL frame payload (os_ts + int_src_code + sub_payload)."""
return struct.pack('<I', os_ts) + bytes([int_src]) + sub_payload
class TestInitDone:
    """int_src=0 decodes as INIT_DONE with version and timestamp."""

    def test_decode_init_done(self) -> None:
        decoded = decode_internal_frame(_make_internal_payload(os_ts=1234, int_src=0, sub_payload=b'\x03'))
        assert decoded is not None
        assert decoded['int_src'] == InternalSource.INIT_DONE
        assert decoded['version'] == 3
        assert decoded['os_ts_ms'] == 1234
class TestInfo:
    """int_src=3 decodes as INFO with version."""

    def test_decode_info(self) -> None:
        decoded = decode_internal_frame(_make_internal_payload(os_ts=5678, int_src=3, sub_payload=b'\x03'))
        assert decoded is not None
        assert decoded['int_src'] == InternalSource.INFO
        assert decoded['version'] == 3
class TestEnhStat:
    """int_src=2 decodes as ENH_STAT with written/lost frame and byte counters."""

    def test_decode_enh_stat(self) -> None:
        # src=2, written=100 frames, lost=5 frames, written=4096 bytes, lost=256 bytes
        sub = struct.pack('<BIIII', 2, 100, 5, 4096, 256)
        decoded = decode_internal_frame(_make_internal_payload(os_ts=9999, int_src=2, sub_payload=sub))
        assert decoded is not None
        assert decoded['int_src'] == InternalSource.ENH_STAT
        assert decoded['src_code'] == 2
        assert decoded['written_frame_cnt'] == 100
        assert decoded['lost_frame_cnt'] == 5
        assert decoded['written_bytes_cnt'] == 4096
        assert decoded['lost_bytes_cnt'] == 256
class TestFlush:
    """int_src=4 decodes as FLUSH with version."""

    def test_decode_flush(self) -> None:
        decoded = decode_internal_frame(_make_internal_payload(os_ts=0, int_src=4, sub_payload=b'\x03'))
        assert decoded is not None
        assert decoded['int_src'] == InternalSource.FLUSH
        assert decoded['version'] == 3
class TestTs:
    """int_src=1 (timestamp sync) is deliberately ignored by the decoder."""

    def test_ts_ignored(self) -> None:
        sub = struct.pack('<BIII', 1, 100, 200, 300)  # io_level, lc_ts, esp_ts, os_ts
        decoded = decode_internal_frame(_make_internal_payload(os_ts=0, int_src=1, sub_payload=sub))
        assert decoded is None  # TS is ignored
class TestUnknown:
    """Unrecognized int_src codes decode to None rather than raising."""

    def test_unknown_int_src(self) -> None:
        decoded = decode_internal_frame(_make_internal_payload(os_ts=0, int_src=99, sub_payload=b'\x00'))
        assert decoded is None
class TestBufUtil:
    """int_src=5 decodes as BUF_UTIL: lbm_id splits into pool (high nibble) and index."""

    def test_decode_buf_util(self) -> None:
        # lbm_id=0x01 (pool=0, index=1), trans_cnt=4, inflight_peak=3
        decoded = decode_internal_frame(
            _make_internal_payload(os_ts=7777, int_src=5, sub_payload=b'\x01\x04\x03')
        )
        assert decoded is not None
        assert decoded['int_src'] == InternalSource.BUF_UTIL
        assert decoded['lbm_id'] == 0x01
        assert decoded['pool'] == 0
        assert decoded['index'] == 1
        assert decoded['trans_cnt'] == 4
        assert decoded['inflight_peak'] == 3
        assert decoded['os_ts_ms'] == 7777

    def test_buf_util_pool_index_extraction(self) -> None:
        # lbm_id=0x21 -> pool=2 (LL), index=1 (ll_hci)
        decoded = decode_internal_frame(
            _make_internal_payload(os_ts=0, int_src=5, sub_payload=b'\x21\x04\x02')
        )
        assert decoded is not None
        assert decoded['pool'] == 2
        assert decoded['index'] == 1

    def test_buf_util_too_short(self) -> None:
        # Only 2 bytes of sub_payload (need 3 after int_src_code)
        decoded = decode_internal_frame(
            _make_internal_payload(os_ts=0, int_src=5, sub_payload=b'\x00\x04')
        )
        assert decoded is None
class TestMalformed:
    """Payloads too short to carry os_ts + int_src decode to None."""

    def test_too_short_payload(self) -> None:
        assert decode_internal_frame(b'\x00\x00\x00') is None
@@ -0,0 +1,259 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from unittest.mock import MagicMock
from unittest.mock import patch
from src.backend.models import LaunchConfig
from src.frontend.launch_screen import BAUD_RATES
from src.frontend.launch_screen import DEFAULT_BAUD_RATE
from src.frontend.launch_screen import LaunchScreen
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------
class TestBaudRateConstants:
    """Sanity checks on the BAUD_RATES list and its default."""

    def test_baud_rates_is_list_of_ints(self) -> None:
        assert isinstance(BAUD_RATES, list)
        assert all(isinstance(rate, int) for rate in BAUD_RATES)

    def test_baud_rates_not_empty(self) -> None:
        assert BAUD_RATES != []

    def test_baud_rates_ascending(self) -> None:
        assert BAUD_RATES == sorted(BAUD_RATES)

    def test_default_baud_rate_in_list(self) -> None:
        assert DEFAULT_BAUD_RATE in BAUD_RATES

    def test_default_baud_rate_value(self) -> None:
        assert DEFAULT_BAUD_RATE == 3_000_000

    def test_common_rates_present(self) -> None:
        """Standard UART baud rates used by ESP-IDF should be available."""
        for rate in (115200, 921600):
            assert rate in BAUD_RATES
# ---------------------------------------------------------------------------
# LaunchConfig dataclass
# ---------------------------------------------------------------------------
class TestLaunchConfig:
    """LaunchConfig dataclass round-trips its three required fields."""

    def test_create_with_required_fields(self) -> None:
        config = LaunchConfig(port='/dev/ttyUSB0', baudrate=3000000, log_dir=Path('/tmp'))
        assert config.port == '/dev/ttyUSB0'
        assert config.baudrate == 3000000
        assert config.log_dir == Path('/tmp')

    def test_different_ports(self) -> None:
        for candidate in ('/dev/ttyUSB0', '/dev/ttyACM0', 'COM3', '/dev/tty.usbserial-1420'):
            config = LaunchConfig(port=candidate, baudrate=115200, log_dir=Path('.'))
            assert config.port == candidate

    def test_various_baud_rates(self) -> None:
        for rate in BAUD_RATES:
            config = LaunchConfig(port='/dev/ttyUSB0', baudrate=rate, log_dir=Path('.'))
            assert config.baudrate == rate

    def test_log_dir_is_path(self) -> None:
        config = LaunchConfig(port='COM1', baudrate=115200, log_dir=Path('/var/log'))
        assert isinstance(config.log_dir, Path)
# ---------------------------------------------------------------------------
# LaunchScreen instantiation
# ---------------------------------------------------------------------------
class TestLaunchScreenInit:
    """LaunchScreen construction defaults and class-level metadata."""

    def test_default_log_dir_is_cwd(self) -> None:
        assert LaunchScreen()._default_log_dir == Path.cwd()

    def test_custom_log_dir(self) -> None:
        target = Path('/tmp/my_logs')
        assert LaunchScreen(default_log_dir=target)._default_log_dir == target

    def test_none_log_dir_falls_back_to_cwd(self) -> None:
        assert LaunchScreen(default_log_dir=None)._default_log_dir == Path.cwd()

    def test_is_screen_subclass(self) -> None:
        from textual.screen import Screen
        assert issubclass(LaunchScreen, Screen)

    def test_bindings_include_quit(self) -> None:
        """LaunchScreen should have a quit binding on 'q'."""
        bound_keys = [binding.key for binding in LaunchScreen.BINDINGS]
        assert 'q' in bound_keys
# ---------------------------------------------------------------------------
# refresh_ports — unit-level (mocked widgets)
# ---------------------------------------------------------------------------
class TestRefreshPorts:
    """Unit tests for refresh_ports() with the Select widget mocked out."""

    @patch('src.frontend.launch_screen.list_serial_ports')
    def test_refresh_updates_select_with_ports(self, mock_lsp: MagicMock) -> None:
        """refresh_ports should scan ports and update the Select widget."""
        mock_lsp.return_value = ['/dev/ttyUSB0', '/dev/ttyUSB1']
        screen = LaunchScreen()
        select_widget = MagicMock()
        screen.query_one = MagicMock(return_value=select_widget)  # type: ignore[method-assign]

        screen.refresh_ports()

        mock_lsp.assert_called_once()
        expected_options = [('/dev/ttyUSB0', '/dev/ttyUSB0'), ('/dev/ttyUSB1', '/dev/ttyUSB1')]
        select_widget.set_options.assert_called_once_with(expected_options)
        # The first discovered port becomes the pre-selected value.
        assert select_widget.value == '/dev/ttyUSB0'

    @patch('src.frontend.launch_screen.list_serial_ports')
    def test_refresh_empty_ports_no_value_set(self, mock_lsp: MagicMock) -> None:
        """When no ports found, set_options is called with empty list and value is not set."""
        mock_lsp.return_value = []
        screen = LaunchScreen()
        select_widget = MagicMock()
        screen.query_one = MagicMock(return_value=select_widget)  # type: ignore[method-assign]

        screen.refresh_ports()

        select_widget.set_options.assert_called_once_with([])
        # value must NOT have been reassigned when the ports list is empty
        assert select_widget.value != '/dev/ttyUSB0'
# ---------------------------------------------------------------------------
# connect — unit-level (mocked widgets)
# ---------------------------------------------------------------------------
class TestConnect:
    """Unit tests for connect() with all queried widgets mocked."""

    def _make_screen_with_mocks(
        self,
        port_value: object,
        baud_value: int = 3000000,
        dir_value: str = '/tmp/logs',
    ) -> tuple[LaunchScreen, MagicMock, MagicMock, MagicMock]:
        """Helper: create a LaunchScreen with mocked query_one results."""
        screen = LaunchScreen()
        port_widget = MagicMock()
        port_widget.value = port_value
        baud_widget = MagicMock()
        baud_widget.value = baud_value
        dir_widget = MagicMock()
        dir_widget.value = dir_value
        # connect() locates its widgets by CSS selector; serve them from a table.
        widget_map = {
            '#port-select': port_widget,
            '#baud-select': baud_widget,
            '#dir-input': dir_widget,
        }

        def fake_query_one(selector: str, widget_type: type = object) -> MagicMock:
            if selector not in widget_map:
                raise ValueError(f'Unexpected selector: {selector}')
            return widget_map[selector]

        screen.query_one = fake_query_one  # type: ignore[assignment]
        screen.dismiss = MagicMock()  # type: ignore[method-assign]
        screen.notify = MagicMock()  # type: ignore[method-assign]
        return screen, port_widget, baud_widget, dir_widget

    def test_connect_with_valid_port(self) -> None:
        """connect() should dismiss with LaunchConfig when port is selected."""
        screen, _, _, _ = self._make_screen_with_mocks(
            port_value='/dev/ttyUSB0',
            baud_value=921600,
            dir_value='/tmp/logs',
        )

        screen.connect()

        screen.dismiss.assert_called_once()
        config = screen.dismiss.call_args[0][0]
        assert isinstance(config, LaunchConfig)
        assert config.port == '/dev/ttyUSB0'
        assert config.baudrate == 921600
        assert config.log_dir == Path('/tmp/logs')

    def test_connect_with_blank_port_shows_error(self) -> None:
        """connect() should notify error and NOT dismiss when port is BLANK."""
        from textual.widgets import Select

        screen, _, _, _ = self._make_screen_with_mocks(port_value=Select.BLANK)
        screen.connect()
        screen.notify.assert_called_once_with('Please select a serial port', severity='error')
        screen.dismiss.assert_not_called()

    def test_connect_log_dir_is_path_object(self) -> None:
        """The log_dir in LaunchConfig should be a Path, not a string."""
        screen, _, _, _ = self._make_screen_with_mocks(port_value='COM3', dir_value='/home/user/logs')
        screen.connect()
        config = screen.dismiss.call_args[0][0]
        assert isinstance(config.log_dir, Path)
        assert str(config.log_dir) == '/home/user/logs'
# ---------------------------------------------------------------------------
# action_quit
# ---------------------------------------------------------------------------
class TestActionQuit:
    """Keyboard quit action behavior."""

    def test_action_quit_dismisses_with_none(self) -> None:
        """action_quit dismisses the screen with a None result (no connection)."""
        screen = LaunchScreen()
        screen.dismiss = MagicMock()  # type: ignore[method-assign]

        screen.action_quit()

        screen.dismiss.assert_called_once_with(None)
# ---------------------------------------------------------------------------
# compose — structural checks (no App context required)
# ---------------------------------------------------------------------------
class TestComposeMethod:
    """Structural checks on compose()/DEFAULT_CSS that need no running App."""

    def test_compose_is_defined(self) -> None:
        """LaunchScreen.compose should be a callable method."""
        compose = getattr(LaunchScreen, 'compose', None)
        assert callable(compose)

    def test_default_css_contains_expected_ids(self) -> None:
        """DEFAULT_CSS should reference the widget IDs used in compose."""
        expected_ids = (
            'launch-container',
            'launch-title',
            'port-select',
            'refresh-btn',
            'dir-input',
            'browse-btn',
            'connect-btn',
            'no-ports-label',
        )
        css = LaunchScreen.DEFAULT_CSS
        for widget_id in expected_ids:
            assert widget_id in css, f'Missing CSS rule for #{widget_id}'

    def test_baud_options_built_correctly(self) -> None:
        """Verify the baud option tuples match the expected (label, value) shape."""
        baud_options = [(str(rate), rate) for rate in BAUD_RATES]
        assert len(baud_options) == len(BAUD_RATES)
        for label, value in baud_options:
            assert isinstance(label, str)
            assert isinstance(value, int)
@@ -0,0 +1,34 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.models import FrameByteCount
from src.backend.models import FunnelSnapshot
from src.backend.models import LossType
from src.backend.models import ThroughputInfo
def test_frame_byte_count() -> None:
    """FrameByteCount exposes exactly the frame and byte counts it was given."""
    counter = FrameByteCount(frames=100, bytes=5000)
    assert counter.frames == 100
    assert counter.bytes == 5000
def test_loss_type_enum() -> None:
    """LossType members compare equal to their string values."""
    expected_values = {LossType.BUFFER: 'buffer', LossType.TRANSPORT: 'transport'}
    for member, value in expected_values.items():
        assert member == value
def test_funnel_snapshot_structure() -> None:
    """A FunnelSnapshot can be built from zeroed counters and exposes them."""
    empty = FrameByteCount(frames=0, bytes=0)
    throughput = ThroughputInfo(
        throughput_fps=0.0, throughput_bps=0.0, peak_write_frames=0, peak_write_bytes=0, peak_window_ms=10
    )
    snapshot = FunnelSnapshot(
        source=0,
        produced=empty,
        written=empty,
        received=empty,
        buffer_loss=empty,
        transport_loss=empty,
        throughput=throughput,
    )
    assert snapshot.produced.frames == 0
@@ -0,0 +1,98 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.peak_burst import WRITE_RATE_WINDOW_MS
from src.backend.stats.peak_burst import PeakBurstTracker
from src.backend.stats.peak_burst import _ts_delta_ms
_SRC = 1
_SRC_B = 2
class TestTsDeltaMs:
    """Unit tests for _ts_delta_ms, the wraparound-aware timestamp difference helper."""
    def test_normal_forward(self) -> None:
        assert _ts_delta_ms(1100, 1000) == 100
    def test_zero_delta(self) -> None:
        assert _ts_delta_ms(5000, 5000) == 0
    def test_wraparound(self) -> None:
        # 0xFFFF_FF00 -> 50 crosses the 32-bit boundary: 0x100 (256) + 50 = 306 ms.
        assert _ts_delta_ms(50, 0xFFFF_FF00) == 306
    def test_backward_jump_returns_negative(self) -> None:
        # An apparent jump of ~half the 32-bit range is treated as backward motion;
        # the helper reports a negative value (-1 here) rather than a huge delta.
        assert _ts_delta_ms(1000, 0x8000_0100) == -1
class TestPeakBurstTracker:
    """Windowed peak accounting over (timestamp_ms, size, source) records."""
    def test_single_frame(self) -> None:
        """One recorded frame yields a per-source peak of exactly one frame."""
        t = PeakBurstTracker()
        t.record(1000, 100, _SRC)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 1
    def test_two_frames_same_ms(self) -> None:
        """Frames in the same millisecond share one window: counts and bytes add up."""
        t = PeakBurstTracker()
        t.record(1000, 50, _SRC)
        t.record(1000, 70, _SRC)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 2
        assert snap.per_source[_SRC].peak_bytes == 120  # 50 + 70
    def test_far_apart_are_separate_windows(self) -> None:
        """Two frames exactly WRITE_RATE_WINDOW_MS apart land in separate windows."""
        t = PeakBurstTracker()
        t.record(100, 60, _SRC)
        t.record(100 + WRITE_RATE_WINDOW_MS, 40, _SRC)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 1
    def test_multi_source_independent_peaks(self) -> None:
        """Each source code keeps its own peak; bursts do not bleed across sources."""
        t = PeakBurstTracker()
        for _ in range(5):
            t.record(1000, 30, _SRC)
        t.record(1000, 30, _SRC_B)
        for _ in range(4):
            t.record(2000, 30, _SRC_B)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 5
        assert snap.per_source[_SRC_B].peak_frames == 4
    def test_max_persists_across_harvests(self) -> None:
        """max_per_source keeps the all-time peak even after lower-activity periods."""
        t = PeakBurstTracker()
        for _ in range(3):
            t.record(1000, 100, _SRC)
        snap1 = t.harvest()
        assert snap1.max_per_source is not None
        assert snap1.max_per_source[_SRC].peak_frames == 3
        # Only 1 frame this period; the all-time max must remain 3.
        t.record(5000, 200, _SRC)
        snap2 = t.harvest()
        assert snap2.max_per_source is not None
        assert snap2.max_per_source[_SRC].peak_frames == 3
    def test_harvest_resets_current_period(self) -> None:
        """harvest() clears per-period data: a second harvest reports no sources."""
        t = PeakBurstTracker()
        t.record(1000, 100, _SRC)
        t.harvest()
        snap = t.harvest()
        assert snap.per_source is None
    def test_backward_timestamp_resets_window(self) -> None:
        """A backward timestamp jump starts a fresh window instead of merging frames."""
        t = PeakBurstTracker()
        t.record(5000, 80, _SRC)
        t.record(5000, 80, _SRC)
        # Backward jump -> new window with 1 frame; peak stays at the 2-frame burst.
        t.record(100, 80, _SRC)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 2
    def test_wraparound_within_window(self) -> None:
        """Timestamps spanning the 32-bit wrap (0xFFFF_FFFF -> 0) stay in one window."""
        t = PeakBurstTracker()
        t.record(0xFFFF_FFFF, 50, _SRC)
        t.record(0, 50, _SRC)
        snap = t.harvest()
        assert snap.per_source is not None
        assert snap.per_source[_SRC].peak_frames == 2
@@ -0,0 +1,147 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""Reset propagation matrix tests.
Verifies that reset("init") and reset("flush") dispatch correctly per the spec:
| Group | Components | INIT_DONE | FLUSH |
|------------------|-----------------------------------------|--------------|------------------------------------|
| SN-coupled | SNGapTracker | full reset | full reset |
| ENH_STAT-coupled | FirmwareLossTracker, FirmwareWritten | full reset | reset baselines, keep accumulators |
| Console-local | TransportMetrics, PeakBurstTracker, | preserve | preserve |
| | per_source_received, throughput cache | | |
"""
from src.backend.stats import StatsAccumulator
class TestResetPropagation:
    """Verify reset("init") and reset("flush") dispatch correctly per the spec."""
    def _populate(self, stats: StatsAccumulator) -> None:
        """Feed data into all components so we can verify what gets reset."""
        # Transport (console-local)
        stats.record_bytes(1000)
        stats.record_frame(100, 1, 10) # frame_size=100, src=1, sn=10
        stats.record_frame(100, 1, 11)
        # Peak burst (console-local)
        stats.record_frame_ts(1000, 100, 1)
        # ENH_STAT (firmware-coupled): baseline report, then one delta report
        stats.record_enh_stat(
            src_code=1, written_frames=100, lost_frames=5, written_bytes=5000, lost_bytes=250, baudrate=3_000_000
        )
        stats.record_enh_stat(
            src_code=1, written_frames=200, lost_frames=10, written_bytes=10000, lost_bytes=500, baudrate=3_000_000
        )
    # === INIT_DONE Tests ===
    def test_init_resets_sn_gap(self) -> None:
        """reset('init') re-baselines the SN tracker: a large post-reset SN jump
        must not be counted as loss (received stays at 3 frames total)."""
        stats = StatsAccumulator()
        stats.record_frame(100, 1, 0)
        stats.record_frame(100, 1, 1)
        stats.reset('init')
        # SN jumps to 100 after reset; with a fresh baseline this is not a gap.
        stats.record_frame(100, 1, 100)
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                assert snap.received.frames == 3
    def test_init_resets_firmware_loss(self) -> None:
        """reset('init') fully clears the firmware loss tracker."""
        stats = StatsAccumulator()
        self._populate(stats)
        stats.reset('init')
        # After init reset, loss tracker should be clean
        # First report after reset establishes new baseline
        stats.record_enh_stat(1, 50, 3, 2500, 150, 3_000_000)
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                assert snap.buffer_loss.frames == 3 # first report absolute value
    def test_init_resets_firmware_written(self) -> None:
        """reset('init') fully clears the firmware written tracker."""
        stats = StatsAccumulator()
        self._populate(stats)
        stats.reset('init')
        stats.record_enh_stat(1, 50, 0, 2500, 0, 3_000_000)
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                assert snap.written.frames == 50 # first report absolute value
    def test_init_preserves_transport_metrics(self) -> None:
        """Console-local transport counters survive reset('init')."""
        stats = StatsAccumulator()
        stats.record_bytes(5000)
        stats.record_frame()
        stats.reset('init')
        snapshot = stats.snapshot(1.0)
        assert snapshot.transport.rx_bytes == 5000
        assert snapshot.transport.fps == 1.0
    def test_init_preserves_per_source_received(self) -> None:
        """Console-local per-source received counters survive reset('init')."""
        stats = StatsAccumulator()
        stats.record_frame(100, 1, 0)
        stats.reset('init')
        funnel = stats.funnel_snapshot()
        assert len(funnel) == 1
        assert funnel[0].received.frames == 1
    # === FLUSH Tests ===
    def test_flush_resets_sn_gap(self) -> None:
        """reset('flush') fully resets the SN tracker (firmware restarts SN at 0)."""
        stats = StatsAccumulator()
        stats.record_frame(100, 1, 0)
        stats.record_frame(100, 1, 1)
        stats.reset('flush')
        # After flush, SN tracker is fully reset
        stats.record_frame(100, 1, 0) # SN restarts from 0
        # Should not count gap from old SN=1 to new SN=0
        # The per_source_received should include the 2 pre-flush frames + 1 post-flush
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                # 2 pre-flush + 1 post-flush = 3 total received
                assert snap.received.frames == 3
    def test_flush_preserves_firmware_loss_accumulators(self) -> None:
        """reset('flush') keeps accumulated loss, only resetting the baseline."""
        stats = StatsAccumulator()
        # Build up some loss: baseline then delta
        stats.record_enh_stat(1, 100, 5, 5000, 250, 3_000_000)
        stats.record_enh_stat(1, 200, 10, 10000, 500, 3_000_000)
        # Now flush
        stats.reset('flush')
        # Next report re-establishes baseline (no additional delta)
        stats.record_enh_stat(1, 50, 3, 2500, 150, 3_000_000)
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                # Initial absolute (5) + delta (5) = 10; flush preserves accum
                assert snap.buffer_loss.frames == 10
    def test_flush_preserves_firmware_written_accumulators(self) -> None:
        """reset('flush') keeps accumulated written counters, resetting only baselines."""
        stats = StatsAccumulator()
        stats.record_enh_stat(1, 100, 0, 5000, 0, 3_000_000)
        stats.record_enh_stat(1, 200, 0, 10000, 0, 3_000_000)
        stats.reset('flush')
        stats.record_enh_stat(1, 50, 0, 2500, 0, 3_000_000)
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                assert snap.written.frames == 200 # initial absolute + pre-flush delta preserved
    def test_flush_preserves_transport_metrics(self) -> None:
        """Console-local transport counters survive reset('flush')."""
        stats = StatsAccumulator()
        stats.record_bytes(5000)
        stats.record_frame()
        stats.reset('flush')
        snapshot = stats.snapshot(1.0)
        assert snapshot.transport.rx_bytes == 5000 # preserved
    def test_flush_preserves_per_source_received(self) -> None:
        """Console-local per-source received counters survive reset('flush')."""
        stats = StatsAccumulator()
        stats.record_frame(100, 1, 0)
        stats.record_frame(100, 1, 1)
        stats.reset('flush')
        funnel = stats.funnel_snapshot()
        for snap in funnel:
            if snap.source == 1:
                assert snap.received.frames == 2 # preserved
@@ -0,0 +1,79 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.sn_gap import SNGapTracker
class TestSNGapTracker:
    """Per-source sequence-number gap detection: baseline, reorder window,
    confirmed loss, reset detection, and 24-bit wraparound."""
    def setup_method(self) -> None:
        # Fresh tracker per test; record() returns the number of SNs newly
        # confirmed as lost by that call.
        self.tracker = SNGapTracker()
    # --- Baseline ---
    def test_first_frame_establishes_baseline(self) -> None:
        assert self.tracker.record(src_code=1, frame_sn=42) == 0
    # --- In-order ---
    def test_sequential_no_gap(self) -> None:
        self.tracker.record(1, 0)
        assert self.tracker.record(1, 1) == 0
        assert self.tracker.record(1, 2) == 0
    # --- Simple reorder (within window) ---
    def test_reorder_no_false_gap(self) -> None:
        """SN=8 arrives before SN=5,6,7 — no gaps should be counted."""
        self.tracker.record(1, 5) # baseline → window_base=6
        assert self.tracker.record(1, 8) == 0 # within window, NOT a gap
        assert self.tracker.record(1, 6) == 0 # late fill
        assert self.tracker.record(1, 7) == 0 # late fill
        assert self.tracker.totals().get(1, 0) == 0
    # --- Confirmed loss ---
    def test_loss_confirmed_when_window_expires(self) -> None:
        """Frame beyond window forces expiry of unreceived SNs."""
        self.tracker.record(1, 0) # baseline → base=1
        # SN=1 never arrives; jump to SN=257 (beyond window of 256)
        gaps = self.tracker.record(1, 257)
        assert gaps > 0 # SN=1 expired as confirmed loss
        assert self.tracker.totals()[1] > 0
    # --- Late arrival behind window ---
    def test_late_arrival_ignored(self) -> None:
        """An SN that falls behind an already-advanced window counts no loss."""
        self.tracker.record(1, 0)
        self.tracker.record(1, 257) # force window advance past 0
        assert self.tracker.record(1, 1) == 0 # too late, ignored
    # --- Reset detection ---
    def test_large_backward_jump_resets_baseline(self) -> None:
        """A far-backward SN jump is treated as a device reset, not loss."""
        self.tracker.record(1, 1000)
        # SN jumps back to 5 (far beyond REORDER_WINDOW backward)
        assert self.tracker.record(1, 5) == 0
        # After re-baseline, SN=6 should be normal
        assert self.tracker.record(1, 6) == 0
    # --- Multi-source independence ---
    def test_sources_independent(self) -> None:
        self.tracker.record(1, 10)
        self.tracker.record(2, 20)
        assert self.tracker.record(1, 11) == 0
        assert self.tracker.record(2, 21) == 0
    # --- 24-bit wraparound ---
    def test_wraparound(self) -> None:
        """SNs are 24-bit; SN_MAX-1 → 0 is consecutive, not a gap."""
        SN_MAX = 1 << 24
        self.tracker.record(1, SN_MAX - 2) # base = SN_MAX-1
        assert self.tracker.record(1, SN_MAX - 1) == 0
        assert self.tracker.record(1, 0) == 0 # wraps to 0
        assert self.tracker.record(1, 1) == 0
    # --- Reset method ---
    def test_reset_clears_all(self) -> None:
        self.tracker.record(1, 10)
        self.tracker.reset()
        # After reset, next frame establishes new baseline
        assert self.tracker.record(1, 0) == 0
    def test_reset_single_source(self) -> None:
        """reset(src_code=...) re-baselines only that source."""
        self.tracker.record(1, 10)
        self.tracker.record(2, 20)
        self.tracker.reset(src_code=1)
        assert self.tracker.record(1, 0) == 0 # re-baselined
        assert self.tracker.record(2, 21) == 0 # unaffected
@@ -0,0 +1,909 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from typing import Optional
from unittest.mock import patch
from src.backend.models import BleLogSource
from src.backend.models import has_os_ts
from src.backend.stats import StatsAccumulator
from src.backend.stats import TRAFFIC_ALERT_COOLDOWN_SEC
from src.backend.stats import TRAFFIC_THRESHOLD_PCT
from src.backend.stats import TRAFFIC_WINDOW_SEC
from src.backend.stats import TrafficSpikeResult
from src.backend.stats import WRITE_RATE_WINDOW_MS
from src.backend.stats.peak_burst import _ts_delta_ms
# Convenience: default frame size used in peak write tests (arbitrary but consistent)
_FRAME_SZ = 100
_SRC = 1 # default source code for single-source tests
class TestStatsAccumulator:
    """Core transport counters and firmware (ENH_STAT) loss accounting."""
    def test_initial_state(self) -> None:
        """A fresh accumulator reports all-zero counters and no peak data."""
        stats = StatsAccumulator()
        snapshot = stats.snapshot(0.25)
        assert snapshot.transport.rx_bytes == 0
        assert snapshot.loss.total_frames == 0
        assert snapshot.loss.total_bytes == 0
        assert snapshot.os_peak.per_source is None
        assert snapshot.os_peak.max_per_source is None
    def test_record_bytes(self) -> None:
        """rx_bytes accumulates; bps is derived from bytes over the interval."""
        stats = StatsAccumulator()
        stats.record_bytes(1024)
        snapshot = stats.snapshot(1.0)
        assert snapshot.transport.rx_bytes == 1024
        # bps = 1024 * 10 / 1.0 = 10240
        # NOTE(review): the x10 factor presumably models 10 UART bits per byte
        # (start + 8 data + stop) — confirm against the implementation.
        assert snapshot.transport.bps == 10240.0
    def test_record_frame(self) -> None:
        """fps = frames recorded in the interval / interval seconds."""
        stats = StatsAccumulator()
        stats.record_frame()
        stats.record_frame()
        snapshot = stats.snapshot(1.0)
        assert snapshot.transport.fps == 2.0
    def test_max_bps_tracked(self) -> None:
        """max_bps retains the highest bps seen across snapshots."""
        stats = StatsAccumulator()
        stats.record_bytes(10000)
        stats.snapshot(1.0) # bps = 100000
        stats.record_bytes(100)
        snap2 = stats.snapshot(1.0) # bps = 1000
        assert snap2.transport.max_bps == 100000.0
    def _enh_stat_loss(
        self, stats: StatsAccumulator, src_code: int, lost_frames: int, lost_bytes: int
    ) -> tuple[int, int]:
        """Helper: call record_enh_stat with dummy written/baudrate, return loss delta."""
        return stats.record_enh_stat( # type: ignore[no-any-return]
            src_code=src_code,
            written_frames=0,
            lost_frames=lost_frames,
            written_bytes=0,
            lost_bytes=lost_bytes,
            baudrate=3_000_000,
        )
    def test_firmware_loss_first_report_zero_delta(self) -> None:
        """First ENH_STAT initializes prev (delta=0); subsequent reports show delta."""
        stats = StatsAccumulator()
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=1000, lost_bytes=5000)
        assert new_frames == 0
        assert new_bytes == 0
        # The snapshot still includes the first report's absolute counters.
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 1000
        assert snapshot.loss.total_bytes == 5000
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=1003, lost_bytes=5128)
        assert new_frames == 3
        assert new_bytes == 128
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 1003
        assert snapshot.loss.total_bytes == 5128
    def test_firmware_loss_incremental_returns(self) -> None:
        """Incremental return reflects per-report delta, not cumulative."""
        stats = StatsAccumulator()
        self._enh_stat_loss(stats, src_code=1, lost_frames=0, lost_bytes=0)
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=5, lost_bytes=200)
        assert new_frames == 5
        assert new_bytes == 200
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=8, lost_bytes=320)
        assert new_frames == 3
        assert new_bytes == 120
    def test_multi_source_firmware_loss(self) -> None:
        """Firmware loss tracked independently per source code."""
        stats = StatsAccumulator()
        self._enh_stat_loss(stats, src_code=1, lost_frames=100, lost_bytes=1000)
        self._enh_stat_loss(stats, src_code=2, lost_frames=50, lost_bytes=500)
        self._enh_stat_loss(stats, src_code=1, lost_frames=105, lost_bytes=1200)
        self._enh_stat_loss(stats, src_code=2, lost_frames=52, lost_bytes=580)
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 157 # 100 + 50 + 5 + 2
        assert snapshot.loss.total_bytes == 1780 # 1000 + 500 + 200 + 80
    def test_firmware_loss_counter_reset(self) -> None:
        """Counter reset (bench_reset_stat) detected and handled correctly."""
        stats = StatsAccumulator()
        self._enh_stat_loss(stats, src_code=1, lost_frames=0, lost_bytes=0)
        self._enh_stat_loss(stats, src_code=1, lost_frames=100, lost_bytes=4000)
        # Counter went backwards (100 -> 30): treated as a reset, delta = 0.
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=30, lost_bytes=1200)
        assert new_frames == 0
        assert new_bytes == 0
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 130
        assert snapshot.loss.total_bytes == 5200
        new_frames, new_bytes = self._enh_stat_loss(stats, src_code=1, lost_frames=50, lost_bytes=2000)
        assert new_frames == 20
        assert new_bytes == 800
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 150
        assert snapshot.loss.total_bytes == 6000
    def test_firmware_loss_multiple_resets(self) -> None:
        """Multiple resets accumulate correctly across all cycles."""
        stats = StatsAccumulator()
        self._enh_stat_loss(stats, src_code=1, lost_frames=0, lost_bytes=0)
        self._enh_stat_loss(stats, src_code=1, lost_frames=50, lost_bytes=2000)
        self._enh_stat_loss(stats, src_code=1, lost_frames=10, lost_bytes=400)
        self._enh_stat_loss(stats, src_code=1, lost_frames=30, lost_bytes=1200)
        self._enh_stat_loss(stats, src_code=1, lost_frames=5, lost_bytes=200)
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 85
        assert snapshot.loss.total_bytes == 3400
    def test_firmware_loss_uint32_overflow_treated_as_reset(self) -> None:
        """uint32 counter overflow is indistinguishable from reset -- handled same way."""
        stats = StatsAccumulator()
        self._enh_stat_loss(stats, src_code=1, lost_frames=0xFFFF_FF00, lost_bytes=0)
        new_frames, _ = self._enh_stat_loss(stats, src_code=1, lost_frames=50, lost_bytes=0)
        assert new_frames == 0
        snapshot = stats.snapshot(0.25)
        assert snapshot.loss.total_frames == 0xFFFF_FF00 + 50
class TestRecordFrameWithSN:
    """record_frame() with the optional (frame_size, src_code, frame_sn) arguments."""

    def test_backward_compatible_no_args(self) -> None:
        stats = StatsAccumulator()
        stats.record_frame()
        assert stats.snapshot(1.0).transport.fps == 1.0

    def test_tracks_per_source_received(self) -> None:
        stats = StatsAccumulator()
        frames = ((100, 1, 0), (200, 1, 1), (50, 2, 0))
        for size, src, sn in frames:
            stats.record_frame(frame_size=size, src_code=src, frame_sn=sn)
        assert stats._per_source_received_frames[1] == 2
        assert stats._per_source_received_bytes[1] == 300
        assert stats._per_source_received_frames[2] == 1
        assert stats._per_source_received_bytes[2] == 50

    def test_sn_gap_tracked(self) -> None:
        stats = StatsAccumulator()
        stats.set_firmware_version(4)  # enable SN gap tracking (requires version >= 4)
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        # SN=257 is beyond the reorder window (256), forcing SN=1 to be confirmed lost
        stats.record_frame(frame_size=100, src_code=1, frame_sn=257)
        assert stats._sn_gap.totals() == {1: 1}

    def test_no_sn_tracking_when_sn_negative(self) -> None:
        stats = StatsAccumulator()
        stats.record_frame(frame_size=100, src_code=1, frame_sn=-1)
        # Transport-level fps still counted; per-source tracking skipped.
        assert 1 not in stats._per_source_received_frames
        assert stats.snapshot(1.0).transport.fps == 1.0

    def test_no_sn_tracking_when_src_zero(self) -> None:
        stats = StatsAccumulator()
        stats.record_frame(frame_size=100, src_code=0, frame_sn=5)
        assert 0 not in stats._per_source_received_frames
class TestRecordEnhStat:
    """record_enh_stat(): feeds written/loss trackers and guards torn reads."""
    def test_feeds_both_trackers(self) -> None:
        """One ENH_STAT report updates both the written and the loss tracker."""
        stats = StatsAccumulator()
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        stats.record_enh_stat(
            src_code=1, written_frames=100, lost_frames=10, written_bytes=5000, lost_bytes=500, baudrate=3_000_000
        )
        written = stats._fw_written.totals()
        assert written[1] == (100, 5000)
        loss = stats._fw_loss.per_source_totals()
        assert loss[1] == (10, 500)
    def test_returns_loss_delta(self) -> None:
        """Return value is the per-report loss delta (frames, bytes)."""
        stats = StatsAccumulator()
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        assert (d_f, d_b) == (0, 0)
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=50, lost_frames=5, written_bytes=2500, lost_bytes=250, baudrate=3_000_000
        )
        assert (d_f, d_b) == (5, 250)
    def test_torn_read_guard_rejects_implausible_written_bytes(self) -> None:
        """A written-bytes delta above the plausibility bound is discarded."""
        stats = StatsAccumulator()
        baudrate = 3_000_000
        # Plausibility bound derived from baudrate — presumably bytes transferable
        # in the report interval at 10 bits/byte; confirm against implementation.
        max_delta = baudrate * 2 // 10
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=baudrate
        )
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=10, lost_frames=0, written_bytes=max_delta + 1, lost_bytes=0, baudrate=baudrate
        )
        assert (d_f, d_b) == (0, 0)
        assert stats._fw_written.totals()[1] == (0, 0)
    def test_torn_read_guard_rejects_implausible_lost_bytes(self) -> None:
        """A lost-bytes delta above the plausibility bound is discarded."""
        stats = StatsAccumulator()
        baudrate = 3_000_000
        max_delta = baudrate * 2 // 10
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=baudrate
        )
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=10, lost_frames=5, written_bytes=500, lost_bytes=max_delta + 1, baudrate=baudrate
        )
        assert (d_f, d_b) == (0, 0)
        assert stats._fw_loss.per_source_totals()[1] == (0, 0)
    def test_torn_read_guard_accepts_plausible_delta(self) -> None:
        """A delta exactly at the bound is accepted (boundary inclusive)."""
        stats = StatsAccumulator()
        baudrate = 3_000_000
        max_delta = baudrate * 2 // 10
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=baudrate
        )
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=10, lost_frames=2, written_bytes=max_delta, lost_bytes=100, baudrate=baudrate
        )
        assert d_f == 2
        assert d_b == 100
    def test_torn_read_recovery_uses_last_good_prev(self) -> None:
        """After a rejected (torn) report, deltas are computed from the last
        accepted report, so the next good report yields the correct delta."""
        stats = StatsAccumulator()
        baudrate = 3_000_000
        max_delta = baudrate * 2 // 10
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=baudrate
        )
        stats.record_enh_stat(
            src_code=1, written_frames=10, lost_frames=0, written_bytes=max_delta + 1, lost_bytes=0, baudrate=baudrate
        )
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=20, lost_frames=3, written_bytes=1000, lost_bytes=150, baudrate=baudrate
        )
        assert d_f == 3
        assert d_b == 150
class TestRecordFrameReturnsGap:
    """Return value of record_frame(): number of SNs newly confirmed lost."""

    def test_returns_zero_for_sequential_frames(self) -> None:
        stats = StatsAccumulator()
        stats.set_firmware_version(4)
        for sn in (0, 1):
            assert stats.record_frame(frame_size=100, src_code=1, frame_sn=sn) == 0

    def test_returns_gap_count_for_large_jump(self) -> None:
        stats = StatsAccumulator()
        stats.set_firmware_version(4)
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        # Jumping past the reorder window confirms the skipped SNs as lost.
        assert stats.record_frame(frame_size=100, src_code=1, frame_sn=300) > 0

    def test_returns_zero_when_no_sn_tracking(self) -> None:
        stats = StatsAccumulator()
        # Negative SN or source code 0 both disable SN tracking entirely.
        assert stats.record_frame(frame_size=100, src_code=1, frame_sn=-1) == 0
        assert stats.record_frame(frame_size=100, src_code=0, frame_sn=5) == 0

    def test_sn_gap_disabled_for_old_firmware(self) -> None:
        stats = StatsAccumulator()
        stats.set_firmware_version(3)
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        assert stats.record_frame(frame_size=100, src_code=1, frame_sn=300) == 0

    def test_sn_gap_disabled_by_default(self) -> None:
        stats = StatsAccumulator()
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        assert stats.record_frame(frame_size=100, src_code=1, frame_sn=300) == 0
class TestReset:
    """reset('init') vs reset('flush') semantics on the accumulator as a whole."""
    def test_init_clears_firmware_preserves_console(self) -> None:
        """reset('init') wipes firmware-coupled state but keeps console counters."""
        stats = StatsAccumulator()
        stats.record_bytes(1000)
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        stats.record_enh_stat(
            src_code=1, written_frames=50, lost_frames=5, written_bytes=2500, lost_bytes=250, baudrate=3_000_000
        )
        stats.reset('init')
        snapshot = stats.snapshot(1.0)
        # Console-local transport counters survive; firmware totals are cleared.
        assert snapshot.transport.rx_bytes == 1000
        assert snapshot.loss.total_frames == 0
        assert stats._per_source_received_frames == {1: 1}
        assert stats._per_source_received_bytes == {1: 100}
        assert stats._fw_written.totals() == {}
    def test_flush_resets_baselines_only(self) -> None:
        """reset('flush') keeps all accumulators; only ENH_STAT baselines restart."""
        stats = StatsAccumulator()
        stats.record_bytes(1000)
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        stats.record_enh_stat(
            src_code=1, written_frames=50, lost_frames=5, written_bytes=2500, lost_bytes=250, baudrate=3_000_000
        )
        stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
        stats.reset('flush')
        # Console-local data preserved
        snapshot = stats.snapshot(1.0)
        assert snapshot.transport.rx_bytes == 1000
        assert stats._per_source_received_bytes == {1: 100}
        # Loss accumulators preserved but baselines reset
        assert snapshot.loss.total_frames == 5
        # Next ENH_STAT re-baselines (first report = 0 delta)
        d_f, d_b = stats.record_enh_stat(
            src_code=1, written_frames=100, lost_frames=10, written_bytes=5000, lost_bytes=500, baudrate=3_000_000
        )
        assert (d_f, d_b) == (0, 0)
class TestFunnelSnapshot:
    def test_empty(self) -> None:
        """With no data recorded, the funnel snapshot is an empty list."""
        stats = StatsAccumulator()
        assert stats.funnel_snapshot() == []
    def test_single_source_full_data(self) -> None:
        """Funnel arithmetic for one source: produced = written + buffer loss;
        transport loss = written - received (after a baseline snapshot)."""
        stats = StatsAccumulator()
        stats.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        stats.record_enh_stat(
            src_code=1, written_frames=100, lost_frames=10, written_bytes=5000, lost_bytes=500, baudrate=3_000_000
        )
        stats.record_frame(frame_size=80, src_code=1, frame_sn=0)
        stats.record_frame(frame_size=80, src_code=1, frame_sn=1)
        stats.funnel_snapshot() # establishes prev_written baseline
        funnels = stats.funnel_snapshot()
        assert len(funnels) == 1
        f = funnels[0]
        assert f.source == 1
        assert f.written.frames == 100
        assert f.written.bytes == 5000
        assert f.buffer_loss.frames == 10
        assert f.buffer_loss.bytes == 500
        assert f.produced.frames == 110  # written (100) + buffer loss (10)
        assert f.produced.bytes == 5500
        assert f.received.frames == 2
        assert f.received.bytes == 160
        assert f.transport_loss.frames == 98  # written (100) - received (2)
        assert f.transport_loss.bytes == 4840
def test_transport_loss_zero_on_first_snapshot(self) -> None:
stats = StatsAccumulator()
stats.record_enh_stat(
src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
)
stats.record_enh_stat(
src_code=1, written_frames=100, lost_frames=0, written_bytes=5000, lost_bytes=0, baudrate=3_000_000
)
stats.record_frame(frame_size=80, src_code=1, frame_sn=0)
funnels = stats.funnel_snapshot()
assert funnels[0].transport_loss.frames == 0
def test_transport_loss_stable_after_written_jump(self) -> None:
stats = StatsAccumulator()
stats.record_enh_stat(
src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
)
stats.record_enh_stat(
src_code=1, written_frames=50, lost_frames=0, written_bytes=2500, lost_bytes=0, baudrate=3_000_000
)
for i in range(50):
stats.record_frame(frame_size=50, src_code=1, frame_sn=i)
stats.funnel_snapshot() # prev_written = {1: (50, 2500)}
stats.record_enh_stat(
src_code=1, written_frames=100, lost_frames=0, written_bytes=5000, lost_bytes=0, baudrate=3_000_000
)
for i in range(49):
stats.record_frame(frame_size=50, src_code=1, frame_sn=50 + i)
funnels = stats.funnel_snapshot()
assert funnels[0].transport_loss.frames == 0
def test_multi_source(self) -> None:
stats = StatsAccumulator()
stats.record_enh_stat(
src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
)
stats.record_enh_stat(
src_code=2, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
)
stats.record_enh_stat(
src_code=1, written_frames=50, lost_frames=5, written_bytes=2500, lost_bytes=250, baudrate=3_000_000
)
stats.record_enh_stat(
src_code=2, written_frames=30, lost_frames=2, written_bytes=1500, lost_bytes=100, baudrate=3_000_000
)
funnels = stats.funnel_snapshot()
assert len(funnels) == 2
assert funnels[0].source == 1
assert funnels[1].source == 2
assert funnels[0].written.frames == 50
assert funnels[1].written.frames == 30
def test_throughput_lifetime_average(self) -> None:
stats = StatsAccumulator()
stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
stats.record_frame(frame_size=100, src_code=1, frame_sn=1)
stats.record_frame(frame_size=100, src_code=1, frame_sn=2)
funnels = stats.funnel_snapshot(elapsed_sec=1.0)
assert funnels[0].throughput.throughput_fps == 3.0
assert funnels[0].throughput.throughput_bps == 300.0
def test_throughput_accumulates_across_snapshots(self) -> None:
stats = StatsAccumulator()
stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
stats.record_frame(frame_size=100, src_code=1, frame_sn=1)
stats.funnel_snapshot(elapsed_sec=1.0)
stats.record_frame(frame_size=200, src_code=1, frame_sn=2)
funnels = stats.funnel_snapshot(elapsed_sec=1.0)
assert funnels[0].throughput.throughput_fps == 1.5
assert funnels[0].throughput.throughput_bps == 200.0
def test_peak_write_from_burst_tracker(self) -> None:
stats = StatsAccumulator()
for i in range(5):
stats.record_frame_ts(1000, 80, 1)
stats.snapshot(0.25)
stats.record_frame(frame_size=80, src_code=1, frame_sn=0)
funnels = stats.funnel_snapshot()
assert len(funnels) == 1
assert funnels[0].throughput.peak_write_frames == 5
assert funnels[0].throughput.peak_write_bytes == 5 * 80
assert funnels[0].throughput.peak_window_ms == WRITE_RATE_WINDOW_MS
def test_throughput_zero_without_elapsed(self) -> None:
stats = StatsAccumulator()
stats.record_frame(frame_size=100, src_code=1, frame_sn=0)
funnels = stats.funnel_snapshot()
assert funnels[0].throughput.throughput_fps == 0.0
assert funnels[0].throughput.throughput_bps == 0.0
class TestFunnelExcludesInternal:
    """Source code 0 (the internal source) must never appear in funnel output."""

    def test_internal_only_returns_empty(self) -> None:
        """A stream consisting solely of internal ENH_STATs yields no funnels."""
        acc = StatsAccumulator()
        # Baseline report, then a real report, both for source 0.
        acc.record_enh_stat(
            src_code=0, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        acc.record_enh_stat(
            src_code=0, written_frames=50, lost_frames=0, written_bytes=2500, lost_bytes=0, baudrate=3_000_000
        )
        assert acc.funnel_snapshot() == []

    def test_internal_excluded_alongside_others(self) -> None:
        """The internal source is filtered out while ordinary sources remain."""
        acc = StatsAccumulator()
        acc.record_enh_stat(
            src_code=0, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        acc.record_enh_stat(
            src_code=0, written_frames=50, lost_frames=0, written_bytes=2500, lost_bytes=0, baudrate=3_000_000
        )
        acc.record_enh_stat(
            src_code=1, written_frames=0, lost_frames=0, written_bytes=0, lost_bytes=0, baudrate=3_000_000
        )
        acc.record_enh_stat(
            src_code=1, written_frames=100, lost_frames=0, written_bytes=5000, lost_bytes=0, baudrate=3_000_000
        )
        result = acc.funnel_snapshot()
        assert len(result) == 1
        assert result[0].source == 1
class TestHasOsTs:
    """has_os_ts() reports whether a source's frames carry an OS-domain timestamp."""

    def test_sources_with_os_ts(self) -> None:
        """Host-side sources are expected to carry an OS timestamp."""
        assert has_os_ts(BleLogSource.INTERNAL) is True
        assert has_os_ts(BleLogSource.CUSTOM) is True
        assert has_os_ts(BleLogSource.HOST) is True
        assert has_os_ts(BleLogSource.HCI) is True
        assert has_os_ts(BleLogSource.ENCODE) is True

    def test_sources_without_os_ts(self) -> None:
        """Link-layer and redirect sources do not carry an OS timestamp."""
        assert has_os_ts(BleLogSource.LL_TASK) is False
        assert has_os_ts(BleLogSource.LL_HCI) is False
        assert has_os_ts(BleLogSource.LL_ISR) is False
        assert has_os_ts(BleLogSource.REDIR) is False
class TestTsDeltaMs:
    """_ts_delta_ms() computes a uint32-wraparound-aware millisecond delta,
    returning a negative value when the jump looks like time going backward."""

    def test_normal_forward(self) -> None:
        """Plain forward step: 1100 - 1000 = 100 ms."""
        assert _ts_delta_ms(1100, 1000) == 100

    def test_zero_delta(self) -> None:
        """Identical timestamps yield zero."""
        assert _ts_delta_ms(5000, 5000) == 0

    def test_wraparound(self) -> None:
        # uint32 wraps: newer=50, older=0xFFFFFF00 -> delta=0x100+50=306
        assert _ts_delta_ms(50, 0xFFFF_FF00) == 306

    def test_backward_jump_returns_negative(self) -> None:
        # older > newer by a large amount -> detected as backward
        assert _ts_delta_ms(1000, 0x8000_0100) == -1
class TestPeakWriteBurst:
    """Tests for sliding window peak write burst (count + bytes).

    Frames landing within WRITE_RATE_WINDOW_MS of each other share a window;
    the snapshot reports the densest window seen (frames and bytes).
    """

    def test_single_frame_counts_as_peak(self) -> None:
        """A single frame in window -> peak_write_count=1."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, _FRAME_SZ, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 1
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == _FRAME_SZ

    def test_two_frames_same_ms(self) -> None:
        """Two frames at same timestamp -> both in window -> count=2."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, 50, _SRC)
        stats.record_frame_ts(1000, 70, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 2
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 120

    def test_adjacent_ms_within_window(self) -> None:
        """Frames at ts=100 and ts=101 (delta=1 < WRITE_RATE_WINDOW_MS) are in the same window."""
        stats = StatsAccumulator()
        stats.record_frame_ts(100, 60, _SRC)
        stats.record_frame_ts(101, 40, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 2
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 100

    def test_far_apart_ms_are_separate_windows(self) -> None:
        """Frames with delta >= WRITE_RATE_WINDOW_MS are in separate windows."""
        stats = StatsAccumulator()
        stats.record_frame_ts(100, 60, _SRC)
        stats.record_frame_ts(100 + WRITE_RATE_WINDOW_MS, 40, _SRC)
        snapshot = stats.snapshot(0.25)
        # Each frame is alone in its window; the first (60 bytes) is the peak.
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 1
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 60

    def test_burst_same_timestamp(self) -> None:
        """Many frames at the same ms -> all in window."""
        stats = StatsAccumulator()
        for _ in range(10):
            stats.record_frame_ts(5000, 32, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 10
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 320

    def test_peak_captures_densest_burst(self) -> None:
        """Sparse phase (far apart) then dense phase (same ms) -> peak from dense."""
        stats = StatsAccumulator()
        # Sparse: 3 frames at offsets 0, 10, 20 ms -- each alone in its 1ms window
        for i in range(3):
            stats.record_frame_ts(1000 + i * 10, 50, _SRC)
        # Dense: 5 frames all at 2000 ms
        for _ in range(5):
            stats.record_frame_ts(2000, 80, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 5
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 400

    def test_max_peak_persists_across_snapshots(self) -> None:
        """per_source resets each interval; max_per_source keeps the all-time peak."""
        stats = StatsAccumulator()
        # First: 3 frames same ms
        for _ in range(3):
            stats.record_frame_ts(1000, 100, _SRC)
        snap1 = stats.snapshot(0.25)
        assert snap1.os_peak.per_source[_SRC].peak_frames == 3
        assert snap1.os_peak.per_source[_SRC].peak_bytes == 300
        assert snap1.os_peak.max_per_source[_SRC].peak_frames == 3
        assert snap1.os_peak.max_per_source[_SRC].peak_bytes == 300
        # Second: only 1 frame
        stats.record_frame_ts(5000, 200, _SRC)
        snap2 = stats.snapshot(0.25)
        assert snap2.os_peak.per_source[_SRC].peak_frames == 1
        assert snap2.os_peak.per_source[_SRC].peak_bytes == 200
        # All-time max preserved from first snapshot
        assert snap2.os_peak.max_per_source[_SRC].peak_frames == 3
        assert snap2.os_peak.max_per_source[_SRC].peak_bytes == 300

    def test_peak_resets_per_snapshot(self) -> None:
        """An interval with no frames reports per_source as None, not stale data."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, _FRAME_SZ, _SRC)
        stats.record_frame_ts(1000, _FRAME_SZ, _SRC)
        stats.snapshot(0.25)
        # No new frames -> peak should be None
        snap2 = stats.snapshot(0.25)
        assert snap2.os_peak.per_source is None

    def test_window_evicts_old_entries(self) -> None:
        """A frame beyond the window evicts the old entry from the live window."""
        stats = StatsAccumulator()
        stats.record_frame_ts(0, _FRAME_SZ, _SRC)
        # Frame far beyond window -- old entry evicted, only 1 frame remains
        stats.record_frame_ts(WRITE_RATE_WINDOW_MS + 5, _FRAME_SZ, _SRC)
        snapshot = stats.snapshot(0.25)
        # Peak is still 1 (each frame alone in its window), but the best was
        # recorded when each individual frame entered.
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 1

    def test_backward_timestamp_resets_window(self) -> None:
        """A large backward jump (e.g. chip reboot) clears the live window
        without discarding the peak already recorded."""
        stats = StatsAccumulator()
        stats.record_frame_ts(5000, 80, _SRC)
        stats.record_frame_ts(5000, 80, _SRC)
        # Chip rebooted -- timestamp jumps back to near-zero
        stats.record_frame_ts(100, 80, _SRC)
        # After reset, window contains only [100]. Peak from before reset was 2.
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 2
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 160

    def test_wraparound_same_ms_bucket(self) -> None:
        """Timestamps that wrap around uint32 but have delta=0 stay in window."""
        stats = StatsAccumulator()
        stats.record_frame_ts(0xFFFF_FFFF, 50, _SRC)
        stats.record_frame_ts(0xFFFF_FFFF, 50, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 2
        assert snapshot.os_peak.per_source[_SRC].peak_bytes == 100

    def test_wraparound_within_window(self) -> None:
        """Wrap from 0xFFFFFFFF to 0 (delta=1 < WRITE_RATE_WINDOW_MS) stays in window."""
        stats = StatsAccumulator()
        stats.record_frame_ts(0xFFFF_FFFF, 50, _SRC)
        stats.record_frame_ts(0, 50, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 2

    def test_wraparound_far_evicts(self) -> None:
        """Wrap with delta >= WRITE_RATE_WINDOW_MS evicts old entry."""
        stats = StatsAccumulator()
        stats.record_frame_ts(0xFFFF_FFFF, 50, _SRC)
        stats.record_frame_ts(WRITE_RATE_WINDOW_MS, 50, _SRC)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source[_SRC].peak_frames == 1
class TestPerSourcePeak:
    """Tests for per-source peak write burst tracking.

    Each source gets an independent sliding window; per-source peaks may occur
    at different moments than the global peak.
    """

    def test_single_source_peak(self) -> None:
        """Two frames at the same ms for one source -> peak 2 frames / 120 bytes."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, 50, 1)
        stats.record_frame_ts(1000, 70, 1)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source is not None
        assert 1 in snapshot.os_peak.per_source
        assert snapshot.os_peak.per_source[1].peak_frames == 2
        assert snapshot.os_peak.per_source[1].peak_bytes == 120

    def test_multi_source_peak(self) -> None:
        """Two sources writing at same ms -- per-source counts are independent."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, 50, 1)
        stats.record_frame_ts(1000, 30, 2)
        stats.record_frame_ts(1000, 60, 1)
        stats.record_frame_ts(1000, 40, 2)
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source is not None
        assert snapshot.os_peak.per_source[1].peak_frames == 2
        assert snapshot.os_peak.per_source[1].peak_bytes == 110
        assert snapshot.os_peak.per_source[2].peak_frames == 2
        assert snapshot.os_peak.per_source[2].peak_bytes == 70

    def test_per_source_all_time_max(self) -> None:
        """max_per_source keeps the largest burst seen across snapshots."""
        stats = StatsAccumulator()
        # First burst: src 1 has 3 frames
        for _ in range(3):
            stats.record_frame_ts(1000, 40, 1)
        snap1 = stats.snapshot(0.25)
        assert snap1.os_peak.max_per_source is not None
        assert snap1.os_peak.max_per_source[1].peak_frames == 3
        # Second burst: src 1 has only 1 frame -- all-time max preserved
        stats.record_frame_ts(5000, 40, 1)
        snap2 = stats.snapshot(0.25)
        assert snap2.os_peak.max_per_source is not None
        assert snap2.os_peak.max_per_source[1].peak_frames == 3

    def test_per_source_peak_none_when_no_data(self) -> None:
        """With no frames at all, both peak maps are None rather than empty."""
        stats = StatsAccumulator()
        snapshot = stats.snapshot(0.25)
        assert snapshot.os_peak.per_source is None
        assert snapshot.os_peak.max_per_source is None

    def test_per_source_independent_peak_moments(self) -> None:
        """Each source's peak is tracked even if it occurs at a different moment than global peak."""
        stats = StatsAccumulator()
        # At ts=1000: src 1 has 5 frames, src 2 has 1
        for _ in range(5):
            stats.record_frame_ts(1000, 30, 1)
        stats.record_frame_ts(1000, 30, 2)
        # At ts=2000: src 2 has 4 frames, src 1 has 0
        for _ in range(4):
            stats.record_frame_ts(2000, 30, 2)
        snapshot = stats.snapshot(0.25)
        # Global peak is 6 (at ts=1000), but per-source:
        assert snapshot.os_peak.per_source is not None
        assert snapshot.os_peak.per_source[1].peak_frames == 5  # from ts=1000
        assert snapshot.os_peak.per_source[2].peak_frames == 4  # from ts=2000
class TestLLPeakWriteBurst:
    """Tests for LL peak write burst tracking (lc_ts clock domain).

    LL (link-layer) frames use a separate timestamp domain and a separate
    window from os_ts frames; results appear under snapshot.ll_peak.
    """

    def test_ll_single_source_peak(self) -> None:
        """LL frames with same lc_ts_ms are counted in one window."""
        stats = StatsAccumulator()
        for _ in range(5):
            stats.record_ll_frame_ts(1000000, 30, BleLogSource.LL_TASK)
        snapshot = stats.snapshot(0.25)
        assert snapshot.ll_peak.per_source is not None
        assert snapshot.ll_peak.per_source[BleLogSource.LL_TASK].peak_frames == 5

    def test_ll_multi_source_peak(self) -> None:
        """LL per-source peaks are tracked independently."""
        stats = StatsAccumulator()
        for _ in range(3):
            stats.record_ll_frame_ts(2000000, 20, BleLogSource.LL_TASK)
        for _ in range(7):
            stats.record_ll_frame_ts(2000000, 20, BleLogSource.LL_ISR)
        snapshot = stats.snapshot(0.25)
        assert snapshot.ll_peak.per_source is not None
        assert snapshot.ll_peak.per_source[BleLogSource.LL_TASK].peak_frames == 3
        assert snapshot.ll_peak.per_source[BleLogSource.LL_ISR].peak_frames == 7

    def test_ll_all_time_max_persists(self) -> None:
        """LL all-time peak persists across snapshots."""
        stats = StatsAccumulator()
        for _ in range(10):
            stats.record_ll_frame_ts(1000000, 30, BleLogSource.LL_HCI)
        stats.snapshot(0.25)
        for _ in range(3):
            stats.record_ll_frame_ts(2000000, 30, BleLogSource.LL_HCI)
        snapshot = stats.snapshot(0.25)
        assert snapshot.ll_peak.max_per_source is not None
        assert snapshot.ll_peak.max_per_source[BleLogSource.LL_HCI].peak_frames == 10

    def test_ll_window_separate_from_os_ts(self) -> None:
        """LL window does not interfere with os_ts window."""
        stats = StatsAccumulator()
        stats.record_frame_ts(1000, 30, BleLogSource.CUSTOM)
        stats.record_ll_frame_ts(1000000, 30, BleLogSource.LL_TASK)
        snapshot = stats.snapshot(0.25)
        # Each source appears only in the tracker matching its clock domain.
        assert snapshot.os_peak.per_source is not None
        assert BleLogSource.CUSTOM in snapshot.os_peak.per_source
        assert BleLogSource.LL_TASK not in snapshot.os_peak.per_source
        assert snapshot.ll_peak.per_source is not None
        assert BleLogSource.LL_TASK in snapshot.ll_peak.per_source
        assert BleLogSource.CUSTOM not in snapshot.ll_peak.per_source
class TestTrafficSpikeDetection:
    """Tests for real-time traffic spike detection.

    Wire capacity is derived from baudrate (baud / 10 -> bytes/sec, per the
    calculations below). A spike alert fires only when utilization drops back
    below the threshold after exceeding it, and alerts are rate-limited by a
    cooldown. Time is mocked via src.backend.stats.traffic_spike.time.
    """

    def _make_stats(self, baudrate: int = 3_000_000) -> StatsAccumulator:
        # Accumulator with wire capacity configured from the given baudrate.
        stats = StatsAccumulator()
        stats.set_wire_max(baudrate)
        return stats

    def test_no_spike_below_threshold(self) -> None:
        """Traffic below 80% of wire max does not trigger spike."""
        stats = self._make_stats(3_000_000)
        wire_max_bps = 3_000_000 / 10  # 300,000 bytes/sec
        safe_bps = wire_max_bps * 0.5
        bytes_in_window = int(safe_bps * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            for _ in range(10):
                stats.record_frame_traffic(bytes_in_window // 10, 1)
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.001
            assert stats.check_traffic() is None

    def test_spike_detected_on_exit(self) -> None:
        """Spike alert fires when traffic drops below threshold after exceeding it."""
        stats = self._make_stats(3_000_000)
        wire_max_bps = 3_000_000 / 10
        hot_bps = wire_max_bps * 0.9
        bytes_in_window = int(hot_bps * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            stats.record_frame_traffic(bytes_in_window, 1)
            mock_time.perf_counter.return_value = t + 0.05
            result = stats.check_traffic()
            assert result is None  # still in spike, no alert yet
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01
            result = stats.check_traffic()
            assert result is not None
            assert result.utilization_pct > TRAFFIC_THRESHOLD_PCT * 100
            assert result.duration_ms > 0
            assert result.throughput_kbs > 0

    def _trigger_spike(
        self, stats: StatsAccumulator, mock_time: object, t: float, hot_bytes: int, src: int = 1
    ) -> Optional[TrafficSpikeResult]:
        """Helper: inject traffic, enter spike, then exit and return result."""
        mock_time.perf_counter.return_value = t  # type: ignore[attr-defined]
        stats.record_frame_traffic(hot_bytes, src)
        mock_time.perf_counter.return_value = t + 0.05  # type: ignore[attr-defined]
        stats.check_traffic()  # enter spike
        mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01  # type: ignore[attr-defined]
        return stats.check_traffic()  # exit spike -> alert

    def test_cooldown_suppresses_rapid_alerts(self) -> None:
        """Second spike within cooldown is suppressed."""
        stats = self._make_stats(3_000_000)
        wire_max_bps = 3_000_000 / 10
        hot_bytes = int(wire_max_bps * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            first = self._trigger_spike(stats, mock_time, t, hot_bytes)
            assert first is not None
            # 0.5 s later -- well inside the cooldown window.
            t2 = t + 0.5
            second = self._trigger_spike(stats, mock_time, t2, hot_bytes)
            assert second is None

    def test_alert_after_cooldown_expires(self) -> None:
        """Alert fires again after cooldown period."""
        stats = self._make_stats(3_000_000)
        wire_max_bps = 3_000_000 / 10
        hot_bytes = int(wire_max_bps * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            first = self._trigger_spike(stats, mock_time, t, hot_bytes)
            assert first is not None
            t2 = t + TRAFFIC_ALERT_COOLDOWN_SEC + 1.0
            second = self._trigger_spike(stats, mock_time, t2, hot_bytes)
            assert second is not None

    def test_per_source_breakdown(self) -> None:
        """Spike result includes per-source percentage breakdown."""
        stats = self._make_stats(3_000_000)
        wire_max_bps = 3_000_000 / 10
        hot_bytes = int(wire_max_bps * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            # 70% of traffic from source 1, 30% from source 2.
            stats.record_frame_traffic(int(hot_bytes * 0.7), 1)
            stats.record_frame_traffic(int(hot_bytes * 0.3), 2)
            mock_time.perf_counter.return_value = t + 0.05
            stats.check_traffic()  # enter spike
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01
            result = stats.check_traffic()  # exit spike
            assert result is not None
            assert 1 in result.per_source
            assert 2 in result.per_source
            assert result.per_source[1] > result.per_source[2]

    def test_no_wire_max_disables_detection(self) -> None:
        """Traffic detection is disabled when wire max is not set."""
        stats = StatsAccumulator()
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            stats.record_frame_traffic(999999, 1)
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01
            assert stats.check_traffic() is None
@@ -0,0 +1,175 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.models import format_throughput
from src.backend.models import FrameByteCount
from src.backend.models import FunnelSnapshot
from src.backend.models import ThroughputInfo
from src.backend.stats import StatsAccumulator
from src.frontend.stats_screen import _build_console_table
from src.frontend.stats_screen import _build_firmware_table
# Source codes used by these table tests.
_SRC_HOST = 5
_SRC_LL_TASK = 2

# Zero-valued building blocks for snapshot construction.
# NOTE(review): _ZERO and _ZERO_TP are not referenced in the visible tests;
# _snap() builds its own FrameByteCount/ThroughputInfo. Confirm before removing.
_ZERO = FrameByteCount(frames=0, bytes=0)
_ZERO_TP = ThroughputInfo(
    throughput_fps=0.0, throughput_bps=0.0, peak_write_frames=0, peak_write_bytes=0, peak_window_ms=10
)
def _snap(
    src: int,
    produced: tuple[int, int] = (0, 0),
    written: tuple[int, int] = (0, 0),
    received: tuple[int, int] = (0, 0),
    buf_loss: tuple[int, int] = (0, 0),
    tx_loss: tuple[int, int] = (0, 0),
    tp_fps: float = 0.0,
    peak_frames: int = 0,
) -> FunnelSnapshot:
    """Build a FunnelSnapshot for tests from (frames, bytes) tuples.

    All counters default to zero; only the fields a test cares about need to
    be supplied.
    """
    throughput = ThroughputInfo(
        throughput_fps=tp_fps,
        throughput_bps=0.0,
        peak_write_frames=peak_frames,
        peak_write_bytes=0,
        peak_window_ms=10,
    )
    return FunnelSnapshot(
        source=src,
        produced=FrameByteCount(*produced),
        written=FrameByteCount(*written),
        received=FrameByteCount(*received),
        buffer_loss=FrameByteCount(*buf_loss),
        transport_loss=FrameByteCount(*tx_loss),
        throughput=throughput,
    )
class TestFormatThroughput:
    """format_throughput() renders bytes/sec as 'X.Y KB/s' below 1 MiB/s and
    'X.YY MB/s' at or above it."""

    def test_zero(self) -> None:
        assert format_throughput(0.0) == '0.0 KB/s'

    def test_small_kb(self) -> None:
        assert format_throughput(512.0) == '0.5 KB/s'

    def test_one_kb(self) -> None:
        assert format_throughput(1024.0) == '1.0 KB/s'

    def test_large_kb(self) -> None:
        assert format_throughput(500 * 1024) == '500.0 KB/s'

    def test_boundary_just_below_mb(self) -> None:
        """Just below 1 MiB/s still formats as KB/s."""
        bps = 1023.9 * 1024
        result = format_throughput(bps)
        assert 'KB/s' in result

    def test_boundary_at_mb(self) -> None:
        """Exactly 1 MiB/s switches to the MB/s format."""
        bps = 1024 * 1024
        assert format_throughput(bps) == '1.00 MB/s'

    def test_large_mb(self) -> None:
        bps = 2.5 * 1024 * 1024
        assert format_throughput(bps) == '2.50 MB/s'

    def test_peak_extrapolation_typical(self) -> None:
        """A typical 1 ms peak (300 bytes) extrapolated to a second stays in KB/s."""
        peak_bytes_1ms = 300
        bps = peak_bytes_1ms * 1000
        result = format_throughput(bps)
        assert 'KB/s' in result

    def test_peak_extrapolation_high(self) -> None:
        """A heavy 1 ms peak (1500 bytes) extrapolates past 1 MiB/s -> MB/s."""
        peak_bytes_1ms = 1500
        bps = peak_bytes_1ms * 1000
        result = format_throughput(bps)
        assert 'MB/s' in result
class TestBuildFirmwareTable:
    """_build_firmware_table() renders funnel snapshots into a 5-column table."""

    def test_empty_returns_no_rows(self) -> None:
        """No snapshots -> a table with headers but zero rows."""
        table = _build_firmware_table([])
        assert table.row_count == 0

    def test_column_headers(self) -> None:
        """Table headers include Source, Written and Loss columns."""
        table = _build_firmware_table([])
        headers = [str(col.header) for col in table.columns]
        assert 'Source' in headers
        assert any('Written' in h for h in headers)
        assert any('Loss' in h for h in headers)

    def test_single_source(self) -> None:
        """One snapshot -> one row with the expected column count."""
        snap = _snap(_SRC_HOST, written=(120, 6000))
        table = _build_firmware_table([snap])
        assert table.row_count == 1
        assert len(table.columns) == 5

    def test_with_loss_shows_red(self) -> None:
        """A snapshot with buffer loss still produces exactly one row.

        NOTE(review): the name suggests red styling is applied; only the row
        count is asserted here -- the styling itself is not checked.
        """
        snap = _snap(_SRC_HOST, written=(110, 5500), buf_loss=(10, 500))
        table = _build_firmware_table([snap])
        assert table.row_count == 1

    def test_multiple_sources(self) -> None:
        """One row per source snapshot."""
        snaps = [
            _snap(_SRC_HOST, written=(100, 5000)),
            _snap(_SRC_LL_TASK, written=(200, 10000)),
        ]
        table = _build_firmware_table(snaps)
        assert table.row_count == 2
class TestBuildConsoleTable:
    """_build_console_table() renders funnel snapshots into a 7-column table."""

    def test_empty_returns_no_rows(self) -> None:
        """No snapshots -> a table with headers but zero rows."""
        table = _build_console_table([])
        assert table.row_count == 0

    def test_column_headers(self) -> None:
        """Table headers include Source, Received, Average and Peak columns."""
        table = _build_console_table([])
        headers = [str(col.header) for col in table.columns]
        assert 'Source' in headers
        assert any('Received' in h for h in headers)
        assert any('Average' in h for h in headers)
        assert any('Peak' in h for h in headers)

    def test_single_source(self) -> None:
        """One snapshot -> one row with the expected column count."""
        snap = _snap(_SRC_HOST, tp_fps=850.0, peak_frames=12)
        table = _build_console_table([snap])
        assert table.row_count == 1
        assert len(table.columns) == 7

    def test_zero_throughput_shows_dash(self) -> None:
        """Zero throughput still renders a row.

        NOTE(review): the name suggests a dash placeholder is rendered; only
        the row count is asserted here -- the cell content is not checked.
        """
        snap = _snap(_SRC_HOST, tp_fps=0.0, peak_frames=0)
        table = _build_console_table([snap])
        assert table.row_count == 1
class TestPerSourceRxBytes:
    """snapshot().per_source_rx_bytes accumulates received bytes per source."""

    def test_single_frame(self) -> None:
        """One frame -> its size appears under its source code."""
        acc = StatsAccumulator()
        acc.record_frame(frame_size=100, src_code=_SRC_HOST, frame_sn=0)
        snap = acc.snapshot(1.0)
        assert snap.per_source_rx_bytes == {_SRC_HOST: 100}

    def test_multiple_frames_same_source(self) -> None:
        """Frames from the same source sum their byte counts."""
        acc = StatsAccumulator()
        acc.record_frame(frame_size=100, src_code=_SRC_HOST, frame_sn=0)
        acc.record_frame(frame_size=200, src_code=_SRC_HOST, frame_sn=1)
        snap = acc.snapshot(1.0)
        assert snap.per_source_rx_bytes == {_SRC_HOST: 300}

    def test_multiple_sources(self) -> None:
        """Different sources are tracked under separate keys."""
        acc = StatsAccumulator()
        acc.record_frame(frame_size=100, src_code=_SRC_HOST, frame_sn=0)
        acc.record_frame(frame_size=200, src_code=_SRC_LL_TASK, frame_sn=0)
        snap = acc.snapshot(1.0)
        assert snap.per_source_rx_bytes == {_SRC_HOST: 100, _SRC_LL_TASK: 200}

    def test_cumulative_across_snapshots(self) -> None:
        """Byte totals are cumulative -- taking a snapshot does not reset them."""
        acc = StatsAccumulator()
        acc.record_frame(frame_size=100, src_code=_SRC_HOST, frame_sn=0)
        acc.snapshot(1.0)
        acc.record_frame(frame_size=200, src_code=_SRC_HOST, frame_sn=1)
        snap = acc.snapshot(1.0)
        assert snap.per_source_rx_bytes == {_SRC_HOST: 300}

    def test_none_when_no_data(self) -> None:
        """With no frames recorded the field is None, not an empty dict."""
        acc = StatsAccumulator()
        snap = acc.snapshot(1.0)
        assert snap.per_source_rx_bytes is None
@@ -0,0 +1,78 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from unittest.mock import patch
from src.backend.stats.traffic_spike import TRAFFIC_ALERT_COOLDOWN_SEC
from src.backend.stats.traffic_spike import TRAFFIC_WINDOW_SEC
from src.backend.stats.traffic_spike import TrafficSpikeDetector
from src.backend.stats.traffic_spike import TrafficSpikeResult
def _make_detector(baudrate: int = 3_000_000) -> TrafficSpikeDetector:
    """Return a detector with wire capacity set to baudrate / 10 bytes/sec
    (matching the wire-max calculations used throughout these tests)."""
    d = TrafficSpikeDetector()
    d.set_wire_max_bps(baudrate / 10)
    return d
def _trigger_spike(
    d: TrafficSpikeDetector, mock_time: object, t: float, hot_bytes: int, src: int = 1
) -> TrafficSpikeResult | None:
    """Drive a full spike cycle at mocked time t: record hot traffic, check
    while still inside the window (enters spike state), then check again past
    the window (exits spike) and return the resulting alert, if any."""
    mock_time.perf_counter.return_value = t  # type: ignore[attr-defined]
    d.record(hot_bytes, src)
    mock_time.perf_counter.return_value = t + 0.05  # type: ignore[attr-defined]
    d.check()  # enter spike state; no alert while still hot
    mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01  # type: ignore[attr-defined]
    return d.check()  # exit spike -> alert (or None if suppressed)
class TestTrafficSpikeDetector:
    """Direct tests of TrafficSpikeDetector (time mocked via
    src.backend.stats.traffic_spike.time)."""

    def test_no_spike_below_threshold(self) -> None:
        """Traffic at 50% of wire max never enters spike state."""
        d = _make_detector()
        wire_max_bps = 300_000
        safe_bytes = int(wire_max_bps * 0.5 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            d.record(safe_bytes, 1)
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.001
            assert d.check() is None

    def test_spike_on_exit(self) -> None:
        """90% utilization produces an alert once traffic subsides."""
        d = _make_detector()
        hot_bytes = int(300_000 * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            result = _trigger_spike(d, mock_time, t, hot_bytes)
            assert result is not None
            assert result.duration_ms > 0

    def test_cooldown(self) -> None:
        """A second spike 0.5 s after the first is suppressed by the cooldown."""
        d = _make_detector()
        hot_bytes = int(300_000 * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            first = _trigger_spike(d, mock_time, t, hot_bytes)
            assert first is not None
            second = _trigger_spike(d, mock_time, t + 0.5, hot_bytes)
            assert second is None

    def test_alert_after_cooldown(self) -> None:
        """A spike after the cooldown period produces a fresh alert."""
        d = _make_detector()
        hot_bytes = int(300_000 * 0.9 * TRAFFIC_WINDOW_SEC)
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            first = _trigger_spike(d, mock_time, t, hot_bytes)
            assert first is not None
            t2 = t + TRAFFIC_ALERT_COOLDOWN_SEC + 1.0
            second = _trigger_spike(d, mock_time, t2, hot_bytes)
            assert second is not None

    def test_no_wire_max_disables(self) -> None:
        """Without set_wire_max_bps() the detector never alerts."""
        d = TrafficSpikeDetector()
        t = 1000.0
        with patch('src.backend.stats.traffic_spike.time') as mock_time:
            mock_time.perf_counter.return_value = t
            d.record(999999, 1)
            mock_time.perf_counter.return_value = t + TRAFFIC_WINDOW_SEC + 0.01
            assert d.check() is None
@@ -0,0 +1,50 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from src.backend.stats.transport import TransportMetrics
class TestTransportMetrics:
    """TransportMetrics tracks RX bytes/frames; harvest(elapsed) folds the
    interval deltas into rates and resets them for the next interval."""

    def test_initial_harvest(self) -> None:
        """A fresh instance harvests all zeros."""
        metrics = TransportMetrics()
        result = metrics.harvest(1.0)
        assert result.rx_bytes == 0
        assert result.bps == 0.0
        assert result.fps == 0.0

    def test_record_bytes(self) -> None:
        """1024 bytes over 0.1 s-equivalent elapsed -> rx total and bps rate."""
        metrics = TransportMetrics()
        metrics.record_bytes(1024)
        result = metrics.harvest(1.0)
        assert result.rx_bytes == 1024
        assert result.bps == 10240.0

    def test_record_frame(self) -> None:
        """Two frames harvested over the interval -> fps == 2.0."""
        metrics = TransportMetrics()
        metrics.record_frame()
        metrics.record_frame()
        result = metrics.harvest(1.0)
        assert result.fps == 2.0

    def test_max_bps_persists(self) -> None:
        """max_bps keeps the highest rate seen even after a slower interval."""
        metrics = TransportMetrics()
        metrics.record_bytes(10000)
        metrics.harvest(1.0)
        metrics.record_bytes(100)
        result = metrics.harvest(1.0)
        assert result.max_bps == 100000.0

    def test_zero_elapsed(self) -> None:
        """Zero elapsed time yields zero rates instead of dividing by zero."""
        metrics = TransportMetrics()
        metrics.record_bytes(100)
        result = metrics.harvest(0.0)
        assert result.bps == 0.0
        assert result.fps == 0.0

    def test_delta_resets_between_harvests(self) -> None:
        """Rates use per-interval deltas (reset on harvest); rx_bytes is cumulative."""
        metrics = TransportMetrics()
        metrics.record_bytes(1000)
        metrics.harvest(1.0)
        result = metrics.harvest(1.0)
        assert result.bps == 0.0
        assert result.rx_bytes == 1000
@@ -0,0 +1,28 @@
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from unittest.mock import patch
from src.backend.uart_transport import validate_uart_port
class TestValidateUartPort:
    """validate_uart_port() returns None for a known port and an error string
    (mentioning the port) otherwise; port enumeration is mocked."""

    @patch('src.backend.uart_transport.list_serial_ports', return_value=['/dev/ttyUSB0', '/dev/ttyUSB1'])
    def test_valid_port_returns_none(self, _mock: object) -> None:
        """A port present in the enumerated list validates cleanly."""
        assert validate_uart_port('/dev/ttyUSB0') is None

    @patch('src.backend.uart_transport.list_serial_ports', return_value=['/dev/ttyUSB0'])
    def test_invalid_port_returns_error(self, _mock: object) -> None:
        """An unknown port yields an error message naming the bad port."""
        result = validate_uart_port('/dev/ttyUSB99')
        assert result is not None
        assert '/dev/ttyUSB99' in result

    @patch('src.backend.uart_transport.list_serial_ports', return_value=['COM3', 'COM4'])
    def test_windows_com_port_valid(self, _mock: object) -> None:
        """COM ports don't exist as filesystem paths — must not use Path.exists()."""
        assert validate_uart_port('COM3') is None

    @patch('src.backend.uart_transport.list_serial_ports', return_value=[])
    def test_empty_port_list(self, _mock: object) -> None:
        """With no ports enumerated at all, any port is rejected."""
        result = validate_uart_port('/dev/ttyUSB0')
        assert result is not None
+1 -2
View File
@@ -40,6 +40,5 @@ tools/ci/cleanup_ignore_lists.py
tools/ci/artifacts_handler.py
tools/ci/get_known_failure_cases_file.py
tools/unit-test-app/**/*
tools/bt/bt_hci_to_btsnoop.py
tools/bt/README.md
tools/ci/dynamic_pipelines/templates/known_generate_test_child_pipeline_warnings.yml
tools/bt/**/*
+2
View File
@@ -46,6 +46,8 @@ examples/system/ota/otatool/otatool_example.py
examples/system/ota/otatool/otatool_example.sh
install.fish
install.sh
tools/bt/ble_log_console/build.sh
tools/bt/ble_log_console/run.sh
tools/check_python_dependencies.py
tools/ci/build_template_app.sh
tools/ci/check_api_violation.sh