diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5286bc182..4ac7a9623 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -327,14 +327,14 @@ build_esp_matter_examples_pytest_H2_idf_v5_1:
script:
- *setup_ot_rcp
- *setup_ot_br
+ - cd ${ESP_MATTER_PATH}/examples/light
+ - echo "CONFIG_ENABLE_MEMORY_PROFILING=y" >> sdkconfig.defaults
- cd ${ESP_MATTER_PATH}
- pip install -r tools/ci/requirements-build.txt
- python tools/ci/build_apps.py ./examples --pytest_h2
- |
if [ "$CI_PIPELINE_SOURCE" == "merge_request_event" ]; then
- python tools/ci/post_results.py --chip esp32h2 --job_name "build_esp_matter_examples_pytest_H2_idf_v5_1" --ref_map_file light_mr_base.map --example "light"
- else
- echo "Not a Merge Request pipeline. Skipping post_results.py."
+ python tools/ci/memory_analyzer.py --chip esp32h2 --job_name "build_esp_matter_examples_pytest_H2_idf_v5_1" --ref_map_file light_mr_base.map --example "light"
fi
build_esp_matter_examples_pytest_C2_idf_v5_1:
@@ -356,15 +356,15 @@ build_esp_matter_examples_pytest_C2_idf_v5_1:
when: always
expire_in: 15 days
script:
+ - cd ${ESP_MATTER_PATH}/examples/light
+ - echo "CONFIG_ENABLE_MEMORY_PROFILING=y" >> sdkconfig.defaults
- cd ${ESP_MATTER_PATH}
- pip install -r tools/ci/requirements-build.txt
- echo "${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}"
- python tools/ci/build_apps.py ./examples --pytest_c2
- |
if [ "$CI_PIPELINE_SOURCE" == "merge_request_event" ]; then
- python tools/ci/post_results.py --chip esp32c2 --job_name "build_esp_matter_examples_pytest_C2_idf_v5_1" --ref_map_file light_mr_base.map --example "light"
- else
- echo "Not a Merge Request pipeline. Skipping post_results.py."
+ python tools/ci/memory_analyzer.py --chip esp32c2 --job_name "build_esp_matter_examples_pytest_C2_idf_v5_1" --ref_map_file light_mr_base.map --example "light"
fi
@@ -403,6 +403,9 @@ build_esp_matter_examples:
- cd ${ESP_MATTER_PATH}/examples/mfg_test_app
- openssl genrsa -out secure_boot_signing_key.pem 3072
+ - cd ${ESP_MATTER_PATH}/examples/light
+ - echo "CONFIG_ENABLE_MEMORY_PROFILING=y" >> sdkconfig.defaults
+
# steps for external platform build for blemesh_bridge app
- cd ${ESP_MATTER_PATH}/examples/bridge_apps/blemesh_bridge
- cp sdkconfig.defaults sdkconfig.defaults.backup
@@ -415,12 +418,8 @@ build_esp_matter_examples:
--parallel-index ${CI_NODE_INDEX:-1}
- |
if [ "$CI_PIPELINE_SOURCE" == "merge_request_event" ] && [ "${CI_NODE_INDEX:-1}" -eq 2 ]; then
- python tools/ci/post_results.py --chip esp32c3 --job_name "build_esp_matter_examples 2/2" --ref_map_file light_mr_base.map --example "light"
- else
- echo "Not a Merge Request pipeline. Skipping post_results.py."
+ python tools/ci/memory_analyzer.py --chip esp32c3 --job_name "build_esp_matter_examples 2/2" --ref_map_file light_mr_base.map --example "light"
fi
-
-
parallel: 2
build_nopytest_remaining_examples_manual:
@@ -527,7 +526,11 @@ pytest_esp32c2_esp_matter_dut:
- rm -rf connectedhomeip/connectedhomeip
- ln -s ${CHIP_SUBMODULE_PATH} connectedhomeip/connectedhomeip
- pip install -r tools/ci/requirements-pytest.txt
- - pytest examples/ --target esp32c2 -m esp_matter_dut --junitxml=XUNIT_RESULT.xml --baud 74880
+ - pytest examples/ --target esp32c2 -m esp_matter_dut --junitxml=XUNIT_RESULT.xml --baud 74880 | tee pytest_c2.log
+ - |
+ if [ "$CI_PIPELINE_SOURCE" == "merge_request_event" ]; then
+ python tools/ci/memory_analyzer.py --log_file pytest_c2.log --chip esp32c2 --example "light"
+ fi
tags: ["esp32c2", "esp_matter_dut"]
pytest_esp32h2_esp_matter_dut:
@@ -542,7 +545,11 @@ pytest_esp32h2_esp_matter_dut:
- rm -rf connectedhomeip/connectedhomeip
- ln -s ${CHIP_SUBMODULE_PATH} connectedhomeip/connectedhomeip
- pip install -r tools/ci/requirements-pytest.txt
- - pytest examples/ --target esp32h2 -m esp_matter_dut --junitxml=XUNIT_RESULT.xml
+ - pytest examples/ --target esp32h2 -m esp_matter_dut --junitxml=XUNIT_RESULT.xml | tee pytest_h2.log
+ - |
+ if [ "$CI_PIPELINE_SOURCE" == "merge_request_event" ]; then
+ python tools/ci/memory_analyzer.py --log_file pytest_h2.log --chip esp32h2 --example "light"
+ fi
tags: ["esp32h2", "esp_matter_dut"]
build_upstream_examples:
diff --git a/examples/light/main/Kconfig.projbuild b/examples/light/main/Kconfig.projbuild
new file mode 100644
index 000000000..6ec0e2df5
--- /dev/null
+++ b/examples/light/main/Kconfig.projbuild
@@ -0,0 +1,11 @@
+menu "Example Configuration"
+
+ config ENABLE_MEMORY_PROFILING
+ bool "Enable Memory Profiling"
+ default n
+ help
+ Enable this option to include memory profiling features in the example.
+ This will allow you to monitor memory usage during runtime.
+
+endmenu
+
diff --git a/examples/light/main/app_main.cpp b/examples/light/main/app_main.cpp
index 7ae6878b8..e5b4e2160 100644
--- a/examples/light/main/app_main.cpp
+++ b/examples/light/main/app_main.cpp
@@ -60,6 +60,19 @@ static const char *s_decryption_key = decryption_key_start;
static const uint16_t s_decryption_key_len = decryption_key_end - decryption_key_start;
#endif // CONFIG_ENABLE_ENCRYPTED_OTA
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+static void memory_profiler_dump_heap_stat(const char *state)
+{
+ ESP_LOGI(TAG,"========== HEAP-DUMP-START ==========\n");
+ ESP_LOGI(TAG,"state: %s\n", state);
+ ESP_LOGI(TAG,"\tDescription\tInternal\n");
+ ESP_LOGI(TAG,"Current Free Memory\t%d\n", heap_caps_get_free_size(MALLOC_CAP_8BIT));
+ ESP_LOGI(TAG,"Largest Free Block\t%d\n", heap_caps_get_largest_free_block(MALLOC_CAP_8BIT | MALLOC_CAP_INTERNAL));
+ ESP_LOGI(TAG,"Min. Ever Free Size\t%d\n", heap_caps_get_minimum_free_size(MALLOC_CAP_8BIT | MALLOC_CAP_INTERNAL));
+ ESP_LOGI(TAG,"========== HEAP-DUMP-END ==========\n");
+}
+#endif
+
static void app_event_cb(const ChipDeviceEvent *event, intptr_t arg)
{
switch (event->Type) {
@@ -69,6 +82,10 @@ static void app_event_cb(const ChipDeviceEvent *event, intptr_t arg)
case chip::DeviceLayer::DeviceEventType::kCommissioningComplete:
ESP_LOGI(TAG, "Commissioning complete");
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("commissioning complete");
+#endif
+
break;
case chip::DeviceLayer::DeviceEventType::kFailSafeTimerExpired:
@@ -85,6 +102,10 @@ static void app_event_cb(const ChipDeviceEvent *event, intptr_t arg)
case chip::DeviceLayer::DeviceEventType::kCommissioningWindowOpened:
ESP_LOGI(TAG, "Commissioning window opened");
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("commissioning window opened");
+#endif
+
break;
case chip::DeviceLayer::DeviceEventType::kCommissioningWindowClosed:
@@ -128,6 +149,9 @@ static void app_event_cb(const ChipDeviceEvent *event, intptr_t arg)
case chip::DeviceLayer::DeviceEventType::kBLEDeinitialized:
ESP_LOGI(TAG, "BLE deinitialized and memory reclaimed");
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("BLE deinitialized");
+#endif
break;
default:
@@ -168,6 +192,10 @@ extern "C" void app_main()
/* Initialize the ESP NVS layer */
nvs_flash_init();
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("Bootup");
+#endif
+
/* Initialize driver */
app_driver_handle_t light_handle = app_driver_light_init();
app_driver_handle_t button_handle = app_driver_button_init();
@@ -180,6 +208,10 @@ extern "C" void app_main()
node_t *node = node::create(&node_config, app_attribute_update_cb, app_identification_cb);
ABORT_APP_ON_FAILURE(node != nullptr, ESP_LOGE(TAG, "Failed to create Matter node"));
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("node created");
+#endif
+
extended_color_light::config_t light_config;
light_config.on_off.on_off = DEFAULT_POWER;
light_config.on_off.lighting.start_up_on_off = nullptr;
@@ -239,6 +271,10 @@ extern "C" void app_main()
err = esp_matter::start(app_event_cb);
ABORT_APP_ON_FAILURE(err == ESP_OK, ESP_LOGE(TAG, "Failed to start Matter, err:%d", err));
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ memory_profiler_dump_heap_stat("matter started");
+#endif
+
/* Starting driver with default values */
app_driver_light_set_defaults(light_endpoint_id);
@@ -256,4 +292,11 @@ extern "C" void app_main()
#endif
esp_matter::console::init();
#endif
+
+#ifdef CONFIG_ENABLE_MEMORY_PROFILING
+ while (true) {
+ memory_profiler_dump_heap_stat("Idle");
+ vTaskDelay(10000 / portTICK_PERIOD_MS);
+ }
+#endif
}
diff --git a/tools/ci/gitlab_api.py b/tools/ci/gitlab_api.py
new file mode 100644
index 000000000..3f72fe1f0
--- /dev/null
+++ b/tools/ci/gitlab_api.py
@@ -0,0 +1,66 @@
+# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
+
+# SPDX-License-Identifier: CC0-1.0
+
+import os
+import requests
+import logging
+
+class GitLabAPI:
+ def __init__(self):
+ self.gitlab_api_url = os.getenv("CI_API_V4_URL")
+ self.gitlab_token = os.getenv("GITLAB_MR_COMMENT_TOKEN")
+ self.ci_project_id = os.getenv("CI_PROJECT_ID")
+ self.ci_merge_request_iid = os.getenv("CI_MERGE_REQUEST_IID")
+
+ if not all([self.gitlab_api_url, self.gitlab_token, self.ci_project_id, self.ci_merge_request_iid]):
+ raise ValueError("Required GitLab environment variables are not set")
+
+ def fetch_merge_request_description(self):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/merge_requests/{self.ci_merge_request_iid}"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ response = requests.get(url, headers=headers)
+ response.raise_for_status()
+ return response.json().get("description", "")
+
+ def update_merge_request_description(self, updated_description):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/merge_requests/{self.ci_merge_request_iid}"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ data = {"description": updated_description}
+ response = requests.put(url, headers=headers, json=data)
+ response.raise_for_status()
+ logging.info("Successfully updated the MR description.")
+
+ def fetch_pipeline_for_commit(self, commit_sha, branch_name="main"):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/pipelines"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ params = {"ref": branch_name, "sha": commit_sha}
+ response = requests.get(url, headers=headers, params=params)
+ response.raise_for_status()
+ pipelines = response.json()
+ if not pipelines:
+ raise ValueError(f"No pipeline found for commit: {commit_sha} on branch: {branch_name}.")
+ return pipelines[0]['id']
+
+ def fetch_merge_request_diff_versions(self):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/merge_requests/{self.ci_merge_request_iid}/versions"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ response = requests.get(url, headers=headers)
+ response.raise_for_status()
+ return response.json()
+
+ def fetch_pipeline_jobs(self, pipeline_id):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/pipelines/{pipeline_id}/jobs"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ response = requests.get(url, headers=headers)
+ response.raise_for_status()
+ return response.json()
+
+ def download_artifact(self, job_id, artifact_path, output_file):
+ url = f"{self.gitlab_api_url}/projects/{self.ci_project_id}/jobs/{job_id}/artifacts/{artifact_path}"
+ headers = {"PRIVATE-TOKEN": self.gitlab_token}
+ with requests.get(url, headers=headers, stream=True) as response:
+ response.raise_for_status()
+ with open(output_file, 'wb') as f:
+ for chunk in response.iter_content(chunk_size=8192):
+ f.write(chunk)
diff --git a/tools/ci/memory_analyzer.py b/tools/ci/memory_analyzer.py
new file mode 100644
index 000000000..5d6fe89ee
--- /dev/null
+++ b/tools/ci/memory_analyzer.py
@@ -0,0 +1,106 @@
+# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
+
+# SPDX-License-Identifier: CC0-1.0
+
+import argparse
+import logging
+import glob
+from memory_data_parser import StaticMemoryParser, DynamicMemoryParser
+from gitlab_api import GitLabAPI
+from results_formatter import ResultsFormatter
+
+def locate_current_map_file(chip, example):
+ pattern = f"examples/{example}/build_{chip}_default/{example}.map"
+ artifact_file_paths = glob.glob(pattern, recursive=True)
+ if not artifact_file_paths:
+ raise FileNotFoundError(f"No map file found for the example {example} with target chip {chip}")
+ return artifact_file_paths[0]
+
+def process_static_memory(gitlab_api, formatter, chip, example, ref_map_file, job_name):
+ try:
+ # Get base commit information
+ diff_versions = gitlab_api.fetch_merge_request_diff_versions()
+ base_version = diff_versions[0]
+ base_commit_sha = base_version["base_commit_sha"]
+
+ # Get pipeline and job information
+ base_commit_pipeline_id = gitlab_api.fetch_pipeline_for_commit(base_commit_sha, branch_name="main")
+ jobs = gitlab_api.fetch_pipeline_jobs(base_commit_pipeline_id)
+
+ target_job_id = next((job["id"] for job in jobs if job["name"] == job_name), None)
+ if not target_job_id:
+ raise ValueError("Target job not found.")
+
+ # Get map files
+ current_map_file = locate_current_map_file(chip, example)
+ artifact_path = f"examples/{example}/build_{chip}_default/{example}.map"
+ gitlab_api.download_artifact(target_job_id, artifact_path, ref_map_file)
+
+ # Process static memory data
+ size_diff_output = StaticMemoryParser.execute_idf_size_command(ref_map_file, current_map_file)
+
+ # Update MR description with static memory results
+ description = gitlab_api.fetch_merge_request_description()
+ description = formatter.update_memory_results_title(description)
+ description = formatter.update_static_memory_results_section(description, chip, example, size_diff_output)
+ gitlab_api.update_merge_request_description(description)
+
+ return True
+ except Exception as e:
+ logging.error(f"Error processing static memory: {str(e)}")
+ return False
+
+def process_dynamic_memory(gitlab_api, formatter, chip, example, log_file):
+ try:
+ # Extract and parse heap dump
+ extracted_lines = DynamicMemoryParser.extract_heap_dump(log_file)
+ parsed_logs = DynamicMemoryParser.parse_heap_dump(extracted_lines)
+
+ if parsed_logs:
+ # Format heap dump data
+ formatted_output = formatter.format_heap_dump(parsed_logs)
+
+ # Update MR description with heap memory results
+ description = gitlab_api.fetch_merge_request_description()
+ description = formatter.update_memory_results_title(description)
+ description = formatter.update_heap_memory_results_section(description, chip, example, formatted_output)
+ gitlab_api.update_merge_request_description(description)
+ return True
+ else:
+ logging.warning("No heap dump data found in the log file.")
+ return False
+ except Exception as e:
+ logging.error(f"Error processing dynamic memory: {str(e)}")
+ return False
+
+def main():
+ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+
+ parser = argparse.ArgumentParser(description="Process and post memory analysis results.")
+ parser.add_argument("--chip", required=True, help="Target chip (e.g., esp32c2, esp32h2)")
+ parser.add_argument("--example", required=True, help="Target example (e.g., light, light_switch)")
+ parser.add_argument("--ref_map_file", help="Reference main branch map file path")
+ parser.add_argument("--job_name", help="Job name for the job id search")
+ parser.add_argument("--log_file", help="Path to the log file for heap dump analysis")
+
+ args = parser.parse_args()
+
+ gitlab_api = GitLabAPI()
+ formatter = ResultsFormatter()
+
+ # Process static memory if required parameters are provided
+ if all([args.ref_map_file, args.job_name]):
+ if process_static_memory(gitlab_api, formatter, args.chip, args.example, args.ref_map_file, args.job_name):
+ logging.info("Static memory analysis completed successfully")
+ else:
+ logging.error("Static memory analysis failed")
+
+ # Process dynamic memory if log file is provided
+ if args.log_file:
+ if process_dynamic_memory(gitlab_api, formatter, args.chip, args.example, args.log_file):
+ logging.info("Dynamic memory analysis completed successfully")
+ else:
+ logging.error("Dynamic memory analysis failed")
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/ci/memory_data_parser.py b/tools/ci/memory_data_parser.py
new file mode 100644
index 000000000..2ee887428
--- /dev/null
+++ b/tools/ci/memory_data_parser.py
@@ -0,0 +1,46 @@
+# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
+
+# SPDX-License-Identifier: CC0-1.0
+
+import subprocess
+
+class StaticMemoryParser:
+ @staticmethod
+ def execute_idf_size_command(old_file_path, new_file_path):
+ try:
+ result = subprocess.run(
+ ["python", "-m", "esp_idf_size", "--diff", old_file_path, new_file_path],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ return result.stdout
+ except subprocess.CalledProcessError as e:
+ raise
+
+class DynamicMemoryParser:
+ @staticmethod
+ def extract_heap_dump(log_file):
+ cmd = f"sed -n '/HEAP-DUMP-START/,/HEAP-DUMP-END/p' {log_file}"
+ result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+ return result.stdout.splitlines()
+
+ @staticmethod
+ def parse_heap_dump(extracted_lines):
+ parsed_logs = []
+ current_state = None
+
+ for line in extracted_lines:
+ if "state:" in line:
+ current_state = line.split("state:")[1].strip()
+ elif "Current Free Memory" in line:
+ current_free_mem = line.split()[-1]
+ elif "Largest Free Block" in line:
+ largest_free_block = line.split()[-1]
+ elif "Min. Ever Free Size" in line:
+ min_ever_free_size = line.split()[-1]
+ parsed_logs.append([current_state, current_free_mem, largest_free_block, min_ever_free_size])
+
+ return parsed_logs
+
+
diff --git a/tools/ci/post_results.py b/tools/ci/post_results.py
deleted file mode 100644
index 7df26e5f2..000000000
--- a/tools/ci/post_results.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
-
-# SPDX-License-Identifier: CC0-1.0
-
-import os
-import subprocess
-import requests
-import glob
-import argparse
-import logging
-import re
-
-# Gitlab Configurations
-gitlab_api_url = os.getenv("CI_API_V4_URL")
-gitlab_token = os.getenv("GITLAB_MR_COMMENT_TOKEN")
-ci_project_id = os.getenv("CI_PROJECT_ID")
-ci_merge_request_iid = os.getenv("CI_MERGE_REQUEST_IID")
-
-
-# Fetch the current GitLab MR description
-def fetch_merge_request_description():
- url = f"{gitlab_api_url}/projects/{ci_project_id}/merge_requests/{ci_merge_request_iid}"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- response = requests.get(url, headers=headers)
- response.raise_for_status()
- return response.json().get("description", "")
-
-# Update the GitLab MR description
-def update_merge_request_description(updated_description):
- url = f"{gitlab_api_url}/projects/{ci_project_id}/merge_requests/{ci_merge_request_iid}"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- data = {"description": updated_description}
- response = requests.put(url, headers=headers, json=data)
- response.raise_for_status()
- print("Successfully updated the MR description.")
-
-def update_memory_results_title(description):
- header_start = "<!-- MEMORY_RESULTS_HEADER_START -->"
- header_end = "<!-- MEMORY_RESULTS_HEADER_END -->"
- if header_start in description and header_end in description:
- return description # Return as is if header already exists
-
- header_section_content = "#### Gitlab CI Memory Numbers (Do Not Edit) \n"
- header_section = f"{header_start}\n{header_section_content}{header_end}"
-
- updated_description = description.strip() + "\n\n" + header_section
- return updated_description
-
-# Updates the memory results section
-def update_memory_results_section(description, chip_name, example, output):
- marker_start = f"<!-- MEMORY_RESULTS_{chip_name}_{example}_START -->"
- marker_end = f"<!-- MEMORY_RESULTS_{chip_name}_{example}_END -->"
-
- chip_section_content = (
- f"<details open><summary>Static Memory Footprint for target: {chip_name}, example: {example}</summary>\n\n"
- f"```{output}```\n"
- f"</details>\n"
- )
-
- chip_section = f"{marker_start}\n{chip_section_content}{marker_end}"
-
- if marker_start in description and marker_end in description:
- updated_description = re.sub(
- rf"{re.escape(marker_start)}.*?{re.escape(marker_end)}",
- chip_section,
- description,
- flags=re.DOTALL,
- )
- else:
- updated_description = description.strip() + "\n\n" + chip_section
-
- return updated_description
-
-# Fetch the id of the pipeline for a branch with the specified commit id (default main branch)
-def fetch_pipeline_for_commit(commit_sha, branch_name="main"):
- url = f"{gitlab_api_url}/projects/{ci_project_id}/pipelines"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- params = {"ref": branch_name, "sha": commit_sha}
- response = requests.get(url, headers=headers, params=params)
- response.raise_for_status()
- pipelines = response.json()
- if not pipelines:
- raise ValueError(f"No pipeline found for commit: {commit_sha} on branch: {branch_name}.")
- return pipelines[0]['id']
-
-# Fetch the versions for the gitlab MR.
-def fetch_merge_request_diff_versions():
- url = f"{gitlab_api_url}/projects/{ci_project_id}/merge_requests/{ci_merge_request_iid}/versions"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- response = requests.get(url, headers=headers)
- response.raise_for_status()
- return response.json()
-
-# Fetch the jobs specific to a pipeline id.
-def fetch_pipeline_jobs(pipeline_id):
- url = f"{gitlab_api_url}/projects/{ci_project_id}/pipelines/{pipeline_id}/jobs"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- response = requests.get(url, headers=headers)
- response.raise_for_status()
- return response.json()
-
-# Download the reference map file for the MR base commit.
-def download_ref_map_file(chip_name, job_id, output_file):
- ref_artifact_path = f"examples/light/build_{chip_name}_default/light.map"
- url = f"{gitlab_api_url}/projects/{ci_project_id}/jobs/{job_id}/artifacts/{ref_artifact_path}"
- headers = {"PRIVATE-TOKEN": gitlab_token}
- with requests.get(url, headers=headers, stream=True) as response:
- response.raise_for_status()
- with open(output_file, 'wb') as f:
- for chunk in response.iter_content(chunk_size=8192):
- f.write(chunk)
-
-# Locate the map file artifact for the current pipeline.
-def locate_current_map_file(chip, example):
- pattern = f"examples/{example}/build_{chip}_default/{example}.map"
- artifact_file_paths = glob.glob(pattern, recursive=True)
- if not artifact_file_paths:
- raise FileNotFoundError("No map file found for the example {example} with target chip {chip}")
- return artifact_file_paths[0]
-
-# Execute esp_idf_size diff command to find increase/decrease in firmware size.
-def execute_idf_size_command(old_file_path, new_file_path):
- try:
- result = subprocess.run(
- ["python", "-m", "esp_idf_size", "--diff", old_file_path, new_file_path],
- capture_output=True,
- text=True,
- check=True,
- )
- return result.stdout
- except subprocess.CalledProcessError as e:
- raise
-
-def main():
-
- logging.basicConfig(level=logging.WARNING, format="%(asctime)s - %(levelname)s - %(message)s")
- parser = argparse.ArgumentParser(description="Process build results and post to GitLab.")
- parser.add_argument("--chip", required=True, help="Specify the chip name (e.g., C2, H2)")
- parser.add_argument("--ref_map_file", required=True, help="Specify the reference main branch map file")
- parser.add_argument("--job_name", required=True, help = "Specify the job name for the job id search")
- parser.add_argument("--example", required=True, help = "Specify the example name for the memory footprint")
- args = parser.parse_args()
-
- try:
- diff_versions = fetch_merge_request_diff_versions()
- base_version = diff_versions[0]
- base_commit_sha = base_version["base_commit_sha"]
-
- base_commit_pipeline_id = fetch_pipeline_for_commit(base_commit_sha, branch_name="main")
- jobs = fetch_pipeline_jobs(base_commit_pipeline_id)
-
- target_job_id = next((job["id"] for job in jobs if job["name"] == args.job_name), None)
- if not target_job_id:
- raise ValueError("Target job not found.")
-
- current_map_file = locate_current_map_file(args.chip, args.example)
- download_ref_map_file(args.chip, target_job_id, args.ref_map_file)
-
- size_diff_output = execute_idf_size_command(args.ref_map_file, current_map_file)
-
- current_description_without_title = fetch_merge_request_description()
- updated_title = update_memory_results_title(current_description_without_title)
- update_merge_request_description(updated_title)
- current_description = fetch_merge_request_description()
- updated_description = update_memory_results_section(
- current_description, args.chip, args.example, size_diff_output
- )
- update_merge_request_description(updated_description)
- except FileNotFoundError as e:
- logging.error(f"Error occurred while posting results to GitLab MR: File not found {e}")
- except Exception as e:
- logging.error(f"Error occurred while posting results to GitLab MR: An Unexpected error occurred:{e}")
-
-if __name__ == "__main__":
- main()
-
-
diff --git a/tools/ci/requirements-build.txt b/tools/ci/requirements-build.txt
index f7d7f6582..ee1bd1710 100644
--- a/tools/ci/requirements-build.txt
+++ b/tools/ci/requirements-build.txt
@@ -1,3 +1,4 @@
idf_build_apps
requests
argparse
+tabulate
diff --git a/tools/ci/requirements-pytest.txt b/tools/ci/requirements-pytest.txt
index c90d19f6f..3683ab373 100644
--- a/tools/ci/requirements-pytest.txt
+++ b/tools/ci/requirements-pytest.txt
@@ -4,3 +4,4 @@ pytest-embedded-qemu~=1.0
pytest-timeout
netifaces
esptool>=4.5
+tabulate
diff --git a/tools/ci/results_formatter.py b/tools/ci/results_formatter.py
new file mode 100644
index 000000000..fb8daf7f4
--- /dev/null
+++ b/tools/ci/results_formatter.py
@@ -0,0 +1,75 @@
+# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
+
+# SPDX-License-Identifier: CC0-1.0
+
+import re
+from tabulate import tabulate
+
+class ResultsFormatter:
+ @staticmethod
+ def update_memory_results_title(description):
+ header_start = "<!-- MEMORY_RESULTS_HEADER_START -->"
+ header_end = "<!-- MEMORY_RESULTS_HEADER_END -->"
+ if header_start in description and header_end in description:
+ return description
+
+ header_section_content = "#### Gitlab CI Memory Numbers (Do Not Edit) \n"
+ header_section = f"{header_start}\n{header_section_content}{header_end}"
+
+ updated_description = description.strip() + "\n\n" + header_section
+ return updated_description
+
+ @staticmethod
+ def update_static_memory_results_section(description, chip_name, example, output):
+ marker_start = f"<!-- STATIC_MEMORY_{chip_name}_{example}_START -->"
+ marker_end = f"<!-- STATIC_MEMORY_{chip_name}_{example}_END -->"
+
+ chip_section_content = (
+ f"<details open><summary>Static Memory Footprint for target: {chip_name}, example: {example}</summary>\n\n"
+ f"```{output}```\n"
+ f"</details>\n"
+ )
+
+ chip_section = f"{marker_start}\n{chip_section_content}{marker_end}"
+
+ if marker_start in description and marker_end in description:
+ updated_description = re.sub(
+ rf"{re.escape(marker_start)}.*?{re.escape(marker_end)}",
+ chip_section,
+ description,
+ flags=re.DOTALL,
+ )
+ else:
+ updated_description = description.strip() + "\n\n" + chip_section
+
+ return updated_description
+
+ @staticmethod
+ def update_heap_memory_results_section(description, chip_name, example, output):
+ marker_start = f"<!-- HEAP_MEMORY_{chip_name}_{example}_START -->"
+ marker_end = f"<!-- HEAP_MEMORY_{chip_name}_{example}_END -->"
+
+ chip_section_content = (
+ f"<details open><summary>Dynamic Memory Footprint for target: {chip_name}, example: {example}</summary>\n\n"
+ f"```{output}\n```\n"
+ f"</details>\n"
+ )
+
+ chip_section = f"{marker_start}\n{chip_section_content}{marker_end}"
+
+ if marker_start in description and marker_end in description:
+ updated_description = re.sub(
+ rf"{re.escape(marker_start)}.*?{re.escape(marker_end)}",
+ chip_section,
+ description,
+ flags=re.DOTALL,
+ )
+ else:
+ updated_description = description.strip() + "\n\n" + chip_section
+
+ return updated_description
+
+ @staticmethod
+ def format_heap_dump(parsed_logs):
+ headers = ["State", "Current Free Memory", "Largest Free Block", "Min. Ever Free Size"]
+ return tabulate(parsed_logs, headers=headers, tablefmt="grid")