ci: backport master changes to 6.0

Fu Hanxi
2026-02-11 14:48:58 +01:00
parent 5ecafbffc1
commit 6ff6d7ad6c
14 changed files with 126 additions and 1342 deletions
+5 -15
@@ -1,21 +1,11 @@
workflow:
rules:
# Disable non-protected, push-triggered pipelines
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && $CI_PIPELINE_SOURCE == "push"'
when: never
# merged result pipelines
- if: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
variables:
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
# else
- if: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA == null || $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA == ""
variables:
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
- when: always
# Place the default settings in `.gitlab/ci/common.yml` instead
include:
- project: "ci/actions/common"
file:
- "templates/stable/default-workflow.yml"
- "templates/idf/deploy-github.yml"
- "templates/idf/deploy-docs.yml"
- ".gitlab/ci/danger.yml"
- ".gitlab/ci/common.yml"
- ".gitlab/ci/rules.yml"
+4 -9
@@ -1,7 +1,6 @@
.build_template:
stage: build
extends:
- .before_script:build
- .after_script:build
image: $ESP_ENV_IMAGE
tags: [build, shiny]
@@ -155,21 +154,17 @@ build_child_pipeline:
job: generate_build_child_pipeline
strategy: depend
generate_disabled_apps_report:
generate_prebuild_report:
extends:
- .build_template
tags: [fast_run, shiny]
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- pipeline_variables
- job: baseline_manifest_sha
optional: true
artifacts:
paths:
- disabled_report.html
- prebuild_report.html
expire_in: 1 week
when: always
script:
- pip install dominate idf-build-apps
- run_cmd python tools/ci/gen_disabled_report.py --output disabled_report.html --verbose --enable-preview-targets
- echo "Report generated at https://${CI_PAGES_HOSTNAME}:${CI_SERVER_PORT}/-/esp-idf/-/jobs/${CI_JOB_ID}/artifacts/disabled_report.html"
- run_cmd idf-ci build collect --format html -o prebuild_report.html
- echo "Report generated at https://${CI_PAGES_HOSTNAME}:${CI_SERVER_PORT}/-/esp-idf/-/jobs/${CI_JOB_ID}/artifacts/prebuild_report.html"
+4 -154
@@ -34,8 +34,7 @@ variables:
# since we're using merged-result pipelines, the last commit should work for most cases
# --prune --prune-tags: in case remote branch or tag is force pushed
GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags"
# we're using .cache folder for caches
GIT_CLEAN_FLAGS: -ffdx -e .cache/
LATEST_GIT_TAG: v6.0-rc1
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
@@ -168,9 +167,6 @@ variables:
# Done after sourcing export.sh so that we could easily invoke the right pip
section_start "upgrade_ci_dependencies" "Upgrading CI dependencies"
pip install --upgrade --upgrade-strategy=eager -r $IDF_PATH/tools/requirements/requirements.ci.txt -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
pip install --upgrade --upgrade-strategy=eager -r $IDF_PATH/tools/requirements/requirements.test-specific.txt -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
fi
section_end "upgrade_ci_dependencies"
REEXPORT_NEEDED=0
@@ -245,155 +241,20 @@ variables:
before_script:
- *common-before_scripts
.before_script:build:
before_script:
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
.after_script:build:
after_script:
- source tools/ci/utils.sh
- *show_ccache_statistics
- *upload_failed_job_log_artifacts
##############################
# Git Strategy Job Templates #
##############################
.git_init: &git_init |
mkdir -p "${CI_PROJECT_DIR}"
cd "${CI_PROJECT_DIR}"
git init
.git_fetch_from_mirror_url_if_exists: &git_fetch_from_mirror_url_if_exists |
# check whether a mirror is configured
if [ -n "${LOCAL_GITLAB_HTTPS_HOST:-}" ] && [ -n "${ESPCI_TOKEN:-}" ]; then
MIRROR_REPO_URL="https://bot:${ESPCI_TOKEN}@${LOCAL_GITLAB_HTTPS_HOST}/${CI_PROJECT_PATH}"
elif [ -n "${LOCAL_GIT_MIRROR:-}" ]; then
MIRROR_REPO_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"
fi
# fetch from mirror first if set
if [ -n "${MIRROR_REPO_URL:-}" ]; then
if git remote -v | grep origin; then
git remote set-url origin "${MIRROR_REPO_URL}"
else
git remote add origin "${MIRROR_REPO_URL}"
fi
# fetching from the mirror URL may fail with an authentication issue
git fetch origin --no-recurse-submodules || true
fi
# set remote url to CI_REPOSITORY_URL
if git remote -v | grep origin; then
git remote set-url origin "${CI_REPOSITORY_URL}"
else
git remote add origin "${CI_REPOSITORY_URL}"
fi
.git_checkout_ci_commit_sha: &git_checkout_ci_commit_sha |
git checkout $CI_COMMIT_SHA
eval "git clean ${GIT_CLEAN_FLAGS}"
# git diff requires two commits, which come from different CI env vars
#
# By default, we use git strategy "clone" with depth 1 to speed up the clone process.
# But for jobs that require running `git diff`, we need to fetch more commits to get the correct diffs.
#
# Since there's no way to know the correct git_depth before the job starts,
# we can't set `GIT_DEPTH` in the job definition.
#
# Set git strategy to "none" and fetch manually instead.
.before_script:fetch:git_diff:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
- |
# Store the diff output in a temporary file
TEMP_FILE=$(mktemp)
# merged results pipelines, by default
if [[ -n $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_SHA
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
git diff --name-only $CI_MERGE_REQUEST_TARGET_BRANCH_SHA...$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
# merge request pipelines, when the MR has conflicts
elif [[ -n $CI_MERGE_REQUEST_DIFF_BASE_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
# other pipelines, like the protected branches pipelines
elif [[ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]]; then
git fetch origin $CI_COMMIT_BEFORE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
else
# pipeline source could be web, scheduler, etc.
git fetch origin $CI_COMMIT_SHA --depth=2 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
fi
- *git_checkout_ci_commit_sha
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
# target test runners may be located in different places
# for runners with a git mirror configured, we fetch from the mirror first, then fetch the HEAD commit
.before_script:fetch:target_test:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
- eval "git fetch --depth=1 ${GIT_FETCH_EXTRA_FLAGS} origin ${CI_COMMIT_SHA}"
- *git_checkout_ci_commit_sha
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
# no submodules
.brew-macos-settings:
variables:
GIT_STRATEGY: none # we do manual git clone to use local mirror
GIT_STRATEGY: fetch
IDF_CCACHE_ENABLE: "0"
CCACHE_DIR: "/var/tmp/cache/idf_ccache"
tags:
- macos-tart
image: macos-sequoia-idf-v6.0
cache: [] # pip cache is created under amd64, and submodules are downloaded with brew mirror, so disable cache here
before_script:
# assert LOCAL_GIT_MIRROR is set
- echo -e "section_start:`date +%s`:check_out\r\e[0Kchecking out from local git mirror, then reset to CI_COMMIT_SHA"
- |
if [ -z "${LOCAL_GIT_MIRROR:-}" ]; then
echo "Error: LOCAL_GIT_MIRROR not set, cannot clone from mirror."
exit 1
fi
- MIRROR_REPO_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"
- cd "${CI_PROJECT_DIR}"
# .cache already exists in CI_PROJECT_DIR, so we can't simply `git clone .`
- git clone -b ${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-${CI_COMMIT_BRANCH}} --depth=1 --recursive --shallow-submodules "${MIRROR_REPO_URL}" tmp
- mv tmp/.git ./
- rm -rf tmp
- git reset --hard
# set remote url back
- git remote set-url origin "${CI_REPOSITORY_URL}"
- eval "git fetch --depth=1 ${GIT_FETCH_EXTRA_FLAGS} origin ${CI_COMMIT_SHA}"
- git checkout FETCH_HEAD
- git submodule update --init --recursive --depth=1
- echo -e "section_end:`date +%s`:check_out\r\e[0K"
- *common-before_scripts
- *setup_tools_and_idf_python_venv
after_script: [] # ccache now is disabled for macos brew runners
timeout: 30m
@@ -421,16 +282,5 @@ default:
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
# gitlab bug, setting them here doesn't work
# - expire_in: https://gitlab.com/gitlab-org/gitlab/-/issues/404563
# - when: https://gitlab.com/gitlab-org/gitlab/-/issues/440672
# artifacts:
# expire_in: 1 week
# when: always
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
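
For reference, the mirror-selection order in the `.git_fetch_from_mirror_url_if_exists` snippet shown above boils down to the following minimal Python sketch (an illustration using the same environment variables; it is not part of the commit):

def pick_mirror_url(env: dict) -> str | None:
    """Pick the local mirror URL, mirroring the shell logic above."""
    # an authenticated local GitLab mirror wins ...
    if env.get('LOCAL_GITLAB_HTTPS_HOST') and env.get('ESPCI_TOKEN'):
        return (f"https://bot:{env['ESPCI_TOKEN']}@"
                f"{env['LOCAL_GITLAB_HTTPS_HOST']}/{env['CI_PROJECT_PATH']}")
    # ... then a plain read-only mirror host, if configured
    if env.get('LOCAL_GIT_MIRROR'):
        return f"{env['LOCAL_GIT_MIRROR']}/{env['CI_PROJECT_PATH']}"
    # otherwise skip the mirror fetch and use CI_REPOSITORY_URL directly
    return None
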
-63
@@ -3,51 +3,6 @@
image: $ESP_ENV_IMAGE
tags: [ deploy ]
check_submodule_sync:
extends:
- .deploy_job_template
- .rules:test:submodule
stage: test_deploy
tags: [ brew, github_sync ]
retry: 2
variables:
GIT_STRATEGY: fetch # use brew local mirror first
SUBMODULES_TO_FETCH: "none"
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
dependencies: []
script:
- git submodule deinit --force .
- rm -rf .git/modules # remove all the cached metadata
# setting the default remote URL to the public one, to resolve relative location URLs
- git config remote.origin.url ${PUBLIC_IDF_URL}
# check that all submodules are correctly synced to the public repository
- git submodule init
- git config --get-regexp '^submodule\..*\.url$' || true
- git submodule update --recursive
- echo "IDF was cloned from ${PUBLIC_IDF_URL} completely"
push_to_github:
extends:
- .deploy_job_template
- .before_script:minimal
- .rules:protected:deploy
needs:
# submodule must be synced before pushing to github
- check_submodule_sync
tags: [ brew, github_sync ]
variables:
GIT_STRATEGY: fetch # use brew local mirror first
GIT_DEPTH: 0 # github needs full record of commits
script:
- add_github_ssh_keys
- git remote remove github &>/dev/null || true
- git remote add github git@github.com:espressif/esp-idf.git
- tools/ci/push_to_github.sh
environment:
name: push_to_github_production
deployment_tier: production
url: "https://github.com/espressif/esp-idf"
deploy_update_SHA_in_esp-dockerfiles:
extends:
- .deploy_job_template
@@ -64,21 +19,3 @@ deploy_update_SHA_in_esp-dockerfiles:
environment:
name: deploy_update_SHA_in_esp-dockerfiles_production
deployment_tier: production
upload_junit_report:
extends:
- .deploy_job_template
tags: [ fast_run, shiny ]
needs:
- pipeline_variables
- job: build_child_pipeline
artifacts: false
script:
- run_cmd idf-ci gitlab download-artifacts --type junit
rules:
- when: always
artifacts:
reports:
junit: XUNIT_RESULT_*.xml
expire_in: 1 week
when: always
+9 -76
@@ -16,9 +16,6 @@
.patterns-example-readme: &patterns-example-readme
- "examples/**/*.md"
.patterns-docs-preview: &patterns-docs-preview
- "docs/**/*"
.if-protected-check: &if-protected-check
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/)'
@@ -58,7 +55,9 @@
check_readme_links:
extends:
- .pre_check_template
tags: ["amd64", "brew"]
variables:
GIT_STRATEGY: "fetch"
tags: ["check_doc_links"]
allow_failure: true
rules:
- <<: *if-protected-check
@@ -100,6 +99,9 @@ check_docs_gh_links:
extends:
- .pre_check_template
- .doc-rules:build:docs-full
variables:
GIT_STRATEGY: "fetch"
tags: ["check_doc_links"]
script:
- cd docs
- build-docs gh-linkcheck
@@ -138,77 +140,6 @@ build_docs_html_partial:
- DOCLANG: "zh_CN"
DOCTGT: "esp32p4"
.deploy_docs_template:
image: $ESP_IDF_DOC_ENV_IMAGE
variables:
DOCS_BUILD_DIR: "${IDF_PATH}/docs/_build/"
PYTHONUNBUFFERED: 1
# ensure all tags are fetched, need to know the latest/stable tag for the docs
GIT_STRATEGY: clone
GIT_DEPTH: 0
stage: test_deploy
tags:
- brew
- amd64
script:
# ensure all tags are fetched, need to know the latest/stable tag for the docs
- git fetch --tags --prune
- add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
- export GIT_VER=$(git describe --always ${PIPELINE_COMMIT_SHA} --)
- deploy-docs
# stage: test_deploy
deploy_docs_preview:
extends:
- .deploy_docs_template
rules:
- <<: *if-label-build_docs
- <<: *if-label-docs_full
- <<: *if-dev-push
changes: *patterns-docs-preview
needs:
- job: build_docs_html_partial
optional: true
- job: build_docs_html_full
optional: true
variables:
TYPE: "preview"
# older branches use DOCS_DEPLOY_KEY, DOCS_SERVER, DOCS_SERVER_USER, DOCS_PATH for preview server so we keep these names for 'preview'
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_DEPLOY_KEY"
DOCS_DEPLOY_SERVER: "$DOCS_SERVER"
DOCS_DEPLOY_SERVER_USER: "$DOCS_SERVER_USER"
DOCS_DEPLOY_PATH: "$DOCS_PATH"
DOCS_DEPLOY_URL_BASE: "https://$DOCS_PREVIEW_SERVER_URL/docs/esp-idf"
environment:
name: deploy_docs_preview
deployment_tier: staging
url: "https://$DOCS_PREVIEW_SERVER_URL/docs/esp-idf"
# stage: post_deploy
deploy_docs_production:
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
extends:
- .deploy_docs_template
- .rules:protected:deploy
stage: post_deploy
dependencies: # set dependencies to null to avoid missing artifacts issue
needs: # ensure runs after push_to_github succeeded
- build_docs_html_full
- job: push_to_github
artifacts: false
variables:
TYPE: "preview"
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_PROD_DEPLOY_KEY"
DOCS_DEPLOY_SERVER: "$DOCS_PROD_SERVER"
DOCS_DEPLOY_SERVER_USER: "$DOCS_PROD_SERVER_USER"
DOCS_DEPLOY_PATH: "$DOCS_PROD_PATH"
DOCS_DEPLOY_URL_BASE: "https://docs.espressif.com/projects/esp-idf"
DEPLOY_STABLE: 1
environment:
name: deploy_docs_production
deployment_tier: production
url: "https://docs.espressif.com/projects/esp-idf"
check_doc_links:
extends:
- .build_docs_template
@@ -217,7 +148,9 @@ check_doc_links:
needs:
- job: deploy_docs_production
artifacts: false
tags: ["build", "amd64", "internet"]
variables:
GIT_STRATEGY: "fetch"
tags: ["check_doc_links"]
artifacts:
when: always
paths:
-2
@@ -320,7 +320,6 @@ build_docker:
test_pytest_qemu:
extends:
- .host_test_template
- .before_script:build
artifacts:
paths:
- XUNIT_RESULT.xml
@@ -355,7 +354,6 @@ test_pytest_qemu:
test_pytest_linux:
extends:
- .host_test_template
- .before_script:build
artifacts:
paths:
- XUNIT_RESULT.xml
+52 -13
@@ -115,17 +115,67 @@ check_version_tag:
check_test_scripts_build_test_rules:
extends:
- .pre_check_template
- .before_script:build
script:
# requires basic pytest dependencies
- python tools/ci/check_build_test_rules.py check-test-scripts examples/ tools/test_apps components
check_submodule_sync:
extends:
- .pre_check_template
- .rules:test:submodule
tags: [ brew, github_sync ]
retry: 2
variables:
GIT_STRATEGY: fetch # use brew local mirror first
SUBMODULES_TO_FETCH: "none"
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
script:
- git submodule deinit --force .
- rm -rf .git/modules # remove all the cached metadata
# setting the default remote URL to the public one, to resolve relative location URLs
- git config remote.origin.url ${PUBLIC_IDF_URL}
# check that all submodules are correctly synced to the public repository
- git submodule init
- git config --get-regexp '^submodule\..*\.url$' || true
- git submodule update --recursive
- echo "IDF was cloned from ${PUBLIC_IDF_URL} completely"
pipeline_variables:
extends:
- .pre_check_template
- .before_script:fetch:git_diff
tags: [fast_run, shiny]
variables:
GIT_STRATEGY: "fetch"
script:
- |
# Store the diff output in a temporary file
TEMP_FILE=$(mktemp)
# merged results pipelines, by default
if [[ -n $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_SHA
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
git diff --name-only $CI_MERGE_REQUEST_TARGET_BRANCH_SHA...$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
# merge request pipelines, when the MR has conflicts
elif [[ -n $CI_MERGE_REQUEST_DIFF_BASE_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
# other pipelines, like the protected branches pipelines
elif [[ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]]; then
git fetch origin $CI_COMMIT_BEFORE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
else
# pipeline source could be web, scheduler, etc.
git fetch origin $CI_COMMIT_SHA --depth=2 ${GIT_FETCH_EXTRA_FLAGS}
git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA > "$TEMP_FILE"
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
fi
# MODIFIED_FILES is a list of changed files; it can be used anywhere
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
@@ -191,17 +241,6 @@ baseline_manifest_sha:
expire_in: 1 week
when: always
gcc_static_analyzer:
extends:
- .pre_check_template
- .rules:build
variables:
CI_CCACHE_DISABLE: 1
ANALYZING_APP: "examples/get-started/hello_world"
script:
- echo "CONFIG_COMPILER_STATIC_ANALYZER=y" >> ${ANALYZING_APP}/sdkconfig.defaults
- idf-build-apps build -p ${ANALYZING_APP}
retry_failed_jobs:
extends:
- .pre_check_template
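
The branching in the `pipeline_variables` script above picks a different pair of commits for `git diff` depending on the pipeline type; a minimal Python sketch of that selection (assumptions only, not part of the commit):

def pick_diff_range(env: dict) -> tuple[str, str, bool]:
    """Return (base, head, use_three_dot) for `git diff --name-only`."""
    null_sha = '0' * 40
    if env.get('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA'):
        # merged-result pipelines: diff target...source (merge-base diff)
        return (env['CI_MERGE_REQUEST_TARGET_BRANCH_SHA'],
                env['CI_MERGE_REQUEST_SOURCE_BRANCH_SHA'], True)
    if env.get('CI_MERGE_REQUEST_DIFF_BASE_SHA'):
        # merge request pipelines when the MR has conflicts
        return env['CI_MERGE_REQUEST_DIFF_BASE_SHA'], env['CI_COMMIT_SHA'], False
    if env.get('CI_COMMIT_BEFORE_SHA', null_sha) != null_sha:
        # push pipelines on protected branches
        return env['CI_COMMIT_BEFORE_SHA'], env['CI_COMMIT_SHA'], False
    # web, scheduler, and other pipeline sources: compare against the parent commit
    return env['CI_COMMIT_SHA'] + '~1', env['CI_COMMIT_SHA'], False
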
+11
@@ -16,6 +16,17 @@ clang_tidy_check:
--limit-file tools/ci/static-analysis-rules.yml
--xtensa-include-dir
gcc_static_analyzer:
extends:
- .pre_check_template
- .rules:patterns:clang_tidy
variables:
CI_CCACHE_DISABLE: 1
ANALYZING_APP: "examples/get-started/hello_world"
script:
- echo "CONFIG_COMPILER_STATIC_ANALYZER=y" >> ${ANALYZING_APP}/sdkconfig.defaults
- idf-build-apps build -p ${ANALYZING_APP}
#
## build stage
## Sonarqube related jobs put here for this reason:
@@ -6,7 +6,6 @@
########################
.dynamic_build_template:
extends:
- .before_script:build
- .after_script:build
image: $ESP_ENV_IMAGE
tags: [build, shiny]
@@ -44,12 +43,11 @@
--modified-files ${MR_MODIFIED_FILES}
.dynamic_target_test_template:
extends:
- .before_script:fetch:target_test
image: $TARGET_TEST_ENV_IMAGE
stage: target_test
timeout: 1 hour
variables:
GIT_STRATEGY: "fetch"
SUBMODULES_TO_FETCH: "none"
# set while generating the pipeline
nodes: ""
@@ -68,10 +66,8 @@
paths:
- XUNIT_RESULT*.xml
- pytest-embedded/
# Child pipeline reports won't be collected in the main one
# https://gitlab.com/groups/gitlab-org/-/epics/8205
# reports:
# junit: XUNIT_RESULT.xml
reports:
junit: XUNIT_RESULT*.xml
when: always
expire_in: 1 week
script:
@@ -12,6 +12,7 @@ all_build_finished:
expire_in: 1 week
when: always
before_script: []
dependencies: []
script:
- echo "all test jobs finished"
-1
@@ -17,7 +17,6 @@ tools/ci/dynamic_pipelines/**/*
tools/ci/envsubst.py
tools/ci/executable-list.txt
tools/ci/fix_empty_prototypes.sh
tools/ci/gen_disabled_report.py
tools/ci/generate_rules.py
tools/ci/get-full-sources.sh
tools/ci/get_all_test_results.py
-980
@@ -1,980 +0,0 @@
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import fnmatch
import json
import os
import subprocess
import sys
from collections import Counter
from datetime import datetime
from dominate import document
from dominate.tags import a
from dominate.tags import button
from dominate.tags import div
from dominate.tags import h1
from dominate.tags import h3
from dominate.tags import input_
from dominate.tags import label
from dominate.tags import li
from dominate.tags import script
from dominate.tags import span
from dominate.tags import strong
from dominate.tags import style
from dominate.tags import table
from dominate.tags import tbody
from dominate.tags import td
from dominate.tags import th
from dominate.tags import thead
from dominate.tags import tr
from dominate.tags import ul
from dominate.util import raw
from idf_build_apps import App
from idf_build_apps.app import AppDeserializer
def run_idf_build_apps_find(
output_file: str = 'apps.json',
verbose: bool = False,
enable_preview_targets: bool = False,
) -> str:
"""
Run idf-build-apps find command to get application list
:param output_file: Output file path
:param verbose: Whether to enable verbose output
:param enable_preview_targets: Whether to enable preview targets
:return: Output file path
"""
cmd = [
'idf-build-apps',
'find',
'--output-format',
'json',
'--include-all-apps',
'--output',
output_file,
]
# Add verbose parameter
if verbose:
cmd.append('--verbose')
# Add enable_preview_targets parameter
if enable_preview_targets:
cmd.append('--enable-preview-targets')
print(f'Running command: {" ".join(cmd)}')
try:
result = subprocess.run(cmd, check=True, capture_output=True, text=True)
print('Command completed successfully')
print(f'stdout: {result.stdout}')
if result.stderr:
print(f'stderr: {result.stderr}')
return output_file
except subprocess.CalledProcessError as e:
print(f'Command failed with exit code {e.returncode}')
print(f'stdout: {e.stdout}')
print(f'stderr: {e.stderr}')
raise
def parse_codeowners(codeowners_path: str) -> list[tuple[str, list[str]]]:
"""
Parse CODEOWNERS file and return a list of (pattern, codeowners) tuples.
The list preserves the order of the rules in the file.
:param codeowners_path: Path to CODEOWNERS file
:return: List of (pattern, codeowners) tuples
"""
codeowners_mapping: list[tuple[str, list[str]]] = []
if not os.path.exists(codeowners_path):
print(f'Warning: CODEOWNERS file not found at {codeowners_path}')
return codeowners_mapping
try:
with open(codeowners_path, encoding='utf-8') as f:
for line_num, line in enumerate(f, 1):
line = line.strip()
# Skip empty lines and comments
if not line or line.startswith('#'):
continue
# Parse pattern and codeowners
parts = line.split()
if len(parts) < 2:
print(f'Warning: Invalid CODEOWNERS line {line_num}: {line}')
continue
pattern = parts[0]
codeowners = parts[1:]
# Remove @ symbol and the long prefix from codeowners if present
codeowners = [owner.lstrip('@').replace('esp-idf-codeowners/', '') for owner in codeowners]
codeowners_mapping.append((pattern, codeowners))
except Exception as e:
print(f'Error reading CODEOWNERS file: {e}')
return codeowners_mapping
def match_codeowners(app_dir: str, codeowners_mapping: list[tuple[str, list[str]]]) -> list[str]:
"""
Match app directory against CODEOWNERS patterns, supporting wildcards ('*' and '**').
The matching traverses up from the app's directory and respects the 'last match wins' rule.
:param app_dir: Application directory path (e.g., './components/esp_driver_touch_sens/test_apps/touch_sens')
:param codeowners_mapping: List of (pattern, codeowners) tuples, in file order
:return: List of matching codeowners from the last matching rule
"""
# 1. Normalize the app_dir path
# Remove leading './' and ensure it starts with a single '/'
if app_dir.startswith('./'):
app_dir = app_dir[1:]
if not app_dir.startswith('/'):
app_dir = '/' + app_dir
app_dir = os.path.normpath(app_dir)
# 2. Generate all parent paths
parent_paths = []
current_path = app_dir
while True:
parent_paths.append(current_path)
if current_path == '/':
break
parent, _ = os.path.split(current_path)
if parent == current_path: # Root reached
break
current_path = parent
# 3. Iterate through rules in reverse to find the last match
for pattern, owners in reversed(codeowners_mapping):
# The pattern needs to be anchored to the root for matching
if not pattern.startswith('/'):
pattern = '/**/' + pattern # Handles patterns like '*.py' or 'docs' anywhere
for path in parent_paths:
if fnmatch.fnmatch(path, pattern) or fnmatch.fnmatch(path + '/', pattern):
return owners
# Fallback to the root-level wildcard rule if no other match is found
for pattern, owners in reversed(codeowners_mapping):
if pattern == '*':
return owners
return []
def add_codeowners_to_apps(apps: list[App], codeowners_mapping: list[tuple[str, list[str]]]) -> dict[str, list[str]]:
"""
Match apps to codeowners and return a dictionary mapping app paths to their owners.
If an app does not match any rule, it is assigned to the 'others' group.
:param apps: List of App objects
:param codeowners_mapping: List of (pattern, codeowners) tuples, in file order
:return: A dictionary mapping app.app_dir to a list of codeowners
"""
app_to_owners_map = {}
for app in apps:
matched_owners = match_codeowners(app.app_dir, codeowners_mapping)
if not matched_owners:
app_to_owners_map[app.app_dir] = ['others']
else:
app_to_owners_map[app.app_dir] = matched_owners
return app_to_owners_map
def load_apps_from_json(json_file: str) -> list[App]:
"""
Load application list from JSON file (clean version without codeowners)
"""
apps = []
with open(json_file, encoding='utf-8') as f:
if json_file.endswith('.json'):
# If it's JSON format, try to parse as array
try:
data = json.load(f)
if isinstance(data, list):
for app_data in data:
# Handle build_system compatibility: 'idf_cmake' -> 'cmake'
if app_data.get('build_system') == 'idf_cmake':
app_data['build_system'] = 'cmake'
app = AppDeserializer.from_json(json.dumps(app_data))
apps.append(app)
else:
# If not an array, try line-by-line parsing
f.seek(0)
for line in f:
line = line.strip()
if line:
app = AppDeserializer.from_json(line)
apps.append(app)
except json.JSONDecodeError:
# If JSON parsing fails, try line-by-line parsing
f.seek(0)
for line in f:
line = line.strip()
if line:
try:
app = AppDeserializer.from_json(line)
apps.append(app)
except Exception as e:
print(f'Warning: Failed to parse line: {line[:100]}... Error: {e}')
else:
# If not JSON format, parse line by line
for line in f:
line = line.strip()
if line:
try:
app = AppDeserializer.from_json(line)
apps.append(app)
except Exception as e:
print(f'Warning: Failed to parse line: {line[:100]}... Error: {e}')
print(f'Loaded {len(apps)} apps from {json_file}')
return apps
def generate_disabled_report(apps: list[App], app_to_owners_map: dict[str, list[str]], report_path: str) -> None:
"""Generate disabled report"""
# Categorize applications
cant_build_temp = []
can_build_cant_test_temp = []
cant_build_not_temp = []
can_build_cant_test_not_temp = []
can_test = []
all_codeowners: set[str] = set()
all_targets: set[str] = set()
owner_app_counts: Counter[str] = Counter()
target_app_counts: Counter[str] = Counter()
for app in apps:
# Get owners from the map
app_owners = app_to_owners_map.get(app.app_dir, [])
# Update owner app counts
for owner in app_owners:
owner_app_counts[owner] += 1
# Update target app counts
all_targets.add(app.target)
target_app_counts[app.target] += 1
# Check for unsupported build_status values and abort if found
if app.build_status not in ['should be built', 'disabled']:
print(
f'ERROR: Found unsupported build_status "{app.build_status}" for app {app.app_dir}, target {app.target}'
)
print(
'This task only supports "should be built" and "disabled" status. Please fix the build configuration.'
)
sys.exit(1)
# Map build_status to the expected status values
if app.build_status == 'should be built':
if app.test_comment is None:
status = 'can_build_and_test'
else:
status = 'can_build_no_test'
else: # if app.build_status == 'disabled':
status = 'cannot_build'
# Categorize apps based on status and temporary flags
if status == 'can_build_and_test':
can_test.append(app)
elif status == 'cannot_build':
# Handle cases where build_comment might be None for a disabled app
build_comment = app.build_comment or 'Reason not specified'
if 'temporary' in build_comment.lower():
cant_build_temp.append(app)
else:
cant_build_not_temp.append(app)
elif status == 'can_build_no_test':
# Handle cases where test_comment might be None
test_comment = app.test_comment or 'Reason not specified'
if 'temporary' in test_comment.lower():
can_build_cant_test_temp.append(app)
else:
can_build_cant_test_not_temp.append(app)
all_codeowners.update(app_owners)
# Create HTML document
doc = document(title='Build and Test Status Report')
def get_css_styles() -> str:
"""Return CSS styles"""
return """
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
line-height: 1.6;
margin: 0;
padding: 20px;
background-color: #f5f5f5;
}
.container {
max-width: 1200px;
margin: 0 auto;
background-color: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}
h1 {
color: #333;
text-align: center;
margin-bottom: 30px;
border-bottom: 3px solid #007acc;
padding-bottom: 10px;
}
h2 {
color: #007acc;
margin-top: 40px;
margin-bottom: 20px;
padding: 10px;
background-color: #f8f9fa;
border-left: 4px solid #007acc;
border-radius: 4px;
}
.navigation {
background-color: #f8f9fa;
padding: 20px;
border-radius: 8px;
margin-bottom: 30px;
border: 1px solid #dee2e6;
}
.navigation h3 {
margin-top: 0;
color: #495057;
margin-bottom: 15px;
}
.nav-links {
display: flex;
flex-wrap: wrap;
gap: 10px;
}
.nav-link {
background-color: #007acc;
color: white;
padding: 8px 16px;
border-radius: 20px;
text-decoration: none;
font-size: 0.9em;
transition: background-color 0.3s;
}
.nav-link:hover {
background-color: #005a9e;
}
.control-buttons {
text-align: center;
margin: 20px 0;
}
.control-btn {
background-color: #28a745;
color: white;
border: none;
padding: 10px 20px;
border-radius: 5px;
cursor: pointer;
margin: 0 10px;
font-size: 0.9em;
transition: background-color 0.3s;
}
.control-btn:hover {
background-color: #218838;
}
.control-btn.collapse-all {
background-color: #dc3545;
}
.control-btn.collapse-all:hover {
background-color: #c82333;
}
.filters-wrapper {
display: flex;
gap: 20px;
margin-bottom: 20px;
justify-content: center;
}
.filter-container {
flex: 1;
max-width: 500px;
margin-bottom: 20px;
text-align: center;
padding: 15px;
border: 1px solid #dee2e6;
border-radius: 8px;
background-color: #f8f9fa;
}
#codeownerFilter {
padding: 10px;
border-radius: 5px;
border: 1px solid #ccc;
font-size: 1em;
}
table {
width: 100%;
border-collapse: collapse;
margin-bottom: 30px;
box-shadow: 0 2px 8px rgba(0,0,0,0.1);
}
th, td {
padding: 12px;
text-align: left;
border-bottom: 1px solid #ddd;
}
th {
background-color: #007acc;
color: white;
font-weight: 600;
}
tr:nth-child(even) {
background-color: #f8f9fa;
}
tr:hover {
background-color: #e3f2fd;
}
.summary {
background-color: #e8f5e8;
padding: 20px;
border-radius: 8px;
margin-bottom: 30px;
border-left: 4px solid #4caf50;
}
.summary h3 {
margin-top: 0;
color: #2e7d32;
}
.summary ul {
margin: 10px 0;
padding-left: 20px;
}
.summary li {
margin: 5px 0;
}
.category-summary {
background-color: #f8f9fa;
padding: 15px;
border-radius: 6px;
margin-bottom: 20px;
border-left: 4px solid #007acc;
}
.category-summary h3 {
margin: 0;
color: #007acc;
display: flex;
justify-content: space-between;
align-items: center;
}
.category-summary .count {
background-color: #007acc;
color: white;
padding: 4px 8px;
border-radius: 12px;
font-size: 0.9em;
font-weight: bold;
}
.category-details {
margin-left: 20px;
margin-bottom: 30px;
}
.directory-info {
background-color: #fff3cd;
padding: 10px;
border-radius: 4px;
margin-bottom: 15px;
border-left: 3px solid #ffc107;
font-family: monospace;
font-size: 0.9em;
cursor: pointer;
transition: background-color 0.3s;
}
.directory-info:hover {
background-color: #ffeaa7;
}
.total-count {
color: #666;
font-size: 0.9em;
margin-left: 10px;
}
.empty-section {
text-align: center;
color: #666;
font-style: italic;
padding: 20px;
background-color: #f8f9fa;
border-radius: 4px;
}
.timestamp {
text-align: center;
color: #666;
font-size: 0.9em;
margin-bottom: 30px;
}
.table-container {
margin-bottom: 20px;
}
.table-header {
background-color: #f8f9fa;
padding: 10px 15px;
border-radius: 4px;
cursor: pointer;
border: 1px solid #dee2e6;
transition: background-color 0.2s;
}
.table-header:hover {
background-color: #e9ecef;
}
.table-header h4 {
margin: 0;
display: flex;
justify-content: space-between;
align-items: center;
color: #495057;
}
.toggle-icon {
font-size: 0.8em;
transition: transform 0.2s;
}
.table-content {
margin-top: 10px;
}
.app-row {
display: table-row;
}
.checkbox-group {
display: flex;
flex-wrap: wrap;
gap: 15px;
justify-content: center;
margin-top: 10px;
}
.checkbox-label {
display: flex;
align-items: center;
cursor: pointer;
font-size: 0.9em;
}
.checkbox-label input[type="checkbox"] {
margin-right: 5px;
}
.owner-label {
margin-left: 10px;
font-size: 0.9em;
color: #555;
}
"""
def create_summary_section() -> None:
"""Create summary section"""
with div(cls='summary'):
h3('Summary')
with ul():
li(strong('Total apps analyzed: '), str(len(apps)))
li(strong('Build temporarily disabled: '), str(len(cant_build_temp)))
li(strong('Test temporarily disabled: '), str(len(can_build_cant_test_temp)))
li(strong('Build disabled permanently: '), str(len(cant_build_not_temp)))
li(strong('Test disabled permanently: '), str(len(can_build_cant_test_not_temp)))
li(strong('Normal: '), str(len(can_test)))
def create_navigation_section() -> None:
"""Create navigation section with links to each category"""
with div(cls='navigation'):
h3('Quick Navigation')
with div(cls='nav-links'):
for category_id, title, _, _ in categories:
a(title, href=f'#{category_id}', cls='nav-link')
def create_filter_group(
title: str,
filter_type: str,
items: list[str],
counts: Counter,
total_count: int,
) -> None:
"""Create a generic filter checkbox group with app counts"""
with div(cls='filter-container'):
strong(title, style='display: block; margin-bottom: 10px;')
with div(id=f'{filter_type}Filter', cls='checkbox-group'):
# 'All' checkbox
with label(cls='checkbox-label'):
input_(
type='checkbox',
id=f'{filter_type}-all',
value='all',
onchange=f'toggleAll(this, "{filter_type}")',
checked=True,
)
span(
f'All ({total_count})',
style='margin-left: 5px; font-weight: bold;',
)
# Individual item checkboxes
for item in items:
count = counts.get(item, 0)
with label(cls='checkbox-label'):
input_(
type='checkbox',
cls=f'{filter_type}-checkbox',
value=item,
onchange='applyFilters()',
checked=True,
)
span(f'{item} ({count})', style='margin-left: 5px;')
def create_control_buttons() -> None:
"""Create control buttons for expand/collapse all"""
with div(cls='control-buttons'):
button('Expand All', onclick='expandAll()', cls='control-btn')
button('Collapse All', onclick='collapseAll()', cls='control-btn collapse-all')
def create_category_section(
category_id: str,
title: str,
apps: list[App],
all_apps: list[App],
show_status: bool = False,
) -> None:
"""Create category section"""
with div(cls='category-summary'):
with h3():
span(title)
span(str(len(apps)), cls='count')
# Always show section content (no collapse)
with div(cls='category-details', id=category_id, style='display: block;'):
if apps:
# Group by full directory path (preserve last level directory)
dir_groups: dict[str, list[App]] = {}
for app in apps:
# Use full path as grouping key
dir_name = app.app_dir
if dir_name not in dir_groups:
dir_groups[dir_name] = []
dir_groups[dir_name].append(app)
# Calculate total apps for each directory across all categories
total_apps_by_dir = {}
for app in all_apps:
dir_name = app.app_dir
if dir_name not in total_apps_by_dir:
total_apps_by_dir[dir_name] = 0
total_apps_by_dir[dir_name] += 1
for dir_name, apps_in_dir in dir_groups.items():
total_apps = total_apps_by_dir.get(dir_name, len(apps_in_dir))
# Determine the owners for this directory group
dir_owners = set()
for app in apps_in_dir:
dir_owners.update(app_to_owners_map.get(app.app_dir, []))
owners_str = ', '.join(sorted(list(dir_owners)))
with div(cls='directory-info', onclick=f"toggleTable('{category_id}-{hash(dir_name)}')"):
span(
'+',
cls='toggle-icon',
style='float: left; color: #666; margin-right: 8px; font-weight: bold;',
)
strong(f'{dir_name.replace("./", "")}')
span(f' ({len(apps_in_dir)}/{total_apps} apps)', style='color: #666;')
span(f'Owners: {owners_str}', cls='owner-label')
# Table can be collapsed, controlled by table header row
with div(cls='table-container'):
with div(cls='table-content', id=f'{category_id}-{hash(dir_name)}', style='display: none;'):
with table():
with thead(cls='table-header'):
with tr():
th('Target')
th('Config')
if show_status:
th('Status')
else:
th('Reason')
with tbody():
for app in apps_in_dir:
app_owners = app_to_owners_map.get(app.app_dir, [])
with tr(
cls='app-row',
data_codeowners=','.join(app_owners),
):
td(app.target)
td(app.config_name or '-')
if show_status:
td('Success')
else:
reason = (
app.build_comment
if app.build_status == 'disabled'
else app.test_comment
)
td(reason or 'Reason not specified')
else:
div('No apps in this category', cls='empty-section')
def get_javascript() -> str:
"""Return JavaScript code"""
return """
function toggleTable(id) {
const content = document.getElementById(id);
if (!content) {
console.error('Content element not found:', id);
return;
}
const directoryInfo = content.parentElement.previousElementSibling;
if (!directoryInfo) {
console.error('Directory info element not found');
return;
}
const icon = directoryInfo.querySelector('.toggle-icon');
if (content.style.display === 'none' || content.style.display === '') {
content.style.display = 'block';
if (icon) icon.textContent = '-';
} else {
content.style.display = 'none';
if (icon) icon.textContent = '+';
}
}
function expandAll() {
const allContents = document.querySelectorAll('.table-content');
const allIcons = document.querySelectorAll('.toggle-icon');
allContents.forEach(content => {
content.style.display = 'block';
});
allIcons.forEach(icon => {
icon.textContent = '-';
});
}
function collapseAll() {
const allContents = document.querySelectorAll('.table-content');
const allIcons = document.querySelectorAll('.toggle-icon');
allContents.forEach(content => {
content.style.display = 'none';
});
allIcons.forEach(icon => {
icon.textContent = '+';
});
}
function applyFilters() {
// Get selected codeowners
const selectedCodeowners = Array.from(
document.querySelectorAll('.codeowner-checkbox:checked')
).map(cb => cb.value);
// Get selected targets
const selectedTargets = Array.from(
document.querySelectorAll('.target-checkbox:checked')
).map(cb => cb.value);
// Update 'All' checkbox states
const allCodeownerCheckbox = document.getElementById('codeowner-all');
const allCodeownerCheckboxes = document.querySelectorAll('.codeowner-checkbox');
allCodeownerCheckbox.checked = selectedCodeowners.length === allCodeownerCheckboxes.length;
const allTargetCheckbox = document.getElementById('target-all');
const allTargetCheckboxes = document.querySelectorAll('.target-checkbox');
allTargetCheckbox.checked = selectedTargets.length === allTargetCheckboxes.length;
// Filter rows and hide empty directory sections
const directorySections = document.querySelectorAll('.directory-info');
directorySections.forEach(section => {
const tableContainer = section.nextElementSibling;
if (!tableContainer) return;
const rows = tableContainer.querySelectorAll('.app-row');
let visibleRows = 0;
rows.forEach(row => {
const codeowners = row.getAttribute('data-codeowners').split(',');
// Assumes target is the first cell
const target = row.querySelector('td:first-child').textContent;
const codeownerMatch = selectedCodeowners.some(owner => codeowners.includes(owner));
const targetMatch = selectedTargets.includes(target);
if (codeownerMatch && targetMatch) {
row.style.display = 'table-row';
visibleRows++;
} else {
row.style.display = 'none';
}
});
// Hide the entire directory section if no rows are visible
if (visibleRows === 0) {
section.style.display = 'none';
tableContainer.style.display = 'none';
} else {
section.style.display = 'block';
tableContainer.style.display = 'block';
}
});
}
function toggleAll(checkbox, filterType) {
const allCheckboxes = document.querySelectorAll(`.${filterType}-checkbox`);
allCheckboxes.forEach(cb => {
cb.checked = checkbox.checked;
});
applyFilters(); // Apply filters immediately
}
// Initial filter call to show all rows at the beginning
document.addEventListener('DOMContentLoaded', function() {
applyFilters();
});
"""
# Define category configuration
categories = [
('cant-build-temp', '1. Build temporarily disabled', cant_build_temp, False),
('can-build-cant-test-temp', '2. Test temporarily disabled', can_build_cant_test_temp, False),
('cant-build-not-temp', '3. Build disabled permanently', cant_build_not_temp, False),
('can-build-cant-test-not_temp', '4. Test disabled permanently', can_build_cant_test_not_temp, False),
('can-test', '5. Normal', can_test, True),
]
with doc.head:
style(get_css_styles())
script(raw(get_javascript()))
with doc:
with div(cls='container'):
h1('Build and Test Status Report')
div(f'Generated at: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}', cls='timestamp')
# Create summary section
create_summary_section()
# Create navigation section
create_navigation_section()
# Create filter section and pass owner counts
with div(cls='filters-wrapper'):
create_filter_group(
'Filter by Codeowner:',
'codeowner',
sorted(list(all_codeowners)),
owner_app_counts,
len(apps),
)
create_filter_group(
'Filter by Target:',
'target',
sorted(list(all_targets)),
target_app_counts,
len(apps),
)
# Create control buttons
create_control_buttons()
# Generate each category using configuration
for category_id, title, apps_list, show_status in categories:
create_category_section(category_id, title, apps_list, apps, show_status)
# Write to file
with open(report_path, 'w', encoding='utf-8') as f:
f.write(doc.render())
def main() -> int:
parser = argparse.ArgumentParser(
description='Generate a report of disabled and skipped builds/tests',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Basic usage (uses default paths: examples, tools/test_apps, components)
python gen_disabled_report.py --output report.html
# Use existing JSON file
python gen_disabled_report.py --input apps.json --output report.html
# Enable verbose output
python gen_disabled_report.py --input apps.json --output report.html --verbose
""",
)
# Input options
parser.add_argument('--input', type=str, help='Input JSON file containing apps data')
# Output options
parser.add_argument(
'--output',
type=str,
default='disabled_report.html',
help='Output report file path (default: disabled_report.html)',
)
# idf-build-apps find options
parser.add_argument(
'--temp-json',
type=str,
default='apps.json',
help='Temporary JSON file path for idf-build-apps find output (default: apps.json)',
)
parser.add_argument('--verbose', action='store_true', help='Enable verbose output')
parser.add_argument('--enable-preview-targets', action='store_true', help='Enable preview targets')
args = parser.parse_args()
try:
# Always run idf-build-apps find to get the most up-to-date data
print('Running idf-build-apps find to generate app list...')
input_file = run_idf_build_apps_find(
output_file=args.temp_json,
verbose=args.verbose,
enable_preview_targets=args.enable_preview_targets,
)
# Load application data
print(f'Loading apps from {input_file}...')
apps = load_apps_from_json(input_file)
# Add codeowners information
print('Adding codeowners information...')
idf_path = os.environ.get('IDF_PATH')
if not idf_path:
raise ValueError('IDF_PATH environment variable is not set')
codeowners_path = os.path.join(idf_path, '.gitlab', 'CODEOWNERS')
codeowners_mapping = parse_codeowners(codeowners_path)
app_to_owners_map = add_codeowners_to_apps(apps, codeowners_mapping)
# Generate report
print(f'Generating report to {args.output}...')
generate_disabled_report(apps, app_to_owners_map, args.output)
print(f'Report generated successfully: {args.output}')
return 0
except Exception as e:
print(f'Error: {e}')
if args.verbose:
import traceback
traceback.print_exc()
return 1
if __name__ == '__main__':
sys.exit(main())
+29 -21
@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2020-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2020-2026 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# internal use only for CI
# some CI related util functions
@@ -13,7 +13,7 @@ from functools import cached_property
IDF_PATH: str = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
def get_submodule_dirs(full_path: bool = False) -> t.List[str]:
def get_submodule_dirs(full_path: bool = False) -> list[str]:
"""
To avoid issues that could be introduced by multiple OSes or additional dependencies,
we use python and git to get this output
@@ -71,7 +71,7 @@ def is_executable(full_path: str) -> bool:
return os.access(full_path, os.X_OK)
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> list[str]:
"""
Get the result of git ls-files
:param path: path to run git ls-files
@@ -98,11 +98,11 @@ def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
return [os.path.join(path, f) for f in files] if full_path else files
def to_list(s: t.Any) -> t.List[t.Any]:
def to_list(s: t.Any) -> list[t.Any]:
if not s:
return []
if isinstance(s, (set, tuple)):
if isinstance(s, set | tuple):
return list(s)
if isinstance(s, list):
@@ -113,8 +113,8 @@ def to_list(s: t.Any) -> t.List[t.Any]:
class GitlabYmlConfig:
def __init__(self, root_yml_filepath: str = os.path.join(IDF_PATH, '.gitlab-ci.yml')) -> None:
self._config: t.Dict[str, t.Any] = {}
self._defaults: t.Dict[str, t.Any] = {}
self._config: dict[str, t.Any] = {}
self._defaults: dict[str, t.Any] = {}
self._load(root_yml_filepath)
@@ -127,6 +127,14 @@ class GitlabYmlConfig:
# expanding "include"
for item in root_yml.pop('include', []) or []:
if isinstance(item, dict):
if 'project' in item:
continue
elif 'local' in item:
item = item['local']
else:
continue
all_config.update(yaml.load(open(os.path.join(IDF_PATH, item)), Loader=yaml.FullLoader))
if 'default' in all_config:
@@ -135,41 +143,41 @@ class GitlabYmlConfig:
self._config = all_config
# anchor is the string that will be reused in templates
self._anchor_keys: t.Set[str] = set()
self._anchor_keys: set[str] = set()
# template is a dict that will be extended
self._template_keys: t.Set[str] = set()
self._used_template_keys: t.Set[str] = set() # tracing the used templates
self._template_keys: set[str] = set()
self._used_template_keys: set[str] = set() # tracing the used templates
# job is a dict that will be executed
self._job_keys: t.Set[str] = set()
self._job_keys: set[str] = set()
self.expand_extends()
@property
def default(self) -> t.Dict[str, t.Any]:
def default(self) -> dict[str, t.Any]:
return self._defaults
@property
def config(self) -> t.Dict[str, t.Any]:
def config(self) -> dict[str, t.Any]:
return self._config
@cached_property
def global_keys(self) -> t.List[str]:
def global_keys(self) -> list[str]:
return ['default', 'include', 'workflow', 'variables', 'stages']
@cached_property
def anchors(self) -> t.Dict[str, t.Any]:
def anchors(self) -> dict[str, t.Any]:
return {k: v for k, v in self.config.items() if k in self._anchor_keys}
@cached_property
def jobs(self) -> t.Dict[str, t.Any]:
def jobs(self) -> dict[str, t.Any]:
return {k: v for k, v in self.config.items() if k in self._job_keys}
@cached_property
def templates(self) -> t.Dict[str, t.Any]:
def templates(self) -> dict[str, t.Any]:
return {k: v for k, v in self.config.items() if k in self._template_keys}
@cached_property
def used_templates(self) -> t.Set[str]:
def used_templates(self) -> set[str]:
return self._used_template_keys
def expand_extends(self) -> None:
@@ -180,7 +188,7 @@ class GitlabYmlConfig:
if k in self.global_keys:
continue
if isinstance(v, (str, list)):
if isinstance(v, str | list):
self._anchor_keys.add(k)
elif k.startswith('.if-'):
self._anchor_keys.add(k)
@@ -201,7 +209,7 @@ class GitlabYmlConfig:
for k in self._job_keys:
self._expand_extends(k)
def _merge_dict(self, d1: t.Dict[str, t.Any], d2: t.Dict[str, t.Any]) -> t.Any:
def _merge_dict(self, d1: dict[str, t.Any], d2: dict[str, t.Any]) -> t.Any:
for k, v in d2.items():
if k in d1:
if isinstance(v, dict) and isinstance(d1[k], dict):
@@ -213,7 +221,7 @@ class GitlabYmlConfig:
return d1
def _expand_extends(self, name: str) -> t.Dict[str, t.Any]:
def _expand_extends(self, name: str) -> dict[str, t.Any]:
extends = to_list(self.config[name].pop('extends', None))
if not extends:
return self.config[name] # type: ignore
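
As a small, self-contained illustration (an approximation, not the project code) of the include-expansion rule added to `_load` above — skip `project:` includes, inline `local:` ones, and pass plain string entries through:

def select_local_includes(include_items: list) -> list[str]:
    """Keep only local include paths, mirroring the new branch in _load()."""
    locals_: list[str] = []
    for item in include_items:
        if isinstance(item, dict):
            if 'project' in item:        # cross-project includes are not expanded
                continue
            if 'local' in item:          # dict form: {'local': '.gitlab/ci/x.yml'}
                locals_.append(item['local'])
            continue                     # any other dict form is ignored
        locals_.append(item)             # plain string form
    return locals_

print(select_local_includes([
    {'project': 'ci/actions/common', 'file': ['templates/stable/default-workflow.yml']},
    {'local': '.gitlab/ci/common.yml'},
    '.gitlab/ci/rules.yml',
]))  # -> ['.gitlab/ci/common.yml', '.gitlab/ci/rules.yml']
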
+8 -1
@@ -5,7 +5,14 @@ function add_ssh_keys() {
mkdir -p ~/.ssh
chmod 700 ~/.ssh
echo -n "${key_string}" >~/.ssh/id_rsa_base64
base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 >~/.ssh/id_rsa
# Detect base64 implementation via --help output
if base64 --help 2>&1 | grep -q -- '--ignore-garbage'; then
# GNU coreutils base64
base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 >~/.ssh/id_rsa
else
# macOS/BSD base64 - requires stdin or -i flag
base64 --decode -i ~/.ssh/id_rsa_base64 -o ~/.ssh/id_rsa
fi
chmod 600 ~/.ssh/id_rsa
}
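
A rough cross-check of the same GNU-vs-BSD detection in Python (an illustrative sketch only; the probing and file handling here are assumptions, not part of the commit):

import subprocess

def decode_key(src: str, dst: str) -> None:
    """Decode a base64-encoded key file with either GNU or macOS/BSD base64."""
    probe = subprocess.run(['base64', '--help'], capture_output=True, text=True)
    if '--ignore-garbage' in (probe.stdout + probe.stderr):
        # GNU coreutils: reads the file argument, tolerates stray characters
        with open(dst, 'wb') as out:
            subprocess.run(['base64', '--decode', '--ignore-garbage', src],
                           stdout=out, check=True)
    else:
        # macOS/BSD: takes explicit -i/-o arguments
        subprocess.run(['base64', '--decode', '-i', src, '-o', dst], check=True)

Probing `--help` works because GNU base64 prints its help text and exits 0, while macOS/BSD base64 prints a usage message and exits non-zero, which is why the probe above does not use check=True.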