Merge branch 'ci/enhace-dynamic-pipeline-with-bin-size-diff' into 'master'

ci: enhance dynamic pipeline with bin size diff

Closes IDFCI-2213 and IDF-7232

See merge request espressif/esp-idf!35356
Aleksei Apaseev 2025-01-21 19:31:25 +08:00
commit bef7edb940
10 changed files with 1130 additions and 148 deletions


@@ -34,6 +34,9 @@ TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME = 'test_related_apps_download_urls.yml'
REPORT_TEMPLATE_FILEPATH = os.path.join(
IDF_PATH, 'tools', 'ci', 'dynamic_pipelines', 'templates', 'report.template.html'
)
TOP_N_APPS_BY_SIZE_DIFF = 10
SIZE_DIFFERENCE_BYTES_THRESHOLD = 500
BINARY_SIZE_METRIC_NAME = 'binary_size'
RETRY_JOB_PICTURE_PATH = 'tools/ci/dynamic_pipelines/templates/retry-jobs.png'
RETRY_JOB_TITLE = '\n\nRetry failed jobs with the help of the "retry_failed_jobs" stage of the pipeline:'


@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import abc
import copy
@@ -9,21 +9,24 @@ import re
import typing as t
from textwrap import dedent
import yaml
from artifacts_handler import ArtifactType
from gitlab import GitlabUpdateError
from gitlab_api import Gitlab
from idf_build_apps import App
from idf_build_apps.constants import BuildStatus
from idf_ci.app import AppWithMetricsInfo
from idf_ci.uploader import AppUploader
from prettytable import PrettyTable
from .constants import BINARY_SIZE_METRIC_NAME
from .constants import COMMENT_START_MARKER
from .constants import REPORT_TEMPLATE_FILEPATH
from .constants import RETRY_JOB_PICTURE_LINK
from .constants import RETRY_JOB_PICTURE_PATH
from .constants import RETRY_JOB_TITLE
from .constants import SIZE_DIFFERENCE_BYTES_THRESHOLD
from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
from .constants import TOP_N_APPS_BY_SIZE_DIFF
from .models import GitlabJob
from .models import TestCase
from .utils import fetch_failed_testcases_failure_ratio
@@ -59,7 +62,8 @@ class ReportGenerator:
return ''
def write_report_to_file(self, report_str: str, job_id: int, output_filepath: str) -> t.Optional[str]:
@staticmethod
def write_report_to_file(report_str: str, job_id: int, output_filepath: str) -> t.Optional[str]:
"""
Writes the report to a file and constructs a modified URL based on environment settings.
@@ -203,51 +207,44 @@ class ReportGenerator:
@staticmethod
def _sort_items(
items: t.List[t.Union[TestCase, GitlabJob]],
key: t.Union[str, t.Callable[[t.Union[TestCase, GitlabJob]], t.Any]],
items: t.List[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]],
key: t.Union[str, t.Callable[[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]], t.Any]],
order: str = 'asc',
) -> t.List[t.Union[TestCase, GitlabJob]]:
sort_function: t.Optional[t.Callable[[t.Any], t.Any]] = None
) -> t.List[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]]:
"""
Sort items based on a given key and order.
Sort items based on a given key, order, and optional custom sorting function.
:param items: List of items to sort.
:param key: A string representing the attribute name or a function to extract the sorting key.
:param order: Order of sorting ('asc' for ascending, 'desc' for descending).
:param sort_function: A custom function to control sorting logic (e.g., prioritizing positive/negative/zero values).
:return: List of sorted instances.
"""
key_func = None
if isinstance(key, str):
def key_func(item: t.Any) -> t.Any:
return getattr(item, key)
if key_func is not None:
try:
items = sorted(items, key=key_func, reverse=(order == 'desc'))
except TypeError:
print(f'Comparison for the key {key} is not supported')
sorting_key = sort_function if sort_function is not None else key_func
try:
items = sorted(items, key=sorting_key, reverse=(order == 'desc'))
except TypeError:
print(f'Comparison for the key {key} is not supported')
return items
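# Illustrative sketch (not part of this diff): how the new sort_function hook
# behaves. The names below (Item, sort_items) are hypothetical stand-ins for
# the report classes; when a custom function is supplied it replaces the
# attribute-based key, and order='desc' still just flips the result via
# reverse=True.
import typing as t

class Item:
    def __init__(self, name: str, diff_pct: float) -> None:
        self.name = name
        self.diff_pct = diff_pct

def sort_items(items: t.List[Item], key: str, order: str = 'asc',
               sort_function: t.Optional[t.Callable[[Item], t.Any]] = None) -> t.List[Item]:
    def key_func(item: Item) -> t.Any:
        return getattr(item, key)  # attribute-based fallback key

    sorting_key = sort_function if sort_function is not None else key_func
    return sorted(items, key=sorting_key, reverse=(order == 'desc'))

items = [Item('a', 1.5), Item('b', -3.0), Item('c', 0.2)]
# Custom function: rank by magnitude of the percentage, ignoring sign.
print([i.name for i in sort_items(items, key='diff_pct', order='desc',
                                  sort_function=lambda i: abs(i.diff_pct))])  # ['b', 'a', 'c']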
@abc.abstractmethod
def _get_report_str(self) -> str:
raise NotImplementedError
def _generate_comment(self, print_report_path: bool) -> str:
def _generate_comment(self) -> str:
# Report in HTML format to avoid exceeding length limits
comment = f'#### {self.title}\n'
report_str = self._get_report_str()
comment += f'{self.additional_info}\n'
self.write_report_to_file(report_str, self.job_id, self.output_filepath)
if self.additional_info:
comment += f'{self.additional_info}\n'
report_url_path = self.write_report_to_file(report_str, self.job_id, self.output_filepath)
if print_report_path and report_url_path:
comment += dedent(
f"""
Full {self.title} here: {report_url_path} (with commit {self.commit_id[:8]})
"""
)
return comment
def _update_mr_comment(self, comment: str, print_retry_jobs_message: bool) -> None:
@@ -285,8 +282,8 @@ class ReportGenerator:
updated_str = f'{existing_comment.strip()}\n\n{new_comment}'
return updated_str
def post_report(self, print_report_path: bool = True, print_retry_jobs_message: bool = False) -> None:
comment = self._generate_comment(print_report_path)
def post_report(self, print_retry_jobs_message: bool = False) -> None:
comment = self._generate_comment()
print(comment)
@@ -311,123 +308,358 @@ class BuildReportGenerator(ReportGenerator):
):
super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
self.apps = apps
self._uploader = AppUploader(self.pipeline_id)
self.apps_presigned_url_filepath = TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
self.report_titles_map = {
'failed_apps': 'Failed Apps',
'built_test_related_apps': 'Built Apps - Test Related',
'built_non_test_related_apps': 'Built Apps - Non Test Related',
'new_test_related_apps': 'New Apps - Test Related',
'new_non_test_related_apps': 'New Apps - Non Test Related',
'skipped_apps': 'Skipped Apps',
}
self.failed_apps_report_file = 'failed_apps.html'
self.built_apps_report_file = 'built_apps.html'
self.skipped_apps_report_file = 'skipped_apps.html'
@staticmethod
def custom_sort(item: AppWithMetricsInfo) -> t.Tuple[int, t.Any]:
"""
Custom sort key (applied with order='desc') to:
1. Push apps whose binary size is zero on both branches to the end.
2. Sort the remaining apps by absolute size_difference_percentage, largest first.
"""
# Priority: 0 for zero binaries, 1 for non-zero binaries
zero_binary_priority = 1 if item.metrics[BINARY_SIZE_METRIC_NAME].source_value != 0 or item.metrics[BINARY_SIZE_METRIC_NAME].target_value != 0 else 0
# Secondary key: absolute size_difference_percentage; descending order comes from reverse=True in _sort_items
size_difference_sort = abs(item.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage)
return zero_binary_priority, size_difference_sort
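# Illustrative sketch (not part of this diff): Python compares tuples element
# by element, so under the reverse=True that _sort_items applies for
# order='desc', every (1, pct) key sorts ahead of every (0, pct) key, which is
# what pushes zero-size apps to the end. The sample keys are hypothetical.
keys = [(1, 1.95), (0, 0.0), (1, 3.82), (0, 0.0), (1, 0.53)]
print(sorted(keys, reverse=True))  # [(1, 3.82), (1, 1.95), (1, 0.53), (0, 0.0), (0, 0.0)]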
def _generate_top_n_apps_by_size_table(self) -> str:
"""
Generate a markdown table for the top N apps by size difference.
Only includes apps whose absolute size difference exceeds SIZE_DIFFERENCE_BYTES_THRESHOLD (500 bytes).
"""
filtered_apps = [app for app in self.apps if abs(app.metrics[BINARY_SIZE_METRIC_NAME].difference) > SIZE_DIFFERENCE_BYTES_THRESHOLD]
top_apps = sorted(
filtered_apps,
key=lambda app: abs(app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage),
reverse=True
)[:TOP_N_APPS_BY_SIZE_DIFF]
if not top_apps:
return ''
table = (f'\n⚠️⚠️⚠️ Top {len(top_apps)} Apps Sorted by Binary Size Difference\n'
f'Note: Apps with changes of less than {SIZE_DIFFERENCE_BYTES_THRESHOLD} bytes are not shown.\n')
table += '| App Dir | Build Dir | Size Diff (bytes) | Size Diff (%) |\n'
table += '|---------|-----------|-------------------|---------------|\n'
for app in top_apps:
table += dedent(
f'| {app.app_dir} | {app.build_dir} | '
f'{app.metrics[BINARY_SIZE_METRIC_NAME].difference} | '
f'{app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage}% |\n'
)
table += ('\n**For more details, please click on the numbers in the summary above '
'to view the corresponding report files.** ⬆️⬆️⬆️\n\n')
return table
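# Illustrative sketch (not part of this diff): the filter-sort-slice pattern
# above, exercised with toy numbers. The Size dataclass is a hypothetical
# stand-in for the binary_size Metrics entries.
from dataclasses import dataclass

@dataclass
class Size:
    difference: int
    difference_percentage: float

THRESHOLD = 500  # mirrors SIZE_DIFFERENCE_BYTES_THRESHOLD
TOP_N = 10       # mirrors TOP_N_APPS_BY_SIZE_DIFF

sizes = [Size(6000, 3.82), Size(300, 0.2), Size(-1552, -0.91), Size(2000, 1.06)]
candidates = [s for s in sizes if abs(s.difference) > THRESHOLD]  # 300 is dropped
top = sorted(candidates, key=lambda s: abs(s.difference_percentage), reverse=True)[:TOP_N]
print([(s.difference, s.difference_percentage) for s in top])
# [(6000, 3.82), (2000, 1.06), (-1552, -0.91)]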
@staticmethod
def split_new_and_existing_apps(apps: t.Iterable[AppWithMetricsInfo]) -> t.Tuple[t.List[AppWithMetricsInfo], t.List[AppWithMetricsInfo]]:
"""
Splits apps into new apps and existing apps.
:param apps: Iterable of apps to process.
:return: A tuple (new_apps, existing_apps).
"""
new_apps = [app for app in apps if app.is_new_app]
existing_apps = [app for app in apps if not app.is_new_app]
return new_apps, existing_apps
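# Illustrative sketch (not part of this diff) of the partition above, with
# hypothetical stand-in objects.
from types import SimpleNamespace

apps = [SimpleNamespace(name='a', is_new_app=True), SimpleNamespace(name='b', is_new_app=False)]
new_apps = [app for app in apps if app.is_new_app]
existing_apps = [app for app in apps if not app.is_new_app]
print([a.name for a in new_apps], [a.name for a in existing_apps])  # ['a'] ['b']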
def filter_apps_by_criteria(self, build_status: str, preserve: bool) -> t.List[AppWithMetricsInfo]:
"""
Filters apps based on build status and preserve criteria.
:param build_status: Build status to filter by.
:param preserve: Whether to filter preserved apps.
:return: Filtered list of apps.
"""
return [
app for app in self.apps
if app.build_status == build_status and app.preserve == preserve
]
def get_built_apps_report_parts(self) -> t.List[str]:
"""
Generates report parts for new and existing apps.
:return: List of report parts.
"""
new_test_related_apps, built_test_related_apps = self.split_new_and_existing_apps(
self.filter_apps_by_criteria(BuildStatus.SUCCESS, True)
)
new_non_test_related_apps, built_non_test_related_apps = self.split_new_and_existing_apps(
self.filter_apps_by_criteria(BuildStatus.SUCCESS, False)
)
sections = []
if new_test_related_apps:
new_test_related_apps_table_section = self.create_table_section(
title=self.report_titles_map['new_test_related_apps'],
items=new_test_related_apps,
headers=[
'App Dir',
'Build Dir',
'Bin Files with Build Log (without map and elf)',
'Map and Elf Files',
'Your Branch App Size',
],
row_attrs=[
'app_dir',
'build_dir',
],
value_functions=[
(
'Your Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)
),
(
'Bin Files with Build Log (without map and elf)',
lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)
),
),
(
'Map and Elf Files',
lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.MAP_AND_ELF_FILES)
),
),
],
)
sections.extend(new_test_related_apps_table_section)
if built_test_related_apps:
built_test_related_apps = self._sort_items(
built_test_related_apps,
key='metrics.binary_size.difference_percentage',
order='desc',
sort_function=self.custom_sort,
)
built_test_related_apps_table_section = self.create_table_section(
title=self.report_titles_map['built_test_related_apps'],
items=built_test_related_apps,
headers=[
'App Dir',
'Build Dir',
'Bin Files with Build Log (without map and elf)',
'Map and Elf Files',
'Your Branch App Size',
'Target Branch App Size',
'Size Diff',
'Size Diff, %',
],
row_attrs=[
'app_dir',
'build_dir',
],
value_functions=[
(
'Your Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)
),
(
'Target Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].target_value)
),
(
'Size Diff',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference)
),
(
'Size Diff, %',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage)
),
(
'Bin Files with Build Log (without map and elf)',
lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)
),
),
(
'Map and Elf Files',
lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.MAP_AND_ELF_FILES)
),
),
],
)
sections.extend(built_test_related_apps_table_section)
if new_non_test_related_apps:
new_non_test_related_apps_table_section = self.create_table_section(
title=self.report_titles_map['new_non_test_related_apps'],
items=new_non_test_related_apps,
headers=[
'App Dir',
'Build Dir',
'Build Log',
'Your Branch App Size',
],
row_attrs=[
'app_dir',
'build_dir',
],
value_functions=[
(
'Your Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)
),
('Build Log', lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.LOGS))),
],
)
sections.extend(new_non_test_related_apps_table_section)
if built_non_test_related_apps:
built_non_test_related_apps = self._sort_items(
built_non_test_related_apps,
key='metrics.binary_size.difference_percentage',
order='desc',
sort_function=self.custom_sort,
)
built_non_test_related_apps_table_section = self.create_table_section(
title=self.report_titles_map['built_non_test_related_apps'],
items=built_non_test_related_apps,
headers=[
'App Dir',
'Build Dir',
'Build Log',
'Your Branch App Size',
'Target Branch App Size',
'Size Diff',
'Size Diff, %',
],
row_attrs=[
'app_dir',
'build_dir',
],
value_functions=[
(
'Your Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)
),
(
'Target Branch App Size',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].target_value)
),
(
'Size Diff',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference)
),
(
'Size Diff, %',
lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage)
),
('Build Log', lambda app: self.get_download_link_for_url(
self._uploader.get_app_presigned_url(app, ArtifactType.LOGS))),
],
)
sections.extend(built_non_test_related_apps_table_section)
built_apps_report_url = self.write_report_to_file(
self.generate_html_report(''.join(sections)),
self.job_id,
self.built_apps_report_file,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['built_test_related_apps'],
len(built_test_related_apps),
built_apps_report_url,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['built_non_test_related_apps'],
len(built_non_test_related_apps),
built_apps_report_url,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['new_test_related_apps'],
len(new_test_related_apps),
built_apps_report_url,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['new_non_test_related_apps'],
len(new_non_test_related_apps),
built_apps_report_url,
)
self.additional_info += self._generate_top_n_apps_by_size_table()
return sections
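# Illustrative sketch (not part of this diff): create_table_section itself is
# not shown here, but the (header, callable) pairs passed as value_functions
# above imply a simple resolution rule. build_row below is a hypothetical
# reconstruction, assuming row_attrs fill the leading columns.
import typing as t
from types import SimpleNamespace

def build_row(item: t.Any, headers: t.List[str], row_attrs: t.List[str],
              value_functions: t.List[t.Tuple[str, t.Callable[[t.Any], str]]]) -> t.List[str]:
    lookup = dict(value_functions)  # header name -> cell renderer
    row = [str(getattr(item, attr)) for attr in row_attrs]
    for header in headers[len(row_attrs):]:
        row.append(lookup.get(header, lambda _item: '')(item))
    return row

app = SimpleNamespace(app_dir='examples/hello', build_dir='build_esp32')
print(build_row(app, ['App Dir', 'Build Dir', 'Size Diff'], ['app_dir', 'build_dir'],
                [('Size Diff', lambda a: '1024')]))
# ['examples/hello', 'build_esp32', '1024']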
def get_failed_apps_report_parts(self) -> t.List[str]:
failed_apps = [app for app in self.apps if app.build_status == BuildStatus.FAILED]
if not failed_apps:
return []
failed_apps_table_section = self.create_table_section(
title=self.report_titles_map['failed_apps'],
items=failed_apps,
headers=['App Dir', 'Build Dir', 'Failed Reason', 'Build Log'],
row_attrs=['app_dir', 'build_dir', 'build_comment'],
value_functions=[
('Build Log', lambda app: self.get_download_link_for_url(self._uploader.get_app_presigned_url(app, ArtifactType.LOGS))),
],
)
failed_apps_report_url = self.write_report_to_file(
self.generate_html_report(''.join(failed_apps_table_section)),
self.job_id,
self.failed_apps_report_file,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['failed_apps'], len(failed_apps), failed_apps_report_url
)
return failed_apps_table_section
def get_skipped_apps_report_parts(self) -> t.List[str]:
skipped_apps = [app for app in self.apps if app.build_status == BuildStatus.SKIPPED]
if not skipped_apps:
return []
skipped_apps_table_section = self.create_table_section(
title=self.report_titles_map['skipped_apps'],
items=skipped_apps,
headers=['App Dir', 'Build Dir', 'Skipped Reason', 'Build Log'],
row_attrs=['app_dir', 'build_dir', 'build_comment'],
value_functions=[
('Build Log', lambda app: self.get_download_link_for_url(self._uploader.get_app_presigned_url(app, ArtifactType.LOGS))),
],
)
skipped_apps_report_url = self.write_report_to_file(
self.generate_html_report(''.join(skipped_apps_table_section)),
self.job_id,
self.skipped_apps_report_file,
)
self.additional_info += self.generate_additional_info_section(
self.report_titles_map['skipped_apps'], len(skipped_apps), skipped_apps_report_url
)
return skipped_apps_table_section
def _get_report_str(self) -> str:
if not self.apps:
print('No apps found, skipping build report generation')
return 'No Apps Built'
self.additional_info = f'**Build Summary (with commit {self.commit_id[:8]}):**\n'
failed_apps_report_parts = self.get_failed_apps_report_parts()
skipped_apps_report_parts = self.get_skipped_apps_report_parts()
built_apps_report_parts = self.get_built_apps_report_parts()
uploader = AppUploader(self.pipeline_id)
table_str = ''
failed_apps = [app for app in self.apps if app.build_status == BuildStatus.FAILED]
if failed_apps:
table_str += '<h2>Failed Apps</h2>'
failed_apps_table = PrettyTable()
failed_apps_table.field_names = [
'App Dir',
'Build Dir',
'Failed Reason',
'Build Log',
]
for app in failed_apps:
failed_apps_table.add_row(
[
app.app_dir,
app.build_dir,
app.build_comment or '',
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(failed_apps_table)
built_test_related_apps = [app for app in self.apps if app.build_status == BuildStatus.SUCCESS and app.preserve]
if built_test_related_apps:
table_str += '<h2>Built Apps (Test Related)</h2>'
built_apps_table = PrettyTable()
built_apps_table.field_names = [
'App Dir',
'Build Dir',
'Bin Files with Build Log (without map and elf)',
'Map and Elf Files',
]
app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = {}
for app in built_test_related_apps:
_d = {
ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
app, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
),
ArtifactType.MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
app, ArtifactType.MAP_AND_ELF_FILES
),
}
built_apps_table.add_row(
[
app.app_dir,
app.build_dir,
self.get_download_link_for_url(_d[ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]),
self.get_download_link_for_url(_d[ArtifactType.MAP_AND_ELF_FILES]),
]
)
app_presigned_urls_dict[app.build_path] = _d
# also generate a yaml file that includes the apps and the presigned urls
# for helping debugging locally
with open(self.apps_presigned_url_filepath, 'w') as fw:
yaml.dump(app_presigned_urls_dict, fw)
table_str += self.table_to_html_str(built_apps_table)
built_non_test_related_apps = [
app for app in self.apps if app.build_status == BuildStatus.SUCCESS and not app.preserve
]
if built_non_test_related_apps:
table_str += '<h2>Built Apps (Non Test Related)</h2>'
built_apps_table = PrettyTable()
built_apps_table.field_names = [
'App Dir',
'Build Dir',
'Build Log',
]
for app in built_non_test_related_apps:
built_apps_table.add_row(
[
app.app_dir,
app.build_dir,
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(built_apps_table)
skipped_apps = [app for app in self.apps if app.build_status == BuildStatus.SKIPPED]
if skipped_apps:
table_str += '<h2>Skipped Apps</h2>'
skipped_apps_table = PrettyTable()
skipped_apps_table.field_names = ['App Dir', 'Build Dir', 'Skipped Reason', 'Build Log']
for app in skipped_apps:
skipped_apps_table.add_row(
[
app.app_dir,
app.build_dir,
app.build_comment or '',
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(skipped_apps_table)
return self.generate_html_report(table_str)
return self.generate_html_report(
''.join(failed_apps_report_parts + built_apps_report_parts + skipped_apps_report_parts)
)
class TargetTestReportGenerator(ReportGenerator):


@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
@@ -9,8 +9,10 @@ import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.report import BuildReportGenerator
from dynamic_pipelines.report import JobReportGenerator
from dynamic_pipelines.report import TargetTestReportGenerator
from dynamic_pipelines.utils import fetch_app_metrics
from dynamic_pipelines.utils import fetch_failed_jobs
from dynamic_pipelines.utils import parse_testcases_from_filepattern
from idf_ci.app import enrich_apps_with_metrics_info
from idf_ci.app import import_apps_from_txt
@@ -73,6 +75,11 @@ def generate_build_report(args: argparse.Namespace) -> None:
apps: t.List[t.Any] = [
app for file_name in glob.glob(args.app_list_filepattern) for app in import_apps_from_txt(file_name)
]
app_metrics = fetch_app_metrics(
source_commit_sha=os.environ.get('CI_COMMIT_SHA'),
target_commit_sha=os.environ.get('CI_MERGE_REQUEST_TARGET_BRANCH_SHA'),
)
apps = enrich_apps_with_metrics_info(app_metrics, apps)
report_generator = BuildReportGenerator(
args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, apps=apps
)
@@ -84,7 +91,7 @@ def generate_target_test_report(args: argparse.Namespace) -> None:
report_generator = TargetTestReportGenerator(
args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, test_cases=test_cases
)
report_generator.post_report(print_report_path=False)
report_generator.post_report()
def generate_jobs_report(args: argparse.Namespace) -> None:
@@ -93,8 +100,10 @@
if not jobs:
return
report_generator = JobReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, jobs=jobs)
report_generator.post_report(print_report_path=False, print_retry_jobs_message=any(job.is_failed for job in jobs))
report_generator = JobReportGenerator(
args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, jobs=jobs
)
report_generator.post_report(print_retry_jobs_message=any(job.is_failed for job in jobs))
if __name__ == '__main__':


@@ -7,6 +7,9 @@ generate_pytest_build_report:
when: always
artifacts:
paths:
- failed_apps.html
- built_apps.html
- skipped_apps.html
- build_report.html
- test_related_apps_download_urls.yml
expire_in: 1 week


@@ -0,0 +1,9 @@
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/panic", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/panic/sdkconfig.ci.coredump_flash_capture_dram", "config_name": "coredump_flash_capture_dram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "build failed", "build_comment": "Compilation error", "cmake_vars": {}, "work_dir": "tools/test_apps/system/panic", "build_dir": "build_esp32s3_coredump_flash_capture_dram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/ram_loadable_app", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/ram_loadable_app/sdkconfig.ci.defaults", "config_name": "defaults", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/ram_loadable_app", "build_dir": "build_esp32_defaults", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/ram_loadable_app", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/ram_loadable_app/sdkconfig.ci.pure_ram", "config_name": "pure_ram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/ram_loadable_app", "build_dir": "build_esp32_pure_ram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/startup", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/startup/sdkconfig.ci.flash_80m_qio", "config_name": "flash_80m_qio", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/startup", "build_dir": "build_esp32_flash_80m_qio", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/startup", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/startup/sdkconfig.ci.stack_check_verbose_log", "config_name": "stack_check_verbose_log", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/startup", "build_dir": "build_esp32s3_stack_check_verbose_log", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/test_watchpoint", "target": "esp32", "sdkconfig_path": null, "config_name": "default", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "skipped", "build_comment": "Skipped due to unmet dependencies", "cmake_vars": {}, "work_dir": "tools/test_apps/system/test_watchpoint", "build_dir": "build_esp32_default", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/test_watchpoint", "target": "esp32c3", "sdkconfig_path": null, "config_name": "default", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "skipped", "build_comment": "Skipped due to unmet dependencies", "cmake_vars": {}, "work_dir": "tools/test_apps/system/test_watchpoint", "build_dir": "build_esp32c3_default", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/unicore_bootloader", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/unicore_bootloader/sdkconfig.ci.multicore", "config_name": "multicore", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "build failed", "build_comment": "Compilation error", "cmake_vars": {}, "work_dir": "tools/test_apps/system/unicore_bootloader", "build_dir": "build_esp32_multicore", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/unicore_bootloader", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/unicore_bootloader/sdkconfig.ci.unicore_psram", "config_name": "unicore_psram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/unicore_bootloader", "build_dir": "build_esp32s3_unicore_psram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}


@@ -0,0 +1,146 @@
{
"tools/test_apps/system/panic_coredump_flash_capture_dram_esp32s3": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "panic",
"config_name": "coredump_flash_capture_dram",
"target": "esp32s3",
"metrics": {
"binary_size": {
"source_value": 156936,
"target_value": 162936,
"difference": 6000,
"difference_percentage": 3.82
}
},
"app_path": "tools/test_apps/system/panic_coredump"
},
"tools/test_apps/system/ram_loadable_app_defaults_esp32": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "ram_loadable_app",
"config_name": "defaults",
"target": "esp32",
"metrics": {
"binary_size": {
"source_value": 171448,
"target_value": 173000,
"difference": 1552,
"difference_percentage": 0.91
}
},
"app_path": "tools/test_apps/system/ram_loadable_app"
},
"tools/test_apps/system/ram_loadable_app_pure_ram_esp32": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "ram_loadable_app",
"config_name": "pure_ram",
"target": "esp32",
"metrics": {
"binary_size": {
"source_value": 156632,
"target_value": 158200,
"difference": 1568,
"difference_percentage": 1.0
}
},
"app_path": "tools/test_apps/system/ram_loadable_app"
},
"tools/test_apps/system/startup_flash_80m_qio_esp32": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "startup",
"config_name": "flash_80m_qio",
"target": "esp32",
"metrics": {
"binary_size": {
"source_value": 225692,
"target_value": 230000,
"difference": 4308,
"difference_percentage": 1.91
}
},
"app_path": "tools/test_apps/system/startup"
},
"tools/test_apps/system/startup_stack_check_verbose_log_esp32s3": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "startup",
"config_name": "stack_check_verbose_log",
"target": "esp32s3",
"metrics": {
"binary_size": {
"source_value": 156936,
"target_value": 160000,
"difference": 3064,
"difference_percentage": 1.95
}
},
"app_path": "tools/test_apps/system/startup"
},
"tools/test_apps/system/test_watchpoint_default_esp32": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "test_watchpoint",
"config_name": "default",
"target": "esp32",
"metrics": {
"binary_size": {
"source_value": 147896,
"target_value": 150000,
"difference": 2104,
"difference_percentage": 1.42
}
},
"app_path": "tools/test_apps/system/test_watchpoint"
},
"tools/test_apps/system/test_watchpoint_default_esp32c3": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "test_watchpoint",
"config_name": "default",
"target": "esp32c3",
"metrics": {
"binary_size": {
"source_value": 189456,
"target_value": 190456,
"difference": 1000,
"difference_percentage": 0.53
}
},
"app_path": "tools/test_apps/system/test_watchpoint"
},
"tools/test_apps/system/unicore_bootloader_multicore_esp32": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "unicore_bootloader",
"config_name": "multicore",
"target": "esp32",
"metrics": {
"binary_size": {
"source_value": 216784,
"target_value": 220000,
"difference": 3216,
"difference_percentage": 1.48
}
},
"app_path": "tools/test_apps/system/unicore_bootloader"
},
"tools/test_apps/system/unicore_bootloader_unicore_psram_esp32s3": {
"source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
"target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
"app_name": "unicore_bootloader",
"config_name": "unicore_psram",
"target": "esp32s3",
"metrics": {
"binary_size": {
"source_value": 189456,
"target_value": 191456,
"difference": 2000,
"difference_percentage": 1.06
}
},
"app_path": "tools/test_apps/system/unicore_bootloader"
}
}


@@ -0,0 +1,246 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Build Report</title>
<link
href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
rel="stylesheet"
/>
<link
href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
rel="stylesheet"
/>
<link
rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.css"
/>
<link
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css"
rel="stylesheet"
/>
<style>
.text-toggle,
.full-text {
cursor: pointer;
}
th:nth-child(1),
td:nth-child(1) {
width: 5%;
}
th:nth-child(2),
td:nth-child(2),
th:nth-child(3),
td:nth-child(3) {
width: 30%;
}
th,
td {
overflow: hidden;
text-overflow: ellipsis;
}
h2 {
margin-top: 10px;
}
.copy-link-icon {
font-size: 20px;
margin-left: 10px;
color: #8f8f97;
cursor: pointer;
}
.copy-link-icon:hover {
color: #282b2c;
}
</style>
</head>
<body>
<div class="container-fluid"><h2 id="failed-apps">Failed Apps<i class="fas fa-link copy-link-icon"
onclick="copyPermalink('#failed-apps')"></i></h2><table data-toggle="table" data-search-align="left" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>App Dir</th>
<th>Build Dir</th>
<th>Failed Reason</th>
<th>Build Log</th>
</tr>
</thead>
<tbody>
<tr>
<td>tools/test_apps/system/panic</td>
<td>build_esp32s3_coredump_flash_capture_dram</td>
<td>Compilation error</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
</tr>
<tr>
<td>tools/test_apps/system/unicore_bootloader</td>
<td>build_esp32_multicore</td>
<td>Compilation error</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
</tr>
</tbody>
</table><h2 id="built-apps---test-related">Built Apps - Test Related<i class="fas fa-link copy-link-icon"
onclick="copyPermalink('#built-apps---test-related')"></i></h2><table data-toggle="table" data-search-align="left" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>App Dir</th>
<th>Build Dir</th>
<th>Bin Files with Build Log (without map and elf)</th>
<th>Map and Elf Files</th>
<th>Your Branch App Size</th>
<th>Target Branch App Size</th>
<th>Size Diff</th>
<th>Size Diff, %</th>
</tr>
</thead>
<tbody>
<tr>
<td>tools/test_apps/system/startup</td>
<td>build_esp32s3_stack_check_verbose_log</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td>156936</td>
<td>160000</td>
<td>3064</td>
<td>1.95</td>
</tr>
<tr>
<td>tools/test_apps/system/startup</td>
<td>build_esp32_flash_80m_qio</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td>225692</td>
<td>230000</td>
<td>4308</td>
<td>1.91</td>
</tr>
<tr>
<td>tools/test_apps/system/unicore_bootloader</td>
<td>build_esp32s3_unicore_psram</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td>189456</td>
<td>191456</td>
<td>2000</td>
<td>1.06</td>
</tr>
<tr>
<td>tools/test_apps/system/ram_loadable_app</td>
<td>build_esp32_pure_ram</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td>156632</td>
<td>158200</td>
<td>1568</td>
<td>1.0</td>
</tr>
<tr>
<td>tools/test_apps/system/ram_loadable_app</td>
<td>build_esp32_defaults</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td><a href="https://example.com/presigned-url">Download</a></td>
<td>171448</td>
<td>173000</td>
<td>1552</td>
<td>0.91</td>
</tr>
</tbody>
</table><h2 id="skipped-apps">Skipped Apps<i class="fas fa-link copy-link-icon"
onclick="copyPermalink('#skipped-apps')"></i></h2><table data-toggle="table" data-search-align="left" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>App Dir</th>
<th>Build Dir</th>
<th>Skipped Reason</th>
<th>Build Log</th>
</tr>
</thead>
<tbody>
<tr>
<td>tools/test_apps/system/test_watchpoint</td>
<td>build_esp32_default</td>
<td>Skipped due to unmet dependencies</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
</tr>
<tr>
<td>tools/test_apps/system/test_watchpoint</td>
<td>build_esp32c3_default</td>
<td>Skipped due to unmet dependencies</td>
<td><a href="https://example.com/presigned-url">Download</a></td>
</tr>
</tbody>
</table></div>
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.min.js"></script>
<script>
$(window).on("load", function () {
var hash = window.location.hash;
if (hash) {
setTimeout(function () {
$("html, body").animate(
{ scrollTop: $(hash).offset().top },
100
);
}, 100);
}
});
</script>
<script>
$(document).ready(function () {
scrollToHashLocation();
setupTextToggles();
setupEventHandlers();
});
function setupEventHandlers() {
$(window).on("load", scrollToHashLocation);
$("body").on("click", ".toggle-link", toggleText);
}
function scrollToHashLocation() {
const hash = window.location.hash;
if (hash) {
setTimeout(() => {
$("html, body").animate(
{ scrollTop: $(hash).offset().top },
100
);
}, 100);
}
}
function copyPermalink(anchorId) {
const fullUrl = `${window.location.origin}${window.location.pathname}${anchorId}`;
history.pushState(null, null, anchorId);
navigator.clipboard.writeText(fullUrl);
scrollToHashLocation();
}
function toggleText(e) {
e.preventDefault();
const link = $(this),
textSpan = link.siblings(".full-text"),
toggleSpan = link.siblings(".text-toggle");
const visible = textSpan.is(":visible");
link.text(visible ? "Show More" : "Show Less");
textSpan.toggle();
toggleSpan.toggle();
}
function setupTextToggles() {
$("table.table td").each(function () {
var cell = $(this);
if (cell.text().length > 100) {
var originalText = cell.text();
var displayText =
originalText.substring(0, 100) + "...";
cell.html(
`<span class="text-toggle">${displayText}</span><span class="full-text" style="display: none;">${originalText}</span><a href="#" class="toggle-link">Show More</a>`
);
}
});
}
</script>
</body>
</html>


@@ -1,5 +1,5 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import json
import os.path
@@ -12,8 +12,11 @@ sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci',
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))
from dynamic_pipelines.models import GitlabJob # noqa: E402
from dynamic_pipelines.report import JobReportGenerator, TargetTestReportGenerator # noqa: E402
from dynamic_pipelines.report import JobReportGenerator, TargetTestReportGenerator, BuildReportGenerator # noqa: E402
from dynamic_pipelines.utils import load_file, parse_testcases_from_filepattern # noqa: E402
from idf_build_apps.constants import BuildStatus # noqa: E402
from idf_ci.app import import_apps_from_txt # noqa: E402
from idf_ci.app import enrich_apps_with_metrics_info # noqa: E402
class TestReportGeneration(unittest.TestCase):
@@ -27,6 +30,7 @@
def setup_patches(self) -> None:
self.gitlab_patcher = patch('dynamic_pipelines.report.Gitlab')
self.uploader_patcher = patch('dynamic_pipelines.report.AppUploader')
self.failure_rate_patcher = patch('dynamic_pipelines.report.fetch_failed_testcases_failure_ratio')
self.env_patcher = patch.dict('os.environ', {
'CI_DASHBOARD_HOST': 'https://test_dashboard_host',
@@ -36,6 +40,7 @@ })
})
self.MockGitlab = self.gitlab_patcher.start()
self.MockUploader = self.uploader_patcher.start()
self.test_cases_failure_rate = self.failure_rate_patcher.start()
self.env_patcher.start()
@@ -43,8 +48,10 @@
self.mock_mr = MagicMock()
self.MockGitlab.return_value.project = self.mock_project
self.mock_project.mergerequests.get.return_value = self.mock_mr
self.MockUploader.return_value.get_app_presigned_url.return_value = 'https://example.com/presigned-url'
self.addCleanup(self.gitlab_patcher.stop)
self.addCleanup(self.uploader_patcher.stop)
self.addCleanup(self.failure_rate_patcher.stop)
self.addCleanup(self.env_patcher.stop)
self.addCleanup(self.cleanup_files)
@@ -54,6 +61,9 @@
self.target_test_report_generator.skipped_test_cases_report_file,
self.target_test_report_generator.succeeded_cases_report_file,
self.target_test_report_generator.failed_cases_report_file,
self.build_report_generator.failed_apps_report_file,
self.build_report_generator.built_apps_report_file,
self.build_report_generator.skipped_apps_report_file,
]
for file_path in files_to_delete:
if os.path.exists(file_path):
@@ -66,13 +76,18 @@
self.expected_job_report_html = load_file(
os.path.join(self.reports_sample_data_path, 'expected_job_report.html')
)
self.expected_build_report_html = load_file(
os.path.join(self.reports_sample_data_path, 'expected_build_report.html')
)
def create_report_generators(self) -> None:
jobs_response_raw = load_file(os.path.join(self.reports_sample_data_path, 'jobs_api_response.json'))
failure_rate_jobs_response = load_file(os.path.join(self.reports_sample_data_path, 'failure_rate_jobs_response.json'))
built_apps_size_info_response = json.loads(load_file(os.path.join(self.reports_sample_data_path, 'apps_size_info_api_response.json')))
failure_rates = {item['name']: item for item in json.loads(failure_rate_jobs_response).get('jobs', [])}
jobs = [GitlabJob.from_json_data(job_json, failure_rates.get(job_json['name'], {})) for job_json in json.loads(jobs_response_raw)['jobs']]
test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))
apps = enrich_apps_with_metrics_info(built_apps_size_info_response, import_apps_from_txt(os.path.join(self.reports_sample_data_path, 'apps')))
self.target_test_report_generator = TargetTestReportGenerator(
project_id=123,
mr_iid=1,
@@ -91,6 +106,15 @@
title='Job Report',
jobs=jobs
)
self.build_report_generator = BuildReportGenerator(
project_id=123,
mr_iid=1,
pipeline_id=456,
job_id=0,
commit_id='cccc',
title='Build Report',
apps=apps
)
self.target_test_report_generator._known_failure_cases_set = {
'*.test_wpa_supplicant_ut',
'esp32c3.release.test_esp_timer',
@@ -148,6 +172,179 @@
report = self.job_report_generator._get_report_str()
self.assertEqual(report, self.expected_job_report_html)
def test_generate_top_n_apps_by_size_table(self) -> None:
apps_with_size_diff = [
MagicMock(
app_dir=f'app_dir_{i}',
build_dir=f'build_dir_{i}',
build_status=BuildStatus.SUCCESS,
metrics={
'binary_size': MagicMock(
source_value=i * 10000,
target_value=i * 10000 + i * 1000,
difference=i * 1000,
difference_percentage=i * 0.5,
)
}
)
for i in range(1, 6)
]
build_report_generator = BuildReportGenerator(
project_id=123,
mr_iid=1,
pipeline_id=456,
job_id=0,
commit_id='cccc',
title='Build Report',
apps=apps_with_size_diff
)
top_apps_table = build_report_generator._generate_top_n_apps_by_size_table()
self.assertIn('| App Dir | Build Dir | Size Diff (bytes) | Size Diff (%) |', top_apps_table)
self.assertIn('| app_dir_5 | build_dir_5 | 5000 | 2.5% |', top_apps_table)
self.assertIn('| app_dir_1 | build_dir_1 | 1000 | 0.5% |', top_apps_table)
def test_get_built_apps_report_parts(self) -> None:
apps = [
MagicMock(
app_dir='test_app_1',
build_dir='build_dir_1',
size_difference=1000,
size_difference_percentage=1.0,
build_status=BuildStatus.SUCCESS,
preserve=True,
metrics={
'binary_size': MagicMock(
difference=1000,
difference_percentage=1.0
)
}
),
MagicMock(
app_dir='test_app_2',
build_dir='build_dir_2',
size_difference=2000,
size_difference_percentage=2.0,
build_status=BuildStatus.SUCCESS,
preserve=False,
metrics={
'binary_size': MagicMock(
difference=2000,
difference_percentage=2.0
)
}
),
]
build_report_generator = BuildReportGenerator(
project_id=123,
mr_iid=1,
pipeline_id=456,
job_id=0,
commit_id='cccc',
title='Build Report',
apps=apps
)
built_apps_report_parts = build_report_generator.get_built_apps_report_parts()
self.assertGreater(len(built_apps_report_parts), 0)
self.assertIn('test_app_1', ''.join(built_apps_report_parts))
self.assertIn('test_app_2', ''.join(built_apps_report_parts))
def test_get_failed_apps_report_parts(self) -> None:
failed_apps = [
MagicMock(
app_dir='failed_app_1',
build_dir='build_dir_1',
build_comment='Compilation error',
build_status=BuildStatus.FAILED,
metrics={
'binary_size': MagicMock(
difference=None,
difference_percentage=None
)
}
),
MagicMock(
app_dir='failed_app_2',
build_dir='build_dir_2',
build_comment='Linker error',
build_status=BuildStatus.FAILED,
metrics={
'binary_size': MagicMock(
difference=None,
difference_percentage=None
)
}
),
]
build_report_generator = BuildReportGenerator(
project_id=123,
mr_iid=1,
pipeline_id=456,
job_id=0,
commit_id='cccc',
title='Build Report',
apps=failed_apps
)
failed_apps_report_parts = build_report_generator.get_failed_apps_report_parts()
self.assertGreater(len(failed_apps_report_parts), 0)
self.assertIn('failed_app_1', ''.join(failed_apps_report_parts))
self.assertIn('failed_app_2', ''.join(failed_apps_report_parts))
def test_get_skipped_apps_report_parts(self) -> None:
skipped_apps = [
MagicMock(
app_dir='skipped_app_1',
build_dir='build_dir_1',
build_comment='Dependencies unmet',
build_status=BuildStatus.SKIPPED,
metrics={
'binary_size': MagicMock(
difference=None,
difference_percentage=None
)
}
),
MagicMock(
app_dir='skipped_app_2',
build_dir='build_dir_2',
build_comment='Feature flag disabled',
build_status=BuildStatus.SKIPPED,
metrics={
'binary_size': MagicMock(
difference=None,
difference_percentage=None
)
}
),
]
build_report_generator = BuildReportGenerator(
project_id=123,
mr_iid=1,
pipeline_id=456,
job_id=0,
commit_id='cccc',
title='Build Report',
apps=skipped_apps
)
skipped_apps_report_parts = build_report_generator.get_skipped_apps_report_parts()
self.assertGreater(len(skipped_apps_report_parts), 0)
self.assertIn('skipped_app_1', ''.join(skipped_apps_report_parts))
self.assertIn('skipped_app_2', ''.join(skipped_apps_report_parts))
def test_build_report_html_structure(self) -> None:
report = self.build_report_generator._get_report_str()
self.assertEqual(report, self.expected_build_report_html)
if __name__ == '__main__':
unittest.main()


@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import glob
import os
@@ -168,6 +168,37 @@ def fetch_failed_testcases_failure_ratio(failed_testcases: t.List[TestCase], bra
return failed_testcases
def fetch_app_metrics(
source_commit_sha: str,
target_commit_sha: str,
) -> t.Dict:
"""
Fetches app metrics for the given source and target commit SHAs.
:param source_commit_sha: The commit SHA of the source branch.
:param target_commit_sha: The commit SHA of the target branch to compare app sizes against.
:return: A dict mapping '{app_path}_{config_name}_{target}' keys to the metrics info of built binaries.
"""
build_info_map = dict()
response = requests.post(
f'{CI_DASHBOARD_API}/apps/metrics',
headers={'CI-Job-Token': CI_JOB_TOKEN},
json={
'source_commit_sha': source_commit_sha,
'target_commit_sha': target_commit_sha,
}
)
if response.status_code != 200:
print(f'Failed to fetch build info: {response.status_code} - {response.text}')
else:
response_data = response.json()
build_info_map = {
f"{info['app_path']}_{info['config_name']}_{info['target']}": info
for info in response_data.get('data', [])
}
return build_info_map
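# Illustrative usage sketch (not part of this diff), mirroring how
# generate_build_report calls this helper: both SHAs come from GitLab CI
# predefined variables, and an empty dict comes back on HTTP errors, so
# callers can always .get() safely. The lookup key is hypothetical sample data.
import os

metrics = fetch_app_metrics(
    source_commit_sha=os.environ.get('CI_COMMIT_SHA'),
    target_commit_sha=os.environ.get('CI_MERGE_REQUEST_TARGET_BRANCH_SHA'),
)
info = metrics.get('tools/test_apps/system/startup_flash_80m_qio_esp32', {})
print(info.get('metrics', {}).get('binary_size'))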
def load_file(file_path: str) -> str:
"""
Loads the content of a file.


@@ -1,10 +1,11 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
import typing as t
from typing import Literal
from dynamic_pipelines.constants import BINARY_SIZE_METRIC_NAME
from idf_build_apps import App
from idf_build_apps import CMakeApp
from idf_build_apps import json_to_app
@@ -29,6 +30,51 @@ class IdfCMakeApp(CMakeApp):
self.uploader.upload_app(self.build_path)
class Metrics:
"""
Represents a metric and its values for source, target, and the differences.
"""
def __init__(
self,
source_value: t.Optional[float] = None,
target_value: t.Optional[float] = None,
difference: t.Optional[float] = None,
difference_percentage: t.Optional[float] = None,
) -> None:
self.source_value = source_value or 0.0
self.target_value = target_value or 0.0
self.difference = difference or 0.0
self.difference_percentage = difference_percentage or 0.0
def to_dict(self) -> t.Dict[str, t.Any]:
"""
Converts the Metrics object to a dictionary.
"""
return {
'source_value': self.source_value,
'target_value': self.target_value,
'difference': self.difference,
'difference_percentage': self.difference_percentage,
}
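# Illustrative sketch (not part of this diff): None inputs collapse to 0.0, so
# an app with no metrics still renders zeros in the report tables instead of
# raising. The sample values mirror the fixture above.
m = Metrics(source_value=156936, target_value=162936, difference=6000, difference_percentage=3.82)
print(m.to_dict())
# {'source_value': 156936, 'target_value': 162936, 'difference': 6000, 'difference_percentage': 3.82}
print(Metrics().to_dict())
# {'source_value': 0.0, 'target_value': 0.0, 'difference': 0.0, 'difference_percentage': 0.0}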
class AppWithMetricsInfo(IdfCMakeApp):
metrics: t.Dict[str, Metrics]
is_new_app: bool
def __init__(self, **kwargs: t.Any) -> None:
super().__init__(**kwargs)
self.metrics = {
metric_name: metric_data
for metric_name, metric_data in kwargs.get('metrics', {}).items()
}
self.is_new_app = kwargs.get('is_new_app', False)
class Config:
arbitrary_types_allowed = True
def dump_apps_to_txt(apps: t.List[App], output_filepath: str) -> None:
with open(output_filepath, 'w') as fw:
for app in apps:
@@ -47,3 +93,63 @@ def import_apps_from_txt(input_filepath: str) -> t.List[App]:
sys.exit(1)
return apps
def enrich_apps_with_metrics_info(
app_metrics_info_map: t.Dict[str, t.Dict[str, t.Any]],
apps: t.List[App]
) -> t.List[AppWithMetricsInfo]:
def _get_full_attributes(obj: App) -> t.Dict[str, t.Any]:
"""
Retrieves all attributes of an object, including properties and computed fields.
"""
attributes: t.Dict[str, t.Any] = obj.__dict__.copy()
for attr in dir(obj):
if not attr.startswith('_'): # Skip private/internal attributes
try:
value = getattr(obj, attr)
# Include only if it's not already in __dict__
if attr not in attributes:
attributes[attr] = value
except Exception:
# Skip attributes that raise exceptions (e.g., methods needing args)
pass
return attributes
default_metrics_structure = {
BINARY_SIZE_METRIC_NAME: Metrics(
source_value=0,
target_value=0,
difference=0,
difference_percentage=0.0,
),
}
apps_with_metrics_info = []
for app in apps:
key = f'{app.app_dir}_{app.config_name}_{app.target}'
app_attributes = _get_full_attributes(app)
metrics = {
metric_name: default_metric
for metric_name, default_metric in default_metrics_structure.items()
}
is_new_app = False
if key in app_metrics_info_map:
info = app_metrics_info_map[key]
for metric_name, metric_data in info.get('metrics', {}).items():
metrics[metric_name] = Metrics(
source_value=metric_data.get('source_value', 0),
target_value=metric_data.get('target_value', 0),
difference=metric_data.get('difference', 0),
difference_percentage=metric_data.get('difference_percentage', 0.0),
)
is_new_app = info.get('is_new_app', False)
app_attributes.update({'metrics': metrics, 'is_new_app': is_new_app})
apps_with_metrics_info.append(AppWithMetricsInfo(**app_attributes))
return apps_with_metrics_info
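# Illustrative end-to-end sketch (not part of this diff), mirroring
# generate_build_report: load the apps built in this pipeline, fetch size
# metrics for the source/target commit pair, then attach them as
# AppWithMetricsInfo objects. The filename is hypothetical, and
# BINARY_SIZE_METRIC_NAME is already imported at the top of this module.
import os
from dynamic_pipelines.utils import fetch_app_metrics  # same import generate_report.py uses

apps = import_apps_from_txt('app_info_1.txt')  # hypothetical app list file
app_metrics = fetch_app_metrics(
    source_commit_sha=os.environ.get('CI_COMMIT_SHA'),
    target_commit_sha=os.environ.get('CI_MERGE_REQUEST_TARGET_BRANCH_SHA'),
)
for app in enrich_apps_with_metrics_info(app_metrics, apps):
    size = app.metrics[BINARY_SIZE_METRIC_NAME]
    print(app.app_dir, size.difference, f'{size.difference_percentage}%', app.is_new_app)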