Mirror of https://github.com/espressif/esp-idf
ci: remove gitlab api call while generating pytest target-test child pipeline

commit 502749d254, parent bdc2998e4b

The diff removes the gitlab_api / python-gitlab usage from the child-pipeline generation script: instead of querying GitLab for online runners per tag combination, test jobs are filtered against an exclude list built from the known-warnings YAML file and the EXCLUDE_RUNNER_TAGS variable.
@@ -8,7 +8,6 @@
 """
 import argparse
 import glob
-import logging
 import os
 import typing as t
 from collections import Counter
@@ -21,16 +20,15 @@ from dynamic_pipelines.constants import DEFAULT_CASES_TEST_PER_JOB
 from dynamic_pipelines.constants import DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH
 from dynamic_pipelines.constants import DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME
 from dynamic_pipelines.constants import DEFAULT_TEST_PATHS
-from dynamic_pipelines.constants import KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH
+from dynamic_pipelines.constants import (
+    KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH,
+)
 from dynamic_pipelines.models import EmptyJob
 from dynamic_pipelines.models import Job
 from dynamic_pipelines.models import TargetTestJob
 from dynamic_pipelines.utils import dump_jobs_to_yaml
-from gitlab.v4.objects import Project
-from gitlab_api import Gitlab
 from idf_build_apps import App
 from idf_ci.app import import_apps_from_txt
-from idf_ci_utils import IDF_PATH
 from idf_pytest.script import get_pytest_cases
 
 
@@ -50,22 +48,17 @@ def get_tags_with_amount(s: str) -> t.List[str]:
 
 
 def get_target_test_jobs(
-    project: Project, paths: str, apps: t.List[App]
-) -> t.Tuple[t.List[Job], t.List[str], t.Dict[str, t.List[str]]]:
+    paths: str, apps: t.List[App], exclude_runner_tags: t.Set[str]
+) -> t.Tuple[t.List[Job], t.List[str], t.List[str]]:
     """
     Return the target test jobs and the extra yaml files to include
     """
-    issues: t.Dict[str, t.List[str]] = {
-        'no_env_marker_test_cases': [],
-        'no_runner_tags': [],
-    }
-
     if mr_labels := os.getenv('CI_MERGE_REQUEST_LABELS'):
         print(f'MR labels: {mr_labels}')
 
         if BUILD_ONLY_LABEL in mr_labels.split(','):
             print('MR has build only label, skip generating target test child pipeline')
-            return [EmptyJob()], [], issues
+            return [EmptyJob()], [], []
 
     pytest_cases = get_pytest_cases(
         paths,
@@ -73,10 +66,11 @@ def get_target_test_jobs(
         marker_expr='not host_test',  # since it's generating target-test child pipeline
     )
 
+    no_env_marker_test_cases: t.List[str] = []
     res = defaultdict(list)
     for case in pytest_cases:
         if not case.env_markers:
-            issues['no_env_marker_test_cases'].append(case.item.nodeid)
+            no_env_marker_test_cases.append(case.item.nodeid)
             continue
 
         res[(case.target_selector, tuple(sorted(case.env_markers)))].append(case)
@@ -84,13 +78,8 @@ def get_target_test_jobs(
     target_test_jobs: t.List[Job] = []
     for (target_selector, env_markers), cases in res.items():
         runner_tags = get_tags_with_amount(target_selector) + list(env_markers)
-        # we don't need to get all runner, as long as we get one runner, it's fine
-        runner_list = project.runners.list(status='online', tag_list=','.join(runner_tags), get_all=False)
-        if not runner_list:
-            issues['no_runner_tags'].append(','.join(runner_tags))
-            logging.warning(f'No runner found for {",".join(runner_tags)}, required by cases:')
-            for case in cases:
-                logging.warning(f'  - {case.item.nodeid}')
+        if ','.join(runner_tags) in exclude_runner_tags:
+            print('WARNING: excluding test cases with runner tags:', runner_tags)
             continue
 
         target_test_job = TargetTestJob(
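The runner-availability query against the GitLab API is replaced by a plain membership test on a pre-built exclude set. A minimal, self-contained sketch of that matching (tag values are made up for illustration; in the script the set comes from the known-warnings YAML and the EXCLUDE_RUNNER_TAGS variable shown further down):

# Sketch, not the ESP-IDF implementation: a job is skipped when its comma-joined
# runner tags appear verbatim in the exclude set.
exclude_runner_tags = {'esp32,ip101', 'esp32c2,jtag,xtal_40mhz'}  # example entries

def is_excluded(runner_tags):
    # Order and spelling must match an exclude entry exactly, since the check is
    # a string comparison on the joined list, not a per-tag lookup.
    return ','.join(runner_tags) in exclude_runner_tags

print(is_excluded(['esp32', 'ip101']))    # True  -> test cases excluded with a warning
print(is_excluded(['esp32', 'generic']))  # False -> a TargetTestJob is generated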
@@ -115,63 +104,54 @@ def get_target_test_jobs(
     if fast_pipeline_flag:
         extra_include_yml = ['tools/ci/dynamic_pipelines/templates/fast_pipeline.yml']
 
-    issues['no_env_marker_test_cases'] = sorted(issues['no_env_marker_test_cases'])
-    issues['no_runner_tags'] = sorted(issues['no_runner_tags'])
-
-    return target_test_jobs, extra_include_yml, issues
+    no_env_marker_test_cases.sort()
+    return target_test_jobs, extra_include_yml, no_env_marker_test_cases
 
 
 def generate_target_test_child_pipeline(
-    project: Project,
     paths: str,
     apps: t.List[App],
     output_filepath: str,
 ) -> None:
-    target_test_jobs, extra_include_yml, issues = get_target_test_jobs(project, paths, apps)
-
     with open(KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH) as fr:
         known_warnings_dict = yaml.safe_load(fr) or dict()
 
+    exclude_runner_tags_set = set(known_warnings_dict.get('no_runner_tags', []))
+    # EXCLUDE_RUNNER_TAGS is a string separated by ';'
+    # like 'esp32,generic;esp32c3,wifi'
+    if exclude_runner_tags := os.getenv('EXCLUDE_RUNNER_TAGS'):
+        exclude_runner_tags_set.update(exclude_runner_tags.split(';'))
+
+    target_test_jobs, extra_include_yml, no_env_marker_test_cases = get_target_test_jobs(
+        paths=paths,
+        apps=apps,
+        exclude_runner_tags=exclude_runner_tags_set,
+    )
+
     known_no_env_marker_test_cases = set(known_warnings_dict.get('no_env_marker_test_cases', []))
-    no_env_marker_test_cases = set(issues['no_env_marker_test_cases'])
+    no_env_marker_test_cases_set = set(no_env_marker_test_cases)
 
     no_env_marker_test_cases_fail = False
-    if no_env_marker_test_cases - known_no_env_marker_test_cases:
+    if no_env_marker_test_cases_set - known_no_env_marker_test_cases:
         print('ERROR: NEW "no_env_marker_test_cases" detected:')
-        for case in no_env_marker_test_cases - known_no_env_marker_test_cases:
+        for case in no_env_marker_test_cases_set - known_no_env_marker_test_cases:
             print(f'  - {case}')
         no_env_marker_test_cases_fail = True
 
-        print('Please add at least one environment markers to the test cases listed above. '
-              'You may check all the env markers here: tools/ci/idf_pytest/constants.py')
-
-    known_no_runner_tags = set(known_warnings_dict.get('no_runner_tags', []))
-    no_runner_tags = set(issues['no_runner_tags'])
-
-    no_runner_tags_fail = False
-    if no_runner_tags - known_no_runner_tags:
-        print('ERROR: NEW "no_runner_tags" detected:')
-        for tag in no_runner_tags - known_no_runner_tags:
-            print(f'  - {tag}')
-        no_runner_tags_fail = True
-
         print(
-            '- If you\'re the owner of the missing runners, '
-            'please make sure the runners are online and have the required tags.\n'
-            '- If you\'re the owner of the test cases that require the missing tags, '
-            'please add at least one runner with the required tags.\n'
-            '- For other users, please contact the runner owner first, '
-            'or report this issue in our internal CI channel.\n'
-            'If the issue cannot be solved in a short time, '
-            'please add the missing tags to the "no_runner_tags" section '
-            'under the file inside ESP-IDF repo: '
-            f'{os.path.relpath(KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH, IDF_PATH)}.'
+            'Please add at least one environment markers to the test cases listed above. '
+            'You may check all the env markers here: tools/ci/idf_pytest/constants.py'
         )
 
-    if no_env_marker_test_cases_fail or no_runner_tags_fail:
+    if no_env_marker_test_cases_fail:
         raise SystemExit('Failed to generate target test child pipeline.')
 
-    dump_jobs_to_yaml(target_test_jobs, output_filepath, DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME, extra_include_yml)
+    dump_jobs_to_yaml(
+        target_test_jobs,
+        output_filepath,
+        DEFAULT_TARGET_TEST_CHILD_PIPELINE_NAME,
+        extra_include_yml,
+    )
     print(f'Generate child pipeline yaml file {output_filepath} with {sum(j.parallel for j in target_test_jobs)} jobs')
 
 
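Both sources of the exclude set are plain configuration, so the behaviour can be reproduced without any GitLab client. A self-contained sketch, with a literal dict standing in for the parsed known-warnings YAML and an example EXCLUDE_RUNNER_TAGS value:

import os

# Stand-in for yaml.safe_load() on the known-warnings file.
known_warnings_dict = {'no_runner_tags': ['esp32,ip101', 'esp32c2,jtag,xtal_40mhz']}

exclude_runner_tags_set = set(known_warnings_dict.get('no_runner_tags', []))

# EXCLUDE_RUNNER_TAGS uses ';' between tag groups and ',' within a group,
# e.g. 'esp32,generic;esp32c3,wifi' (set here only so the sketch runs standalone).
os.environ.setdefault('EXCLUDE_RUNNER_TAGS', 'esp32,generic;esp32c3,wifi')
if exclude_runner_tags := os.getenv('EXCLUDE_RUNNER_TAGS'):
    exclude_runner_tags_set.update(exclude_runner_tags.split(';'))

print(sorted(exclude_runner_tags_set))
# ['esp32,generic', 'esp32,ip101', 'esp32c2,jtag,xtal_40mhz', 'esp32c3,wifi']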
@@ -187,18 +167,6 @@ if __name__ == '__main__':
         default=DEFAULT_TEST_PATHS,
         help='Paths to the apps to build.',
     )
-    parser.add_argument(
-        '--project-id',
-        type=int,
-        default=os.getenv('CI_PROJECT_ID'),
-        help='Project ID',
-    )
-    parser.add_argument(
-        '--pipeline-id',
-        type=int,
-        default=os.getenv('PARENT_PIPELINE_ID'),
-        help='Pipeline ID',
-    )
     parser.add_argument(
         '-o',
         '--output',
@@ -215,15 +183,12 @@ if __name__ == '__main__':
 
     args = parser.parse_args()
 
-    gl_project = Gitlab(args.project_id).project
-
     apps = []
     for f in glob.glob(args.app_info_filepattern):
        apps.extend(import_apps_from_txt(f))
 
     generate_target_test_child_pipeline(
-        gl_project,
-        args.paths,
-        apps,
-        args.output,
+        paths=args.paths,
+        apps=apps,
+        output_filepath=args.output,
    )
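With the GitLab client gone, the command line keeps only the path, output, and app-list options. A rough, hypothetical sketch of the slimmed-down parser (option names follow the attributes used in the diff, args.paths, args.output, args.app_info_filepattern; short flags and defaults here are placeholders, not the script's real constants):

import argparse

parser = argparse.ArgumentParser(description='Generate target-test child pipeline (sketch).')
parser.add_argument('--paths', nargs='+', default=['examples'],
                    help='Paths to the apps to build.')
parser.add_argument('-o', '--output', default='target_test_child_pipeline.yml',
                    help='Output YAML filepath.')
parser.add_argument('--app-info-filepattern', default='app_info_*.txt',
                    help='Glob pattern for the built-app list files.')

# Parse an empty argv so the sketch runs as-is; the real script parses sys.argv.
args = parser.parse_args([])
print(args.paths, args.output, args.app_info_filepattern)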
The remaining hunks touch the known-warnings file the script reads via KNOWN_GENERATE_TEST_CHILD_PIPELINE_WARNINGS_FILEPATH; the commit adds comments documenting its two sections.

@@ -1,3 +1,6 @@
+# This is the file that contains the known warnings for the generate_test_child_pipeline.py script.
+# no_env_marker_test_cases: List of test cases that do not have environment markers.
+# each item shall be the test node id, you may check the error message to get the node id.
 no_env_marker_test_cases:
 - components/nvs_flash/test_apps/pytest_nvs_flash.py::test_nvs_flash[default]
 - components/vfs/test_apps/pytest_vfs.py::test_vfs_ccomp[ccomp]
@@ -7,6 +10,12 @@ no_env_marker_test_cases:
 - examples/storage/nvs_rw_value/pytest_nvs_rw_value.py::test_examples_nvs_rw_value
 - examples/storage/nvs_rw_value_cxx/pytest_nvs_rw_value_cxx.py::test_examples_nvs_rw_value_cxx
 - examples/storage/wear_levelling/pytest_wear_levelling_example.py::test_wear_levelling_example
+
+# no_runner_tags: List of runner tags that has no test runner set.
+# each item shall be a comma separated list of runner tags.
+# NOTE:
+# 1. for multi dut tests, the runner tag shall be <target>_<count>, e.g. esp32_2 instead of esp32,esp32
+# 2. don't have spaces in the comma separated list.
 no_runner_tags:
 - esp32,ip101
 - esp32c2,jtag,xtal_40mhz
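The NOTE above pins down the entry format: multi-DUT targets collapse to '<target>_<count>' and entries contain no spaces. A hypothetical helper illustrating that convention (the real tag derivation lives in get_tags_with_amount() in the script, whose body is not part of this diff):

from collections import Counter

def runner_tag_entry(targets, env_markers):
    # Sketch only: collapse repeated targets into '<target>_<count>' (e.g. esp32_2),
    # keep single targets bare, append env markers, and join without spaces.
    parts = []
    for target, count in sorted(Counter(targets).items()):
        parts.append(target if count == 1 else f'{target}_{count}')
    return ','.join(parts + sorted(env_markers))

print(runner_tag_entry(['esp32'], ['ip101']))             # esp32,ip101
print(runner_tag_entry(['esp32', 'esp32'], ['generic']))  # esp32_2,generic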