mirror of https://github.com/espressif/esp-idf
synced 2025-03-09 17:19:09 -04:00

Merge branch 'ci/cache_submodules' into 'master'

CI: cache submodules

Closes IDFCI-1158

See merge request espressif/esp-idf!19009

Commit: 9653af8d04

.gitignore (vendored): 3 changes

@@ -21,6 +21,9 @@ GPATH
 # MacOS directory files
 .DS_Store
 
+# cache dir
+.cache/
+
 # Components Unit Test Apps files
 components/**/build/
 components/**/build_*_*/

@@ -101,12 +101,16 @@ variables:
   CI_PYTHON_TOOL_BRANCH: ""
 
 cache:
-  key: pip-cache
-  paths:
-    - .cache/pip
   # pull only for most of the use cases since it's cache dir.
-  # Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
-  policy: pull
+  # Only set "push" policy for "upload_cache" stage jobs
+  - key: pip-cache
+    paths:
+      - .cache/pip
+    policy: pull
+  - key: submodule-cache
+    paths:
+      - .cache/submodule_archives
+    policy: pull
 
 .setup_tools_unless_target_test: &setup_tools_unless_target_test |
   if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
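
For reference: GitLab CI accepts either a single cache definition or a list of cache entries, which is what the hunk above switches to, so the pip and submodule caches keep separate keys while both defaulting to the "pull" policy. A minimal sketch (assuming PyYAML) of how the new list form parses:

import yaml

snippet = '''
cache:
  - key: pip-cache
    paths:
      - .cache/pip
    policy: pull
  - key: submodule-cache
    paths:
      - .cache/submodule_archives
    policy: pull
'''

# The list form yields one mapping per cache entry, each with its own
# key, paths, and policy.
for entry in yaml.safe_load(snippet)['cache']:
    print(entry['key'], entry['policy'])  # pip-cache pull, then submodule-cache pull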

@@ -5,11 +5,14 @@
   tags:
     - host_test
   dependencies: []
+  # run host_test jobs immediately, only after upload cache
   needs:
-    - job: upload-pip-cache-shiny
-      optional: true # run host_test jobs immediately, only after upload cache
-    - job: upload-pip-cache-brew
-      optional: true # run host_test jobs immediately, only after upload cache
+    - job: upload-pip-cache
+      optional: true
+      artifacts: false
+    - job: upload-submodules-cache
+      optional: true
+      artifacts: false
 
 test_nvs_on_host:
   extends: .host_test_template

@@ -142,7 +142,6 @@
 
   - "tools/split_paths_by_spaces.py"
 
-
 .patterns-windows: &patterns-windows
   - "tools/windows/**/*"
 

@@ -150,7 +149,6 @@
   - "tools/docker/**/*"
 
 .patterns-submodule: &patterns-submodule
-  - "components/asio/asio"
   - "components/bootloader/subproject/components/micro-ecc/micro-ecc"
   - "components/bt/controller/lib_esp32"
   - "components/bt/controller/lib_esp32c2/esp32c2-bt-lib"

@@ -172,6 +170,7 @@
   - "components/tinyusb/tinyusb"
   - "components/unity/unity"
   - "examples/peripherals/secure_element/atecc608_ecdsa/components/esp-cryptoauthlib"
+  - ".gitmodules"
 
 .patterns-example_test-related_changes-ota: &patterns-example_test-related_changes-ota
   - "examples/system/ota/**/*"

@@ -248,6 +247,11 @@
     - <<: *if-dev-push
       changes: *patterns-python-cache
 
+.rules:patterns:submodule:
+  rules:
+    - <<: *if-dev-push
+      changes: *patterns-submodule
+
 .rules:dev:
   rules:
     - <<: *if-trigger

@@ -1,17 +1,22 @@
+# pull only for most of the use cases for cache
+# only set "push" policy for the jobs under this file.
+# The cache would be updated when files matched specified patterns changes.
+
 .upload_cache_template:
   stage: upload_cache
   image: $ESP_ENV_IMAGE
 
-.upload_pip_cache_template:
+upload-pip-cache:
   extends:
     - .upload_cache_template
     - .rules:patterns:python-cache
+  tags:
+    - $GEO
+    - build
   cache:
     key: pip-cache
     paths:
       - .cache/pip
-    # pull only for most of the use cases since it's cache dir.
-    # Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
     policy: push
   before_script: []
   script:

@@ -22,15 +27,29 @@
     - $IDF_PATH/tools/idf_tools.py install-python-env --features pytest
     # TODO: remove this, IDFCI-1207
     - pip install esptool -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
+  parallel:
+    matrix:
+      - GEO: [ 'shiny', 'brew' ]
 
-upload-pip-cache-shiny:
-  extends: .upload_pip_cache_template
+upload-submodules-cache:
+  extends:
+    - .upload_cache_template
+    - .rules:patterns:submodule
   tags:
-    - shiny
-    - build
-
-upload-pip-cache-brew:
-  extends: .upload_pip_cache_template
-  tags:
-    - brew
+    - $GEO
     - build
+  cache:
+    key: submodule-cache
+    paths:
+      - .cache/submodule_archives
+    policy: push
+  before_script: []
+  script:
+    - source tools/ci/utils.sh
+    - is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
+    - source tools/ci/setup_python.sh
+    - rm -rf .cache/submodule_archives # clear old submodule archives
+    - fetch_submodules
+  parallel:
+    matrix:
+      - GEO: [ 'shiny', 'brew' ]
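
The parallel/matrix entry is what replaces the hand-written -shiny and -brew job copies: GitLab expands each job into one instance per GEO value, and the $GEO tag routes each instance to the matching runner. A rough sketch of that expansion (job names taken from the YAML above; the "job: [value]" naming is GitLab's convention for matrix instances):

from itertools import product

# One job instance per matrix value combination; with a single GEO
# variable that is two instances per job.
jobs = ['upload-pip-cache', 'upload-submodules-cache']
geos = ['shiny', 'brew']

for job, geo in product(jobs, geos):
    print(f'{job}: [{geo}]  tags: [{geo}, build]')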

.gitmodules (vendored): 2 changes

@@ -5,7 +5,7 @@
 
 [submodule "components/bt/controller/lib_esp32"]
 	path = components/bt/controller/lib_esp32
 	url = ../../espressif/esp32-bt-lib.git
 
 [submodule "components/bootloader/subproject/components/micro-ecc/micro-ecc"]
 	path = components/bootloader/subproject/components/micro-ecc/micro-ecc

@@ -9,8 +9,10 @@ Check if all rules in rules.yml used or not in CI yaml files.
 
 import argparse
 import os
+import re
 import sys
 from copy import deepcopy
+from typing import List
 
 import yaml
 from idf_ci_utils import IDF_PATH

@@ -36,7 +38,9 @@ class YMLConfig:
             return [str_or_list]
         if isinstance(str_or_list, list):
             return str_or_list
-        raise ValueError('Wrong type: {}. Only supports str or list.'.format(type(str_or_list)))
+        raise ValueError(
+            'Wrong type: {}. Only supports str or list.'.format(type(str_or_list))
+        )
 
     @property
     def config(self):

@@ -45,8 +49,7 @@ class YMLConfig:
 
             all_config = dict()
             for item in self.root_yml['include']:
-                if not item.endswith('rules.yml'):
-                    all_config.update(load_yaml(os.path.join(IDF_PATH, item)))
+                all_config.update(load_yaml(os.path.join(IDF_PATH, item)))
             self._config = all_config
         return self._config
 

@@ -70,16 +73,20 @@ class YMLConfig:
         return False
 
 
-def validate(rules_yml):
-    yml_config = YMLConfig(ROOT_YML_FP)
+YML_CONFIG = YMLConfig(ROOT_YML_FP)
+
+
+def validate_needed_rules(rules_yml):
     res = 0
-    needed_rules = deepcopy(yml_config.all_extends)
+    needed_rules = deepcopy(YML_CONFIG.all_extends)
     with open(rules_yml) as fr:
         for index, line in enumerate(fr):
             if line.startswith('.rules:'):
                 key = line.strip().rsplit(':', 1)[0]
-                if not yml_config.exists(key):
-                    print('{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key))
+                if not YML_CONFIG.exists(key):
+                    print(
+                        '{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key)
+                    )
                 else:
                     needed_rules.remove(key)
 
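
The scan above relies on the convention that every rule in rules.yml is a top-level ".rules:..." key; line.strip().rsplit(':', 1)[0] drops the trailing colon, and any key never referenced via extends is reported as unused. A standalone illustration of that matching step, using a toy rules file and extends list instead of the real config:

from copy import deepcopy

rules_lines = ['.rules:dev:\n', '.rules:old-and-unused:\n', '  rules:\n']
all_extends = ['.rules:dev']  # rules actually referenced by CI jobs

needed_rules = deepcopy(all_extends)
for index, line in enumerate(rules_lines):
    if line.startswith('.rules:'):
        key = line.strip().rsplit(':', 1)[0]  # '.rules:dev:' -> '.rules:dev'
        if key not in all_extends:
            print('rules.yml:{}:WARNING:rule "{}" unused'.format(index, key))
        else:
            needed_rules.remove(key)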

@@ -93,10 +100,54 @@ def validate(rules_yml):
     return res
 
 
+def parse_submodule_paths(
+    gitsubmodules: str = os.path.join(IDF_PATH, '.gitmodules')
+) -> List[str]:
+    path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)
+    with open(gitsubmodules, 'r') as f:
+        data = f.read()
+
+    res = []
+    for item in path_regex.finditer(data):
+        res.append(item.group(1))
+
+    return res
+
+
+def validate_submodule_patterns():
+    submodule_paths = sorted(['.gitmodules'] + parse_submodule_paths())
+    submodule_paths_in_patterns = sorted(
+        YML_CONFIG.config.get('.patterns-submodule', [])
+    )
+
+    res = 0
+    if submodule_paths != submodule_paths_in_patterns:
+        res = 1
+        print('please update the pattern ".patterns-submodule"')
+        should_remove = set(submodule_paths_in_patterns) - set(submodule_paths)
+        if should_remove:
+            print(f'- should remove: {should_remove}')
+        should_add = set(submodule_paths) - set(submodule_paths_in_patterns)
+        if should_add:
+            print(f'- should add: {should_add}')
+
+    return res
+
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument('rules_yml', nargs='?', default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
-                        help='rules.yml file path')
+    parser.add_argument(
+        'rules_yml',
+        nargs='?',
+        default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
+        help='rules.yml file path',
+    )
     args = parser.parse_args()
 
-    sys.exit(validate(args.rules_yml))
+    exit_code = 0
+    if validate_needed_rules(args.rules_yml):
+        exit_code = 1
+    if validate_submodule_patterns():
+        exit_code = 1
+
+    sys.exit(exit_code)
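
For context: parse_submodule_paths pulls every "path = ..." entry out of .gitmodules with a multiline regex, and validate_submodule_patterns then requires ".patterns-submodule" in rules.yml to list exactly those paths plus .gitmodules itself, which keeps the new CI rule in sync with the real submodule list. A self-contained sketch of the parsing step against an inline sample (the sample entries and URLs are illustrative, not the real file):

import re

sample = '''[submodule "components/unity/unity"]
	path = components/unity/unity
	url = ../../example/unity.git
[submodule "components/tinyusb/tinyusb"]
	path = components/tinyusb/tinyusb
	url = ../../example/tinyusb.git
'''

# Same pattern as parse_submodule_paths: match indented "path = ..." lines.
path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)
paths = [m.group(1) for m in path_regex.finditer(sample)]
print(paths)  # ['components/unity/unity', 'components/tinyusb/tinyusb']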

@@ -19,6 +19,8 @@ PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)')
 URL_PATTERN = re.compile(r'url\s+=\s+(\S+)')
 
 SUBMODULE_ARCHIVE_TEMP_FOLDER = 'submodule_archive'
+# need to match the one defined in CI yaml files for caching purpose
+SUBMODULE_ARCHIVE_CACHE_DIR = '.cache/submodule_archives'
 
 
 class SubModule(object):

@@ -28,6 +30,7 @@ class SubModule(object):
 
     def __init__(self, gitlab_inst, path, url):
         self.path = path
+        self.url = url
         self.gitlab_inst = gitlab_inst
         self.project_id = self._get_project_id(url)
         self.commit_id = self._get_commit_id(path)

@@ -48,7 +51,7 @@ class SubModule(object):
     def download_archive(self):
         print('Update submodule: {}: {}'.format(self.path, self.commit_id))
         path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER,
-                                                      self.project_id)
+                                                      self.project_id, SUBMODULE_ARCHIVE_CACHE_DIR)
         renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path))
         os.rename(path_name, renamed_path)
         shutil.rmtree(self.path, ignore_errors=True)

@@ -177,7 +177,7 @@ class Gitlab(object):
         return job_id_list
 
     @retry
-    def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None) -> str:
+    def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None, cache_dir: Optional[str] = None) -> str:
         """
         Download archive of certain commit of a repository and extract to destination path
 

@@ -191,6 +191,23 @@ class Gitlab(object):
         else:
             project = self.gitlab_inst.projects.get(project_id)
 
+        if cache_dir:
+            local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
+            os.makedirs(os.path.dirname(local_archive_file), exist_ok=True)
+            if os.path.isfile(local_archive_file):
+                print('Use cached archive file. Skipping download...')
+            else:
+                with open(local_archive_file, 'wb') as fw:
+                    try:
+                        project.repository_archive(sha=ref, streamed=True, action=fw.write)
+                    except gitlab.GitlabGetError as e:
+                        print('Failed to archive from project {}'.format(project_id))
+                        raise e
+                print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(local_archive_file)) / (1024 * 1024)))
+
+            return self.decompress_archive(local_archive_file, destination)
+
+        # no cache
         with tempfile.NamedTemporaryFile(delete=False) as temp_file:
             try:
                 project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
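
The cache_dir branch above is a plain look-aside cache: the archive for a given ref lives under a stable file name, a hit skips the download entirely, and a miss downloads once and leaves the file for later pipeline runs. The same pattern in isolation (a minimal sketch; fetch_fn is a hypothetical stand-in for the python-gitlab archive download):

import os

def cached_download(ref: str, cache_dir: str, fetch_fn) -> str:
    # Cache keyed by ref: reuse the archive when present, otherwise
    # stream it into the cache file once.
    local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
    os.makedirs(cache_dir, exist_ok=True)
    if os.path.isfile(local_archive_file):
        print('Use cached archive file. Skipping download...')
    else:
        with open(local_archive_file, 'wb') as fw:
            fetch_fn(fw.write)  # e.g. streams the repository archive chunks
    return local_archive_file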

@@ -198,9 +215,13 @@ class Gitlab(object):
                 print('Failed to archive from project {}'.format(project_id))
                 raise e
 
-        print('archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
+        print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
 
-        with tarfile.open(temp_file.name, 'r') as archive_file:
+        return self.decompress_archive(temp_file.name, destination)
+
+    @staticmethod
+    def decompress_archive(path: str, destination: str) -> str:
+        with tarfile.open(path, 'r') as archive_file:
             root_name = archive_file.getnames()[0]
             archive_file.extractall(destination)
 
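
decompress_archive is factored out so the cached and uncached paths share one extraction step: open the tarball, record its single root directory, extract into the destination, and hand back the extracted root. A standalone sketch under the same single-root-directory assumption (the return value here is this sketch's guess; the hunk above ends before the original's return statement):

import os
import tarfile

def decompress_archive(path: str, destination: str) -> str:
    with tarfile.open(path, 'r') as archive_file:
        # GitLab archives contain a single top-level directory named
        # after the project and ref.
        root_name = archive_file.getnames()[0]
        archive_file.extractall(destination)
    return os.path.join(os.path.realpath(destination), root_name)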