From 6c852fe84f38aa7973d8d89bfa987ece9abd6ef5 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Wed, 19 Jun 2024 15:35:00 +0200 Subject: [PATCH 01/44] bot: Add a test run backup mode If 'use_backup' option is set to True in config.py, the bot collects a backup of the stats, so in case of unexpected termination the bot can continue the test series from the test case where it was interrupted. --- autopts/bot/common.py | 245 +++++++++++++++++++++++++++++++++++------- autopts/bot/zephyr.py | 80 +++++++++----- autopts/client.py | 131 ++++++++++++---------- autopts/config.py | 9 ++ test/unittests.py | 38 ++----- 5 files changed, 349 insertions(+), 154 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index b7d8126609..f5593d2661 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -20,12 +20,13 @@ import sys import shutil import time - +import json from pathlib import Path from argparse import Namespace from autopts import client as autoptsclient -from autopts.client import CliParser, Client, TestCaseRunStats, init_logging, TEST_CASE_DB -from autopts.config import MAX_SERVER_RESTART_TIME +from autopts.client import CliParser, Client, TestCaseRunStats, init_logging +from autopts.config import MAX_SERVER_RESTART_TIME, TEST_CASES_JSON, ALL_STATS_JSON, TC_STATS_JSON, \ + ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr from autopts.ptsprojects.testcase_db import DATABASE_FILE @@ -37,6 +38,20 @@ log = logging.debug +def cleanup_tmp_files(): + files = [ALL_STATS_RESULTS_XML, + TC_STATS_RESULTS_XML, + TEST_CASES_JSON, + ALL_STATS_JSON, + TC_STATS_JSON, + BOT_STATE_JSON, + ] + + for file in files: + if os.path.exists(file): + os.remove(file) + + class BuildAndFlashException(Exception): pass @@ -99,6 +114,7 @@ def __init__(self, args, **kwargs): self.server_args = args.get('server_args', None) self.pylink_reset = args.get('pylink_reset', False) 
self.max_server_restart_time = args.get('max_server_restart_time', MAX_SERVER_RESTART_TIME) + self.use_backup = args.get('use_backup', False) if self.server_args is not None: from autoptsserver import SvrArgumentParser @@ -128,6 +144,15 @@ def __init__(self, get_iut, project, name, bot_config_class=BotConfigArgs, self.config_default = "default.conf" # The iut_config dictionary loaded from config.py self.iut_config = None + # Backup files with test cases and stats from a previous test run that + # has been stopped unexpectedly. + self.backup = {'available': False, + 'create': False, + 'all_stats': None, + 'tc_stats': None, + 'test_cases_file': TEST_CASES_JSON, + 'all_stats_file': ALL_STATS_JSON, + 'tc_stats_file': TC_STATS_JSON} def parse_or_find_tty(self, args): if args.tty_alias: @@ -143,6 +168,69 @@ def parse_or_find_tty(self, args): elif args.debugger_snr is None: args.debugger_snr = get_debugger_snr(args.tty_file) + def load_backup_of_previous_run(self): + """ + If the backup mode was enabled in the previous test run, and it + has been terminated unexpectedly, it is possible to resume the test series + from the last remembered config/test_case. 
+ """ + + continue_test_case = None + continue_config = None + if os.path.exists(self.backup['all_stats_file']): + self.backup['all_stats'] = TestCaseRunStats.load_from_backup(self.backup['all_stats_file']) + continue_config = self.backup['all_stats'].pending_config + + # The last config and test case preformed in the broken test run + if os.path.exists(self.backup['tc_stats_file']): + self.backup['tc_stats'] = TestCaseRunStats.load_from_backup(self.backup['tc_stats_file']) + continue_config = self.backup['tc_stats'].pending_config + continue_test_case = self.backup['tc_stats'].pending_test_case + + if not continue_config: + return + + with open(self.backup['test_cases_file']) as f: + data = f.read() + test_cases_per_config = json.loads(data) + run_order = list(test_cases_per_config.keys()) + + # Skip already completed configs + config_index = run_order.index(continue_config) + if continue_test_case: + # Skip already completed test cases and the faulty one + tc_index = test_cases_per_config[continue_config].index(continue_test_case) + test_cases_per_config[continue_config] = test_cases_per_config[continue_config][tc_index + 1:] + + if not test_cases_per_config[continue_config]: + # The faulty test case was the last one in the config. 
Move to the next config + self.backup['tc_stats'].update(continue_test_case, 0, 'TIMEOUT') + self._merge_stats(self.backup['all_stats'], self.backup['tc_stats']) + self.backup['all_stats'].save_to_backup(self.backup['all_stats_file']) + self.backup['tc_stats'] = None + config_index += 1 + continue_test_case = None + + _args = {} + self.backup['args_per_config'] = _args + self.backup['available'] = True + self.backup['run_order'] = run_order[config_index:] + + if not self.backup['run_order']: + # All test cases done, the last one was faulty + self.backup['all_stats'].test_run_completed = True + return + + continue_config = self.backup['run_order'][0] + + for config in self.backup['run_order']: + _args[config] = copy.deepcopy(self.args) + _args[config].test_cases = test_cases_per_config[config] + + # Skip build and flash for the pending config as it has been + # already done in previous test run. + _args[continue_config].no_build = True + def parse_config_and_args(self, bot_config_dict=None): if self.bot_config is not None: # Do not parse a second time in the simple client layer @@ -155,71 +243,148 @@ def parse_config_and_args(self, bot_config_dict=None): self.args, errmsg = self.arg_parser.parse(bot_config_namespace) self.args.retry_config = bot_config_dict.get('retry_config', None) - if not errmsg: - # Remove default root handler that was created at first logging.debug - logging.getLogger().handlers.clear() - init_logging('_' + '_'.join(str(x) for x in self.args.cli_port)) + if errmsg: + return errmsg + + if self.args.use_backup: + self.load_backup_of_previous_run() + else: + cleanup_tmp_files() + + # Remove default root handler that was created at the first logging.debug + logging.getLogger().handlers.clear() + init_logging('_' + '_'.join(str(x) for x in self.args.cli_port)) return errmsg def apply_config(self, args, config, value): pass - def run_test_cases(self): + def _yield_next_config(self): limit_counter = 0 - all_stats = None - run_order, _args = 
get_filtered_test_cases(self.iut_config, self.args, - self.config_default, self.ptses[0]) + if self.backup['available']: + if self.backup['all_stats'].test_run_completed: + # All test cases have been completed before termination + return + + _args = self.backup['args_per_config'] + run_order = self.backup['run_order'] + else: + _run_order, _args = get_filtered_test_cases(self.iut_config, self.args, + self.config_default, self.ptses[0]) + + run_order = [] + test_cases = {} + for config in _run_order: + if _args.get(config) is None: + test_case_number = 0 + else: + test_case_number = len(_args[config].test_cases) + + if test_case_number == 0: + log(f'No test cases for {config} config, ignored.') + continue + + if self.args.test_case_limit: + limit = self.args.test_case_limit - limit_counter + if limit == 0: + log(f'Limit of test cases reached. No more test cases will be run.') + break + + if test_case_number > limit: + _args[config].test_cases = _args[config].test_cases[:limit] + test_case_number = limit + + limit_counter += test_case_number + + test_cases[config] = _args[config].test_cases + run_order.append(config) + + if self.args.use_backup: + with open(self.backup['test_cases_file'], 'w') as file: + file.write(json.dumps(test_cases, indent=4)) for config in run_order: - if _args.get(config) is None: - test_case_number = 0 - else: - test_case_number = len(_args[config].test_cases) + yield config, _args[config] + + def _backup_tc_stats(self, config=None, test_case=None, stats=None, **kwargs): + if not self.backup or not stats: + return + + stats.pending_config = config + stats.pending_test_case = test_case + stats.save_to_backup(self.backup['tc_stats_file']) + + def _merge_stats(self, all_stats, stats): + all_stats.merge(stats) - if test_case_number == 0: - log(f'No test cases for {config} config, ignored.') - continue + if os.path.exists(stats.xml_results): + os.remove(stats.xml_results) - if self.args.test_case_limit: - limit = self.args.test_case_limit - 
limit_counter - if limit == 0: - log(f'Limit of test cases reached. No more test cases will be run.') - break + if os.path.exists(TC_STATS_JSON): + os.remove(TC_STATS_JSON) - if test_case_number > limit: - _args[config].test_cases = _args[config].test_cases[:limit] - test_case_number = limit + def run_test_cases(self): + all_stats = self.backup['all_stats'] + stats = self.backup['tc_stats'] + + if not all_stats: + all_stats = TestCaseRunStats([], [], 0, xml_results_file=ALL_STATS_RESULTS_XML) + self.backup['all_stats'] = all_stats - limit_counter += test_case_number + if self.args.use_backup: + all_stats.save_to_backup(self.backup['all_stats_file']) + projects = self.ptses[0].get_project_list() + + for config, config_args in self._yield_next_config(): try: - self.apply_config(_args[config], config, self.iut_config[config]) + if not stats: + stats = TestCaseRunStats(projects, + config_args.test_cases, + config_args.retry, + self.test_case_database, + xml_results_file=TC_STATS_RESULTS_XML) + + if self.args.use_backup: + self._backup_tc_stats(config=config, test_case=None, stats=stats) + + self.apply_config(config_args, config, self.iut_config[config]) + + stats = autoptsclient.run_test_cases(self.ptses, + self.test_cases, + config_args, + stats, + config=config, + pre_test_case_fn=self._backup_tc_stats) - stats = autoptsclient.run_test_cases(self.ptses, self.test_cases, _args[config]) except BuildAndFlashException: log(f'Build and flash step failed for config {config}') - stats = TestCaseRunStats(self.ptses[0].get_project_list(), - _args[config].test_cases, 0, TEST_CASE_DB) - for tc in _args[config].test_cases: + + for tc in config_args.test_cases: status = 'BUILD_OR_FLASH ERROR' stats.update(tc, time.time(), status) - if all_stats is None: - all_stats = stats - else: - all_stats = all_stats.merge(all_stats, stats) + if stats: + self._merge_stats(all_stats, stats) + stats = None + + if self.args.use_backup: + all_stats.save_to_backup(self.backup['all_stats_file']) # 
End of bot run - all test cases completed - if all_stats is None: + if all_stats.num_test_cases == 0: print(f'\nNo test cases were run. Please verify your config.\n') - return TestCaseRunStats([], [], 0, None) + return all_stats + print(f'\nFinal Bot Summary:\n') all_stats.print_summary() - print(f'\nFinal Bot Summary:\n') + if self.args.use_backup: + all_stats.test_run_completed = True + all_stats.save_to_backup(self.backup['all_stats_file']) try: results = all_stats.get_results() @@ -431,6 +596,6 @@ def cleanup(): :return: None """ try: - pass + cleanup_tmp_files() except OSError: pass diff --git a/autopts/bot/zephyr.py b/autopts/bot/zephyr.py index 59096aae74..574ec2fcf7 100755 --- a/autopts/bot/zephyr.py +++ b/autopts/bot/zephyr.py @@ -19,6 +19,7 @@ import collections import datetime import importlib +import json import logging import os import sys @@ -30,6 +31,7 @@ from autopts import bot from autopts.bot.common_features.github import update_sources +from autopts.config import BOT_STATE_JSON, TMP_DIR from autopts.ptsprojects.zephyr import ZEPHYR_PROJECT_URL from autopts import client as autoptsclient @@ -236,33 +238,52 @@ def __init__(self): def main(bot_client): - bot.common.pre_cleanup() + if os.path.exists(BOT_STATE_JSON): + print('Continuing the previous terminated test run (remove tmp/ to start freshly)') - start_time = time.time() - start_time_stamp = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + with open(BOT_STATE_JSON, "r") as f: + data = f.read() + bot_state = json.loads(data) + cfg = bot_state['config'] + args = cfg['auto_pts'] - cfg = bot_client.bot_config - args = cfg['auto_pts'] + else: + # Start fresh test run - if 'database_file' not in args: - args['database_file'] = DATABASE_FILE + bot_state = {} + bot.common.pre_cleanup() - if 'githubdrive' in cfg: - update_sources(cfg['githubdrive']['path'], - cfg['githubdrive']['remote'], - cfg['githubdrive']['branch'], True) - - args['kernel_image'] = os.path.join(args['project_path'], 'tests', 
- 'bluetooth', 'tester', 'outdir', - 'zephyr', 'zephyr.elf') - - if 'git' in cfg: - repos_info = github.update_repos(args['project_path'], cfg["git"]) - repo_status = report.make_repo_status(repos_info) - args['repos'] = cfg['git'] - else: - repos_info = {} - repo_status = '' + bot_state['start_time'] = time.time() + + cfg = bot_client.bot_config + args = cfg['auto_pts'] + bot_state['config'] = cfg + + if 'database_file' not in args: + args['database_file'] = DATABASE_FILE + + if 'githubdrive' in cfg: + update_sources(cfg['githubdrive']['path'], + cfg['githubdrive']['remote'], + cfg['githubdrive']['branch'], True) + + args['kernel_image'] = os.path.join(args['project_path'], 'tests', + 'bluetooth', 'tester', 'outdir', + 'zephyr', 'zephyr.elf') + + if 'git' in cfg: + bot_state['repos_info'] = github.update_repos(args['project_path'], cfg["git"]) + bot_state['repo_status'] = report.make_repo_status(bot_state['repos_info']) + args['repos'] = cfg['git'] + else: + bot_state['repos_info'] = {} + bot_state['repo_status'] = '' + + if args.get('use_backup', False): + os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + + with open(BOT_STATE_JSON, "w") as f: + f.write(json.dumps(bot_state, indent=4)) try: stats = bot_client.run_tests() @@ -286,10 +307,11 @@ def main(bot_client): report_file = report.make_report_xlsx(results, summary, regressions, progresses, descriptions, xmls, PROJECT_NAME) report_txt = report.make_report_txt(results, regressions, - progresses, repo_status, PROJECT_NAME) + progresses, bot_state['repo_status'], PROJECT_NAME) end_time = time.time() - end_time_stamp = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + end_time_stamp = datetime.datetime.fromtimestamp(end_time).strftime("%Y_%m_%d_%H_%M_%S") + start_time_stamp = datetime.datetime.fromtimestamp(bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S") url = None github_link = None report_folder = None @@ -297,7 +319,7 @@ def main(bot_client): if 'githubdrive' in cfg or 'gdrive' in cfg: 
iut_logs = 'logs/' readme_file = make_readme_md(start_time_stamp, end_time_stamp, - repos_info, args['pts_ver']) + bot_state['repos_info'], args['pts_ver']) report_diff_txt, deleted_cases = report.make_report_diff(cfg['githubdrive'], results, regressions, progresses, new_cases) @@ -318,7 +340,7 @@ def main(bot_client): commit_msg_pattern = cfg['githubdrive']['commit_msg'] if 'git' in cfg: - commit_sha = repos_info['zephyr']['commit'] + commit_sha = bot_state['repos_info']['zephyr']['commit'] branch = cfg['git']['zephyr']['branch'] commit_msg = commit_msg_pattern.format( @@ -337,7 +359,7 @@ def main(bot_client): print("Sending email ...") # keep mail related context to simplify the code - mail_ctx = {'repos_info': repo_status, + mail_ctx = {'repos_info': bot_state['repo_status'], 'summary': f'''{mail.status_dict2summary_html(summary)} {mail.regressions2html(regressions, descriptions)} {mail.progresses2html(progresses, descriptions)} @@ -365,7 +387,7 @@ def main(bot_client): # Elapsed Time mail_ctx["elapsed_time"] = str(datetime.timedelta( - seconds=(int(end_time - start_time)))) + seconds=(int(end_time - bot_state['start_time'])))) subject, body = compose_mail(args, cfg['mail'], mail_ctx) diff --git a/autopts/client.py b/autopts/client.py index 8bb5273aa0..08285056f5 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -20,10 +20,12 @@ import datetime import errno +import json import logging import os import queue import random +import shutil import signal import socket import sys @@ -36,6 +38,7 @@ from xmlrpc.server import SimpleXMLRPCServer from termcolor import colored +from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR from autopts.ptsprojects import ptstypes from autopts.ptsprojects import stack from autopts.ptsprojects.boards import get_available_boards, tty_to_com @@ -52,7 +55,6 @@ log_lock = threading.RLock() RUNNING_TEST_CASE = {} -TEST_CASE_DB = None autoprojects = None TEST_CASE_TIMEOUT_MS = 300000 # milliseconds @@ -478,7 +480,7 
@@ def init_pts_thread_entry(proxy, args, exceptions, finish_count): proxy.enable_maximum_logging(args.enable_max_logs) -def init_pts(args, ptses, tc_db_table_name=None): +def init_pts(args, ptses): """Initialization procedure for PTS instances""" proxy_list = ptses @@ -514,10 +516,6 @@ def init_pts(args, ptses, tc_db_table_name=None): thread_list.append(thread) thread.start() - if tc_db_table_name: - global TEST_CASE_DB - TEST_CASE_DB = TestCaseTable(tc_db_table_name, args.database_file) - # Wait until each PTS instance is initialized. try: finish_count.wait_for(thread_count, timeout=max( @@ -555,7 +553,8 @@ def get_result_color(status): class TestCaseRunStats: - def __init__(self, projects, test_cases, retry_count, db=None): + def __init__(self, projects, test_cases, retry_count, db=None, + xml_results_file=None): self.pts_ver = '' self.platform = '' self.run_count_max = retry_count + 1 # Run test at least once @@ -566,53 +565,55 @@ def __init__(self, projects, test_cases, retry_count, db=None): self.max_test_case_name = len(max(test_cases, key=len)) if test_cases else 0 self.margin = 3 self.index = 0 - - self.xml_results = tempfile.NamedTemporaryFile(delete=False).name - root = ElementTree.Element("results") - tree = ElementTree.ElementTree(root) - tree.write(self.xml_results) - + self.xml_results = xml_results_file self.db = db + self.est_duration = 0 + self.pending_config = None + self.pending_test_case = None + self.test_run_completed = False + + if self.xml_results and not os.path.exists(self.xml_results): + root = ElementTree.Element("results") + tree = ElementTree.ElementTree(root) + tree.write(self.xml_results) if self.db: self.est_duration = db.estimate_session_duration(test_cases, self.run_count_max) - if self.est_duration: - approx = str(datetime.timedelta(seconds=self.est_duration)) - print("Number of test cases to run: ", self.num_test_cases, " in approximately: ", approx) - else: - self.est_duration = 0 + def save_to_backup(self, filename): + 
data_to_save = {} + for key, value in self.__dict__.items(): + if isinstance(value, (int, str, bool)): + data_to_save[key] = value - def __del__(self): - try: - os.remove(self.xml_results) - except Exception as e: - print(e) + with open(filename, 'w') as json_file: + json.dump(data_to_save, json_file, indent=4) @staticmethod - def merge(stats1, stats2): - merged_stats = TestCaseRunStats([], [], 0, stats1.db) - merged_stats.num_test_cases = stats1.num_test_cases + stats2.num_test_cases - merged_stats.num_test_cases_width = max(stats1.num_test_cases_width, stats2.num_test_cases_width) - merged_stats.max_project_name = max(stats1.max_project_name, stats2.max_project_name) - merged_stats.max_test_case_name = max(stats1.max_test_case_name, stats2.max_test_case_name) - merged_stats.est_duration = stats1.est_duration + stats2.est_duration - - merged_stats_tree = ElementTree.parse(merged_stats.xml_results) - merged_stats_root = merged_stats_tree.getroot() - - stats1_tree = ElementTree.parse(stats1.xml_results) - root1 = stats1_tree.getroot() + def load_from_backup(backup_file): + with open(backup_file, 'r') as f: + data = json.load(f) + stats = TestCaseRunStats([], [], 0, None) + stats.__dict__.update(data) + return stats + + def merge(self, stats2): + self.num_test_cases = self.num_test_cases + stats2.num_test_cases + self.num_test_cases_width = max(self.num_test_cases_width, stats2.num_test_cases_width) + self.max_project_name = max(self.max_project_name, stats2.max_project_name) + self.max_test_case_name = max(self.max_test_case_name, stats2.max_test_case_name) + self.est_duration = self.est_duration + stats2.est_duration + self.pending_config = stats2.pending_config + self.pending_test_case = stats2.pending_test_case stats2_tree = ElementTree.parse(stats2.xml_results) root2 = stats2_tree.getroot() - merged_stats_root.extend(root1) - merged_stats_root.extend(root2) - merged_stats_tree.write(merged_stats.xml_results) - - return merged_stats + self_tree = 
ElementTree.parse(self.xml_results) + root1 = self_tree.getroot() + root1.extend(root2) + self_tree.write(self.xml_results) def update(self, test_case_name, duration, status, description=''): tree = ElementTree.parse(self.xml_results) @@ -1205,7 +1206,7 @@ def get_test_cases(pts, test_cases, excluded): return _test_cases -def run_test_cases(ptses, test_case_instances, args): +def run_test_cases(ptses, test_case_instances, args, stats, **kwargs): """Runs a list of test cases""" ports_str = '_'.join(str(x) for x in args.cli_port) @@ -1218,15 +1219,16 @@ def run_test_cases(ptses, test_case_instances, args): raise test_cases = args.test_cases - projects = ptses[0].get_project_list() retry_config = getattr(args, 'retry_config', None) repeat_until_failed = getattr(args, 'repeat_until_fail', False) - - # Statistics - stats = TestCaseRunStats(projects, test_cases, args.retry, TEST_CASE_DB) - + pre_test_case_fn = kwargs.get('pre_test_case_fn', None) exceptions = queue.Queue() + approx = '' + if stats.est_duration: + approx = f" in approximately: " + str(datetime.timedelta(seconds=stats.est_duration)) + print(f"Number of test cases to run: {stats.num_test_cases}{approx}") + for test_case in test_cases: stats.run_count = 0 test_retry_count = None @@ -1236,6 +1238,9 @@ def run_test_cases(ptses, test_case_instances, args): test_retry_count = retry_config[test_case] while True: + if pre_test_case_fn: + pre_test_case_fn(test_case=test_case, stats=stats, **kwargs) + status, duration = run_test_case(ptses, test_case_instances, test_case, stats, session_log_dir, exceptions, args.superguard) @@ -1265,8 +1270,8 @@ def run_test_cases(ptses, test_case_instances, args): if (status == 'PASS' and not args.stress_test) or \ stats.run_count == retry_limit: - if TEST_CASE_DB: - TEST_CASE_DB.update_statistics(test_case, duration, status) + if stats.db: + stats.db.update_statistics(test_case, duration, status) break @@ -1304,6 +1309,7 @@ def __init__(self, get_iut, project, name, 
parser_class=CliParser): # Command line arguments parser self.arg_parser = parser_class(cli_support=autoprojects.iutctl.CLI_SUPPORT, board_names=self.boards) self.prev_sigint_handler = None + self.test_case_database = None def parse_config_and_args(self, args_namespace=None): if args_namespace is None: @@ -1351,18 +1357,22 @@ def main(self, _args=None): elif self.args.sudo: sys.exit("Please run this program as root.") + os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + if self.args.store: tc_db_table_name = self.store_tag + str(self.args.board_name) - else: - tc_db_table_name = None - init_pts(self.args, self.ptses, tc_db_table_name) + if os.path.exists(self.args.database_file) and not os.path.exists(TEST_CASE_DB): + shutil.copy(self.args.database_file, TEST_CASE_DB) + + self.test_case_database = TestCaseTable(tc_db_table_name, TEST_CASE_DB) + + init_pts(self.args, self.ptses) btp.init(self.get_iut) self.init_iutctl(self.args) stack.init_stack() - stack_inst = stack.get_stack() self.setup_project_pixits(self.ptses) self.setup_test_cases(self.ptses) @@ -1371,6 +1381,9 @@ def main(self, _args=None): self.cleanup() + if self.args.store: + shutil.move(TEST_CASE_DB, self.args.database_file) + print("\nBye!") sys.stdout.flush() @@ -1395,7 +1408,17 @@ def run_test_cases(self): self.args.test_cases = get_test_cases(self.ptses[0], self.args.test_cases, self.args.excluded) - return run_test_cases(self.ptses, self.test_cases, self.args) + + projects = self.ptses[0].get_project_list() + + if os.path.exists(TC_STATS_RESULTS_XML): + os.remove(TC_STATS_RESULTS_XML) + + stats = TestCaseRunStats(projects, self.args.test_cases, + self.args.retry, self.test_case_database, + xml_results_file=TC_STATS_RESULTS_XML) + + return run_test_cases(self.ptses, self.test_cases, self.args, stats) def cleanup(self): log(f'{self.__class__.__name__}.{self.cleanup.__name__}') diff --git a/autopts/config.py b/autopts/config.py index 3bd35cdd2c..7235786e32 100644 --- a/autopts/config.py +++ 
b/autopts/config.py @@ -20,3 +20,12 @@ BTMON_PORT = 65432 MAX_SERVER_RESTART_TIME = 60 + +TMP_DIR = 'tmp/' +ALL_STATS_RESULTS_XML = TMP_DIR + 'all_stats_results.xml' +TC_STATS_RESULTS_XML = TMP_DIR + 'tc_stats_results.xml' +TEST_CASES_JSON = TMP_DIR + 'test_cases_file.json' +ALL_STATS_JSON = TMP_DIR + 'all_stats.json' +TC_STATS_JSON = TMP_DIR + 'tc_stats.json' +TEST_CASE_DB = TMP_DIR + 'TestCase.db' +BOT_STATE_JSON = TMP_DIR + 'bot_state.json' diff --git a/test/unittests.py b/test/unittests.py index ea473af1da..aa3de0a41c 100644 --- a/test/unittests.py +++ b/test/unittests.py @@ -8,6 +8,7 @@ from autopts.bot.zephyr import make_readme_md from autopts.client import FakeProxy, TestCaseRunStats +from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML from autopts.ptsprojects.testcase_db import TestCaseTable from autoptsclient_bot import import_bot_projects, import_bot_module from test.mocks.mocked_test_cases import mock_workspace_test_cases, test_case_list_generation_samples @@ -17,33 +18,6 @@ DATABASE_FILE = 'test/mocks/zephyr_database.db' -class TestCaseRunStatsMock: - def __init__(self, projects, test_cases, retry_count, db=None): - pass - - def update(self, test_case_name, duration, status): - return [], [] - - def get_results(self): - return {} - - def get_regressions(self): - return [] - - def get_progresses(self): - return [] - - def get_status_count(self): - return {} - - def print_summary(self): - pass - - @staticmethod - def merge(stats1, stats2): - return stats1 - - def delete_file(file_path): try: if os.path.isfile(file_path): @@ -59,10 +33,12 @@ def setUp(self): os.chdir(dirname(dirname(abspath(__file__)))) open('ttyUSB', 'w').close() shutil.copy('test/configs/config_zephyr.py', 'autopts/bot/config.py') + os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) def tearDown(self): os.remove('ttyUSB') delete_file('autopts/bot/config.py') + delete_file('tmp/') def test_bot_startup_import_bot_projects(self): """Check that all supported methods of passing a 
config file @@ -139,8 +115,8 @@ def mock_get_project_list(): def mock_get_test_case_list(project): return mock_workspace_test_cases[project] - def mock_run_test_cases(ptses, test_case_instances, args): - return TestCaseRunStatsMock([], [], 0) + def mock_run_test_cases(ptses, test_case_instances, args, stats, **kwargs): + return TestCaseRunStats([], [], 0, xml_results_file=ALL_STATS_RESULTS_XML) for args in testargs: with patch.object(sys, 'argv', args.split(' ')): @@ -188,7 +164,7 @@ def generate_stats(self, files): new_cases_id = [13, 14, 15] stats1 = TestCaseRunStats(mock_workspace_test_cases.keys(), - test_cases, 0, None) + test_cases, 0, None, xml_results_file=ALL_STATS_RESULTS_XML) # Mock results from a first bot run, to generate regressions, # progresses, new cases in a second one. for i, tc in enumerate(test_cases): @@ -225,7 +201,7 @@ def generate_stats(self, files): files['first_report_txt'] = first_report_txt stats = TestCaseRunStats(mock_workspace_test_cases.keys(), - test_cases, 0, TEST_CASE_DB) + test_cases, 0, TEST_CASE_DB, xml_results_file=ALL_STATS_RESULTS_XML) # Mock results from a second bot run. # Note one deleted test case. 
From f2a795f50031d30d9e9431209668a220b43a72f4 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Tue, 25 Jun 2024 17:24:28 +0200 Subject: [PATCH 02/44] common: Add support for hid_gpio active hub hid_gpio is a mynewt based, DIY active hub https://github.com/kasjer/hid_gpio --- autopts/bot/common.py | 6 +++ autopts/client.py | 56 +++++++++++++-------- autopts/utils.py | 86 +++++++++++++++++++++++++------ autoptsserver.py | 63 ++++++++++++++++------- cliparser.py | 14 ++++-- tools/active_hub_server.py | 100 +++++++++++++++++++++++++++++++++++++ 6 files changed, 267 insertions(+), 58 deletions(-) create mode 100644 tools/active_hub_server.py diff --git a/autopts/bot/common.py b/autopts/bot/common.py index f5593d2661..f9573fccbe 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -105,6 +105,7 @@ def __init__(self, args, **kwargs): self.stress_test = args.get('stress_test', False) self.ykush = args.get('ykush', None) self.ykush_replug_delay = args.get('ykush_replug_delay', 3) + self.active_hub_server = args.get('active_hub_server', None) self.recovery = args.get('recovery', False) self.superguard = float(args.get('superguard', 0)) self.cron_optim = args.get('cron_optim', False) @@ -116,6 +117,11 @@ def __init__(self, args, **kwargs): self.max_server_restart_time = args.get('max_server_restart_time', MAX_SERVER_RESTART_TIME) self.use_backup = args.get('use_backup', False) + if self.ykush or self.active_hub_server: + self.usb_replug_available = True + else: + self.usb_replug_available = False + if self.server_args is not None: from autoptsserver import SvrArgumentParser _server_args = SvrArgumentParser( diff --git a/autopts/client.py b/autopts/client.py index 08285056f5..ca6a801a7a 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -48,7 +48,7 @@ from autopts.pybtp import btp, defs from autopts.pybtp.types import BTPError, SynchError, MissingWIDError from autopts.utils import InterruptableThread, ResultWithFlag, CounterWithFlag, set_global_end, 
\ - raise_on_global_end, RunEnd, get_global_end, have_admin_rights, ykush_replug_usb + raise_on_global_end, RunEnd, get_global_end, have_admin_rights, ykush_replug_usb, active_hub_server_replug_usb from cliparser import CliParser log = logging.debug @@ -1479,23 +1479,8 @@ def run_recovery(args, ptses): iut = autoprojects.iutctl.get_iut() iut.stop() - if args.ykush: - if sys.platform == 'win32': - device_id = tty_to_com(args.tty_file) - elif args.tty_alias: - device_id = args.tty_alias - else: - device_id = args.tty_file - - ykush_replug_usb(args.ykush, device_id=device_id, delay=args.ykush_replug_delay) - - if args.tty_alias: - while not os.path.islink(args.tty_alias) and not os.path.exists(os.path.realpath(args.tty_alias)): - raise_on_global_end() - log(f'Waiting for TTY {args.tty_alias} to appear...\n') - time.sleep(1) - - args.tty_file = os.path.realpath(args.tty_alias) + if args.usb_replug_available: + replug_usb(args) for pts in ptses: req_sent = False @@ -1513,19 +1498,23 @@ def run_recovery(args, ptses): req_sent = True err = pts.callback.get_result('recover_pts', timeout=args.max_server_restart_time) if err == True: + log('PTS recovered') break if last_restart_time < pts.get_last_recovery_time(): + log('PTS recovered') break - except Exception: - log('Server is still resetting. Wait a little more.') - time.sleep(1) + except BaseException as e: + log(e) + + log('Server is still resetting. Wait a little more.') + time.sleep(1) stack_inst = stack.get_stack() stack_inst.cleanup() - if args.ykush: + if args.usb_replug_available: # mynewt project has not been refactored yet to reduce the number of # IUT board resets. 
if stack_inst.core: @@ -1534,6 +1523,29 @@ def run_recovery(args, ptses): log('Recovery finished') +def replug_usb(args): + if args.ykush: + if sys.platform == 'win32': + device_id = tty_to_com(args.tty_file) + elif args.tty_alias: + device_id = args.tty_alias + else: + device_id = args.tty_file + + ykush_replug_usb(args.ykush, device_id=device_id, delay=args.ykush_replug_delay) + + if args.tty_alias: + while not os.path.islink(args.tty_alias) and not os.path.exists(os.path.realpath(args.tty_alias)): + raise_on_global_end() + log(f'Waiting for TTY {args.tty_alias} to appear...\n') + time.sleep(1) + + args.tty_file = os.path.realpath(args.tty_alias) + + elif args.active_hub_server: + active_hub_server_replug_usb(args.active_hub_server) + + def setup_project_name(project): global autoprojects autoprojects = project # importlib.import_module('ptsprojects.' + project) diff --git a/autopts/utils.py b/autopts/utils.py index a75ada2a26..8a200ba07c 100644 --- a/autopts/utils.py +++ b/autopts/utils.py @@ -21,9 +21,11 @@ import sys import threading import traceback +import xmlrpc.client +import hid +import psutil from time import sleep -import psutil PTS_WORKSPACE_FILE_EXT = ".pqw6" @@ -208,18 +210,23 @@ def interrupt(self): pass -def usb_power(ykush_port, on=True, ykush_srn=None): +def ykush_set_usb_power(ykush_port, on=True, ykush_srn=None): if not pykush_installed: print('pykush not installed') return - ykush_port = int(ykush_port) - yk = pykush.YKUSH(serial=ykush_srn) - state = pykush.YKUSH_PORT_STATE_UP if on else pykush.YKUSH_PORT_STATE_DOWN - yk.set_port_state(ykush_port, state) - if yk.get_port_state(ykush_port) != state: - ykush_name = ykush_srn if ykush_srn else '' - raise Exception(f'YKUSH {ykush_name} failed to change state {state} of port {ykush_port}') + yk = None + try: + ykush_port = int(ykush_port) + yk = pykush.YKUSH(serial=ykush_srn) + state = pykush.YKUSH_PORT_STATE_UP if on else pykush.YKUSH_PORT_STATE_DOWN + yk.set_port_state(ykush_port, state) + if 
yk.get_port_state(ykush_port) != state: + ykush_name = ykush_srn if ykush_srn else '' + raise Exception(f'YKUSH {ykush_name} failed to change state {state} of port {ykush_port}') + finally: + if yk: + del yk def get_own_workspaces(): @@ -341,9 +348,9 @@ def ykush_replug_usb(ykush_config, device_id=None, delay=0, end_flag=None): return if device_id is None: - usb_power(ykush_port, False, ykush_srn) + ykush_set_usb_power(ykush_port, False, ykush_srn) sleep(delay) - usb_power(ykush_port, True, ykush_srn) + ykush_set_usb_power(ykush_port, True, ykush_srn) sleep(delay) return @@ -355,7 +362,7 @@ def ykush_replug_usb(ykush_config, device_id=None, delay=0, end_flag=None): if i == 0: logging.debug(f'Power down device ({device_id}) under ykush_port:{ykush_port}') - usb_power(ykush_port, False, ykush_srn) + ykush_set_usb_power(ykush_port, False, ykush_srn) i = 20 else: i -= 1 @@ -363,7 +370,7 @@ def ykush_replug_usb(ykush_config, device_id=None, delay=0, end_flag=None): sleep(delay) logging.debug(f'Power up device ({device_id}) under ykush_port:{ykush_port}') - usb_power(ykush_port, True, ykush_srn) + ykush_set_usb_power(ykush_port, True, ykush_srn) i = 0 while not device_exists(device_id): @@ -374,9 +381,9 @@ def ykush_replug_usb(ykush_config, device_id=None, delay=0, end_flag=None): if i == 20: # Sometimes JLink falls into a bad state and cannot # be enumerated correctly at first time - usb_power(ykush_port, False, ykush_srn) + ykush_set_usb_power(ykush_port, False, ykush_srn) sleep(delay) - usb_power(ykush_port, True, ykush_srn) + ykush_set_usb_power(ykush_port, True, ykush_srn) i = 0 else: i += 1 @@ -384,6 +391,55 @@ def ykush_replug_usb(ykush_config, device_id=None, delay=0, end_flag=None): sleep(0.1) +def hid_gpio_hub_set_usb_power(vid, pid, port, on): + path = None + cmd = b"\x05xxxxxxxx" + index = int(port) + + if 1 <= index <= len(cmd) - 1: + cmd_list = list(cmd) + cmd_list[index] = ord('0' if on else '1') + cmd = bytes(cmd_list) + + for device in hid.enumerate(vid, 
pid): + print(device) + path = device['path'] + + device = hid.device() + device.open_path(path) + device.send_feature_report(cmd) + device.close() + + # Read the flashed versions of hid_gpio and apache-mynewt-core + # print(device.get_indexed_string(32)) + # print(device.get_indexed_string(33)) + # Read the states of hub ports + # print(device.get_feature_report(5, 9)) + + +def active_hub_server_replug_usb(config): + with xmlrpc.client.ServerProxy(uri=f"http://{config['ip']}:{config['tcp_port']}/", + allow_none=True, transport=None, + encoding=None, verbose=False, + use_datetime=False, use_builtin_types=False, + headers=(), context=None) as proxy: + logging.debug(f'Power down USB port: {config["usb_port"]}') + proxy.set_usb_power(config['usb_port'], False) + sleep(config['replug_delay']) + logging.debug(f'Power up USB port: {config["usb_port"]}') + proxy.set_usb_power(config['usb_port'], True) + + +def active_hub_server_set_usb_power(config, on): + with xmlrpc.client.ServerProxy(uri=f"http://{config['ip']}:{config['tcp_port']}/", + allow_none=True, transport=None, + encoding=None, verbose=False, + use_datetime=False, use_builtin_types=False, + headers=(), context=None) as proxy: + + proxy.set_usb_power(config['usb_port'], on) + + def print_thread_stack_trace(): logging.debug("Printing stack trace for each thread:") for thread_id, thread_obj in threading._active.items(): diff --git a/autoptsserver.py b/autoptsserver.py index b118cbf376..c59a53e2eb 100755 --- a/autoptsserver.py +++ b/autoptsserver.py @@ -52,8 +52,8 @@ from autopts import ptscontrol from autopts.config import SERVER_PORT -from autopts.utils import CounterWithFlag, get_global_end, exit_if_admin, ykush_replug_usb, usb_power, \ - print_thread_stack_trace +from autopts.utils import CounterWithFlag, get_global_end, exit_if_admin, ykush_replug_usb, ykush_set_usb_power, \ + print_thread_stack_trace, active_hub_server_replug_usb, active_hub_server_set_usb_power from autopts.winutils import kill_all_processes 
logging = root_logging.getLogger('server') @@ -153,19 +153,21 @@ def unregister_client_callback(self): def _replug_dongle(self): log(f"{self._replug_dongle.__name__}") - ykush_port = self.args.ykush - if not ykush_port: - return - device = self._device - log(f'Replugging device ({device}) under ykush:{ykush_port} ...') - if device: - ykush_replug_usb(self.args.ykush, device_id=device, delay=0, end_flag=self._end) - else: - # Cases where ykush was down or the dongle was - # not enumerated for any other reason. - ykush_replug_usb(self.args.ykush, device_id=None, delay=3, end_flag=self._end) - log(f'Done replugging device ({device}) under ykush:{ykush_port}') + if self.args.ykush: + ykush_port = self.args.ykush + device = self._device + log(f'Replugging device ({device}) under ykush:{ykush_port} ...') + if device: + ykush_replug_usb(self.args.ykush, device_id=device, delay=0, end_flag=self._end) + else: + # Cases where ykush was down or the dongle was + # not enumerated for any other reason. + ykush_replug_usb(self.args.ykush, device_id=None, delay=3, end_flag=self._end) + log(f'Done replugging device ({device}) under ykush:{ykush_port}') + + elif self.args.active_hub_server: + active_hub_server_replug_usb(self.args.active_hub_server) def _dispatch(self, method_name, param_tuple): """Dispatcher that is used by xmlrpc server""" @@ -309,6 +311,10 @@ def __init__(self, description): help="Specify ykush hub downstream port number, so " "during recovery steps PTS dongle could be replugged.") + self.add_argument("--active-hub-server", nargs="+", default=[], + help="Specify active hub server e.g. IP:TCP_PORT:USB_PORT, so " + "during recovery steps PTS dongle could be replugged.") + self.add_argument("--dongle", nargs="+", default=None, help='Select the dongle port.' 'COMx in case of LE only dongle. 
' @@ -346,8 +352,25 @@ def check_args(arg): ykush_confs.append(config) arg.ykush = ykush_confs + arg.active_hub = True + + elif arg.active_hub_server: + active_hub_server_configs = [] + for active_hub_server_conf in arg.active_hub_server: + config = {} + ip, tcp_port, usb_port = active_hub_server_conf.split(':') + + config['ip'] = ip + config['tcp_port'] = tcp_port + config['usb_port'] = usb_port + config['replug_delay'] = 5 + active_hub_server_configs.append(config) + + arg.active_hub_server = active_hub_server_configs + arg.active_hub = True def parse_args(self, args=None, namespace=None): + namespace = argparse.Namespace(active_hub=None) arg = super().parse_args(args, namespace) self.check_args(arg) return arg @@ -574,9 +597,14 @@ def shutdown_pts_bpv(self): init_logging(_args) - if _args.ykush: - for ykush_config in _args.ykush: - usb_power(ykush_config['ports'], False, ykush_config['ykush_srn']) + if _args.active_hub: + if _args.ykush: + for ykush_config in _args.ykush: + ykush_set_usb_power(ykush_config['ports'], False, ykush_config['ykush_srn']) + + elif _args.active_hub_server: + for active_hub_server_config in _args.active_hub_server: + active_hub_server_set_usb_power(active_hub_server_config, False) autoptsservers = [] server_count = len(_args.srv_port) @@ -586,6 +614,7 @@ def shutdown_pts_bpv(self): args_copy = copy.deepcopy(_args) args_copy.srv_port = _args.srv_port[i] args_copy.ykush = _args.ykush[i] if _args.ykush else None + args_copy.active_hub_server = _args.active_hub_server[i] if _args.active_hub_server else None args_copy.dongle = _args.dongle[i] if _args.dongle else None srv = Server(finish_count, args_copy) autoptsservers.append(srv) diff --git a/cliparser.py b/cliparser.py index 0b548b303d..3174f90844 100644 --- a/cliparser.py +++ b/cliparser.py @@ -23,7 +23,8 @@ from autopts.config import SERVER_PORT, CLIENT_PORT, MAX_SERVER_RESTART_TIME from autopts.ptsprojects.boards import tty_exists, com_to_tty, get_debugger_snr from 
autopts.ptsprojects.testcase_db import DATABASE_FILE -from autopts.utils import ykush_replug_usb, raise_on_global_end +from autopts.utils import ykush_replug_usb, raise_on_global_end, active_hub_server_replug_usb + log = logging.debug @@ -107,6 +108,8 @@ def __init__(self, cli_support=None, board_names=None, add_help=True): self.add_argument("--ykush_replug_delay", type=float, default=3, help=argparse.SUPPRESS) + self.add_argument("--active-hub-server", type=str, help=argparse.SUPPRESS) + if cli_support is None: return @@ -178,14 +181,17 @@ def add_positional_args(self): "e.g. elf file for qemu, exe for native.") def check_args_tty(self, args): - if args.ykush: + if args.ykush or args.active_hub_server: if args.tty_alias: device_id = None else: device_id = args.tty_file - # If ykush is used, the board could be unplugged right now - ykush_replug_usb(args.ykush, device_id=device_id, delay=args.ykush_replug_delay) + if args.ykush: + # If ykush is used, the board could be unplugged right now + ykush_replug_usb(args.ykush, device_id=device_id, delay=args.ykush_replug_delay) + elif args.active_hub_server: + active_hub_server_replug_usb(args.active_hub_server) if args.tty_alias: while not os.path.islink(args.tty_alias) and not os.path.exists(os.path.realpath(args.tty_alias)): diff --git a/tools/active_hub_server.py b/tools/active_hub_server.py new file mode 100644 index 0000000000..86bc7afd49 --- /dev/null +++ b/tools/active_hub_server.py @@ -0,0 +1,100 @@ +# +# auto-pts - The Bluetooth PTS Automation Framework +# +# Copyright (c) 2024, Codecoup. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for +# more details. +# +import argparse +import sys +import traceback +import xmlrpc.client +import xmlrpc.server +from os.path import dirname, abspath + +AUTOPTS_REPO = dirname(dirname(abspath(__file__))) +sys.path.insert(0, AUTOPTS_REPO) + +from autopts.utils import hid_gpio_hub_set_usb_power, ykush_set_usb_power + +SRN = None +VID = None +PID = None + + +class StartArgumentParser(argparse.ArgumentParser): + def __init__(self): + argparse.ArgumentParser.__init__(self) + + self.add_argument("--ip", default='', type=str, + help="IP address") + + self.add_argument("--port", default=65100, type=int, + help="TCP port") + + self.add_argument("--ykush-srn", type=str, + help="Select serial number of the YKUSH to be used " + "as an active hub.") + + self.add_argument("--hid-gpio-hub", type=str, + help="Select VID:PID of the hid_gpio device to be used" + " as an active hub.") + + @staticmethod + def check_args(arg): + """Sanity check command line arguments""" + if not 49152 <= arg.port <= 65535: + sys.exit("Invalid server port number=%s, expected range <49152,65535> " % (arg.port,)) + + def parse_args(self, args=None, namespace=None): + arg = super().parse_args(args, namespace) + self.check_args(arg) + return arg + + +def set_usb_power(port, on): + try: + if SRN: + ykush_set_usb_power(port, on=on, ykush_srn=SRN) + elif VID: + hid_gpio_hub_set_usb_power(VID, PID, port, on) + except BaseException as e: + traceback.print_exception(e) + return traceback.format_exception(e) + + +def start_server(): + global SRN, VID, PID + args = StartArgumentParser().parse_args() + + if args.ykush_srn: + SRN = args.ykush_srn + elif args.hid_gpio_hub: + VID, PID = args.hid_gpio_hub.split(':') + VID = int(VID, 16) + PID = int(PID, 16) + + print("Active USB hub serving on port {} ...".format(args.port)) + server = xmlrpc.server.SimpleXMLRPCServer((args.ip, args.port), allow_none=True) + server.register_function(set_usb_power, 'set_usb_power') + 
server.register_introspection_functions() + server.timeout = 1.0 + + while True: + try: + server.handle_request() + except KeyboardInterrupt as e: + return + except BaseException as e: + traceback.print_exception(e) + + +if __name__ == "__main__": + start_server() From cc887fe1864cc3cb5b8b7184f8e9f719854fa8b6 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Thu, 27 Jun 2024 17:27:24 +0200 Subject: [PATCH 03/44] cron: Add test run timeguard Since autopts bot is able to continue testing after an interruption, let's add a timeguard to cron that will detect whether a test cases run smoothly. --- tools/cron/common.py | 124 +++++++++++++++++++++++++++---------------- 1 file changed, 78 insertions(+), 46 deletions(-) diff --git a/tools/cron/common.py b/tools/cron/common.py index d406c89e3f..6a4d9712be 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -41,9 +41,9 @@ from pathlib import Path from time import sleep, time from os.path import dirname, abspath -from datetime import datetime, date +from datetime import datetime, date, timedelta -AUTOPTS_REPO=dirname(dirname(dirname(abspath(__file__)))) +AUTOPTS_REPO = dirname(dirname(dirname(abspath(__file__)))) sys.path.insert(0, AUTOPTS_REPO) from autopts.utils import get_global_end, terminate_process @@ -51,6 +51,7 @@ from autopts.bot.common import load_module_from_path from autopts.bot.common_features.github import update_repos from autopts.bot.common_features.mail import send_mail +from autopts.config import TC_STATS_JSON from tools.cron.compatibility import find_latest, find_by_project_hash, find_by_autopts_hash, find_by_pts_ver, \ get_hash_from_reference from tools.cron.remote_terminal import RemoteTerminalClientProxy @@ -403,16 +404,18 @@ def parse_yaml(file_path): return parsed_dict -def start_vm(config): +def start_vm(config, checkout_repos=False): # Reset the VM state config = config['cron'] - git_reset_head(config['vm']['path']) - # Checkout the VM instance to branch with specific PTS version - 
if 'pts_ver' in config: - vm_yaml = parse_yaml(config['vm']['yaml']) - vm_commit = vm_yaml[config['pts_ver']] - git_checkout(vm_commit, config['vm']['path']) + if checkout_repos: + git_reset_head(config['vm']['path']) + + # Checkout the VM instance to branch with specific PTS version + if 'pts_ver' in config: + vm_yaml = parse_yaml(config['vm']['yaml']) + vm_commit = vm_yaml[config['pts_ver']] + git_checkout(vm_commit, config['vm']['path']) log(f"Starting VM with: {config['vm']['vm_start_cmd']}") subprocess.Popen(shlex.split(config['vm']['vm_start_cmd']), @@ -449,24 +452,25 @@ def set_timeout_flag(): timer.cancel() -def start_remote_autoptsserver(config): +def start_remote_autoptsserver(config, checkout_repos): with RemoteTerminalClientProxy(config['remote_machine']['terminal_ip'], config['remote_machine']['terminal_port'], config['remote_machine'].get( 'socket_timeout', None) ) as client: - for repo in config['remote_machine']['git']: - repo_info = config['remote_machine']['git'][repo] + if checkout_repos: + for repo in config['remote_machine']['git']: + repo_info = config['remote_machine']['git'][repo] - if 'checkout_cmd' in repo_info: - log(client.run_command(repo_info['checkout_cmd'], repo_info['path'])) - else: - log(client.run_command(f"git fetch {repo_info['remote']}", repo_info['path'])) - log(client.run_command(f"git checkout {repo_info['remote']}/{repo_info['branch']}", repo_info['path'])) + if 'checkout_cmd' in repo_info: + log(client.run_command(repo_info['checkout_cmd'], repo_info['path'])) + else: + log(client.run_command(f"git fetch {repo_info['remote']}", repo_info['path'])) + log(client.run_command(f"git checkout {repo_info['remote']}/{repo_info['branch']}", repo_info['path'])) - log(f"Starting process on the remote machine: {config['server_start_cmd']}") - log(client.open_process(config['server_start_cmd'], - config['remote_machine']['git']['autopts']['path'])) + log(f"Starting process on the remote machine: {config['server_start_cmd']}") + 
log(client.open_process(config['server_start_cmd'], + config['remote_machine']['git']['autopts']['path'])) def close_vm(config): @@ -512,6 +516,7 @@ def win_start_autoptsserver(config): def terminate_processes(config): if 'remote_machine' in config['cron']: close_remote_autoptsserver(config) + terminate_process(cmdline='autoptsclient_bot.py') elif sys.platform == 'win32': terminate_process(name='PTS') terminate_process(name='Fts') @@ -522,12 +527,12 @@ def terminate_processes(config): close_vm(config) -def _run_test(config): +def _start_processes(config, checkout_repos): srv_process = None if 'vm' in config['cron']: try: - start_vm(config) + start_vm(config, checkout_repos=checkout_repos) except BaseException as e: close_vm(config) raise e @@ -536,7 +541,7 @@ def _run_test(config): if 'remote_machine' in config: # Start the autoptsserver.py on the remote machine - start_remote_autoptsserver(config) + start_remote_autoptsserver(config, checkout_repos) elif sys.platform == 'win32' and config['server_start_cmd']: # Start subprocess running autoptsserver.py srv_process = win_start_autoptsserver(config) @@ -556,31 +561,52 @@ def _run_test(config): stderr=subprocess.STDOUT, cwd=config['autopts_repo']) - sleep_job(config['cancel_job'], 5) + return srv_process, bot_process - try: - # Main thread waits for at least one of subprocesses to finish - while not config['cancel_job'].canceled: - if srv_process and srv_process.poll() is not None: - log('server process finished.') - break - if bot_process.poll() is not None: - log('bot process finished.') - break +def _restart_processes(config): + terminate_processes(config) + return _start_processes(config, checkout_repos=False) - sleep_job(config['cancel_job'], 5) - except: - pass - # Terminate the other subprocess if it is still running - if srv_process and srv_process.poll() is None: - srv_process.terminate() +def _run_test(config): + backup = config['auto_pts'].get('use_backup', False) + timeguard = 
config['cron']['test_run_timeguard'] + results_file_path = os.path.join(config['cron']['autopts_repo'], TC_STATS_JSON) + + srv_process, bot_process = _start_processes(config, checkout_repos=True) + last_check_time = time() - if bot_process.poll() is None: - bot_process.terminate() + # Main thread waits for at least one of subprocesses to finish + while not config['cron']['cancel_job'].canceled: + sleep_job(config['cron']['cancel_job'], config['cron']['check_interval']) - sleep(10) + if srv_process and srv_process.poll() is not None: + log('server process finished.') + break + + if bot_process.poll() is not None: + log('bot process finished.') + break + + if not backup: + continue + + current_time = time() + + if not os.path.exists(results_file_path): + if timedelta(seconds=current_time - last_check_time) > timedelta(seconds=timeguard): + log("Test run has not been started on time. Restarting processes...") + srv_process, bot_process = _restart_processes(config) + + continue + + last_check_time = current_time + + if timedelta(seconds=current_time - os.path.getmtime(results_file_path)) > timedelta(seconds=timeguard): + log("Test run results have not been updated for a while. 
Restarting processes...") + srv_process, bot_process = _restart_processes(config) + sleep_job(config['cron']['cancel_job'], timeguard) def run_test(config): @@ -591,8 +617,8 @@ def run_test(config): _run_test(config) except: log(traceback.format_exc()) - - terminate_processes(config) + finally: + terminate_processes(config) def parse_test_cases_from_comment(pr_cfg): @@ -662,13 +688,19 @@ def get_cron_config(cfg, **kwargs): f'python autoptsclient_bot.py {bot_args} >> stdout_autoptsbot.log 2>&1' if 'bot_start_delay' not in config: - config['bot_start_delay'] = 60 + config['bot_start_delay'] = 60 # seconds if 'server_start_cmd' not in config: server_options = config.get('server_options', '') config['server_start_cmd'] = \ f'python autoptsserver.py {server_options} >> stdout_autoptsserver.log 2>&1' + if 'test_run_timeguard' not in config: + config['test_run_timeguard'] = 20 * 60 # seconds + + if 'check_interval' not in config: + config['check_interval'] = 60 # seconds + return cron_config @@ -789,7 +821,7 @@ def start_vm_job(cfg, **kwargs): config = load_config(cfg) - start_vm(config) + start_vm(config, checkout_repos=True) log(f'The {start_vm_job.__name__} Job finished') From 164c8fdfc67dcc3f8849612bd5ffc5a4cef48711 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Thu, 4 Jul 2024 13:17:04 +0200 Subject: [PATCH 04/44] bot: Unify report creation Make reports generation more generic. --- autopts/bot/common.py | 376 ++++++++++++++++++++++++-- autopts/bot/common_features/report.py | 87 +----- autopts/bot/mynewt.py | 186 ++----------- autopts/bot/zephyr.py | 269 ++---------------- autopts/client.py | 8 +- autopts/config.py | 31 ++- autoptsserver.py | 7 + test/unittests.py | 35 +-- tools/cron/common.py | 10 +- 9 files changed, 477 insertions(+), 532 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index f9573fccbe..6b8745af32 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -12,7 +12,9 @@ # FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for # more details. # +import collections import copy +import datetime import importlib import logging import os @@ -21,20 +23,19 @@ import shutil import time import json +import traceback from pathlib import Path from argparse import Namespace from autopts import client as autoptsclient +from autopts.bot.common_features import github, report, mail, google_drive +from autopts.bot.common_features.report import REPORT_TXT from autopts.client import CliParser, Client, TestCaseRunStats, init_logging from autopts.config import MAX_SERVER_RESTART_TIME, TEST_CASES_JSON, ALL_STATS_JSON, TC_STATS_JSON, \ - ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON -from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr + ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON, TMP_DIR, REPORT_README_MD, AUTOPTS_REPORT_FOLDER, \ + REPORT_DIFF_TXT, REPORT_XLSX, IUT_LOGS_FOLDER, AUTOPTS_ROOT_DIR +from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr, release_device from autopts.ptsprojects.testcase_db import DATABASE_FILE -PROJECT_DIR = os.path.dirname( # auto-pts repo directory - os.path.dirname( # autopts module directory - os.path.dirname( # bot module directory - os.path.abspath(__file__)))) # this file directory - log = logging.debug @@ -52,6 +53,27 @@ def cleanup_tmp_files(): os.remove(file) +def get_deepest_dirs(logs_tree, dst_tree, max_depth): + def recursive(directory, depth=3): + depth -= 1 + + for file in os.scandir(directory): + if file.is_dir(): + if depth > 0: + recursive(file.path, depth) + else: + dst_file = os.path.join(dst_tree, file.name) + try: + shutil.move(file.path, dst_file) + except BaseException as e: # skip waiting for BPV to release the file + try: + shutil.copy(file.path, dst_file) + except BaseException as e2: + print(e2) + + recursive(logs_tree, max_depth) + + class BuildAndFlashException(Exception): pass @@ -401,22 +423,342 @@ def run_test_cases(self): 
self.ptses[0].get_test_case_description(project_name, test_case_name) all_stats.update_descriptions(descriptions) - all_stats.pts_ver = '{}'.format(self.ptses[0].get_version()) - all_stats.platform = '{}'.format(self.ptses[0].get_system_model()) + all_stats.pts_ver = str(self.ptses[0].get_version()) + all_stats.platform = str(self.ptses[0].get_system_model()) + all_stats.system_version = str(self.ptses[0].get_system_version()) except: log('Failed to generate some stats.') return all_stats def start(self, args=None): - # Extend this method in a derived class to handle sending - # logs, reports, etc. - self.run_tests() + """ + Extend this method in a derived class, if needed, to handle + sending logs, reports, etc. + """ + + if os.path.exists(BOT_STATE_JSON): + print(f'Continuing the previous terminated test run (remove {TMP_DIR} to start freshly)') + + with open(BOT_STATE_JSON, "r") as f: + data = f.read() + bot_state = json.loads(data) + self.bot_config = bot_state['bot_config'] + + else: + # Start fresh test run + + pre_cleanup() + bot_state = {'start_time': time.time()} + + if 'githubdrive' in self.bot_config: + github.update_sources(self.bot_config['githubdrive']['path'], + self.bot_config['githubdrive']['remote'], + self.bot_config['githubdrive']['branch'], True) + + if 'git' in self.bot_config: + bot_state['repos_info'] = github.update_repos( + self.bot_config['auto_pts']['project_path'], + self.bot_config["git"]) + bot_state['repo_status'] = report.make_repo_status(bot_state['repos_info']) + else: + bot_state['repos_info'] = {} + bot_state['repo_status'] = '' + + if self.bot_config['auto_pts'].get('use_backup', False): + os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + bot_state['bot_config'] = self.bot_config + + with open(BOT_STATE_JSON, "w") as f: + f.write(json.dumps(bot_state, indent=4)) + + try: + stats = self.run_tests() + finally: + release_device(self.args.tty_file) + + report_data = bot_state + report_data['end_time'] = time.time() + 
report_data['end_time_stamp'] = datetime.datetime.fromtimestamp( + report_data['end_time']).strftime("%Y_%m_%d_%H_%M_%S") + report_data['start_time_stamp'] = datetime.datetime.fromtimestamp( + bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S") + + report_data['status_count'] = stats.get_status_count() + report_data['tc_results'] = stats.get_results() + report_data['descriptions'] = stats.get_descriptions() + report_data['regressions'] = stats.get_regressions() + report_data['progresses'] = stats.get_progresses() + report_data['new_cases'] = stats.get_new_cases() + report_data['deleted_cases'] = [] + report_data['pts_ver'] = stats.pts_ver + report_data['platform'] = stats.platform + report_data['system_version'] = stats.system_version + report_data['database_file'] = self.bot_config['auto_pts'].get('database_file', DATABASE_FILE) + + report_data['tc_results'] = collections.OrderedDict(sorted(report_data['tc_results'].items())) + + report_data['errata'] = report.get_errata(self.autopts_project_name) + + report_data['pts_logs_folder'], report_data['pts_xml_folder'] = report.pull_server_logs(self.args) + + report_data['report_xlsx'] = report.make_report_xlsx(report_data['tc_results'], + report_data['status_count'], + report_data['regressions'], + report_data['progresses'], + report_data['descriptions'], + report_data['pts_xml_folder'], + report_data['errata']) + + report_data['report_txt'] = report.make_report_txt(report_data['tc_results'], + report_data['regressions'], + report_data['progresses'], + report_data['repo_status'], + report_data['errata']) + + if 'githubdrive' in self.bot_config or 'gdrive' in self.bot_config: + self.make_report_folder(report_data) + + if 'gdrive' in self.bot_config: + self.upload_logs_to_gdrive(report_data) + + if 'githubdrive' in self.bot_config: + self.upload_logs_to_github(report_data) + + if 'mail' in self.bot_config: + self.send_email(report_data) + + print("Done") def run_tests(self): # Entry point of the simple client layer 
return super().start() + def make_readme_md(self, report_data): + """Creates README.md for Github logging repo + """ + readme_file = REPORT_README_MD + + Path(os.path.dirname(readme_file)).mkdir(parents=True, exist_ok=True) + + with open(readme_file, 'w') as f: + readme_body = f'''# AutoPTS report + + Start time: {report_data["start_time_stamp"]} + + End time: {report_data["end_time_stamp"]} + + PTS version: {report_data["pts_ver"]} + + Repositories: + +''' + f.write(readme_body) + + for name, info in report_data['repos_info'].items(): + f.write(f'\t{name}: {info["commit"]} [{info["desc"]}]\n') + + return readme_file + + def make_report_folder(self, report_data): + """Creates folder containing .txt and .xlsx reports, pulled logs + from autoptsserver, iut logs and additional README.md. + """ + report_data['report_folder'] = AUTOPTS_REPORT_FOLDER + shutil.rmtree(report_data['report_folder'], ignore_errors=True) + Path(report_data['report_folder']).mkdir(parents=True, exist_ok=True) + + if 'githubdrive' in self.bot_config: + report_folder_name = os.path.basename(report_data['report_folder']) + + report_data['old_report_txt'] = os.path.join(self.bot_config['githubdrive']['path'], + self.bot_config['githubdrive']['subdir'], + report_folder_name, REPORT_TXT) + + report_data['report_diff_txt'], report_data['deleted_cases'] = \ + report.make_report_diff(report_data['old_report_txt'], + report_data['tc_results'], + report_data['regressions'], + report_data['progresses'], + report_data['new_cases']) + + report_data['readme_file'] = self.make_readme_md(report_data) + + attachments = [ + REPORT_DIFF_TXT, + report_data['report_txt'], + (report_data['report_txt'], f'report_{report_data["start_time_stamp"]}.txt'), + (report_data['report_xlsx'], f'report_{report_data["start_time_stamp"]}.xlsx'), + REPORT_README_MD, + report_data['database_file'], + report_data['pts_xml_folder'], + ] + + iut_logs_new = os.path.join(report_data['report_folder'], 'iut_logs') + pts_logs_new = 
os.path.join(report_data['report_folder'], 'pts_logs') + get_deepest_dirs(IUT_LOGS_FOLDER, iut_logs_new, 3) + get_deepest_dirs(report_data['pts_logs_folder'], pts_logs_new, 3) + + self.generate_attachments(report_data, attachments) + + self.pack_report_folder(report_data, attachments) + + def generate_attachments(self, report_data, attachments): + """Overwrite this if needed""" + pass + + def pack_report_folder(self, report_data, attachments): + report_dir = report_data['report_folder'] + + for item in attachments: + if isinstance(item, tuple): + src_file, dst_file = item + dst_file = os.path.join(report_dir, dst_file) + else: + src_file = item + dst_file = os.path.join(report_dir, os.path.basename(src_file)) + + try: + if not os.path.exists(src_file): + log(f'The file {src_file} does not exist') + continue + + if os.path.isdir(src_file): + try: + shutil.move(src_file, dst_file) + continue + except: # skip waiting for BPV to release the file + pass + + try: + shutil.copy(src_file, dst_file) + except: + pass + + except BaseException as e: + traceback.print_exception(e) + + def upload_logs_to_github(self, report_data): + log("Uploading to Github ...") + + if 'commit_msg' not in report_data: + report_data['commit_msg'] = report_data['start_time_stamp'] + + report_data['github_link'], report_data['report_folder'] = report.github_push_report( + report_data['report_folder'], self.bot_config['githubdrive'], report_data['commit_msg']) + + def upload_logs_to_gdrive(self, report_data): + report_folder = report_data['report_folder'] + board_name = self.bot_config['auto_pts']['board'] + gdrive_config = self.bot_config['gdrive'] + + log(f'Archiving the report folder ...') + report.archive_testcases(report_folder, depth=2) + + log(f'Connecting to GDrive ...') + drive = google_drive.Drive(gdrive_config) + + log(f'Creating GDrive directory ...') + report_data['gdrive_url'] = drive.new_workdir(board_name) + log(report_data['gdrive_url']) + + log("Uploading to GDrive ...") + 
drive.upload_folder(report_folder) + + def send_email(self, report_data): + log("Sending email ...") + + descriptions = report_data['descriptions'] + + mail_ctx = {'repos_info': report_data['repo_status'], + 'summary': [mail.status_dict2summary_html(report_data['status_count'])], + 'log_url': [], + 'board': self.bot_config['auto_pts']['board'], + 'platform': report_data['platform'], + 'pts_ver': report_data['pts_ver'], + 'system_version': report_data['system_version'], + 'additional_info': '', + } + + mail_ctx.update(self.bot_config['mail']) + + if report_data['regressions']: + mail_ctx['summary'].append(mail.regressions2html(report_data['regressions'], descriptions)) + + if report_data['progresses']: + mail_ctx['summary'].append(mail.progresses2html(report_data['progresses'], descriptions)) + + if report_data['new_cases']: + mail_ctx['summary'].append(mail.new_cases2html(report_data['new_cases'], descriptions)) + + if report_data['deleted_cases']: + mail_ctx['summary'].append(mail.deleted_cases2html(report_data['deleted_cases'], descriptions)) + + mail_ctx['summary'] = '
'.join(mail_ctx['summary']) + + if 'gdrive' in self.bot_config and 'gdrive_url' in report_data: + mail_ctx['log_url'].append(mail.url2html(report_data['gdrive_url'], "Results on Google Drive")) + + if 'githubdrive' in self.bot_config and 'github_link' in report_data: + mail_ctx['log_url'].append(mail.url2html(report_data['github_link'], 'Results on Github')) + + mail_ctx['log_url'] = '
'.join(mail_ctx['log_url']) + + if not mail_ctx['log_url']: + mail_ctx['log_url'] = 'Not Available' + + mail_ctx["elapsed_time"] = str(datetime.timedelta( + seconds=(int(report_data['end_time'] - report_data['start_time'])))) + + if 'additional_info_path' in mail_ctx: + try: + with open(mail_ctx['additional_info_path']) as file: + mail_ctx['additional_info'] = f'{file.read()}
' + except Exception as e: + logging.exception(e) + + subject, body = self.compose_mail(mail_ctx) + + mail.send_mail(self.bot_config['mail'], subject, body, + [report_data['report_xlsx'], report_data['report_txt']]) + + def compose_mail(self, mail_ctx): + """ Create a email body + """ + iso_cal = datetime.date.today().isocalendar() + ww_dd_str = "WW%s.%s" % (iso_cal[1], iso_cal[2]) + + body = ''' +

This is automated email and do not reply.

+

Bluetooth test session - {ww_dd_str}

+ {additional_info} +

1. IUT Setup

+

Type: Zephyr
+ Board: {board}
+ Source: {repos_info}

+

2. PTS Setup

+

OS: {system_version}
+ Platform: {platform}
+ Version: {pts_ver}

+

3. Test Results

+

Execution Time: {elapsed_time}

+ {summary} +

Logs

+ {log_url} +

Sincerely,

+

{name}

+''' + + if 'body' in mail_ctx: + body = mail_ctx['body'] + + body = body.format(ww_dd_str=ww_dd_str, **mail_ctx) + + subject = mail_ctx.get('subject', 'AutoPTS test session results') + subject = f"{subject} - {ww_dd_str}" + + return subject, body + def get_filtered_test_cases(iut_config, bot_args, config_default, pts): _args = {} @@ -517,7 +859,7 @@ def check_call(cmd, env=None, cwd=None, shell=True): def get_workspace(workspace): - for root, dirs, files in os.walk(os.path.join(PROJECT_DIR, 'autopts/workspaces'), + for root, dirs, files in os.walk(os.path.join(AUTOPTS_ROOT_DIR, 'autopts/workspaces'), topdown=True): for name in dirs: if name == workspace: @@ -560,11 +902,11 @@ def get_absolute_module_path(config_path): if os.path.isfile(_path): return _path - _path = os.path.join(PROJECT_DIR, f'autopts/bot/{config_path}') + _path = os.path.join(AUTOPTS_ROOT_DIR, f'autopts/bot/{config_path}') if os.path.isfile(_path): return _path - _path = os.path.join(PROJECT_DIR, f'autopts/bot/{config_path}.py') + _path = os.path.join(AUTOPTS_ROOT_DIR, f'autopts/bot/{config_path}.py') if os.path.isfile(_path): return _path @@ -591,8 +933,8 @@ def pre_cleanup(): :return: None """ try: - shutil.copytree("logs", "oldlogs", dirs_exist_ok=True) - shutil.rmtree("logs") + shutil.copytree(IUT_LOGS_FOLDER, "oldlogs", dirs_exist_ok=True) + shutil.rmtree(IUT_LOGS_FOLDER) except OSError: pass diff --git a/autopts/bot/common_features/report.py b/autopts/bot/common_features/report.py index de67213e01..c48544cc6a 100644 --- a/autopts/bot/common_features/report.py +++ b/autopts/bot/common_features/report.py @@ -29,13 +29,9 @@ from autopts.bot.common_features import github from autopts.bot import common from autopts.client import PtsServer +from autopts.config import PTS_XMLS_FOLDER, TMP_DIR, REPORT_XLSX, REPORT_TXT, REPORT_DIFF_TXT, ERROR_TXT, \ + ERRATA_DIR_PATH, AUTOPTS_ROOT_DIR -REPORT_XLSX = "report.xlsx" -REPORT_TXT = "report.txt" -REPORT_DIFF_TXT = "report-diff.txt" -ERROR_TXT = 
'error.txt' - -ERRATA_DIR_PATH = os.path.join(common.PROJECT_DIR, 'errata') log = logging.debug @@ -54,7 +50,7 @@ def get_errata(project_name): def get_autopts_version(): - repo = git.Repo(common.PROJECT_DIR) + repo = git.Repo(AUTOPTS_ROOT_DIR) version = repo.git.show('-s', '--format=%H') if repo.is_dirty(): @@ -76,7 +72,7 @@ def make_repo_status(repos_info): # .xlsx spreadsheet file # **************************************************************************** def make_report_xlsx(results_dict, status_dict, regressions_list, - progresses_list, descriptions, xmls, project_name=''): + progresses_list, descriptions, xmls, errata): """Creates excel file containing test cases results and summary pie chart :param results_dict: dictionary with test cases results :param status_dict: status dictionary, where key is status and value is @@ -90,7 +86,7 @@ def make_report_xlsx(results_dict, status_dict, regressions_list, try: xml_list = list(os.scandir(xmls)) except FileNotFoundError as e: - print("No XMLs found") + log("No XMLs found") xml_list = None matched_xml = '' @@ -105,8 +101,6 @@ def find_xml_by_case(case): matched_xml = xml.name break - errata = get_errata(project_name) - header = "AutoPTS Report: " \ "{}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) workbook = xlsxwriter.Workbook(REPORT_XLSX) @@ -186,7 +180,7 @@ def find_xml_by_case(case): # .txt result file # **************************************************************************** def make_report_txt(results_dict, regressions_list, - progresses_list, repo_status, project_name=''): + progresses_list, repo_status, errata): """Creates txt file containing test cases results :param results_dict: dictionary with test cases results :param regressions_list: list of regressions found @@ -199,8 +193,6 @@ def make_report_txt(results_dict, regressions_list, filename = os.path.join(os.getcwd(), REPORT_TXT) f = open(filename, "w") - errata = get_errata(project_name) - f.write(f"{repo_status}, 
autopts={get_autopts_version()}\n") for tc, result in list(results_dict.items()): res = result[0] @@ -217,7 +209,7 @@ def make_report_txt(results_dict, regressions_list, if tc in errata: result += ' - ERRATA ' + errata[tc] - # The first id in the test case is test group + # The first id in the test case is a test group tg = tc.split('/')[0] f.write("%s%s%s\n" % (tg.ljust(8, ' '), tc.ljust(32, ' '), result)) @@ -226,60 +218,6 @@ def make_report_txt(results_dict, regressions_list, return filename -# **************************************************************************** -# autopts_report result folder -# **************************************************************************** -def make_report_folder(iut_logs, pts_logs, xmls, report_xlsx, report_txt, - report_diff_txt, readme_file, database_file, tag=''): - """Creates folder containing .txt and .xlsx reports, pulled logs - from autoptsserver, iut logs and additional README.md. - """ - - def get_deepest_dirs(logs_tree, dst_tree, max_depth): - def recursive(directory, depth=3): - depth -= 1 - - for file in os.scandir(directory): - if file.is_dir(): - if depth > 0: - recursive(file.path, depth) - else: - dst_file = os.path.join(dst_tree, file.name) - try: - shutil.move(file.path, dst_file) - except: # skip waiting for BPV to release the file - try: - shutil.copy(file.path, dst_file) - except: - pass - - recursive(logs_tree, max_depth) - - report_dir = 'tmp/autopts_report' - shutil.rmtree(report_dir, ignore_errors=True) - Path(report_dir).mkdir(parents=True, exist_ok=True) - - shutil.copy(report_diff_txt, os.path.join(report_dir, 'report-diff.txt')) - shutil.copy(report_txt, os.path.join(report_dir, 'report.txt')) - shutil.copy(report_txt, os.path.join(report_dir, 'report{}.txt'.format(tag))) - shutil.copy(report_xlsx, os.path.join(report_dir, 'report{}.xlsx'.format(tag))) - shutil.copy(readme_file, os.path.join(report_dir, 'README.md')) - shutil.copy(database_file, os.path.join(report_dir, 
os.path.basename(database_file))) - - iut_logs_new = os.path.join(report_dir, 'iut_logs') - pts_logs_new = os.path.join(report_dir, 'pts_logs') - xmls_new = os.path.join(report_dir, 'XMLs/') - - get_deepest_dirs(iut_logs, iut_logs_new, 3) - get_deepest_dirs(pts_logs, pts_logs_new, 3) - try: - shutil.move(xmls, xmls_new) - except FileNotFoundError: - print('XMLs directory doesn\'t exist') - - return os.path.join(os.getcwd(), report_dir) - - def report_parse_test_cases(report): if not os.path.exists(report): return None @@ -300,13 +238,8 @@ def report_parse_test_cases(report): return test_cases[1:] -def make_report_diff(log_git_conf, results, regressions, +def make_report_diff(old_report_txt, results, regressions, progresses, new_cases): - old_report_txt = os.path.join(log_git_conf['path'], - log_git_conf['subdir'], - 'autopts_report', - REPORT_TXT) - filename = os.path.join(os.getcwd(), REPORT_DIFF_TXT) f = open(filename, "w") @@ -432,8 +365,8 @@ def pull_server_logs(args): else: workspace_dir = workspace_name - logs_folder = 'tmp/' + workspace_name - xml_folder = 'tmp/XMLs' + logs_folder = os.path.join(TMP_DIR, workspace_name) + xml_folder = PTS_XMLS_FOLDER shutil.rmtree(logs_folder, ignore_errors=True) shutil.rmtree(xml_folder, ignore_errors=True) Path(xml_folder).mkdir(parents=True, exist_ok=True) diff --git a/autopts/bot/mynewt.py b/autopts/bot/mynewt.py index f24a68d9b4..9650f5872f 100755 --- a/autopts/bot/mynewt.py +++ b/autopts/bot/mynewt.py @@ -13,8 +13,6 @@ # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. 
# -import collections -import datetime import importlib import logging import os @@ -26,12 +24,11 @@ from autopts import bot from autopts.bot.common import BuildAndFlashException -from autopts.bot.common_features.github import update_sources from autopts.client import Client -from autopts.ptsprojects.boards import release_device, get_build_and_flash, get_board_type +from autopts.config import BOT_STATE_JSON +from autopts.ptsprojects.boards import get_build_and_flash, get_board_type from autopts.ptsprojects.mynewt.iutctl import get_iut, log -from autopts.ptsprojects.testcase_db import DATABASE_FILE -from autopts.bot.common_features import github, report, mail, google_drive +from autopts.bot.common_features import report PROJECT_NAME = Path(__file__).stem @@ -92,48 +89,6 @@ def get_build_info_file(project_path): return file_name -def compose_mail(args, mail_cfg, mail_ctx): - """ Create a email body - """ - - additional_info = '' - if 'additional_info_path' in mail_cfg: - try: - with open(mail_cfg['additional_info_path']) as file: - additional_info = f'{file.read()}
' - except Exception as e: - logging.exception(e) - - body = f''' -

This is automated email and do not reply.

-

Bluetooth test session

- {additional_info} -

1. IUT Setup

- Board: {args["board"]}
- Source: {mail_ctx["mynewt_repo_status"]}

-

2. PTS Setup

-

OS: Windows 10
- Platform: {args['platform']}
- Version: {args['pts_ver']}

-

3. Test Results

-

Execution Time: {mail_ctx["elapsed_time"]}

- {mail_ctx["summary"]} - {mail_ctx["regression"]} - {mail_ctx["progresses"]} -

Logs

- {mail_ctx["log_url"]} -

Sincerely,

-

{mail_cfg['name']}

-''' - - if 'subject' in mail_cfg: - subject = mail_cfg['subject'] - else: - subject = "[Mynewt Nimble] AutoPTS test session results" - - return subject, body - - class MynewtBotConfigArgs(bot.common.BotConfigArgs): def __init__(self, args): super().__init__(args) @@ -179,124 +134,37 @@ def apply_config(self, args, config, value): time.sleep(10) def start(self, args=None): - main(self) - - -class MynewtClient(Client): - def __init__(self): - super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'mynewt') - - -BotClient = MynewtBotClient - - -def main(bot_client): - print("Mynewt bot start!") - - if sys.platform == 'win32': - if 'MSYS2_BASH_PATH' not in os.environ: - print('Set environmental variable MSYS2_BASH_PATH.') - return 0 - # In case wsl was configured and its bash has higher prio than msys2 bash - os.environ['PATH'] = '/usr/bin:' + os.environ['PATH'] - - bot.common.pre_cleanup() - - start_time = time.time() - - cfg = bot_client.bot_config - args = cfg['auto_pts'] + print("Mynewt bot start!") - if 'database_file' not in args: - args['database_file'] = DATABASE_FILE + if sys.platform == 'win32': + if 'MSYS2_BASH_PATH' not in os.environ: + print('Set environmental variable MSYS2_BASH_PATH.') + return 0 + # In case wsl was configured and its bash has higher prio than msys2 bash + os.environ['PATH'] = '/usr/bin:' + os.environ['PATH'] - if 'githubdrive' in cfg: - update_sources(cfg['githubdrive']['path'], - cfg['githubdrive']['remote'], - cfg['githubdrive']['branch'], True) + if not os.path.exists(BOT_STATE_JSON): + if self.bot_config.get('newt_upgrade', False): + bot.common.check_call(['newt', 'upgrade', '-f', '--shallow=0'], + cwd=self.bot_config['project_path']) - if args.get('newt_upgrade', False): - bot.common.check_call(['newt', 'upgrade', '-f', '--shallow=0'], cwd=args['project_path']) + super().start(args) - if 'git' in cfg: - repos_info = github.update_repos(args['project_path'], cfg["git"]) - repo_status = 
report.make_repo_status(repos_info) - else: - repo_status = '' + def generate_attachments(self, report_data, attachments): + project_path = os.path.abspath(self.bot_config['auto_pts']['project_path']) + build_info_file = get_build_info_file(project_path) + attachments.append(build_info_file) - try: - stats = bot_client.run_tests() - finally: - release_device(bot_client.args.tty_file) + def compose_mail(self, mail_ctx): + if 'subject' not in mail_ctx: + mail_ctx['subject'] = "[Mynewt Nimble] AutoPTS test session results" - summary = stats.get_status_count() - results = stats.get_results() - descriptions = stats.get_descriptions() - regressions = stats.get_regressions() - progresses = stats.get_progresses() - args['pts_ver'] = stats.pts_ver - args['platform'] = stats.platform + return super().compose_mail(mail_ctx) - results = collections.OrderedDict(sorted(results.items())) - pts_logs, xmls = report.pull_server_logs(bot_client.args) - - report_file = report.make_report_xlsx(results, summary, regressions, - progresses, descriptions, xmls, PROJECT_NAME) - report_txt = report.make_report_txt(results, regressions, - progresses, repo_status, PROJECT_NAME) - logs_folder = report.archive_testcases("logs") - - build_info_file = get_build_info_file(os.path.abspath(args['project_path'])) - - end_time = time.time() - url = None - - if 'gdrive' in cfg: - drive = google_drive.Drive(cfg['gdrive']) - url = drive.new_workdir(args['board']) - drive.upload(report_file) - drive.upload(report_txt) - drive.upload_folder(logs_folder) - drive.upload(build_info_file) - drive.upload(args['database_file']) - drive.upload_folder(pts_logs) - - if 'mail' in cfg: - print("Sending email ...") - - # keep mail related context to simplify the code - mail_ctx = {"summary": mail.status_dict2summary_html(summary), - "regression": mail.regressions2html(regressions, - descriptions), - "progresses": mail.progresses2html(progresses, - descriptions), - "mynewt_repo_status": repo_status} - - # Summary - - # 
Regression and test case description - - # Log in Google drive in HTML format - if 'gdrive' in cfg and url: - mail_ctx["log_url"] = mail.url2html(url, - "Results on Google Drive") - else: - mail_ctx["log_url"] = "Not Available" - - # Elapsed Time - mail_ctx["elapsed_time"] = str(datetime.timedelta( - seconds=(int(end_time - start_time)))) - - subject, body = compose_mail(args, cfg['mail'], mail_ctx) - - mail.send_mail(cfg['mail'], subject, body, - [report_file, report_txt]) - - print("Done") +class MynewtClient(Client): + def __init__(self): + super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'mynewt') - bot.common.cleanup() - print("\nBye!") - sys.stdout.flush() - return 0 +BotClient = MynewtBotClient diff --git a/autopts/bot/zephyr.py b/autopts/bot/zephyr.py index 574ec2fcf7..e33e3a2ec0 100755 --- a/autopts/bot/zephyr.py +++ b/autopts/bot/zephyr.py @@ -16,30 +16,22 @@ # more details. # -import collections -import datetime import importlib -import json import logging import os import sys import time import traceback -from pathlib import Path - import serial +from pathlib import Path from autopts import bot -from autopts.bot.common_features.github import update_sources -from autopts.config import BOT_STATE_JSON, TMP_DIR from autopts.ptsprojects.zephyr import ZEPHYR_PROJECT_URL from autopts import client as autoptsclient - from autopts.bot.common import BotConfigArgs, BotClient, BuildAndFlashException -from autopts.ptsprojects.boards import tty_to_com, release_device, get_build_and_flash, get_board_type -from autopts.ptsprojects.testcase_db import DATABASE_FILE +from autopts.ptsprojects.boards import tty_to_com, get_build_and_flash, get_board_type from autopts.ptsprojects.zephyr.iutctl import get_iut, log -from autopts.bot.common_features import github, report, mail, google_drive +from autopts.bot.common_features import report PROJECT_NAME = Path(__file__).stem @@ -91,79 +83,6 @@ def zephyr_hash_url(commit): commit) -def make_readme_md(start_time, 
end_time, repos_info, pts_ver): - """Creates README.md for Github logging repo - """ - readme_file = 'tmp/README.md' - - Path(os.path.dirname(readme_file)).mkdir(parents=True, exist_ok=True) - - with open(readme_file, 'w') as f: - readme_body = '''# AutoPTS report - - Start time: {} - - End time: {} - - PTS version: {} - - Repositories: - - '''.format(start_time, end_time, pts_ver) - f.write(readme_body) - - f.writelines( - ['\t{}: {} [{}]\n'.format(name, info['commit'], info['desc']) for name, info in repos_info.items()]) - - return readme_file - - -def compose_mail(args, mail_cfg, mail_ctx): - """ Create a email body - """ - - additional_info = '' - if 'additional_info_path' in mail_cfg: - try: - with open(mail_cfg['additional_info_path']) as file: - additional_info = f'{file.read()}
' - except Exception as e: - logging.exception(e) - - iso_cal = datetime.date.today().isocalendar() - ww_dd_str = "WW%s.%s" % (iso_cal[1], iso_cal[2]) - - body = f''' -

This is automated email and do not reply.

-

Bluetooth test session - {ww_dd_str}

- {additional_info} -

1. IUT Setup

-

Type: Zephyr
- Board: {args['board']}
- Source: {mail_ctx['repos_info']}

-

2. PTS Setup

-

OS: Windows 10
- Platform: {args['platform']}
- Version: {args['pts_ver']}

-

3. Test Results

-

Execution Time: {mail_ctx['elapsed_time']}

- {mail_ctx['summary']} -

Logs

- {mail_ctx['log_url']} -

Sincerely,

-

{mail_cfg['name']}

-''' - - if 'subject' in mail_cfg: - subject = mail_cfg['subject'] - else: - subject = "AutoPTS test session results" - - subject = "%s - %s" % (subject, ww_dd_str) - - return subject, body - - class ZephyrBotConfigArgs(BotConfigArgs): def __init__(self, args): super().__init__(args) @@ -226,178 +145,36 @@ def apply_config(self, args, config, value): time.sleep(10) def start(self, args=None): - main(self) + super().start(args) - -class ZephyrClient(autoptsclient.Client): - def __init__(self): - super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'zephyr') - - -BotClient = ZephyrBotClient - - -def main(bot_client): - if os.path.exists(BOT_STATE_JSON): - print('Continuing the previous terminated test run (remove tmp/ to start freshly)') - - with open(BOT_STATE_JSON, "r") as f: - data = f.read() - bot_state = json.loads(data) - cfg = bot_state['config'] - args = cfg['auto_pts'] - - else: - # Start fresh test run - - bot_state = {} - bot.common.pre_cleanup() - - bot_state['start_time'] = time.time() - - cfg = bot_client.bot_config - args = cfg['auto_pts'] - bot_state['config'] = cfg - - if 'database_file' not in args: - args['database_file'] = DATABASE_FILE - - if 'githubdrive' in cfg: - update_sources(cfg['githubdrive']['path'], - cfg['githubdrive']['remote'], - cfg['githubdrive']['branch'], True) - - args['kernel_image'] = os.path.join(args['project_path'], 'tests', - 'bluetooth', 'tester', 'outdir', - 'zephyr', 'zephyr.elf') - - if 'git' in cfg: - bot_state['repos_info'] = github.update_repos(args['project_path'], cfg["git"]) - bot_state['repo_status'] = report.make_repo_status(bot_state['repos_info']) - args['repos'] = cfg['git'] - else: - bot_state['repos_info'] = {} - bot_state['repo_status'] = '' - - if args.get('use_backup', False): - os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) - - with open(BOT_STATE_JSON, "w") as f: - f.write(json.dumps(bot_state, indent=4)) - - try: - stats = bot_client.run_tests() - finally: - 
release_device(bot_client.args.tty_file) - - summary = stats.get_status_count() - results = stats.get_results() - descriptions = stats.get_descriptions() - regressions = stats.get_regressions() - progresses = stats.get_progresses() - new_cases = stats.get_new_cases() - deleted_cases = [] - args['pts_ver'] = stats.pts_ver - args['platform'] = stats.platform - - results = collections.OrderedDict(sorted(results.items())) - - pts_logs, xmls = report.pull_server_logs(bot_client.args) - - report_file = report.make_report_xlsx(results, summary, regressions, - progresses, descriptions, xmls, PROJECT_NAME) - report_txt = report.make_report_txt(results, regressions, - progresses, bot_state['repo_status'], PROJECT_NAME) - - end_time = time.time() - end_time_stamp = datetime.datetime.fromtimestamp(end_time).strftime("%Y_%m_%d_%H_%M_%S") - start_time_stamp = datetime.datetime.fromtimestamp(bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S") - url = None - github_link = None - report_folder = None - - if 'githubdrive' in cfg or 'gdrive' in cfg: - iut_logs = 'logs/' - readme_file = make_readme_md(start_time_stamp, end_time_stamp, - bot_state['repos_info'], args['pts_ver']) - - report_diff_txt, deleted_cases = report.make_report_diff(cfg['githubdrive'], results, - regressions, progresses, new_cases) - - report_folder = report.make_report_folder(iut_logs, pts_logs, xmls, report_file, - report_txt, report_diff_txt, readme_file, - args['database_file'], - '_iut_zephyr_' + start_time_stamp) - - if 'githubdrive' in cfg: - print("Uploading to Github ...") - commit_msg_pattern = '{branch}_{timestamp}_{commit_sha}' + def upload_logs_to_github(self, report_data): + commit_msg_pattern = '{branch}_{start_time_stamp}_{commit_sha}' branch = 'no_branch' commit_sha = 'no_sha' - if 'commit_msg' in cfg['githubdrive'] and \ - cfg['githubdrive']['commit_msg'] is not None: - commit_msg_pattern = cfg['githubdrive']['commit_msg'] + if 'commit_msg' in self.bot_config['githubdrive'] and \ + 
self.bot_config['githubdrive']['commit_msg'] is not None: + commit_msg_pattern = self.bot_config['githubdrive']['commit_msg'] - if 'git' in cfg: - commit_sha = bot_state['repos_info']['zephyr']['commit'] - branch = cfg['git']['zephyr']['branch'] + if 'git' in self.bot_config: + commit_sha = report_data['repos_info']['zephyr']['commit'] + branch = self.bot_config['git']['zephyr']['branch'] - commit_msg = commit_msg_pattern.format( - timestamp=start_time_stamp, branch=branch, commit_sha=commit_sha) - github_link, report_folder = report.github_push_report( - report_folder, cfg['githubdrive'], commit_msg) + report_data['commit_msg'] = commit_msg_pattern.format( + branch=branch, commit_sha=commit_sha, **report_data) - if 'gdrive' in cfg: - print("Uploading to GDrive ...") - report.archive_testcases(report_folder, depth=2) - drive = google_drive.Drive(cfg['gdrive']) - url = drive.new_workdir(args['board']) - drive.upload_folder(report_folder) + super().upload_logs_to_github(report_data) - if 'mail' in cfg: - print("Sending email ...") + def compose_mail(self, mail_ctx): + if 'subject' not in mail_ctx: + mail_ctx['subject'] = "[Zephyr] AutoPTS test session results" - # keep mail related context to simplify the code - mail_ctx = {'repos_info': bot_state['repo_status'], - 'summary': f'''{mail.status_dict2summary_html(summary)} -{mail.regressions2html(regressions, descriptions)} -{mail.progresses2html(progresses, descriptions)} -{mail.new_cases2html(new_cases, descriptions)} -{mail.deleted_cases2html(deleted_cases, descriptions)}''', - } + return super().compose_mail(mail_ctx) - # Summary - # Regression and test case description - - # Log in Google drive in HTML format - if 'gdrive' in cfg and url: - mail_ctx["log_url"] = mail.url2html(url, "Results on Google Drive") - - if 'githubdrive' in cfg and github_link: - if 'log_url' in mail_ctx: - mail_ctx["log_url"] += '
' - else: - mail_ctx["log_url"] = '' - mail_ctx['log_url'] += mail.url2html(github_link, 'Results on Github') - - if 'log_url' not in mail_ctx: - mail_ctx['log_url'] = 'Not Available' - - # Elapsed Time - mail_ctx["elapsed_time"] = str(datetime.timedelta( - seconds=(int(end_time - bot_state['start_time'])))) - - subject, body = compose_mail(args, cfg['mail'], mail_ctx) - - mail.send_mail(cfg['mail'], subject, body, - [report_file, report_txt]) - - print("Done") +class ZephyrClient(autoptsclient.Client): + def __init__(self): + super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'zephyr') - bot.common.cleanup() - print("\nBye!") - sys.stdout.flush() - return 0 +BotClient = ZephyrBotClient diff --git a/autopts/client.py b/autopts/client.py index ca6a801a7a..f20e727f7b 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -38,7 +38,7 @@ from xmlrpc.server import SimpleXMLRPCServer from termcolor import colored -from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR +from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR, IUT_LOGS_FOLDER from autopts.ptsprojects import ptstypes from autopts.ptsprojects import stack from autopts.ptsprojects.boards import get_available_boards, tty_to_com @@ -557,6 +557,7 @@ def __init__(self, projects, test_cases, retry_count, db=None, xml_results_file=None): self.pts_ver = '' self.platform = '' + self.system_version = '' self.run_count_max = retry_count + 1 # Run test at least once self.run_count = 0 # Run count of current test case self.num_test_cases = len(test_cases) @@ -1211,7 +1212,7 @@ def run_test_cases(ptses, test_case_instances, args, stats, **kwargs): ports_str = '_'.join(str(x) for x in args.cli_port) now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") - session_log_dir = 'logs/cli_port_' + ports_str + '/' + now + session_log_dir = f'{IUT_LOGS_FOLDER}/cli_port_{ports_str}/{now}' try: os.makedirs(session_log_dir) except OSError as e: @@ -1300,6 +1301,7 @@ def 
__init__(self, get_iut, project, name, parser_class=CliParser): """ self.test_cases = None self.get_iut = get_iut + self.autopts_project_name = name self.store_tag = name + '_' setup_project_name(project) self.boards = get_available_boards(name) @@ -1357,7 +1359,7 @@ def main(self, _args=None): elif self.args.sudo: sys.exit("Please run this program as root.") - os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + os.makedirs(TMP_DIR, exist_ok=True) if self.args.store: tc_db_table_name = self.store_tag + str(self.args.board_name) diff --git a/autopts/config.py b/autopts/config.py index 7235786e32..335867c6d4 100644 --- a/autopts/config.py +++ b/autopts/config.py @@ -15,17 +15,32 @@ """Configuration variables""" +import os.path + SERVER_PORT = 65000 CLIENT_PORT = 65001 BTMON_PORT = 65432 MAX_SERVER_RESTART_TIME = 60 -TMP_DIR = 'tmp/' -ALL_STATS_RESULTS_XML = TMP_DIR + 'all_stats_results.xml' -TC_STATS_RESULTS_XML = TMP_DIR + 'tc_stats_results.xml' -TEST_CASES_JSON = TMP_DIR + 'test_cases_file.json' -ALL_STATS_JSON = TMP_DIR + 'all_stats.json' -TC_STATS_JSON = TMP_DIR + 'tc_stats.json' -TEST_CASE_DB = TMP_DIR + 'TestCase.db' -BOT_STATE_JSON = TMP_DIR + 'bot_state.json' +AUTOPTS_ROOT_DIR = os.path.dirname( # auto-pts repo directory + os.path.dirname( # autopts module directory + os.path.abspath(__file__))) # this file directory + +TMP_DIR = 'tmp' +ALL_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'all_stats_results.xml') +TC_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'tc_stats_results.xml') +TEST_CASES_JSON = os.path.join(TMP_DIR, 'test_cases_file.json') +ALL_STATS_JSON = os.path.join(TMP_DIR, 'all_stats.json') +TC_STATS_JSON = os.path.join(TMP_DIR, 'tc_stats.json') +TEST_CASE_DB = os.path.join(TMP_DIR, 'TestCase.db') +BOT_STATE_JSON = os.path.join(TMP_DIR, 'bot_state.json') +REPORT_README_MD = os.path.join(TMP_DIR, 'README.md') +AUTOPTS_REPORT_FOLDER = os.path.join(TMP_DIR, 'autopts_report') +IUT_LOGS_FOLDER = 'logs' +PTS_XMLS_FOLDER = os.path.join(TMP_DIR, 'XMLs') 
+ERRATA_DIR_PATH = os.path.join(AUTOPTS_ROOT_DIR, 'errata') +REPORT_XLSX = "report.xlsx" +REPORT_TXT = "report.txt" +REPORT_DIFF_TXT = "report-diff.txt" +ERROR_TXT = 'error.txt' diff --git a/autoptsserver.py b/autoptsserver.py index c59a53e2eb..735a1b4920 100755 --- a/autoptsserver.py +++ b/autoptsserver.py @@ -32,6 +32,7 @@ import copy import logging as root_logging import os +import platform import shutil import subprocess import sys @@ -502,6 +503,7 @@ def server_init(self): self.server.register_function(self.copy_file, 'copy_file') self.server.register_function(self.delete_file, 'delete_file') self.server.register_function(self.get_system_model, 'get_system_model') + self.server.register_function(self.get_system_version, 'get_system_version') self.server.register_function(self.shutdown_pts_bpv, 'shutdown_pts_bpv') self.server.register_function(self.get_path, 'get_path') self.server.register_instance(self.pts) @@ -549,6 +551,11 @@ def get_system_model(self): return 'Real HW' return 'PotatOS' + def get_system_version(self): + os_name = platform.system() + version = platform.release() + return f'{os_name} {version}' + def get_path(self): self._update_request_time() return os.path.dirname(os.path.abspath(__file__)) diff --git a/test/unittests.py b/test/unittests.py index aa3de0a41c..5b7c42b0d7 100644 --- a/test/unittests.py +++ b/test/unittests.py @@ -6,9 +6,9 @@ from pathlib import Path from unittest.mock import patch -from autopts.bot.zephyr import make_readme_md +# from autopts.bot.zephyr import make_readme_md from autopts.client import FakeProxy, TestCaseRunStats -from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML +from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML, IUT_LOGS_FOLDER from autopts.ptsprojects.testcase_db import TestCaseTable from autoptsclient_bot import import_bot_projects, import_bot_module from test.mocks.mocked_test_cases import mock_workspace_test_cases, test_case_list_generation_samples @@ -33,7 +33,7 @@ def setUp(self): 
os.chdir(dirname(dirname(abspath(__file__)))) open('ttyUSB', 'w').close() shutil.copy('test/configs/config_zephyr.py', 'autopts/bot/config.py') - os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + os.makedirs(TMP_DIR, exist_ok=True) def tearDown(self): os.remove('ttyUSB') @@ -150,6 +150,7 @@ def generate_stats(self, files): database_file = DATABASE_FILE TEST_CASE_DB = TestCaseTable('zephyr', database_file) + errata = report.get_errata('zephyr') start_time = 1693378197 # result of time.time() duration = 30 # seconds end_time = start_time + duration @@ -188,7 +189,7 @@ def generate_stats(self, files): } report_txt = report.make_report_txt( - results, regressions, progresses, '', 'zephyr') + results, regressions, progresses, '', errata) files['first_report_txt'] = report_txt assert os.path.exists(report_txt) @@ -226,7 +227,7 @@ def generate_stats(self, files): repos_info = {'zephyr': {'commit': '123456', 'desc': 'zephyr'}} pts_ver = '8_5_0' - iut_logs = 'logs/' + iut_logs = IUT_LOGS_FOLDER pts_logs = 'tmp/zephyr-master' xmls = 'tmp/XMLs' Path(iut_logs).mkdir(parents=True, exist_ok=True) @@ -238,30 +239,30 @@ def generate_stats(self, files): report_file = report.make_report_xlsx( results, summary, regressions, progresses, descriptions, - xmls, 'zephyr') + xmls, errata) files['report_file'] = report_file assert os.path.exists(report_file) report_txt = report.make_report_txt( - results, regressions, progresses, '', 'zephyr') + results, regressions, progresses, '', errata) files['report_txt'] = report_txt assert os.path.exists(report_txt) - readme_file = make_readme_md( - start_timestamp, end_time, repos_info, pts_ver) - files['readme_file'] = readme_file - assert os.path.exists(readme_file) + # readme_file = make_readme_md( + # start_timestamp, end_time, repos_info, pts_ver) + # files['readme_file'] = readme_file + # assert os.path.exists(readme_file) report_diff_txt, deleted_cases = report.make_report_diff( - githubdrive, results, regressions, progresses, new_cases) 
+ '', results, regressions, progresses, new_cases) files['report_diff_txt'] = report_diff_txt assert os.path.exists(report_diff_txt) - report_folder = report.make_report_folder( - iut_logs, pts_logs, xmls, report_file, report_txt, report_diff_txt, - readme_file, database_file, '_iut_zephyr_' + start_timestamp) - files['report_folder'] = report_folder - assert os.path.exists(report_folder) + # report_folder = report.make_report_folder( + # iut_logs, pts_logs, xmls, report_file, report_txt, report_diff_txt, + # readme_file, database_file, '_iut_zephyr_' + start_timestamp) + # files['report_folder'] = report_folder + # assert os.path.exists(report_folder) def test_generate_stats(self): files = {} diff --git a/tools/cron/common.py b/tools/cron/common.py index 6a4d9712be..aad6839c52 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -51,7 +51,7 @@ from autopts.bot.common import load_module_from_path from autopts.bot.common_features.github import update_repos from autopts.bot.common_features.mail import send_mail -from autopts.config import TC_STATS_JSON +from autopts.config import TC_STATS_JSON, TMP_DIR, IUT_LOGS_FOLDER, REPORT_XLSX, REPORT_TXT from tools.cron.compatibility import find_latest, find_by_project_hash, find_by_autopts_hash, find_by_pts_ver, \ get_hash_from_reference from tools.cron.remote_terminal import RemoteTerminalClientProxy @@ -307,10 +307,10 @@ def ssh_copy_file(hostname, username, password, def pre_cleanup_files(autopts_repo, project_repo): files_to_save = [ - os.path.join(autopts_repo, 'tmp/'), - os.path.join(autopts_repo, 'logs/'), - os.path.join(autopts_repo, 'report.txt'), - os.path.join(autopts_repo, 'report.xlsx'), + os.path.join(autopts_repo, TMP_DIR), + os.path.join(autopts_repo, IUT_LOGS_FOLDER), + os.path.join(autopts_repo, REPORT_TXT), + os.path.join(autopts_repo, REPORT_XLSX), os.path.join(autopts_repo, 'TestCase.db'), os.path.join(autopts_repo, 'stdout_autoptsbot.log'), os.path.join(autopts_repo, 
'stdout_autoptsserver.log'), From 499a353796330169438893c4a34f5979d6e72e9f Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 5 Jul 2024 15:01:27 +0200 Subject: [PATCH 05/44] common: Allow to configure the paths of generated files --- autopts/bot/common.py | 216 ++++++++++++-------------- autopts/bot/common_features/report.py | 41 ++--- autopts/bot/mynewt.py | 5 +- autopts/bot/zephyr.py | 2 +- autopts/client.py | 42 +++-- autopts/config.py | 53 +++++-- test/unittests.py | 87 ++++------- tools/cron/common.py | 31 ++-- 8 files changed, 232 insertions(+), 245 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index 6b8745af32..d3fefa9e51 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -28,31 +28,14 @@ from argparse import Namespace from autopts import client as autoptsclient from autopts.bot.common_features import github, report, mail, google_drive -from autopts.bot.common_features.report import REPORT_TXT from autopts.client import CliParser, Client, TestCaseRunStats, init_logging -from autopts.config import MAX_SERVER_RESTART_TIME, TEST_CASES_JSON, ALL_STATS_JSON, TC_STATS_JSON, \ - ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON, TMP_DIR, REPORT_README_MD, AUTOPTS_REPORT_FOLDER, \ - REPORT_DIFF_TXT, REPORT_XLSX, IUT_LOGS_FOLDER, AUTOPTS_ROOT_DIR +from autopts.config import MAX_SERVER_RESTART_TIME, AUTOPTS_ROOT_DIR, generate_file_paths from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr, release_device from autopts.ptsprojects.testcase_db import DATABASE_FILE log = logging.debug -def cleanup_tmp_files(): - files = [ALL_STATS_RESULTS_XML, - TC_STATS_RESULTS_XML, - TEST_CASES_JSON, - ALL_STATS_JSON, - TC_STATS_JSON, - BOT_STATE_JSON, - ] - - for file in files: - if os.path.exists(file): - os.remove(file) - - def get_deepest_dirs(logs_tree, dst_tree, max_depth): def recursive(directory, depth=3): depth -= 1 @@ -177,10 +160,7 @@ def __init__(self, get_iut, project, name, 
bot_config_class=BotConfigArgs, self.backup = {'available': False, 'create': False, 'all_stats': None, - 'tc_stats': None, - 'test_cases_file': TEST_CASES_JSON, - 'all_stats_file': ALL_STATS_JSON, - 'tc_stats_file': TC_STATS_JSON} + 'tc_stats': None} def parse_or_find_tty(self, args): if args.tty_alias: @@ -205,20 +185,20 @@ def load_backup_of_previous_run(self): continue_test_case = None continue_config = None - if os.path.exists(self.backup['all_stats_file']): - self.backup['all_stats'] = TestCaseRunStats.load_from_backup(self.backup['all_stats_file']) + if os.path.exists(self.file_paths['ALL_STATS_JSON_FILE']): + self.backup['all_stats'] = TestCaseRunStats.load_from_backup(self.file_paths['ALL_STATS_JSON_FILE']) continue_config = self.backup['all_stats'].pending_config # The last config and test case preformed in the broken test run - if os.path.exists(self.backup['tc_stats_file']): - self.backup['tc_stats'] = TestCaseRunStats.load_from_backup(self.backup['tc_stats_file']) + if os.path.exists(self.file_paths['TC_STATS_JSON_FILE']): + self.backup['tc_stats'] = TestCaseRunStats.load_from_backup(self.file_paths['TC_STATS_JSON_FILE']) continue_config = self.backup['tc_stats'].pending_config continue_test_case = self.backup['tc_stats'].pending_test_case if not continue_config: return - with open(self.backup['test_cases_file']) as f: + with open(self.file_paths['TEST_CASES_JSON_FILE']) as f: data = f.read() test_cases_per_config = json.loads(data) run_order = list(test_cases_per_config.keys()) @@ -234,7 +214,7 @@ def load_backup_of_previous_run(self): # The faulty test case was the last one in the config. 
Move to the next config self.backup['tc_stats'].update(continue_test_case, 0, 'TIMEOUT') self._merge_stats(self.backup['all_stats'], self.backup['tc_stats']) - self.backup['all_stats'].save_to_backup(self.backup['all_stats_file']) + self.backup['all_stats'].save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) self.backup['tc_stats'] = None config_index += 1 continue_test_case = None @@ -271,23 +251,47 @@ def parse_config_and_args(self, bot_config_dict=None): self.args, errmsg = self.arg_parser.parse(bot_config_namespace) self.args.retry_config = bot_config_dict.get('retry_config', None) + if 'file_paths' in self.bot_config: + generate_file_paths(self.bot_config['file_paths']) + if errmsg: return errmsg - if self.args.use_backup: + if self.args.use_backup and os.path.exists(self.file_paths['BOT_STATE_JSON_FILE']): self.load_backup_of_previous_run() else: - cleanup_tmp_files() + self.bot_pre_cleanup() # Remove default root handler that was created at the first logging.debug logging.getLogger().handlers.clear() - init_logging('_' + '_'.join(str(x) for x in self.args.cli_port)) + init_logging('_' + '_'.join(str(x) for x in self.args.cli_port), + self.file_paths.get('BOT_LOG_FILE', None)) return errmsg def apply_config(self, args, config, value): pass + def bot_pre_cleanup(self): + """Perform cleanup before test run + :return: None + """ + try: + files_to_save = [ + self.file_paths['TMP_DIR'], + self.file_paths['IUT_LOGS_DIR'], + ] + + save_dir = os.path.join(self.file_paths['OLD_LOGS_DIR'], + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M")) + Path(save_dir).mkdir(parents=True, exist_ok=True) + + for file_path in files_to_save: + if os.path.exists(file_path): + shutil.move(file_path, os.path.join(save_dir, os.path.basename(file_path))) + except OSError as e: + pass + def _yield_next_config(self): limit_counter = 0 @@ -330,7 +334,7 @@ def _yield_next_config(self): run_order.append(config) if self.args.use_backup: - with open(self.backup['test_cases_file'], 'w') as 
file: + with open(self.file_paths['TEST_CASES_JSON_FILE'], 'w') as file: file.write(json.dumps(test_cases, indent=4)) for config in run_order: @@ -342,7 +346,7 @@ def _backup_tc_stats(self, config=None, test_case=None, stats=None, **kwargs): stats.pending_config = config stats.pending_test_case = test_case - stats.save_to_backup(self.backup['tc_stats_file']) + stats.save_to_backup(self.file_paths['TC_STATS_JSON_FILE']) def _merge_stats(self, all_stats, stats): all_stats.merge(stats) @@ -350,19 +354,19 @@ def _merge_stats(self, all_stats, stats): if os.path.exists(stats.xml_results): os.remove(stats.xml_results) - if os.path.exists(TC_STATS_JSON): - os.remove(TC_STATS_JSON) + if os.path.exists(self.file_paths['TC_STATS_JSON_FILE']): + os.remove(self.file_paths['TC_STATS_JSON_FILE']) def run_test_cases(self): all_stats = self.backup['all_stats'] stats = self.backup['tc_stats'] if not all_stats: - all_stats = TestCaseRunStats([], [], 0, xml_results_file=ALL_STATS_RESULTS_XML) + all_stats = TestCaseRunStats([], [], 0, xml_results_file=self.file_paths['ALL_STATS_RESULTS_XML_FILE']) self.backup['all_stats'] = all_stats if self.args.use_backup: - all_stats.save_to_backup(self.backup['all_stats_file']) + all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) projects = self.ptses[0].get_project_list() @@ -373,7 +377,7 @@ def run_test_cases(self): config_args.test_cases, config_args.retry, self.test_case_database, - xml_results_file=TC_STATS_RESULTS_XML) + xml_results_file=self.file_paths['TC_STATS_RESULTS_XML_FILE']) if self.args.use_backup: self._backup_tc_stats(config=config, test_case=None, stats=stats) @@ -385,7 +389,8 @@ def run_test_cases(self): config_args, stats, config=config, - pre_test_case_fn=self._backup_tc_stats) + pre_test_case_fn=self._backup_tc_stats, + file_paths=copy.deepcopy(self.file_paths)) except BuildAndFlashException: log(f'Build and flash step failed for config {config}') @@ -399,7 +404,7 @@ def run_test_cases(self): stats = None if 
self.args.use_backup: - all_stats.save_to_backup(self.backup['all_stats_file']) + all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) # End of bot run - all test cases completed @@ -412,7 +417,7 @@ def run_test_cases(self): if self.args.use_backup: all_stats.test_run_completed = True - all_stats.save_to_backup(self.backup['all_stats_file']) + all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) try: results = all_stats.get_results() @@ -437,10 +442,11 @@ def start(self, args=None): sending logs, reports, etc. """ - if os.path.exists(BOT_STATE_JSON): - print(f'Continuing the previous terminated test run (remove {TMP_DIR} to start freshly)') + if os.path.exists(self.file_paths['BOT_STATE_JSON_FILE']): + print(f'Continuing the previous terminated test run ' + f'(remove {self.file_paths["TMP_DIR"]} to start freshly)') - with open(BOT_STATE_JSON, "r") as f: + with open(self.file_paths['BOT_STATE_JSON_FILE'], "r") as f: data = f.read() bot_state = json.loads(data) self.bot_config = bot_state['bot_config'] @@ -448,7 +454,6 @@ def start(self, args=None): else: # Start fresh test run - pre_cleanup() bot_state = {'start_time': time.time()} if 'githubdrive' in self.bot_config: @@ -466,10 +471,10 @@ def start(self, args=None): bot_state['repo_status'] = '' if self.bot_config['auto_pts'].get('use_backup', False): - os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True) + os.makedirs(self.file_paths["TMP_DIR"], exist_ok=True) bot_state['bot_config'] = self.bot_config - with open(BOT_STATE_JSON, "w") as f: + with open(self.file_paths['BOT_STATE_JSON_FILE'], "w") as f: f.write(json.dumps(bot_state, indent=4)) try: @@ -498,23 +503,31 @@ def start(self, args=None): report_data['tc_results'] = collections.OrderedDict(sorted(report_data['tc_results'].items())) - report_data['errata'] = report.get_errata(self.autopts_project_name) - - report_data['pts_logs_folder'], report_data['pts_xml_folder'] = report.pull_server_logs(self.args) - - report_data['report_xlsx'] 
= report.make_report_xlsx(report_data['tc_results'], - report_data['status_count'], - report_data['regressions'], - report_data['progresses'], - report_data['descriptions'], - report_data['pts_xml_folder'], - report_data['errata']) - - report_data['report_txt'] = report.make_report_txt(report_data['tc_results'], - report_data['regressions'], - report_data['progresses'], - report_data['repo_status'], - report_data['errata']) + report_data['errata'] = report.get_errata([ + os.path.join(AUTOPTS_ROOT_DIR, 'errata/common.yaml'), + os.path.join(AUTOPTS_ROOT_DIR, f'errata/{self.autopts_project_name}.yaml') + ]) + + report_data['pts_logs_folder'], report_data['pts_xml_folder'] = \ + report.pull_server_logs(self.args, + self.file_paths['TMP_DIR'], + self.file_paths['PTS_XMLS_DIR']) + + report.make_report_xlsx(self.file_paths['REPORT_XLSX_FILE'], + report_data['tc_results'], + report_data['status_count'], + report_data['regressions'], + report_data['progresses'], + report_data['descriptions'], + report_data['pts_xml_folder'], + report_data['errata']) + + report.make_report_txt(self.file_paths['REPORT_TXT_FILE'], + report_data['tc_results'], + report_data['regressions'], + report_data['progresses'], + report_data['repo_status'], + report_data['errata']) if 'githubdrive' in self.bot_config or 'gdrive' in self.bot_config: self.make_report_folder(report_data) @@ -534,10 +547,10 @@ def run_tests(self): # Entry point of the simple client layer return super().start() - def make_readme_md(self, report_data): + def make_readme_md(self, readme_md_path, report_data): """Creates README.md for Github logging repo """ - readme_file = REPORT_README_MD + readme_file = readme_md_path Path(os.path.dirname(readme_file)).mkdir(parents=True, exist_ok=True) @@ -564,39 +577,33 @@ def make_report_folder(self, report_data): """Creates folder containing .txt and .xlsx reports, pulled logs from autoptsserver, iut logs and additional README.md. 
""" - report_data['report_folder'] = AUTOPTS_REPORT_FOLDER - shutil.rmtree(report_data['report_folder'], ignore_errors=True) - Path(report_data['report_folder']).mkdir(parents=True, exist_ok=True) + shutil.rmtree(self.file_paths['REPORT_DIR'], ignore_errors=True) + Path(self.file_paths['REPORT_DIR']).mkdir(parents=True, exist_ok=True) if 'githubdrive' in self.bot_config: - report_folder_name = os.path.basename(report_data['report_folder']) - - report_data['old_report_txt'] = os.path.join(self.bot_config['githubdrive']['path'], - self.bot_config['githubdrive']['subdir'], - report_folder_name, REPORT_TXT) - - report_data['report_diff_txt'], report_data['deleted_cases'] = \ - report.make_report_diff(report_data['old_report_txt'], + report_data['deleted_cases'] = \ + report.make_report_diff(self.bot_config['githubdrive'].get('old_report_txt', ''), + self.file_paths['REPORT_DIFF_TXT_FILE'], report_data['tc_results'], report_data['regressions'], report_data['progresses'], report_data['new_cases']) - report_data['readme_file'] = self.make_readme_md(report_data) + report_data['readme_file'] = self.make_readme_md(self.file_paths['REPORT_README_MD_FILE'], report_data) attachments = [ - REPORT_DIFF_TXT, - report_data['report_txt'], - (report_data['report_txt'], f'report_{report_data["start_time_stamp"]}.txt'), - (report_data['report_xlsx'], f'report_{report_data["start_time_stamp"]}.xlsx'), - REPORT_README_MD, + self.file_paths['REPORT_DIFF_TXT_FILE'], + self.file_paths['REPORT_TXT_FILE'], + (self.file_paths['REPORT_TXT_FILE'], f'report_{report_data["start_time_stamp"]}.txt'), + (self.file_paths['REPORT_TXT_FILE'], f'report_{report_data["start_time_stamp"]}.xlsx'), + self.file_paths['REPORT_README_MD_FILE'], report_data['database_file'], report_data['pts_xml_folder'], ] - iut_logs_new = os.path.join(report_data['report_folder'], 'iut_logs') - pts_logs_new = os.path.join(report_data['report_folder'], 'pts_logs') - get_deepest_dirs(IUT_LOGS_FOLDER, iut_logs_new, 3) + 
iut_logs_new = os.path.join(self.file_paths['REPORT_DIR'], 'iut_logs') + pts_logs_new = os.path.join(self.file_paths['REPORT_DIR'], 'pts_logs') + get_deepest_dirs(self.file_paths['IUT_LOGS_DIR'], iut_logs_new, 3) get_deepest_dirs(report_data['pts_logs_folder'], pts_logs_new, 3) self.generate_attachments(report_data, attachments) @@ -608,7 +615,7 @@ def generate_attachments(self, report_data, attachments): pass def pack_report_folder(self, report_data, attachments): - report_dir = report_data['report_folder'] + report_dir = self.file_paths['REPORT_DIR'] for item in attachments: if isinstance(item, tuple): @@ -644,16 +651,17 @@ def upload_logs_to_github(self, report_data): if 'commit_msg' not in report_data: report_data['commit_msg'] = report_data['start_time_stamp'] - report_data['github_link'], report_data['report_folder'] = report.github_push_report( - report_data['report_folder'], self.bot_config['githubdrive'], report_data['commit_msg']) + report_data['github_link'], self.file_paths['REPORT_DIR'] = \ + report.github_push_report(self.file_paths['REPORT_DIR'], + self.bot_config['githubdrive'], + report_data['commit_msg']) def upload_logs_to_gdrive(self, report_data): - report_folder = report_data['report_folder'] board_name = self.bot_config['auto_pts']['board'] gdrive_config = self.bot_config['gdrive'] log(f'Archiving the report folder ...') - report.archive_testcases(report_folder, depth=2) + report.archive_testcases(self.file_paths['REPORT_DIR'], depth=2) log(f'Connecting to GDrive ...') drive = google_drive.Drive(gdrive_config) @@ -663,7 +671,7 @@ def upload_logs_to_gdrive(self, report_data): log(report_data['gdrive_url']) log("Uploading to GDrive ...") - drive.upload_folder(report_folder) + drive.upload_folder(self.file_paths['REPORT_DIR']) def send_email(self, report_data): log("Sending email ...") @@ -720,7 +728,8 @@ def send_email(self, report_data): subject, body = self.compose_mail(mail_ctx) mail.send_mail(self.bot_config['mail'], subject, body, - 
[report_data['report_xlsx'], report_data['report_txt']]) + [self.file_paths['REPORT_XLSX_FILE'], + self.file_paths['REPORT_TXT_FILE']]) def compose_mail(self, mail_ctx): """ Create a email body @@ -926,24 +935,3 @@ def load_module_from_path(cfg): sys.path.remove(config_dirname) return module - - -def pre_cleanup(): - """Perform cleanup before test run - :return: None - """ - try: - shutil.copytree(IUT_LOGS_FOLDER, "oldlogs", dirs_exist_ok=True) - shutil.rmtree(IUT_LOGS_FOLDER) - except OSError: - pass - - -def cleanup(): - """Perform cleanup - :return: None - """ - try: - cleanup_tmp_files() - except OSError: - pass diff --git a/autopts/bot/common_features/report.py b/autopts/bot/common_features/report.py index c48544cc6a..ecab65a689 100644 --- a/autopts/bot/common_features/report.py +++ b/autopts/bot/common_features/report.py @@ -29,18 +29,15 @@ from autopts.bot.common_features import github from autopts.bot import common from autopts.client import PtsServer -from autopts.config import PTS_XMLS_FOLDER, TMP_DIR, REPORT_XLSX, REPORT_TXT, REPORT_DIFF_TXT, ERROR_TXT, \ - ERRATA_DIR_PATH, AUTOPTS_ROOT_DIR +from autopts.config import AUTOPTS_ROOT_DIR log = logging.debug -def get_errata(project_name): - errata_common = os.path.join(ERRATA_DIR_PATH, 'common.yaml') - errata_project = os.path.join(ERRATA_DIR_PATH, f'{project_name}.yaml') +def get_errata(errata_files): errata = {} - for file in [errata_common, errata_project]: + for file in errata_files: if os.path.exists(file): with open(file, 'r') as stream: loaded_errata = yaml.safe_load(stream) @@ -71,7 +68,7 @@ def make_repo_status(repos_info): # **************************************************************************** # .xlsx spreadsheet file # **************************************************************************** -def make_report_xlsx(results_dict, status_dict, regressions_list, +def make_report_xlsx(report_xlsx_path, results_dict, status_dict, regressions_list, progresses_list, descriptions, xmls, errata): 
"""Creates excel file containing test cases results and summary pie chart :param results_dict: dictionary with test cases results @@ -103,7 +100,7 @@ def find_xml_by_case(case): header = "AutoPTS Report: " \ "{}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) - workbook = xlsxwriter.Workbook(REPORT_XLSX) + workbook = xlsxwriter.Workbook(report_xlsx_path) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'pie', 'subtype': 'percent_stacked'}) @@ -173,13 +170,11 @@ def find_xml_by_case(case): worksheet.insert_chart('H2', chart) workbook.close() - return os.path.join(os.getcwd(), REPORT_XLSX) - # **************************************************************************** # .txt result file # **************************************************************************** -def make_report_txt(results_dict, regressions_list, +def make_report_txt(report_txt_path, results_dict, regressions_list, progresses_list, repo_status, errata): """Creates txt file containing test cases results :param results_dict: dictionary with test cases results @@ -190,8 +185,7 @@ def make_report_txt(results_dict, regressions_list, :return: txt file path """ - filename = os.path.join(os.getcwd(), REPORT_TXT) - f = open(filename, "w") + f = open(report_txt_path, "w") f.write(f"{repo_status}, autopts={get_autopts_version()}\n") for tc, result in list(results_dict.items()): @@ -215,8 +209,6 @@ def make_report_txt(results_dict, regressions_list, f.close() - return filename - def report_parse_test_cases(report): if not os.path.exists(report): @@ -238,10 +230,9 @@ def report_parse_test_cases(report): return test_cases[1:] -def make_report_diff(old_report_txt, results, regressions, - progresses, new_cases): - filename = os.path.join(os.getcwd(), REPORT_DIFF_TXT) - f = open(filename, "w") +def make_report_diff(old_report_txt, report_diff_txt_path, results, + regressions, progresses, new_cases): + f = open(report_diff_txt_path, "w") deleted_cases = [] old_test_cases = [] @@ 
-272,12 +263,11 @@ def make_report_diff(old_report_txt, results, regressions, f.close() - return filename, deleted_cases + return deleted_cases -def make_error_txt(msg): - filename = os.path.join(os.getcwd(), ERROR_TXT) - with open(filename, "w") as f: +def make_error_txt(msg, error_txt_path): + with open(error_txt_path, "w") as f: f.write(msg) @@ -352,7 +342,7 @@ def split_xml_filename(file_path): return test_name, timestamp -def pull_server_logs(args): +def pull_server_logs(args, tmp_dir, xml_folder): """Copy Bluetooth Protocol Viewer logs from auto-pts servers. :param args: args """ @@ -365,8 +355,7 @@ def pull_server_logs(args): else: workspace_dir = workspace_name - logs_folder = os.path.join(TMP_DIR, workspace_name) - xml_folder = PTS_XMLS_FOLDER + logs_folder = os.path.join(tmp_dir, workspace_name) shutil.rmtree(logs_folder, ignore_errors=True) shutil.rmtree(xml_folder, ignore_errors=True) Path(xml_folder).mkdir(parents=True, exist_ok=True) diff --git a/autopts/bot/mynewt.py b/autopts/bot/mynewt.py index 9650f5872f..0580076e3f 100755 --- a/autopts/bot/mynewt.py +++ b/autopts/bot/mynewt.py @@ -25,7 +25,6 @@ from autopts import bot from autopts.bot.common import BuildAndFlashException from autopts.client import Client -from autopts.config import BOT_STATE_JSON from autopts.ptsprojects.boards import get_build_and_flash, get_board_type from autopts.ptsprojects.mynewt.iutctl import get_iut, log from autopts.bot.common_features import report @@ -128,7 +127,7 @@ def apply_config(self, args, config, value): build_and_flash(args.project_path, board_type, overlay, args.debugger_snr) except BaseException as e: traceback.print_exception(e) - report.make_error_txt('Build and flash step failed') + report.make_error_txt('Build and flash step failed', self.file_paths['ERROR_TXT_FILE']) raise BuildAndFlashException time.sleep(10) @@ -143,7 +142,7 @@ def start(self, args=None): # In case wsl was configured and its bash has higher prio than msys2 bash os.environ['PATH'] = 
'/usr/bin:' + os.environ['PATH'] - if not os.path.exists(BOT_STATE_JSON): + if not os.path.exists(self.file_paths['BOT_STATE_JSON_FILE']): if self.bot_config.get('newt_upgrade', False): bot.common.check_call(['newt', 'upgrade', '-f', '--shallow=0'], cwd=self.bot_config['project_path']) diff --git a/autopts/bot/zephyr.py b/autopts/bot/zephyr.py index e33e3a2ec0..b02c695ba2 100755 --- a/autopts/bot/zephyr.py +++ b/autopts/bot/zephyr.py @@ -139,7 +139,7 @@ def apply_config(self, args, config, value): flush_serial(args.tty_file) except BaseException as e: traceback.print_exception(e) - report.make_error_txt('Build and flash step failed') + report.make_error_txt('Build and flash step failed', self.file_paths['ERROR_TXT_FILE']) raise BuildAndFlashException time.sleep(10) diff --git a/autopts/client.py b/autopts/client.py index f20e727f7b..fc5e4e0da0 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -17,7 +17,7 @@ # """Common code for the auto PTS clients""" - +import copy import datetime import errno import json @@ -35,10 +35,11 @@ import traceback import xml.etree.ElementTree as ElementTree import xmlrpc.client +from os.path import dirname from xmlrpc.server import SimpleXMLRPCServer from termcolor import colored -from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR, IUT_LOGS_FOLDER +from autopts.config import FILE_PATHS from autopts.ptsprojects import ptstypes from autopts.ptsprojects import stack from autopts.ptsprojects.boards import get_available_boards, tty_to_com @@ -404,7 +405,7 @@ def get_test_data_path(pts): get_my_ip_address.cached_address = None -def init_logging(tag=""): +def init_logging(tag="", log_filename=None): """Initialize logging""" root_logger = logging.getLogger('root') @@ -417,10 +418,13 @@ def init_logging(tag=""): # Already inited return - script_name = os.path.basename(sys.argv[0]) # in case it is full path - script_name_no_ext = os.path.splitext(script_name)[0] + if log_filename: + os.makedirs(dirname(log_filename), 
exist_ok=True) + else: + script_name = os.path.basename(sys.argv[0]) # in case it is full path + script_name_no_ext = os.path.splitext(script_name)[0] + log_filename = "%s%s.log" % (script_name_no_ext, tag) - log_filename = "%s%s.log" % (script_name_no_ext, tag) format_template = ("%(asctime)s %(threadName)s %(name)s %(levelname)s %(filename)-25s " "%(lineno)-5s %(funcName)-25s : %(message)s") @@ -574,6 +578,7 @@ def __init__(self, projects, test_cases, retry_count, db=None, self.test_run_completed = False if self.xml_results and not os.path.exists(self.xml_results): + os.makedirs(dirname(self.xml_results), exist_ok=True) root = ElementTree.Element("results") tree = ElementTree.ElementTree(root) tree.write(self.xml_results) @@ -1212,7 +1217,8 @@ def run_test_cases(ptses, test_case_instances, args, stats, **kwargs): ports_str = '_'.join(str(x) for x in args.cli_port) now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") - session_log_dir = f'{IUT_LOGS_FOLDER}/cli_port_{ports_str}/{now}' + logs_folder = kwargs["file_paths"]["IUT_LOGS_DIR"] + session_log_dir = f'{logs_folder}/cli_port_{ports_str}/{now}' try: os.makedirs(session_log_dir) except OSError as e: @@ -1300,6 +1306,7 @@ def __init__(self, get_iut, project, name, parser_class=CliParser): param parser_class: argument parser """ self.test_cases = None + self.file_paths = FILE_PATHS self.get_iut = get_iut self.autopts_project_name = name self.store_tag = name + '_' @@ -1359,15 +1366,17 @@ def main(self, _args=None): elif self.args.sudo: sys.exit("Please run this program as root.") - os.makedirs(TMP_DIR, exist_ok=True) + os.makedirs(self.file_paths["TMP_DIR"], exist_ok=True) if self.args.store: tc_db_table_name = self.store_tag + str(self.args.board_name) - if os.path.exists(self.args.database_file) and not os.path.exists(TEST_CASE_DB): - shutil.copy(self.args.database_file, TEST_CASE_DB) + if os.path.exists(self.args.database_file) and \ + not os.path.exists(self.file_paths['TEST_CASE_DB_FILE']): + 
shutil.copy(self.args.database_file, self.file_paths['TEST_CASE_DB_FILE']) - self.test_case_database = TestCaseTable(tc_db_table_name, TEST_CASE_DB) + self.test_case_database = TestCaseTable(tc_db_table_name, + self.file_paths['TEST_CASE_DB_FILE']) init_pts(self.args, self.ptses) @@ -1384,7 +1393,7 @@ def main(self, _args=None): self.cleanup() if self.args.store: - shutil.move(TEST_CASE_DB, self.args.database_file) + shutil.move(self.file_paths['TEST_CASE_DB_FILE'], self.args.database_file) print("\nBye!") sys.stdout.flush() @@ -1413,14 +1422,15 @@ def run_test_cases(self): projects = self.ptses[0].get_project_list() - if os.path.exists(TC_STATS_RESULTS_XML): - os.remove(TC_STATS_RESULTS_XML) + if os.path.exists(self.file_paths['TC_STATS_RESULTS_XML_FILE']): + os.remove(self.file_paths['TC_STATS_RESULTS_XML_FILE']) stats = TestCaseRunStats(projects, self.args.test_cases, self.args.retry, self.test_case_database, - xml_results_file=TC_STATS_RESULTS_XML) + xml_results_file=self.file_paths['TC_STATS_RESULTS_XML_FILE']) - return run_test_cases(self.ptses, self.test_cases, self.args, stats) + return run_test_cases(self.ptses, self.test_cases, self.args, stats, + file_paths=copy.deepcopy(self.file_paths)) def cleanup(self): log(f'{self.__class__.__name__}.{self.cleanup.__name__}') diff --git a/autopts/config.py b/autopts/config.py index 335867c6d4..dacb17dbd0 100644 --- a/autopts/config.py +++ b/autopts/config.py @@ -27,20 +27,39 @@ os.path.dirname( # autopts module directory os.path.abspath(__file__))) # this file directory -TMP_DIR = 'tmp' -ALL_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'all_stats_results.xml') -TC_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'tc_stats_results.xml') -TEST_CASES_JSON = os.path.join(TMP_DIR, 'test_cases_file.json') -ALL_STATS_JSON = os.path.join(TMP_DIR, 'all_stats.json') -TC_STATS_JSON = os.path.join(TMP_DIR, 'tc_stats.json') -TEST_CASE_DB = os.path.join(TMP_DIR, 'TestCase.db') -BOT_STATE_JSON = os.path.join(TMP_DIR, 'bot_state.json') 
-REPORT_README_MD = os.path.join(TMP_DIR, 'README.md') -AUTOPTS_REPORT_FOLDER = os.path.join(TMP_DIR, 'autopts_report') -IUT_LOGS_FOLDER = 'logs' -PTS_XMLS_FOLDER = os.path.join(TMP_DIR, 'XMLs') -ERRATA_DIR_PATH = os.path.join(AUTOPTS_ROOT_DIR, 'errata') -REPORT_XLSX = "report.xlsx" -REPORT_TXT = "report.txt" -REPORT_DIFF_TXT = "report-diff.txt" -ERROR_TXT = 'error.txt' +FILE_PATHS = {} + + +def generate_file_paths(file_paths=None, autopts_root_dir=AUTOPTS_ROOT_DIR): + if file_paths and 'TMP_DIR' in file_paths: + FILE_PATHS['TMP_DIR'] = file_paths['TMP_DIR'] + else: + FILE_PATHS['TMP_DIR'] = os.path.join(autopts_root_dir, 'tmp') + + FILE_PATHS.update({ + 'ALL_STATS_RESULTS_XML_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'all_stats_results.xml'), + 'TC_STATS_RESULTS_XML_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'tc_stats_results.xml'), + 'TEST_CASES_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'test_cases_file.json'), + 'ALL_STATS_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'all_stats.json'), + 'TC_STATS_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'tc_stats.json'), + 'TEST_CASE_DB_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'TestCase.db'), + 'BOT_STATE_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'bot_state.json'), + 'REPORT_README_MD_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'README.md'), + 'REPORT_DIR': os.path.join(FILE_PATHS['TMP_DIR'], 'autopts_report'), + 'IUT_LOGS_DIR': os.path.join(autopts_root_dir, 'logs'), + 'OLD_LOGS_DIR': os.path.join(autopts_root_dir, 'oldlogs'), + 'PTS_XMLS_DIR': os.path.join(FILE_PATHS['TMP_DIR'], 'XMLs'), + 'REPORT_XLSX_FILE': os.path.join(autopts_root_dir, "report.xlsx"), + 'REPORT_TXT_FILE': os.path.join(autopts_root_dir, "report.txt"), + 'REPORT_DIFF_TXT_FILE': os.path.join(FILE_PATHS['TMP_DIR'], "report-diff.txt"), + 'ERROR_TXT_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'error.txt'), + # 'BOT_LOG_FILE': os.path.join(autopts_root_dir, 'autoptsclient_bot.log'), + }) + + if file_paths: + FILE_PATHS.update(file_paths) + 
+ return FILE_PATHS + + +generate_file_paths({}, AUTOPTS_ROOT_DIR) diff --git a/test/unittests.py b/test/unittests.py index 5b7c42b0d7..65e84bd0e4 100644 --- a/test/unittests.py +++ b/test/unittests.py @@ -6,9 +6,8 @@ from pathlib import Path from unittest.mock import patch -# from autopts.bot.zephyr import make_readme_md from autopts.client import FakeProxy, TestCaseRunStats -from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML, IUT_LOGS_FOLDER +from autopts.config import FILE_PATHS from autopts.ptsprojects.testcase_db import TestCaseTable from autoptsclient_bot import import_bot_projects, import_bot_module from test.mocks.mocked_test_cases import mock_workspace_test_cases, test_case_list_generation_samples @@ -33,12 +32,12 @@ def setUp(self): os.chdir(dirname(dirname(abspath(__file__)))) open('ttyUSB', 'w').close() shutil.copy('test/configs/config_zephyr.py', 'autopts/bot/config.py') - os.makedirs(TMP_DIR, exist_ok=True) def tearDown(self): os.remove('ttyUSB') delete_file('autopts/bot/config.py') - delete_file('tmp/') + for name in FILE_PATHS: + delete_file(FILE_PATHS[name]) def test_bot_startup_import_bot_projects(self): """Check that all supported methods of passing a config file @@ -116,7 +115,7 @@ def mock_get_test_case_list(project): return mock_workspace_test_cases[project] def mock_run_test_cases(ptses, test_case_instances, args, stats, **kwargs): - return TestCaseRunStats([], [], 0, xml_results_file=ALL_STATS_RESULTS_XML) + return TestCaseRunStats([], [], 0, xml_results_file=FILE_PATHS['ALL_STATS_RESULTS_XML_FILE']) for args in testargs: with patch.object(sys, 'argv', args.split(' ')): @@ -145,11 +144,11 @@ def mock_run_test_cases(ptses, test_case_instances, args, stats, **kwargs): f'mock_iut_config_{i} use case failed') bot_client.run_test_cases() - def generate_stats(self, files): + def test_generate_stats(self): # Test useful for debugging stats and reports generation database_file = DATABASE_FILE - TEST_CASE_DB = TestCaseTable('zephyr', 
database_file) + test_case_db = TestCaseTable('zephyr', database_file) errata = report.get_errata('zephyr') start_time = 1693378197 # result of time.time() duration = 30 # seconds @@ -165,7 +164,7 @@ def generate_stats(self, files): new_cases_id = [13, 14, 15] stats1 = TestCaseRunStats(mock_workspace_test_cases.keys(), - test_cases, 0, None, xml_results_file=ALL_STATS_RESULTS_XML) + test_cases, 0, None, xml_results_file=FILE_PATHS['ALL_STATS_RESULTS_XML_FILE']) # Mock results from a first bot run, to generate regressions, # progresses, new cases in a second one. for i, tc in enumerate(test_cases): @@ -176,7 +175,7 @@ def generate_stats(self, files): continue stats1.update(tc, end_time, status) - TEST_CASE_DB.update_statistics(tc, duration, status) + test_case_db.update_statistics(tc, duration, status) end_time = start_time + duration results = stats1.get_results() @@ -188,21 +187,19 @@ def generate_stats(self, files): 'subdir': 'host/', } - report_txt = report.make_report_txt( - results, regressions, progresses, '', errata) - files['first_report_txt'] = report_txt - assert os.path.exists(report_txt) + report.make_report_txt(FILE_PATHS['REPORT_TXT_FILE'], results, + regressions, progresses, '', errata) + assert os.path.exists(FILE_PATHS['REPORT_TXT_FILE']) first_report_txt = os.path.join( githubdrive['path'], githubdrive['subdir'], 'autopts_report', - os.path.basename(report_txt)) + os.path.basename(FILE_PATHS['REPORT_TXT_FILE'])) Path(os.path.dirname(first_report_txt)).mkdir(parents=True, exist_ok=True) - shutil.move(report_txt, first_report_txt) - files['first_report_txt'] = first_report_txt + shutil.move(FILE_PATHS['REPORT_TXT_FILE'], first_report_txt) stats = TestCaseRunStats(mock_workspace_test_cases.keys(), - test_cases, 0, TEST_CASE_DB, xml_results_file=ALL_STATS_RESULTS_XML) + test_cases, 0, test_case_db, xml_results_file=FILE_PATHS['ALL_STATS_RESULTS_XML_FILE']) # Mock results from a second bot run. # Note one deleted test case. 
@@ -212,7 +209,7 @@ def generate_stats(self, files): status = 'FAIL' stats.update(tc, end_time, status) - TEST_CASE_DB.update_statistics(tc, duration, status) + test_case_db.update_statistics(tc, duration, status) end_time = start_time + duration summary = stats.get_status_count() @@ -227,50 +224,24 @@ def generate_stats(self, files): repos_info = {'zephyr': {'commit': '123456', 'desc': 'zephyr'}} pts_ver = '8_5_0' - iut_logs = IUT_LOGS_FOLDER - pts_logs = 'tmp/zephyr-master' - xmls = 'tmp/XMLs' + iut_logs = FILE_PATHS['IUT_LOGS_DIR'] + pts_logs = os.path.join(FILE_PATHS['TMP_DIR'], 'zephyr-master') + xmls = FILE_PATHS['PTS_XMLS_DIR'] Path(iut_logs).mkdir(parents=True, exist_ok=True) Path(pts_logs).mkdir(parents=True, exist_ok=True) Path(xmls).mkdir(parents=True, exist_ok=True) - files['iut_logs'] = iut_logs - files['pts_logs'] = pts_logs - files['xmls'] = xmls - - report_file = report.make_report_xlsx( - results, summary, regressions, progresses, descriptions, - xmls, errata) - files['report_file'] = report_file - assert os.path.exists(report_file) - - report_txt = report.make_report_txt( - results, regressions, progresses, '', errata) - files['report_txt'] = report_txt - assert os.path.exists(report_txt) - - # readme_file = make_readme_md( - # start_timestamp, end_time, repos_info, pts_ver) - # files['readme_file'] = readme_file - # assert os.path.exists(readme_file) - - report_diff_txt, deleted_cases = report.make_report_diff( - '', results, regressions, progresses, new_cases) - files['report_diff_txt'] = report_diff_txt - assert os.path.exists(report_diff_txt) - - # report_folder = report.make_report_folder( - # iut_logs, pts_logs, xmls, report_file, report_txt, report_diff_txt, - # readme_file, database_file, '_iut_zephyr_' + start_timestamp) - # files['report_folder'] = report_folder - # assert os.path.exists(report_folder) - def test_generate_stats(self): - files = {} - try: - self.generate_stats(files) - finally: - for key in files: - 
delete_file(files[key]) + report.make_report_xlsx(FILE_PATHS['REPORT_XLSX_FILE'], results, summary, + regressions, progresses, descriptions, xmls, errata) + assert os.path.exists(FILE_PATHS['REPORT_XLSX_FILE']) + + report.make_report_txt(FILE_PATHS['REPORT_TXT_FILE'], results, + regressions, progresses, '', errata) + assert os.path.exists(FILE_PATHS['REPORT_TXT_FILE']) + + report.make_report_diff('', FILE_PATHS['REPORT_DIFF_TXT_FILE'], + results, regressions, progresses, new_cases) + assert os.path.exists(FILE_PATHS['REPORT_DIFF_TXT_FILE']) if __name__ == '__main__': diff --git a/tools/cron/common.py b/tools/cron/common.py index aad6839c52..f8f22f6921 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -51,7 +51,7 @@ from autopts.bot.common import load_module_from_path from autopts.bot.common_features.github import update_repos from autopts.bot.common_features.mail import send_mail -from autopts.config import TC_STATS_JSON, TMP_DIR, IUT_LOGS_FOLDER, REPORT_XLSX, REPORT_TXT +from autopts.config import generate_file_paths, FILE_PATHS from tools.cron.compatibility import find_latest, find_by_project_hash, find_by_autopts_hash, find_by_pts_ver, \ get_hash_from_reference from tools.cron.remote_terminal import RemoteTerminalClientProxy @@ -146,7 +146,7 @@ def report_to_review_msg(report_path): def error_to_review_msg(config): - error_txt_path = os.path.join(config['cron']['autopts_repo'], 'error.txt') + error_txt_path = config['file_paths']['ERROR_TXT_FILE'] msg = 'AutoPTS Bot failed:\n' if not os.path.exists(error_txt_path): @@ -266,8 +266,7 @@ def recursive(directory, depth): def pre_cleanup(config): terminate_processes(config) - pre_cleanup_files(config['cron']['autopts_repo'], - config['auto_pts']['project_path']) + pre_cleanup_files(config) workspace_path = find_workspace_in_tree( os.path.join(config['cron']['autopts_repo'], @@ -305,12 +304,16 @@ def ssh_copy_file(hostname, username, password, client.close() -def pre_cleanup_files(autopts_repo, 
project_repo): +def pre_cleanup_files(config): + file_paths = config['file_paths'] + autopts_repo = config['cron']['autopts_repo'] + project_repo = config['auto_pts']['project_path'] + files_to_save = [ - os.path.join(autopts_repo, TMP_DIR), - os.path.join(autopts_repo, IUT_LOGS_FOLDER), - os.path.join(autopts_repo, REPORT_TXT), - os.path.join(autopts_repo, REPORT_XLSX), + file_paths['TMP_DIR'], + file_paths['IUT_LOGS_DIR'], + file_paths['REPORT_TXT_FILE'], + file_paths['REPORT_XLSX_FILE'], os.path.join(autopts_repo, 'TestCase.db'), os.path.join(autopts_repo, 'stdout_autoptsbot.log'), os.path.join(autopts_repo, 'stdout_autoptsserver.log'), @@ -572,7 +575,7 @@ def _restart_processes(config): def _run_test(config): backup = config['auto_pts'].get('use_backup', False) timeguard = config['cron']['test_run_timeguard'] - results_file_path = os.path.join(config['cron']['autopts_repo'], TC_STATS_JSON) + results_file_path = config['file_paths']['TC_STATS_JSON_FILE'] srv_process, bot_process = _start_processes(config, checkout_repos=True) last_check_time = time() @@ -644,6 +647,14 @@ def get_cron_config(cfg, **kwargs): if 'autopts_repo' not in config: config['autopts_repo'] = AUTOPTS_REPO + file_paths = cron_config.get('file_paths', {}) + if 'TMP_DIR' not in file_paths: + file_paths['TMP_DIR'] = os.path.join(config['autopts_repo'], + os.path.basename(FILE_PATHS['TMP_DIR'])) + + cron_config['file_paths'] = generate_file_paths( + file_paths=file_paths, autopts_root_dir=config['autopts_repo']) + if 'compatibility_csv' in config: if 'project_hash' in config: project_hash = get_hash_from_reference(config['project_path'], config['project_hash']) From 37972e291bf5711209032b8388b1aa9548a83466 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 5 Jul 2024 19:52:25 +0200 Subject: [PATCH 06/44] mynewt: Apply auto-update of PTS workspace to 8.6 --- .../nimble-master/nimble-master.pqw6 | 622 +++++++----------- 1 file changed, 242 insertions(+), 380 deletions(-) diff --git 
a/autopts/workspaces/nimble-master/nimble-master.pqw6 b/autopts/workspaces/nimble-master/nimble-master.pqw6 index 13b732b549..de9e2a657e 100755 --- a/autopts/workspaces/nimble-master/nimble-master.pqw6 +++ b/autopts/workspaces/nimble-master/nimble-master.pqw6 @@ -1,25 +1,24 @@ - + GAP - TSPC_GAP_0_1 - BR/EDR (C.1) + BR/EDR (C.1, C.4) FALSE FALSE TSPC_GAP_0_2 - LE (C.2) + LE (C.2, C.4) TRUE FALSE TSPC_GAP_0_3 - BR/EDR/LE (C.3) + BR/EDR/LE (C.3, C.4) FALSE FALSE @@ -179,6 +178,12 @@ FALSE FALSE + + TSPC_GAP_2_14 + Out-of-Band (O) + FALSE + FALSE + TSPC_GAP_3_1 Initiation of general inquiry (C.1) @@ -257,6 +262,12 @@ FALSE FALSE + + TSPC_GAP_4a_1 + Detect insufficient authentication (C.1) + FALSE + FALSE + TSPC_GAP_5_1 Broadcaster (C.1) @@ -313,25 +324,25 @@ TSPC_GAP_8_1 - Non-connectable and non-scannable undirected event (M) + Non-connectable and non-scannable undirected events (M) TRUE TRUE TSPC_GAP_8_2 - Scannable undirected event (O) + Scannable undirected events (O) TRUE FALSE TSPC_GAP_8_3 - Non-connectable and non-scannable directed event (C.1) + Non-connectable and non-scannable directed events (C.1) TRUE FALSE TSPC_GAP_8_4 - Scannable directed event (C.1) + Scannable directed events (C.1) TRUE FALSE @@ -859,43 +870,43 @@ TSPC_GAP_20_1 - Connectable and scannable undirected event (M) + Connectable and scannable undirected events (M) TRUE TRUE TSPC_GAP_20_2 - Connectable directed event (O) + Connectable directed events (O) TRUE FALSE TSPC_GAP_20_3 - Non-connectable and non-scannable undirected event (O) + Non-connectable and non-scannable undirected events (O) TRUE FALSE TSPC_GAP_20_4 - Scannable undirected event (O) + Scannable undirected events (O) TRUE FALSE TSPC_GAP_20_5 - Connectable undirected event (C.1) + Connectable undirected events (C.1) TRUE FALSE TSPC_GAP_20_6 - Non-connectable and non-scannable directed event (C.1) + Non-connectable and non-scannable directed events (C.1) TRUE FALSE TSPC_GAP_20_7 - Scannable directed event (C.1) + Scannable 
directed events (C.1) TRUE FALSE @@ -1039,7 +1050,7 @@ TSPC_GAP_21_3 - Encryption procedure (O) + Encryption procedure (C.3) TRUE FALSE @@ -1273,7 +1284,7 @@ TSPC_GAP_25_14 - Client security checks for GATT indications and notifications (C.7) + Client security checks for GATT indications and notifications (O) TRUE FALSE @@ -1343,6 +1354,12 @@ FALSE FALSE + + TSPC_GAP_27_10a + Encrypted Data Key Material Indications (C.2) + FALSE + FALSE + TSPC_GAP_27_11 LE GATT Security Levels (O) @@ -1415,6 +1432,30 @@ FALSE FALSE + + TSPC_GAP_27b_9 + Out of Band (O) + FALSE + FALSE + + + TSPC_GAP_27c_1 + Detect insufficient authentication (C.1) + FALSE + FALSE + + + TSPC_GAP_27c_2 + Detect insufficient authorization (C.2) + FALSE + FALSE + + + TSPC_GAP_27c_3 + Detect insufficient encryption (C.3) + FALSE + FALSE + TSPC_GAP_28_1 Transmitter (M) @@ -1603,7 +1644,7 @@ TSPC_GAP_31_3 - Encryption procedure (O) + Encryption procedure (C.3) TRUE FALSE @@ -1795,7 +1836,7 @@ TSPC_GAP_35_8 - Unauthenticated Pairing (LE security mode1 level 2) (C.1) + Unauthenticated Pairing (LE security mode 1 level 2) (C.1) TRUE FALSE @@ -1837,7 +1878,7 @@ TSPC_GAP_35_15 - Client security checks for GATT indications and notifications (C.5) + Client security checks for GATT indications and notifications (O) TRUE FALSE @@ -1955,6 +1996,30 @@ FALSE FALSE + + TSPC_GAP_37b_9 + Out of Band (O) + FALSE + FALSE + + + TSPC_GAP_37c_1 + Detect insufficient authentication (C.1) + FALSE + FALSE + + + TSPC_GAP_37c_2 + Detect insufficient authorization (C.2) + FALSE + FALSE + + + TSPC_GAP_37c_3 + Detect insufficient encryption (C.3) + FALSE + FALSE + TSPC_GAP_38_1 Broadcaster (C.1) @@ -1985,15 +2050,21 @@ FALSE TRUE + + TSPC_GAP_41_2 + Cross Transport Key Derivation Supported (C.1) + FALSE + FALSE + TSPC_GAP_41_2a - Derivation of BR/EDR Link Key from LE LTK (O) + Derivation of BR/EDR Link Key from LE LTK (C.2) FALSE FALSE TSPC_GAP_41_2b - Derivation of LE LTK from BR/EDR Link Key (O) + Derivation of LE LTK from BR/EDR 
Link Key (C.3) FALSE FALSE @@ -2003,15 +2074,21 @@ FALSE TRUE + + TSPC_GAP_43_2 + Cross Transport Key Derivation Supported (C.1) + FALSE + FALSE + TSPC_GAP_43_2a - Derivation of BR/EDR Link Key from LE LTK (O) + Derivation of BR/EDR Link Key from LE LTK (C.2) FALSE FALSE TSPC_GAP_43_2b - Derivation of LE LTK from BR/EDR Link Key (O) + Derivation of LE LTK from BR/EDR Link Key (C.3) FALSE FALSE @@ -2605,13 +2682,13 @@ TSPC_GATT_1_1 Generic Attribute Profile (GATT) Client (C.1) - TRUE + FALSE FALSE TSPC_GATT_1_2 Generic Attribute Profile (GATT) Server (C.1) - TRUE + FALSE FALSE @@ -2779,7 +2856,7 @@ TSPC_GATT_3_17 Notifications (C.7) - TRUE + FALSE FALSE @@ -3100,6 +3177,12 @@ FALSE FALSE + + TSPC_GATT_6_6 + AdditionalProtocolDescriptorList (C.3) + FALSE + FALSE + TSPC_GATT_7_2 LE security Mode 1 (C.2) @@ -3136,6 +3219,12 @@ FALSE FALSE + + TSPC_GATT_7_8 + Client security checks for GATT indications and notifications (C.3) + FALSE + FALSE + TSPC_GATT_8_1 Support for multiple simultaneous active ATT bearers from same device – ATT over LE and ATT over BR/EDR (C.1) @@ -3349,66 +3438,48 @@ TSPC_GAP_0_2 LE (C.2) - TRUE + FALSE FALSE TSPC_GAP_24_2 Bondable Mode (C.1) - TRUE + FALSE FALSE TSPC_GAP_24_3 Bonding Procedure (C.1) - TRUE + FALSE FALSE TSPC_GAP_34_2 Bondable Mode (C.2) - TRUE + FALSE FALSE TSPC_GAP_34_3 Bonding Procedure (C.2) - TRUE - FALSE - - - TSPC_SUM_ICS_31_17 - Core Spec Version 4.2, Adopted 02 December 2014 (C.1) FALSE FALSE - TSPC_SUM_ICS_31_18 - Core Spec Version 4.2 + HS, Adopted 02 December 2014 (C.1) + TSPC_CORE_2a_52 + Host Core v5.2 or later (C.3) FALSE FALSE - TSPC_SUM_ICS_31_19 - Core Specification Version 5.0. Adopted 06 December 2016 (C.1) + TSPC_CORE_2a_53 + Host Core v5.3 or later (C.4) FALSE FALSE - TSPC_SUM_ICS_31_20 - Core Specification Version 5.1. Adopted 15 January 2019 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_21 - Core Specification Version 5.2. 
Adopted 31 December 2019 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_22 - Core Specification Version 5.3. Adopted 06 July 2021 (C.1) + TSPC_CORE_2b_51 + Host Core v5.1 or earlier (C.2) FALSE FALSE @@ -3558,401 +3629,223 @@ BOOLEAN FALSE + + TSPX_iut_mandates_mitm + IUT security policy of whether or not it mandates MITM. (Default: FALSE) + FALSE + BOOLEAN + + + TSPX_additional_service_uuid_for_sdp + Service Search Pattern used by Discover Services by UUID using SDP test cases.(Default: 180F) + 180F + OCTETSTRING + - + - IOPT + IOPT_OLD - TSPC_GAP_1_2 - Limited-discoverable mode (O) + TSPC_3DSP_1_1 + 3DSP 1.0 FALSE FALSE - TSPC_GAP_1_3 - General-discoverable mode (O) + TSPC_A2DP_1_1 + Source (C.1) FALSE FALSE - TSPC_HFP_1_1 - Role: Audio Gateway (AG) + TSPC_A2DP_1_2 + Sink (C.1) FALSE FALSE - TSPC_SDP_1b_2 - Support for client role. (C.1) - SDP, 2.1 - C.1 + TSPC_AVRCP_1_1 + Controller (C.1) FALSE FALSE - TSPC_SDP_2_1 - Support for respond on search of single Service, using ServiceSearchRequest. (C.2) - SDP, 4.5 - C.2 - - FALSE - FALSE - - - TSPC_support_AdvancedAudioDistributionProfile_Sink - Support for: Advanced Audio Distribution Profile. Role: Sink - FALSE - FALSE - - - TSPC_support_AdvancedAudioDistributionProfile_Source - Support for: Advanced Audio Distribution Profile. Role: Source - FALSE - FALSE - - - TSPC_support_AVRemoteControlProfile_CT - Support for: Audio\Video Remote Control Profile. Role: Controller - FALSE - FALSE - - - TSPC_support_AVRemoteControlProfile_TG - Support for: Audio\Video Remote Control Profile. Role: Target - FALSE - FALSE - - - TSPC_support_BasicImagingProfile_CLIENT - Support for: Basic Imaging Profile. Role: Client - FALSE - FALSE - - - TSPC_support_BasicImagingProfile_SERVER_ImagingAutomaticArchive - Support for: Basic Imaging Profile. Role: Server Functionality: Imaging autoarchive - FALSE - FALSE - - - TSPC_support_BasicImagingProfile_SERVER_ImagingReferencedObjects - Support for: Basic Imaging Profile. 
Role: Server Functionality: Imaging referenced objects - FALSE - FALSE - - - TSPC_support_BasicImagingProfile_SERVER_ImagingResponder - Support for: Basic Imaging Profile. Role: Server Functionality: Imaging responder - FALSE - FALSE - - - TSPC_support_BasicPrintingProfile_PRINTER - Support for: Basic Printing Profile. Role: Printer - FALSE - FALSE - - - TSPC_support_BasicPrintingProfile_PRINTER_ReflectedUI - Support for: Basic Printing Profile. Role: Printer Functionality: Reflected UI - FALSE - FALSE - - - TSPC_support_BasicPrintingProfile_SENDER_Referenced_objects_Service - Support for: Basic Printing Profile. Role: Sender Functionality: Reference objects service - FALSE - FALSE - - - TSPC_support_DialUpNetworkingProfile_DT - Support for: Dial-Up Networking Profile. Role: Data Terminal - FALSE - FALSE - - - TSPC_support_DialUpNetworkingProfile_GW - Support for: Dial-Up Networking Profile. Role: Gateway - FALSE - FALSE - - - TSPC_support_ExtendedServiceDiscoveryProfile_IP_LAP - Support for: Extended Service Discovery Profile. Version: IP-LAP - FALSE - FALSE - - - TSPC_support_ExtendedServiceDiscoveryProfile_IP_PAN - Support for: Extended Service Discovery Profile. Version: IP-PAN + TSPC_BPP_1_1 + Roles: Printer (Pr) (C.1) FALSE FALSE - TSPC_support_ExtendedServiceDiscoveryProfile_L2CAP - Support for: Extended Service Discovery Profile. Version: L2CAP + TSPC_DUN_1_1 + Role : Gateway (GW) (C.1) FALSE FALSE - TSPC_support_FAXProfile_DT - Support for: FAX Profile. Role: Data Terminal + TSPC_FTP_1_1 + File Transfer Server (C.1) FALSE FALSE - TSPC_support_FAXProfile_GW - Support for: FAX Profile. Role: Gateway + TSPC_GNSS_1_1 + Role: Server (C.1) FALSE FALSE - TSPC_support_FileTransferProfile_CLIENT - Support for: File Transfer Profile. Role: Client + TSPC_HCRP_1_1 + Role: Server (C.1) FALSE FALSE - TSPC_support_FileTransferProfile_SERVER - Support for: File Transfer Profile. 
Role: Server + TSPC_HFP_1_2 + Role: Hands-Free (HF) (C.1) FALSE FALSE - TSPC_support_HealthDeviceProfile_Sink - Support for: Health Device Profile Role: Sink + TSPC_HID11_1_4 + HID Device Role (C.2) FALSE FALSE - TSPC_support_HealthDeviceProfile_Source - Support for: Health Device Profile Role: Source + TSPC_HSP_1_2 + Role: Headset (HS) (C.1) FALSE FALSE - TSPC_support_NewHandsFreeProfile_AG - Support for: Handsfree Profile Role: Audio gateway - FALSE - FALSE - - - TSPC_support_NewHandsFreeProfile_HF - Support for: Handsfree Profile Role: Hands-Free unit - FALSE - FALSE - - - TSPC_support_HardCopyReplacementProfile_CLIENT_CR_RegisterNotofication_support - Support for: Hard Copy cable Replacement Profile. Role: Client Functionality: CR register notification support - FALSE - FALSE - - - TSPC_support_HardCopyReplacementProfile_CLIENT_print - Support for: Hard Copy cable Replacement Profile. Role: Client Functionality: Print - FALSE - FALSE - - - TSPC_support_HardCopyReplacementProfile_CLIENT_scan - Support for: Hard Copy cable Replacement Profile. Role: Client Functionality: Scan - FALSE - FALSE - - - TSPC_support_HardCopyReplacementProfile_SERVER_print - Support for: Hard Copy cable Replacement Profile. Role: Server Functionality: Print - FALSE - FALSE - - - TSPC_support_HardCopyReplacementProfile_SERVER_scan - Support for: Hard Copy cable Replacement Profile. Role: Server Functionality: Scan - FALSE - FALSE - - - TSPC_support_HeadsetProfile_AG - Support for: Headset Profile Role: Audio Gateway - FALSE - FALSE - - - TSPC_support_HeadsetProfile_HS - Support for: Headset Profile. Role: Headset - FALSE - FALSE - - - TSPC_support_HumanInterfaceDeviceProfile - Support for: Human Interface Device Profile Role: Device - FALSE - FALSE - - - TSPC_support_HID_Host - Support for: Human Interface Device Profile Role: Host - FALSE - FALSE - - - TSPC_support_LANAccessProfile_DT - Support for: LAN Access Profile. 
Role: Data Terminal - FALSE - FALSE - - - TSPC_support_LANAccessProfile_LAP - Support for: LAN Access Profile. Role: LAN Access Point - FALSE - FALSE - - - TSPC_support_MessageAccessProfile_MCE - Support for: Message Access Profile Role: MCE - FALSE - FALSE - - - TSPC_support_MessageAccessProfile_MSE - Support for: Message Access Profile Role: MSE - FALSE - FALSE - - - TSPC_support_ObjectPushProfile_CLIENT - Support for: Object Push Profile. Role: Client - FALSE - FALSE - - - TSPC_support_ObjectPushProfile_SERVER - Support for: Object Push Profile. Role: Server + TSPC_OPP_1_2 + Object Push Server (C.1) + FALSE FALSE - TSPC_support_PersonalAreaNetworkProfile_GN - Support for: Personal Area Network Profile. Role: GN + TSPC_PAN_1_1 + Role: Network Access Point (C.1) FALSE FALSE - TSPC_support_PersonalAreaNetworkProfile_NAP - Support for: Personal Area Network Profile. Role: NAP + TSPC_PAN_1_2 + Role: Group Ad-hoc Network (C.1) FALSE FALSE - TSPC_support_PersonalAreaNetworkProfile_PANU - Support for: Personal Area Network Profile. Role: PANU + TSPC_PAN_1_3 + Role: PAN User (C.1) FALSE FALSE - TSPC_support_PhonebookAccessProfile_PCE - Support for: Personal phone book access. Role: PCE. + TSPC_SAP_1_2 + Role: SIM Access Server (Server) (C.1) FALSE FALSE - TSPC_support_PhonebookAccessProfile_PSE - Support for: Personal phone book access. Role: PSE. + TSPC_SYNC_1_1 + IrMC Client (C.1) FALSE FALSE - TSPC_support_SerialPortProfile_Service - Support for: Serial Port Profile. Role: Dev B + TSPC_SYNC_1_2 + IrMC Server (C.1) FALSE FALSE - TSPC_support_ServiceDiscoveryApplicationProfile - Support for: Service Discovery Application Profile. + TSPC_VDP_1_1 + Source (C.1) FALSE FALSE - TSPC_support_SIMAccessProfile_CLIENT - Support for: SIM access Profile. Role: Client + TSPC_VDP_1_2 + Sink (C.1) FALSE FALSE - TSPC_support_SIMAccessProfile_SERVER - Support for: SIM access Profile. 
Role: Server + TSPC_AICS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_SynchronizationProfile_CLIENT - Support for: Synchronization Profile. Role: Client + TSPC_ASCS_2_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_SynchronizationProfile_SERVER - Support for: Synchronization Profile. Role: Server + TSPC_BASS_2_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_UDIProfile_MT - Support for: UDI Profile. Role: MT + TSPC_CAS_2_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_UDIProfile_TA - Support for: UDI Profile. Role: TA + TSPC_MCS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_VideoDistributionProfile_Sink - Support for: Video distribution Profile. Role: Sink + TSPC_GMCS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_VideoDistributionProfile_Source - Support for: Video distribution Profile. Role: Source + TSPC_MICS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_WAPOverBluetooth_CLIENT - Support for: WAP over Bluetooth Profile. Role: Client + TSPC_PACS_2_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_WAPOverBluetooth_PROXY - Support for: WAP over Bluetooth Profile. Role: PROXY + TSPC_TBS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_GNSS_SERVER - Support for: GNSS Profile. Role: Server + TSPC_GTBS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_3DSP_Display - Support for: 3DSP Profile. Role: Display + TSPC_VCS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE - TSPC_support_3DSP_Glasses - Support for: 3DSP Profile. 
Role: Glasses + TSPC_VOCS_1_1 + Service supported over BR/EDR (C.1) FALSE FALSE @@ -3965,7 +3858,7 @@ - IOPT + IOPT_OLD @@ -4043,19 +3936,19 @@ TSPC_L2CAP_0_1 - BR/EDR (includes possible support of GAP LE Broadcaster or LE Observer roles) (C.1) + BR/EDR (C.1, C.4) FALSE FALSE TSPC_L2CAP_0_2 - LE (C.2) + LE (C.2, C.4) TRUE FALSE TSPC_L2CAP_0_3 - BR/EDR/LE (C.3) + BR/EDR/LE (C.3, C.4) FALSE FALSE @@ -4127,7 +4020,7 @@ TSPC_L2CAP_2_6 - Send information request (C.17) + Send information request (C.11) FALSE FALSE @@ -4151,7 +4044,7 @@ TSPC_L2CAP_2_11 - Flow Control mode (C.17) + Flow Control mode (C.11) FALSE FALSE @@ -4265,7 +4158,7 @@ TSPC_L2CAP_2_30 - Fixed channel(s) (C.11) + Information request for fixed channels (C.9) FALSE FALSE @@ -4331,7 +4224,7 @@ TSPC_L2CAP_2_41 - Command reject (C.13) + Command reject (C.29) TRUE FALSE @@ -4395,6 +4288,12 @@ TRUE FALSE + + TSPC_L2CAP_2_49 + Support pending result in L2CAP_CREDIT_BASED_CONNECTION_RSP (C.28) + FALSE + FALSE + TSPC_L2CAP_3_1 RTX timer (M) @@ -4445,19 +4344,19 @@ TSPC_L2CAP_3_9 - Negotiate and support service type No traffic (C.2) + Negotiate and support service type 'No traffic' (C.2) FALSE FALSE TSPC_L2CAP_3_10 - Negotiate and support service type Best effort (C.3) + Negotiate and support service type 'Best effort' (C.3) FALSE FALSE TSPC_L2CAP_3_11 - Negotiate and support service type Guaranteed (C.2) + Negotiate and support service type 'Guaranteed' (C.2) FALSE FALSE @@ -4469,19 +4368,19 @@ TSPC_L2CAP_3_13 - Negotiate and support service type No traffic for Extended Flow Specification (C.7) + Negotiate and support service type 'No traffic' for Extended Flow Specification (C.7) FALSE FALSE TSPC_L2CAP_3_14 - Negotiate and support service type Best Effort for Extended Flow Specification (C.8) + Negotiate and support service type 'Best Effort' for Extended Flow Specification (C.8) FALSE FALSE TSPC_L2CAP_3_15 - Negotiate and support service type Guaranteed for Extended Flow Specification. 
(C.7) + Negotiate and support service type 'Guaranteed' for Extended Flow Specification. (C.7) FALSE FALSE @@ -4493,31 +4392,31 @@ TSPC_L2CAP_4_1 - Authentication procedure (LE) (C.1) + Detect insufficient authentication (O) TRUE FALSE TSPC_L2CAP_4_2 - Authorization procedure (LE) (C.1) + Detect insufficient authorization (O) TRUE FALSE TSPC_L2CAP_4_3 - Encryption procedure (C.2) + Detect insufficient encryption (O) TRUE FALSE TSPC_L2CAP_5_1 - Authentication procedure (BR/EDR) (C.1) + Detect insufficient authentication (O) FALSE FALSE TSPC_L2CAP_5_2 - Authorization procedure (BR/EDR) (O) + Detect insufficient authorization (O) FALSE FALSE @@ -4548,7 +4447,7 @@ TSPC_GATT_1a_1 GATT Client over LE (C.1) - TRUE + FALSE FALSE @@ -4560,7 +4459,7 @@ TSPC_GATT_1a_3 GATT Server over LE (C.3) - TRUE + FALSE FALSE @@ -4578,7 +4477,7 @@ TSPC_GATT_2_3a Enhanced ATT bearer over LE (C.4, C.3) - TRUE + FALSE FALSE @@ -7505,18 +7404,17 @@ SM - TSPC_SM_1_1 - Role : Central Role (Initiator) (C.1) - TRUE + Central Role (Initiator) (C.1) + FALSE FALSE TSPC_SM_1_2 - Role : Peripheral Role (Responder) (C.1) - TRUE + Peripheral Role (Responder) (C.1) + FALSE FALSE @@ -7537,12 +7435,6 @@ TRUE TRUE - - TSPC_SM_2_4 - OOB supported (O) - TRUE - FALSE - TSPC_SM_2_5 LE Secure Connections (O) @@ -7688,38 +7580,8 @@ FALSE - TSPC_SUM_ICS_31_17 - Core Spec Version 4.2, Adopted 02 December 2014 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_18 - Core Spec Version 4.2 + HS, Adopted 02 December 2014 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_19 - Core Specification Version 5.0. Adopted 06 December 2016 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_20 - Core Specification Version 5.1. Adopted 15 January 2019 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_21 - Core Specification Version 5.2. Adopted 31 December 2019 (C.1) - FALSE - FALSE - - - TSPC_SUM_ICS_31_22 - Core Specification Version 5.3. 
Adopted 06 July 2021 (C.1) + TSPC_CORE_2a_53 + Host Core v5.3 or later (C.4) FALSE FALSE From 90a35b2719d1cca835649b86e05af674c8dd0961 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Thu, 11 Jul 2024 10:48:51 +0200 Subject: [PATCH 07/44] errata: Update with erratas for PTS 8.6 --- errata/common.yaml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/errata/common.yaml b/errata/common.yaml index 9b0baacc5a..40025e5502 100644 --- a/errata/common.yaml +++ b/errata/common.yaml @@ -2,10 +2,7 @@ # GAP/CONN/NCON/BV-01-C: CASE00xxxxx # GAP/CONN/NCON/BV-02-C: CASE00xxxxx -L2CAP/ECFC/BV-43-C: TSE18351 -L2CAP/ECFC/BV-44-C: TSE18351 - -GAP/BOND/BON/BV-03-C: Request ID 104112 -GAP/SEC/SEM/BI-09-C: Request ID 104091 -GAP/SEC/SEM/BI-20-C: Request ID 104091 -GAP/SEC/SEM/BI-21-C: Request ID 104091 +GAP/BOND/BON/BV-03-C: ES-25754 +GAP/SEC/SEM/BI-10-C: Request ID 104895 +GAP/SEC/SEM/BI-22-C: Request ID 104895 +GAP/SEC/SEM/BI-23-C: Request ID 104895 From a5bd2901a62ecc979bbddca633c80f07aa33eb4e Mon Sep 17 00:00:00 2001 From: m-alperen-sener Date: Tue, 9 Jul 2024 14:22:07 +0200 Subject: [PATCH 08/44] zephyr: mesh: add mbtm and dfum profiles to zephyr-master workspaces Adding back the test profiles mbtm and dfum to workspace with default IXIT values. Signed-off-by: m-alperen-sener --- .../zephyr/zephyr-master/zephyr-master.pqw6 | 574 ++++++++++++++++++ 1 file changed, 574 insertions(+) diff --git a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 index ed9facef76..86b742bf4c 100755 --- a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 +++ b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 @@ -9067,6 +9067,320 @@ + + + DFUM + + + + TSPC_DFUM_ALL + Enables all test cases when set. 
+ FALSE + FALSE + + + TSPC_DFUM_0_1 + Mesh Device Firmware Update Model v1.0 (M) + TRUE + TRUE + + + TSPC_DFUM_3_1 + Firmware Update Server model (O) + TRUE + FALSE + + + TSPC_DFUM_3_2 + Firmware Distribution Server model (C.1) + TRUE + FALSE + + + TSPC_DFUM_3_3 + Firmware Update Client model (O) + TRUE + FALSE + + + TSPC_DFUM_3_4 + Firmware Distribution Client model (C.1) + FALSE + FALSE + + + TSPC_DFUM_10_1 + BLOB Transfer Server model (M) + TRUE + TRUE + + + TSPC_DFUM_11_1 + Pull BLOB Transfer Mode (C.1) + TRUE + FALSE + + + TSPC_DFUM_11_2 + Push BLOB Transfer Mode (C.2) + TRUE + FALSE + + + TSPC_DFUM_20_1 + BLOB Transfer Server model (M) + TRUE + TRUE + + + TSPC_DFUM_21_1 + Pull BLOB Transfer Mode (C.1) + TRUE + FALSE + + + TSPC_DFUM_21_2 + Push BLOB Transfer Mode (C.2) + TRUE + FALSE + + + TSPC_DFUM_22_1 + Store Firmware OOB procedure (O) + TRUE + FALSE + + + TSPC_DFUM_22_2 + Firmware Retrieval Over HTTPS procedure (C.1) + FALSE + FALSE + + + TSPC_DFUM_22_3 + Multiple Firmware Image Support (O) + TRUE + FALSE + + + TSPC_DFUM_30_1 + BLOB Transfer Client model (M) + TRUE + TRUE + + + TSPC_DFUM_40_1 + BLOB Transfer Client model (M) + FALSE + TRUE + + + TSPC_DFUM_41_1 + Upload Firmware OOB procedure (O) + FALSE + FALSE + + + TSPC_ALL + Enables all test cases when set. + FALSE + FALSE + + + + + DFUM + 1.0 + + + TSPX_bd_addr_iut + The unique 48-bit Bluetooth device address (BD_ADDR) of the IUT. This was filled in during workspace creation. + OCTETSTRING + 000000000000 + + + TSPX_time_guard + The time in milliseconds, to break PTS from waiting for a required event, if time guard is used by the test case. (Default: 300000) + INTEGER + 300000 + + + TSPX_use_implicit_send + Whether to receive a message via Implicit Send when an action or attention is needed by test operator. (Default: TRUE) + BOOLEAN + TRUE + + + TSPX_tester_database_file + Location of SIG database file. 
(Default: C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml) + IA5STRING + C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml + + + TSPX_mtu_size + MTU size. (Default: 23) + INTEGER + 23 + + + TSPX_delete_link_key + Whether to delete link key or not. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_delete_ltk + Whether to delete Long Term Key or not. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_security_enabled + Whether to set security for the test suite. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_iut_setup_att_over_br_edr + IUT can optionally test ATT over BR/EDR connection when this flag is set when applicable. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_scan_interval + The scan interval value N*0.625ms. Default is setto fast scan interval.(Default: 30) + INTEGER + 30 + + + TSPX_scan_window + The scan window value N*0.625ms. Default is set to fast scan window.(Default: 30) + INTEGER + 30 + + + TSPX_scan_filter + The scan filter setting to allow accept filter accept list only device.(Default: 0 - Fileter none) + OCTETSTRING + 00 + + + TSPX_advertising_interval_min + The minimum advertising interval value N*0.625ms. PTS will using this value to set advertising parameters.(Default: 160) + INTEGER + 160 + + + TSPX_advertising_interval_max + The maximum advertising interval value N*0.625ms. PTS will using this value to set advertising parameters.(Default: 160) + INTEGER + 160 + + + TSPX_tester_OOB_information + The 16-bit tester OOB information value. PTS will advertise using this value when in peripheral role.(Default: F87F) + OCTETSTRING + F87F + + + TSPX_device_uuid + used for mesh beacon. (Default: 001BDC0810210B0E0A0C000B0E0A0C00) + OCTETSTRING + 001BDC0810210B0E0A0C000B0E0A0C00 + + + TSPX_device_uuid2 + used for tester2 mesh beacon. (Default: 00000000000000000000000000000000) + OCTETSTRING + 00000000000000000000000000000000 + + + TSPX_use_pb_gatt_bearer + PB ADV or PB GATT bearer for PROT test cases. 
(Default: TRUE - PB-ADV) + BOOLEAN + FALSE + + + TSPX_iut_comp_data_page + The page number of the composition data (Default: 0) + INTEGER + 0 + + + TSPX_oob_state_change + Indicates if the IUT supports out-of-band triggered state changes. If set to TRUE, then the IUT supports Upper Tester commands to trigger a state change. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_enable_IUT_provisioner + Indicates if the IUT performs provisioning for client cases. If set to TRUE, PTS expects to get provisioned by the IUT. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_Procedure_Timeout + The time in seconds. (Default: 60) + INTEGER + 60 + + + TSPX_Client_BLOB_ID + Unique identifier for the BLOB being transferred by the BLOB Transfer Client IUT. (Default: 1) + OCTETSTRING + 1100000000000011 + + + TSPX_Client_BLOB_Data + BLOB being transferred by the IUT. + IA5STRING + + + + TSPX_TTL + TTL. (Default: 2) + INTEGER + 2 + + + TSPX_Server_Timeout_Base + Server Timeout Base state. (Default: 5) + INTEGER + 5 + + + TSPX_Firmware_ID + Firmware_ID used by device to varify firmware.(Default: 1) + OCTETSTRING + 11000011 + + + TSPX_Firmware_Metadata + Firmware metadata for the incoming firmware on the IUT. (Default: ) + OCTETSTRING + 1100000000000011 + + + TSPX_Firmware_Update_URI + URI used for test case execution. (Default: http://www.dummy.com) + IA5STRING + http://www.dummy.com + + + TSPX_New_Firmware_Image + Firmware being transferred by the IUT. + IA5STRING + + + + TSPX_Update_Firmware_Image_Index + Firmware Image Index value to identiy firmware used for testing. (Default: 0) + INTEGER + 0 + + + + DIS @@ -18192,6 +18506,266 @@ + + + MBTM + + + + TSPC_MBTM_ALL + Enables all test cases when set. 
+ FALSE + FALSE + + + TSPC_MBTM_0_1 + Mesh Binary Large Object Transfer Model v1.0 (M) + TRUE + TRUE + + + TSPC_MBTM_3_1 + BLOB Transfer Server model (C.1) + TRUE + FALSE + + + TSPC_MBTM_3_2 + BLOB Transfer Client model (C.1) + TRUE + FALSE + + + TSPC_MBTM_10_1 + Pull BLOB Transfer Mode (C.1) + TRUE + FALSE + + + TSPC_MBTM_10_2 + Push BLOB Transfer Mode (C.1) + TRUE + FALSE + + + TSPC_MBTM_20_1 + Pull BLOB Transfer Mode (M) + TRUE + TRUE + + + TSPC_MBTM_20_2 + Push BLOB Transfer Mode (M) + TRUE + TRUE + + + TSPC_ALL + Enables all test cases when set. + FALSE + FALSE + + + + + MBTM + 1.0 + + + TSPX_bd_addr_iut + The unique 48-bit Bluetooth device address (BD_ADDR) of the IUT. This was filled in during workspace creation. + OCTETSTRING + 000000000000 + + + TSPX_time_guard + The time in milliseconds, to break PTS from waiting for a required event, if time guard is used by the test case. (Default: 300000) + INTEGER + 300000 + + + TSPX_use_implicit_send + Whether to receive a message via Implicit Send when an action or attention is needed by test operator. (Default: TRUE) + BOOLEAN + TRUE + + + TSPX_tester_database_file + Location of SIG database file. (Default: C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml) + IA5STRING + C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml + + + TSPX_mtu_size + MTU size. (Default: 23) + INTEGER + 23 + + + TSPX_delete_link_key + Whether to delete link key or not. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_delete_ltk + Whether to delete Long Term Key or not. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_security_enabled + Whether to set security for the test suite. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_iut_setup_att_over_br_edr + IUT can optionally test ATT over BR/EDR connection when this flag is set when applicable. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_scan_interval + The scan interval value N*0.625ms. 
Default is setto fast scan interval.(Default: 30) + INTEGER + 30 + + + TSPX_scan_window + The scan window value N*0.625ms. Default is set to fast scan window.(Default: 30) + INTEGER + 30 + + + TSPX_scan_filter + The scan filter setting to allow accept filter accept list only device.(Default: 0 - Fileter none) + OCTETSTRING + 00 + + + TSPX_advertising_interval_min + The minimum advertising interval value N*0.625ms. PTS will using this value to set advertising parameters.(Default: 160) + INTEGER + 160 + + + TSPX_advertising_interval_max + The maximum advertising interval value N*0.625ms. PTS will using this value to set advertising parameters.(Default: 160) + INTEGER + 160 + + + TSPX_tester_OOB_information + The 16-bit tester OOB information value. PTS will advertise using this value when in peripheral role.(Default: F87F) + OCTETSTRING + F87F + + + TSPX_device_uuid + used for mesh beacon. (Default: 001BDC0810210B0E0A0C000B0E0A0C00) + OCTETSTRING + 001BDC0810210B0E0A0C000B0E0A0C00 + + + TSPX_device_uuid2 + used for tester2 mesh beacon. (Default: 00000000000000000000000000000000) + OCTETSTRING + 00000000000000000000000000000000 + + + TSPX_use_pb_gatt_bearer + PB ADV or PB GATT bearer for PROT test cases. (Default: TRUE - PB-ADV) + BOOLEAN + FALSE + + + TSPX_iut_comp_data_page + The page number of the composition data (Default: 0) + INTEGER + 0 + + + TSPX_oob_state_change + Indicates if the IUT supports out-of-band triggered state changes. If set to TRUE, then the IUT supports Upper Tester commands to trigger a state change. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_enable_IUT_provisioner + Indicates if the IUT performs provisioning for client cases. If set to TRUE, PTS expects to get provisioned by the IUT. (Default: FALSE) + BOOLEAN + FALSE + + + TSPX_Procedure_Timeout + The time in seconds. (Default: 60) + INTEGER + 60 + + + TSPX_Client_BLOB_ID + Unique identifier for the BLOB being transferred by the BLOB Transfer Client IUT. 
(Default: 1) + OCTETSTRING + 1100000000000011 + + + TSPX_Client_BLOB_Data + BLOB being transferred by the IUT. + IA5STRING + + + + TSPX_Transfer_TTL + TTL. (Default: 3) + INTEGER + 3 + + + TSPX_Client_Timeout_Base + Client Timeout Base state. (Default: 5) + INTEGER + 4 + + + TSPX_Server_Timeout_Base + Server Timeout Base state. (Default: 5) + INTEGER + 5 + + + TSPX_Firmware_ID + Firmware_ID used by device to varify firmware.(Default: 1) + OCTETSTRING + 11000011 + + + TSPX_Firmware_Metadata + Firmware metadata for the incoming firmware on the IUT. (Default: ) + OCTETSTRING + 1100000000000011 + + + TSPX_Firmware_Update_URI + URI used for test case execution. (Default: http://www.dummy.com) + IA5STRING + http://www.dummy.com + + + TSPX_New_Firmware_Image + Firmware being transferred by the IUT. + IA5STRING + + + + TSPX_Update_Firmware_Image_Index + Firmware Image Index value to identiy firmware used for testing. (Default: 0) + INTEGER + 0 + + + + MCP From 483956ae27fe6e15273aa21d1895bb931caaa492 Mon Sep 17 00:00:00 2001 From: m-alperen-sener Date: Wed, 10 Jul 2024 08:38:28 +0200 Subject: [PATCH 09/44] zephyr: mesh: apply TCRL-2024 changes for MBT and DFU profiles - With TCRL-2024-1 profiles MBT and DFU changed to MBTM and DFUM respectively. Updating the test case names and profile names. - TSPX_Reception_Counter and TSPX_Server_Timeout removed and separate TSPX_Client_Timeout_Base and TSPX_Server_Timeout_Base added for client and server. - Adding missing DFUM tests to zephyr iut_config. - Adjust the CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN/MAX according to MBTM test cases. - Increase the CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE to 1200 instead of default 800. 
Signed-off-by: m-alperen-sener --- autopts/bot/iut_config/zephyr.py | 281 +++++++++--------- autopts/ptsprojects/stack/layers/mesh.py | 8 +- autopts/ptsprojects/zephyr/__init__.py | 4 +- .../ptsprojects/zephyr/{dfu.py => dfum.py} | 117 ++++---- .../ptsprojects/zephyr/{mbt.py => mbtm.py} | 94 +++--- autopts/wid/mmdl.py | 18 +- 6 files changed, 267 insertions(+), 255 deletions(-) rename autopts/ptsprojects/zephyr/{dfu.py => dfum.py} (55%) rename autopts/ptsprojects/zephyr/{mbt.py => mbtm.py} (58%) diff --git a/autopts/bot/iut_config/zephyr.py b/autopts/bot/iut_config/zephyr.py index b4315c6680..7e32a0f06b 100644 --- a/autopts/bot/iut_config/zephyr.py +++ b/autopts/bot/iut_config/zephyr.py @@ -174,6 +174,7 @@ 'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', 'CONFIG_SETTINGS_NVS': 'y', + 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200' }, "test_cases": [ 'MESH/SR/RPR/PDU/BV-02-C', @@ -197,114 +198,117 @@ 'CONFIG_FLASH_PAGE_LAYOUT': 'y', 'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', + 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', 'CONFIG_BT_MESH_DFD_SRV': 'y', 'CONFIG_BT_MESH_DFU_SRV': 'y', 'CONFIG_BT_MESH_DFD_SRV_OOB_UPLOAD': 'y', 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', - 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN': '1024', - 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MAX': '1024' + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN': '256', + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MAX': '256' }, "test_cases": [ - 'DFU/CL/FU/BV-01-C', - 'DFU/CL/FU/BV-02-C', - 'DFU/CL/FU/BV-03-C', - 'DFU/SR/FD/BV-01-C', - 'DFU/SR/FD/BV-02-C', - 'DFU/SR/FD/BV-03-C', - 'DFU/SR/FD/BV-04-C', - 'DFU/SR/FD/BV-05-C', - 'DFU/SR/FD/BV-06-C', - 'DFU/SR/FD/BV-07-C', - 'DFU/SR/FD/BV-08-C', - 'DFU/SR/FD/BV-09-C', - 'DFU/SR/FD/BV-10-C', - 'DFU/SR/FD/BV-11-C', - 'DFU/SR/FD/BV-12-C', - 'DFU/SR/FD/BV-13-C', - 'DFU/SR/FD/BV-14-C', - 'DFU/SR/FD/BV-15-C', - 'DFU/SR/FD/BV-16-C', - 'DFU/SR/FD/BV-17-C', - 'DFU/SR/FD/BV-18-C', - 'DFU/SR/FD/BV-19-C', - 'DFU/SR/FD/BV-20-C', - 'DFU/SR/FD/BV-21-C', - 'DFU/SR/FD/BV-22-C', - 'DFU/SR/FD/BV-23-C', - 
'DFU/SR/FD/BV-24-C', - 'DFU/SR/FD/BV-25-C', - 'DFU/SR/FD/BV-26-C', - 'DFU/SR/FD/BV-27-C', - 'DFU/SR/FD/BV-28-C', - 'DFU/SR/FD/BV-29-C', - 'DFU/SR/FD/BV-30-C', - 'DFU/SR/FD/BV-31-C', - 'DFU/SR/FD/BV-32-C', - 'DFU/SR/FD/BV-33-C', - 'DFU/SR/FD/BV-34-C', - 'DFU/SR/FD/BV-59-C', - 'DFU/SR/FD/BV-50-C', - 'DFU/SR/FD/BV-35-C', - 'DFU/SR/FD/BV-36-C', - 'DFU/SR/FD/BV-37-C', - 'DFU/SR/FD/BV-38-C', - 'DFU/SR/FD/BV-39-C', - 'DFU/SR/FD/BV-40-C', - 'DFU/SR/FD/BV-41-C', - 'DFU/SR/FD/BV-51-C', - 'DFU/SR/FD/BV-42-C', - 'DFU/SR/FD/BV-43-C', - 'DFU/SR/FD/BV-44-C', - 'DFU/SR/FD/BV-45-C', - 'DFU/SR/FD/BV-46-C', - 'DFU/SR/FD/BV-49-C', - 'DFU/SR/FD/BV-52-C', - 'DFU/SR/FD/BV-53-C', - 'DFU/SR/FD/BV-54-C', - 'DFU/SR/FD/BV-55-C', - 'DFU/SR/FD/BV-56-C', - 'DFU/SR/FD/BV-57-C', - 'DFU/SR/FD/BV-58-C', - 'MBT/SR/BT/BV-01-C', - 'MBT/SR/BT/BV-02-C', - 'MBT/SR/BT/BV-03-C', - 'MBT/SR/BT/BV-04-C', - 'MBT/SR/BT/BV-05-C', - 'MBT/SR/BT/BV-06-C', - 'MBT/SR/BT/BV-07-C', - 'MBT/SR/BT/BV-08-C', - 'MBT/SR/BT/BV-09-C', - 'MBT/SR/BT/BV-10-C', - 'MBT/SR/BT/BV-11-C', - 'MBT/SR/BT/BV-12-C', - 'MBT/SR/BT/BV-13-C', - 'MBT/SR/BT/BV-14-C', - 'MBT/SR/BT/BV-15-C', - 'MBT/SR/BT/BV-16-C', - 'MBT/SR/BT/BV-17-C', - 'MBT/SR/BT/BV-18-C', - 'MBT/SR/BT/BV-19-C', - 'MBT/SR/BT/BV-20-C', - 'MBT/SR/BT/BV-21-C', - 'MBT/SR/BT/BV-22-C', - 'MBT/SR/BT/BV-23-C', - 'MBT/SR/BT/BV-24-C', - 'MBT/SR/BT/BV-25-C', - 'MBT/SR/BT/BV-26-C', - 'MBT/SR/BT/BV-27-C', - 'MBT/SR/BT/BV-28-C', - 'MBT/SR/BT/BV-29-C', - 'MBT/SR/BT/BV-30-C', - 'MBT/SR/BT/BV-31-C', - 'MBT/SR/BT/BV-32-C', - 'MBT/SR/BT/BV-33-C', - 'MBT/SR/BT/BV-34-C', - 'MBT/SR/BT/BV-35-C', - 'MBT/SR/BT/BV-36-C', - 'MBT/SR/BT/BV-37-C', - 'MBT/SR/BT/BV-38-C', - 'MBT/SR/BT/BI-01-C', - 'MBT/SR/BT/BI-02-C', + 'DFU/SR-CL/GEN/BV-01-C' + 'DFUM/CL/FU/BV-01-C', + 'DFUM/CL/FU/BV-02-C', + 'DFUM/CL/FU/BV-03-C', + 'DFUM/SR/FD/BV-01-C', + 'DFUM/SR/FD/BV-02-C', + 'DFUM/SR/FD/BV-03-C', + 'DFUM/SR/FD/BV-04-C', + 'DFUM/SR/FD/BV-05-C', + 'DFUM/SR/FD/BV-06-C', + 'DFUM/SR/FD/BV-07-C', + 'DFUM/SR/FD/BV-08-C', + 
'DFUM/SR/FD/BV-09-C', + 'DFUM/SR/FD/BV-10-C', + 'DFUM/SR/FD/BV-11-C', + 'DFUM/SR/FD/BV-12-C', + 'DFUM/SR/FD/BV-13-C', + 'DFUM/SR/FD/BV-14-C', + 'DFUM/SR/FD/BV-15-C', + 'DFUM/SR/FD/BV-16-C', + 'DFUM/SR/FD/BV-17-C', + 'DFUM/SR/FD/BV-18-C', + 'DFUM/SR/FD/BV-19-C', + 'DFUM/SR/FD/BV-20-C', + 'DFUM/SR/FD/BV-21-C', + 'DFUM/SR/FD/BV-22-C', + 'DFUM/SR/FD/BV-23-C', + 'DFUM/SR/FD/BV-24-C', + 'DFUM/SR/FD/BV-25-C', + 'DFUM/SR/FD/BV-26-C', + 'DFUM/SR/FD/BV-27-C', + 'DFUM/SR/FD/BV-28-C', + 'DFUM/SR/FD/BV-29-C', + 'DFUM/SR/FD/BV-30-C', + 'DFUM/SR/FD/BV-31-C', + 'DFUM/SR/FD/BV-32-C', + 'DFUM/SR/FD/BV-33-C', + 'DFUM/SR/FD/BV-34-C', + 'DFUM/SR/FD/BV-59-C', + 'DFUM/SR/FD/BV-50-C', + 'DFUM/SR/FD/BV-35-C', + 'DFUM/SR/FD/BV-36-C', + 'DFUM/SR/FD/BV-37-C', + 'DFUM/SR/FD/BV-38-C', + 'DFUM/SR/FD/BV-39-C', + 'DFUM/SR/FD/BV-40-C', + 'DFUM/SR/FD/BV-41-C', + 'DFUM/SR/FD/BV-51-C', + 'DFUM/SR/FD/BV-42-C', + 'DFUM/SR/FD/BV-43-C', + 'DFUM/SR/FD/BV-44-C', + 'DFUM/SR/FD/BV-45-C', + 'DFUM/SR/FD/BV-46-C', + 'DFUM/SR/FD/BV-47-C', + 'DFUM/SR/FD/BV-49-C', + 'DFUM/SR/FD/BV-52-C', + 'DFUM/SR/FD/BV-53-C', + 'DFUM/SR/FD/BV-54-C', + 'DFUM/SR/FD/BV-55-C', + 'DFUM/SR/FD/BV-56-C', + 'DFUM/SR/FD/BV-57-C', + 'DFUM/SR/FD/BV-58-C', + 'MBTM/SR/BT/BV-01-C', + 'MBTM/SR/BT/BV-02-C', + 'MBTM/SR/BT/BV-03-C', + 'MBTM/SR/BT/BV-04-C', + 'MBTM/SR/BT/BV-05-C', + 'MBTM/SR/BT/BV-06-C', + 'MBTM/SR/BT/BV-07-C', + 'MBTM/SR/BT/BV-08-C', + 'MBTM/SR/BT/BV-09-C', + 'MBTM/SR/BT/BV-10-C', + 'MBTM/SR/BT/BV-11-C', + 'MBTM/SR/BT/BV-12-C', + 'MBTM/SR/BT/BV-13-C', + 'MBTM/SR/BT/BV-14-C', + 'MBTM/SR/BT/BV-15-C', + 'MBTM/SR/BT/BV-16-C', + 'MBTM/SR/BT/BV-17-C', + 'MBTM/SR/BT/BV-18-C', + 'MBTM/SR/BT/BV-19-C', + 'MBTM/SR/BT/BV-20-C', + 'MBTM/SR/BT/BV-21-C', + 'MBTM/SR/BT/BV-22-C', + 'MBTM/SR/BT/BV-23-C', + 'MBTM/SR/BT/BV-24-C', + 'MBTM/SR/BT/BV-25-C', + 'MBTM/SR/BT/BV-26-C', + 'MBTM/SR/BT/BV-27-C', + 'MBTM/SR/BT/BV-28-C', + 'MBTM/SR/BT/BV-29-C', + 'MBTM/SR/BT/BV-30-C', + 'MBTM/SR/BT/BV-31-C', + 'MBTM/SR/BT/BV-32-C', + 'MBTM/SR/BT/BV-33-C', + 
'MBTM/SR/BT/BV-34-C', + 'MBTM/SR/BT/BV-35-C', + 'MBTM/SR/BT/BV-36-C', + 'MBTM/SR/BT/BV-37-C', + 'MBTM/SR/BT/BV-38-C', + 'MBTM/SR/BT/BI-01-C', + 'MBTM/SR/BT/BI-02-C', ] }, @@ -316,42 +320,43 @@ 'CONFIG_FLASH_PAGE_LAYOUT': 'y', 'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', + 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', 'CONFIG_BT_MESH_DFD_SRV': 'n' }, "test_cases": [ - 'DFU/SR/FU/BV-01-C', - 'DFU/SR/FU/BV-02-C', - 'DFU/SR/FU/BV-03-C', - 'DFU/SR/FU/BV-04-C', - 'DFU/SR/FU/BV-05-C', - 'DFU/SR/FU/BV-06-C', - 'DFU/SR/FU/BV-07-C', - 'DFU/SR/FU/BV-08-C', - 'DFU/SR/FU/BV-09-C', - 'DFU/SR/FU/BV-10-C', - 'DFU/SR/FU/BV-11-C', - 'DFU/SR/FU/BV-12-C', - 'DFU/SR/FU/BV-13-C', - 'DFU/SR/FU/BV-14-C', - 'DFU/SR/FU/BV-15-C', - 'DFU/SR/FU/BV-16-C', - 'DFU/SR/FU/BV-17-C', - 'DFU/SR/FU/BV-18-C', - 'DFU/SR/FU/BV-19-C', - 'DFU/SR/FU/BV-20-C', - 'DFU/SR/FU/BV-21-C', - 'DFU/SR/FU/BV-22-C', - 'DFU/SR/FU/BV-23-C', - 'DFU/SR/FU/BV-24-C', - 'DFU/SR/FU/BV-27-C', - 'MBT/CL/BT/BV-01-C', - 'MBT/CL/BT/BV-02-C', - 'MBT/CL/BT/BV-03-C', - 'MBT/CL/BT/BV-04-C', - 'MBT/CL/BT/BV-05-C', - 'MBT/CL/BT/BV-06-C', - 'MBT/CL/BT/BV-07-C', - 'MBT/CL/BT/BV-08-C' + 'DFUM/SR/FU/BV-01-C', + 'DFUM/SR/FU/BV-02-C', + 'DFUM/SR/FU/BV-03-C', + 'DFUM/SR/FU/BV-04-C', + 'DFUM/SR/FU/BV-05-C', + 'DFUM/SR/FU/BV-06-C', + 'DFUM/SR/FU/BV-07-C', + 'DFUM/SR/FU/BV-08-C', + 'DFUM/SR/FU/BV-09-C', + 'DFUM/SR/FU/BV-10-C', + 'DFUM/SR/FU/BV-11-C', + 'DFUM/SR/FU/BV-12-C', + 'DFUM/SR/FU/BV-13-C', + 'DFUM/SR/FU/BV-14-C', + 'DFUM/SR/FU/BV-15-C', + 'DFUM/SR/FU/BV-16-C', + 'DFUM/SR/FU/BV-17-C', + 'DFUM/SR/FU/BV-18-C', + 'DFUM/SR/FU/BV-19-C', + 'DFUM/SR/FU/BV-20-C', + 'DFUM/SR/FU/BV-21-C', + 'DFUM/SR/FU/BV-22-C', + 'DFUM/SR/FU/BV-23-C', + 'DFUM/SR/FU/BV-24-C', + 'DFUM/SR/FU/BV-27-C', + 'MBTM/CL/BT/BV-01-C', + 'MBTM/CL/BT/BV-02-C', + 'MBTM/CL/BT/BV-03-C', + 'MBTM/CL/BT/BV-04-C', + 'MBTM/CL/BT/BV-05-C', + 'MBTM/CL/BT/BV-06-C', + 'MBTM/CL/BT/BV-07-C', + 'MBTM/CL/BT/BV-08-C' ] }, @@ -363,10 +368,12 @@ 'CONFIG_FLASH_PAGE_LAYOUT': 'y', 
'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', - 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000' + 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', + 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', }, "test_cases": [ - 'DFU/SR/FD/BV-48-C', + 'DFUM/SR/FD/BV-48-C', + 'DFUM/SR/FD/BV-59-C' ] }, diff --git a/autopts/ptsprojects/stack/layers/mesh.py b/autopts/ptsprojects/stack/layers/mesh.py index 100cb5f2b5..498520cd9d 100644 --- a/autopts/ptsprojects/stack/layers/mesh.py +++ b/autopts/ptsprojects/stack/layers/mesh.py @@ -186,11 +186,11 @@ def set_iut_provisioner(self, _is_prov): def set_iut_addr(self, _addr): self.address_iut = _addr - def timeout_set(self, timeout): - self.timeout = timeout + def timeout_base_set(self, timeout): + self.timeout_base = timeout - def timeout_get(self): - return self.timeout + def timeout_base_get(self): + return self.timeout_base def transfer_ttl_set(self, ttl): self.transfer_ttl = ttl diff --git a/autopts/ptsprojects/zephyr/__init__.py b/autopts/ptsprojects/zephyr/__init__.py index b97ceb4c65..30798ccc9d 100644 --- a/autopts/ptsprojects/zephyr/__init__.py +++ b/autopts/ptsprojects/zephyr/__init__.py @@ -36,8 +36,8 @@ import autopts.ptsprojects.zephyr.mics import autopts.ptsprojects.zephyr.ccp import autopts.ptsprojects.zephyr.vcp -import autopts.ptsprojects.zephyr.dfu -import autopts.ptsprojects.zephyr.mbt +import autopts.ptsprojects.zephyr.dfum +import autopts.ptsprojects.zephyr.mbtm import autopts.ptsprojects.zephyr.cas import autopts.ptsprojects.zephyr.cap import autopts.ptsprojects.zephyr.mcp diff --git a/autopts/ptsprojects/zephyr/dfu.py b/autopts/ptsprojects/zephyr/dfum.py similarity index 55% rename from autopts/ptsprojects/zephyr/dfu.py rename to autopts/ptsprojects/zephyr/dfum.py index b5733219f6..e4c54d2ba0 100644 --- a/autopts/ptsprojects/zephyr/dfu.py +++ b/autopts/ptsprojects/zephyr/dfum.py @@ -40,48 +40,47 @@ def set_pixits(ptses): pts = ptses[0] - pts.set_pixit("DFU", "TSPX_bd_addr_iut", "DEADBEEFDEAD") - pts.set_pixit("DFU", 
"TSPX_time_guard", "300000") - pts.set_pixit("DFU", "TSPX_use_implicit_send", "TRUE") - pts.set_pixit("DFU", "TSPX_tester_database_file", + pts.set_pixit("DFUM", "TSPX_bd_addr_iut", "DEADBEEFDEAD") + pts.set_pixit("DFUM", "TSPX_time_guard", "300000") + pts.set_pixit("DFUM", "TSPX_use_implicit_send", "TRUE") + pts.set_pixit("DFUM", "TSPX_tester_database_file", r"C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml") - pts.set_pixit("DFU", "TSPX_mtu_size", "23") - pts.set_pixit("DFU", "TSPX_delete_link_key", "TRUE") - pts.set_pixit("DFU", "TSPX_delete_ltk", "TRUE") - pts.set_pixit("DFU", "TSPX_security_enabled", "FALSE") - pts.set_pixit("DFU", "TSPX_iut_setup_att_over_br_edr", "FALSE") - pts.set_pixit("DFU", "TSPX_scan_interval", "30") - pts.set_pixit("DFU", "TSPX_scan_window", "30") - pts.set_pixit("DFU", "TSPX_scan_filter", "00") - pts.set_pixit("DFU", "TSPX_advertising_interval_min", "160") - pts.set_pixit("DFU", "TSPX_advertising_interval_max", "160") - pts.set_pixit("DFU", "TSPX_tester_OOB_information", "F87F") - pts.set_pixit("DFU", "TSPX_device_uuid", "00000000000000000000000000000000") - pts.set_pixit("DFU", "TSPX_device_uuid2", "001BDC0810210B0E0A0C000B0E0A0C00") - pts.set_pixit("DFU", "TSPX_use_pb_gatt_bearer", "FALSE") - pts.set_pixit("DFU", "TSPX_iut_comp_data_page", "2") - pts.set_pixit("DFU", "TSPX_oob_state_change", "FALSE") - pts.set_pixit("DFU", "TSPX_enable_IUT_provisioner", "FALSE") - pts.set_pixit("DFU", "TSPX_Procedure_Timeout", "60") - pts.set_pixit("DFU", "TSPX_Client_BLOB_ID", "1100000000000011") - pts.set_pixit("DFU", "TSPX_Client_BLOB_Data", r"data.txt") - pts.set_pixit("DFU", "TSPX_TTL", "2") - pts.set_pixit("DFU", "TSPX_Reception_Counter", "1") - pts.set_pixit("DFU", "TSPX_Server_Timeout", "10") - pts.set_pixit("DFU", "TSPX_Firmware_ID", "11000011") - pts.set_pixit("DFU", "TSPX_Firmware_Metadata", "1100000000000011") - pts.set_pixit("DFU", "TSPX_Firmware_Update_URI", "http://www.dummy.com") - pts.set_pixit("DFU", 
"TSPX_New_Firmware_Image", r"data2.txt") - pts.set_pixit("DFU", "TSPX_Update_Firmware_Image_Index", "0") + pts.set_pixit("DFUM", "TSPX_mtu_size", "23") + pts.set_pixit("DFUM", "TSPX_delete_link_key", "TRUE") + pts.set_pixit("DFUM", "TSPX_delete_ltk", "TRUE") + pts.set_pixit("DFUM", "TSPX_security_enabled", "FALSE") + pts.set_pixit("DFUM", "TSPX_iut_setup_att_over_br_edr", "FALSE") + pts.set_pixit("DFUM", "TSPX_scan_interval", "30") + pts.set_pixit("DFUM", "TSPX_scan_window", "30") + pts.set_pixit("DFUM", "TSPX_scan_filter", "00") + pts.set_pixit("DFUM", "TSPX_advertising_interval_min", "160") + pts.set_pixit("DFUM", "TSPX_advertising_interval_max", "160") + pts.set_pixit("DFUM", "TSPX_tester_OOB_information", "F87F") + pts.set_pixit("DFUM", "TSPX_device_uuid", "00000000000000000000000000000000") + pts.set_pixit("DFUM", "TSPX_device_uuid2", "001BDC0810210B0E0A0C000B0E0A0C00") + pts.set_pixit("DFUM", "TSPX_use_pb_gatt_bearer", "FALSE") + pts.set_pixit("DFUM", "TSPX_iut_comp_data_page", "2") + pts.set_pixit("DFUM", "TSPX_oob_state_change", "FALSE") + pts.set_pixit("DFUM", "TSPX_enable_IUT_provisioner", "FALSE") + pts.set_pixit("DFUM", "TSPX_Procedure_Timeout", "60") + pts.set_pixit("DFUM", "TSPX_Client_BLOB_ID", "1100000000000011") + pts.set_pixit("DFUM", "TSPX_Client_BLOB_Data", r"data.txt") + pts.set_pixit("DFUM", "TSPX_TTL", "2") + pts.set_pixit("MBTM", "TSPX_Server_Timeout_Base", "5") + pts.set_pixit("DFUM", "TSPX_Firmware_ID", "11000011") + pts.set_pixit("DFUM", "TSPX_Firmware_Metadata", "1100000000000011") + pts.set_pixit("DFUM", "TSPX_Firmware_Update_URI", "http://www.dummy.com") + pts.set_pixit("DFUM", "TSPX_New_Firmware_Image", r"data2.txt") + pts.set_pixit("DFUM", "TSPX_Update_Firmware_Image_Index", "0") def test_cases(ptses): - """Returns a list of MMDL test cases + """Returns a list of DFUM test cases ptses -- list of PyPTS instances""" pts = ptses[0] - if 'DFU' not in pts.get_project_list(): + if 'DFUM' not in pts.get_project_list(): return [] stack = 
get_stack() @@ -104,7 +103,8 @@ def test_cases(ptses): crpl_size = 10 # Maximum capacity of the replay protection list auth_method = 0x00 iut_device_name = get_unique_name(pts) - FD_timeout = 80 + FD_timeout = 8 + timeout_base = 5 pre_conditions = [ TestFunc(btp.core_reg_svc_gap), @@ -117,58 +117,61 @@ def test_cases(ptses): TestFunc(lambda: stack.mesh.set_prov_data(oob, out_size, rand_out_actions, in_size, rand_in_actions, crpl_size, auth_method)), TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_device_uuid", stack.mesh.get_dev_uuid())), + "DFUM", "TSPX_device_uuid", stack.mesh.get_dev_uuid())), TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_device_uuid2", stack.mesh.get_dev_uuid_lt2())), + "DFUM", "TSPX_device_uuid2", stack.mesh.get_dev_uuid_lt2())), TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_bd_addr_iut", + "DFUM", "TSPX_bd_addr_iut", stack.gap.iut_addr_get_str())), TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_Client_BLOB_Data", + "DFUM", "TSPX_Client_BLOB_Data", get_test_data_path(pts) + "sample_data_1.txt")), TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_New_Firmware_Image", - get_test_data_path(pts) + "sample_data_1.txt"))] + "DFUM", "TSPX_New_Firmware_Image", + get_test_data_path(pts) + "sample_data_1.txt")), + TestFunc(lambda: pts.update_pixit_param( + "DFUM", "TSPX_Server_Timeout_Base", timeout_base)), + TestFunc(lambda: stack.mesh.timeout_base_set(timeout_base))] custom_test_cases = [ - ZTestCase("DFU", "DFU/SR/FD/BV-05-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FD/BV-05-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_New_Firmware_Image", + "DFUM", "TSPX_New_Firmware_Image", get_test_data_path(pts) + "sample_data_2.txt"))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FD/BV-07-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FD/BV-07-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_Server_Timeout", 
FD_timeout))], + "DFUM", "TSPX_Server_Timeout_Base", FD_timeout))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FD/BV-19-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FD/BV-19-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_Server_Timeout", FD_timeout))], + "DFUM", "TSPX_Server_Timeout_Base", FD_timeout))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FD/BV-48-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FD/BV-48-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_Firmware_ID", "010000000100000000000000"))], + "DFUM", "TSPX_Firmware_ID", "010000000100000000000000"))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FU/BV-24-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FU/BV-24-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_New_Firmware_Image", + "DFUM", "TSPX_New_Firmware_Image", get_test_data_path(pts) + "sample_data_2.txt"))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FU/BV-27-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FU/BV-27-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_New_Firmware_Image", + "DFUM", "TSPX_New_Firmware_Image", get_test_data_path(pts) + "sample_data_2.txt"))], generic_wid_hdl=mmdl_wid_hdl), - ZTestCase("DFU", "DFU/SR/FD/BV-13-C", cmds=pre_conditions + [ + ZTestCase("DFUM", "DFUM/SR/FD/BV-13-C", cmds=pre_conditions + [ TestFunc(lambda: pts.update_pixit_param( - "DFU", "TSPX_Server_Timeout", FD_timeout))], + "DFUM", "TSPX_Server_Timeout_Base", FD_timeout))], generic_wid_hdl=mmdl_wid_hdl), ] - test_case_name_list = pts.get_test_case_list('DFU') + test_case_name_list = pts.get_test_case_list('DFUM') tc_list = [] for tc_name in test_case_name_list: - instance = ZTestCase('DFU', tc_name, + instance = ZTestCase('DFUM', tc_name, cmds=pre_conditions, generic_wid_hdl=mmdl_wid_hdl) diff --git 
a/autopts/ptsprojects/zephyr/mbt.py b/autopts/ptsprojects/zephyr/mbtm.py similarity index 58% rename from autopts/ptsprojects/zephyr/mbt.py rename to autopts/ptsprojects/zephyr/mbtm.py index 496a351ed2..8366439ca3 100644 --- a/autopts/ptsprojects/zephyr/mbt.py +++ b/autopts/ptsprojects/zephyr/mbtm.py @@ -40,52 +40,53 @@ def set_pixits(ptses): pts = ptses[0] - pts.set_pixit("MBT", "TSPX_bd_addr_iut", "DEADBEEFDEAD") - pts.set_pixit("MBT", "TSPX_time_guard", "300000") - pts.set_pixit("MBT", "TSPX_use_implicit_send", "TRUE") - pts.set_pixit("MBT", "TSPX_tester_database_file", + pts.set_pixit("MBTM", "TSPX_bd_addr_iut", "DEADBEEFDEAD") + pts.set_pixit("MBTM", "TSPX_time_guard", "300000") + pts.set_pixit("MBTM", "TSPX_use_implicit_send", "TRUE") + pts.set_pixit("MBTM", "TSPX_tester_database_file", r"C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\PTS_SMPP_db.xml") - pts.set_pixit("MBT", "TSPX_mtu_size", "23") - pts.set_pixit("MBT", "TSPX_delete_link_key", "TRUE") - pts.set_pixit("MBT", "TSPX_delete_ltk", "TRUE") - pts.set_pixit("MBT", "TSPX_security_enabled", "FALSE") - pts.set_pixit("MBT", "TSPX_iut_setup_att_over_br_edr", "FALSE") - pts.set_pixit("MBT", "TSPX_scan_interval", "30") - pts.set_pixit("MBT", "TSPX_scan_window", "30") - pts.set_pixit("MBT", "TSPX_scan_filter", "00") - pts.set_pixit("MBT", "TSPX_advertising_interval_min", "160") - pts.set_pixit("MBT", "TSPX_advertising_interval_max", "160") - pts.set_pixit("MBT", "TSPX_tester_OOB_information", "F87F") - pts.set_pixit("MBT", "TSPX_device_uuid", "00000000000000000000000000000000") - pts.set_pixit("MBT", "TSPX_device_uuid2", "001BDC0810210B0E0A0C000B0E0A0C00") - pts.set_pixit("MBT", "TSPX_use_pb_gatt_bearer", "FALSE") - pts.set_pixit("MBT", "TSPX_iut_comp_data_page", "2") - pts.set_pixit("MBT", "TSPX_oob_state_change", "FALSE") - pts.set_pixit("MBT", "TSPX_enable_IUT_provisioner", "FALSE") - pts.set_pixit("MBT", "TSPX_Procedure_Timeout", "60") - pts.set_pixit("MBT", "TSPX_Client_BLOB_ID", 
"1100000000000011") - pts.set_pixit("MBT", "TSPX_Client_BLOB_Data", r"data.txt") - pts.set_pixit("MBT", "TSPX_Reception_Counter", "1") - pts.set_pixit("MBT", "TSPX_Server_Timeout", "20") - pts.set_pixit("MBT", "TSPX_Transfer_TTL", "3") - pts.set_pixit("MBT", "TSPX_Firmware_ID", "11000011") - pts.set_pixit("MBT", "TSPX_Firmware_Metadata", "1100000000000011") - pts.set_pixit("MBT", "TSPX_Firmware_Update_URI", "http://www.dummy.com") - pts.set_pixit("MBT", "TSPX_New_Firmware_Image", r"data2.txt") - pts.set_pixit("MBT", "TSPX_Update_Firmware_Image_Index", "0") + pts.set_pixit("MBTM", "TSPX_mtu_size", "23") + pts.set_pixit("MBTM", "TSPX_delete_link_key", "TRUE") + pts.set_pixit("MBTM", "TSPX_delete_ltk", "TRUE") + pts.set_pixit("MBTM", "TSPX_security_enabled", "FALSE") + pts.set_pixit("MBTM", "TSPX_iut_setup_att_over_br_edr", "FALSE") + pts.set_pixit("MBTM", "TSPX_scan_interval", "30") + pts.set_pixit("MBTM", "TSPX_scan_window", "30") + pts.set_pixit("MBTM", "TSPX_scan_filter", "00") + pts.set_pixit("MBTM", "TSPX_advertising_interval_min", "160") + pts.set_pixit("MBTM", "TSPX_advertising_interval_max", "160") + pts.set_pixit("MBTM", "TSPX_tester_OOB_information", "F87F") + pts.set_pixit("MBTM", "TSPX_device_uuid", "00000000000000000000000000000000") + pts.set_pixit("MBTM", "TSPX_device_uuid2", "001BDC0810210B0E0A0C000B0E0A0C00") + pts.set_pixit("MBTM", "TSPX_use_pb_gatt_bearer", "FALSE") + pts.set_pixit("MBTM", "TSPX_iut_comp_data_page", "2") + pts.set_pixit("MBTM", "TSPX_oob_state_change", "FALSE") + pts.set_pixit("MBTM", "TSPX_enable_IUT_provisioner", "FALSE") + pts.set_pixit("MBTM", "TSPX_Procedure_Timeout", "60") + pts.set_pixit("MBTM", "TSPX_Client_BLOB_ID", "1100000000000011") + pts.set_pixit("MBTM", "TSPX_Client_BLOB_Data", r"data.txt") + pts.set_pixit("MBTM", "TSPX_Server_Timeout_Base", "5") + pts.set_pixit("MBTM", "TSPX_Client_Timeout_Base", "5") + pts.set_pixit("MBTM", "TSPX_Transfer_TTL", "3") + pts.set_pixit("MBTM", "TSPX_Firmware_ID", "11000011") + 
pts.set_pixit("MBTM", "TSPX_Firmware_Metadata", "1100000000000011") + pts.set_pixit("MBTM", "TSPX_Firmware_Update_URI", "http://www.dummy.com") + pts.set_pixit("MBTM", "TSPX_New_Firmware_Image", r"data2.txt") + pts.set_pixit("MBTM", "TSPX_Update_Firmware_Image_Index", "0") def test_cases(ptses): - """Returns a list of MMDL test cases + """Returns a list of MBTM test cases ptses -- list of PyPTS instances""" pts = ptses[0] - if 'MBT' not in pts.get_project_list(): + if 'MBTM' not in pts.get_project_list(): return [] stack = get_stack() + device_uuid = hexlify(uuid4().bytes) device_uuid2 = hexlify(uuid4().bytes) @@ -104,8 +105,7 @@ def test_cases(ptses): crpl_size = 10 # Maximum capacity of the replay protection list auth_method = 0x00 iut_device_name = get_unique_name(pts) - timeout = 20 - timeout_base = (timeout // 10) - 1 + timeout_base = 5 transfer_ttl = 3 FD_timeout = 80 @@ -120,32 +120,34 @@ def test_cases(ptses): TestFunc(lambda: stack.mesh.set_prov_data(oob, out_size, rand_out_actions, in_size, rand_in_actions, crpl_size, auth_method)), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_device_uuid", stack.mesh.get_dev_uuid())), + "MBTM", "TSPX_device_uuid", stack.mesh.get_dev_uuid())), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_device_uuid2", stack.mesh.get_dev_uuid_lt2())), + "MBTM", "TSPX_device_uuid2", stack.mesh.get_dev_uuid_lt2())), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_bd_addr_iut", + "MBTM", "TSPX_bd_addr_iut", stack.gap.iut_addr_get_str())), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_Client_BLOB_Data", + "MBTM", "TSPX_Client_BLOB_Data", get_test_data_path(pts) + "sample_data_1.txt")), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_New_Firmware_Image", + "MBTM", "TSPX_New_Firmware_Image", get_test_data_path(pts) + "sample_data_1.txt")), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_Server_Timeout", timeout)), - TestFunc(lambda: stack.mesh.timeout_set(timeout_base)), + "MBTM", 
"TSPX_Server_Timeout_Base", timeout_base)), + TestFunc(lambda: pts.update_pixit_param( + "MBTM", "TSPX_Client_Timeout_Base", timeout_base)), + TestFunc(lambda: stack.mesh.timeout_base_set(timeout_base)), TestFunc(lambda: pts.update_pixit_param( - "MBT", "TSPX_Transfer_TTL", transfer_ttl)), + "MBTM", "TSPX_Transfer_TTL", transfer_ttl)), TestFunc(lambda: stack.mesh.transfer_ttl_set(transfer_ttl))] custom_test_cases = [] - test_case_name_list = pts.get_test_case_list('MBT') + test_case_name_list = pts.get_test_case_list('MBTM') tc_list = [] for tc_name in test_case_name_list: - instance = ZTestCase('MBT', tc_name, + instance = ZTestCase('MBTM', tc_name, cmds=pre_conditions, generic_wid_hdl=mmdl_wid_hdl) diff --git a/autopts/wid/mmdl.py b/autopts/wid/mmdl.py index 05689d19a1..24cb7e74fe 100644 --- a/autopts/wid/mmdl.py +++ b/autopts/wid/mmdl.py @@ -2188,11 +2188,11 @@ def hdl_wid_850(params: WIDParams): Please initiate the transfer of the test object to the Lower Tester. """ # Do not execute this wid, it's automatically send as part of FU procedure - if params.test_case_name == 'DFU/CL/FU/BV-02-C': + if params.test_case_name == 'DFUM/CL/FU/BV-02-C': return True stack = get_stack() - timeout = stack.mesh.timeout_get() + timeout_base = stack.mesh.timeout_base_get() ttl = stack.mesh.transfer_ttl_get() addr = ["0001"] id = 0x1100000000000011 @@ -2204,7 +2204,7 @@ def hdl_wid_850(params: WIDParams): time.sleep(5) - btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout, ttl, blob_data_size) + btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout_base, ttl, blob_data_size) return True @@ -2214,7 +2214,7 @@ def hdl_wid_851(_: WIDParams): Please initiate the transfer of the test object to the Lower Tester. 
""" stack = get_stack() - timeout = stack.mesh.timeout_get() + timeout_base = stack.mesh.timeout_base_get() ttl = stack.mesh.transfer_ttl_get() addr = ["0001"] id = 0x1100000000000011 @@ -2226,7 +2226,7 @@ def hdl_wid_851(_: WIDParams): time.sleep(5) - btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout, ttl, blob_data_size) + btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout_base, ttl, blob_data_size) return True @@ -2246,12 +2246,12 @@ def hdl_wid_852(params: WIDParams): def hdl_wid_853(_: WIDParams): stack = get_stack() - timeout = stack.mesh.timeout_get() + timeout_base = stack.mesh.timeout_base_get() ttl = stack.mesh.transfer_ttl_get() id = 0x1100000000000011 btp.mesh_store_model_data() - btp.mmdl_blob_srv_recv(id, timeout, ttl) + btp.mmdl_blob_srv_recv(id, timeout_base, ttl) return True @@ -2275,7 +2275,7 @@ def hdl_wid_855(_: WIDParams): def hdl_wid_856(params: WIDParams): stack = get_stack() - timeout = stack.mesh.timeout_get() + timeout_base = stack.mesh.timeout_base_get() ttl = stack.mesh.transfer_ttl_get() addrs = re.findall(r'(0x[0-9a-fA-F]{1,2})', params.description) addrs = [e[2:].rjust(4, '0') for e in addrs] @@ -2286,7 +2286,7 @@ def hdl_wid_856(params: WIDParams): btp.mmdl_blob_info_get(addrs) time.sleep(5) - btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout, ttl, blob_data_size) + btp.mmdl_blob_transfer_start(id, block_size, chunk_size, timeout_base, ttl, blob_data_size) return True From 906a2cd8419e890b1856803b82f28a1bf77c8cc5 Mon Sep 17 00:00:00 2001 From: alperen sener Date: Thu, 11 Jul 2024 15:18:57 +0200 Subject: [PATCH 10/44] zephyr: mesh: use prefixes to filter the test cases It is sufficient to use only prefixes of the test groups for filtering. 
Signed-off-by: alperen sener --- autopts/bot/iut_config/zephyr.py | 208 ++++++------------------------- 1 file changed, 41 insertions(+), 167 deletions(-) diff --git a/autopts/bot/iut_config/zephyr.py b/autopts/bot/iut_config/zephyr.py index 7e32a0f06b..195d6983a1 100644 --- a/autopts/bot/iut_config/zephyr.py +++ b/autopts/bot/iut_config/zephyr.py @@ -190,129 +190,27 @@ ] }, - "mesh_dfd_srv.conf": { + "mesh_proxy_sol.conf": { "pre_overlay": "overlay-mesh.conf", "overlay": { - 'CONFIG_BT_SETTINGS': 'y', - 'CONFIG_FLASH': 'y', - 'CONFIG_FLASH_PAGE_LAYOUT': 'y', - 'CONFIG_FLASH_MAP': 'y', - 'CONFIG_NVS': 'y', - 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', - 'CONFIG_BT_MESH_DFD_SRV': 'y', - 'CONFIG_BT_MESH_DFU_SRV': 'y', - 'CONFIG_BT_MESH_DFD_SRV_OOB_UPLOAD': 'y', - 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', - 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN': '256', - 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MAX': '256' + 'CONFIG_BT_MESH_PROXY_CLIENT': 'y', + 'CONFIG_BT_MESH_PROXY_SOLICITATION': 'y', + 'CONFIG_BT_MESH_OD_PRIV_PROXY_CLI': 'y', + 'CONFIG_BT_MESH_OD_PRIV_PROXY_SRV': 'y', + 'CONFIG_BT_MESH_SOL_PDU_RPL_CLI': 'y', }, "test_cases": [ - 'DFU/SR-CL/GEN/BV-01-C' - 'DFUM/CL/FU/BV-01-C', - 'DFUM/CL/FU/BV-02-C', - 'DFUM/CL/FU/BV-03-C', - 'DFUM/SR/FD/BV-01-C', - 'DFUM/SR/FD/BV-02-C', - 'DFUM/SR/FD/BV-03-C', - 'DFUM/SR/FD/BV-04-C', - 'DFUM/SR/FD/BV-05-C', - 'DFUM/SR/FD/BV-06-C', - 'DFUM/SR/FD/BV-07-C', - 'DFUM/SR/FD/BV-08-C', - 'DFUM/SR/FD/BV-09-C', - 'DFUM/SR/FD/BV-10-C', - 'DFUM/SR/FD/BV-11-C', - 'DFUM/SR/FD/BV-12-C', - 'DFUM/SR/FD/BV-13-C', - 'DFUM/SR/FD/BV-14-C', - 'DFUM/SR/FD/BV-15-C', - 'DFUM/SR/FD/BV-16-C', - 'DFUM/SR/FD/BV-17-C', - 'DFUM/SR/FD/BV-18-C', - 'DFUM/SR/FD/BV-19-C', - 'DFUM/SR/FD/BV-20-C', - 'DFUM/SR/FD/BV-21-C', - 'DFUM/SR/FD/BV-22-C', - 'DFUM/SR/FD/BV-23-C', - 'DFUM/SR/FD/BV-24-C', - 'DFUM/SR/FD/BV-25-C', - 'DFUM/SR/FD/BV-26-C', - 'DFUM/SR/FD/BV-27-C', - 'DFUM/SR/FD/BV-28-C', - 'DFUM/SR/FD/BV-29-C', - 'DFUM/SR/FD/BV-30-C', - 'DFUM/SR/FD/BV-31-C', - 
'DFUM/SR/FD/BV-32-C', - 'DFUM/SR/FD/BV-33-C', - 'DFUM/SR/FD/BV-34-C', - 'DFUM/SR/FD/BV-59-C', - 'DFUM/SR/FD/BV-50-C', - 'DFUM/SR/FD/BV-35-C', - 'DFUM/SR/FD/BV-36-C', - 'DFUM/SR/FD/BV-37-C', - 'DFUM/SR/FD/BV-38-C', - 'DFUM/SR/FD/BV-39-C', - 'DFUM/SR/FD/BV-40-C', - 'DFUM/SR/FD/BV-41-C', - 'DFUM/SR/FD/BV-51-C', - 'DFUM/SR/FD/BV-42-C', - 'DFUM/SR/FD/BV-43-C', - 'DFUM/SR/FD/BV-44-C', - 'DFUM/SR/FD/BV-45-C', - 'DFUM/SR/FD/BV-46-C', - 'DFUM/SR/FD/BV-47-C', - 'DFUM/SR/FD/BV-49-C', - 'DFUM/SR/FD/BV-52-C', - 'DFUM/SR/FD/BV-53-C', - 'DFUM/SR/FD/BV-54-C', - 'DFUM/SR/FD/BV-55-C', - 'DFUM/SR/FD/BV-56-C', - 'DFUM/SR/FD/BV-57-C', - 'DFUM/SR/FD/BV-58-C', - 'MBTM/SR/BT/BV-01-C', - 'MBTM/SR/BT/BV-02-C', - 'MBTM/SR/BT/BV-03-C', - 'MBTM/SR/BT/BV-04-C', - 'MBTM/SR/BT/BV-05-C', - 'MBTM/SR/BT/BV-06-C', - 'MBTM/SR/BT/BV-07-C', - 'MBTM/SR/BT/BV-08-C', - 'MBTM/SR/BT/BV-09-C', - 'MBTM/SR/BT/BV-10-C', - 'MBTM/SR/BT/BV-11-C', - 'MBTM/SR/BT/BV-12-C', - 'MBTM/SR/BT/BV-13-C', - 'MBTM/SR/BT/BV-14-C', - 'MBTM/SR/BT/BV-15-C', - 'MBTM/SR/BT/BV-16-C', - 'MBTM/SR/BT/BV-17-C', - 'MBTM/SR/BT/BV-18-C', - 'MBTM/SR/BT/BV-19-C', - 'MBTM/SR/BT/BV-20-C', - 'MBTM/SR/BT/BV-21-C', - 'MBTM/SR/BT/BV-22-C', - 'MBTM/SR/BT/BV-23-C', - 'MBTM/SR/BT/BV-24-C', - 'MBTM/SR/BT/BV-25-C', - 'MBTM/SR/BT/BV-26-C', - 'MBTM/SR/BT/BV-27-C', - 'MBTM/SR/BT/BV-28-C', - 'MBTM/SR/BT/BV-29-C', - 'MBTM/SR/BT/BV-30-C', - 'MBTM/SR/BT/BV-31-C', - 'MBTM/SR/BT/BV-32-C', - 'MBTM/SR/BT/BV-33-C', - 'MBTM/SR/BT/BV-34-C', - 'MBTM/SR/BT/BV-35-C', - 'MBTM/SR/BT/BV-36-C', - 'MBTM/SR/BT/BV-37-C', - 'MBTM/SR/BT/BV-38-C', - 'MBTM/SR/BT/BI-01-C', - 'MBTM/SR/BT/BI-02-C', + 'MESH/SR/MPXS/BV-12-C', + 'MESH/SR/MPXS/BV-13-C', + 'MESH/SR/ODP/BV-01-C', + 'MESH/CL/ODP/BV-01-C', + 'MESH/SR/SRPL/BV-01-C', + 'MESH/CL/SRPL/BV-01-C', + 'MESH/CL/MPXS/BV-09-C', ] }, - "mesh_dfu_srv.conf": { + "mesh_dfd_dfu.conf": { "pre_overlay": "overlay-mesh.conf", "overlay": { 'CONFIG_BT_SETTINGS': 'y', @@ -321,46 +219,15 @@ 'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', 
'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', - 'CONFIG_BT_MESH_DFD_SRV': 'n' + 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', }, "test_cases": [ - 'DFUM/SR/FU/BV-01-C', - 'DFUM/SR/FU/BV-02-C', - 'DFUM/SR/FU/BV-03-C', - 'DFUM/SR/FU/BV-04-C', - 'DFUM/SR/FU/BV-05-C', - 'DFUM/SR/FU/BV-06-C', - 'DFUM/SR/FU/BV-07-C', - 'DFUM/SR/FU/BV-08-C', - 'DFUM/SR/FU/BV-09-C', - 'DFUM/SR/FU/BV-10-C', - 'DFUM/SR/FU/BV-11-C', - 'DFUM/SR/FU/BV-12-C', - 'DFUM/SR/FU/BV-13-C', - 'DFUM/SR/FU/BV-14-C', - 'DFUM/SR/FU/BV-15-C', - 'DFUM/SR/FU/BV-16-C', - 'DFUM/SR/FU/BV-17-C', - 'DFUM/SR/FU/BV-18-C', - 'DFUM/SR/FU/BV-19-C', - 'DFUM/SR/FU/BV-20-C', - 'DFUM/SR/FU/BV-21-C', - 'DFUM/SR/FU/BV-22-C', - 'DFUM/SR/FU/BV-23-C', - 'DFUM/SR/FU/BV-24-C', - 'DFUM/SR/FU/BV-27-C', - 'MBTM/CL/BT/BV-01-C', - 'MBTM/CL/BT/BV-02-C', - 'MBTM/CL/BT/BV-03-C', - 'MBTM/CL/BT/BV-04-C', - 'MBTM/CL/BT/BV-05-C', - 'MBTM/CL/BT/BV-06-C', - 'MBTM/CL/BT/BV-07-C', - 'MBTM/CL/BT/BV-08-C' + 'DFUM/SR/FD/BV-48-C', + 'DFUM/SR/FD/BV-59-C' ] }, - "mesh_dfd_dfu.conf": { + "mesh_dfd_srv.conf": { "pre_overlay": "overlay-mesh.conf", "overlay": { 'CONFIG_BT_SETTINGS': 'y', @@ -368,32 +235,39 @@ 'CONFIG_FLASH_PAGE_LAYOUT': 'y', 'CONFIG_FLASH_MAP': 'y', 'CONFIG_NVS': 'y', - 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', + 'CONFIG_BT_MESH_DFU_SRV': 'n', + 'CONFIG_BT_MESH_DFD_SRV_OOB_UPLOAD': 'y', + 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN': '256', + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MAX': '256' }, "test_cases": [ - 'DFUM/SR/FD/BV-48-C', - 'DFUM/SR/FD/BV-59-C' + 'DFU/SR-CL/GEN/BV-01-C' + 'DFUM/CL/FU', + 'DFUM/SR/FD', + 'MBTM/SR/BT', ] }, - "mesh_proxy_sol.conf": { + "mesh_dfu_srv.conf": { "pre_overlay": "overlay-mesh.conf", "overlay": { - 'CONFIG_BT_MESH_PROXY_CLIENT': 'y', - 'CONFIG_BT_MESH_PROXY_SOLICITATION': 'y', - 'CONFIG_BT_MESH_OD_PRIV_PROXY_CLI': 'y', - 'CONFIG_BT_MESH_OD_PRIV_PROXY_SRV': 'y', - 'CONFIG_BT_MESH_SOL_PDU_RPL_CLI': 'y', + 'CONFIG_BT_SETTINGS': 
'y', + 'CONFIG_FLASH': 'y', + 'CONFIG_FLASH_PAGE_LAYOUT': 'y', + 'CONFIG_FLASH_MAP': 'y', + 'CONFIG_NVS': 'y', + 'CONFIG_BT_MESH_SETTINGS_WORKQ_STACK_SIZE': '1200', + 'CONFIG_BT_MESH_DFD_SRV': 'n', + 'CONFIG_BT_MESH_DFD_SRV_OOB_UPLOAD': 'y', + 'CONFIG_BT_MESH_BLOB_SIZE_MAX': '5000', + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MIN': '256', + 'CONFIG_BT_MESH_BLOB_BLOCK_SIZE_MAX': '256' }, "test_cases": [ - 'MESH/SR/MPXS/BV-12-C', - 'MESH/SR/MPXS/BV-13-C', - 'MESH/SR/ODP/BV-01-C', - 'MESH/CL/ODP/BV-01-C', - 'MESH/SR/SRPL/BV-01-C', - 'MESH/CL/SRPL/BV-01-C', - 'MESH/CL/MPXS/BV-09-C', + 'DFUM/SR/FU', + 'MBTM/CL/BT' ] }, } From 6b2c4ce97bceeb7e0401026c3fc9a96bfc4eaf8f Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Mon, 22 Jul 2024 16:54:33 +0200 Subject: [PATCH 11/44] errata: Update L2CAP erratas --- errata/common.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/errata/common.yaml b/errata/common.yaml index 40025e5502..7646b0b95d 100644 --- a/errata/common.yaml +++ b/errata/common.yaml @@ -6,3 +6,5 @@ GAP/BOND/BON/BV-03-C: ES-25754 GAP/SEC/SEM/BI-10-C: Request ID 104895 GAP/SEC/SEM/BI-22-C: Request ID 104895 GAP/SEC/SEM/BI-23-C: Request ID 104895 + +L2CAP/LE/REJ/BI-02-C: ES-25764 From 724fb15503c8065502ab0384bdeb7d0819cb2855 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Mon, 29 Jul 2024 08:41:21 +0200 Subject: [PATCH 12/44] cron: Add start of an active hub server It happened that the TCP/IP socket of the server got stuck, so lets restart the process at each cron job. 
--- tools/cron/common.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tools/cron/common.py b/tools/cron/common.py index f8f22f6921..fd255b5dd7 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -529,10 +529,21 @@ def terminate_processes(config): if 'vm' in config['cron']: close_vm(config) + if 'active_hub_server_start_cmd' in config['cron']: + terminate_process(cmdline='active_hub_server.py') + def _start_processes(config, checkout_repos): srv_process = None + if 'active_hub_server_start_cmd' in config['cron']: + log(f"Running: {config['cron']['active_hub_server_start_cmd']}") + subprocess.Popen(config['cron']['active_hub_server_start_cmd'], + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=config['cron']['autopts_repo']) + if 'vm' in config['cron']: try: start_vm(config, checkout_repos=checkout_repos) From 0b46caef01173c92dce512f2ab4637c06975a569 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Mon, 29 Jul 2024 14:00:12 +0200 Subject: [PATCH 13/44] zephyr: Use builtin DIS service So far GATT service was used to create DIS but we should use one from Zephyr. 
--- autopts/ptsprojects/zephyr/dis.py | 49 +------------------------------ 1 file changed, 1 insertion(+), 48 deletions(-) diff --git a/autopts/ptsprojects/zephyr/dis.py b/autopts/ptsprojects/zephyr/dis.py index 2f4eda0ea8..e6afb58b6e 100644 --- a/autopts/ptsprojects/zephyr/dis.py +++ b/autopts/ptsprojects/zephyr/dis.py @@ -26,38 +26,11 @@ from autopts.ptsprojects.zephyr.ztestcase import ZTestCase -class Value: - one_byte = '01' - two_bytes = '0123' - eight_bytes_1 = '0123456789ABCDEF' - eight_bytes_2 = 'FEDCBA9876543210' - long_1 = eight_bytes_1 * 4 - long_2 = eight_bytes_2 * 4 - - -# these UUIDs are in little endian -class DIS_DB: - SVC = '0A18' - CHR_MAN_NAME = '292A' - CHR_MODEL_NUM = '242A' - CHR_SER_NUM = '252A' - CHR_HW_REV = '272A' - CHR_FW_REV = '262A' - CHR_SW_REV = '282A' - CHR_PnP_ID = '502A' - - -# Vendor ID Source field, a Vendor ID field, a Product ID field and a Product Version field -# BT SIG assigned Device ID - Nordic Semi - dummy Product ID - Dummy Product Version (1.0.0) -# all values in little endian -dis_pnp_char_val = '0100E5FE110011' - iut_manufacturer_data = 'ABCD' iut_appearance = '1111' iut_svc_data = '1111' iut_flags = '11' iut_svcs = '1111' -iut_attr_db_off = 0x000b def set_pixits(ptses): @@ -86,26 +59,6 @@ def set_pixits(ptses): pts.set_pixit("DIS", "TSPX_tester_appearance", "0000") -init_server = [TestFunc(btp.core_reg_svc_gatt), - TestFunc(btp.gatts_add_svc, 0, DIS_DB.SVC), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_MAN_NAME), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_MODEL_NUM), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_SER_NUM), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_HW_REV), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_FW_REV), - TestFunc(btp.gatts_add_char, 0, gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_SW_REV), - TestFunc(btp.gatts_add_char, 0, 
gatt.Prop.read, - gatt.Perm.read, DIS_DB.CHR_PnP_ID), - TestFunc(btp.gatts_set_val, 0, dis_pnp_char_val), - TestFunc(btp.gatts_start_server)] - - def test_cases(ptses): """Returns a list of DIS Server test cases""" @@ -142,7 +95,7 @@ def test_cases(ptses): for tc_name in test_case_name_list: instance = ZTestCase("DIS", tc_name, - cmds=pre_conditions + init_server, + cmds=pre_conditions, generic_wid_hdl=dis_wid_hdl) for custom_tc in custom_test_cases: From ab30ea7dfaff4b458275c4e419a231b793b7aadb Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Fri, 26 Jul 2024 13:52:41 +0200 Subject: [PATCH 14/44] wid/bap: Add more configurations to ac_configs New TCRL added more tests for CIS connnection in QoS Configured state. --- autopts/wid/bap.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/autopts/wid/bap.py b/autopts/wid/bap.py index 1a3f408d82..5a25f00969 100644 --- a/autopts/wid/bap.py +++ b/autopts/wid/bap.py @@ -1274,6 +1274,15 @@ def get_audio_locations_from_pac(addr_type, addr, audio_dir): 'BAP/UCL/STR/BV-557-C': ([(1, 0)], 1, True), # AC 10, Mono, Default Ch Count 'BAP/UCL/STR/BV-558-C': ([(1, 0)], 1, True), # AC 10, Mono, No PACS 'BAP/UCL/STR/BV-559-C': ([(1, 0)], 1, True), # AC 10, Mono, No PACS, Default Ch Count + # Mono in QoS Configured state + 'BAP/UCL/STR/BV-560-C': ([(0, 1)], 1, True), # AC 2, Generic, QoS, Mono + 'BAP/UCL/STR/BV-561-C': ([(0, 1)], 1, True), # AC 2, Generic, QoS, Mono, Default Ch Count + 'BAP/UCL/STR/BV-562-C': ([(0, 1)], 1, True), # AC 2, Generic, QoS, Mono, No PACS + 'BAP/UCL/STR/BV-563-C': ([(0, 1)], 1, True), # AC 2, Generic, QoS, Mono, Default Ch Count, No PACS + 'BAP/UCL/STR/BV-564-C': ([(1, 0)], 1, True), # AC 1, Generic, QoS, Mono + 'BAP/UCL/STR/BV-565-C': ([(1, 0)], 1, True), # AC 1, Generic, QoS, Mono, Default Ch Count + 'BAP/UCL/STR/BV-566-C': ([(1, 0)], 1, True), # AC 1, Generic, QoS, Mono, No PACS + 'BAP/UCL/STR/BV-567-C': ([(1, 0)], 1, True), # AC 1, Generic, QoS, Mono, Default Ch Count, No PACS } From 
886958fa5034770ed2db348214ab6abb3d828e21 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Wed, 31 Jul 2024 16:25:16 +0200 Subject: [PATCH 15/44] mynewt: errata: l2cap test case Add L2CAP test case to mynewt errata list --- errata/mynewt.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/errata/mynewt.yaml b/errata/mynewt.yaml index 7fcf3110bb..3677cb484a 100644 --- a/errata/mynewt.yaml +++ b/errata/mynewt.yaml @@ -16,3 +16,4 @@ GATT/SR/GAR/BI-45-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BV-05-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BV-09-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BI-33-C: https://github.com/apache/mynewt-nimble/issues/1800 +L2CAP/ECFC/BI-02-C: Request ID 103343 From 9e67c34c77c2a124b86aa4d034dd232ba78703a9 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Thu, 1 Aug 2024 10:32:20 +0200 Subject: [PATCH 16/44] requirements: add missing module Add missing module to client and server requirements.txt --- autoptsclient_requirements.txt | 1 + autoptsserver_requirements.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/autoptsclient_requirements.txt b/autoptsclient_requirements.txt index 2d7661eac1..be0a66d64a 100644 --- a/autoptsclient_requirements.txt +++ b/autoptsclient_requirements.txt @@ -11,3 +11,4 @@ pylink-square openpyxl pyyaml yepkit-pykush +hidapi diff --git a/autoptsserver_requirements.txt b/autoptsserver_requirements.txt index 82da5424c3..5fa6f71141 100644 --- a/autoptsserver_requirements.txt +++ b/autoptsserver_requirements.txt @@ -3,3 +3,4 @@ pywin32 utils psutil yepkit-pykush +hidapi From b6bc663b21c88554124f4b5cdd5dc327fc74794b Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Tue, 30 Jul 2024 11:02:53 +0200 Subject: [PATCH 17/44] tools: cron: test case limit enchancement This will enable to run specified amount of test cases from among prefixes that are used with magic tag. 
--- tools/cron/autopts_cron.py | 3 +++ tools/cron/estimations.py | 15 ++++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/tools/cron/autopts_cron.py b/tools/cron/autopts_cron.py index 2c397543b0..8f77c6f0cc 100644 --- a/tools/cron/autopts_cron.py +++ b/tools/cron/autopts_cron.py @@ -227,6 +227,9 @@ def schedule_pr_job(cron, pr_info, job_config): test_cases, est_duration = get_estimations(cfg_dict, included_tc, excluded_tc, job_config['test_case_limit']) + job_config.pop('test_case_limit') + job_config['included'] = test_cases + test_case_count = len(test_cases) estimations = f', test case count: {test_case_count}, '\ f'estimated duration: {est_duration}' diff --git a/tools/cron/estimations.py b/tools/cron/estimations.py index 3376939a69..61bf70212b 100644 --- a/tools/cron/estimations.py +++ b/tools/cron/estimations.py @@ -99,7 +99,20 @@ def get_estimations(config, included_tc, excluded_tc, limit=None): test_cases = estimate_test_cases(config, included_tc, excluded_tc) if limit: - test_cases = test_cases[:limit] + if len(included_tc) == 1: + test_cases = test_cases[:limit] + else: + profile_count = {prefix: 0 for prefix in included_tc} + tc_list = [] + for profile in included_tc: + for test_case in test_cases: + if profile_count[profile] == limit: + break + if test_case.startswith(profile): + tc_list.append(test_case) + profile_count[profile] += 1 + + test_cases = tc_list est_duration = None database_file = config['auto_pts'].get('database_file', None) From 3c2077b6237d39fd6e795d1878ffc3b406640eb7 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Mon, 5 Aug 2024 17:56:57 +0200 Subject: [PATCH 18/44] bot: Load missing tc database into stats backup The loaded tc stats were not inited with tc database, hence regressions could not be caught if the autopts bot was restarted in the meantime. 
--- autopts/bot/common.py | 7 ++++++- autopts/client.py | 23 ++++++++++++++--------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index d3fefa9e51..6bb7bde726 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -183,15 +183,19 @@ def load_backup_of_previous_run(self): from the last remembered config/test_case. """ + self.load_test_case_database() + continue_test_case = None continue_config = None if os.path.exists(self.file_paths['ALL_STATS_JSON_FILE']): self.backup['all_stats'] = TestCaseRunStats.load_from_backup(self.file_paths['ALL_STATS_JSON_FILE']) + self.backup['all_stats'].db = self.test_case_database continue_config = self.backup['all_stats'].pending_config # The last config and test case preformed in the broken test run if os.path.exists(self.file_paths['TC_STATS_JSON_FILE']): self.backup['tc_stats'] = TestCaseRunStats.load_from_backup(self.file_paths['TC_STATS_JSON_FILE']) + self.backup['tc_stats'].db = self.test_case_database continue_config = self.backup['tc_stats'].pending_config continue_test_case = self.backup['tc_stats'].pending_test_case @@ -209,7 +213,8 @@ def load_backup_of_previous_run(self): # Skip already completed test cases and the faulty one tc_index = test_cases_per_config[continue_config].index(continue_test_case) test_cases_per_config[continue_config] = test_cases_per_config[continue_config][tc_index + 1:] - + self.backup['tc_stats'].index += 1 + if not test_cases_per_config[continue_config]: # The faulty test case was the last one in the config. 
Move to the next config self.backup['tc_stats'].update(continue_test_case, 0, 'TIMEOUT') diff --git a/autopts/client.py b/autopts/client.py index fc5e4e0da0..c28da14f69 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -1327,6 +1327,19 @@ def parse_config_and_args(self, args_namespace=None): self.args, errmsg = self.arg_parser.parse(args_namespace) return errmsg + def load_test_case_database(self): + if not self.args.store or self.test_case_database: + return + + tc_db_table_name = self.store_tag + str(self.args.board_name) + + if os.path.exists(self.args.database_file) and \ + not os.path.exists(self.file_paths['TEST_CASE_DB_FILE']): + shutil.copy(self.args.database_file, self.file_paths['TEST_CASE_DB_FILE']) + + self.test_case_database = TestCaseTable(tc_db_table_name, + self.file_paths['TEST_CASE_DB_FILE']) + def start(self, args=None): """Start main with exception handling.""" @@ -1368,15 +1381,7 @@ def main(self, _args=None): os.makedirs(self.file_paths["TMP_DIR"], exist_ok=True) - if self.args.store: - tc_db_table_name = self.store_tag + str(self.args.board_name) - - if os.path.exists(self.args.database_file) and \ - not os.path.exists(self.file_paths['TEST_CASE_DB_FILE']): - shutil.copy(self.args.database_file, self.file_paths['TEST_CASE_DB_FILE']) - - self.test_case_database = TestCaseTable(tc_db_table_name, - self.file_paths['TEST_CASE_DB_FILE']) + self.load_test_case_database() init_pts(self.args, self.ptses) From 4bc682a1d47842055384e598151ff2581f249327 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Mon, 29 Jul 2024 11:45:20 +0200 Subject: [PATCH 19/44] cron: Add --test-case-limit alias to --test_case_limit option --- tools/cron/autopts_cron.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/cron/autopts_cron.py b/tools/cron/autopts_cron.py index 8f77c6f0cc..fdba256d55 100644 --- a/tools/cron/autopts_cron.py +++ b/tools/cron/autopts_cron.py @@ -86,7 +86,7 @@ def __init__(self, add_help=True): help="Names of test 
cases to exclude. Groups of " "test cases can be specified by profile names") - self.add_argument("--test_case_limit", nargs='?', type=int, default=0, + self.add_argument("--test_case_limit", "--test-case-limit", nargs='?', type=int, default=0, help="Limit of test cases to run") From e53209a0db12998da22ab19cc8bd32399fb9e8e4 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Mon, 29 Jul 2024 12:00:40 +0200 Subject: [PATCH 20/44] cron: Handle parsing exception Unhandled exception terminated a github cron thread. --- tools/cron/autopts_cron.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tools/cron/autopts_cron.py b/tools/cron/autopts_cron.py index fdba256d55..e904652aa5 100644 --- a/tools/cron/autopts_cron.py +++ b/tools/cron/autopts_cron.py @@ -190,7 +190,11 @@ def autopts_magic_tag_cb(cron, comment_info): for board in cron.tags[magic_tag]['configs']: config = copy.deepcopy(cron.tags[magic_tag]['configs'][board]) parser = config.get('magic_tag_parser', AutoPTSMagicTagParser)() - parsed_args = parser.parse_args(command_args) + try: + parsed_args = parser.parse_args(command_args) + except BaseException as e: + log(e) + continue is_supported, supported_test_cases = check_supported_profiles(parsed_args.included, config) if not is_supported: From 800c154df16257faf31f445847d3621858e2114c Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Thu, 8 Aug 2024 15:29:57 +0200 Subject: [PATCH 21/44] tools: bisect: Allow to set the path of bisected repo --- tools/cron/autopts_bisect.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tools/cron/autopts_bisect.py b/tools/cron/autopts_bisect.py index ed86e106a0..411f216de7 100644 --- a/tools/cron/autopts_bisect.py +++ b/tools/cron/autopts_bisect.py @@ -124,8 +124,11 @@ def bisect(cfg, test_case, good_commit, bad_commit=''): print('Bisect started testing of test_case={} cfg={}'.format(test_case, cfg)) included = '-c {} '.format(test_case) - cfg_dict, cfg_path = 
load_cfg(cfg) - project_repo = cfg_dict['auto_pts']['project_path'] + cfg_dict, _ = load_cfg(cfg) + if 'repo_path' in cfg_dict['bisect']: + project_repo = cfg_dict['bisect']['repo_path'] + else: + project_repo = cfg_dict['auto_pts']['project_path'] last_bad = get_sha(project_repo) From 6c535650b70939fc60c057e1f9dd228cb9b60bad Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Wed, 7 Aug 2024 15:56:35 +0200 Subject: [PATCH 22/44] cron: Restart bot again in case of timeout Rarely, but it happens that restart of the testing setup fails, e.g. because of some TCP/IP socket. Let's try restart the bot setup again if bot terminated unexpectedly. --- autopts/utils.py | 2 ++ tools/cron/common.py | 9 ++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/autopts/utils.py b/autopts/utils.py index 8a200ba07c..441d3cc75a 100644 --- a/autopts/utils.py +++ b/autopts/utils.py @@ -269,6 +269,8 @@ def terminate_process(pid=None, name=None, cmdline=None): continue process.terminate() + process.wait() + logging.debug(f"The process with pid={process.info['pid']} name={process.info['name']} " f"cmdline={process.info['cmdline']} has been terminated.") diff --git a/tools/cron/common.py b/tools/cron/common.py index fd255b5dd7..ec7795cecd 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -587,6 +587,7 @@ def _run_test(config): backup = config['auto_pts'].get('use_backup', False) timeguard = config['cron']['test_run_timeguard'] results_file_path = config['file_paths']['TC_STATS_JSON_FILE'] + report_file_path = config['file_paths']['REPORT_TXT_FILE'] srv_process, bot_process = _start_processes(config, checkout_repos=True) last_check_time = time() @@ -601,7 +602,13 @@ def _run_test(config): if bot_process.poll() is not None: log('bot process finished.') - break + if os.path.exists(report_file_path): + break + + elif backup: + log("Autopts bot terminated before report creation. 
Restarting processes...") + srv_process, bot_process = _restart_processes(config) + sleep_job(config['cron']['cancel_job'], timeguard) if not backup: continue From f3ddaed806d87c31601133cf9979cc343d66b50c Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Fri, 9 Aug 2024 12:08:05 +0200 Subject: [PATCH 23/44] mynewt: errata: Update errata list Remove stale ones, add newt test case. --- errata/mynewt.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/errata/mynewt.yaml b/errata/mynewt.yaml index 3677cb484a..9723dd99c1 100644 --- a/errata/mynewt.yaml +++ b/errata/mynewt.yaml @@ -2,11 +2,6 @@ # GAP/CONN/NCON/BV-01-C: CASE00xxxxx # GAP/CONN/NCON/BV-02-C: CASE00xxxxx -GATT/CL/GAD/BV-01-C: Request ID 90121 -GATT/CL/GAD/BV-02-C: Request ID 90121 -GATT/CL/GAD/BV-04-C: Request ID 90121 -GATT/CL/GAD/BV-05-C: Request ID 90121 -GATT/CL/GAD/BV-06-C: Request ID 90121 GATT/SR/GAR/BV-04-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BV-10-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAR/BI-13-C: https://github.com/apache/mynewt-nimble/issues/1800 @@ -17,3 +12,4 @@ GATT/SR/GAW/BV-05-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BV-09-C: https://github.com/apache/mynewt-nimble/issues/1800 GATT/SR/GAW/BI-33-C: https://github.com/apache/mynewt-nimble/issues/1800 L2CAP/ECFC/BI-02-C: Request ID 103343 +GAP/SEC/SEM/BV-26-C: Request ID 136305 From 83b322596137fdf7c65f42c90d2549e2ea63d703 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 9 Aug 2024 11:53:33 +0200 Subject: [PATCH 24/44] cron: pr job: Rebase remote autopts server to PR branch --- tools/cron/autopts_cron.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tools/cron/autopts_cron.py b/tools/cron/autopts_cron.py index e904652aa5..ccedc5497e 100644 --- a/tools/cron/autopts_cron.py +++ b/tools/cron/autopts_cron.py @@ -262,6 +262,17 @@ def schedule_pr_job(cron, pr_info, job_config): job_config['excluded'] = ' 
'.join(job_config['excluded']) job_config['cancel_job'] = CancelJob(False) + if pr_info['html_url'].startswith('https://github.com/auto-pts/auto-pts'): + try: + vm_autopts = job_config['remote_machine']['git']['autopts'] + vm_autopts['checkout_cmd'] = f"git fetch {vm_autopts['remote']} & " \ + f"git fetch {vm_autopts['remote']} pull/{pr_number}/head & " \ + f"git checkout FETCH_HEAD & " \ + f"set GIT_COMMITTER_NAME=Name & set GIT_COMMITTER_EMAIL=temp@example.com & " \ + f"git pull --rebase {vm_autopts['remote']} {vm_autopts['branch']} > NUL 2>&1" + except KeyError: + pass + getattr(schedule.every(), start_time.strftime('%A').lower()) \ .at(start_time.strftime('%H:%M:%S')) \ .do(lambda *args, **kwargs: pr_job_finish_wrapper( From 702f4a76a88a1e9875e0f5f0d1b2395d851cb0de Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 9 Aug 2024 12:03:37 +0200 Subject: [PATCH 25/44] cron: pr job: Pass estimated test cases only if limit provided If the requested test case prefixes were not found in cached list of test cases, the test_cases list was empty and autopts bot was triggered to run all test cases from PTS workspace. 
--- tools/cron/autopts_cron.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/tools/cron/autopts_cron.py b/tools/cron/autopts_cron.py index ccedc5497e..ff536b688d 100644 --- a/tools/cron/autopts_cron.py +++ b/tools/cron/autopts_cron.py @@ -231,16 +231,19 @@ def schedule_pr_job(cron, pr_info, job_config): test_cases, est_duration = get_estimations(cfg_dict, included_tc, excluded_tc, job_config['test_case_limit']) - job_config.pop('test_case_limit') - job_config['included'] = test_cases - test_case_count = len(test_cases) - estimations = f', test case count: {test_case_count}, '\ - f'estimated duration: {est_duration}' if test_case_count > 0: + if job_config['test_case_limit']: + job_config['included'] = test_cases + + estimations = f', test case count: {test_case_count}, ' \ + f'estimated duration: {est_duration}' estimations += f'
Test cases to be run{"
".join(test_cases)}
\n' + else: + estimations = f', test case count: estimation not available' + job_config.pop('test_case_limit') job_config['estimated_duration'] = est_duration except Exception as e: # Probably the configuration missed some parameters, From dcbcbf07b3e0300735fc5fab9d5969f28f8b2255 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Thu, 8 Aug 2024 21:31:50 +0200 Subject: [PATCH 26/44] zephyr: Update PTS workspace for PTS 8.6.1 GMCS was set according to test plan. --- .../zephyr/zephyr-master/zephyr-master.pqw6 | 618 ++++++++++++++++++ 1 file changed, 618 insertions(+) diff --git a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 index 86b742bf4c..3c72570205 100755 --- a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 +++ b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 @@ -20096,6 +20096,372 @@ FALSE TRUE + + TSPC_MCS_20_1 + GMCS v1.0 (M) + TRUE + TRUE + + + TSPC_MCS_21_1 + Service supported over BR/EDR (C.1) + FALSE + FALSE + + + TSPC_MCS_21_2 + Service supported over LE (C.1) + TRUE + FALSE + + + TSPC_MCS_22_1 + Generic Media Control Service (M) + TRUE + TRUE + + + TSPC_MCS_22_2 + Object Transfer Service (C.1) + TRUE + FALSE + + + TSPC_MCS_22_3 + Media Player Name Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_4 + Media Player Name Read Long Supported (O) + TRUE + FALSE + + + TSPC_MCS_22_5 + Media Player Icon Object ID Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_6 + Media Player Icon URL Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_7 + Track Changed Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_8 + Track Title Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_9 + Track Title Read Long Supported (O) + TRUE + FALSE + + + TSPC_MCS_22_10 + Track Duration Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_11 + Track Position Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_12 + Playback Speed Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_13 + 
Seeking Speed Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_14 + Current Track Segments Object ID Characteristic (C.2) + TRUE + FALSE + + + TSPC_MCS_22_15 + Current Track Object ID Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_16 + Next Track Object ID Characteristic (C.2) + TRUE + FALSE + + + TSPC_MCS_22_17 + Parent Group Object ID Characteristic (C.2) + TRUE + FALSE + + + TSPC_MCS_22_18 + Current Group Object ID Characteristic (C.2) + TRUE + FALSE + + + TSPC_MCS_22_19 + Playing Order Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_20 + Playing Order Supported Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_21 + Media State Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_22 + Media Control Point Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_23 + Media Control Point Opcodes Supported Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_22_24 + Search Results Object ID Characteristic (O) + TRUE + FALSE + + + TSPC_MCS_22_25 + Search Control Point Characteristic (C.3) + TRUE + FALSE + + + TSPC_MCS_22_26 + Content Control ID Characteristic (M) + TRUE + TRUE + + + TSPC_MCS_23_1 + Play (C.1) + TRUE + FALSE + + + TSPC_MCS_23_2 + Pause (C.1) + TRUE + FALSE + + + TSPC_MCS_23_3 + Fast Rewind (C.1) + TRUE + FALSE + + + TSPC_MCS_23_4 + Fast Forward (C.1) + TRUE + FALSE + + + TSPC_MCS_23_5 + Stop (C.1) + TRUE + FALSE + + + TSPC_MCS_23_6 + Move Relative (C.1) + TRUE + FALSE + + + TSPC_MCS_23_7 + Previous Segment (C.1) + TRUE + FALSE + + + TSPC_MCS_23_8 + Next Segment (C.1) + TRUE + FALSE + + + TSPC_MCS_23_9 + First Segment (C.1) + TRUE + FALSE + + + TSPC_MCS_23_10 + Last Segment (C.1) + TRUE + FALSE + + + TSPC_MCS_23_11 + Goto Segment (C.1) + TRUE + FALSE + + + TSPC_MCS_23_12 + Previous Track (C.1) + TRUE + FALSE + + + TSPC_MCS_23_13 + Next Track (C.1) + TRUE + FALSE + + + TSPC_MCS_23_14 + First Track (C.1) + TRUE + FALSE + + + TSPC_MCS_23_15 + Last Track (C.1) + TRUE + FALSE + + + TSPC_MCS_23_16 + Goto Track (C.1) + TRUE + FALSE + + + TSPC_MCS_23_17 + 
Previous Group (C.1) + TRUE + FALSE + + + TSPC_MCS_23_18 + Next Group (C.1) + TRUE + FALSE + + + TSPC_MCS_23_19 + First Group (C.1) + TRUE + FALSE + + + TSPC_MCS_23_20 + Last Group (C.1) + TRUE + FALSE + + + TSPC_MCS_23_21 + Goto Group (C.1) + TRUE + FALSE + + + TSPC_MCS_24_1 + Play (C.1) + TRUE + FALSE + + + TSPC_MCS_24_2 + Pause (C.2) + TRUE + FALSE + + + TSPC_MCS_24_3 + Seeking (O) + TRUE + FALSE + + + TSPC_MCS_24_4 + Inactive (M) + TRUE + TRUE + + + TSPC_MCS_25_1 + Write without Response (M) + TRUE + TRUE + + + TSPC_MCS_25_2 + Notifications (M) + TRUE + TRUE + + + TSPC_MCS_25_3 + Write Characteristic Descriptors (M) + TRUE + TRUE + + + TSPC_MCS_25_4 + Read Characteristic Descriptors (M) + TRUE + TRUE + + + TSPC_MCS_25_5 + GATT Server over BR/EDR (C.1) + FALSE + FALSE + + + TSPC_MCS_25_6 + GATT Server over LE (C.2) + TRUE + FALSE + + + TSPC_MCS_26_1 + SDP record present for GMCS (M) + FALSE + TRUE + TSPC_ALL Enables all test cases when set. @@ -24687,6 +25053,258 @@ FALSE TRUE + + TSPC_TBS_20_1 + GTBS v1.0 (M) + TRUE + TRUE + + + TSPC_TBS_21_1 + Service supported over BR/EDR (C.1) + FALSE + FALSE + + + TSPC_TBS_21_2 + Service supported over LE (C.1) + FALSE + FALSE + + + TSPC_TBS_22_1 + Generic Telephone Bearer Service (M) + TRUE + TRUE + + + TSPC_TBS_22_2 + Bearer Provider Name Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_3 + Bearer Provider Name Read Long Support (O) + FALSE + FALSE + + + TSPC_TBS_22_4 + Bearer UCI Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_5 + Bearer Technology Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_6 + Bearer URI Schemes Supported List Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_7 + Bearer URI Schemes Supported List Notification (O) + FALSE + FALSE + + + TSPC_TBS_22_8 + Bearer URI Schemes Supported List Read Long Support (C.1) + FALSE + FALSE + + + TSPC_TBS_22_9 + Bearer Signal Strength Characteristic (O) + FALSE + FALSE + + + TSPC_TBS_22_10 + Bearer Signal Strength Reporting Interval Characteristic (C.2) + 
FALSE + FALSE + + + TSPC_TBS_22_11 + Bearer List Current Calls Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_12 + Bearer List Current Calls Read Long Support (O) + FALSE + FALSE + + + TSPC_TBS_22_13 + Content Control ID Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_14 + Status Flags Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_15 + Incoming Call Target Bearer URI Characteristic (O) + FALSE + FALSE + + + TSPC_TBS_22_16 + Incoming Call Target Bearer URI Read Long Support (C.3) + FALSE + FALSE + + + TSPC_TBS_22_17 + Call State Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_18 + Call State Read Long Support (O) + FALSE + FALSE + + + TSPC_TBS_22_19 + Call Control Point Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_20 + Call Control Point Optional Opcodes Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_21 + Termination Reason Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_22 + Incoming Call Characteristic (M) + TRUE + TRUE + + + TSPC_TBS_22_23 + Incoming Call Read Long Support (O) + FALSE + FALSE + + + TSPC_TBS_22_24 + Call Friendly Name Characteristic (O) + FALSE + FALSE + + + TSPC_TBS_22_25 + Call Friendly Name Read Long Support (C.4) + FALSE + FALSE + + + TSPC_TBS_23_1 + Accept (M) + TRUE + TRUE + + + TSPC_TBS_23_2 + Terminate (M) + TRUE + TRUE + + + TSPC_TBS_23_3 + Local Hold (O) + FALSE + FALSE + + + TSPC_TBS_23_4 + Local Retrieve (O) + FALSE + FALSE + + + TSPC_TBS_23_5 + Originate (M) + TRUE + TRUE + + + TSPC_TBS_23_6 + Join (O) + FALSE + FALSE + + + TSPC_TBS_24_1 + Write Characteristic Value (M) + TRUE + TRUE + + + TSPC_TBS_24_2 + Write without Response (M) + TRUE + TRUE + + + TSPC_TBS_24_3 + Notifications (M) + TRUE + TRUE + + + TSPC_TBS_24_4 + Read Characteristic Descriptors (M) + TRUE + TRUE + + + TSPC_TBS_24_5 + Write Characteristic Descriptors (M) + TRUE + TRUE + + + TSPC_TBS_24_6 + GATT Server over BR/EDR (C.1) + FALSE + FALSE + + + TSPC_TBS_24_7 + GATT Server over LE (C.2) + FALSE + FALSE + + + TSPC_TBS_25_1 + SDP record 
present for GTBS (M) + TRUE + TRUE + TSPC_ALL Enables all test cases when set. From a6500c391122367666c1a5d4586f715dd11998f4 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Thu, 8 Aug 2024 21:32:27 +0200 Subject: [PATCH 27/44] zephyr: Move GTBS to TBS profile GTBS tests are now part of TBS profile. --- autopts/ptsprojects/zephyr/__init__.py | 1 - autopts/ptsprojects/zephyr/gtbs.py | 94 -------------------------- 2 files changed, 95 deletions(-) delete mode 100644 autopts/ptsprojects/zephyr/gtbs.py diff --git a/autopts/ptsprojects/zephyr/__init__.py b/autopts/ptsprojects/zephyr/__init__.py index 30798ccc9d..3cd0f612c4 100644 --- a/autopts/ptsprojects/zephyr/__init__.py +++ b/autopts/ptsprojects/zephyr/__init__.py @@ -44,7 +44,6 @@ import autopts.ptsprojects.zephyr.bass import autopts.ptsprojects.zephyr.gmcs import autopts.ptsprojects.zephyr.tbs -import autopts.ptsprojects.zephyr.gtbs import autopts.ptsprojects.zephyr.tmap import autopts.ptsprojects.zephyr.ots # GENERATOR append 1 diff --git a/autopts/ptsprojects/zephyr/gtbs.py b/autopts/ptsprojects/zephyr/gtbs.py deleted file mode 100644 index bb4254cfdf..0000000000 --- a/autopts/ptsprojects/zephyr/gtbs.py +++ /dev/null @@ -1,94 +0,0 @@ -# -# auto-pts - The Bluetooth PTS Automation Framework -# -# Copyright (c) 2024, Codecoup. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms and conditions of the GNU General Public License, -# version 2, as published by the Free Software Foundation. -# -# This program is distributed in the hope it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for -# more details. 
-# - -from autopts.pybtp import btp -from autopts.client import get_unique_name -from autopts.ptsprojects.stack import get_stack -from autopts.ptsprojects.testcase import TestFunc -from autopts.ptsprojects.zephyr.tbs_wid import tbs_wid_hdl -from autopts.ptsprojects.zephyr.ztestcase import ZTestCase -from autopts.pybtp.types import IOCap, Addr -from autopts.utils import ResultWithFlag - - -def set_pixits(ptses): - pts = ptses[0] - - pts.set_pixit("GTBS", "TSPX_bd_addr_iut", "DEADBEEFDEAD") - pts.set_pixit("GTBS", "TSPX_iut_device_name_in_adv_packet_for_random_address", "") - pts.set_pixit("GTBS", "TSPX_time_guard", "180000") - pts.set_pixit("GTBS", "TSPX_use_implicit_send", "TRUE") - pts.set_pixit("GTBS", "TSPX_secure_simple_pairing_pass_key_confirmation", "FALSE") - pts.set_pixit("GTBS", "TSPX_mtu_size", 23) - pts.set_pixit("GTBS", "TSPX_delete_link_key", "FALSE") - pts.set_pixit("GTBS", "TSPX_pin_code", "0000") - pts.set_pixit("GTBS", "TSPX_use_dynamic_pin", "FALSE") - pts.set_pixit("GTBS", "TSPX_delete_ltk", "TRUE") - pts.set_pixit("GTBS", "TSPX_security_enabled", "FALSE") - pts.set_pixit("GTBS", "TSPX_Signal_Strength_Reporting_Interval", 10) - pts.set_pixit("GTBS", "TSPX_Incoming_Call_Target_Caller_ID", "tel:+19991111234") - pts.set_pixit("GTBS", "TSPX_Incoming_friendly_name", "tel:+19991110011") - pts.set_pixit("GTBS", "TSPX_Outgoing_uri", "tel:+19991111234") - pts.set_pixit("GTBS", "TSPX_Outgoing_uri2", "tel:+19991111235") - - -def test_cases(ptses): - """ - Returns a list of GTBS test cases - ptses -- list of PyPTS instances - """ - - pts = ptses[0] - pts_bd_addr = pts.q_bd_addr - iut_device_name = get_unique_name(pts) - stack = get_stack() - - iut_addr = ResultWithFlag() - - def set_addr(addr): - iut_addr.set(addr) - - pre_conditions = [ - TestFunc(btp.core_reg_svc_gap), - TestFunc(stack.gap_init, iut_device_name), - TestFunc(btp.core_reg_svc_gatt), - TestFunc(btp.gap_read_ctrl_info), - TestFunc(lambda: pts.update_pixit_param( - "GTBS", "TSPX_bd_addr_iut", - 
stack.gap.iut_addr_get_str())), - TestFunc(lambda: set_addr( - stack.gap.iut_addr_get_str())), - TestFunc(btp.gap_set_io_cap, IOCap.display_only), - TestFunc(btp.set_pts_addr, pts_bd_addr, Addr.le_public), - TestFunc(stack.gatt_init), - TestFunc(btp.gap_set_conn), - TestFunc(btp.gap_set_gendiscov), - TestFunc(btp.core_reg_svc_tbs), - TestFunc(lambda: pts.update_pixit_param( - "GTBS", "TSPX_iut_device_name_in_adv_packet_for_random_address", iut_device_name)), - TestFunc(stack.tbs_init) - ] - - test_case_name_list = pts.get_test_case_list('GTBS') - tc_list = [] - - # Using TBS wid for GTBS tests - for tc_name in test_case_name_list: - instance = ZTestCase('GTBS', tc_name, cmds=pre_conditions, - generic_wid_hdl=tbs_wid_hdl) - - tc_list.append(instance) - - return tc_list From 73e8b0a2df39f85f05cda1a882e83f39499abaf5 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Thu, 8 Aug 2024 21:33:14 +0200 Subject: [PATCH 28/44] zephyr: Move GMCS test to MCS profile GMCS testa are now part of MCS profile. 
--- autopts/ptsprojects/zephyr/__init__.py | 2 +- .../ptsprojects/zephyr/{gmcs.py => mcs.py} | 50 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) rename autopts/ptsprojects/zephyr/{gmcs.py => mcs.py} (60%) diff --git a/autopts/ptsprojects/zephyr/__init__.py b/autopts/ptsprojects/zephyr/__init__.py index 3cd0f612c4..209e3faf46 100644 --- a/autopts/ptsprojects/zephyr/__init__.py +++ b/autopts/ptsprojects/zephyr/__init__.py @@ -42,7 +42,7 @@ import autopts.ptsprojects.zephyr.cap import autopts.ptsprojects.zephyr.mcp import autopts.ptsprojects.zephyr.bass -import autopts.ptsprojects.zephyr.gmcs +import autopts.ptsprojects.zephyr.mcs import autopts.ptsprojects.zephyr.tbs import autopts.ptsprojects.zephyr.tmap import autopts.ptsprojects.zephyr.ots diff --git a/autopts/ptsprojects/zephyr/gmcs.py b/autopts/ptsprojects/zephyr/mcs.py similarity index 60% rename from autopts/ptsprojects/zephyr/gmcs.py rename to autopts/ptsprojects/zephyr/mcs.py index 21fc23183c..39474b73a1 100644 --- a/autopts/ptsprojects/zephyr/gmcs.py +++ b/autopts/ptsprojects/zephyr/mcs.py @@ -13,7 +13,7 @@ # more details. # -"""GMCS test cases""" +"""MCS/GMCS test cases""" from autopts.pybtp import btp from autopts.client import get_unique_name @@ -32,7 +32,7 @@ def set_addr(addr): def set_pixits(ptses): - """Setup GMCS profile PIXITS for workspace. Those values are used for test + """Setup MCS/GMCS profile PIXITS for workspace. Those values are used for test case if not updated within test case. 
PIXITS always should be updated accordingly to project and newest version of @@ -42,28 +42,28 @@ def set_pixits(ptses): pts = ptses[0] - pts.set_pixit("GMCS", "TSPX_bd_addr_iut", "DEADBEEFDEAD") - pts.set_pixit("GMCS", "TSPX_iut_device_name_in_adv_packet_for_random_address", "") - pts.set_pixit("GMCS", "TSPX_time_guard", "180000") - pts.set_pixit("GMCS", "TSPX_use_implicit_send", "TRUE") - pts.set_pixit("GMCS", "TSPX_secure_simple_pairing_pass_key_confirmation", "FALSE") - pts.set_pixit("GMCS", "TSPX_mtu_size", 23) - pts.set_pixit("GMCS", "TSPX_delete_link_key", "FALSE") - pts.set_pixit("GMCS", "TSPX_pin_code", "0000") - pts.set_pixit("GMCS", "TSPX_use_dynamic_pin", "FALSE") - pts.set_pixit("GMCS", "TSPX_delete_ltk", "TRUE") - pts.set_pixit("GMCS", "TSPX_security_enabled", "FALSE") - pts.set_pixit("GMCS", "TSPX_Track_Name", "Track") - pts.set_pixit("GMCS", "TSPX_Artist_Name", "Artist") - pts.set_pixit("GMCS", "TSPX_Album_Name", "Album") - pts.set_pixit("GMCS", "TSPX_Group_Name", "Group") - pts.set_pixit("GMCS", "TSPX_Earliest_Year", "1999") - pts.set_pixit("GMCS", "TSPX_Latest_Year", "2000") - pts.set_pixit("GMCS", "TSPX_Genre", "Genre") + pts.set_pixit("MCS", "TSPX_bd_addr_iut", "DEADBEEFDEAD") + pts.set_pixit("MCS", "TSPX_iut_device_name_in_adv_packet_for_random_address", "") + pts.set_pixit("MCS", "TSPX_time_guard", "180000") + pts.set_pixit("MCS", "TSPX_use_implicit_send", "TRUE") + pts.set_pixit("MCS", "TSPX_secure_simple_pairing_pass_key_confirmation", "FALSE") + pts.set_pixit("MCS", "TSPX_mtu_size", 23) + pts.set_pixit("MCS", "TSPX_delete_link_key", "FALSE") + pts.set_pixit("MCS", "TSPX_pin_code", "0000") + pts.set_pixit("MCS", "TSPX_use_dynamic_pin", "FALSE") + pts.set_pixit("MCS", "TSPX_delete_ltk", "TRUE") + pts.set_pixit("MCS", "TSPX_security_enabled", "FALSE") + pts.set_pixit("MCS", "TSPX_Track_Name", "Track") + pts.set_pixit("MCS", "TSPX_Artist_Name", "Artist") + pts.set_pixit("MCS", "TSPX_Album_Name", "Album") + pts.set_pixit("MCS", "TSPX_Group_Name", 
"Group") + pts.set_pixit("MCS", "TSPX_Earliest_Year", "1999") + pts.set_pixit("MCS", "TSPX_Latest_Year", "2000") + pts.set_pixit("MCS", "TSPX_Genre", "Genre") def test_cases(ptses): - """Returns a list of GMCS Server test cases""" + """Returns a list of MCS/GMCS Server test cases""" pts = ptses[0] @@ -77,7 +77,7 @@ def test_cases(ptses): TestFunc(btp.core_reg_svc_gatt), TestFunc(btp.gap_read_ctrl_info), TestFunc(lambda: pts.update_pixit_param( - "GMCS", "TSPX_bd_addr_iut", + "MCS", "TSPX_bd_addr_iut", stack.gap.iut_addr_get_str())), TestFunc(btp.gap_set_io_cap, IOCap.display_only), TestFunc(btp.set_pts_addr, pts_bd_addr, Addr.le_public), @@ -86,15 +86,15 @@ def test_cases(ptses): TestFunc(btp.gap_set_gendiscov), TestFunc(btp.core_reg_svc_gmcs), TestFunc(lambda: pts.update_pixit_param( - "GMCS", "TSPX_iut_device_name_in_adv_packet_for_random_address", iut_device_name)), + "MCS", "TSPX_iut_device_name_in_adv_packet_for_random_address", iut_device_name)), TestFunc(stack.gmcs_init), ] - test_case_name_list = pts.get_test_case_list('GMCS') + test_case_name_list = pts.get_test_case_list('MCS') tc_list = [] for tc_name in test_case_name_list: - instance = ZTestCase("GMCS", tc_name, + instance = ZTestCase("MCS", tc_name, cmds=pre_conditions, generic_wid_hdl=gmcs_wid_hdl) tc_list.append(instance) From faa17b46a335e01bfdc34c971c86b61053c634c1 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Mon, 12 Aug 2024 13:19:52 +0200 Subject: [PATCH 29/44] mynewt: gatt: add btp gap pair command Adding gap_pair in wid_142 fixes various GATT/CL testcases. It may be that PTS used to (before some update) initiate pairing procedure which is why no btp command was used in this handler. 
--- autopts/ptsprojects/mynewt/gatt_client_wid.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/autopts/ptsprojects/mynewt/gatt_client_wid.py b/autopts/ptsprojects/mynewt/gatt_client_wid.py index 6665d59262..aa4a2c56a7 100644 --- a/autopts/ptsprojects/mynewt/gatt_client_wid.py +++ b/autopts/ptsprojects/mynewt/gatt_client_wid.py @@ -43,6 +43,8 @@ def hdl_wid_142(_: WIDParams): Discover all characteristics if needed. """ + btp.gap_pair() + return True From f5d2d927ed02590fe853c31f0657331795bd992f Mon Sep 17 00:00:00 2001 From: Michele Imbriani Date: Wed, 14 Aug 2024 13:33:03 +0200 Subject: [PATCH 30/44] zephyr: fix wrong DIS PIXIT value Fixes issue 1227. --- autopts/ptsprojects/zephyr/dis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autopts/ptsprojects/zephyr/dis.py b/autopts/ptsprojects/zephyr/dis.py index e6afb58b6e..9793484f3b 100644 --- a/autopts/ptsprojects/zephyr/dis.py +++ b/autopts/ptsprojects/zephyr/dis.py @@ -55,7 +55,7 @@ def set_pixits(ptses): pts.set_pixit("DIS", "TSPX_use_dynamic_pin", "FALSE") pts.set_pixit("DIS", "TSPX_delete_ltk", "TRUE") pts.set_pixit("DIS", "TSPX_security_enabled", "FALSE") - pts.set_pixit("MESH", "TSPX_iut_setup_att_over_br_edr", "FALSE") + pts.set_pixit("DIS", "TSPX_iut_setup_att_over_br_edr", "FALSE") pts.set_pixit("DIS", "TSPX_tester_appearance", "0000") From c6f821460fd110faba85ceca56550b62c431bb75 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Mon, 12 Aug 2024 15:06:45 +0200 Subject: [PATCH 31/44] tools: cron: Deep copy a config loaded as module A .py file loaded as a module is cached until it is explicitly removed from sys.modules. So subsequent attempts to load the file only return a reference to the existing module-dictionary. So we need to deep-copy a config.py to prevent overwriting its original values and reuse it e.g. in subsequent PR jobs. 
--- autoptsclient_bot.py | 4 ++-- tools/cron/autopts_bisect.py | 4 ++-- tools/cron/common.py | 3 ++- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/autoptsclient_bot.py b/autoptsclient_bot.py index 9a33adde15..e6d16cbe03 100755 --- a/autoptsclient_bot.py +++ b/autoptsclient_bot.py @@ -1,5 +1,4 @@ #!/usr/bin/env python - # # auto-pts - The Bluetooth PTS Automation Framework # @@ -21,6 +20,7 @@ import threading import time import schedule +import copy from autopts.bot.common import get_absolute_module_path, load_module_from_path from autopts.utils import log_running_threads, have_admin_rights, set_global_end @@ -59,7 +59,7 @@ def import_bot_projects(): return None, config_path module = load_module_from_path(config_path) - return getattr(module, "BotProjects", None), config_path + return copy.deepcopy(getattr(module, "BotProjects", None)), config_path def import_bot_module(project): diff --git a/tools/cron/autopts_bisect.py b/tools/cron/autopts_bisect.py index 411f216de7..5374eb4dff 100644 --- a/tools/cron/autopts_bisect.py +++ b/tools/cron/autopts_bisect.py @@ -24,7 +24,7 @@ If last_bad_commit is empty, then takes HEAD commit. 
""" - +import copy import importlib import os import re @@ -110,7 +110,7 @@ def load_cfg(cfg): raise Exception('{} does not exists!'.format(cfg_path)) mod = load_module_from_path(cfg_path) - return mod.BotProjects[0], cfg_path + return copy.deepcopy(mod.BotProjects[0]), cfg_path def bisect(cfg, test_case, good_commit, bad_commit=''): diff --git a/tools/cron/common.py b/tools/cron/common.py index ec7795cecd..241afe0385 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -24,6 +24,7 @@ $ eval `ssh-agent` $ ssh-add path/to/id_rsa """ +import copy import logging import os import re @@ -233,7 +234,7 @@ def load_config(cfg): if not mod: raise Exception(f'Could not load the config {cfg}') - return mod.BotProjects[0] + return copy.deepcopy(mod.BotProjects[0]) def find_workspace_in_tree(tree_path, workspace, init_depth=4): From c8a64eb2dc7261753b4e689382f70ad989077251 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Fri, 16 Aug 2024 12:00:54 +0200 Subject: [PATCH 32/44] bot: report: add project name This commit implements project name field into report email body. 
--- autopts/bot/common.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index 6bb7bde726..d47c40fc38 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -494,6 +494,7 @@ def start(self, args=None): report_data['start_time_stamp'] = datetime.datetime.fromtimestamp( bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S") + report_data['project_name'] = self.autopts_project_name report_data['status_count'] = stats.get_status_count() report_data['tc_results'] = stats.get_results() report_data['descriptions'] = stats.get_descriptions() @@ -683,7 +684,8 @@ def send_email(self, report_data): descriptions = report_data['descriptions'] - mail_ctx = {'repos_info': report_data['repo_status'], + mail_ctx = {'project_name': report_data['project_name'], + 'repos_info': report_data['repo_status'], 'summary': [mail.status_dict2summary_html(report_data['status_count'])], 'log_url': [], 'board': self.bot_config['auto_pts']['board'], @@ -747,7 +749,7 @@ def compose_mail(self, mail_ctx):

Bluetooth test session - {ww_dd_str}

{additional_info}

1. IUT Setup

-

Type: Zephyr
+

Type: {project_name}
Board: {board}
Source: {repos_info}

2. PTS Setup

From 4224c2c0798caf3fc3e4726c84e125768428cc06 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Wed, 21 Aug 2024 13:06:05 +0200 Subject: [PATCH 33/44] zephyr: Update PTS workspace This enabled few missing features in GAP, GATT and LE Audio profiles. --- .../zephyr/zephyr-master/zephyr-master.pqw6 | 146 +++++++++--------- 1 file changed, 73 insertions(+), 73 deletions(-) diff --git a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 index 3c72570205..3bd7b59770 100755 --- a/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 +++ b/autopts/workspaces/zephyr/zephyr-master/zephyr-master.pqw6 @@ -6865,7 +6865,7 @@ TSPC_CAP_28_7 Change Volume Offset procedure (C.6) - FALSE + TRUE FALSE @@ -6883,13 +6883,13 @@ TSPC_CAP_28_10 Change Microphone Gain Setting procedure (C.8) - FALSE + TRUE FALSE TSPC_CAP_28_11 Find Content Control Service procedure (O) - FALSE + TRUE FALSE @@ -6931,7 +6931,7 @@ TSPC_CAP_31_2 Set Volume Offset (O) - FALSE + TRUE FALSE @@ -6955,7 +6955,7 @@ TSPC_CAP_32_2 Set Gain Setting (C.1) - FALSE + TRUE FALSE @@ -10451,7 +10451,7 @@ TSPC_GAP_17b_4 LE security mode 3 level 3 (C.2) - FALSE + TRUE FALSE @@ -10559,7 +10559,7 @@ TSPC_GAP_20a_6 Security Manager OOB (C.3) - FALSE + TRUE FALSE @@ -10787,7 +10787,7 @@ TSPC_GAP_23_7 Connected Isochronous Stream Terminate procedure (C.1) - FALSE + TRUE FALSE @@ -10955,7 +10955,7 @@ TSPC_GAP_27_7 Writable Appearance (O) - FALSE + TRUE FALSE @@ -12466,7 +12466,7 @@ TSPC_GATT_3_16 Characteristic Value Reliable Writes (O) - FALSE + TRUE FALSE @@ -24052,13 +24052,13 @@ TSPC_PBP_6_1 Transmit Program_Info Metadata (O) - FALSE + TRUE FALSE TSPC_PBP_6_2 BAP Broadcast Audio Stream Metadata Update (C.1) - FALSE + TRUE FALSE @@ -25068,7 +25068,7 @@ TSPC_TBS_21_2 Service supported over LE (C.1) - FALSE + TRUE FALSE @@ -25086,7 +25086,7 @@ TSPC_TBS_22_3 Bearer Provider Name Read Long Support (O) - FALSE + TRUE FALSE @@ -25110,25 +25110,25 @@ TSPC_TBS_22_7 
Bearer URI Schemes Supported List Notification (O) - FALSE + TRUE FALSE TSPC_TBS_22_8 Bearer URI Schemes Supported List Read Long Support (C.1) - FALSE + TRUE FALSE TSPC_TBS_22_9 Bearer Signal Strength Characteristic (O) - FALSE + TRUE FALSE TSPC_TBS_22_10 Bearer Signal Strength Reporting Interval Characteristic (C.2) - FALSE + TRUE FALSE @@ -25140,7 +25140,7 @@ TSPC_TBS_22_12 Bearer List Current Calls Read Long Support (O) - FALSE + TRUE FALSE @@ -25158,13 +25158,13 @@ TSPC_TBS_22_15 Incoming Call Target Bearer URI Characteristic (O) - FALSE + TRUE FALSE TSPC_TBS_22_16 Incoming Call Target Bearer URI Read Long Support (C.3) - FALSE + TRUE FALSE @@ -25176,7 +25176,7 @@ TSPC_TBS_22_18 Call State Read Long Support (O) - FALSE + TRUE FALSE @@ -25206,19 +25206,19 @@ TSPC_TBS_22_23 Incoming Call Read Long Support (O) - FALSE + TRUE FALSE TSPC_TBS_22_24 Call Friendly Name Characteristic (O) - FALSE + TRUE FALSE TSPC_TBS_22_25 Call Friendly Name Read Long Support (C.4) - FALSE + TRUE FALSE @@ -25236,13 +25236,13 @@ TSPC_TBS_23_3 Local Hold (O) - FALSE + TRUE FALSE TSPC_TBS_23_4 Local Retrieve (O) - FALSE + TRUE FALSE @@ -25254,7 +25254,7 @@ TSPC_TBS_23_6 Join (O) - FALSE + TRUE FALSE @@ -25296,13 +25296,13 @@ TSPC_TBS_24_7 GATT Server over LE (C.2) - FALSE + TRUE FALSE TSPC_TBS_25_1 SDP record present for GTBS (M) - TRUE + FALSE TRUE @@ -25452,7 +25452,7 @@ TSPC_TMAP_1_1 Call Gateway (CG) (C.1) - FALSE + TRUE FALSE @@ -25524,217 +25524,217 @@ TSPC_TMAP_10_1 TMAP v1.0 (M) - FALSE + TRUE TRUE TSPC_TMAP_12_1 Common Audio Profile (M) - FALSE + TRUE TRUE TSPC_TMAP_12_2 Basic Audio Profile (M) - FALSE + TRUE TRUE TSPC_TMAP_12_3 Call Control Profile (M) - FALSE + TRUE TRUE TSPC_TMAP_13_1 Advertise Call Gateway Support (O) - FALSE + TRUE FALSE TSPC_TMAP_14_1 CAP Initiator (M) - FALSE + TRUE TRUE TSPC_TMAP_14_2 CAP Commander (M) - FALSE + TRUE TRUE TSPC_TMAP_14_3 CCP Call Control Server (M) - FALSE + TRUE TRUE TSPC_TMAP_14_4 VCP Volume Controller (M) - FALSE + TRUE TRUE 
TSPC_TMAP_14_5 Audio Source (M) - FALSE + TRUE TRUE TSPC_TMAP_14_6 Audio Sink (M) - FALSE + TRUE TRUE TSPC_TMAP_14_7 BAP Unicast Client (M) - FALSE + TRUE TRUE TSPC_TMAP_15_1 16_1 LC3: 16 kHz Sampling Frequency, 7.5 ms Frame Duration, 30 Octets (Audio Source) (O) - FALSE + TRUE FALSE TSPC_TMAP_15_2 32_1 LC3: 32 kHz Sampling Frequency, 7.5 ms Frame Duration, 60 Octets (Audio Source) (O) - FALSE + TRUE FALSE TSPC_TMAP_15_3 32_2 LC3: 32 kHz Sampling Frequency, 10 ms Frame Duration, 80 Octets (Audio Source) (M) - FALSE + TRUE TRUE TSPC_TMAP_15_4 16_1 LC3: 16 kHz Sampling Frequency, 7.5 ms Frame Duration, 30 Octets (Audio Sink) (O) - FALSE + TRUE FALSE TSPC_TMAP_15_5 32_1 LC3: 32 kHz Sampling Frequency, 7.5 ms Frame Duration, 60 Octets (Audio Sink) (O) - FALSE + TRUE FALSE TSPC_TMAP_15_6 32_2 LC3: 32 kHz Sampling Frequency, 10 ms Frame Duration, 80 Octets (Audio Sink) (M) - FALSE + TRUE TRUE TSPC_TMAP_16_1 16_1_1 LC3: 7500 SDU Interval, unframed, 30 Max SDU Size, 2 RTN, 8 Max_Transport_Latency (Audio Source) (C.1) - FALSE + TRUE FALSE TSPC_TMAP_16_2 32_1_1 LC3: 7500 SDU Interval, unframed, 60 Max SDU Size, 2 RTN, 8 Max_Transport_Latency (Audio Source) (C.2) - FALSE + TRUE FALSE TSPC_TMAP_16_3 32_2_1 LC3: 10000 SDU Interval, unframed, 80 Max SDU Size, 2 RTN, 10 Max_Transport_Latency (Audio Source) (M) - FALSE + TRUE TRUE TSPC_TMAP_16_4 16_1_1 LC3: 7500 SDU Interval, unframed, 30 Max SDU Size, 2 RTN, 8 Max_Transport_Latency (Audio Sink) (C.3) - FALSE + TRUE FALSE TSPC_TMAP_16_5 32_1_1 LC3: 7500 SDU Interval, unframed, 60 Max SDU Size, 2 RTN, 8 Max_Transport_Latency (Audio Sink) (C.4) - FALSE + TRUE FALSE TSPC_TMAP_16_6 32_2_1 LC3: 10000 SDU Interval, unframed, 80 Max SDU Size, 2 RTN, 10 Max_Transport_Latency (Audio Sink) (M) - FALSE + TRUE TRUE TSPC_TMAP_17_1 Configuration A: 1 bidirectional stream between CG and CT (M) - FALSE + TRUE TRUE TSPC_TMAP_17_2 Configuration B: 1 stream with 1 channel from CG to CT, 1 stream with 1 channel from 2nd CT to CG. 
(M) - FALSE + TRUE TRUE TSPC_TMAP_17_3 Configuration C: 1 stream with 1 channel from CG to CT, 1 bidirectional stream between CG and a 2nd CT (M) - FALSE + TRUE TRUE TSPC_TMAP_17_4 Configuration D: 1 stream with 1 channel from CG to CT, 1 bidirectional stream between CG and same CT (M) - FALSE + TRUE TRUE TSPC_TMAP_17_5 Configuration E: 1 bidirectional stream with 2 channels from CG to CT (O) - FALSE + TRUE FALSE TSPC_TMAP_17_6 Configuration F: 2 bidirectional streams between CG and CT (O) - FALSE + TRUE FALSE TSPC_TMAP_17_7 Configuration G: 1 bidirectional stream from CG to CT, 1 bidirectional stream between CG and a 2nd CT (O) - FALSE + TRUE FALSE TSPC_TMAP_18_1 Sink Audio Locations characteristic (M) - FALSE + TRUE TRUE TSPC_TMAP_18_2 Source Audio Locations characteristic (M) - FALSE + TRUE TRUE TSPC_TMAP_19_1 Front Left Audio Location (O) - FALSE + TRUE FALSE TSPC_TMAP_19_2 Front Right Audio Location (O) - FALSE + TRUE FALSE TSPC_TMAP_19_3 Front Right and Front Left Audio Locations (C.1) - FALSE + TRUE FALSE @@ -26040,7 +26040,7 @@ TSPC_TMAP_54_2 CAP Commander (C.1) - FALSE + TRUE FALSE @@ -26238,7 +26238,7 @@ TSPC_TMAP_73_1 Advertise Call Terminal Support (O) - FALSE + TRUE FALSE @@ -26250,7 +26250,7 @@ TSPC_TMAP_74_2 CAP Commander (O) - FALSE + TRUE FALSE @@ -26424,7 +26424,7 @@ TSPC_TMAP_94_2 CAP Commander (O) - FALSE + TRUE FALSE @@ -26676,7 +26676,7 @@ TSPC_TMAP_114_2 CAP Commander (O) - FALSE + TRUE FALSE @@ -26994,7 +26994,7 @@ TSPC_TMAP_153_1 Call Gateway Support (C.1) - FALSE + TRUE FALSE From 255428621f4ef073d516ec12feef0b6854e88341 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ayt=C3=BCrk=20D=C3=BCzen?= Date: Thu, 15 Aug 2024 15:02:23 +0200 Subject: [PATCH 34/44] boards: add nRF54H20 device nRF54H20 SoC device added --- autopts/ptsprojects/boards/nrf54h.py | 55 ++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 autopts/ptsprojects/boards/nrf54h.py diff --git a/autopts/ptsprojects/boards/nrf54h.py 
b/autopts/ptsprojects/boards/nrf54h.py new file mode 100644 index 0000000000..6910a3b5e6 --- /dev/null +++ b/autopts/ptsprojects/boards/nrf54h.py @@ -0,0 +1,55 @@ +# +# auto-pts - The Bluetooth PTS Automation Framework +# +# Copyright (c) 2024, Nordic Semiconductor ASA. +# Copyright (c) 2024, Codecoup. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. +# +import logging +import os + +from autopts.bot.common import check_call + +supported_projects = ['zephyr'] + +board_type = 'nrf54h20dk/nrf54h20/cpuapp' + + +def reset_cmd(iutctl): + """Return reset command for nRF54H DUT + + Dependency: nRF54H command line tools + """ + return f'nrfutil device reset --reset-kind RESET_PIN --serial-number {iutctl.debugger_snr}' + + +def build_and_flash(zephyr_wd, board, debugger_snr, conf_file=None, *args): + """Build and flash Zephyr binary + :param zephyr_wd: Zephyr source path + :param board: IUT + :param debugger_snr serial number + :param conf_file: configuration file to be used + """ + logging.debug("%s: %s %s %s", build_and_flash.__name__, zephyr_wd, + board, conf_file) + tester_dir = os.path.join(zephyr_wd, "tests", "bluetooth", "tester") + + check_call('rm -rf build/'.split(), cwd=tester_dir) + + cmd = ['west', 'build', '-p', 'auto', '-b', board] + if conf_file and conf_file not in ['default', 'prj.conf']: + if 'audio' in conf_file: + conf_file += ';overlay-le-audio-ctlr.conf' + cmd.extend(('--', f'-DOVERLAY_CONFIG=\'{conf_file}\'')) + + check_call(cmd, cwd=tester_dir) + check_call(['west', 'flash', '--skip-rebuild', + '-i', debugger_snr], cwd=tester_dir) From 
5f1e424c4d469199962de88af78eec66f19dc1fd Mon Sep 17 00:00:00 2001 From: Frode van der Meeren Date: Wed, 20 Dec 2023 16:53:29 +0100 Subject: [PATCH 35/44] zephyr: Add support for the nRF5340 Audio devkit Adds the nRF5340 audio devkit board, with capabilities to build both application and network cores with configurable separate builds. --- autopts/bot/common.py | 2 +- autopts/bot/zephyr.py | 8 ++- autopts/ptsprojects/boards/__init__.py | 23 +++++-- autopts/ptsprojects/boards/nrf53_audio.py | 81 +++++++++++++++++++++++ autopts/ptsprojects/zephyr/iutctl.py | 1 + 5 files changed, 106 insertions(+), 9 deletions(-) create mode 100644 autopts/ptsprojects/boards/nrf53_audio.py diff --git a/autopts/bot/common.py b/autopts/bot/common.py index d47c40fc38..b8512b4e87 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -169,7 +169,7 @@ def parse_or_find_tty(self, args): if args.debugger_snr is None: args.tty_file, args.debugger_snr = get_free_device(args.board_name) else: - args.tty_file = get_tty(args.debugger_snr) + args.tty_file = get_tty(args.debugger_snr, args.board_name) if args.tty_file is None: log('TTY mode: No free device found') diff --git a/autopts/bot/zephyr.py b/autopts/bot/zephyr.py index b02c695ba2..669326c7dd 100755 --- a/autopts/bot/zephyr.py +++ b/autopts/bot/zephyr.py @@ -62,10 +62,12 @@ def apply_overlay(zephyr_wd, cfg_name, overlay): :param overlay: defines changes to be applied :return: None """ - tester_app_dir = os.path.join(zephyr_wd, "tests", "bluetooth", "tester") + tester_app_dir = os.getenv("AUTOPTS_SOURCE_DIR_APP") + if tester_app_dir is None: + tester_app_dir = os.path.join("tests", "bluetooth", "tester") cwd = os.getcwd() - os.chdir(tester_app_dir) + os.chdir(os.path.join(zephyr_wd, tester_app_dir)) with open(cfg_name, 'w') as config: for k, v in list(overlay.items()): @@ -114,7 +116,7 @@ def apply_config(self, args, config, value): configs = [] for name in pre_overlay + [config] + post_overlay: if name in self.iut_config and 
'overlay' in self.iut_config[name] \ - and len(self.iut_config[name]['overlay']): + and len(self.iut_config[name]['overlay']) and name != 'prj.conf': apply_overlay(args.project_path, name, self.iut_config[name]['overlay']) elif not os.path.exists(os.path.join(args.project_path, "tests", "bluetooth", "tester", name)): diff --git a/autopts/ptsprojects/boards/__init__.py b/autopts/ptsprojects/boards/__init__.py index 968a28c835..23385fe055 100644 --- a/autopts/ptsprojects/boards/__init__.py +++ b/autopts/ptsprojects/boards/__init__.py @@ -171,12 +171,22 @@ def get_free_device(board=None): """Returns tty path and jlink serial number of a free device.""" devices = get_device_list() + ret_snr = None + ret_tty = None + for tty, snr in devices.items(): if tty not in devices_in_use and len(snr) >= 9 and snr.isnumeric(): - devices_in_use.append(tty) - return tty, snr + ret_snr = snr + ret_tty = tty + # Opposite enumeration-order for TTY to coproccessor cores on nRF5340 Audio devkit. + if board != 'nrf53_audio': + break + + if ret_tty is not None: + devices_in_use.append(ret_tty) - return None, None + log("Got free rtt for device {}: {}".format(ret_snr, ret_tty)) + return ret_tty, ret_snr def get_debugger_snr(tty): @@ -195,7 +205,7 @@ def get_debugger_snr(tty): return jlink -def get_tty(debugger_snr): +def get_tty(debugger_snr, board=None): """Return tty or COM of the device with given serial number. """ tty = None @@ -204,8 +214,11 @@ def get_tty(debugger_snr): for dev in devices.keys(): if devices[dev] == debugger_snr: tty = dev - break + # Opposite enumeration-order for TTY to coproccessor cores on nRF5340 Audio devkit. 
+ if board != 'nrf53_audio': + break + log("Got tty for device {}: {}".format(debugger_snr, tty)) return tty diff --git a/autopts/ptsprojects/boards/nrf53_audio.py b/autopts/ptsprojects/boards/nrf53_audio.py new file mode 100644 index 0000000000..51dd8e649f --- /dev/null +++ b/autopts/ptsprojects/boards/nrf53_audio.py @@ -0,0 +1,81 @@ +# +# auto-pts - The Bluetooth PTS Automation Framework +# +# Copyright (c) 2024, Nordic Semiconductor ASA. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. +# + +import os + +from .nrf5x import * +from autopts.bot.common import check_call + +board_type = 'nrf5340_audio_dk_nrf5340_cpuapp' + + +def build_and_flash_core(zephyr_wd, build_dir, board, debugger_snr, configs, recover = False): + build_dir = os.path.join(zephyr_wd, build_dir) + check_call('rm -rf build/'.split(), cwd=build_dir) + + overlay = '-- -DCMAKE_C_FLAGS="-Werror"' + for conf in configs: + overlay += f' -D{conf}' + cmd = ['west', 'build', '-b', board] + cmd.extend(overlay.split()) + check_call(cmd, cwd=build_dir) + + build_name = str(build_dir).split('/')[-1] + check_call("rm ./build_{}.zip || exit 0".format(build_name).split(), cwd=zephyr_wd) + check_call("zip -r {}/build_{}.zip build -i '*.hex' '*.config'".format(zephyr_wd, build_name).split(), cwd=build_dir) + + cmd = ['west', 'flash', '--skip-rebuild', '-i', debugger_snr] + if recover: + cmd.append('--recover') + check_call(cmd, cwd=build_dir) + +def build_and_flash(zephyr_wd, board, debugger_snr, conf_file=None, *args): + """Build and flash Zephyr binary + :param zephyr_wd: Zephyr source path + :param board: IUT + :param 
debugger_snr serial number + :param conf_file: configuration file to be used + """ + source_dir = os.getenv("AUTOPTS_SOURCE_DIR_APP") + if source_dir is None: + source_dir = os.path.join('tests', 'bluetooth', 'tester') + + logging.debug("%s: %s %s %s %s", build_and_flash.__name__, zephyr_wd, + board, conf_file, source_dir) + + app_core_configs = [] + if conf_file and conf_file != 'default' and conf_file != 'prj.conf': + app_core_configs = [f'OVERLAY_CONFIG=\'{conf_file}\''] + + build_and_flash_core(zephyr_wd, + source_dir, + board, + debugger_snr, + app_core_configs, + True) + + config_dir_net = os.getenv("AUTOPTS_SOURCE_DIR_NET") + if config_dir_net is None: + net_core_configs = [f'OVERLAY_CONFIG=\'nrf5340_cpunet_iso-bt_ll_sw_split.conf;' + f'../../../tests/bluetooth/tester/nrf5340_hci_ipc_cpunet.conf\''] + else: + conf_path = os.path.join(zephyr_wd, config_dir_net, 'hci_ipc.conf') + net_core_configs = [f'OVERLAY_CONFIG=\'{conf_path}\''] + + build_and_flash_core(zephyr_wd, + os.path.join('samples', 'bluetooth', 'hci_ipc'), + 'nrf5340_audio_dk_nrf5340_cpunet', + debugger_snr, + net_core_configs) diff --git a/autopts/ptsprojects/zephyr/iutctl.py b/autopts/ptsprojects/zephyr/iutctl.py index 74e3b167ca..209dca1476 100644 --- a/autopts/ptsprojects/zephyr/iutctl.py +++ b/autopts/ptsprojects/zephyr/iutctl.py @@ -205,6 +205,7 @@ def rtt_logger_start(self): def rtt_logger_stop(self): if self.rtt_logger: + time.sleep(0.1) # Make sure all logs have been collected, in case test failed early. self.rtt_logger.stop() def wait_iut_ready_event(self, reset=True): From 528a86f47b49f8f6be7e1d0f628d350ad79e9d1f Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 12:29:20 +0200 Subject: [PATCH 36/44] bot: Reuse session folder between config runs Each call to run_test_cases created a separate session folder. Bot enters this function after each rebuild and flash, so many of these folders were created, making it difficult to find logs of a specific test case. 
--- autopts/bot/common.py | 1 + autopts/client.py | 25 +++++++++++++++---------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index b8512b4e87..bcf0210667 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -383,6 +383,7 @@ def run_test_cases(self): config_args.retry, self.test_case_database, xml_results_file=self.file_paths['TC_STATS_RESULTS_XML_FILE']) + stats.session_log_dir = all_stats.session_log_dir if self.args.use_backup: self._backup_tc_stats(config=config, test_case=None, stats=stats) diff --git a/autopts/client.py b/autopts/client.py index c28da14f69..4f537b9754 100755 --- a/autopts/client.py +++ b/autopts/client.py @@ -576,6 +576,7 @@ def __init__(self, projects, test_cases, retry_count, db=None, self.pending_config = None self.pending_test_case = None self.test_run_completed = False + self.session_log_dir = None if self.xml_results and not os.path.exists(self.xml_results): os.makedirs(dirname(self.xml_results), exist_ok=True) @@ -612,6 +613,7 @@ def merge(self, stats2): self.est_duration = self.est_duration + stats2.est_duration self.pending_config = stats2.pending_config self.pending_test_case = stats2.pending_test_case + self.session_log_dir = stats2.session_log_dir stats2_tree = ElementTree.parse(stats2.xml_results) root2 = stats2_tree.getroot() @@ -1214,16 +1216,19 @@ def get_test_cases(pts, test_cases, excluded): def run_test_cases(ptses, test_case_instances, args, stats, **kwargs): """Runs a list of test cases""" - - ports_str = '_'.join(str(x) for x in args.cli_port) - now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") - logs_folder = kwargs["file_paths"]["IUT_LOGS_DIR"] - session_log_dir = f'{logs_folder}/cli_port_{ports_str}/{now}' - try: - os.makedirs(session_log_dir) - except OSError as e: - if e.errno != errno.EEXIST: - raise + session_log_dir = stats.session_log_dir + + if not session_log_dir: + ports_str = '_'.join(str(x) for x in args.cli_port) + now 
= datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + logs_folder = kwargs["file_paths"]["IUT_LOGS_DIR"] + session_log_dir = f'{logs_folder}/cli_port_{ports_str}/{now}' + stats.session_log_dir = session_log_dir + try: + os.makedirs(session_log_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise test_cases = args.test_cases retry_config = getattr(args, 'retry_config', None) From f5bb7595e01fa6b12e651e8d889eaccdf381ac25 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 12:34:26 +0200 Subject: [PATCH 37/44] bot: Move stats files after bot run In backup mode, these files have to be moved/deleted to start fresh bot start. Cron does this automatically, but without the cron, one had to do it manually. --- autopts/bot/common.py | 49 +++++++++++++++++++++++++++++++------------ autopts/config.py | 1 + 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index bcf0210667..67fa1193e5 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -281,21 +281,27 @@ def bot_pre_cleanup(self): """Perform cleanup before test run :return: None """ - try: - files_to_save = [ - self.file_paths['TMP_DIR'], - self.file_paths['IUT_LOGS_DIR'], - ] + files_to_save = [ + self.file_paths['TMP_DIR'], + self.file_paths['IUT_LOGS_DIR'], + ] - save_dir = os.path.join(self.file_paths['OLD_LOGS_DIR'], - datetime.datetime.now().strftime("%Y_%m_%d_%H_%M")) - Path(save_dir).mkdir(parents=True, exist_ok=True) + save_dir = os.path.join(self.file_paths['OLD_LOGS_DIR'], + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M")) + save_files(files_to_save, save_dir) + + def bot_post_cleanup(self): + files_to_save = [ + self.file_paths['ALL_STATS_RESULTS_XML_FILE'], + self.file_paths['TC_STATS_RESULTS_XML_FILE'], + self.file_paths['TEST_CASES_JSON_FILE'], + self.file_paths['ALL_STATS_JSON_FILE'], + self.file_paths['TC_STATS_JSON_FILE'], + self.file_paths['BOT_STATE_JSON_FILE'], + ] - for file_path in 
files_to_save: - if os.path.exists(file_path): - shutil.move(file_path, os.path.join(save_dir, os.path.basename(file_path))) - except OSError as e: - pass + save_dir = self.file_paths['BOT_STATE_DIR'] + save_files(files_to_save, save_dir) def _yield_next_config(self): limit_counter = 0 @@ -548,6 +554,8 @@ def start(self, args=None): if 'mail' in self.bot_config: self.send_email(report_data) + self.bot_post_cleanup() + print("Done") def run_tests(self): @@ -943,3 +951,18 @@ def load_module_from_path(cfg): sys.path.remove(config_dirname) return module + + +def save_files(files_to_save, save_dir: str): + try: + for file_path in files_to_save: + if os.path.exists(file_path): + Path(save_dir).mkdir(parents=True, exist_ok=True) + break + + for file_path in files_to_save: + if os.path.exists(file_path): + dst_file_path = os.path.join(save_dir, os.path.basename(file_path)) + shutil.move(file_path, dst_file_path) + except OSError as e: + pass diff --git a/autopts/config.py b/autopts/config.py index dacb17dbd0..c6582f0d5e 100644 --- a/autopts/config.py +++ b/autopts/config.py @@ -44,6 +44,7 @@ def generate_file_paths(file_paths=None, autopts_root_dir=AUTOPTS_ROOT_DIR): 'TC_STATS_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'tc_stats.json'), 'TEST_CASE_DB_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'TestCase.db'), 'BOT_STATE_JSON_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'bot_state.json'), + 'BOT_STATE_DIR': os.path.join(FILE_PATHS['TMP_DIR'], 'final_state'), 'REPORT_README_MD_FILE': os.path.join(FILE_PATHS['TMP_DIR'], 'README.md'), 'REPORT_DIR': os.path.join(FILE_PATHS['TMP_DIR'], 'autopts_report'), 'IUT_LOGS_DIR': os.path.join(autopts_root_dir, 'logs'), From 4cf377b630cdd4110726511c0b72572dafb1eb41 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 12:39:39 +0200 Subject: [PATCH 38/44] bot: Fix missing result of test case with timeout result If a test case triggered cron timeout and bot restart at its first attempt, its result was missing in stats. 
--- autopts/bot/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index 67fa1193e5..3c055a94e6 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -214,10 +214,10 @@ def load_backup_of_previous_run(self): tc_index = test_cases_per_config[continue_config].index(continue_test_case) test_cases_per_config[continue_config] = test_cases_per_config[continue_config][tc_index + 1:] self.backup['tc_stats'].index += 1 + self.backup['tc_stats'].update(continue_test_case, 0, 'TIMEOUT') if not test_cases_per_config[continue_config]: # The faulty test case was the last one in the config. Move to the next config - self.backup['tc_stats'].update(continue_test_case, 0, 'TIMEOUT') self._merge_stats(self.backup['all_stats'], self.backup['tc_stats']) self.backup['all_stats'].save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) self.backup['tc_stats'] = None From 7d0bdd888d71efe742419e737887c77f3a608554 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 12:59:49 +0200 Subject: [PATCH 39/44] cron: Fix missing descriptions in report emails If the autopts bot was pulling logs from server longer than configured cron timeout, cron restarted the bot in the middle. Because stats were not saved after downloading the test case description from PTS, after restarting the bot, it did not have them available when creating a report. 
--- autopts/bot/common.py | 2 ++ tools/cron/common.py | 27 ++++++++++++++++++++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index 3c055a94e6..653d5a554f 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -443,6 +443,8 @@ def run_test_cases(self): all_stats.pts_ver = str(self.ptses[0].get_version()) all_stats.platform = str(self.ptses[0].get_system_model()) all_stats.system_version = str(self.ptses[0].get_system_version()) + if self.args.use_backup: + all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) except: log('Failed to generate some stats.') diff --git a/tools/cron/common.py b/tools/cron/common.py index 241afe0385..f6be1c7b66 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -25,6 +25,7 @@ $ ssh-add path/to/id_rsa """ import copy +import json import logging import os import re @@ -585,9 +586,11 @@ def _restart_processes(config): def _run_test(config): + test_cases_completed = False backup = config['auto_pts'].get('use_backup', False) timeguard = config['cron']['test_run_timeguard'] results_file_path = config['file_paths']['TC_STATS_JSON_FILE'] + all_stats_file_path = config['file_paths']['ALL_STATS_JSON_FILE'] report_file_path = config['file_paths']['REPORT_TXT_FILE'] srv_process, bot_process = _start_processes(config, checkout_repos=True) @@ -616,7 +619,7 @@ def _run_test(config): current_time = time() - if not os.path.exists(results_file_path): + if not test_cases_completed and not os.path.exists(results_file_path): if timedelta(seconds=current_time - last_check_time) > timedelta(seconds=timeguard): log("Test run has not been started on time. 
Restarting processes...") srv_process, bot_process = _restart_processes(config) @@ -625,10 +628,24 @@ def _run_test(config): last_check_time = current_time - if timedelta(seconds=current_time - os.path.getmtime(results_file_path)) > timedelta(seconds=timeguard): - log("Test run results have not been updated for a while. Restarting processes...") - srv_process, bot_process = _restart_processes(config) - sleep_job(config['cron']['cancel_job'], timeguard) + if (not test_cases_completed and + timedelta(seconds=current_time - os.path.getmtime(results_file_path)) > timedelta(seconds=timeguard)): + if os.path.exists(all_stats_file_path): + try: + with open(all_stats_file_path, 'r') as f: + data = json.load(f) + test_cases_completed = data.get('test_run_completed', False) + except BaseException as e: + log(e) + + # Do not restart bot if test_run_completed, because pulling PTS logs at the end + # of the bot run takes a while, and it should not be interrupted. + if test_cases_completed: + log("Bot completed running the test cases. Waiting for report to be generated ...") + else: + log("Test run results have not been updated for a while. 
Restarting processes...") + srv_process, bot_process = _restart_processes(config) + sleep_job(config['cron']['cancel_job'], timeguard) def run_test(config): From a603aee21f636f983297595dd315a4f78e39fa65 Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 12:55:17 +0200 Subject: [PATCH 40/44] cron: Do not create empty save-folders --- tools/cron/common.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/tools/cron/common.py b/tools/cron/common.py index f6be1c7b66..debcb2c360 100644 --- a/tools/cron/common.py +++ b/tools/cron/common.py @@ -50,7 +50,7 @@ from autopts.utils import get_global_end, terminate_process from tools.cron.autopts_bisect import Bisect, set_run_test_fun -from autopts.bot.common import load_module_from_path +from autopts.bot.common import load_module_from_path, save_files from autopts.bot.common_features.github import update_repos from autopts.bot.common_features.mail import send_mail from autopts.config import generate_file_paths, FILE_PATHS @@ -351,10 +351,7 @@ def pre_cleanup_files(config): else: shutil.rmtree(file_path, ignore_errors=True) - for file in files_to_save: - file_path = os.path.join(autopts_repo, file) - if os.path.exists(file_path): - shutil.move(file_path, os.path.join(save_dir, os.path.basename(file_path))) + save_files(files_to_save, save_dir) except: pass From 951097a01d732323e4246ae57c869c8bc480d07f Mon Sep 17 00:00:00 2001 From: Magdalena Kasenberg Date: Fri, 23 Aug 2024 13:23:21 +0200 Subject: [PATCH 41/44] boards: Add config for building/flashing only appcore of nrf53 If someone would like to use a different controller and make the bot to build/flash only appcore of nrf53 board, all they need to do from now is to set this option "board": "nrf53_appcore" in the bot's config.py file. 
--- autopts/ptsprojects/boards/nrf53_appcore.py | 43 +++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 autopts/ptsprojects/boards/nrf53_appcore.py diff --git a/autopts/ptsprojects/boards/nrf53_appcore.py b/autopts/ptsprojects/boards/nrf53_appcore.py new file mode 100644 index 0000000000..d9d03e6759 --- /dev/null +++ b/autopts/ptsprojects/boards/nrf53_appcore.py @@ -0,0 +1,43 @@ +# +# auto-pts - The Bluetooth PTS Automation Framework +# +# Copyright (c) 2023, Codecoup. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. +# + +from .nrf5x import * +from autopts.bot.common import check_call + +board_type = 'nrf5340dk/nrf5340/cpuapp' + + +def build_and_flash(zephyr_wd, board, debugger_snr, conf_file=None, *args): + """Build and flash Zephyr binary + :param zephyr_wd: Zephyr source path + :param board: IUT + :param debugger_snr serial number + :param conf_file: configuration file to be used + """ + logging.debug("%s: %s %s %s", build_and_flash.__name__, zephyr_wd, + board, conf_file) + + tester_dir = os.path.join(zephyr_wd, 'tests', 'bluetooth', 'tester') + + check_call('rm -rf build/'.split(), cwd=tester_dir) + + bttester_overlay = 'nrf5340_hci_ipc.conf' + + if conf_file and conf_file != 'default' and conf_file != 'prj.conf': + bttester_overlay += f';{conf_file}' + + cmd = ['west', 'build', '-b', board, '--', f'-DOVERLAY_CONFIG=\'{bttester_overlay}\''] + check_call(cmd, cwd=tester_dir) + check_call(['west', 'flash', '--skip-rebuild', '--recover', '-i', debugger_snr], cwd=tester_dir) From a3e0ecfb8b9dc95686d0f45491fd0280fc4abe8b Mon Sep 17 00:00:00 2001 
From: Magdalena Kasenberg Date: Mon, 26 Aug 2024 17:04:33 +0200 Subject: [PATCH 42/44] bot: Fix exception at fetching description of GMCS and GTBS test cases --- autopts/bot/common.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/autopts/bot/common.py b/autopts/bot/common.py index 653d5a554f..11668f3ebd 100644 --- a/autopts/bot/common.py +++ b/autopts/bot/common.py @@ -432,12 +432,18 @@ def run_test_cases(self): all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) try: + mapping = {'GMCS': 'MCS', + 'GTBS': 'TBS'} results = all_stats.get_results() descriptions = {} for test_case_name in list(results.keys()): - project_name = test_case_name.split('/')[0] - descriptions[test_case_name] = \ - self.ptses[0].get_test_case_description(project_name, test_case_name) + try: + project_name = test_case_name.split('/')[0] + project_name = mapping.get(project_name, project_name) + descriptions[test_case_name] = \ + self.ptses[0].get_test_case_description(project_name, test_case_name) + except: + log(f'Failed to get description of {test_case_name}') all_stats.update_descriptions(descriptions) all_stats.pts_ver = str(self.ptses[0].get_version()) @@ -445,8 +451,8 @@ def run_test_cases(self): all_stats.system_version = str(self.ptses[0].get_system_version()) if self.args.use_backup: all_stats.save_to_backup(self.file_paths['ALL_STATS_JSON_FILE']) - except: - log('Failed to generate some stats.') + except BaseException as e: + log(f'Failed to generate some stats, {e}.') return all_stats From 727efbf9aa7cc7b97249dcc1572056791a4d2274 Mon Sep 17 00:00:00 2001 From: Szymon Janc Date: Mon, 26 Aug 2024 16:52:17 +0200 Subject: [PATCH 43/44] zephyr: Fix multiple CAP/INI/UST tests CAP WIDs are using MICP and VCP commands and thus shall initialize required BTP services. 
--- autopts/ptsprojects/zephyr/cap.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/autopts/ptsprojects/zephyr/cap.py b/autopts/ptsprojects/zephyr/cap.py index 338760deea..38b33d0448 100644 --- a/autopts/ptsprojects/zephyr/cap.py +++ b/autopts/ptsprojects/zephyr/cap.py @@ -158,8 +158,12 @@ def set_addr(addr): TestFunc(stack.ascs_init), TestFunc(stack.bap_init), TestFunc(stack.cap_init), + TestFunc(stack.micp_init), + TestFunc(stack.vcp_init), TestFunc(btp.core_reg_svc_cap), TestFunc(btp.core_reg_svc_cas), + TestFunc(btp.core_reg_svc_micp), + TestFunc(btp.core_reg_svc_vcp), TestFunc(btp.gap_set_extended_advertising_on), # Gives a signal to the LT2 to continue its preconditions TestFunc(lambda: set_addr(stack.gap.iut_addr_get_str())), From 61d1d01a5087d30b3dfab7ca28358326d0ffbe00 Mon Sep 17 00:00:00 2001 From: Piotr Narajowski Date: Tue, 13 Aug 2024 15:04:35 +0200 Subject: [PATCH 44/44] mynewt: gatt_cl: add mynewt specific wid handler Currently there is no way to access included service handle in Nimble with find included services procedure. We still can use other values that we have access to, to compare them with values from MMI which should be enough to pass test GATT/CL/GAD/BV-03-C. 
--- autopts/ptsprojects/mynewt/gatt_client_wid.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/autopts/ptsprojects/mynewt/gatt_client_wid.py b/autopts/ptsprojects/mynewt/gatt_client_wid.py index aa4a2c56a7..e56d0d9be0 100644 --- a/autopts/ptsprojects/mynewt/gatt_client_wid.py +++ b/autopts/ptsprojects/mynewt/gatt_client_wid.py @@ -17,6 +17,7 @@ import socket from autopts.ptsprojects.stack import get_stack +from autopts.ptsprojects.testcase import MMI from autopts.pybtp import btp from autopts.pybtp.types import WIDParams from autopts.wid import generic_wid_hdl @@ -36,6 +37,49 @@ def gattc_wid_hdl(wid, description, test_case_name): [__name__, 'autopts.ptsprojects.mynewt.gatt_wid', 'autopts.wid.gatt']) + +def hdl_wid_24(params: WIDParams): + """ + Please confirm IUT received include services: + Attribute Handle = '0002'O + Included Service Attribute handle = '0080'O, + End Group Handle = '0085'O, + Service UUID = 'A00B'O + Attribute Handle = '0021'O + Included Service Attribute handle = '0001'O, + End Group Handle = '0006'O, + Service UUID = 'A00D'O + Attribute Handle = '0091'O + Included Service Attribute handle = '0001'O + End Group Handle = '0006'O, + Service UUID = 'A00D'O + + Click Yes if IUT received it, otherwise click No. + + Description: Verify that the Implementation Under Test (IUT) can send + Discover all include services in database. + """ + MMI.reset() + MMI.parse_description(params.description) + + if not MMI.args: + return False + + # split MMI args into tuples (att_hdl, incl_svc_hdl, end_gp_hdl, svc_uuid) + mmi_args_tupled = [] + for i in range(0, len(MMI.args), 4): + mmi_args_tupled.append(tuple(MMI.args[i:i + 4])) + + stack = get_stack() + # TODO: there is no way to access included service handle with current API.
+ # For now, we skip this value when comparing find included services results + # with description + incl_svcs = [tup[1:] for tup in stack.gatt_cl.incl_svcs] + mmi_args = [tup[1:] for tup in mmi_args_tupled] + + return set(incl_svcs).issubset(set(mmi_args)) + + def hdl_wid_142(_: WIDParams): """ Please send an ATT_Write_Request to Client Support Features handle = '0015'O with 0x02 to enable Enhanced ATT.