
twister: rework board handling

We now use hwmv2 to list boards instead of relying on twister-specific
config files.
A single yaml file (twister.yaml for now) holds all the data needed for
all possible targets and variations of a board, reusing most of the data
where possible; variations can override the top-level data.
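
For illustration, a minimal sketch of what such a twister.yaml could look
like, using only fields read by Platform.load() in this change (the board
"myboard", SoC "mysoc" and the "smp" qualifier are hypothetical names):

# twister.yaml (hypothetical board "myboard" with a single SoC "mysoc")
type: mcu
arch: arm
ram: 128          # default RAM (K) for all targets of this board
flash: 512        # default flash (K) for all targets of this board
testing:
  default: true
variants:
  # keys are matched against the target name/aliases; any field set here
  # overrides the top-level default for that particular target only
  myboard/mysoc/smp:
    ram: 256
    testing:
      default: false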

Twister keeps track of 'aliases' of boards and recognizes that, for
example, native_sim is the same as native_sim/native, so either name can
be used in test yaml files or on the command line. However, reporting
always uses the full name, so there is no confusion about what is being
tested/built.
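
As a small illustrative example (the scenario name is hypothetical), a
testcase.yaml entry could use the short alias and still resolve to the
fully qualified target:

tests:
  sample.alias.demo:                # hypothetical scenario name
    platform_allow: native_sim      # alias; equivalent to native_sim/native

On the command line, -p native_sim and -p native_sim/native would likewise
select the same platform, and reports would list it as native_sim/native.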

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
pull/80264/head
Anas Nashif, 11 months ago
commit dfc7860ab1
  1. boards/arm/fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a_fvp_base_revc_2xaemv8a_smp_ns.yaml (2 changes)
  2. scripts/pylib/twister/twisterlib/config_parser.py (4 changes)
  3. scripts/pylib/twister/twisterlib/platform.py (75 changes)
  4. scripts/pylib/twister/twisterlib/testinstance.py (15 changes)
  5. scripts/pylib/twister/twisterlib/testplan.py (240 changes)
  6. scripts/pylib/twister/twisterlib/twister_main.py (2 changes)
  7. scripts/schemas/twister/platform-schema.yaml (13 changes)
  8. tests/arch/arm64/arm64_smc_call/testcase.yaml (2 changes)

boards/arm/fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a_fvp_base_revc_2xaemv8a_smp_ns.yaml (2 changes)

@@ -1,7 +1,7 @@
# Copyright (c) 2022 Arm Limited (or its affiliates). All rights reserved.
# SPDX-License-Identifier: Apache-2.0
identifier: fvp_base_revc_2xaemv8a//smp/ns
identifier: fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a/smp/ns
name: FVP Emulation FVP_Base_RevC-2xAEMvA (SMP)
arch: arm64
type: sim

scripts/pylib/twister/twisterlib/config_parser.py (4 changes)

@@ -93,12 +93,14 @@ class TwisterConfigParser:
self.common = {}
def load(self):
self.data = scl.yaml_load_verify(self.filename, self.schema)
data = scl.yaml_load_verify(self.filename, self.schema)
self.data = data
if 'tests' in self.data:
self.scenarios = self.data['tests']
if 'common' in self.data:
self.common = self.data['common']
return data
def _cast_value(self, value, typestr):
if isinstance(value, str):

scripts/pylib/twister/twisterlib/platform.py (75 changes)

@@ -6,8 +6,11 @@
import os
import scl
from twisterlib.config_parser import TwisterConfigParser
from twisterlib.environment import ZEPHYR_BASE
import logging
logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)
class Platform:
"""Class representing metadata for a particular platform
@@ -23,6 +26,7 @@ class Platform:
"""
self.name = ""
self.aliases = []
self.normalized_name = ""
# if sysbuild to be used by default on a given platform
self.sysbuild = False
@@ -38,7 +42,7 @@ class Platform:
self.flash = 512
self.supported = set()
self.arch = ""
self.arch = None
self.vendor = ""
self.tier = -1
self.type = "na"
@@ -50,41 +54,58 @@ class Platform:
self.filter_data = dict()
self.uart = ""
self.resc = ""
self.qualifier = None
def load(self, board, target, aliases, data):
"""Load the platform data from the board data and target data
board: the board object as per the zephyr build system
target: the target name of the board as per the zephyr build system
aliases: list of aliases for the target
data: the data from the twister.yaml file for the target
"""
self.name = target
self.aliases = aliases
# Get data for various targets and use the main board data as a
# default. Individual variant information will replace the default data
# provided in the main twister configuration for this board.
variants = data.get("variants", {})
variant_data = {}
for alias in aliases:
variant_data = variants.get(alias, {})
if variant_data:
break
def load(self, platform_file):
scp = TwisterConfigParser(platform_file, self.platform_schema)
scp.load()
data = scp.data
self.name = data['identifier']
self.normalized_name = self.name.replace("/", "_")
self.sysbuild = data.get("sysbuild", False)
self.twister = data.get("twister", True)
self.sysbuild = variant_data.get("sysbuild", data.get("sysbuild", self.sysbuild))
self.twister = variant_data.get("twister", data.get("twister", self.twister))
# if no RAM size is specified by the board, take a default of 128K
self.ram = data.get("ram", 128)
testing = data.get("testing", {})
self.timeout_multiplier = testing.get("timeout_multiplier", 1.0)
self.ignore_tags = testing.get("ignore_tags", [])
self.only_tags = testing.get("only_tags", [])
self.default = testing.get("default", False)
self.ram = variant_data.get("ram", data.get("ram", self.ram))
# if no flash size is specified by the board, take a default of 512K
self.flash = variant_data.get("flash", data.get("flash", self.flash))
testing = variant_data.get("testing", data.get("testing", {}))
self.timeout_multiplier = testing.get("timeout_multiplier", self.timeout_multiplier)
self.ignore_tags = testing.get("ignore_tags", self.ignore_tags)
self.only_tags = testing.get("only_tags", self.only_tags)
self.default = testing.get("default", self.default)
self.binaries = testing.get("binaries", [])
renode = testing.get("renode", {})
self.uart = renode.get("uart", "")
self.resc = renode.get("resc", "")
# if no flash size is specified by the board, take a default of 512K
self.flash = data.get("flash", 512)
self.supported = set()
for supp_feature in data.get("supported", []):
for supp_feature in variant_data.get("supported", data.get("supported", [])):
for item in supp_feature.split(":"):
self.supported.add(item)
self.arch = data['arch']
self.vendor = data.get('vendor', '')
self.tier = data.get("tier", -1)
self.type = data.get('type', "na")
self.simulation = data.get('simulation', "na")
self.simulation_exec = data.get('simulation_exec')
self.supported_toolchains = data.get("toolchain", [])
self.arch = variant_data.get('arch', data.get('arch', self.arch))
self.vendor = board.vendor
self.tier = variant_data.get("tier", data.get("tier", self.tier))
self.type = variant_data.get('type', data.get('type', self.type))
self.simulation = variant_data.get('simulation', data.get('simulation', self.simulation))
self.simulation_exec = variant_data.get('simulation_exec', data.get('simulation_exec', self.simulation_exec))
self.supported_toolchains = variant_data.get("toolchain", data.get("toolchain", []))
if self.supported_toolchains is None:
self.supported_toolchains = []
@@ -111,7 +132,7 @@ class Platform:
if toolchain not in self.supported_toolchains:
self.supported_toolchains.append(toolchain)
self.env = data.get("env", [])
self.env = variant_data.get("env", data.get("env", []))
self.env_satisfied = True
for env in self.env:
if not os.environ.get(env, None):

scripts/pylib/twister/twisterlib/testinstance.py (15 changes)

@@ -241,7 +241,14 @@ class TestInstance:
self.handler = handler
# Global testsuite parameters
def check_runnable(self, enable_slow=False, filter='buildable', fixtures=[], hardware_map=None):
def check_runnable(self,
options,
hardware_map=None):
enable_slow = options.enable_slow
filter = options.filter
fixtures = options.fixture
device_testing = options.device_testing
if os.name == 'nt':
# running on simulators is currently supported only for QEMU on Windows
@@ -264,8 +271,7 @@ class TestInstance:
target_ready = bool(self.testsuite.type == "unit" or \
self.platform.type == "native" or \
(self.platform.simulation in SUPPORTED_SIMS and \
self.platform.simulation not in self.testsuite.simulation_exclude) or \
filter == 'runnable')
self.platform.simulation not in self.testsuite.simulation_exclude) or device_testing)
# check if test is runnable in pytest
if self.testsuite.harness == 'pytest':
@@ -317,7 +323,8 @@ class TestInstance:
content = "\n".join(new_config_list)
if enable_coverage:
if platform.name in coverage_platform:
for cp in coverage_platform:
if cp in platform.aliases:
content = content + "\nCONFIG_COVERAGE=y"
content = content + "\nCONFIG_COVERAGE_DUMP=y"

scripts/pylib/twister/twisterlib/testplan.py (240 changes)

@@ -182,6 +182,7 @@ class TestPlan:
if self.options.test:
self.run_individual_testsuite = self.options.test
self.add_configurations()
num = self.add_testsuites(testsuite_filter=self.run_individual_testsuite)
if num == 0:
raise TwisterRuntimeError("No test cases found at the specified location...")
@@ -192,9 +193,7 @@ class TestPlan:
self.scenarios.append(ts.id)
self.report_duplicates()
self.parse_configuration(config_file=self.env.test_config)
self.add_configurations()
if self.load_errors:
raise TwisterRuntimeError("Errors while loading configurations")
@@ -398,8 +397,13 @@ class TestPlan:
sys.stdout.write(what + "\n")
sys.stdout.flush()
def find_twister_data(self, board_data_list, board_aliases):
"""Find the twister data for a board in the list of board data based on the aliases"""
for board_data in board_data_list:
if board_data.get('identifier') in board_aliases:
return board_data
def add_configurations(self):
board_dirs = set()
# Create a list of board roots as defined by the build system in general
# Note, internally in twister a board root includes the `boards` folder
# but in Zephyr build system, the board root is without the `boards` in folder path.
@@ -407,82 +411,91 @@ class TestPlan:
lb_args = Namespace(arch_roots=self.env.arch_roots, soc_roots=self.env.soc_roots,
board_roots=board_roots, board=None, board_dir=None)
v1_boards = list_boards.find_boards(lb_args)
v2_dirs = list_boards.find_v2_board_dirs(lb_args)
for b in v1_boards:
board_dirs.add(b.dir)
board_dirs.update(v2_dirs)
logger.debug("Reading platform configuration files under %s..." % self.env.board_roots)
known_boards = list_boards.find_v2_boards(lb_args)
bdirs = {}
platform_config = self.test_config.get('platforms', {})
for folder in board_dirs:
for file in glob.glob(os.path.join(folder, "*.yaml")):
# If the user set a platform filter, we can, if no other option would increase
# the allowed platform pool, save on time by not loading YAMLs of any boards
# that do not start with the required names.
if self.options.platform and \
not self.options.all and \
not self.options.integration and \
not any([
os.path.basename(file).startswith(
re.split('[/@]', p)[0]
) for p in self.options.platform
]):
continue
try:
# helper function to initialize and add platforms
def init_and_add_platforms(data, board, target, qualifier, aliases):
platform = Platform()
platform.load(file)
if not new_config_found:
data = self.find_twister_data(bdirs[board.dir], aliases)
if not data:
return
platform.load(board, target, aliases, data)
platform.qualifier = qualifier
if platform.name in [p.name for p in self.platforms]:
logger.error(f"Duplicate platform {platform.name} in {file}")
logger.error(f"Duplicate platform {platform.name} in {board.dir}")
raise Exception(f"Duplicate platform identifier {platform.name} found")
if not platform.twister:
return
logger.debug(f"Adding platform {platform.name} with aliases {platform.aliases}")
self.platforms.append(platform)
for board in known_boards:
new_config_found = False
# don't load the same board data twice
if not bdirs.get(board.dir):
datas = []
for file in glob.glob(os.path.join(board.dir, "*.yaml")):
if os.path.basename(file) == "twister.yaml":
continue
try:
scp = TwisterConfigParser(file, Platform.platform_schema)
sdata = scp.load()
datas.append(sdata)
except Exception as e:
logger.error(f"Error loading {file}: {e!r}")
self.load_errors += 1
continue
bdirs[board.dir] = datas
data = {}
if os.path.exists(board.dir / 'twister.yaml'):
try:
scp = TwisterConfigParser(board.dir / 'twister.yaml', Platform.platform_schema)
data = scp.load()
except Exception as e:
logger.error(f"Error loading {board.dir / 'twister.yaml'}: {e!r}")
self.load_errors += 1
continue
new_config_found = True
self.platforms.append(platform)
if not platform_config.get('override_default_platforms', False):
if platform.default:
self.default_platforms.append(platform.name)
else:
if platform.name in platform_config.get('default_platforms', []):
logger.debug(f"adding {platform.name} to default platforms")
self.default_platforms.append(platform.name)
# support board@revision
# if there is already an existing <board>_<revision>.yaml, then use it to
# load the platform directly; otherwise, iterate the directory to
# get all valid board revisions based on each <board>_<revision>.conf.
if '@' not in platform.name:
tmp_dir = os.listdir(os.path.dirname(file))
for item in tmp_dir:
# Need to make sure the revision matches
# the permitted patterns as described in
# cmake/modules/extensions.cmake.
revision_patterns = ["[A-Z]",
"[0-9]+",
"(0|[1-9][0-9]*)(_[0-9]+){0,2}"]
for pattern in revision_patterns:
result = re.match(f"{platform.name}_(?P<revision>{pattern})\\.conf", item)
if result:
revision = result.group("revision")
yaml_file = f"{platform.name}_{revision}.yaml"
if yaml_file not in tmp_dir:
platform_revision = copy.deepcopy(platform)
revision = revision.replace("_", ".")
platform_revision.name = f"{platform.name}@{revision}"
platform_revision.normalized_name = platform_revision.name.replace("/", "_")
platform_revision.default = False
self.platforms.append(platform_revision)
break
for qual in list_boards.board_v2_qualifiers(board):
if board.revisions:
for rev in board.revisions:
target = f"{board.name}@{rev.name}/{qual}"
aliases = [target]
target_no_rev = f"{board.name}/{qual}"
if rev.name == board.revision_default:
aliases.append(target_no_rev)
if '/' not in qual and len(board.socs) == 1:
if rev.name == board.revision_default:
aliases.append(f"{board.name}")
aliases.append(f"{board.name}@{rev.name}")
except RuntimeError as e:
logger.error("E: %s: can't load: %s" % (file, e))
self.load_errors += 1
init_and_add_platforms(data, board, target, qual, aliases)
else:
target = f"{board.name}/{qual}"
aliases = [target]
if '/' not in qual and len(board.socs) == 1:
aliases.append(board.name)
init_and_add_platforms(data, board, target, qual, aliases)
self.platform_names = [p.name for p in self.platforms]
for platform in self.platforms:
if not platform_config.get('override_default_platforms', False):
if platform.default:
self.default_platforms.append(platform.name)
#logger.debug(f"adding {platform.name} to default platforms")
continue
for pp in platform_config.get('default_platforms', []):
if pp in platform.aliases:
logger.debug(f"adding {platform.name} to default platforms (override mode)")
self.default_platforms.append(platform.name)
self.platform_names = [a for p in self.platforms for a in p.aliases]
def get_all_tests(self):
testcases = []
@@ -550,6 +563,30 @@ class TestPlan:
for name in parsed_data.scenarios.keys():
suite_dict = parsed_data.get_scenario(name)
suite = TestSuite(root, suite_path, name, data=suite_dict, detailed_test_id=self.options.detailed_test_id)
# convert to fully qualified names
_integration = []
_platform_allow = []
_platform_exclude = []
for _ip in suite.integration_platforms:
if _ip in self.platform_names:
_integration.append(self.get_platform(_ip).name)
else:
logger.error(f"Platform {_ip} not found in the list of platforms")
suite.integration_platforms = _integration
for _pe in suite.platform_exclude:
if _pe in self.platform_names:
_platform_exclude.append(self.get_platform(_pe).name)
else:
logger.error(f"Platform {_pe} not found in the list of platforms")
suite.platform_exclude = _platform_exclude
for _pa in suite.platform_allow:
if _pa in self.platform_names:
_platform_allow.append(self.get_platform(_pa).name)
else:
logger.error(f"Platform {_pa} not found in the list of platforms")
suite.platform_allow = _platform_allow
if suite.harness in ['ztest', 'test']:
if subcases is None:
# scan it only once per testsuite
@@ -575,7 +612,7 @@ class TestPlan:
def get_platform(self, name):
selected_platform = None
for platform in self.platforms:
if platform.name == name:
if name in platform.aliases:
selected_platform = platform
break
return selected_platform
@@ -608,13 +645,10 @@ class TestPlan:
instance.run_id = ts.get("run_id")
if self.options.device_testing:
tfilter = 'runnable'
else:
tfilter = 'buildable'
self.options.filter = 'runnable'
instance.run = instance.check_runnable(
self.options.enable_slow,
tfilter,
self.options.fixture,
self.options,
self.hwm
)
@@ -660,14 +694,24 @@ class TestPlan:
if tc.get('log'):
case.output = tc.get('log')
instance.create_overlay(platform, self.options.enable_asan, self.options.enable_ubsan, self.options.enable_coverage, self.options.coverage_platform)
instance.create_overlay(platform,
self.options.enable_asan,
self.options.enable_ubsan,
self.options.enable_coverage,
self.options.coverage_platform
)
instance_list.append(instance)
self.add_instances(instance_list)
except FileNotFoundError as e:
logger.error(f"{e}")
return 1
def check_platform(self, platform, platform_list):
for p in platform_list:
if p in platform.aliases:
return True
return False
def apply_filters(self, **kwargs):
toolchain = self.env.toolchain
@@ -709,8 +753,16 @@ class TestPlan:
elif vendor_filter:
vendor_platforms = True
_platforms = []
if platform_filter:
logger.debug(f"Checking platform filter: {platform_filter}")
# find in aliases and rename
self.verify_platforms_existence(platform_filter, f"platform_filter")
for pf in platform_filter:
logger.debug(f"Checking platform in filter: {pf}")
if pf in self.platform_names:
_platforms.append(self.get_platform(pf).name)
platform_filter = _platforms
platforms = list(filter(lambda p: p.name in platform_filter, self.platforms))
elif emu_filter:
platforms = list(filter(lambda p: p.simulation != 'na', self.platforms))
@@ -776,19 +828,12 @@ class TestPlan:
instance_list = []
for plat in platform_scope:
instance = TestInstance(ts, plat, self.env.outdir)
if runnable:
tfilter = 'runnable'
else:
tfilter = 'buildable'
instance.run = instance.check_runnable(
self.options.enable_slow,
tfilter,
self.options.fixture,
self.options,
self.hwm
)
if not force_platform and plat.name in exclude_platform:
if not force_platform and self.check_platform(plat,exclude_platform):
instance.add_filter("Platform is excluded on command line.", Filters.CMD_LINE)
if (plat.arch == "unit") != (ts.type == "unit"):
@@ -961,13 +1006,13 @@ class TestPlan:
keyed_test = keyed_tests.get(test_keys)
if keyed_test is not None:
plat_key = {key_field: getattr(keyed_test['plat'], key_field) for key_field in key_fields}
instance.add_filter(f"Already covered for key {tuple(key)} by platform {keyed_test['plat'].name} having key {plat_key}", Filters.PLATFORM_KEY)
instance.add_filter(f"Already covered for key {key} by platform {keyed_test['plat'].name} having key {plat_key}", Filters.PLATFORM_KEY)
else:
# do not add a platform to keyed tests if previously filtered
# do not add a platform to keyed tests if previously
# filtered
if not instance.filters:
keyed_tests[test_keys] = {'plat': plat, 'ts': ts}
else:
instance.add_filter(f"Excluded platform missing key fields demanded by test {key_fields}", Filters.PLATFORM)
# if nothing stopped us until now, it means this configuration
# needs to be added.
@@ -981,11 +1026,11 @@ class TestPlan:
# take all default platforms
if default_platforms and not ts.build_on_all and not integration:
if ts.platform_allow:
a = set(self.default_platforms)
b = set(ts.platform_allow)
c = a.intersection(b)
if c:
aa = list(filter(lambda ts: ts.platform.name in c, instance_list))
_default_p = set(self.default_platforms)
_platform_allow = set(ts.platform_allow)
_intersection = _default_p.intersection(_platform_allow)
if _intersection:
aa = list(filter(lambda _scenario: _scenario.platform.name in _intersection, instance_list))
self.add_instances(aa)
else:
self.add_instances(instance_list)
@@ -1011,7 +1056,11 @@ class TestPlan:
self.add_instances(instance_list)
for _, case in self.instances.items():
case.create_overlay(case.platform, self.options.enable_asan, self.options.enable_ubsan, self.options.enable_coverage, self.options.coverage_platform)
case.create_overlay(case.platform,
self.options.enable_asan,
self.options.enable_ubsan,
self.options.enable_coverage,
self.options.coverage_platform)
self.selected_platforms = set(p.platform.name for p in self.instances.values())
@@ -1105,3 +1154,4 @@ def change_skip_to_error_if_integration(options, instance):
return
instance.status = TwisterStatus.ERROR
instance.reason += " but is one of the integration platforms"
logger.debug(f"Changing status of {instance.name} to ERROR because it is an integration platform")

scripts/pylib/twister/twisterlib/twister_main.py (2 changes)

@@ -139,7 +139,7 @@ def main(options: argparse.Namespace, default_options: argparse.Namespace):
for i in tplan.instances.values():
if i.status == TwisterStatus.FILTER:
if options.platform and i.platform.name not in options.platform:
if options.platform and not tplan.check_platform(i.platform, options.platform):
continue
logger.debug(
"{:<25} {:<50} {}SKIPPED{}: {}".format(

scripts/schemas/twister/platform-schema.yaml (13 changes)

@@ -9,8 +9,15 @@
# The original spec comes from Zephyr's twister script
#
type: map
mapping:
schema;platform-schema:
type: map
mapping:
"variants":
type: map
matching-rule: "any"
mapping:
regex;(([a-zA-Z0-9_]+)):
include: platform-schema
"identifier":
type: str
"maintainers":
@@ -109,3 +116,5 @@ mapping:
type: str
"resc":
type: str
include: platform-schema

tests/arch/arm64/arm64_smc_call/testcase.yaml (2 changes)

@@ -1,6 +1,6 @@
tests:
arch.arm64.smc_call.smc:
platform_allow: fvp_base_revc_2xaemv8a//smp/ns
platform_allow: fvp_base_revc_2xaemv8a/fvp_base_revc_2xaemv8a/smp/ns
tags:
- arm
- smc
