Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
265 changes: 244 additions & 21 deletions jenkins/helper/test_launch_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
import argparse
import copy
import sys
import json
from traceback import print_exc
import yaml

from site_config import IS_ARM, IS_WINDOWS, IS_MAC, IS_COVERAGE

Expand Down Expand Up @@ -81,6 +83,23 @@ def list_generator(cluster):
return res_sg + res_cl
return list_generator(args.cluster)

def filter_one_test(args, test):
    """Decide whether a testcase should be skipped for this run.

    args -- parsed CLI arguments; ``all`` disables filtering entirely and
            ``full`` selects the full (nightly) test set.
    test -- the testcase's option mapping (may carry 'coverage'/'full' keys).
    Returns True when the test must be filtered out, False to keep it.
    """
    if args.all:
        # --all requests every test; never filter.
        return False
    if IS_COVERAGE and 'coverage' in test:
        # NOTE(review): skipped whenever a 'coverage' key is present on a
        # coverage run, regardless of its value -- confirm this is intended.
        return True
    if 'full' in test:
        # Keep the test only when its 'full' marker matches the run mode:
        # full-only tests are skipped on non-full runs and vice versa.
        return bool(test['full']) != bool(args.full)
    return False

formats = {
"dump": generate_dump_output,
"launch": launch,
Expand All @@ -89,6 +108,7 @@ def list_generator(cluster):
known_flags = {
"cluster": "this test requires a cluster",
"single": "this test requires a single server",
"mixed": "some buckets will run cluster, some not.",
"full": "this test is only executed in full tests",
"!full": "this test is only executed in non-full tests",
"gtest": "only the testsuites starting with 'gtest' are to be executed",
Expand All @@ -110,7 +130,10 @@ def list_generator(cluster):
"buckets": "number of buckets to use for this test",
"suffix": "suffix that is appended to the tests folder name",
"priority": "priority that controls execution order. Testsuites with lower priority are executed later",
"parallelity": "parallelity how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters"
"parallelity": "parallelity how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters",
"type": "single or cluster flag",
"full": "whether to spare from a single or full run",
"sniff": "whether to enable sniffing",
}


Expand Down Expand Up @@ -160,8 +183,8 @@ def parse_number(value):
raise Exception(f"invalid numeric value: {value}") from exc

def parse_number_or_default(key, default_value=None):
""" check number """
if key in params:
"""check number"""
if key in params and not isinstance(params[key], int):
if params[key][0] == '*': # factor the default
params[key] = default_value * parse_number(params[key][1:])
else:
Expand All @@ -177,15 +200,15 @@ def parse_number_or_default(key, default_value=None):


def validate_flags(flags):
    """Reject mutually exclusive target flags on a single test definition."""
    # Each pair may appear at most once per test; both together is an error.
    exclusive_pairs = (("cluster", "single"), ("full", "!full"))
    for first, second in exclusive_pairs:
        if first in flags and second in flags:
            raise Exception(f"`{first}` and `{second}` specified for the same test")


def read_definition_line(line):
""" parse one test definition line """
"""parse one test definition line"""
bits = line.split()
if len(bits) < 1:
raise Exception("expected at least one argument: <testname>")
Expand Down Expand Up @@ -221,33 +244,233 @@ def read_definition_line(line):
validate_flags(flags)
params = validate_params(params, 'cluster' in flags)

if len(arangosh_args) == 0:
arangosh_args = ""
run_job = 'run-linux-tests'
return {
"name": params.get("name", suites),
"suite": suites,
"suites": suites,
"priority": params["priority"],
"parallelity": params["parallelity"],
"flags": flags,
"args": args,
"arangosh_args": arangosh_args,
"params": params
"params": params,
"testfile_definitions": testfile_definitions,
"run_job": run_job,
}

def read_yaml_suite(name, suite, definition, testfile_definitions):
    """Convert one yaml test definition into the internal test dict.

    name -- the suite's key in the yaml file (default display name).
    suite -- the testing.js suite string to execute.
    definition -- the yaml mapping; may carry 'options', 'args' and
                  'arangosh_args' sub-mappings.
    testfile_definitions -- job properties propagated from the yaml header.
    Returns the internal test dict (same shape as read_definition_line's).
    Raises Exception when 'args' or 'arangosh_args' is not a mapping.
    """
    definition.setdefault('options', {})
    flags = []
    args = []
    arangosh_args = []
    if 'args' in definition:
        if not isinstance(definition['args'], dict):
            raise Exception(f"expected args to be a key value list! have: {definition['args']}")
        for key, val in definition['args'].items():
            if key == 'moreArgv':
                # moreArgv is forwarded verbatim instead of as "--key value".
                args.append(val)
            else:
                args.append(f"--{key}")
                # testing.js expects literal "true"/"false" strings for bools.
                args.append(("true" if val else "false") if isinstance(val, bool) else val)
    if 'arangosh_args' in definition:
        if not isinstance(definition['arangosh_args'], dict):
            raise Exception(f"expected arangosh_args to be a key value list! have: {definition['arangosh_args']}")
        for key, val in definition['arangosh_args'].items():
            arangosh_args.append(f"--{key}")
            arangosh_args.append(("true" if val else "false") if isinstance(val, bool) else val)

    opts = definition['options']
    # bool(opts) guards against an 'options:' key with a null yaml value.
    is_cluster = bool(opts) and opts.get('type') == 'cluster'
    params = validate_params(definition['options'], is_cluster)

    medium_size = False
    if 'type' in params:
        if params['type'] == "cluster":
            medium_size = True
            flags.append('cluster')
        elif params['type'] == "mixed":
            # mixed buckets run some suites clustered, some not.
            medium_size = True
            flags.append('mixed')
        else:
            flags.append('single')
    # cluster/mixed default to the "medium" size class, single to "small",
    # unless the definition pins an explicit size.
    size = params.get('size', "medium" if medium_size else "small")

    # Boolean toggles become positive or negated flags ("full" vs "!full").
    for toggle in ('full', 'coverage', 'sniff'):
        if toggle in params:
            flags.append(toggle if params[toggle] else f"!{toggle}")
    return {
        "name": params.get('name', name),
        "suite": suite,
        "size": size,
        "flags": flags,
        "args": args.copy(),
        "priority": params["priority"],
        "arangosh_args": arangosh_args.copy(),
        "params": params.copy(),
        "testfile_definitions": testfile_definitions,
        "run_job": 'run-linux-tests',  # jenkins job that executes this suite
        "parallelity": params["parallelity"],
    }

def get_args(args):
    """Serialize args into the nested shape testing.js's fromArgv produces.

    A "prefix:sub" key becomes a nested dict entry; a repeated plain key is
    collapsed into a list of values.
    """
    sub_args = {}
    for key, value in args.items():
        if ":" in key:
            parts = key.split(":")
            # Group "prefix:sub" keys under a shared nested dict.
            sub_args.setdefault(parts[0], {})[parts[1]] = value
        elif key in sub_args:
            existing = sub_args[key]
            if isinstance(existing, list):
                existing.append(value)
            else:
                sub_args[key] = [value]
        else:
            sub_args[key] = value
    return sub_args

def read_definitions(filename):
""" read test definitions txt """

def read_yaml_multi_suite(name, definition, testfile_definitions, cli_args):
    """Combine several yaml sub-suites into one internal test definition.

    name -- the bucket's key in the yaml file.
    definition -- yaml mapping with a 'suites' list (strings or one-key
                  dicts) plus optional shared 'options'/'args'.
    testfile_definitions -- job properties propagated from the yaml header.
    cli_args -- parsed CLI arguments, used to filter individual sub-suites.
    Returns one internal test dict whose suite string joins all kept
    sub-suites and whose optionsJson carries their per-suite args.
    Raises Exception when a suites entry is neither str nor dict.
    """
    generated_definition = {}
    args = {}
    if 'options' in definition:
        generated_definition['options'] = definition['options']
    if 'args' in definition:
        args = definition['args'].copy()
    suite_strs = []
    options_json = []
    for suite in definition['suites']:
        if isinstance(suite, str):
            # Bare string entry: sub-suite without per-suite options/args.
            options_json.append({})
            suite_name = suite
        elif isinstance(suite, dict):
            suite_name = list(suite.keys())[0]
            if 'options' in suite[suite_name]:
                if filter_one_test(cli_args, suite[suite_name]['options']):
                    print(f"skipping {suite}")
                    continue
            if 'args' in suite[suite_name]:
                options_json.append(get_args(suite[suite_name]['args']))
            else:
                options_json.append({})
        else:
            # BUG FIX: this check previously ran after list(suite.keys()),
            # so a non-dict raised AttributeError before the diagnostic.
            raise Exception(f"suite should be a dict, it is {type(suite)}")
        suite_strs.append(suite_name)
    generated_name = ','.join(suite_strs)
    # Compact JSON matches what testing.js expects on the command line.
    args['optionsJson'] = json.dumps(options_json, separators=(',', ':'))
    if args != {}:
        generated_definition['args'] = args
    return read_yaml_suite(name, generated_name, generated_definition, testfile_definitions)

def read_yaml_bucket_suite(bucket_name, definition, testfile_definitions, cli_args):
    """Expand a yaml bucket into one internal test dict per contained suite.

    bucket_name -- the bucket's key in the yaml file (used as display name).
    definition -- yaml mapping with a 'suites' list (strings or one-key
                  dicts) plus optional shared 'options'/'args'.
    testfile_definitions -- job properties propagated from the yaml header.
    cli_args -- parsed CLI arguments (currently unused here; kept for
                interface parity with read_yaml_multi_suite).
    Returns a list of internal test dicts, one per suite.
    """
    ret = []
    # BUG FIX: 'options' must default to a dict -- it is merged with `|`
    # below (was [], raising TypeError), and the str branch previously read
    # definition['options'] directly, raising KeyError when absent.
    options = definition.get('options', {})
    args = definition.get('args', {})
    for suite in definition['suites']:
        if isinstance(suite, str):
            # Bare string entry: suite inherits the bucket-level settings.
            suite_name = suite
            local_options = options
            local_args = args
        else:
            # One-key dict entry: bucket-level settings overridden per suite.
            suite_name = list(suite.keys())[0]
            local_options = options.copy()
            if 'options' in suite[suite_name]:
                local_options = local_options | suite[suite_name]['options']
            local_args = args.copy()
            if 'args' in suite[suite_name]:
                local_args = local_args | suite[suite_name]['args']
        ret.append(
            read_yaml_suite(suite_name,
                            suite_name,
                            {
                                'options': local_options,
                                'name': bucket_name,
                                'args': local_args,
                                'suite': suite_name
                            },
                            testfile_definitions)
        )
    return ret

def read_definitions(filename, override_branch, args):
    """Read test definitions from *filename* (yaml or legacy txt format).

    filename -- path to test-definitions.yml or test-definitions.txt; the
                extension selects the parser.
    override_branch -- currently unused; kept for interface compatibility.
    args -- parsed CLI arguments, forwarded to the yaml suite filters.
    Returns the list of internal test dicts.
    Raises Exception when any txt line fails to parse, or propagates the
    underlying error for a malformed yaml suite.
    """
    if filename.endswith(".yml"):
        return _read_yaml_definitions(filename, args)
    return _read_txt_definitions(filename)


def _read_yaml_definitions(filename, args):
    """Parse the yaml test-definition format into internal test dicts."""
    tests = []
    testfile_definitions = {}
    with open(filename, "r", encoding="utf-8") as filep:
        config = yaml.safe_load(filep)
    if isinstance(config, dict):
        # A dict-shaped file wraps the test list with shared job properties.
        if "jobProperties" in config:
            testfile_definitions = copy.deepcopy(config["jobProperties"])
        config = config['tests']
    for testcase in config:
        suite_name = list(testcase.keys())[0]
        try:
            suite = testcase[suite_name]
            if "suites" in suite:
                if ('options' in suite and
                        suite['options'].get('buckets') == "auto"):
                    # "auto" bucket sizing is decided later; drop the marker
                    # and expand the bucket into individual suites.
                    del suite['options']['buckets']
                    tests += read_yaml_bucket_suite(suite_name, suite,
                                                    testfile_definitions, args)
                else:
                    tests.append(read_yaml_multi_suite(suite_name, suite,
                                                       testfile_definitions, args))
            else:
                tests.append(read_yaml_suite(suite_name, suite_name,
                                             suite, testfile_definitions))
        except Exception as ex:
            print(f"while parsing {suite_name} {testcase}")
            raise
    return tests


def _read_txt_definitions(filename):
    """Parse the legacy line-oriented txt test-definition format."""
    tests = []
    has_error = False
    with open(filename, "r", encoding="utf-8") as filep:
        for line_no, line in enumerate(filep):
            line = line.strip()
            if line.startswith("#") or len(line) == 0:
                continue  # ignore comments
            try:
                tests.append(read_definition_line(line))
            except Exception as exc:
                print(f"(unknown):{line_no + 1}: \n`{line}`\n {exc}", file=sys.stderr)
                has_error = True
    if has_error:
        # Report every bad line first, then abort the whole run.
        raise Exception("abort due to errors")
    return tests
Expand All @@ -263,7 +486,7 @@ def main():
""" entrypoint """
try:
args = parse_arguments()
tests = read_definitions(args.definitions)
tests = read_definitions(args.definitions, "", args)
if args.validate_only:
return # nothing left to do
tests = filter_tests(args, tests)
Expand Down
6 changes: 5 additions & 1 deletion jenkins/runCoverageRocksDB.fish
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
#!/usr/bin/env fish

set -xg TEST_DEFINITIONS test-definitions.txt
if test -f "$INNERWORKDIR/ArangoDB/tests/test-definitions.yml"
set -xg TEST_DEFINITIONS test-definitions.yml
else
set -xg TEST_DEFINITIONS test-definitions.txt
end
if test (count $argv) -gt 0
set -xg TEST_DEFINITIONS $argv[1]
end
Expand Down
Loading