 import argparse
 import copy
 import sys
+import json
 from traceback import print_exc
+import yaml
 
 from site_config import IS_ARM, IS_WINDOWS, IS_MAC, IS_COVERAGE
 
@@ -81,6 +83,23 @@ def list_generator(cluster):
         return res_sg + res_cl
     return list_generator(args.cluster)
 
+def filter_one_test(args, test):
+    """return True if this testcase should be skipped, based on coverage and full/non-full filters"""
+    if args.all:
+        return False
+    if IS_COVERAGE:
+        if 'coverage' in test:
+            return True
+    full = args.full
+    filters = []
+
+    if 'full' in test:
+        if full and not test['full']:
+            return True
+        if not full and test['full']:
+            return True
+    return False
+
 formats = {
     "dump": generate_dump_output,
     "launch": launch,
@@ -89,6 +108,7 @@ def list_generator(cluster):
 known_flags = {
     "cluster": "this test requires a cluster",
     "single": "this test requires a single server",
+    "mixed": "some buckets will run in cluster mode, some will not",
     "full": "this test is only executed in full tests",
     "!full": "this test is only executed in non-full tests",
     "gtest": "only the testsuites starting with 'gtest' are to be executed",
@@ -110,7 +130,10 @@ def list_generator(cluster):
     "buckets": "number of buckets to use for this test",
     "suffix": "suffix that is appended to the tests folder name",
     "priority": "priority that controls execution order. Testsuites with lower priority are executed later",
-    "parallelity": "parallelity how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters"
+    "parallelity": "parallelity how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters",
+    "type": "deployment type of the test: single, cluster or mixed",
+    "full": "whether this test is only executed in full runs (if false: only in non-full runs)",
+    "sniff": "whether to enable sniffing",
 }
 
 
@@ -160,8 +183,8 @@ def parse_number(value):
             raise Exception(f"invalid numeric value: {value}") from exc
 
     def parse_number_or_default(key, default_value=None):
-        """ check number """
-        if key in params:
+        """check number"""
+        if key in params and not isinstance(params[key], int):
             if params[key][0] == '*':  # factor the default
                 params[key] = default_value * parse_number(params[key][1:])
             else:
@@ -177,15 +200,15 @@ def parse_number_or_default(key, default_value=None):
 
 
 def validate_flags(flags):
-    """ check whether target flags are valid """
+    """check whether target flags are valid"""
     if "cluster" in flags and "single" in flags:
         raise Exception("`cluster` and `single` specified for the same test")
     if "full" in flags and "!full" in flags:
         raise Exception("`full` and `!full` specified for the same test")
 
 
 def read_definition_line(line):
-    """ parse one test definition line """
+    """parse one test definition line"""
     bits = line.split()
     if len(bits) < 1:
         raise Exception("expected at least one argument: <testname>")
@@ -221,33 +244,233 @@ def read_definition_line(line):
     validate_flags(flags)
     params = validate_params(params, 'cluster' in flags)
 
+    if len(arangosh_args) == 0:
+        arangosh_args = ""
+    run_job = 'run-linux-tests'
     return {
         "name": params.get("name", suites),
-        "suite": suites,
+        "suites": suites,
         "priority": params["priority"],
         "parallelity": params["parallelity"],
         "flags": flags,
         "args": args,
         "arangosh_args": arangosh_args,
-        "params": params
+        "params": params,
+        "testfile_definitions": testfile_definitions,
+        "run_job": run_job,
+    }
+
+def read_yaml_suite(name, suite, definition, testfile_definitions):
+    """convert yaml representation into the internal one"""
+    if not 'options' in definition:
+        definition['options'] = {}
+    flags = []
+    params = {}
+    arangosh_args = []
+    args = []
+    if 'args' in definition:
+        if not isinstance(definition['args'], dict):
+            raise Exception(f"expected args to be a key value list! have: {definition['args']}")
+        for key, val in definition['args'].items():
+            if key == 'moreArgv':
+                args.append(val)
+            else:
+                args.append(f"--{key}")
+                if isinstance(val, bool):
+                    args.append("true" if val else "false")
+                else:
+                    args.append(val)
+    if 'arangosh_args' in definition:
+        if not isinstance(definition['arangosh_args'], dict):
+            raise Exception(f"expected arangosh_args to be a key value list! have: {definition['arangosh_args']}")
+        for key, val in definition['arangosh_args'].items():
+            arangosh_args.append(f"--{key}")
+            if isinstance(val, bool):
+                arangosh_args.append("true" if val else "false")
+            else:
+                arangosh_args.append(val)
+
+    medium_size = False
+    is_cluster = (definition['options'] and
+                  'type' in definition['options'] and
+                  definition['options']['type'] == 'cluster')
+    params = validate_params(definition['options'], is_cluster)
+    if 'type' in params:
+        if params['type'] == "cluster":
+            medium_size = True
+            flags.append('cluster')
+        elif params['type'] == "mixed":
+            medium_size = True
+            flags.append('mixed')
+        else:
+            flags.append('single')
+    size = "medium" if medium_size else "small"
+    size = size if not "size" in params else params['size']
+
+    if 'full' in params:
+        flags.append("full" if params["full"] else "!full")
+    if 'coverage' in params:
+        flags.append("coverage" if params["coverage"] else "!coverage")
+    if 'sniff' in params:
+        flags.append("sniff" if params["sniff"] else "!sniff")
+    run_job = 'run-linux-tests'
+    return {
+        "name": name if not "name" in params else params['name'],
+        "suite": suite,
+        "size": size,
+        "flags": flags,
+        "args": args.copy(),
+        "priority": params["priority"],
+        "arangosh_args": arangosh_args.copy(),
+        "params": params.copy(),
+        "testfile_definitions": testfile_definitions,
+        "run_job": run_job,
+        "parallelity": params["parallelity"],
     }
 
+def get_args(args):
+    """serialize args into json similar to fromArgv in testing.js"""
+    sub_args = {}
+    for key in args.keys():
+        value = args[key]
+        if ":" in key:
+            keyparts = key.split(":")
+            if not keyparts[0] in sub_args:
+                sub_args[keyparts[0]] = {}
+            sub_args[keyparts[0]][keyparts[1]] = value
+        elif key in sub_args:
+            if isinstance(sub_args[key], list):
+                sub_args[key].append(value)
+            else:
+                sub_args[key] = [value]
+        else:
+            sub_args[key] = value
+    return sub_args
 
-def read_definitions(filename):
-    """ read test definitions txt """
+
+def read_yaml_multi_suite(name, definition, testfile_definitions, cli_args):
+    """convert yaml representation into the internal one"""
+    generated_definition = {
+    }
+    args = {}
+    if 'options' in definition:
+        generated_definition['options'] = definition['options']
+    if 'args' in definition:
+        args = definition['args'].copy()
+    suite_strs = []
+    options_json = []
+    for suite in definition['suites']:
+        if isinstance(suite, str):
+            options_json.append({})
+            suite_name = suite
+        else:
+            suite_name = list(suite.keys())[0]
+            if not isinstance(suite, dict):
+                raise Exception(f"suite should be a dict, it is {type(suite)}")
+            if 'options' in suite[suite_name]:
+                if filter_one_test(cli_args, suite[suite_name]['options']):
+                    print(f"skipping {suite}")
+                    continue
+            if 'args' in suite[suite_name]:
+                options_json.append(get_args(suite[suite_name]['args']))
+            else:
+                options_json.append({})
+        suite_strs.append(suite_name)
+    generated_name = ','.join(suite_strs)
+    args['optionsJson'] = json.dumps(options_json, separators=(',', ':'))
+    if args != {}:
+        generated_definition['args'] = args
+    return read_yaml_suite(name, generated_name, generated_definition, testfile_definitions)
+
+def read_yaml_bucket_suite(bucket_name, definition, testfile_definitions, cli_args):
+    """convert yaml representation into the internal one"""
+    ret = []
+    args = {}
+    options = []
+    if 'options' in definition:
+        options = definition['options']
+    if 'args' in definition:
+        args = definition['args']
+    for suite in definition['suites']:
+        if isinstance(suite, str):
+            ret.append(
+                read_yaml_suite(suite,
+                                suite,
+                                {
+                                    'options': definition['options'],
+                                    'name': bucket_name,
+                                    'args': args,
+                                    'suite': suite
+                                },
+                                testfile_definitions)
+            )
+        else:
+            suite_name = list(suite.keys())[0]
+            local_options = options.copy()
+            if 'options' in suite[suite_name]:
+                local_options = local_options | suite[suite_name]['options']
+            local_args = args.copy()
+            if 'args' in suite[suite_name]:
+                local_args = local_args | suite[suite_name]['args']
+            ret.append(
+                read_yaml_suite(suite_name,
+                                suite_name,
+                                {
+                                    'options': local_options,
+                                    'name': bucket_name,
+                                    'args': local_args,
+                                    'suite': suite_name
+                                },
+                                testfile_definitions)
+            )
+
+    return ret
+
+def read_definitions(filename, override_branch, args):
+    """read test definitions from a txt or yml file"""
     tests = []
     has_error = False
-    with open(filename, "r", encoding="utf-8") as filep:
-        for line_no, line in enumerate(filep):
-            line = line.strip()
-            if line.startswith("#") or len(line) == 0:
-                continue  # ignore comments
-            try:
-                test = read_definition_line(line)
-                tests.append(test)
-            except Exception as exc:
-                print(f"{filename}:{line_no + 1}: \n`{line}`\n{exc}", file=sys.stderr)
-                has_error = True
+    testfile_definitions = {}
+    yaml_text = ""
+    if filename.endswith(".yml"):
+        with open(filename, "r", encoding="utf-8") as filep:
+            config = yaml.safe_load(filep)
+        if isinstance(config, dict):
+            if "add-yaml" in config:
+                parsed_yaml = {"add-yaml": copy.deepcopy(config["add-yaml"])}
+            if "jobProperties" in config:
+                testfile_definitions = copy.deepcopy(config["jobProperties"])
+            config = config['tests']
+        for testcase in config:
+            suite_name = list(testcase.keys())[0]
+            try:
+                suite = testcase[suite_name]
+                if "suites" in suite:
+                    if ('options' in suite and
+                            'buckets' in suite['options'] and
+                            suite['options']['buckets'] == "auto"):
+                        del suite['options']['buckets']
+                        tests += read_yaml_bucket_suite(suite_name, suite, testfile_definitions, args)
+                    else:
+                        tests.append(read_yaml_multi_suite(suite_name, suite, testfile_definitions, args))
+                else:
+                    tests.append(read_yaml_suite(suite_name, suite_name,
+                                                 suite, testfile_definitions))
+            except Exception as ex:
+                print(f"while parsing {suite_name} {testcase}")
+                raise ex
+    else:
+        with open(filename, "r", encoding="utf-8") as filep:
+            for line_no, line in enumerate(filep):
+                line = line.strip()
+                if line.startswith("#") or len(line) == 0:
+                    continue  # ignore comments
+                try:
+                    test = read_definition_line(line)
+                    tests.append(test)
+                except Exception as exc:
+                    print(f"{filename}:{line_no + 1}: \n`{line}`\n{exc}", file=sys.stderr)
+                    has_error = True
     if has_error:
         raise Exception("abort due to errors")
     return tests
@@ -263,7 +486,7 @@ def main():
     """ entrypoint """
     try:
         args = parse_arguments()
-        tests = read_definitions(args.definitions)
+        tests = read_definitions(args.definitions, "", args)
         if args.validate_only:
             return  # nothing left to do
         tests = filter_tests(args, tests)
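
For context, a minimal standalone sketch (not part of the diff) of what the new YAML path computes: the joined suite name and the compact optionsJson string that read_yaml_multi_suite() builds. The suite and option names used here (shell_client, shell_server, test:filter, dumpAgencyOnError) are hypothetical, and nest_colon_keys() is an illustrative re-implementation of the colon-splitting done by get_args() above, so the snippet runs on its own.

# Illustrative sketch only -- suite/option names are made up; nest_colon_keys()
# mirrors get_args() so this runs without importing the module above.
import json

# What yaml.safe_load() could return for one hypothetical multi-suite entry:
definition = {
    "suites": [
        "shell_client",  # plain string: suite without extra args
        {"shell_server": {"args": {"test:filter": "foo", "dumpAgencyOnError": True}}},
    ],
}

def nest_colon_keys(args):
    """Mirror of get_args(): 'outer:inner' keys become nested dicts."""
    out = {}
    for key, value in args.items():
        if ":" in key:
            keyparts = key.split(":")
            out.setdefault(keyparts[0], {})[keyparts[1]] = value
        else:
            out[key] = value
    return out

options_json = []
suite_names = []
for suite in definition["suites"]:
    if isinstance(suite, str):
        suite_names.append(suite)
        options_json.append({})
    else:
        name = list(suite.keys())[0]
        suite_names.append(name)
        options_json.append(nest_colon_keys(suite[name].get("args", {})))

# read_yaml_multi_suite() joins the names and serializes the options compactly:
print(",".join(suite_names))
# -> shell_client,shell_server
print(json.dumps(options_json, separators=(",", ":")))
# -> [{},{"test":{"filter":"foo"},"dumpAgencyOnError":true}]

The compact separators matter because, judging from read_yaml_suite()'s argument handling, the whole string ends up as the value of a single --optionsJson argument.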