[ONOS-7985]: Refactor list of tests used in TestON Jenkins Pipelines
Change-Id: Ic3d67fd0b0b0eb4a74f9f0c198dc5a868c5afa95
diff --git a/TestON/JenkinsFile/FUNCJenkinsFile b/TestON/JenkinsFile/FUNCJenkinsFile
index 3bcd47e..6989fd5 100644
--- a/TestON/JenkinsFile/FUNCJenkinsFile
+++ b/TestON/JenkinsFile/FUNCJenkinsFile
@@ -22,10 +22,11 @@
// read the dependency files
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
fileRelated.init()
+test_list.init()
// initialize the funcs with category of FUNC
funcs.initialize( "FUNC" );
@@ -37,7 +38,7 @@
prop = funcs.getProperties()
// get the list of the test and init branch to it.
-FUNC = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "FUNC" ]
+FUNC_tests = test_list.getTestsFromCategory( "FUNC" )
// init some directories
graph_generator_file = fileRelated.trendIndividual
@@ -51,12 +52,12 @@
// run the test sequentially and save the function into the dictionary.
def tests = [ : ]
-for ( String test : FUNC.keySet() ){
+for ( String test : FUNC_tests.keySet() ){
def toBeRun = testsToRun.contains( test )
def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
- def pureTestName = ( FUNC[ test ].containsKey( "test" ) ? FUNC[ test ][ "test" ].split().head() : test )
+ def pureTestName = ( FUNC_tests[ test ].containsKey( "test" ) ? FUNC_tests[ test ][ "test" ].split().head() : test )
tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false,
- FUNC, graph_generator_file, graph_saved_directory )
+ FUNC_tests, graph_generator_file, graph_saved_directory )
}
// get the start time of the test.
@@ -68,7 +69,7 @@
}
// generate the overall graph of the FUNC tests.
-funcs.generateOverallGraph( prop, FUNC, graph_saved_directory )
+funcs.generateOverallGraph( prop, FUNC_tests, graph_saved_directory )
// send the notification to Slack that running FUNC tests was ended.
funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/FabricJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
index 1e5a628..72a2c2f 100644
--- a/TestON/JenkinsFile/FabricJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -22,11 +22,12 @@
// init dependencies functions
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/TriggerFuncs.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
fileRelated.init()
+test_list.init()
// set the versions of the onos.
current_version = "master"
@@ -70,7 +71,7 @@
}
// get the list of the tests from the JenkinsTestONTests.groovy
-AllTheTests = test_lists.getAllTheTests( onos_b )
+// AllTheTests is no longer pre-computed here; tests are looked up on demand via test_list.
day = ""
@@ -219,16 +220,15 @@
}*/
// Way to add list of the tests with specific category to the result
-def adder( testCat, set, getResult ){
- // testCat : test Category ( Eg. FUNC, HA, SR ... )
- // set : set of the test ( Eg. basic, extra_A ... )
+def adder( category, day, branch, getResult ){
+ // category : test Category ( Eg. FUNC, HA, SR ... )
// if getResult == true, it will add the result.
result = ""
- for ( String test in AllTheTests[ testCat ].keySet() ){
- if ( AllTheTests[ testCat ][ test ][ set ] ){
- if ( getResult ){
- result += test + ","
- }
+ selectedTests = test_list.getTestsFromCategory( category, test_list.getTestsFromDay( day, branch ) )
+
+ for ( String test in selectedTests.keySet() ){
+ if ( getResult ){
+ result += test + ","
}
}
return result
diff --git a/TestON/JenkinsFile/HAJenkinsFile b/TestON/JenkinsFile/HAJenkinsFile
index d51806c..9eee0f9 100644
--- a/TestON/JenkinsFile/HAJenkinsFile
+++ b/TestON/JenkinsFile/HAJenkinsFile
@@ -26,6 +26,7 @@
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
fileRelated.init()
+test_list.init()
// initialize the funcs with category of HA
funcs.initialize( "HA" );
@@ -37,7 +38,7 @@
prop = funcs.getProperties()
// get the list of the test and init branch to it.
-HA = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "HA" ]
+HA_tests = test_list.getTestsFromCategory( "HA" )
// init some directories
graph_generator_file = fileRelated.trendIndividual
@@ -51,12 +52,12 @@
// run the test sequentially and save the function into the dictionary.
def tests = [ : ]
-for ( String test : HA.keySet() ){
+for ( String test : HA_tests.keySet() ){
toBeRun = testsToRun.contains( test )
def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
- def pureTestName = ( HA[ test ].containsKey( "test" ) ? HA[ test ][ "test" ].split().head() : test )
+ def pureTestName = ( HA_tests[ test ].containsKey( "test" ) ? HA_tests[ test ][ "test" ].split().head() : test )
tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false,
- HA, graph_generator_file, graph_saved_directory )
+ HA_tests, graph_generator_file, graph_saved_directory )
}
// get the start time of the test.
@@ -67,7 +68,7 @@
}
// generate the overall graph of the HA tests.
-funcs.generateOverallGraph( prop, HA, graph_saved_directory )
+funcs.generateOverallGraph( prop, HA_tests, graph_saved_directory )
// send the notification to Slack that running HA tests was ended.
funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/Overall_Graph_Generator b/TestON/JenkinsFile/Overall_Graph_Generator
index 012b11d..6b30ed0 100644
--- a/TestON/JenkinsFile/Overall_Graph_Generator
+++ b/TestON/JenkinsFile/Overall_Graph_Generator
@@ -22,7 +22,6 @@
// read the dependency functions.
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
fileRelated.init()
@@ -36,13 +35,11 @@
funcs.initializeTrend( "VM" )
onos_branch = params.ONOSbranch
-AllTheTests = test_lists.getAllTheTests( "" )
// generate the graph and post the result on TestStation-VMs. Right now, all the pie and histograms are saved
// on VM.
funcs.generateStatGraph( "TestStation-VMs",
onos_branch,
- AllTheTests,
stat_graph_generator_file,
pie_graph_generator_file,
graph_saved_directory )
diff --git a/TestON/JenkinsFile/SRHAJenkinsFile b/TestON/JenkinsFile/SRHAJenkinsFile
index 9549ebf..3ca8810 100644
--- a/TestON/JenkinsFile/SRHAJenkinsFile
+++ b/TestON/JenkinsFile/SRHAJenkinsFile
@@ -22,11 +22,12 @@
// read dependencies.
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
// init configuration to be SR
fileRelated.init()
+test_list.init()
funcs.initialize( "SR" )
// Allow to run with --params-file argument
@@ -39,7 +40,7 @@
// ( currently master on Fabric4, 2.1 on Fabric3 and 1.15 on Fabric2 )
def prop = null
prop = funcs.getProperties()
-SRHA = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "SRHA" ]
+SRHA_tests = test_list.getTestsFromCategory( "SRHA" )
// set the file paths and directory
graph_generator_file = fileRelated.trendIndividual
@@ -53,12 +54,12 @@
// save the functions to run test in the dictionary.
def tests = [ : ]
-for ( String test : SRHA.keySet() ){
+for ( String test : SRHA_tests.keySet() ){
toBeRun = testsToRun.contains( test )
def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
- def pureTestName = ( SRHA[ test ].containsKey( "test" ) ? SRHA[ test ][ "test" ].split().head() : test )
+ def pureTestName = ( SRHA_tests[ test ].containsKey( "test" ) ? SRHA_tests[ test ][ "test" ].split().head() : test )
tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false,
- SRHA, graph_generator_file, graph_saved_directory )
+ SRHA_tests, graph_generator_file, graph_saved_directory )
}
// get start time
diff --git a/TestON/JenkinsFile/SRJenkinsFile b/TestON/JenkinsFile/SRJenkinsFile
index dfd702c..e4af109 100644
--- a/TestON/JenkinsFile/SRJenkinsFile
+++ b/TestON/JenkinsFile/SRJenkinsFile
@@ -22,11 +22,12 @@
// read dependencies.
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
// init configuration to be SR
fileRelated.init()
+test_list.init()
funcs.initialize( "SR" )
// get the name of the Jenkins job.
@@ -39,7 +40,7 @@
// ( currently master on Fabric4, 1.15 on Fabric2 and 2.1 on Fabric3 )
def prop = null
prop = funcs.getProperties()
-SR = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "SR" ]
+SR_tests = test_list.getTestsFromCategory( "SR" )
// set the file paths and directory
graph_generator_file = fileRelated.trendIndividual
@@ -53,12 +54,12 @@
// save the functions to run test in the dictionary.
def tests = [ : ]
-for ( String test : SR.keySet() ){
+for ( String test : SR_tests.keySet() ){
toBeRun = testsToRun.contains( test )
def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
- def pureTestName = ( SR[ test ].containsKey( "test" ) ? SR[ test ][ "test" ].split().head() : test )
+ def pureTestName = ( SR_tests[ test ].containsKey( "test" ) ? SR_tests[ test ][ "test" ].split().head() : test )
tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false,
- SR, graph_generator_file, graph_saved_directory )
+ SR_tests, graph_generator_file, graph_saved_directory )
}
// get start time
diff --git a/TestON/JenkinsFile/USECASEJenkinsFile b/TestON/JenkinsFile/USECASEJenkinsFile
index d83148f..62da8ff 100644
--- a/TestON/JenkinsFile/USECASEJenkinsFile
+++ b/TestON/JenkinsFile/USECASEJenkinsFile
@@ -22,17 +22,18 @@
// set the dependencies.
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
// init configuration of funcs to be USECASE
fileRelated.init()
+test_list.init()
funcs.initialize( "USECASE" );
// read the TestON.property files and save it as a dictionary
def prop = null
prop = funcs.getProperties()
-USECASE = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "USECASE" ]
+USECASE_tests = test_list.getTestsFromCategory( "USECASE" )
// save directory and file.
graph_generator_file = fileRelated.trendIndividual
@@ -46,12 +47,12 @@
// save the function of the test running into the dictionary.
def tests = [ : ]
-for ( String test : USECASE.keySet() ){
+for ( String test : USECASE_tests.keySet() ){
toBeRun = testsToRun.contains( test )
def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
- def pureTestName = ( USECASE[ test ].containsKey( "test" ) ? USECASE[ test ][ "test" ].split().head() : test )
+ def pureTestName = ( USECASE_tests[ test ].containsKey( "test" ) ? USECASE_tests[ test ][ "test" ].split().head() : test )
tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false,
- USECASE, graph_generator_file, graph_saved_directory )
+ USECASE_tests, graph_generator_file, graph_saved_directory )
}
// get start time of the test.
@@ -65,4 +66,4 @@
funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
// generate the overall graph for USECASE.
-funcs.generateOverallGraph( prop, USECASE, graph_saved_directory )
+funcs.generateOverallGraph( prop, USECASE_tests, graph_saved_directory )
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
index 739e308..09ee84a 100644
--- a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -22,9 +22,9 @@
// set the functions of the dependencies.
funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/TriggerFuncs.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
// set the versions of the onos
fileRelated.init()
@@ -80,9 +80,6 @@
}
}
-// Get all the list of the tests from the JenkinsTestONTests.groovy
-AllTheTests = test_lists.getAllTheTests( onos_b )
-
// list of the tests to be run will be saved in each choices.
day = ""
SCPF_choices = ""
@@ -166,7 +163,6 @@
if ( !manually_run ){
funcs.generateStatGraph( "TestStation-VMs",
funcs.branchWithPrefix( onos_b ),
- AllTheTests,
stat_graph_generator_file,
pie_graph_generator_file,
graph_saved_directory )
@@ -174,259 +170,76 @@
// function that will divide tests depends on which day it is.
def testDivider( today ){
- switch ( today ){
- case Calendar.MONDAY:
- // THe reason Monday calls all the days is because we want to post the test schedules on the wiki
- // and slack channel every monday.
- // It will only generate the list of the test for monday.
- initHtmlForWiki()
- monday( true )
- tuesday( true, false )
- wednesday( true, false )
- thursday( true, false )
- friday( true, false )
- saturday( false, false )
- sunday( false, false )
- day = "Monday"
- closeHtmlForWiki()
- postToWiki( wikiContents )
- slackSend( color: '#FFD988',
- message: "Tests to be run this weekdays : \n" +
- triggerFuncs.printDaysForTest( AllTheTests ) )
- break
- case Calendar.TUESDAY:
- tuesday( false, true )
- day = "Tuesday"
- break
- case Calendar.WEDNESDAY:
- wednesday( false, true )
- day = "Wednesday"
- break
- case Calendar.THURSDAY:
- thursday( false, true )
- day = "Thursday"
- break
- case Calendar.FRIDAY:
- friday( false, true )
- day = "Friday"
- break
- case Calendar.SATURDAY:
- saturday( false, true )
- day = "Saturday"
- break
- case Calendar.SUNDAY:
- sunday( false, true )
- day = "Sunday"
- break
+ todayStr = today.toString()
+
+ dayMap = [ ( Calendar.MONDAY ) : "mon",
+ ( Calendar.TUESDAY ) : "tue",
+ ( Calendar.WEDNESDAY ) : "wed",
+ ( Calendar.THURSDAY ) : "thu",
+ ( Calendar.FRIDAY ) : "fri",
+ ( Calendar.SATURDAY ) : "sat",
+ ( Calendar.SUNDAY ) : "sun" ]
+ fullDayMap = [ ( Calendar.MONDAY ) : "Monday",
+ ( Calendar.TUESDAY ) : "Tuesday",
+ ( Calendar.WEDNESDAY ) : "Wednesday",
+ ( Calendar.THURSDAY ) : "Thursday",
+ ( Calendar.FRIDAY ) : "Friday",
+ ( Calendar.SATURDAY ) : "Saturday",
+ ( Calendar.SUNDAY ) : "Sunday" ]
+
+ if ( today == Calendar.MONDAY ){
+ initHtmlForWiki()
+ day = fullDayMap[ today ]
+ for ( key in dayMap.keySet() ){
+ buildDay( dayMap[ key ], false )
+ }
+ closeHtmlForWiki()
+ postToWiki( wikiContents )
+ slackSend( color: '#FFD988',
+ message: "Tests to be run this weekdays : \n" +
+ triggerFuncs.printDaysForTest() )
+ } else {
+ day = fullDayMap[ today ]
+ buildDay( dayMap[ today ], true )
}
}
-// function for monday.
-def monday( getResult ){
- // add header for wiki page script.
+def buildDay( dayStr, getResult ){
addingHeader( "FUNC" )
- // call category of basic and extra_A of FUNC tests.
- // put M into the dictionary.
- FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
- FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
+ FUNC_choices += adder( "FUNC", dayStr, getResult )
closingHeader( "FUNC" )
addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", true, "M", getResult )
- HA_choices += adder( "HA", "extra_A", true, "M", getResult )
+ HA_choices += adder( "HA", dayStr, getResult )
closingHeader( "HA" )
addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
- SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
+ SCPF_choices += adder( "SCPF", dayStr, getResult )
closingHeader( "SCPF" )
addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", true, "M", false )
+ SR_choices += adder( "SR", dayStr, false )
closingHeader( "SR" )
addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", true, "M", false )
+ SRHA_choices += adder( "SRHA", dayStr, false )
closingHeader( "SRHA" )
addingHeader( "USECASE" )
- closingHeader( "USECASE" )
-}
-
-// If get result is false, it will not add the test result to xx_choices, but will generate the
-// header and days
-def tuesday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
- FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "T", getResult )
- HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
- SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "T", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "T", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
- USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
- closingHeader( "USECASE" )
-}
-
-def wednesday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
- FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "W", getResult )
- HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
- SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "W", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "W", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- closingHeader( "USECASE" )
-}
-
-def thursday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
- FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
- HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
- SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "Th", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "Th", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- closingHeader( "USECASE" )
-}
-
-def friday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
- FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "F", getResult )
- HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
- SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
- SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "F", false )
- SR_choices += adder( "SR", "extra_A", getDay, "F", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "F", false )
- SRHA_choices += adder( "SRHA", "extra_A", getDay, "F", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- closingHeader( "USECASE" )
-}
-
-def saturday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
- FUNC_choices += adder( "FUNC", "extra_A", getDay, "Sa", getResult )
- FUNC_choices += adder( "FUNC", "extra_B", getDay, "Sa", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "Sa", getResult )
- HA_choices += adder( "HA", "extra_A", getDay, "Sa", getResult )
- HA_choices += adder( "HA", "extra_B", getDay, "Sa", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "Sa", getResult )
- SCPF_choices += adder( "SCPF", "extra_A", getDay, "Sa", getResult )
- SCPF_choices += adder( "SCPF", "extra_B", getDay, "Sa", getResult )
- SCPF_choices += adder( "SCPF", "extra_C", getDay, "Sa", getResult )
- SCPF_choices += adder( "SCPF", "extra_D", getDay, "Sa", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "Sa", false )
- SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "Sa", false )
- SRHA_choices += adder( "SRHA", "extra_B", getDay, "Sa", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
- closingHeader( "USECASE" )
-}
-
-def sunday( getDay, getResult ){
- addingHeader( "FUNC" )
- FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
- FUNC_choices += adder( "FUNC", "extra_A", getDay, "S", getResult )
- FUNC_choices += adder( "FUNC", "extra_B", getDay, "S", getResult )
- closingHeader( "FUNC" )
- addingHeader( "HA" )
- HA_choices += adder( "HA", "basic", getDay, "S", getResult )
- HA_choices += adder( "HA", "extra_A", getDay, "S", getResult )
- HA_choices += adder( "HA", "extra_B", getDay, "S", getResult )
- closingHeader( "HA" )
- addingHeader( "SCPF" )
- SCPF_choices += adder( "SCPF", "basic", getDay, "S", getResult )
- SCPF_choices += adder( "SCPF", "extra_A", getDay, "S", getResult )
- SCPF_choices += adder( "SCPF", "extra_B", getDay, "S", getResult )
- SCPF_choices += adder( "SCPF", "extra_C", getDay, "S", getResult )
- SCPF_choices += adder( "SCPF", "extra_D", getDay, "S", getResult )
- closingHeader( "SCPF" )
- addingHeader( "SR" )
- SR_choices += adder( "SR", "basic", getDay, "S", false )
- closingHeader( "SR" )
- addingHeader( "SRHA" )
- SRHA_choices += adder( "SRHA", "basic", getDay, "S", false )
- closingHeader( "SRHA" )
- addingHeader( "USECASE" )
- USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
+ USECASE_choices += adder( "USECASE", dayStr, getResult )
closingHeader( "USECASE" )
}
// adder that will return the list of the tests.
-def adder( testCat, set, dayAdding, day, getResult ){
- // testCat : the category of the test which will be either FUNC,HA,SR...
- // set : the set of the test to be run which will be basic,extra_A,extra_B...
- // dayAdding : boolean whether to add the days into the list or not
+def adder( category, day, getResult ){
+ // category : the category of the test which will be either FUNC,HA,SR...
// day : the day you are trying to add (m,t,w,th... )
// getResult : if want to get the list of the test to be run. False will return empty list.
// And once the list is empty, it will not be run.
def result = ""
- for ( String test in AllTheTests[ testCat ].keySet() ){
- if ( AllTheTests[ testCat ][ test ][ set ] ){
- if ( getResult ){
- result += test + ","
- }
- if ( dayAdding ){
- dayAdder( testCat, test, day )
- }
- // make HTML columns for wiki page on schedule.
- makeHtmlColList( testCat, test )
+    selectedTests = test_list.getTestsFromCategory( category, test_list.getTestsFromDay( day, onos_b ) )
+
+ for ( String test in selectedTests.keySet() ){
+ if ( getResult ){
+ result += test + ","
}
+ // make HTML columns for wiki page on schedule.
+ makeHtmlColList( category, test )
}
return result
}
@@ -513,8 +326,3 @@
workspace + filename )
}
}
-
-// add the day to the "day" on the dictionary.
-def dayAdder( testCat, testName, dayOfWeek ){
- AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
-}
diff --git a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
index 68d75e9..d64f12e 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
@@ -26,8 +26,10 @@
generalFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/GeneralFuncs.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated.init()
+test_list.init()
def initializeTrend( machine ){
// For initializing any trend graph jobs
@@ -495,22 +497,6 @@
}
}
-def makeTestList( list, commaNeeded ){
- // make the list of the test in to a string.
- // list : list of the test
- // commaNeeded : if comma is needed for the string
-
- return generalFuncs.getTestList( list ) + ( commaNeeded ? "," : "" )
-}
-
-def createStatsList( testCategory, list, semiNeeded ){
- // make the list for stats
- // testCategory : category of the test
- // list : list of the test
- // semiNeeded: if semi colon is needed
-
- return testCategory + "-" + generalFuncs.getTestList( list ) + ( semiNeeded ? ";" : "" )
-}
def generateOverallGraph( prop, testCategory, graph_saved_directory ){
// generate the overall graph for the test
@@ -567,18 +553,24 @@
done '''
}
-def generateStatGraph( testMachineOn, onos_branch, AllTheTests, stat_graph_generator_file, pie_graph_generator_file,
+def generateStatGraph( testMachineOn, onos_branch, stat_graph_generator_file, pie_graph_generator_file,
graph_saved_directory ){
- // Will generate the stats graph.
- testListPart = createStatsList( "FUNC", AllTheTests[ "FUNC" ], true ) +
- createStatsList( "HA", AllTheTests[ "HA" ], true ) +
- createStatsList( "USECASE", AllTheTests[ "USECASE" ], false )
- pieTestList = makeTestList( AllTheTests[ "FUNC" ], true ) +
- makeTestList( AllTheTests[ "HA" ], true ) +
- makeTestList( AllTheTests[ "USECASE" ], false )
+ // Will generate the stats graph.
+ FUNCtestsStr = test_list.getTestListAsString( test_list.getTestsFromCategory( "FUNC" ) )
+ HAtestsStr = test_list.getTestListAsString( test_list.getTestsFromCategory( "HA" ) )
+ USECASEtestsStr = test_list.getTestListAsString( test_list.getTestsFromCategory( "USECASE" ) )
+
+ testListParam = "FUNC-" + FUNCtestsStr + ";" +
+ "HA-" + HAtestsStr + ";" +
+ "USECASE-" + USECASEtestsStr
+
+ pieTestListParam = FUNCtestsStr + "," +
+ HAtestsStr + "," +
+ USECASEtestsStr
+
generateCategoryStatsGraph( testMachineOn, "false", "true", stat_graph_generator_file, pie_graph_generator_file,
- "ALL", onos_branch, testListPart, graph_saved_directory, pieTestList )
+ "ALL", onos_branch, testListParam, graph_saved_directory, pieTestListParam )
}
def branchWithPrefix( branch ){
diff --git a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
index 0fcec01..e4690c3 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
@@ -1,6 +1,6 @@
#!groovy
-// Copyright 2017 Open Networking Foundation (ONF)
+// Copyright 2019 Open Networking Foundation (ONF)
//
// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
@@ -19,589 +19,86 @@
// You should have received a copy of the GNU General Public License
// along with TestON. If not, see <http://www.gnu.org/licenses/>.
-// This is the dependency Jenkins script.
-// This will provide the basic information for the tests for scheduling.
-// Any new test to be added should be added here.
+import groovy.json.*
+allTests = [:]
+schedules = [:]
-def getAllTheTests( wikiPrefix ){
- // This contains the dictionary of the test and the category of them
- // wikiPrefix : master, 2.1, 1.15 ...
+def init(){
+    // Parse tests.json and schedule.json (paths are relative to the Jenkins CWD) into the script bindings.
+    def jsonSlurper = new JsonSlurper()
+    // withReader closes the underlying stream; the previous BufferedReader/FileInputStream pair was never closed.
+    allTests = new File( "tests.json" ).withReader( "UTF-8" ){ reader -> jsonSlurper.parse( reader ) }
+    schedules = new File( "schedule.json" ).withReader( "UTF-8" ){ reader -> jsonSlurper.parse( reader ) }
+}
- // category: it will be used to distinguish which category to be run on which days ( basic,extra_A, extra_B ... )
- // day: it will be used to display the schedule of the test to be run to the slack. It is empty in the first place but will be
- // filled out every monday.
- // wiki_link : link of the wiki page that will be used to publish to confluence later on. SCPF tests don't need one.
+def getAllTests(){
+    return allTests  // full map parsed from tests.json; empty until init() has run
+}
- return [
- "FUNC": [
- "FUNCipv6Intent": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCipv6Intent",
- wiki_file: "FUNCipv6IntentWiki.txt" ],
- "FUNCoptical": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCoptical",
- wiki_file: "FUNCopticalWiki.txt" ],
- "FUNCflow": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCflow",
- wiki_file: "FUNCflowWiki.txt" ],
- "FUNCnetCfg": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCnetCfg",
- wiki_file: "FUNCnetCfgWiki.txt" ],
- "FUNCovsdbtest": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCovsdbtest",
- wiki_file: "FUNCovsdbtestWiki.txt" ],
- "FUNCnetconf": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCnetconf",
- wiki_file: "FUNCnetconfWiki.txt" ],
- "FUNCgroup": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCgroup",
- wiki_file: "FUNCgroupWiki.txt" ],
- "FUNCintent": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCintent",
- wiki_file: "FUNCintentWiki.txt" ],
- "FUNCintentRest": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCintentRest",
- wiki_file: "FUNCintentRestWiki.txt" ],
- "FUNCformCluster": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCformCluster",
- wiki_file: "FUNCformClusterWiki.txt" ]
- ],
- "HA": [
- "HAsanity": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Sanity",
- wiki_file: "HAsanityWiki.txt" ],
- "HAclusterRestart": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Cluster Restart",
- wiki_file: "HAclusterRestartWiki.txt" ],
- "HAsingleInstanceRestart": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Single Instance Restart",
- wiki_file: "HAsingleInstanceRestartWiki.txt" ],
- "HAstopNodes": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Stop Nodes",
- wiki_file: "HAstopNodes.txt" ],
- "HAfullNetPartition": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Full Network Partition",
- wiki_file: "HAfullNetPartitionWiki.txt" ],
- "HAswapNodes": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Swap Nodes",
- wiki_file: "HAswapNodesWiki.txt" ],
- "HAscaling": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Scaling",
- wiki_file: "HAscalingWiki.txt" ],
- "HAkillNodes": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Kill Nodes",
- wiki_file: "HAkillNodes.txt" ],
- "HAbackupRecover": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Backup Recover",
- wiki_file: "HAbackupRecoverWiki.txt" ],
- "HAupgrade": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Upgrade",
- wiki_file: "HAupgradeWiki.txt" ],
- "HAupgradeRollback": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "HA Upgrade Rollback",
- wiki_file: "HAupgradeRollbackWiki.txt" ]
- ],
- "SCPF": [
- "SCPFswitchLat": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": "" ],
- "SCPFcbench": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFportLat": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFflowTp1g": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentEventTp": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFhostLat": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFbatchFlowResp": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentRerouteLat": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentInstallWithdrawLat": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFflowTp1gWithFlowObj": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentEventTpWithFlowObj": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentRerouteLatWithFlowObj": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFscalingMaxIntentsWithFlowObj": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFintentInstallWithdrawLatWithFlowObj": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFscaleTopo": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "extra_C": true,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFscalingMaxIntents": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": false,
- "new_Test": false,
- "day": " " ],
- "SCPFmastershipFailoverLat": [
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "extra_C": false,
- "extra_D": true,
- "new_Test": false,
- "day": " " ]
- ],
- "USECASE": [
- "FUNCvirNetNB": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCvirNetNB",
- wiki_file: "FUNCvirNetNBWiki.txt" ],
- "FUNCbgpls": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "FUNCbgpls",
- wiki_file: "FUNCbgplsWiki.txt" ],
- "VPLSBasic": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "VPLSBasic",
- wiki_file: "VPLSBasicWiki.txt" ],
- "VPLSfailsafe": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "VPLSfailsafe",
- wiki_file: "VPLSfailsafeWiki.txt" ],
- "USECASE_SdnipFunction": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SDNIP Function",
- wiki_file: "USECASE_SdnipFunctionWiki.txt" ],
- "USECASE_SdnipFunctionCluster": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SDNIP Function Cluster",
- wiki_file: "USECASE_SdnipFunctionClusterWiki.txt" ],
- "PLATdockertest": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: "Docker Images sanity test",
- wiki_file: "PLATdockertestTableWiki.txt" ]
- ],
- "SR": [
- "SRBridging": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Bridging",
- wiki_file: "SRBridgingWiki.txt" ],
- "SRRouting": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Routing",
- wiki_file: "SRRoutingWiki.txt" ],
- "SRDhcprelay": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Dhcp Relay",
- wiki_file: "SRDhcprelayWiki.txt" ],
- "SRDynamicConf": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Dynamic Config",
- wiki_file: "SRDynamicConfWiki.txt" ],
- "SRMulticast": [
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Multi Cast",
- wiki_file: "SRMulticastWiki.txt" ],
- "SRSanity": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Sanity",
- wiki_file: "SRSanityWiki.txt" ],
- "SRSwitchFailure": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Switch Failure",
- wiki_file: "SRSwitchFailureWiki.txt" ],
- "SRLinkFailure": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Link Failure",
- wiki_file: "SRLinkFailureWiki.txt" ],
- "SROnosFailure": [
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Onos node Failure",
- wiki_file: "SROnosFailureWiki.txt" ],
- "SRClusterRestart": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Cluster Restart",
- wiki_file: "SRClusterRestartWiki.txt" ],
- "SRDynamic": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR Dynamic",
- wiki_file: "SRDynamicWiki.txt" ],
- "SRHighAvailability": [
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR High Availability",
- wiki_file: "SRHighAvailabilityWiki.txt" ]
- ],
- "SRHA": [
- "SRHAsanity": [
- "test": "HAsanity --params-file HAsanity.params.fabric",
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Sanity",
- wiki_file: "HAsanityWiki.txt" ],
- "SRHAclusterRestart": [
- "test": "HAclusterRestart --params-file HAclusterRestart.params.fabric",
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Cluster Restart",
- wiki_file: "HAclusterRestartWiki.txt" ],
- "SRHAsingleInstanceRestart": [
- "test": "HAsingleInstanceRestart --params-file HAsingleInstanceRestart.params.fabric",
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Single Instance Restart",
- wiki_file: "HAsingleInstanceRestartWiki.txt" ],
- "SRHAstopNodes": [
- "test": "HAstopNodes --params-file HAstopNodes.params.fabric",
- "basic": true,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Stop Nodes",
- wiki_file: "HAstopNodes.txt" ],
- "SRHAfullNetPartition": [
- "test": "HAfullNetPartition --params-file HAfullNetPartition.params.fabric",
- "basic": false,
- "extra_A": true,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Full Network Partition",
- wiki_file: "HAfullNetPartitionWiki.txt" ],
- "SRHAswapNodes": [
- "test": "HAswapNodes --params-file HAswapNodes.params.fabric",
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Swap Nodes",
- wiki_file: "HAswapNodesWiki.txt" ],
- "SRHAscaling": [
- "test": "HAscaling --params-file HAscaling.params.fabric",
- "basic": false,
- "extra_A": false,
- "extra_B": true,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Scaling",
- wiki_file: "HAscalingWiki.txt" ],
- "SRHAkillNodes": [
- "test": "HAkillNodes --params-file HAkillNodes.params.fabric",
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Kill Nodes",
- wiki_file: "HAkillNodes.txt" ],
- "SRHAbackupRecover": [
- "test": "HAbackupRecover --params-file HAbackupRecover.params.fabric",
- "basic": true,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Backup Recover",
- wiki_file: "HAbackupRecoverWiki.txt" ],
- "SRHAupgrade": [
- "test": "HAupgrade --params-file HAupgrade.params.fabric",
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Upgrade",
- wiki_file: "HAupgradeWiki.txt" ],
- "SRHAupgradeRollback": [
- "test": "HAupgradeRollback --params-file HAupgradeRollback.params.fabric",
- "basic": false,
- "extra_A": false,
- "extra_B": false,
- "new_Test": false,
- "day": "",
- wiki_link: wikiPrefix + "-" + "SR HA Upgrade Rollback",
- wiki_file: "HAupgradeRollbackWiki.txt" ]
- ]
- ]
+def getSchedules(){
+    return schedules  // map of schedule key -> list of day strings, parsed from schedule.json; empty until init()
+}
+
+def getTestsFromCategory( category, tests=[:] ){  // subset of `tests` whose "category" field equals `category` (e.g. "FUNC")
+    result = [:]
+    if ( tests == [:] ){
+        tests = allTests  // default to the full tests.json map loaded by init()
+    }
+    for ( String key in tests.keySet() ){
+        if ( tests[ key ][ "category" ] == category ){
+            result.put( key, tests[ key ] )
+        }
+    }
+    return result
+}
+
+def getTestsFromDay( day, branch, tests=[:] ){  // tests whose schedule for `branch` includes `day` (e.g. "mon")
+    result = [:]
+    if ( tests == [:] ){
+        tests = allTests  // default to the full tests.json map loaded by init()
+    }
+    validSchedules = []  // schedule keys (e.g. "weekdays") that cover the requested day
+    for ( String key in schedules.keySet() ){
+        if ( schedules[ key ].contains( day ) ){
+            validSchedules += key
+        }
+    }
+    echo validSchedules.toString()  // NOTE(review): debug output — consider removing before merge
+    for ( String key in tests.keySet() ){
+        schedule = tests[ key ][ "schedule" ][ branch ]  // schedule key for this branch; null if branch absent, which never matches
+        if ( validSchedules.contains( schedule ) ){
+            result.put( key, tests[ key ] )
+        }
+    }
+    return result
+}
+
+def getTestsFromNodeLabel( nodeLabel, tests=[:] ){  // subset of `tests` whose "nodeLabel" field equals `nodeLabel` (e.g. "VM")
+    result = [:]  // was missing: result.put below crashed on an undefined binding
+    if ( tests == [:] ){ tests = allTests }  // default to the full tests.json map loaded by init()
+    for ( String key in tests.keySet() ){
+        if ( tests[ key ][ "nodeLabel" ] == nodeLabel ){
+            result.put( key, tests[ key ] )
+        }
+    }
+    return result  // was missing: method always yielded null
+}
+
+def getTestListAsString( tests ){  // comma-joined test names, e.g. "FUNCflow,FUNCintent"
+    result = ""
+    for ( String key in tests.keySet() ){  // fixed: loop body used `test`, an undefined binding
+        result += key + ","
+    }
+    return result ? result[ 0..-2 ] : ""  // strip trailing comma; guard: [ 0..-2 ] throws on an empty string
+}
+
+def getTestSchedule( test ){
+    return allTests[ test ][ "schedule" ]  // map of branch -> schedule key, e.g. [ master: "weekdays" ]
+}
+
+def convertScheduleKeyToDays( sch ){
+    return schedules[ sch ]  // list of day strings for a schedule key, e.g. "tue_thu" -> [ "tue", "thu" ]
+}
return this
diff --git a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
index effc766..c35a94e 100644
--- a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
@@ -23,6 +23,8 @@
// This will provide the portion that will set up the environment of the machine
// and trigger the corresponding jobs.
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list.init()
def init( commonFuncs ){
funcs = commonFuncs
@@ -37,17 +39,20 @@
return str
}
-def printDaysForTest( AllTheTests ){
+def printDaysForTest(){
// Print the days for what test has.
+ AllTheTests = test_list.getAllTests()
result = ""
for ( String test in AllTheTests.keySet() ){
- result += test + " : \n"
- for ( String each in AllTheTests[ test ].keySet() ){
- AllTheTests[ test ][ each ][ "day" ] = lastCommaRemover( AllTheTests[ test ][ each ][ "day" ] )
- result += " " + each + ":[" + AllTheTests[ test ][ each ][ "day" ] + "]\n"
+ result += test + ": ["
+ test_schedule = test_list.getTestSchedule( test )
+        for ( String sch in test_schedule.keySet() ){
+            for ( String day in test_list.convertScheduleKeyToDays( sch ) ){  // fixed: must be qualified — defined in JenkinsTestONTests.groovy, not in this script
+                result += day + " "
+            }
}
- result += "\n"
+ result += "]\n"
}
return result
}
diff --git a/TestON/JenkinsFile/dependencies/schedule.json b/TestON/JenkinsFile/dependencies/schedule.json
new file mode 100644
index 0000000..a82daa2
--- /dev/null
+++ b/TestON/JenkinsFile/dependencies/schedule.json
@@ -0,0 +1,19 @@
+{
+ "everyday": [ "sun", "mon", "tue", "wed", "thu", "fri", "sat" ],
+ "weekdays": [ "mon", "tue", "wed", "thu", "fri" ],
+ "weekends": [ "sun", "sat" ],
+ "mon_wed_fri": [ "mon", "wed", "fri" ],
+ "tue_thu": [ "tue", "thu" ],
+ "wed_fri": [ "wed", "fri" ],
+ "mon_thu": [ "mon", "thu" ],
+ "sun": [ "sun" ],
+ "mon": [ "mon" ],
+ "tue": [ "tue" ],
+ "wed": [ "wed" ],
+ "thu": [ "thu" ],
+ "fri": [ "fri" ],
+ "sat": [ "sat" ],
+ "onos-1.x_schedule": [ "sat" ],
+ "onos-2.x_schedule": [ "sun" ],
+ "USECASE_master": [ "tue" ]
+}
diff --git a/TestON/JenkinsFile/dependencies/tests.json b/TestON/JenkinsFile/dependencies/tests.json
new file mode 100644
index 0000000..89927de
--- /dev/null
+++ b/TestON/JenkinsFile/dependencies/tests.json
@@ -0,0 +1,790 @@
+{
+ "FUNCflow": {
+ "wikiName": "FUNCflow",
+ "wikiFile": "FUNCflowWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCintent": {
+ "wikiName": "FUNCintent",
+ "wikiFile": "FUNCintentWiki.txt",
+ "schedule": {
+ "master": "mon_wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCintentRest": {
+ "wikiName": "FUNCintentRest",
+ "wikiFile": "FUNCintentRestWiki.txt",
+ "schedule": {
+ "master": "tue_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCipv6Intent": {
+ "wikiName": "FUNCipv6Intent",
+ "wikiFile": "FUNCipv6IntentWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCnetCfg": {
+ "wikiName": "FUNCnetCfg",
+ "wikiFile": "FUNCnetCfgWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCnetconf": {
+ "wikiName": "FUNCnetconf",
+ "wikiFile": "FUNCnetconfWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCoptical": {
+ "wikiName": "FUNCoptical",
+ "wikiFile": "FUNCopticalWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCovsdbtest": {
+ "wikiName": "FUNCovsdbtest",
+ "wikiFile": "FUNCovsdbtestWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCgroup": {
+ "wikiName": "FUNCgroup",
+ "wikiFile": "FUNCgroupWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCformCluster": {
+ "wikiName": "FUNCformCluster",
+ "wikiFile": "FUNCformClusterWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "FUNC",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAsanity": {
+ "wikiName": "HA Sanity",
+ "wikiFile": "HAsanityWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAsingleInstanceRestart": {
+ "wikiName": "HA Single Instance Restart",
+ "wikiFile": "HAsingleInstanceRestartWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAclusterRestart": {
+ "wikiName": "HA Cluster Restart",
+        "wikiFile": "HAclusterRestartWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAkillNodes": {
+ "wikiName": "HA Kill Nodes",
+ "wikiFile": "HAkillNodes.txt",
+ "schedule": {
+ "master": "tue_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAstopNodes": {
+ "wikiName": "HA Stop Nodes",
+ "wikiFile": "HAstopNodes.txt",
+ "schedule": {
+ "master": "mon_wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAfullNetPartition": {
+ "wikiName": "HA Full Network Partition",
+ "wikiFile": "HAfullNetPartitionWiki.txt",
+ "schedule": {
+ "master": "mon_wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAscaling": {
+ "wikiName": "HA Scaling",
+ "wikiFile": "HAscalingWiki.txt",
+ "schedule": {
+ "master": "tue_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAswapNodes": {
+ "wikiName": "HA Swap Nodes",
+ "wikiFile": "HAswapNodesWiki.txt",
+ "schedule": {
+ "master": "tue_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAupgrade": {
+ "wikiName": "HA Upgrade",
+ "wikiFile": "HAupgradeWiki.txt",
+ "schedule": {
+
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAupgradeRollback": {
+ "wikiName": "HA Upgrade Rollback",
+ "wikiFile": "HAupgradeRollbackWiki.txt",
+ "schedule": {
+
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "HAbackupRecover": {
+ "wikiName": "HA Backup Recover",
+ "wikiFile": "HAbackupRecoverWiki.txt",
+ "schedule": {
+ "master": "tue_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "HA",
+ "nodeLabel": "VM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFswitchLat": {
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFportLat": {
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentInstallWithdrawLat": {
+ "schedule": {
+ "master": "wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentInstallWithdrawLatWithFlowObj": {
+ "schedule": {
+ "master": "mon_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentRerouteLat": {
+ "schedule": {
+ "master": "wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentRerouteLatWithFlowObj": {
+ "schedule": {
+ "master": "mon_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentEventTp": {
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFintentEventTpWithFlowObj": {
+ "schedule": {
+ "master": "mon_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFscaleTopo": {
+ "schedule": {
+ "master": "tue",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFflowTp1g": {
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFflowTp1gWithFlowObj": {
+ "schedule": {
+ "master": "mon_thu",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFcbench": {
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFscalingMaxIntents": {
+ "schedule": {
+
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFscalingMaxIntentsWithFlowObj": {
+ "schedule": {
+
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFbatchFlowResp": {
+ "schedule": {
+ "master": "wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFhostLat": {
+ "schedule": {
+ "master": "wed_fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SCPFmastershipFailoverLat": {
+ "schedule": {
+ "master": "fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SCPF",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "SRBridging": {
+ "wikiName": "SR Bridging",
+ "wikiFile": "SRBridgingWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRRouting": {
+ "wikiName": "SR Routing",
+ "wikiFile": "SRRoutingWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRDhcprelay": {
+ "wikiName": "SR Dhcp Relay",
+ "wikiFile": "SRDhcprelayWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRDynamicConf": {
+ "wikiName": "SR Dynamic Config",
+ "wikiFile": "SRDynamicConfWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRMulticast": {
+ "wikiName": "SR Multi Cast",
+ "wikiFile": "SRMulticastWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRSanity": {
+ "wikiName": "SR Sanity",
+ "wikiFile": "SRSanityWiki.txt",
+ "schedule": {
+ "master": "fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRSwitchFailure": {
+ "wikiName": "SR Switch Failure",
+ "wikiFile": "SRSwitchFailureWiki.txt",
+ "schedule": {
+ "master": "fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRLinkFailure": {
+ "wikiName": "SR Link Failure",
+ "wikiFile": "SRLinkFailureWiki.txt",
+ "schedule": {
+ "master": "fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SROnosFailure": {
+ "wikiName": "SR Onos node Failure",
+ "wikiFile": "SROnosFailureWiki.txt",
+ "schedule": {
+ "master": "fri",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRClusterRestart": {
+ "wikiName": "SR Cluster Restart",
+ "wikiFile": "SRClusterRestartWiki.txt",
+ "schedule": {
+ "master": "sat",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRDynamic": {
+ "wikiName": "SR Dynamic",
+ "wikiFile": "SRDynamicWiki.txt",
+ "schedule": {
+ "master": "sat",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHighAvailability": {
+ "wikiName": "SR High Availability",
+ "wikiFile": "SRHighAvailabilityWiki.txt",
+ "schedule": {
+ "master": "sat",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAsanity": {
+ "test": "HAsanity --params-file HAsanity.params.fabric",
+ "wikiName": "SR HA Sanity",
+ "wikiFile": "HAsanityWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+    "SRHAclusterRestart": {
+        "test": "HAclusterRestart --params-file HAclusterRestart.params.fabric",
+        "wikiName": "SR HA Cluster Restart",
+        "wikiFile": "HAclusterRestartWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAsingleInstanceRestart": {
+ "test": "HAsingleInstanceRestart --params-file HAsingleInstanceRestart.params.fabric",
+ "wikiName": "SR HA Single Instance Restart",
+ "wikiFile": "HAsingleInstanceRestartWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAstopNodes": {
+ "test": "HAstopNodes --params-file HAstopNodes.params.fabric",
+ "wikiName": "SR HA Stop Nodes",
+ "wikiFile": "HAstopNodes.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAfullNetPartition": {
+ "test": "HAfullNetPartition --params-file HAfullNetPartition.params.fabric",
+ "wikiName": "SR HA Full Network Partition",
+ "wikiFile": "HAfullNetPartitionWiki.txt",
+ "schedule": {
+ "master": "fri"
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAswapNodes": {
+ "test": "HAswapNodes --params-file HAswapNodes.params.fabric",
+ "wikiName": "SR HA Swap Nodes",
+ "wikiFile": "HAswapNodesWiki.txt",
+ "schedule": {
+ "master": "sat"
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAscaling": {
+ "test": "HAscaling --params-file HAscaling.params.fabric",
+ "wikiName": "SR HA Scaling",
+ "wikiFile": "HAscalingWiki.txt",
+ "schedule": {
+ "master": "sat"
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAkillNodes": {
+ "test": "HAkillNodes --params-file HAkillNodes.params.fabric",
+ "wikiName": "SR HA Kill Nodes",
+ "wikiFile": "HAkillNodes.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAbackupRecover": {
+ "test": "HAbackupRecover --params-file HAbackupRecover.params.fabric",
+ "wikiName": "SR HA Backup Recover",
+ "wikiFile": "HAbackupRecoverWiki.txt",
+ "schedule": {
+ "master": "weekdays",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAupgrade": {
+ "test": "HAupgrade --params-file HAupgrade.params.fabric",
+ "wikiName": "SR HA Upgrade",
+ "wikiFile": "HAupgradeWiki.txt",
+ "schedule": {
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "SRHAupgradeRollback": {
+ "test": "HAupgradeRollback --params-file HAupgradeRollback.params.fabric",
+ "wikiName": "SR HA Upgrade Rollback",
+ "wikiFile": "HAupgradeRollbackWiki.txt",
+ "schedule": {
+ },
+ "category": "SR",
+ "nodeLabel": "Fabric",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCvirNetNB": {
+ "wikiName": "FUNCvirNetNB",
+ "wikiFile": "FUNCvirNetNBWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "FUNCbgpls": {
+ "wikiName": "FUNCbgpls",
+ "wikiFile": "FUNCbgplsWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "VPLSBasic": {
+ "wikiName": "VPLSBasic",
+ "wikiFile": "VPLSBasicWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "VPLSfailsafe": {
+ "wikiName": "VPLSfailsafe",
+ "wikiFile": "VPLSfailsafeWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "USECASE_SdnipFunction": {
+ "wikiName": "SDNIP Function",
+ "wikiFile": "USECASE_SdnipFunctionWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "USECASE_SdnipFunctionCluster": {
+ "wikiName": "SDNIP Function Cluster",
+ "wikiFile": "USECASE_SdnipFunctionClusterWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ },
+ "PLATdockertest": {
+ "wikiName": "Docker Images sanity test",
+ "wikiFile": "PLATdockertestTableWiki.txt",
+ "schedule": {
+ "master": "USECASE_master",
+ "onos-1.x": "onos-1.x_schedule",
+ "onos-2.x": "onos-2.x_schedule"
+ },
+ "category": "USECASE",
+ "nodeLabel": "BM",
+ "supportedBranches": [ "all" ]
+ }
+}
diff --git a/TestON/JenkinsFile/generateReleaseTestONWiki.groovy b/TestON/JenkinsFile/generateReleaseTestONWiki.groovy
index a2526cf..3f9862f 100644
--- a/TestON/JenkinsFile/generateReleaseTestONWiki.groovy
+++ b/TestON/JenkinsFile/generateReleaseTestONWiki.groovy
@@ -25,7 +25,7 @@
// example, if you want to generate the test category pages, you will need the page ID
// of the top level branch results.
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+test_list = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
runningNode = "TestStation-BMs"
@@ -45,9 +45,6 @@
"1Gbps NIC",
"JAVA_OPTS=\"\${JAVA_OPTS:--Xms8G -Xmx8G}\"" ]
-// Get all the list of the tests from the JenkinsTestONTests.groovy
-AllTheTests = test_lists.getAllTheTests( onos_v )
-
// get the name of the job.
jobName = env.JOB_NAME
@@ -158,12 +155,12 @@
}
-def pageTree( category ){
+def pageTree( category, testsFromCategory ){
pTree = "<ul>"
- for ( String test in AllTheTests[ category ].keySet() ){
- testTitle = AllTheTests[ category ][ test ][ "wiki_link" ]
+ for ( String test in testsFromCategory.keySet() ){
+ testTitle = onos_v + "-" + testsFromCategory[ test ][ "wikiName" ]
pTree += "<li><h3><a href=\"https://wiki.onosproject.org/display/ONOS/" + testTitle + "\">"
- pTree += AllTheTests[ category ][ test ][ "wiki_link" ] + "</a></h3></li>"
+ pTree += testTitle + "</a></h3></li>"
}
pTree += "</ul>"
return pTree
@@ -272,7 +269,8 @@
titleHTML = "<h1>Test Results - " + title + "</h1>"
- pageTreeHTML = pageTree( category )
+ testsFromCategory = test_list.getTestsFromCategory( category )
+ pageTreeHTML = pageTree( category, testsFromCategory )
descriptionHTML = "<p>For test details, check out the <a href=\"" + testPlanLink + "\">test plans for " + category + " test cases</a>.</p>"
@@ -281,11 +279,12 @@
testGraphsHTML = ""
testGraphsClass = "confluence-embedded-image confluence-external-resource confluence-content-image-border"
- for ( String key in AllTheTests[ category ].keySet() ){
+
+
+ for ( String key in testsFromCategory.keySet() ){
imageLink = testTrendPrefix + key + testTrendSuffix
testGraphsHTML += makeImage( testGraphsClass, imageLink, 500 )
}
-
result = overallTrendHTML + titleHTML + pageTreeHTML + descriptionHTML + testGraphsHTML
return result