Add Fabric HA tests to Jenkins
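
Add an SRHA test category so the existing HA tests can be run against
the physical fabric. The Fabric and VM_BM Jenkinsfile triggers gain
SRHA choices and trigger the SRHA pipeline on the Fabric test station,
a new SRHAJenkinsFile drives the SRHA-pipeline-<branch> jobs, and the
SRHA suite in JenkinsTestONTests.groovy reuses the HA tests with
fabric-specific params files ( e.g. "HAsanity --params-file
HAsanity.params.fabric" ). JenkinsCommonFuncs.groovy gets a hasArgs
flag so those extra cli arguments are passed through to cli.py.
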
Change-Id: Id48bb961e24b18c65d7542b2744d567bb06c60f8
diff --git a/TestON/JenkinsFile/FabricJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
index 04695f7..349324b 100644
--- a/TestON/JenkinsFile/FabricJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -47,6 +47,7 @@
"HA": [ tests: "", nodeName: "VM", wikiContent: "" ],
"SCPF": [ tests: "", nodeName: "BM", wikiContent: "" ],
"SR": [ tests: "", nodeName: [ "Fabric2", "Fabric3", "Fabric4" ], wikiContent: "" ],
+ "SRHA": [ tests: "", nodeName: "Fabric", wikiContent: "" ],
"USECASE": [ tests: "", nodeName: "BM", wikiContent: "" ]
]
@@ -97,16 +98,21 @@
// Choices will get the list of the test with Segment Routing type tests.
SR_choices += adder( "SR", "basic", true )
+ SRHA_choices += adder( "SRHA", "basic", true )
if ( today == Calendar.FRIDAY ){
// if today is Friday, it will also test tests with extra_A category
SR_choices += adder( "SR", "extra_A", true )
+ SRHA_choices += adder( "SRHA", "extra_A", true )
}
else if ( today == Calendar.SATURDAY ){
// if today is Saturday, it will add the test with extra_B category
SR_choices += adder( "SR", "extra_B", true )
+ SRHA_choices += adder( "SRHA", "extra_B", true )
}
// removing last comma added at the end of the last test name.
SR_choices = triggerFuncs.lastCommaRemover( SR_choices )
+ SRHA_choices = triggerFuncs.lastCommaRemover( SRHA_choices )
+
}
@@ -119,6 +125,7 @@
else {
// set the list of the tests to run.
testcases[ "SR" ][ "tests" ] = SR_choices
+ testcases[ "SRHA" ][ "tests" ] = SRHA_choices
println "Defaulting to " + day + " tests:"
}
@@ -127,6 +134,7 @@
// This will hold the block of code to be run.
def runTest = [
+ "Fabric": [ : ],
"Fabric2": [ : ],
"Fabric3": [ : ],
"Fabric4": [ : ]
@@ -164,11 +172,17 @@
testcases[ "SR" ][ "nodeName" ][ 1 ],
"SR",
manually_run, onos_tag )
+ runTest[ "Fabric" ][ "SRHA" ] = triggerFuncs.trigger_pipeline( current_version,
+ testcases[ "SRHA" ][ "tests" ],
+ testcases[ "SRHA" ][ "nodeName" ],
+ "SRHA",
+ manually_run, onos_tag )
}
def finalList = [ : ]
// It will run each category of test to run sequentially on each branch.
+finalList[ "Fabric" ] = triggerFuncs.runTestSeq( runTest[ "Fabric" ] )
finalList[ "Fabric2" ] = triggerFuncs.runTestSeq( runTest[ "Fabric2" ] )
finalList[ "Fabric3" ] = triggerFuncs.runTestSeq( runTest[ "Fabric3" ] )
finalList[ "Fabric4" ] = triggerFuncs.runTestSeq( runTest[ "Fabric4" ] )
diff --git a/TestON/JenkinsFile/SRHAJenkinsFile b/TestON/JenkinsFile/SRHAJenkinsFile
new file mode 100644
index 0000000..218a9db
--- /dev/null
+++ b/TestON/JenkinsFile/SRHAJenkinsFile
@@ -0,0 +1,75 @@
+#!groovy
+// Copyright 2018 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+// TestON is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 2 of the License, or
+// (at your option) any later version.
+//
+// TestON is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with TestON. If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for SRHA-pipeline-<branch>
+
+// read dependencies.
+funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
+test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
+fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
+
+// init configuration to be SR
+fileRelated.init()
+funcs.initialize( "SR" )
+
+// Allow tests to be run with the --params-file argument
+hasArgs = true
+
+// get the name of the Jenkins job.
+jobName = env.JOB_NAME
+
+// SRHA has its own test machine
+// testmachine = "Fabrics"
+
+// read the TestON.property file depending on which branch it is running on.
+// ( currently master on Fabric4, 1.13 on Fabric2 and 1.12 on Fabric3 )
+def prop = null
+prop = funcs.getProperties()
+SRHA = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "SRHA" ]
+
+// set the file paths and directory
+graph_generator_file = fileRelated.trendIndividual
+graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-Fabric" + "/"
+
+// get the list of the tests.
+echo( "Testcases:" )
+def testsToRun = null
+testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
+funcs.printTestToRun( testsToRun )
+
+// save the functions that run the tests in the dictionary.
+def tests = [ : ]
+for ( String test : SRHA.keySet() ){
+ toBeRun = testsToRun.contains( test )
+ def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
+ tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false,
+ SRHA, graph_generator_file, graph_saved_directory )
+}
+
+// get start time
+start = funcs.getCurrentTime()
+
+// run the tests sequentially
+for ( test in tests.keySet() ){
+ tests[ test ].call()
+}
+//funcs.generateOverallGraph( prop, SR, graph_saved_directory )
+// send the test-end notification after the SRHA tests are done.
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/SRJenkinsFile b/TestON/JenkinsFile/SRJenkinsFile
index 8bee5b8..18958e3 100644
--- a/TestON/JenkinsFile/SRJenkinsFile
+++ b/TestON/JenkinsFile/SRJenkinsFile
@@ -25,9 +25,9 @@
test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
-// init configuratin to be SR
+// init configuration to be SR
fileRelated.init()
-funcs.initialize( "SR" );
+funcs.initialize( "SR" )
// get the name of the Jenkins job.
jobName = env.JOB_NAME
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
index d203f07..99ed843 100644
--- a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -47,6 +47,7 @@
"HA": [ tests: "", nodeName: "VM", wikiContent: "" ],
"SCPF": [ tests: "", nodeName: "BM", wikiContent: "" ],
"SR": [ tests: "", nodeName: "Fabric", wikiContent: "" ],
+ "SRHA": [ tests: "", nodeName: "Fabric", wikiContent: "" ],
"USECASE": [ tests: "", nodeName: "BM", wikiContent: "" ]
]
@@ -89,6 +90,7 @@
FUNC_choices = ""
HA_choices = ""
SR_choices = ""
+SRHA_choices = ""
// init some paths for the files and directories.
stat_graph_generator_file = fileRelated.histogramMultiple
@@ -106,6 +108,7 @@
SCPF_choices = triggerFuncs.lastCommaRemover( SCPF_choices )
USECASE_choices = triggerFuncs.lastCommaRemover( USECASE_choices )
SR_choices = triggerFuncs.lastCommaRemover( SR_choices )
+ SRHA_choices = triggerFuncs.lastCommaRemover( SRHA_choices )
}
@@ -121,6 +124,7 @@
testcases[ "FUNC" ][ "tests" ] = FUNC_choices
testcases[ "HA" ][ "tests" ] = HA_choices
testcases[ "SR" ][ "tests" ] = SR_choices
+ testcases[ "SRHA" ][ "tests" ] = SRHA_choices
println "Defaulting to " + day + " tests:"
}
@@ -237,6 +241,9 @@
addingHeader( "SR" )
SR_choices += adder( "SR", "basic", true, "M", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", true, "M", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
closingHeader( "USECASE" )
}
@@ -259,6 +266,9 @@
addingHeader( "SR" )
SR_choices += adder( "SR", "basic", getDay, "T", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "T", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
@@ -281,6 +291,9 @@
addingHeader( "SR" )
SR_choices += adder( "SR", "basic", getDay, "W", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "W", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
closingHeader( "USECASE" )
}
@@ -301,6 +314,9 @@
addingHeader( "SR" )
SR_choices += adder( "SR", "basic", getDay, "Th", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "Th", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
closingHeader( "USECASE" )
}
@@ -323,6 +339,10 @@
SR_choices += adder( "SR", "basic", getDay, "F", false )
SR_choices += adder( "SR", "extra_A", getDay, "F", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "F", false )
+ SRHA_choices += adder( "SRHA", "extra_A", getDay, "F", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
closingHeader( "USECASE" )
}
@@ -349,6 +369,10 @@
SR_choices += adder( "SR", "basic", getDay, "Sa", false )
SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "Sa", false )
+ SRHA_choices += adder( "SRHA", "extra_B", getDay, "Sa", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
closingHeader( "USECASE" )
@@ -375,6 +399,9 @@
addingHeader( "SR" )
SR_choices += adder( "SR", "basic", getDay, "S", false )
closingHeader( "SR" )
+ addingHeader( "SRHA" )
+ SRHA_choices += adder( "SRHA", "basic", getDay, "S", false )
+ closingHeader( "SRHA" )
addingHeader( "USECASE" )
USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
closingHeader( "USECASE" )
diff --git a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
index 9cf5e7b..7a0fd9a 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
@@ -38,6 +38,7 @@
testMachine = "TestStation-" + machine + "s"
this.machine = machine
isSCPF = false
+ hasArgs = false
isTrend = true
}
@@ -49,6 +50,7 @@
init( type )
SCPFfunc = SCPFfuncs
isSCPF = true
+ hasArgs = true // Has TestON CLI arguments to be added when running the test
machine = machineType[ type ]
}
@@ -63,6 +65,7 @@
trend_generator_file = fileRelated.trendMultiple
build_stats_generator_file = fileRelated.histogramMultiple
isSCPF = false
+ hasArgs = false
}
def init( type ){
@@ -188,10 +191,10 @@
timeout 240 stc teardown | head -100
timeout 240 stc shutdown | head -100
cd ~/OnosSystemTest/TestON/bin
- git log |head
+ git log | head
./cleanup.sh -f
''' + "./cli.py run " +
- ( !isSCPF ? testName : testCategory[ testName ][ 'test' ] ) +
+ ( !hasArgs ? testName : testCategory[ testName ][ 'test' ] ) +
" --params GRAPH/nodeCluster=" + machineType[ testType ] + '''
./cleanup.sh -f
# cleanup config changes
@@ -367,7 +370,7 @@
// prop : dictionary property on the machine
// pureTestName : Pure name of the test. ( ex. pureTestName of SCPFflowTpFobj will be SCPFflowTp )
// graphOnly : check if it is generating graph job. If so, it will only generate the generating graph part
- // testCategory : category of the test ( SCPF, SR, FUNC ... )
+ // testCategory : Map for the test suite ( SCPF, SR, FUNC, ... ) which contains information about the tests
// graph_generator_file : Rscript file with the full path.
// graph_saved_directory : where the generated graph will be saved to.
diff --git a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
index b98fad2..8c998fd 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
@@ -499,6 +499,107 @@
"day": "",
wiki_link: wikiPrefix + "-" + "SR High Availability",
wiki_file: "SRHighAvailabilityWiki.txt" ]
+ ],
+ "SRHA": [
+ "SRHAsanity": [
+ "test": "HAsanity --params-file HAsanity.params.fabric",
+ "basic": true,
+ "extra_A": false,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Sanity",
+ wiki_file: "HAsanityWiki.txt" ],
+ "SRHAclusterRestart": [
+ "test": "HAclusterRestart --params-file HAclusterRestart.params.fabric",
+ "basic": true,
+ "extra_A": false,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Cluster Restart",
+ wiki_file: "HAclusterRestartWiki.txt" ],
+ "SRHAsingleInstanceRestart": [
+ "test": "HAsingleInstanceRestart --params-file HAsingleInstanceRestart.params.fabric",
+ "basic": true,
+ "extra_A": false,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Single Instance Restart",
+ wiki_file: "HAsingleInstanceRestartWiki.txt" ],
+ "SRHAstopNodes": [
+ "test": "HAstopNodes --params-file HAstopNodes.params.fabric",
+ "basic": true,
+ "extra_A": true,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Stop Nodes",
+ wiki_file: "HAstopNodes.txt" ],
+ "SRHAfullNetPartition": [
+ "test": "HAfullNetPartition --params-file HAfullNetPartition.params.fabric",
+ "basic": false,
+ "extra_A": true,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Full Network Partition",
+ wiki_file: "HAfullNetPartitionWiki.txt" ],
+ "SRHAswapNodes": [
+ "test": "HAswapNodes --params-file HAswapNodes.params.fabric",
+ "basic": false,
+ "extra_A": false,
+ "extra_B": true,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Swap Nodes",
+ wiki_file: "HAswapNodesWiki.txt" ],
+ "SRHAscaling": [
+ "test": "HAscaling --params-file HAscaling.params.fabric",
+ "basic": false,
+ "extra_A": false,
+ "extra_B": true,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Scaling",
+ wiki_file: "HAscalingWiki.txt" ],
+ "SRHAkillNodes": [
+ "test": "HAkillNodes --params-file HAkillNodes.params.fabric",
+ "basic": true,
+ "extra_A": false,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Kill Nodes",
+ wiki_file: "HAkillNodes.txt" ],
+ "SRHAbackupRecover": [
+ "test": "HAbackupRecover --params-file HAbackupRecover.params.fabric",
+ "basic": true,
+ "extra_A": false,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Backup Recover",
+ wiki_file: "HAbackupRecoverWiki.txt" ],
+ "SRHAupgrade": [
+ "test": "HAupgrade --params-file HAupgrade.params.fabric",
+ "basic": false,
+ "extra_A": false,
+ "extra_B": true,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Upgrade",
+ wiki_file: "HAupgradeWiki.txt" ],
+ "SRHAupgradeRollback": [
+ "test": "HAupgradeRollback --params-file HAupgradeRollback.params.fabric",
+ "basic": false,
+ "extra_A": true,
+ "extra_B": false,
+ "new_Test": false,
+ "day": "",
+ wiki_link: wikiPrefix + "-" + "SR HA Upgrade Rollback",
+ wiki_file: "HAupgradeRollbackWiki.txt" ]
]
]
}
diff --git a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
index 07ea98b..1670449 100644
--- a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
@@ -106,7 +106,6 @@
test_branch = "master"
node( "TestStation-" + nodeName + "s" ) {
envSetup( branch, test_branch, onosTag, jobOn, manuallyRun )
-
exportEnvProperty( branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag, isOldFlow )
}