[ONOS-6957] Create Jenkins Files for the other tests
To do when this is merged:
0. Clean up the existing test builds and reset the build numbers to 1.
1. Configure the following jobs to fetch their Jenkins scripts from GitHub:
https://onos-jenkins.onlab.us/job/ALL_Pipeline_Trigger/configure
https://onos-jenkins.onlab.us/job/FUNC_Pipeline_1.10/configure
https://onos-jenkins.onlab.us/job/FUNC_Pipeline_1.11/configure
https://onos-jenkins.onlab.us/job/FUNC_Pipeline_master/configure
https://onos-jenkins.onlab.us/job/FUNC_Pipeline_manually/configure
https://onos-jenkins.onlab.us/job/HA_Pipeline_1.10/configure
https://onos-jenkins.onlab.us/job/HA_Pipeline_1.11/configure
https://onos-jenkins.onlab.us/job/HA_Pipeline_master/configure
https://onos-jenkins.onlab.us/job/HA_Pipeline_manually/configure
https://onos-jenkins.onlab.us/job/SCPF_Pipeline_1.10/configure
https://onos-jenkins.onlab.us/job/SCPF_Pipeline_1.11/configure
https://onos-jenkins.onlab.us/job/SCPF_Pipeline_master/configure
https://onos-jenkins.onlab.us/job/SCPF_Pipeline_manually/configure
https://onos-jenkins.onlab.us/job/USECASE_Pipeline_1.10/configure
https://onos-jenkins.onlab.us/job/USECASE_Pipeline_1.11/configure
https://onos-jenkins.onlab.us/job/USECASE_Pipeline_master/configure
https://onos-jenkins.onlab.us/job/USECASE_Pipeline_manually/configure
2. Replace the current graphs on the wiki page with the new graphs:
- Link them to:
For VMs:
https://onos-jenkins.onlab.us/job/Pipeline_postjob_VM/lastSuccessfulBuild/artifact/<testName>_<branch>_graph.jpg
For BMs:
https://onos-jenkins.onlab.us/job/Pipeline_postjob_BM/lastSuccessfulBuild/artifact/<testName>_<branch>_graph.jpg
For SCPFs:
https://onos-jenkins.onlab.us/job/Pipeline_postjob_BM/lastSuccessfulBuild/artifact/< many different graph names .. >
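Example (hypothetical test name and branch substituted into the VM pattern):
https://onos-jenkins.onlab.us/job/Pipeline_postjob_VM/lastSuccessfulBuild/artifact/FUNCflow_master_graph.jpg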
Change-Id: I4d5b494fe778328bfa6c05fb0987a842214446a8
diff --git a/TestON/JenkinsFile/FUNCJenkinsFile b/TestON/JenkinsFile/FUNCJenkinsFile
new file mode 100644
index 0000000..51c613e
--- /dev/null
+++ b/TestON/JenkinsFile/FUNCJenkinsFile
@@ -0,0 +1,179 @@
+#!groovy
+// This is a Jenkinsfile for a scripted pipeline for the FUNC tests
+def prop = null
+node("TestStation-VMs"){
+ prop = readProperties(file:'/var/jenkins/TestONOS.property')
+}
+// TODO: Exception handling around steps
+FUNC = [
+"FUNCipv6Intent" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCipv6Intent", wiki_file:"FUNCipv6IntentWiki.txt", ],
+"FUNCoptical" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCoptical", wiki_file:"FUNCopticalWiki.txt"],
+"FUNCflow" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCflow", wiki_file:"FUNCflowWiki.txt"],
+"FUNCnetCfg": [wiki_link:prop["WikiPrefix"]+"-"+"FUNCnetCfg", wiki_file:"FUNCnetCfgWiki.txt"],
+"FUNCovsdbtest" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCovsdbtestWiki", wiki_file:"FUNCovsdbtestWiki.txt"],
+"FUNCnetconf" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCnetconf", wiki_file:"FUNCnetconfWiki.txt"],
+"FUNCgroup" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCgroup", wiki_file:"FUNCgroupWiki.txt"],
+"FUNCintent" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCintent", wiki_file:"FUNCintentWiki.txt"],
+"FUNCintentRest" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCintentRest", wiki_file:"FUNCintentRestWiki.txt"]
+]
+table_name = "executed_test_tests"
+result_name = "executed_test_results"
+graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/testCaseGraphGenerator.R"
+graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"
+echo("Testcases:")
+def testsToRun = null
+testsToRun = prop["Tests"].tokenize("\n;, ")
+
+for ( String test : testsToRun ) {
+ println test
+}
+def tests = [:]
+for( String test : FUNC.keySet() ){
+ toBeRun = testsToRun.contains( test )
+ def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
+ tests[stepName] = FUNCTest(test, toBeRun, prop)
+}
+
+// run the tests
+for ( test in tests.keySet() ){
+ tests[test].call()
+}
+
+// The testName should be the key from the FUNC map
+def FUNCTest( testName, toBeRun, prop ) {
+ return {
+ catchError{
+ stage(testName) {
+ if ( toBeRun ){
+ workSpace = "/var/jenkins/workspace/"+testName
+ def fileContents = ""
+ node("TestStation-VMs"){
+ withEnv(['ONOSBranch='+prop["ONOSBranch"],
+ 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
+ 'TestONBranch='+prop["TestONBranch"],
+ 'ONOSTag='+prop["ONOSTag"],
+ 'WikiPrefix='+prop["WikiPrefix"],
+ 'WORKSPACE='+workSpace]){
+ sh '''#!/bin/bash -l
+ set -i # interactive
+ shopt -s expand_aliases # expand alias in non-interactive mode
+ export PYTHONUNBUFFERED=1
+
+ ifconfig
+
+ echo "ONOS Branch is: $ONOSBranch"
+ echo "TestON Branch is: $TestONBranch"
+ echo "Test date: "
+ date
+
+ cd ~
+ export PATH=$PATH:onos/tools/test/bin
+
+ timeout 240 stc shutdown | head -100
+ timeout 240 stc teardown | head -100
+ timeout 240 stc shutdown | head -100
+
+ cd ~/OnosSystemTest/TestON/bin
+ git log |head
+ ./cleanup.sh
+ ''' + "./cli.py run " + testName + '''
+ # cleanup config changes
+ cd ~/onos/tools/package/config
+ git clean -df'''
+
+ // For the Wiki page
+ sh '''#!/bin/bash -i
+
+ echo "ONOS Branch is: ${ONOSBranch}"
+ echo "TestON Branch is: ${TestONBranch}"
+
+ echo "Job name is: "''' + testName + '''
+ echo "Workspace is: ${WORKSPACE}/"
+
+ echo "Wiki page to post is: ${WikiPrefix}-"
+
+ # remove any leftover files from previous tests
+ sudo rm ${WORKSPACE}/*Wiki.txt
+ sudo rm ${WORKSPACE}/*Summary.txt
+ sudo rm ${WORKSPACE}/*Result.txt
+ sudo rm ${WORKSPACE}/*.csv
+
+ #copy files to workspace
+ cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
+ sudo cp *.txt ${WORKSPACE}/
+ sudo cp *.csv ${WORKSPACE}/
+ cd ${WORKSPACE}/
+ for i in *.csv
+ do mv "$i" "$WikiPrefix"-"$i"
+ done
+ ls -al
+ cd '''
+ if( prop["manualRun"] == "false" ){
+ // Post Results
+ withCredentials([
+ string(credentialsId: 'db_pass', variable: 'pass'),
+ string(credentialsId: 'db_user', variable: 'user'),
+ string(credentialsId: 'db_host', variable: 'host'),
+ string(credentialsId: 'db_port', variable: 'port')]) {
+ def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
+
+ sh '''#!/bin/bash
+ set +e
+ export DATE=\$(date +%F_%T)
+ cd ~
+ pwd
+ sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
+ do
+ echo \$line
+ echo ''' + database_command + '''
+
+ done
+ Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
+
+ }
+ }
+ // Fetch Logs
+ sh '''#!/bin/bash
+
+ cd ~/OnosSystemTest/TestON/logs
+ echo "Job Name is: " + ''' + testName + '''
+ TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
+ echo "########################################################################################"
+ echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
+ echo "########################################################################################"
+ cd $TestONlogDir
+ if [ $? -eq 1 ]
+ then
+ echo "Job name does not match any test suite name to move log!"
+ else
+ pwd
+ for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
+ fi
+ cd'''
+ fileContents = readFile workSpace+"/"+FUNC[testName]['wiki_file']
+ }
+ }
+
+ if( prop["manualRun"] == "false" ){
+ def post = build job: "Pipeline_postjob_VM", propagate: false,
+ parameters: [
+ string(name: 'Wiki_Contents', value: fileContents),
+ string(name: 'Wiki_Link', value: FUNC[testName]['wiki_link'])
+ ]
+ }
+ node("TestStation-VMs"){
+ sh '''#!/bin/bash
+
+ if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
+ then
+ echo ''' + testName + " : All passed." + '''
+ else
+ echo ''' + testName + " : not all passed." + '''
+ DoingThisToSetTheResultFalse # undefined command exits non-zero so catchError marks the stage as failed
+ fi'''
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
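Note on the pattern used throughout these Jenkinsfiles: FUNCTest() (and its HA/SCPF/USECASE siblings) returns a closure per test; the closures are collected in a map keyed by a human-readable stage name and only invoked afterwards, one by one. A minimal plain-Groovy sketch of that pattern, with illustrative names not taken from the pipeline:

    // build a map of named, deferred steps first; run them later in order
    def makeStep( String name, boolean toBeRun ) {
        return { println( ( toBeRun ? "Running " : "Skipping " ) + name ) }
    }
    def steps = [:]
    [ "FUNCflow" : true, "FUNCgroup" : false ].each { name, run ->
        steps[ ( run ? "" : "Not " ) + "Running " + name ] = makeStep( name, run )
    }
    steps.each { stepName, step -> step.call() } // sequential, like the loop over tests.keySet()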
diff --git a/TestON/JenkinsFile/HAJenkinsFile b/TestON/JenkinsFile/HAJenkinsFile
new file mode 100644
index 0000000..1a30377
--- /dev/null
+++ b/TestON/JenkinsFile/HAJenkinsFile
@@ -0,0 +1,182 @@
+#!groovy
+// This is a Jenkinsfile for a scripted pipeline for the HA tests
+
+def prop = null
+node("TestStation-VMs"){
+ prop = readProperties(file:'/var/jenkins/TestONOS.property')
+}
+// TODO: Exception handling around steps
+HA = [
+"HAsanity" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Sanity", wiki_file:"HAsanityWiki.txt"],
+"HAswapNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt"],
+"HAscaling" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Scaling", wiki_file:"HAscalingWiki.txt"],
+"HAclusterRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt"],
+"HAstopNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Stop Nodes", wiki_file:"HAstopNodes.txt"],
+"HAfullNetPartition" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt"],
+"HAsingleInstanceRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt"],
+"HAkillNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Kill Nodes", wiki_file:"HAkillNodesWiki.txt"] ]
+
+table_name = "executed_test_tests"
+result_name = "executed_test_results"
+graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/testCaseGraphGenerator.R"
+graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"
+
+echo("Testcases:")
+def testsToRun = null
+testsToRun = prop["Tests"].tokenize("\n;, ")
+for ( String test : testsToRun ) {
+ println test
+}
+
+def tests = [:]
+for( String test : HA.keySet() ){
+ toBeRun = testsToRun.contains( test )
+ def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
+ tests[stepName] = HATest(test, toBeRun, prop)
+}
+
+// run the tests
+for ( test in tests.keySet() ){
+ tests[test].call()
+}
+
+
+// The testName should be the key from the HA map
+def HATest( testName, toBeRun, prop ) {
+ return {
+ catchError{
+ stage(testName) {
+ if ( toBeRun ){
+ workSpace = "/var/jenkins/workspace/"+testName
+ def fileContents = ""
+ node("TestStation-VMs"){
+ withEnv(['ONOSBranch='+prop["ONOSBranch"],
+ 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
+ 'TestONBranch='+prop["TestONBranch"],
+ 'ONOSTag='+prop["ONOSTag"],
+ 'WikiPrefix='+prop["WikiPrefix"],
+ 'WORKSPACE='+workSpace]){
+ sh '''#!/bin/bash -l
+ set -i # interactive
+ set +e
+ shopt -s expand_aliases # expand alias in non-interactive mode
+ export PYTHONUNBUFFERED=1
+
+ ifconfig
+
+ echo "ONOS Branch is: ${ONOSBranch}"
+ echo "TestON Branch is: ${TestONBranch}"
+ echo "Test date: "
+ date
+
+ cd ~
+ export PATH=$PATH:onos/tools/test/bin
+
+ timeout 240 stc shutdown | head -100
+ timeout 240 stc teardown | head -100
+ timeout 240 stc shutdown | head -100
+
+ cd ~/OnosSystemTest/TestON/bin
+ git log |head
+ ./cleanup.sh -f
+ ''' + "./cli.py run " + testName+ '''
+ ./cleanup.sh -f
+ cd'''
+
+ // For the Wiki page
+ sh '''#!/bin/bash -i
+ set +e
+ echo "ONOS Branch is: ${ONOSBranch}"
+ echo "TestON Branch is: ${TestONBranch}"
+
+ echo "Job name is: "''' + testName + '''
+ echo "Workspace is: ${WORKSPACE}/"
+
+ echo "Wiki page to post is: ${WikiPrefix}-"
+
+ # remove any leftover files from previous tests
+ sudo rm ${WORKSPACE}/*Wiki.txt
+ sudo rm ${WORKSPACE}/*Summary.txt
+ sudo rm ${WORKSPACE}/*Result.txt
+ sudo rm ${WORKSPACE}/*.csv
+
+ #copy files to workspace
+ cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
+ sudo cp *.txt ${WORKSPACE}/
+ sudo cp *.csv ${WORKSPACE}/
+ cd ${WORKSPACE}/
+ for i in *.csv
+ do mv "$i" "$WikiPrefix"-"$i"
+ done
+ ls -al
+ cd '''
+
+ if( prop["manualRun"] == "false" ){
+ // Post Results
+ withCredentials([
+ string(credentialsId: 'db_pass', variable: 'pass'),
+ string(credentialsId: 'db_user', variable: 'user'),
+ string(credentialsId: 'db_host', variable: 'host'),
+ string(credentialsId: 'db_port', variable: 'port')]) {
+ def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
+
+ sh '''#!/bin/bash
+ set +e
+ export DATE=\$(date +%F_%T)
+ cd ~
+ pwd
+ sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
+ do
+ echo \$line
+ echo ''' + database_command + '''
+
+ done
+ Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
+
+ }
+ }
+ // Fetch Logs
+ sh '''#!/bin/bash
+ set +e
+ cd ~/OnosSystemTest/TestON/logs
+ echo "Job Name is: " + ''' + testName + '''
+ TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
+ echo "########################################################################################"
+ echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
+ echo "########################################################################################"
+ cd $TestONlogDir
+ if [ $? -eq 1 ]
+ then
+ echo "Job name does not match any test suite name to move log!"
+ else
+ pwd
+ for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
+ fi
+ cd'''
+ fileContents = readFile workSpace+"/"+HA[testName]['wiki_file']
+ }
+ }
+
+ if( prop["manualRun"] == "false" ){
+ def post = build job: "Pipeline_postjob_VM", propagate: false,
+ parameters: [
+ string(name: 'Wiki_Contents', value: fileContents),
+ string(name: 'Wiki_Link', value: HA[testName]['wiki_link'])
+ ]
+ }
+ node("TestStation-VMs"){
+ sh '''#!/bin/bash
+
+ if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
+ then
+ echo ''' + testName + " : All passed." + '''
+ else
+ echo ''' + testName + " : not all passed." + '''
+ DoingThisToSetTheResultFalse
+ fi'''
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
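For reference, the file consumed by readProperties() at the top of each test Jenkinsfile is plain java.util.Properties key=value text, produced by exportEnvProperty() in the JenkinsfileTrigger below. A standalone sketch of that producer/consumer contract (values are illustrative):

    def text = '''ONOSBranch=onos-1.11
    TestONBranch=master
    Tests=HAsanity,HAclusterRestart
    manualRun=false
    '''
    def prop = new Properties()
    prop.load( new StringReader( text ) )                 // same format readProperties() parses
    def testsToRun = prop[ "Tests" ].tokenize( "\n;, " )  // same tokenizing as the pipelines
    assert testsToRun == [ "HAsanity", "HAclusterRestart" ]
    assert prop[ "manualRun" ] == "false"                 // booleans arrive as strings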
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
new file mode 100644
index 0000000..70b82b4
--- /dev/null
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -0,0 +1,257 @@
+#!groovy
+// This is a Jenkinsfile for a scripted pipeline that triggers the test pipelines
+// Define sets of tests
+SCPF_Basic = "SCPFswitchLat,SCPFportLat,SCPFintentInstallWithdrawLat,SCPFintentEventTp,SCPFflowTp1g,SCPFcbench,SCPFbatchFlowResp"
+SCPF_ExtraSetA = "SCPFintentRerouteLat,SCPFscalingMaxIntents,SCPFhostLat,SCPFscaleTopo,SCPFmastershipFailoverLat"
+SCPF_NEW_TEST = "SCPFmastershipFailoverLat"
+SCPF_ExtraSetB = "SCPFintentInstallWithdrawLatWithFlowObj,SCPFintentEventTpWithFlowObj,SCPFintentRerouteLatWithFlowObj,SCPFscalingMaxIntentsWithFlowObj,SCPFflowTp1gWithFlowObj"
+FUNC_Basic = "FUNCipv6Intent,FUNCoptical,FUNCflow,FUNCnetCfg,FUNCovsdbtest,FUNCnetconf"
+FUNC_ExtraSetA = "FUNCgroup,FUNCintent"
+FUNC_ExtraSetB = "FUNCintentRest"
+HA_Basic = "HAsanity,HAsingleInstanceRestart,HAclusterRestart"
+HA_ExtraSetA = "HAstopNodes,HAfullNetPartition"
+HA_ExtraSetB = "HAkillNodes,HAswapNodes,HAscaling"
+USECASE_Basic = "FUNCvirNetNB,FUNCbgpls,VPLSBasic,PLATdockertest,SRSanity,SRSwitchFailure,SRLinkFailure,SROnosFailure,SRClusterRestart,SRDynamic,SRHA,USECASE_SdnipFunction,USECASE_SdnipFunctionCluster"
+USECASE_NEW_TEST = "VPLSfailsafe"
+testcases = [
+ "FUNC" : [ tests : "" , nodeName : "VM"],
+ "HA" : [ tests : "" , nodeName : "VM"],
+ "SCPF" : [ tests : "" , nodeName : "BM"],
+ "USECASE" : [ tests : "" , nodeName : "BM"]
+]
+Prefix_organizer = [
+ "FU" : "FUNC",
+ "HA" : "HA",
+ "PL" : "USECASE",
+ "SA" : "USECASE",
+ "SC" : "SCPF",
+ "SR" : "USECASE",
+ "US" : "USECASE",
+ "VP" : "USECASE"
+]
+
+onos_branch = "1.11"
+// Set tests based on day of week
+def now = new Date()
+echo(now.toString())
+today = now[Calendar.DAY_OF_WEEK]
+day = ""
+SCPF_choices = SCPF_Basic
+USECASE_choices = ""
+FUNC_choices = FUNC_Basic
+HA_choices = HA_Basic
+
+switch (today) {
+ case Calendar.MONDAY:
+ SCPF_choices += "," + SCPF_ExtraSetB
+ FUNC_choices += "," + FUNC_ExtraSetA
+ HA_choices += "," + HA_ExtraSetA
+ day = "Monday"
+ break
+ case Calendar.TUESDAY:
+ day = "Tuesday"
+ USECASE_choices = USECASE_Basic + "," + USECASE_NEW_TEST
+ FUNC_choices += "," + FUNC_ExtraSetB
+ HA_choices += "," + HA_ExtraSetB
+ break
+ case Calendar.WEDNESDAY:
+ SCPF_choices += "," + SCPF_ExtraSetA + "," + SCPF_NEW_TEST
+ FUNC_choices += "," + FUNC_ExtraSetA
+ HA_choices += "," + HA_ExtraSetA
+ day = "Wednesday"
+ break
+ case Calendar.THURSDAY:
+ SCPF_choices += "," + SCPF_ExtraSetB
+ FUNC_choices += "," + FUNC_ExtraSetB
+ HA_choices += "," + HA_ExtraSetB
+ day = "Thursday"
+ break
+ case Calendar.FRIDAY:
+ SCPF_choices += "," + SCPF_ExtraSetA + "," + SCPF_NEW_TEST + "," + SCPF_ExtraSetB
+ FUNC_choices += "," + FUNC_ExtraSetA
+ HA_choices += "," + HA_ExtraSetA
+ day = "Friday"
+ break
+ case Calendar.SATURDAY:
+ SCPF_choices += "," + SCPF_ExtraSetA + "," + SCPF_ExtraSetB
+ USECASE_choices = USECASE_Basic
+ FUNC_choices += "," + FUNC_ExtraSetA + "," + FUNC_ExtraSetB
+ HA_choices += "," + HA_ExtraSetA + "," + HA_ExtraSetB
+ onos_branch= "1.10"
+ day = "Saturday"
+ break
+ case Calendar.SUNDAY:
+ SCPF_choices += "," + SCPF_ExtraSetA + "," + SCPF_NEW_TEST + "," + SCPF_ExtraSetB
+ USECASE_choices = USECASE_Basic + "," + USECASE_NEW_TEST
+ FUNC_choices += "," + FUNC_ExtraSetA + "," + FUNC_ExtraSetB
+ HA_choices += "," + HA_ExtraSetA + "," + HA_ExtraSetB
+ onos_branch= "master"
+ day = "Sunday"
+ break
+}
+
+manually_run = params.manual_run
+post_result = params.PostResult
+if ( manually_run ){
+ organize_tests( params.Tests )
+ onos_branch = params.ONOSVersion
+ test_branch = params.TestONBranch
+ println "Tests to be run manually : "
+}else{
+ testcases["SCPF"]["tests"] = SCPF_choices
+ testcases["USECASE"]["tests"] = USECASE_choices
+ testcases["FUNC"]["tests"] = FUNC_choices
+ testcases["HA"]["tests"] = HA_choices
+ println "Defaulting to " + day + " tests:"
+}
+print_tests( testcases )
+
+def runTest = [
+ "VM" : [:],
+ "BM" : [:]
+]
+for( String test in testcases.keySet() ){
+ println test
+ if (testcases[test]["tests"] != ""){
+ runTest[testcases[test]["nodeName"]][test] = trigger_pipeline( onos_branch, testcases[test]["tests"], testcases[test]["nodeName"], test, manually_run )
+ }
+}
+def finalList = [:]
+finalList["VM"] = runTestSeq( runTest["VM"] )
+finalList["BM"] = runTestSeq( runTest["BM"] )
+parallel finalList
+
+def runTestSeq( testList ){
+ return{
+ for ( test in testList.keySet() ){
+ testList[test].call()
+ }
+ }
+}
+
+def print_tests( tests ){
+ for( String test in tests.keySet() ){
+ if( tests[test]["tests"] != "" ){
+ println test + ":"
+ println tests[test]["tests"]
+ }
+ }
+}
+def organize_tests( tests ){
+ testList = tests.tokenize("\n;, ")
+ for( String test in testList )
+ testcases [ Prefix_organizer[ ( test == "FUNCbgpls" || test == "FUNCvirNetNB" ? "US" : ( test[ 0 ] + test[ 1 ] ) ) ] ][ "tests" ] += test + ","
+}
+def trigger_pipeline( branch, tests, nodeName, jobOn, manuallyRun ){
+// nodeName : "BM" or "VM"
+// jobOn : "SCPF" or "USECASE" or "FUNC" or "HA"
+ return{
+ if (branch == "master"){
+ onos_branch = branch
+ }else{
+ onos_branch = "onos-" + branch
+ }
+ wiki = branch
+ if ( !manuallyRun )
+ test_branch = onos_branch
+ if (onos_branch == "onos-1.11")
+ test_branch = "master"
+ println jobOn + "_Pipeline_" + ( manuallyRun ? "manually" : branch )
+ node("TestStation-" + nodeName + "s"){
+ if (!manuallyRun)
+ envSetup(onos_branch, test_branch)
+
+ exportEnvProperty( onos_branch, test_branch, wiki, tests, post_result, manuallyRun )
+ }
+
+ jobToRun = jobOn + "_Pipeline_" + ( manuallyRun ? "manually" : branch )
+ build job: jobToRun, propagate: false
+ }
+}
+
+// export Environment properties.
+def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run ){
+ stage("export Property"){
+ sh '''
+ echo "ONOSBranch=''' + onos_branch +'''" > /var/jenkins/TestONOS.property
+ echo "TestONBranch=''' + test_branch +'''" >> /var/jenkins/TestONOS.property
+ echo "ONOSTag='''+ env.ONOSTag +'''" >> /var/jenkins/TestONOS.property
+ echo "WikiPrefix=''' + wiki +'''" >> /var/jenkins/TestONOS.property
+ echo "ONOSJVMHeap='''+ env.ONOSJVMHeap +'''" >> /var/jenkins/TestONOS.property
+ echo "Tests=''' + tests +'''" >> /var/jenkins/TestONOS.property
+ echo "postResult=''' + postResult +'''" >> /var/jenkins/TestONOS.property
+ echo "manualRun=''' + manually_run +'''" >> /var/jenkins/TestONOS.property
+ '''
+ }
+}
+// Initialize the environment setup for ONOS and OnosSystemTest
+def envSetup( onos_branch, test_branch ){
+ stage("envSetup") {
+ sh '''
+ #!/bin/bash
+ set +e
+ . ~/.bashrc
+ env
+
+ echo -e "\n##### Set TestON Branch #####"
+ echo "TestON Branch is set on: " + test_branch
+
+ cd ~/OnosSystemTest/
+ git checkout HEAD~1 # Make sure you aren't on a branch
+ git branch | grep -v "detached from" | xargs git branch -d # delete all local branches merged with remote
+ git branch -D $TestONBranch # just in case there are local changes. This will normally result in a branch not found error
+ git clean -df # clean any local files
+ git fetch --all # update all caches from remotes
+ git reset --hard origin/$TestONBranch # force local index to match remote branch
+ git clean -df # clean any local files
+ git checkout $TestONBranch #create new local branch
+ git branch
+ git log -1 --decorate
+
+
+ echo -e "\n##### Set ONOS Branch #####"
+ echo "ONOS Branch is set on: + onos_branch
+
+ echo -e "\n #### check karaf version ######"
+ env |grep karaf
+
+ cd ~/onos
+ rm -rf buck-out/*
+ ~/onos/tools/build/onos-buck clean
+ git checkout HEAD~1 # Make sure you aren't on a branch
+ git branch | grep -v "detached from" | xargs git branch -d # delete all local branches merged with remote
+ git branch -D $ONOSBranch # just in case there are local changes. This will normally result in a branch not found error
+ git clean -df # clean any local files
+ git fetch --all # update all caches from remotes
+ git reset --hard origin/$ONOSBranch # force local index to match remote branch
+ git clean -df # clean any local files
+ if [ -z "$ONOSTag" ] #if tag is not specified
+ then
+ git checkout $ONOSBranch #create new local branch
+ else
+ git checkout $ONOSTag #checkout the tag
+ fi
+ git branch
+ git log -1 --decorate
+
+
+ echo -e "\n##### set jvm heap size to 8G #####"
+ echo ${ONOSJVMHeap}
+
+ inserted_line="export JAVA_OPTS="${ONOSJVMHeap}""
+ sed -i "s/bash/bash\n$inserted_line/" ~/onos/tools/package/bin/onos-service
+
+ echo "##### Check onos-service setting..... #####"
+ cat ~/onos/tools/package/bin/onos-service
+
+ export JAVA_HOME=/usr/lib/jvm/java-8-oracle
+
+ echo -e "\n##### build ONOS skip unit tests ######"
+ #mvn clean install -DskipTests
+ # Force buck update
+ rm -f ~/onos/bin/buck
+ ~/onos/tools/build/onos-buck build onos
+
+ git branch'''
+ }
+}
\ No newline at end of file
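The densest line in this trigger is organize_tests(), which routes each manually requested test by the first two characters of its name through Prefix_organizer, special-casing the two FUNC-named USECASE tests. A trimmed standalone sketch of that routing (prefix map abbreviated from Prefix_organizer above):

    def prefixes = [ "FU":"FUNC", "HA":"HA", "SC":"SCPF", "SR":"USECASE", "VP":"USECASE" ]
    def category = { String t ->
        // FUNCbgpls and FUNCvirNetNB belong to USECASE despite the FUNC prefix
        ( t == "FUNCbgpls" || t == "FUNCvirNetNB" ) ? "USECASE" : prefixes[ t[ 0..1 ] ]
    }
    assert category( "FUNCflow" )   == "FUNC"
    assert category( "FUNCbgpls" )  == "USECASE"
    assert category( "SCPFcbench" ) == "SCPF"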
diff --git a/TestON/JenkinsFile/SCPFJenkinsFile b/TestON/JenkinsFile/SCPFJenkinsFile
new file mode 100644
index 0000000..22ad46f
--- /dev/null
+++ b/TestON/JenkinsFile/SCPFJenkinsFile
@@ -0,0 +1,189 @@
+#!groovy
+// This is a Jenkinsfile for a scripted pipeline for the SCPF tests
+// properties([pipelineTriggers([cron('30 19 * * *')])])
+
+// TODO: Exception handling around steps
+
+none = [ "" ]
+batches = [1,100,1000]
+neighbors = ['y', 'n']
+SCPF = [
+ SCPFcbench: [ test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:['Cbench Test'], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)'],
+ SCPFhostLat: [ test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:['Host Latency Test'], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)'],
+ SCPFportLat: [ test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:['Port Latency Test - Port Up','Port Latency Test - Port Down'], dbCols:[ 'up_ofp_to_dev_avg,up_dev_to_link_avg,up_link_to_graph_avg', 'down_ofp_to_dev_avg,down_dev_to_link_avg,down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+ SCPFflowTp1g: [ test:'SCPFflowTp1g --params TEST/flows:12250', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors, finalResult:1, graphTitle:['Flow Throughput Test - neighbors=0','Flow Throughput Test - neighbors=4'], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ], y_axis:'Throughput (,000 Flows/sec)' ],
+ SCPFflowTp1gWithFlowObj: [ test:'SCPFflowTp1g --params TEST/flowObj:True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0],
+ SCPFscaleTopo: [ test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:['Scale Topology Test'], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)'],
+ SCPFswitchLat: [ test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:['Switch Latency Test - Switch Up','Switch Latency Test - Switch Down'], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_role_request_avg,role_request_to_role_reply_avg,role_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+ SCPFbatchFlowResp: [ test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:['Batch Flow Test - Elapsepost', 'Batch Flow Test - Elapsedel'], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (ms)'],
+ SCPFintentEventTp: [ test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:['Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4'], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ], y_axis:'Throughput (Ops/sec)'],
+ SCPFintentRerouteLat: [ test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:['Intent Reroute Test'], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
+ SCPFscalingMaxIntents: [ test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:none, finalResult:0],
+ SCPFintentEventTpWithFlowObj: [ test:'SCPFintentEventTp --params TEST/flowObj:True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0],
+ SCPFintentInstallWithdrawLat: [ test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:['Intent Installation Test','Intent Withdrawal Test'], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
+ SCPFintentRerouteLatWithFlowObj: [ test:'SCPFintentRerouteLat --params TEST/flowObj:True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
+ SCPFscalingMaxIntentsWithFlowObj: [ test:'SCPFscalingMaxIntents --params TEST/flowObj:True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:none, finalResult:0],
+ SCPFintentInstallWithdrawLatWithFlowObj: [ test:'SCPFintentInstallWithdrawLat --params TEST/flowObj:True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
+ SCPFmastershipFailoverLat: [test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:['Mastership Failover Test'], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
+]
+
+echo("Testcases:")
+graph_generator_directory = "~/OnosSystemTest/TestON/JenkinsFile/SCPF/"
+graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_BM/"
+def testsToRun = null
+def prop = null
+node("TestStation-BMs"){
+ prop = readProperties(file:'/var/jenkins/TestONOS.property') // TODO set defaults
+ testsToRun = prop["Tests"].tokenize("\n;, ")
+ for ( String test : testsToRun ) {
+ println test
+ }
+}
+
+def tests = [:]
+for( String test : SCPF.keySet() ){
+ toBeRun = testsToRun.contains( test )
+ def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
+ tests[stepName] = SCPFTest(test, toBeRun, prop)
+}
+
+// run the tests
+for ( test in tests.keySet() ){
+ tests[test].call()
+}
+
+// The testName should be the key from the SCPF map
+def SCPFTest( testName, toBeRun, prop ) {
+ return {
+ catchError{
+ stage(testName) {
+ if ( toBeRun ){
+ workSpace = "/var/jenkins/workspace/"+testName
+ node("TestStation-BMs"){
+ withEnv(['ONOSBranch='+prop["ONOSBranch"],
+ 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
+ 'TestONBranch='+prop["TestONBranch"],
+ 'ONOSTag='+prop["ONOSTag"],
+ 'WikiPrefix='+prop["WikiPrefix"],
+ 'WORKSPACE='+workSpace]){
+ sh '''#!/bin/bash -l
+ set -i # interactive
+ shopt -s expand_aliases # expand alias in non-interactive mode
+ export PYTHONUNBUFFERED=1
+
+ ifconfig
+
+ echo "ONOS Branch is: $ONOSBranch"
+ echo "TestON Branch is: $TestONBranch"
+ echo "Test date: "
+ date
+
+ cd ~
+ export PATH=$PATH:onos/tools/test/bin
+
+ timeout 240 stc shutdown | head -100
+ timeout 240 stc teardown | head -100
+ timeout 240 stc shutdown | head -100
+
+ cd ~/OnosSystemTest/TestON/bin
+ git log |head
+ ./cleanup.sh
+ ''' + "./cli.py run " + SCPF[testName]['test']
+
+ // For moving results
+ sh '''#!/bin/bash -i
+ # remove any leftover files from previous tests
+ sudo rm ${WORKSPACE}/*Result.txt
+
+ #copy files to workspace
+ cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
+ sudo cp *Result.txt ${WORKSPACE}/
+ cd ${WORKSPACE}/
+ ls -al
+ cd '''
+ // Post Results
+ withCredentials([
+ string(credentialsId: 'db_pass', variable: 'pass'),
+ string(credentialsId: 'db_user', variable: 'user'),
+ string(credentialsId: 'db_host', variable: 'host'),
+ string(credentialsId: 'db_port', variable: 'port')]) {
+ def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', '\$ONOSBranch', \$line);\""
+ if (testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat") {
+ database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
+ }
+ sh '''#!/bin/bash
+
+ export DATE=\$(date +%F_%T)
+ cd ~
+ pwd
+ cd /tmp
+ while read line
+ do
+
+ echo \$line
+ echo ''' + database_command + '''
+
+ done< ''' + SCPF[testName]['file'] + '''
+ ''' + getGraphCommand( SCPF[testName]['rFile'], SCPF[testName]['extra'], host, port, user, pass, testName, prop["ONOSBranch"] ) + '''
+ ''' + ( SCPF[testName]['finalResult'] ? generateCombinedResultGraph( host,port, user, pass, testName, prop["ONOSBranch"] ) : "" )
+ }
+ // Fetch Logs
+ sh '''#!/bin/bash
+
+ cd ~/OnosSystemTest/TestON/logs
+ echo "Job Name is: ${JOB_NAME}"
+ TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
+ echo "########################################################################################"
+ echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
+ echo "########################################################################################"
+ cd $TestONlogDir
+ if [ $? -eq 1 ]
+ then
+ echo "Job name does not match any test suite name to move log!"
+ else
+ pwd
+ for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
+ fi'''
+ }
+ }
+ if( prop["postResult"] == "true" ){
+ def post = build job: "Pipeline_postjob_BM", propagate: false
+ }
+ node("TestStation-BMs"){
+ sh '''#!/bin/bash
+
+ if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
+ then
+ echo ''' + testName + " : All passed." + '''
+ else
+ echo ''' + testName + " : not all passed." + '''
+ DoingThisToSetTheResultFalse
+ fi'''
+ }
+ }
+ }
+ }
+ }
+}
+def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName ){
+ result = ""
+ for( extra in extras ){
+ result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName ) + ";"
+ }
+ return result
+}
+def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName){
+ return "Rscript " + graph_generator_directory + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + branchName + " " + batch + " " + graph_saved_directory
+}
+def generateCombinedResultGraph( host, port, user, pass, testName, branchName ){
+ result = ""
+ for ( int i=0; i< SCPF[testName]['graphTitle'].size(); i++){
+ result += "Rscript " + graph_generator_directory + "SCPFLineGraph.R " + host + " " + port + " " + user + " " + pass + " \"" + SCPF[testName]['graphTitle'][i] + "\" " + branchName + " " + 100 + " \"SELECT " +
+ checkIfList( testName, 'dbCols', i ) + " FROM " + SCPF[testName]['table'] + " WHERE branch=\'" + branchName + "\' " + checkIfList( testName, 'dbWhere', i ) +
+ " ORDER BY date ASC LIMIT 100\" \"" + SCPF[testName]['y_axis'] + "\" " + graph_saved_directory + ";"
+ }
+ return result
+}
+def checkIfList( testName, forWhich, pos ){
+ return SCPF[testName][forWhich] instanceof List ? SCPF[testName][forWhich][pos] : SCPF[testName][forWhich]
+}
\ No newline at end of file
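A note on checkIfList(): a test's dbCols/dbWhere entry is either one value shared by all of its graphs or a List with one element per graphTitle, and checkIfList() dispatches on that. A plain-Groovy sketch of the same idea (field values are illustrative):

    def entry = [ dbCols: [ 'up_avg', 'down_avg' ], dbWhere: 'AND scale=5' ]
    def pick = { String field, int pos ->
        entry[ field ] instanceof List ? entry[ field ][ pos ] : entry[ field ]
    }
    assert pick( 'dbCols', 1 )  == 'down_avg'    // List: one value per graph
    assert pick( 'dbWhere', 1 ) == 'AND scale=5' // scalar: shared by every graph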
diff --git a/TestON/JenkinsFile/USECASEJenkinsFile b/TestON/JenkinsFile/USECASEJenkinsFile
new file mode 100644
index 0000000..84577a2
--- /dev/null
+++ b/TestON/JenkinsFile/USECASEJenkinsFile
@@ -0,0 +1,190 @@
+#!groovy
+// This is a Jenkinsfile for a scripted pipeline for the USECASE tests
+
+// TODO: Exception handling around steps
+
+def prop = null
+node("TestStation-BMs"){
+ prop = readProperties(file:'/var/jenkins/TestONOS.property')
+}
+USECASE = [
+ "FUNCvirNetNB" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCvirNetNB", wiki_file:"FUNCvirNetNBWiki.txt"],
+ "FUNCbgpls" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCbgpls", wiki_file:"FUNCbgplsWiki.txt"],
+ "VPLSBasic" : [wiki_link:prop["WikiPrefix"]+"-"+"VPLSBasic", wiki_file:"VPLSBasicWiki.txt"],
+ "VPLSfailsafe" : [wiki_link:prop["WikiPrefix"]+"-"+"VPLSfailsafe", wiki_file:"VPLSfailsafeWiki.txt"],
+ "PLATdockertest": [wiki_link:"Docker Images sanity test", wiki_file:"PLATdockertestTableWiki.txt"],
+ "SRSanity": [wiki_link:prop["WikiPrefix"]+"-"+"SR Sanity", wiki_file:"SRSanityWiki.txt"],
+ "SRSwitchFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Switch Failure", wiki_file:"SRSwitchFailureWiki.txt"],
+ "SRLinkFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Link Failure", wiki_file:"SRLinkFailureWiki.txt"],
+ "SROnosFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Onos node Failure", wiki_file:"SROnosFailureWiki.txt"],
+ "SRClusterRestart": [wiki_link:prop["WikiPrefix"]+"-"+"SR Cluster Restart", wiki_file:"SRClusterRestartWiki.txt"],
+ "SRDynamic": [wiki_link:prop["WikiPrefix"]+"-"+"SR Dynamic Config", wiki_file:"SRDynamicWiki.txt"],
+ "SRHA": [wiki_link:prop["WikiPrefix"]+"-"+"SR High Availability", wiki_file:"SRHighAvailabilityWiki.txt"],
+ "USECASE_SdnipFunction": [wiki_link:prop["WikiPrefix"]+"-"+"SDNIP Function", wiki_file:"USECASE_SdnipFunctionWiki.txt"],
+ "USECASE_SdnipFunctionCluster": [wiki_link:prop["WikiPrefix"]+"-"+"SDNIP Function Cluster", wiki_file:"USECASE_SdnipFunctionClusterWiki.txt"]
+]
+
+table_name = "executed_test_tests"
+result_name = "executed_test_results"
+graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/testCaseGraphGenerator.R"
+graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_BM/"
+
+echo("Testcases:")
+testsToRun = prop["Tests"].tokenize("\n;, ")
+for ( String test : testsToRun ) {
+ println test
+}
+
+def tests = [:]
+for( String test : USECASE.keySet() ){
+ toBeRun = testsToRun.contains( test )
+ def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
+ tests[stepName] = USECASETest(test, toBeRun, prop)
+}
+
+// run the tests
+for ( test in tests.keySet() ){
+ tests[test].call()
+}
+
+// The testName should be the key from the USECASE map
+def USECASETest( testName, toBeRun, prop ) {
+ return {
+ catchError{
+ stage(testName) {
+ if ( toBeRun ){
+ workSpace = "/var/jenkins/workspace/"+testName
+ def fileContents = ""
+ node("TestStation-BMs"){
+ withEnv(['ONOSBranch='+prop["ONOSBranch"],
+ 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
+ 'TestONBranch='+prop["TestONBranch"],
+ 'ONOSTag='+prop["ONOSTag"],
+ 'WikiPrefix='+prop["WikiPrefix"],
+ 'WORKSPACE='+workSpace]){
+ sh '''#!/bin/bash -l
+ set -i # interactive
+ set +e
+ shopt -s expand_aliases # expand alias in non-interactive mode
+ export PYTHONUNBUFFERED=1
+
+ ifconfig
+
+ echo "ONOS Branch is: $ONOSBranch"
+ echo "TestON Branch is: $TestONBranch"
+ echo "Test date: "
+ date
+
+ cd ~
+ export PATH=$PATH:onos/tools/test/bin
+
+ . .bash_killcmd
+ killTestONall
+ onos-group uninstall
+ timeout 240 stc teardown | head -100
+
+ cd ~/OnosSystemTest/TestON/bin
+ git log |head
+ ./cleanup.sh -f
+ ''' + "./cli.py run " + testName + '''
+ ./cleanup.sh -f
+ cd ~/onos/tools/package/config
+ git clean -df'''
+
+ // For the Wiki page
+ sh '''#!/bin/bash -i
+
+ echo "ONOS Branch is: ${ONOSBranch}"
+ echo "TestON Branch is: ${TestONBranch}"
+
+ echo "Job name is: "''' + testName + '''
+ echo "Workspace is: ${WORKSPACE}/"
+
+ echo "Wiki page to post is: ${WikiPrefix}-"
+
+ # remove any leftover files from previous tests
+ sudo rm ${WORKSPACE}/*Wiki.txt
+ sudo rm ${WORKSPACE}/*Summary.txt
+ sudo rm ${WORKSPACE}/*Result.txt
+ sudo rm ${WORKSPACE}/*.csv
+
+ #copy files to workspace
+ cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
+ sudo cp *.txt ${WORKSPACE}/
+ sudo cp *.csv ${WORKSPACE}/
+ cd ${WORKSPACE}/
+ for i in *.csv
+ do mv "$i" "$WikiPrefix"-"$i"
+ done
+ ls -al
+ cd '''
+
+ if( prop["manualRun"] == "false" ){
+ // Post Results
+ withCredentials([
+ string(credentialsId: 'db_pass', variable: 'pass'),
+ string(credentialsId: 'db_user', variable: 'user'),
+ string(credentialsId: 'db_host', variable: 'host'),
+ string(credentialsId: 'db_port', variable: 'port')]) {
+ def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
+
+ sh '''#!/bin/bash
+ set +e
+ export DATE=\$(date +%F_%T)
+ cd ~
+ pwd
+ sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
+ do
+ echo \$line
+ echo ''' + database_command + '''
+
+ done
+ Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
+
+ }
+ }
+ // Fetch Logs
+ sh '''#!/bin/bash
+
+ cd ~/OnosSystemTest/TestON/logs
+ echo "Job Name is: " + ''' + testName + '''
+ TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
+ echo "########################################################################################"
+ echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
+ echo "########################################################################################"
+ cd $TestONlogDir
+ if [ $? -eq 1 ]
+ then
+ echo "Job name does not match any test suite name to move log!"
+ else
+ pwd
+ for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
+ fi
+ cd'''
+ fileContents = readFile workSpace+"/"+USECASE[testName]['wiki_file']
+
+ }
+ }
+ if( prop["manualRun"] == "false" ){
+ def post = build job: "Pipeline_postjob_BM", propagate: false,
+ parameters: [
+ string(name: 'Wiki_Contents', value: fileContents),
+ string(name: 'Wiki_Link', value: USECASE[testName]['wiki_link'])
+ ]
+ }
+ node("TestStation-BMs"){
+ sh '''#!/bin/bash
+
+ if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
+ then
+ echo ''' + testName + " : All passed." + '''
+ else
+ echo ''' + testName + " : not all passed." + '''
+ DoingThisToSetTheResultFalse
+ fi'''
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/TestON/core/logger.py b/TestON/core/logger.py
index b3b5746..ed9b0bd 100644
--- a/TestON/core/logger.py
+++ b/TestON/core/logger.py
@@ -123,6 +123,8 @@
main.WikiFileName = main.logdir + "/" + main.TEST + "Wiki.txt"
main.SummaryFileName = main.logdir + "/" + main.TEST + "Summary.txt"
main.JenkinsCSV = main.logdir + "/" + main.TEST + ".csv"
+ main.resultFile = main.logdir + "/" + main.TEST + "Result.txt"
+
main.TOTAL_TC_SUCCESS = 0
#### Add log-level - Report
@@ -299,9 +301,16 @@
#NOTE: the elements were orded based on the colors assigned to the data
logfile = open(main.JenkinsCSV ,"w")
logfile.write(",".join( ['Tests Failed', 'Tests Passed', 'Tests Planned'] ) + "\n")
- logfile.write(",".join( [str(int(main.TOTAL_TC_FAIL)), str(int(main.TOTAL_TC_PASS)), str(int(main.TOTAL_TC_PLANNED))] ))
+ logfile.write(",".join( [str(int(main.TOTAL_TC_FAIL)), str(int(main.TOTAL_TC_PASS)), str(int(main.TOTAL_TC_PLANNED))] ) + "\n")
logfile.close()
+ executedStatus = open(main.resultFile, "w")
+ if main.TOTAL_TC_FAIL == 0 and main.TOTAL_TC_NORESULT + main.TOTAL_TC_PASS == main.TOTAL_TC_PLANNED:
+ executedStatus.write("1\n")
+ else:
+ executedStatus.write("0\n")
+ executedStatus.close()
+
def updateCaseResults(self,main):
'''
Update the case result based on the steps execution and asserting each step in the test-case
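The Result.txt written above is the pass/fail handshake that the Jenkinsfiles grep for: "1" only when no case failed and every planned case produced a result, so a no-result case does not flip the flag by itself. A standalone Groovy sketch of that rule:

    def resultFlag = { int fail, int pass, int noResult, int planned ->
        ( fail == 0 && noResult + pass == planned ) ? "1" : "0"
    }
    assert resultFlag( 0, 10, 0, 10 ) == "1" // all passed
    assert resultFlag( 1,  9, 0, 10 ) == "0" // any failure fails the stage
    assert resultFlag( 0,  8, 2, 10 ) == "1" // no-result cases still count toward planned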
diff --git a/TestON/tests/SCPF/Jenkinsfile b/TestON/tests/SCPF/Jenkinsfile
deleted file mode 100644
index bdcdc89..0000000
--- a/TestON/tests/SCPF/Jenkinsfile
+++ /dev/null
@@ -1,184 +0,0 @@
-#!groovy
-// This is a Jenkinsfile for a scripted pipeline for the SCPF tests
-// properties([pipelineTriggers([cron('30 19 * * *')])])
-
-// TODO: Exception handling around steps
-SCPF = [
- SCPFcbench: [ test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB'],
- SCPFhostLat: [ test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency'],
- SCPFportLat: [ test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb'],
- SCPFflowTp1g: [ test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB'],
- SCPFflowTp1gWithFlowObj: [ test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj'],
- SCPFscaleTopo: [ test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb'],
- SCPFswitchLat: [ test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb'],
- SCPFbatchFlowResp: [ test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData'],
- SCPFintentEventTp: [ test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB'],
- SCPFintentRerouteLat: [ test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB'],
- SCPFscalingMaxIntents: [ test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB'],
- SCPFintentEventTpWithFlowObj: [ test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB'],
- SCPFintentInstallWithdrawLat: [ test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB'],
- SCPFintentRerouteLatWithFlowObj: [ test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj'],
- SCPFscalingMaxIntentsWithFlowObj: [ test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO'],
- SCPFintentInstallWithdrawLatWithFlowObj: [ test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO']
-]
-
-// Get just the test names
-def defaultTests = SCPF.keySet().toArray()
-// Convert to a string with new line deliminators for user input
-StringBuilder sb = new StringBuilder();
-for (String s : defaultTests)
-{
- sb.append(s);
- sb.append("\n");
-}
-choices = sb.toString();
-// Define sets of tests
-SCPF_Basic = "SCPFswitchLat\nSCPFportLat\nSCPFintentInstallWithdrawLat\nSCPFintentEventTp\nSCPFflowTp1g\nSCPFcbench\nSCPFbatchFlowResp"
-SCPF_ExtraSetA = "SCPFintentRerouteLat\nSCPFscalingMaxIntents\nSCPFhostLat\nSCPFscaleTopo"
-SCPF_ExtraSetB = "SCPFintentInstallWithdrawLatWithFlowObj\nSCPFintentEventTpWithFlowObj\nSCPFintentRerouteLatWithFlowObj\nSCPFscalingMaxIntentsWithFlowObj\nSCPFflowTp1gWithFlowObj"
-
-try {
- timeout(time: 120, unit: 'SECONDS') {
- // This is kind of hacky, I can't seem to find the correct way to get a "build with parameters" button
- testcases = input message: 'Tests to run?', parameters:[[$class:'TextParameterDefinition', defaultValue: choices, description:'', name: 'Run these tests']]
- }
-} catch(org.jenkinsci.plugins.workflow.steps.FlowInterruptedException Err) {
- echo("Input timed out or cancled, using default values")
- // Set tests based on day of week
- def now = new Date()
- echo(now.toString())
- today = now[Calendar.DAY_OF_WEEK]
- switch (today) {
- case Calendar.MONDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetB
- println "Defaulting to Monday tests:" + choices
- break
- case Calendar.TUESDAY:
- choices = SCPF_Basic
- println "Defaulting to Tuesday tests:" + choices
- break
- case Calendar.WEDNESDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetA
- println "Defaulting to Wednesday tests:" + choices
- break
- case Calendar.THURSDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetB
- println "Defaulting to Thursday tests:" + choices
- break
- case Calendar.FRIDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetA + "\n" + SCPF_ExtraSetB
- println "Defaulting to Fridat tests:" + choices
- break
- case Calendar.SATURDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetA + "\n" + SCPF_ExtraSetB
- println "Defaulting to Saturday tests:" + choices
- break
- case Calendar.SUNDAY:
- choices = SCPF_Basic + "\n" + SCPF_ExtraSetA
- println "Defaulting to Sunday tests:" + choices
- break
- }
- testcases = choices
-}
-
-echo("Testcases:")
-//println testcases
-// TODO REMOVE AFTER DEBUGGING
-def testsToRun = testcases.tokenize("\n;, ")
-for (String test : testsToRun) {
- println test
-}
-
-def tests = [:]
-for (int i = 0; i < testsToRun.size(); i++) {
- // Get the actual string here.
- def testName = testsToRun[i]
- def stepName = "Running ${testName}"
- tests[stepName] = SCPFTest(testName)
-}
-
-// run the tests
-parallel tests
-
-// The testName should be the key from the SCPF map
-def SCPFTest( testName ) {
- return {
- node ("TestStation-BMs"){ // only run SCPF tests on the BM cluster
- def prop = readProperties(file:'/var/jenkins/TestONOS.property') // TODO set defaults
-
- withEnv(['ONOSBranch='+prop["ONOSBranch"],
- 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
- 'TestONBranch='+prop["TestONBranch"],
- 'ONOSTag='+prop["ONOSTag"],
- 'WikiPrefix='+prop["WikiPrefix"]]){
- stage(testName) {
- sh '''#!/bin/bash -l
- set -i # interactive
- shopt -s expand_aliases # expand alias in non-interactive mode
- export PYTHONUNBUFFERED=1
-
- ifconfig
-
- echo "ONOS Branch is: $ONOSBranch"
- echo "TestON Branch is: $TestONBranch"
- echo "Test date: "
- date
-
- cd ~
- export PATH=$PATH:onos/tools/test/bin
-
- timeout 240 stc shutdown
- timeout 240 stc teardown
- timeout 240 stc shutdown
-
- cd ~/OnosSystemTest/TestON/bin
- git log |head
- ./cleanup.sh
- ''' + "./cli.py run " + SCPF[testName]['test']
-
- // Post Results
- withCredentials([
- string(credentialsId: 'db_pass', variable: 'pass'),
- string(credentialsId: 'db_user', variable: 'user'),
- string(credentialsId: 'db_host', variable: 'host'),
- string(credentialsId: 'db_port', variable: 'port')]) {
- def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', '\$ONOSBranch', \$line);\""
- if (testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat") {
- database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
- }
- sh '''#!/bin/bash
-
- export DATE=\$(date +%F_%T)
- cd ~
- pwd
- cd /tmp
- while read line
- do
-
- echo \$line
- echo ''' + database_command + '''
-
- done< ''' + SCPF[testName]['file']
- }
- // Fetch Logs
- sh '''#!/bin/bash
-
- cd ~/OnosSystemTest/TestON/logs
- echo "Job Name is: ${JOB_NAME}"
- TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
- echo "########################################################################################"
- echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
- echo "########################################################################################"
- cd $TestONlogDir
- if [ $? -eq 1 ]
- then
- echo "Job name does not match any test suite name to move log!"
- else
- pwd
- for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
- fi'''
- }
- }
- }
- }
-}