#!groovy
// Jenkinsfile defining the scripted Jenkins pipeline that runs the ONOS HA test suite.
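// It reads the shared test properties, builds one closure per HA test, runs them
// sequentially, and for non-manual runs posts the results to the wiki and the results database.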
def prop = null
node("TestStation-VMs"){
    prop = readProperties(file:'/var/jenkins/TestONOS.property')
}
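// The property file supplies the keys read below; illustrative (not actual) values:
//     ONOSBranch=master
//     TestONBranch=master
//     ONOSTag=
//     ONOSJVMHeap=8G
//     WikiPrefix=master
//     manualRun=false
//     Tests=HAsanity,HAclusterRestart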
// TODO: Exception handling around steps
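// Map each HA test to the wiki page it posts to (wiki_link) and the wiki text file
// the run leaves in the job workspace (wiki_file).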
HA = [
    "HAsanity" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Sanity", wiki_file:"HAsanityWiki.txt"],
    "HAswapNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt"],
    "HAscaling" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Scaling", wiki_file:"HAscalingWiki.txt"],
    "HAclusterRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt"],
    "HAstopNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Stop Nodes", wiki_file:"HAstopNodesWiki.txt"],
    "HAfullNetPartition" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt"],
    "HAsingleInstanceRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt"],
    "HAkillNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Kill Nodes", wiki_file:"HAkillNodesWiki.txt"] ]
// Globals used when posting results: the postgres table the rows are inserted into,
// the result-set name stored with each row, and the R script / output directory used
// to generate the test-case graphs.
table_name = "executed_test_tests"
result_name = "executed_test_results"
graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/testCaseGraphGenerator.R"
graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"
echo("Testcases:")
def testsToRun = prop["Tests"].tokenize("\n;, ")
for ( String test : testsToRun ) {
    println test
}
def tests = [:]
for( String test : HA.keySet() ){
    def toBeRun = testsToRun.contains( test )
    def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
    tests[stepName] = HATest(test, toBeRun, prop)
}
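// Each map value is a closure returned by HATest(); nothing runs until call() below.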
// run the tests
for ( test in tests.keySet() ){
    tests[test].call()
}
// The testName should be a key of the HA map above
def HATest( testName, toBeRun, prop ) {
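    // Return the test as a closure so it can be stored in the map above and invoked later;
    // catchError marks the build as FAILURE on an error while letting the remaining tests run.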
    return {
        catchError{
            stage(testName) {
                if ( toBeRun ){
                    workSpace = "/var/jenkins/workspace/"+testName
                    def fileContents = ""
                    node("TestStation-VMs"){
                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
                                 'TestONBranch='+prop["TestONBranch"],
                                 'ONOSTag='+prop["ONOSTag"],
                                 'WikiPrefix='+prop["WikiPrefix"],
                                 'WORKSPACE='+workSpace]){
                            sh '''#!/bin/bash -l
set -i # interactive
set +e
shopt -s expand_aliases # expand alias in non-interactive mode
export PYTHONUNBUFFERED=1
ifconfig
echo "ONOS Branch is: ${ONOSBranch}"
echo "TestON Branch is: ${TestONBranch}"
echo "Test date: "
date
cd ~
export PATH=$PATH:onos/tools/test/bin
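# shut down and tear down any ONOS test cell left over from a previous run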
timeout 240 stc shutdown | head -100
timeout 240 stc teardown | head -100
timeout 240 stc shutdown | head -100
cd ~/OnosSystemTest/TestON/bin
git log |head
./cleanup.sh -f
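# launch the requested TestON test, then clean up again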
''' + "./cli.py run " + testName+ '''
./cleanup.sh -f
cd'''
                            // For the Wiki page
                            sh '''#!/bin/bash -i
set +e
echo "ONOS Branch is: ${ONOSBranch}"
echo "TestON Branch is: ${TestONBranch}"
echo "Job name is: "''' + testName + '''
echo "Workspace is: ${WORKSPACE}/"
echo "Wiki page to post is: ${WikiPrefix}-"
# remove any leftover files from previous tests
sudo rm ${WORKSPACE}/*Wiki.txt
sudo rm ${WORKSPACE}/*Summary.txt
sudo rm ${WORKSPACE}/*Result.txt
sudo rm ${WORKSPACE}/*.csv
#copy files to workspace
cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
sudo cp *.txt ${WORKSPACE}/
sudo cp *.csv ${WORKSPACE}/
cd ${WORKSPACE}/
for i in *.csv
do mv "$i" "$WikiPrefix"-"$i"
done
ls -al
cd '''
if( prop["manualRun"] == "false" ){
// Post Results
withCredentials([
string(credentialsId: 'db_pass', variable: 'pass'),
string(credentialsId: 'db_user', variable: 'user'),
string(credentialsId: 'db_host', variable: 'host'),
string(credentialsId: 'db_port', variable: 'port')]) {
def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
sh '''#!/bin/bash
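# for each data row of the result CSV (header skipped), echo the row and the psql INSERT,
# then run the graph generator R script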
set +e
export DATE=\$(date +%F_%T)
cd ~
pwd
sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
do
echo \$line
echo ''' + database_command + '''
done
Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
                                }
                            }
                            // Fetch Logs
                            sh '''#!/bin/bash
set +e
cd ~/OnosSystemTest/TestON/logs
echo "Job Name is: " + ''' + testName + '''
TestONlogDir=$(ls -t | grep ${TEST_NAME}_ |head -1)
echo "########################################################################################"
echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
echo "########################################################################################"
cd $TestONlogDir
if [ $? -eq 1 ]
then
echo "Job name does not match any test suite name to move log!"
else
pwd
for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
fi
cd'''
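                            // read the wiki text file produced by this run so it can be posted after leaving the node block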
                            fileContents = readFile workSpace+"/"+HA[testName]['wiki_file']
                        }
                    }
                    if( prop["manualRun"] == "false" ){
                        def post = build job: "Pipeline_postjob_VM", propagate: false,
                            parameters: [
                                string(name: 'Wiki_Contents', value: fileContents),
                                string(name: 'Wiki_Link', value: HA[testName]['wiki_link'])
                            ]
                    }
                    node("TestStation-VMs"){
                        sh '''#!/bin/bash
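# <testName>Result.txt was copied into the workspace earlier; a "1" in it is treated as an all-passed run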
if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
then
echo ''' + testName + " : All passed." + '''
else
echo ''' + testName + " : not all passed." + '''
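# deliberately run a nonexistent command so this step exits nonzero and catchError records the failure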
DoingThisToSetTheResultFalse
fi'''
                    }
                }
            }
        }
    }
}