#!groovy
// This is a Jenkinsfile for a scripted pipeline for the HA tests

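// Read the shared job configuration (branches, JVM heap size, which tests to run, etc.)
// from the property file kept on a test-station VM.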
def prop = null
node("TestStation-VMs"){
    prop = readProperties(file:'/var/jenkins/TestONOS.property')
}
// TODO: Exception handling around steps
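
// One possible shape for the TODO above: a small wrapper that records a step failure
// and marks the build UNSTABLE instead of aborting the whole run. This is only a
// sketch; safeStep is a hypothetical name and is not called anywhere in this pipeline.
def safeStep = { label, body ->
    try {
        body()
    } catch ( exc ) {
        echo label + " failed: " + exc
        currentBuild.result = "UNSTABLE"
    }
}

// Per-test metadata: the wiki page each test posts to and the wiki text file the
// test run produces.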
HA = [
    "HAsanity" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Sanity", wiki_file:"HAsanityWiki.txt"],
    "HAswapNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt"],
    "HAscaling" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Scaling", wiki_file:"HAscalingWiki.txt"],
    "HAclusterRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt"],
    "HAstopNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Stop Nodes", wiki_file:"HAstopNodes.txt"],
    "HAfullNetPartition" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt"],
    "HAsingleInstanceRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt"],
    "HAkillNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Kill Nodes", wiki_file:"HAkillNodesWiki.txt"] ]

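// Result-table names used by the psql INSERT below, plus the R script and output
// directory used to regenerate this test's trend graph after each run.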
table_name = "executed_test_tests"
result_name = "executed_test_results"
graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/testCaseGraphGenerator.R"
graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"

echo("Testcases:")
def testsToRun = null
testsToRun = prop["Tests"].tokenize("\n;, ")
for ( String test : testsToRun ) {
    println test
}

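// Build a map of named closures, one per HA test: tests listed in the property file
// get a real run, everything else gets a stage that is skipped.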
def tests = [:]
for( String test : HA.keySet() ){
    toBeRun = testsToRun.contains( test )
    def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
    tests[stepName] = HATest(test, toBeRun, prop)
}

// run the tests
for ( test in tests.keySet() ){
    tests[test].call()
}

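// Builds and returns the closure for one HA test: when toBeRun is true it runs the
// suite on a test-station VM, collects logs and wiki output, and (for scheduled runs)
// posts results and publishes the wiki page; otherwise the stage is an empty placeholder.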
// The testName should be a key from the HA map above
def HATest( testName, toBeRun, prop ) {
    return {
        catchError{
            stage(testName) {
                if ( toBeRun ){
                    workSpace = "/var/jenkins/workspace/"+testName
                    def fileContents = ""
                    node("TestStation-VMs"){
                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
                                 'TestONBranch='+prop["TestONBranch"],
                                 'ONOSTag='+prop["ONOSTag"],
                                 'WikiPrefix='+prop["WikiPrefix"],
                                 'WORKSPACE='+workSpace]){
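                            // Main run: shut down and tear down any leftover STC cell, clean up old
                            // test processes, run the suite through TestON's cli.py, then clean up again.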
                            sh '''#!/bin/bash -l
                            set -i # interactive
                            set +e
                            shopt -s expand_aliases # expand alias in non-interactive mode
                            export PYTHONUNBUFFERED=1

                            ifconfig

                            echo "ONOS Branch is: ${ONOSBranch}"
                            echo "TestON Branch is: ${TestONBranch}"
                            echo "Test date: "
                            date

                            cd ~
                            export PATH=$PATH:onos/tools/test/bin

                            timeout 240 stc shutdown | head -100
                            timeout 240 stc teardown | head -100
                            timeout 240 stc shutdown | head -100

                            cd ~/OnosSystemTest/TestON/bin
                            git log | head
                            ./cleanup.sh -f
                            ''' + "./cli.py run " + testName + '''
                            ./cleanup.sh -f
                            cd'''

                            // For the Wiki page: clear any leftover output from earlier runs, then copy
                            // the wiki text, summary and CSV files from the newest TestON log directory
                            // into the workspace, prefixing the CSV files with the wiki prefix.
                            sh '''#!/bin/bash -i
                            set +e
                            echo "ONOS Branch is: ${ONOSBranch}"
                            echo "TestON Branch is: ${TestONBranch}"

                            echo "Job name is: "''' + testName + '''
                            echo "Workspace is: ${WORKSPACE}/"

                            echo "Wiki page to post is: ${WikiPrefix}-"

                            # remove any leftover files from previous tests
                            sudo rm ${WORKSPACE}/*Wiki.txt
                            sudo rm ${WORKSPACE}/*Summary.txt
                            sudo rm ${WORKSPACE}/*Result.txt
                            sudo rm ${WORKSPACE}/*.csv

                            # copy files to workspace
                            cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
                            sudo cp *.txt ${WORKSPACE}/
                            sudo cp *.csv ${WORKSPACE}/
                            cd ${WORKSPACE}/
                            for i in *.csv
                            do mv "$i" "$WikiPrefix"-"$i"
                            done
                            ls -al
                            cd '''

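                            // For scheduled (non-manual) runs, insert the per-case results into the QA
                            // database and regenerate this test's trend graph.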
                            if( prop["manualRun"] == "false" ){
                                // Post Results
                                withCredentials([
                                    string(credentialsId: 'db_pass', variable: 'pass'),
                                    string(credentialsId: 'db_user', variable: 'user'),
                                    string(credentialsId: 'db_host', variable: 'host'),
                                    string(credentialsId: 'db_port', variable: 'port')]) {
                                    // Feed the password to psql's --password prompt on stdin and insert
                                    // one row per CSV data line.
                                    def database_command = "echo " + pass + " | psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""

                                    sh '''#!/bin/bash
                                    set +e
                                    export DATE=\$(date +%F_%T)
                                    cd ~
                                    pwd
                                    sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
                                    do
                                        echo \$line
                                        ''' + database_command + '''

                                    done
                                    Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory

                                }
                            }
                            // Fetch Logs: copy the ONOS logs from every cluster node into the newest
                            // TestON log directory for this test run.
                            sh '''#!/bin/bash
                            set +e
                            cd ~/OnosSystemTest/TestON/logs
                            echo "Job Name is: " ''' + testName + '''
                            TestONlogDir=$(ls -t | grep ''' + testName + '''_ | head -1)
                            echo "########################################################################################"
                            echo "##### copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
                            echo "########################################################################################"
                            cd $TestONlogDir
                            if [ $? -eq 1 ]
                            then
                                echo "Job name does not match any test suite name to move log!"
                            else
                                pwd
                                for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
                            fi
                            cd'''
                            fileContents = readFile workSpace+"/"+HA[testName]['wiki_file']
                        }
                    }

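                    // Hand the collected wiki text to the post-job, which publishes it to the
                    // test's wiki page; again only for scheduled (non-manual) runs.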
                    if( prop["manualRun"] == "false" ){
                        def post = build job: "Pipeline_postjob_VM", propagate: false,
                            parameters: [
                                string(name: 'Wiki_Contents', value: fileContents),
                                string(name: 'Wiki_Link', value: HA[testName]['wiki_link'])
                            ]
                    }
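                    // Grep the test's Result file for a passing result; the undefined command in
                    // the else branch deliberately fails this sh step so the surrounding catchError
                    // marks the build as failed when not everything passed.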
                    node("TestStation-VMs"){
                        sh '''#!/bin/bash

                        if [ -e ''' + workSpace + "/" + testName + "Result.txt ] && grep -q \"1\" " + workSpace + "/" + testName + "Result.txt" + '''
                        then
                            echo ''' + testName + " : All passed." + '''
                        else
                            echo ''' + testName + " : not all passed." + '''
                            DoingThisToSetTheResultFalse
                        fi'''
                    }
                }
            }
        }
    }
}