Merge "[ONOS-7280] Refactoring Jenkins file"
diff --git a/TestON/JenkinsFile/FUNCJenkinsFile b/TestON/JenkinsFile/FUNCJenkinsFile
index 857ff54..1201c92 100644
--- a/TestON/JenkinsFile/FUNCJenkinsFile
+++ b/TestON/JenkinsFile/FUNCJenkinsFile
@@ -1,192 +1,39 @@
 #!groovy
-import groovy.time.*
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initialize( "FUNC" );
 // This is a Jenkinsfile for a scripted pipeline for the FUNC tests
 def prop = null
-node("TestStation-VMs"){
-    prop = readProperties(file:'/var/jenkins/TestONOS.property')
-}
-// TODO: Exception handling around steps
+prop = funcs.getProperties()
 FUNC = [
-"FUNCipv6Intent" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCipv6Intent", wiki_file:"FUNCipv6IntentWiki.txt", ],
-"FUNCoptical" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCoptical", wiki_file:"FUNCopticalWiki.txt"],
-"FUNCflow" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCflow", wiki_file:"FUNCflowWiki.txt"],
-"FUNCnetCfg": [wiki_link:prop["WikiPrefix"]+"-"+"FUNCnetCfg", wiki_file:"FUNCnetCfgWiki.txt"],
-"FUNCovsdbtest" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCovsdbtestWiki", wiki_file:"FUNCovsdbtestWiki.txt"],
-"FUNCnetconf" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCnetconf", wiki_file:"FUNCnetconfWiki.txt"],
-"FUNCgroup" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCgroup", wiki_file:"FUNCgroupWiki.txt"],
-"FUNCintent" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCintent", wiki_file:"FUNCintentWiki.txt"],
-"FUNCintentRest" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCintentRest", wiki_file:"FUNCintentRestWiki.txt"],
-"FUNCformCluster" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCformCluster", wiki_file:"FUNCformClusterWiki.txt"]
+"FUNCipv6Intent" :  [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCipv6Intent", wiki_file:"FUNCipv6IntentWiki.txt" ],
+"FUNCoptical" :     [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCoptical", wiki_file:"FUNCopticalWiki.txt" ],
+"FUNCflow" :        [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCflow", wiki_file:"FUNCflowWiki.txt" ],
+"FUNCnetCfg":       [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCnetCfg", wiki_file:"FUNCnetCfgWiki.txt" ],
+"FUNCovsdbtest" :   [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCovsdbtestWiki", wiki_file:"FUNCovsdbtestWiki.txt" ],
+"FUNCnetconf" :     [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCnetconf", wiki_file:"FUNCnetconfWiki.txt" ],
+"FUNCgroup" :       [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCgroup", wiki_file:"FUNCgroupWiki.txt" ],
+"FUNCintent" :      [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCintent", wiki_file:"FUNCintentWiki.txt" ],
+"FUNCintentRest" :  [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCintentRest", wiki_file:"FUNCintentRestWiki.txt" ],
+"FUNCformCluster" : [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCformCluster", wiki_file:"FUNCformClusterWiki.txt" ]
 ]
-table_name = "executed_test_tests"
-result_name = "executed_test_results"
 graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R"
-graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"
-echo("Testcases:")
+graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
+echo( "Testcases:" )
 def testsToRun = null
-testsToRun = prop["Tests"].tokenize("\n;, ")
+testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
+funcs.printTestToRun( testsToRun )
 
-for ( String test : testsToRun ) {
-    println test
-}
 def tests = [:]
 for( String test : FUNC.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = FUNCTest(test, toBeRun, prop)
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false, FUNC, graph_generator_file, graph_saved_directory )
 }
 
-def now = new Date()
+start = funcs.getCurrentTime()
 // run the tests
 for ( test in tests.keySet() ){
-    tests[test].call()
+    tests[ test ].call()
 }
-try{
-    if( prop["manualRun"] == "false" ){
-        def end = new Date()
-        TimeDuration duration = TimeCategory.minus( end, now )
-        slackSend( color:"#5816EE", message: "FUNC tests ended at: " + end.toString() + "\nTime took : " + duration )
-    }
-}
-catch(all){}
-// The testName should be the key from the FUNC
-def FUNCTest( testName, toBeRun, prop ) {
-    return {
-        catchError{
-            stage(testName) {
-                if ( toBeRun ){
-                    workSpace = "/var/jenkins/workspace/"+testName
-                    def fileContents = ""
-                    node("TestStation-VMs"){
-                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
-                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
-                                 'TestONBranch='+prop["TestONBranch"],
-                                 'ONOSTag='+prop["ONOSTag"],
-                                 'WikiPrefix='+prop["WikiPrefix"],
-                                 'WORKSPACE='+workSpace]){
-                            sh '''#!/bin/bash -l
-                            set -i # interactive
-                            set +e
-                            shopt -s expand_aliases # expand alias in non-interactive mode
-                            export PYTHONUNBUFFERED=1
-
-                            ifconfig
-
-                            echo "ONOS Branch is: $ONOSBranch"
-                            echo "TestON Branch is: $TestONBranch"
-                            echo "Test date: "
-                            date
-
-                            cd ~
-                            export PATH=$PATH:onos/tools/test/bin
-
-                            timeout 240 stc shutdown | head -100
-                            timeout 240 stc teardown | head -100
-                            timeout 240 stc shutdown | head -100
-
-                            cd ~/OnosSystemTest/TestON/bin
-                            git log |head
-                            ./cleanup.sh
-                            ''' + "./cli.py run " + testName  + '''
-                            # cleanup config changes
-                            cd ~/onos/tools/package/config
-                            git clean -df'''
-
-                            // For the Wiki page
-                            sh '''#!/bin/bash -i
-                            set +e
-                            echo "ONOS Branch is: ${ONOSBranch}"
-                            echo "TestON Branch is: ${TestONBranch}"
-
-                            echo "Job name is: "''' + testName + '''
-                            echo "Workspace is: ${WORKSPACE}/"
-
-                            echo "Wiki page to post is: ${WikiPrefix}-"
-
-                            # remove any leftover files from previous tests
-                            sudo rm ${WORKSPACE}/*Wiki.txt
-                            sudo rm ${WORKSPACE}/*Summary.txt
-                            sudo rm ${WORKSPACE}/*Result.txt
-                            sudo rm ${WORKSPACE}/*.csv
-
-                            #copy files to workspace
-                            cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
-                            sudo cp *.txt ${WORKSPACE}/
-                            sudo cp *.csv ${WORKSPACE}/
-                            cd ${WORKSPACE}/
-                            for i in *.csv
-                                do mv "$i" "$WikiPrefix"-"$i"
-                            done
-                            ls -al
-                            cd '''
-                            if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                                // Post Results
-                                withCredentials([
-                                    string(credentialsId: 'db_pass', variable: 'pass'),
-                                    string(credentialsId: 'db_user', variable: 'user'),
-                                    string(credentialsId: 'db_host', variable: 'host'),
-                                    string(credentialsId: 'db_port', variable: 'port')]) {
-                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
-
-                                        sh '''#!/bin/bash
-                                        export DATE=\$(date +%F_%T)
-                                        cd ~
-                                        pwd
-                                        sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
-                                        do
-                                        echo \$line
-                                        echo ''' + database_command + '''
-
-                                        done
-                                        Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
-
-                                }
-                            }
-                            // Fetch Logs
-                            sh '''#!/bin/bash
-                            set +e
-                            cd ~/OnosSystemTest/TestON/logs
-                            echo "Job Name is: " + ''' + testName + '''
-                            TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
-                            echo "########################################################################################"
-                            echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
-                            echo "########################################################################################"
-                            cd $TestONlogDir
-                            if [ $? -eq 1 ]
-                            then
-                                echo "Job name does not match any test suite name to move log!"
-                            else
-                                pwd
-                                for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
-                            fi
-                            cd'''
-                            fileContents = readFile workSpace+"/"+FUNC[testName]['wiki_file']
-                        }
-                    }
-
-                    if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                        def post = build job: "Pipeline_postjob_VM", propagate: false,
-                            parameters: [
-                                string(name: 'Wiki_Contents', value: fileContents),
-                                string(name: 'Wiki_Link', value: FUNC[testName]['wiki_link'])
-                            ]
-                    }
-                    node("TestStation-VMs"){
-                        resultContents = readFile workSpace + "/" + testName + "Result.txt"
-                        resultContents = resultContents.split("\n")
-                        if( resultContents[ 0 ] == "1" ){
-                            print "All passed"
-                        }else{
-                            print "Failed"
-                            if( prop["manualRun"] == "false" )
-                                slackSend(color:"FF0000", message: "[" + prop["ONOSBranch"] + "]" + testName + " : Failed!\n"
-                                                                    + resultContents[ 1 ] + "\n"
-                                                                    + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
-                            Failed
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
+funcs.generateOverallGraph( prop, FUNC, graph_saved_directory )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
\ No newline at end of file
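With the shared helpers in place, each per-suite Jenkinsfile reduces to the same skeleton. A minimal sketch of the pattern for a single hypothetical run (for non-SCPF suites the fourth runTest argument, the "pure" test name, is just the test name again; FUNC, graph_generator_file and graph_saved_directory are as defined above):

    funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
    funcs.initialize( "FUNC" )
    def prop = funcs.getProperties()
    // args: testName, toBeRun, prop, pureTestName, graphOnly, testCategory, R script, graph output dir
    def run = funcs.runTest( "FUNCflow", true, prop, "FUNCflow", false, FUNC,
                             graph_generator_file, graph_saved_directory )
    run.call()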
diff --git a/TestON/JenkinsFile/GeneralFuncs.groovy b/TestON/JenkinsFile/GeneralFuncs.groovy
new file mode 100644
index 0000000..70b0fe0
--- /dev/null
+++ b/TestON/JenkinsFile/GeneralFuncs.groovy
@@ -0,0 +1,18 @@
+#!groovy
+
+def database_command_create( pass, host, port, user ){
+  return pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c "
+}
+def basicGraphPart( rFileName, host, port, user, pass, subject, branchName ){
+  return " Rscript " + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + subject + " " + branchName
+}
+def initBasicVars(){
+  rScriptLocation = "~/OnosSystemTest/TestON/JenkinsFile/scripts/"
+}
+def getTestList( tests ){
+    list = ""
+    for( String test : tests.keySet() )
+        list += test + ","
+    return list[ 0..-2 ]
+}
+return this;
\ No newline at end of file
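A quick sanity sketch of the helpers above (values hypothetical): getTestList joins the map keys with commas and list[ 0..-2 ] drops the trailing comma, while database_command_create builds the piped-password psql prefix that callers append an INSERT statement to.

    general = evaluate readTrusted( 'GeneralFuncs.groovy' )
    assert general.getTestList( [ "FUNCflow":[:], "FUNCintent":[:] ] ) == "FUNCflow,FUNCintent"
    // prints: pw|psql --host=db.example --port=5432 --username=jenkins --password --dbname onostest -c
    println general.database_command_create( "pw", "db.example", "5432", "jenkins" )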
diff --git a/TestON/JenkinsFile/HAJenkinsFile b/TestON/JenkinsFile/HAJenkinsFile
index 8493f3c..b856536 100644
--- a/TestON/JenkinsFile/HAJenkinsFile
+++ b/TestON/JenkinsFile/HAJenkinsFile
@@ -1,193 +1,39 @@
 #!groovy
-import groovy.time.*
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initialize( "HA" );
 // This is a Jenkinsfile for a scripted pipeline for the HA tests
-
 def prop = null
-node("TestStation-VMs"){
-    prop = readProperties(file:'/var/jenkins/TestONOS.property')
-}
-// TODO: Exception handling around steps
+prop = funcs.getProperties()
 HA = [
-"HAsanity" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Sanity", wiki_file:"HAsanityWiki.txt"],
-"HAswapNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt"],
-"HAscaling" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Scaling", wiki_file:"HAscalingWiki.txt"],
-"HAclusterRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt"],
-"HAstopNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Stop Nodes", wiki_file:"HAstopNodes.txt"],
-"HAfullNetPartition" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt"],
-"HAsingleInstanceRestart" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt"],
-"HAkillNodes" : [wiki_link:prop["WikiPrefix"]+"-"+"HA Kill Nodes", wiki_file:"HAkillNodesWiki.txt"] ]
+"HAsanity" :                [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Sanity", wiki_file:"HAsanityWiki.txt" ],
+"HAswapNodes" :             [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt" ],
+"HAscaling" :               [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Scaling", wiki_file:"HAscalingWiki.txt" ],
+"HAclusterRestart" :        [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt" ],
+"HAstopNodes" :             [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Stop Nodes", wiki_file:"HAstopNodes.txt" ],
+"HAfullNetPartition" :      [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt" ],
+"HAsingleInstanceRestart" : [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt" ],
+"HAupgrade" :               [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Upgrade", wiki_file:"HAupgradeWiki.txt" ],
+"HAupgradeRollback" :       [ wiki_link:prop[ "WikiPrefix" ] + "-" + "HA Upgrade Rollback", wiki_file:"HAupgradeRollbackWiki.txt" ] ]
 
-table_name = "executed_test_tests"
-result_name = "executed_test_results"
 graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R"
-graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_VM/"
+graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
 
-echo("Testcases:")
+echo( "Testcases:" )
 def testsToRun = null
-testsToRun = prop["Tests"].tokenize("\n;, ")
-for ( String test : testsToRun ) {
-    println test
-}
+testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
+funcs.printTestToRun( testsToRun )
 
 def tests = [:]
 for( String test : HA.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = HATest(test, toBeRun, prop)
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false, HA, graph_generator_file, graph_saved_directory )
 }
 
-def now = new Date()
+start = funcs.getCurrentTime()
 // run the tests
 for ( test in tests.keySet() ){
-    tests[test].call()
+    tests[ test ].call()
 }
-try{
-    if( prop["manualRun"] == "false" ){
-        def end = new Date()
-        TimeDuration duration = TimeCategory.minus( end, now )
-        slackSend( color:"#5816EE", message: "HA tests ended at: " + end.toString() + "\nTime took : " + duration )
-    }
-}
-catch(all){}
-
-// The testName should be the key from the FUNC
-def HATest( testName, toBeRun, prop ) {
-    return {
-        catchError{
-            stage(testName) {
-                if ( toBeRun ){
-                    workSpace = "/var/jenkins/workspace/"+testName
-                    def fileContents = ""
-                    node("TestStation-VMs"){
-                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
-                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
-                                 'TestONBranch='+prop["TestONBranch"],
-                                 'ONOSTag='+prop["ONOSTag"],
-                                 'WikiPrefix='+prop["WikiPrefix"],
-                                 'WORKSPACE='+workSpace]){
-                            sh '''#!/bin/bash -l
-                            set -i # interactive
-                            set +e
-                            shopt -s expand_aliases # expand alias in non-interactive mode
-                            export PYTHONUNBUFFERED=1
-
-                            ifconfig
-
-                            echo "ONOS Branch is: ${ONOSBranch}"
-                            echo "TestON Branch is: ${TestONBranch}"
-                            echo "Test date: "
-                            date
-
-                            cd ~
-                            export PATH=$PATH:onos/tools/test/bin
-
-                            timeout 240 stc shutdown | head -100
-                            timeout 240 stc teardown | head -100
-                            timeout 240 stc shutdown | head -100
-
-                            cd ~/OnosSystemTest/TestON/bin
-                            git log |head
-                            ./cleanup.sh -f
-                            ''' + "./cli.py run " + testName+ '''
-                            ./cleanup.sh -f
-                            cd'''
-
-                            // For the Wiki page
-                            sh '''#!/bin/bash -i
-                            set +e
-                            echo "ONOS Branch is: ${ONOSBranch}"
-                            echo "TestON Branch is: ${TestONBranch}"
-
-                            echo "Job name is: "''' + testName + '''
-                            echo "Workspace is: ${WORKSPACE}/"
-
-                            echo "Wiki page to post is: ${WikiPrefix}-"
-
-                            # remove any leftover files from previous tests
-                            sudo rm ${WORKSPACE}/*Wiki.txt
-                            sudo rm ${WORKSPACE}/*Summary.txt
-                            sudo rm ${WORKSPACE}/*Result.txt
-                            sudo rm ${WORKSPACE}/*.csv
-
-                            #copy files to workspace
-                            cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
-                            sudo cp *.txt ${WORKSPACE}/
-                            sudo cp *.csv ${WORKSPACE}/
-                            cd ${WORKSPACE}/
-                            for i in *.csv
-                                do mv "$i" "$WikiPrefix"-"$i"
-                            done
-                            ls -al
-                            cd '''
-
-                            if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                                // Post Results
-                                withCredentials([
-                                    string(credentialsId: 'db_pass', variable: 'pass'),
-                                    string(credentialsId: 'db_user', variable: 'user'),
-                                    string(credentialsId: 'db_host', variable: 'host'),
-                                    string(credentialsId: 'db_port', variable: 'port')]) {
-                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
-
-                                        sh '''#!/bin/bash
-                                        export DATE=\$(date +%F_%T)
-                                        cd ~
-                                        pwd
-                                        sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
-                                        do
-                                        echo \$line
-                                        echo ''' + database_command + '''
-
-                                        done
-                                        Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
-
-                                }
-                            }
-                            // Fetch Logs
-                            sh '''#!/bin/bash
-                            set +e
-                            cd ~/OnosSystemTest/TestON/logs
-                            echo "Job Name is: " + ''' + testName + '''
-                            TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
-                            echo "########################################################################################"
-                            echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
-                            echo "########################################################################################"
-                            cd $TestONlogDir
-                            if [ $? -eq 1 ]
-                            then
-                                echo "Job name does not match any test suite name to move log!"
-                            else
-                                pwd
-                                for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
-                            fi
-                            cd'''
-                            fileContents = readFile workSpace+"/"+HA[testName]['wiki_file']
-                        }
-                    }
-
-                    if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                        def post = build job: "Pipeline_postjob_VM", propagate: false,
-                            parameters: [
-                                string(name: 'Wiki_Contents', value: fileContents),
-                                string(name: 'Wiki_Link', value: HA[testName]['wiki_link'])
-                            ]
-                    }
-                    node("TestStation-VMs"){
-                        resultContents = readFile workSpace + "/" + testName + "Result.txt"
-                        resultContents = resultContents.split("\n")
-                        if( resultContents[ 0 ] == "1" ){
-                            print "All passed"
-                        }else{
-                            print "Failed"
-                            if( prop["manualRun"] == "false" )
-                                slackSend(color:"FF0000", message: "[" + prop["ONOSBranch"] + "]" + testName + " : Failed!\n"
-                                                                    + resultContents[ 1 ] + "\n"
-                                                                    + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
-                            Failed
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
+funcs.generateOverallGraph( prop, HA, graph_saved_directory )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
\ No newline at end of file
diff --git a/TestON/JenkinsFile/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
new file mode 100644
index 0000000..ea0ee12
--- /dev/null
+++ b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
@@ -0,0 +1,331 @@
+#!groovy
+import groovy.time.*
+generalFuncs = evaluate readTrusted( 'GeneralFuncs.groovy' )
+def initializeTrend( machine ){
+  table_name = "executed_test_tests"
+  result_name = "executed_test_results"
+  testMachine = "TestStation-" + machine + "s";
+  this.machine = machine
+  isSCPF = false
+  generalFuncs.initBasicVars();
+}
+def initialize( type, SCPFfuncs ){
+  init( type )
+  SCPFfunc = SCPFfuncs
+  isSCPF = true
+  machine = machineType[ type ]
+}
+def initialize( type ){
+  init( type )
+  SCPFfunc = null
+  table_name = "executed_test_tests"
+  result_name = "executed_test_results"
+  trend_generator_file = generalFuncs.rScriptLocation + "testCategoryTrend.R"
+  build_stats_generator_file = generalFuncs.rScriptLocation + "testCategoryBuildStats.R"
+  isSCPF = false
+}
+def init( type ){
+  machineType = [ "FUNC"    : "VM",
+                  "HA"      : "VM",
+                  "SR"      : "VM",
+                  "SCPF"    : "BM",
+                  "USECASE" : "BM" ]
+  testType = type;
+  testMachine = "TestStation-" + machineType[ type ] + "s";
+  generalFuncs.initBasicVars();
+}
+
+def printType(){
+  echo testType;
+  echo testMachine;
+}
+def getProperties(){
+  node( testMachine ){
+    return readProperties( file:'/var/jenkins/TestONOS.property' );
+  }
+}
+def getTestsToRun( testList ){
+  return testList.tokenize( "\n;, " )
+}
+def getCurrentTime(){
+  return new Date();
+}
+def getTotalTime( start, end ){
+  return TimeCategory.minus( end, start );
+}
+def printTestToRun( testList ){
+  for ( String test : testList ) {
+      println test;
+  }
+}
+def sendResultToSlack( start, isManualRun, branch ){
+  try{
+    if( isManualRun == "false" ){
+        end = getCurrentTime();
+    TimeDuration duration = TimeCategory.minus( end, start );
+        slackSend( color:"#5816EE",
+                   message: testType + "-" + branch + " tests ended at: " + end.toString() + "\nTime took : " + duration )
+    }
+  }
+  catch( all ){}
+}
+def initAndRunTest( testName, testCategory ){
+  return '''#!/bin/bash -l
+        set -i # interactive
+        set +e
+        shopt -s expand_aliases # expand alias in non-interactive mode
+        export PYTHONUNBUFFERED=1
+        ifconfig
+        ''' + borrowCell( testName ) + '''
+        echo "ONOS Branch is: $ONOSBranch"
+        echo "TestON Branch is: $TestONBranch"
+        echo "Test date: "
+        date
+        cd ~
+        export PATH=$PATH:onos/tools/test/bin
+        timeout 240 stc shutdown | head -100
+        timeout 240 stc teardown | head -100
+        timeout 240 stc shutdown | head -100
+        cd ~/OnosSystemTest/TestON/bin
+        git log |head
+        ./cleanup.sh -f
+        ''' + "./cli.py run " + ( !isSCPF ? testName : testCategory[ testName ][ 'test' ] )  + '''
+        ./cleanup.sh -f
+        # cleanup config changes
+        cd ~/onos/tools/package/config
+        git clean -df'''
+}
+def copyLogs( testName ){
+  result = ""
+  if( testType == "SR" ){
+      result = '''
+      sudo rm /var/jenkins/workspace/SR-log-${WikiPrefix}/*
+      sudo cp *karaf.log.* /var/jenkins/workspace/SR-log-${WikiPrefix}/
+      sudo cp *Flows* /var/jenkins/workspace/SR-log-${WikiPrefix}/
+      sudo cp *Groups* /var/jenkins/workspace/SR-log-${WikiPrefix}/
+      '''
+  }
+  return result
+}
+def cleanAndCopyFiles( testName ){
+  return '''#!/bin/bash -i
+        set +e
+        echo "ONOS Branch is: ${ONOSBranch}"
+        echo "TestON Branch is: ${TestONBranch}"
+        echo "Job name is: "''' + testName + '''
+        echo "Workspace is: ${WORKSPACE}/"
+        echo "Wiki page to post is: ${WikiPrefix}-"
+        # remove any leftover files from previous tests
+        sudo rm ${WORKSPACE}/*Wiki.txt
+        sudo rm ${WORKSPACE}/*Summary.txt
+        sudo rm ${WORKSPACE}/*Result.txt
+        sudo rm ${WORKSPACE}/*.csv
+        #copy files to workspace
+        cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
+        ''' + copyLogs( testName ) + '''
+        sudo cp *.txt ${WORKSPACE}/
+        sudo cp *.csv ${WORKSPACE}/
+        cd ${WORKSPACE}/
+        for i in *.csv
+            do mv "$i" "$WikiPrefix"-"$i"
+        done
+        ls -al
+        cd '''
+}
+def fetchLogs( testName ){
+  return '''#!/bin/bash
+  set +e
+  cd ~/OnosSystemTest/TestON/logs
+  echo "Job Name is: " + ''' + testName + '''
+  TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
+  echo "########################################################################################"
+  echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
+  echo "########################################################################################"
+  cd $TestONlogDir
+  if [ $? -eq 1 ]
+  then
+      echo "Job name does not match any test suite name to move log!"
+  else
+      pwd
+      for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
+  fi
+  cd'''
+}
+def isPostingResult( manual, postresult ){
+  return manual == "false" || postresult == "true"
+}
+def postResult( prop, graphOnly ){
+  if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+    def post = build job: "postjob-" + ( graphOnly ? machine : machineType[ testType ] ), propagate: false
+  }
+}
+def postLogs( testName, prefix ){
+  resultURL = ""
+  if( testType == "SR" ){
+    def post = build job: "SR-log-" + prefix, propagate: false
+    resultURL = post.getAbsoluteUrl()
+  }
+  return resultURL
+}
+def getSlackChannel(){
+  return "#" + ( testType == "SR" ? "sr-failures" : "jenkins-related" )
+}
+def analyzeResult( prop, workSpace, testName, otherTestName, resultURL, wikiLink, isSCPF ){
+  node( testMachine ){
+    resultContents = readFile workSpace + "/" + testName + "Result.txt"
+    resultContents = resultContents.split("\n")
+    if( resultContents[ 0 ] == "1" ){
+        print "All passed"
+    }else{
+        print "Failed"
+      if( prop[ "manualRun" ] == "false" ){
+        slackSend( channel:getSlackChannel(), color:"FF0000", message: "[" + prop[ "ONOSBranch" ] + "]"
+                                            + otherTestName + " : Failed!\n" + resultContents[ 1 ] + "\n"
+                                            + "[TestON log] : \n"
+                                            + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline"
+                                            + ( isSCPF ? "" : ( "\n[Result on Wiki] : \n" + "https://wiki.onosproject.org/display/ONOS/" + wikiLink.replaceAll( "\\s","+" ) ) )
+                                            + ( resultURL != "" ? ( "\n[Karaf log] : \n" + resultURL + "artifact/" ) : "" ),
+                   teamDomain: 'onosproject' )
+      }
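+        // bare 'Failed' is deliberately undefined (presumably): referencing it throws,
+        // so the enclosing catchError marks this stage as failed.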
+        Failed
+    }
+  }
+}
+def publishToConfluence( prop, wikiLink, file ){
+  if( isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+    publishConfluence siteName: 'wiki.onosproject.org', pageName: wikiLink, spaceName: 'ONOS',
+                  attachArchivedArtifacts: true,
+                  editorList: [
+                      confluenceWritePage( confluenceFile( file ) )
+                  ]
+  }
+}
+def runTest( testName, toBeRun, prop, pureTestName, graphOnly, testCategory, graph_generator_file, graph_saved_directory ) {
+  return {
+      catchError{
+          stage( testName ) {
+              if ( toBeRun ){
+                  workSpace = "/var/jenkins/workspace/" + testName
+                  def fileContents = ""
+                  node( testMachine ){
+                      withEnv( [ 'ONOSBranch=' + prop[ "ONOSBranch" ],
+                                 'ONOSJVMHeap=' + prop[ "ONOSJVMHeap" ],
+                                 'TestONBranch=' + prop[ "TestONBranch" ],
+                                 'ONOSTag=' + prop[ "ONOSTag" ],
+                                 'WikiPrefix=' + prop[ "WikiPrefix" ],
+                                 'WORKSPACE=' + workSpace ] ){
+                        if( ! graphOnly ){
+                          sh initAndRunTest( testName, testCategory )
+                          // For the Wiki page
+                          sh cleanAndCopyFiles( pureTestName )
+                        }
+                        databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory )
+                        if( ! graphOnly ){
+                          sh fetchLogs( pureTestName )
+                          if( !isSCPF )
+                            publishToConfluence( prop, testCategory[ testName ][ 'wiki_link' ], workSpace + "/" + testCategory[ testName ][ 'wiki_file' ] )
+                        }
+                      }
+                  }
+                  postResult( prop, graphOnly )
+                  if( ! graphOnly ){
+                    resultURL = postLogs( testName, prop[ "WikiPrefix" ] )
+                    analyzeResult( prop, workSpace, pureTestName, testName, resultURL, isSCPF ? "" : testCategory[ testName ][ 'wiki_link' ], isSCPF )
+                  }
+              }
+          }
+      }
+  }
+}
+def borrowCell( testName ){
+  result = ""
+  if( testType == "SR" ){
+      result = '''
+      cd
+      source ~/borrow.cell
+      '''
+  }
+  return result
+}
+def databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory ){
+  if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+      // Post Results
+      withCredentials( [
+          string( credentialsId: 'db_pass', variable: 'pass' ),
+          string( credentialsId: 'db_user', variable: 'user' ),
+          string( credentialsId: 'db_host', variable: 'host' ),
+          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+              def database_command = generalFuncs.database_command_create( pass, host, port, user ) + ( !isSCPF ? sqlCommand( testName ) : SCPFfunc.sqlCommand( testName ) )
+              sh '''#!/bin/bash
+              export DATE=\$(date +%F_%T)
+              cd ~
+              pwd ''' + ( graphOnly ? "" : ( !isSCPF ? databasePart( prop[ "WikiPrefix" ], testName, database_command )  :
+                         SCPFfunc.databasePart( testName, database_command ) ) ) + '''
+              ''' + ( !isSCPF ? graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory, graph_generator_file ) : SCPFfunc.getGraphGeneratingCommand( host, port, user, pass, testName, prop ) )
+      }
+  }
+}
+def generateCategoryStatsGraph( manualRun, postresult, stat_file, pie_file, type, branch, testListPart, save_path, pieTestListPart ){
+
+  if( isPostingResult( manualRun, postresult ) ){
+    node( testMachine ){
+
+      withCredentials( [
+          string( credentialsId: 'db_pass', variable: 'pass' ),
+          string( credentialsId: 'db_user', variable: 'user' ),
+          string( credentialsId: 'db_host', variable: 'host' ),
+          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+              sh '''#!/bin/bash
+              ''' + generalFuncs.basicGraphPart( generalFuncs.rScriptLocation + stat_file, host, port, user, pass, type, branch ) + " \"" + testListPart + "\" latest " + save_path + '''
+              ''' + getOverallPieGraph( generalFuncs.rScriptLocation + pie_file, host, port, user, pass, branch, type, pieTestListPart, 'y', save_path ) + '''
+              ''' + getOverallPieGraph( generalFuncs.rScriptLocation + pie_file, host, port, user, pass, branch, type, pieTestListPart, 'n', save_path )
+          }
+        }
+      postResult( [], true )
+    }
+}
+def makeTestList( list, commaNeeded ){
+  return generalFuncs.getTestList( list ) + ( commaNeeded ? "," : "" )
+}
+def createStatsList( testCategory, list, semiNeeded ){
+  return testCategory + "-" + generalFuncs.getTestList( list ) + ( semiNeeded ? ";" : "" )
+}
+def generateOverallGraph( prop, testCategory, graph_saved_directory ){
+
+  if( isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+    node( testMachine ){
+
+      withCredentials( [
+          string( credentialsId: 'db_pass', variable: 'pass' ),
+          string( credentialsId: 'db_user', variable: 'user' ),
+          string( credentialsId: 'db_host', variable: 'host' ),
+          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+              testList = generalFuncs.getTestList( testCategory )
+              sh '''#!/bin/bash
+              ''' + generalFuncs.basicGraphPart( trend_generator_file, host, port, user, pass, testType, prop[ "ONOSBranch" ] ) + " " + testList + " 20 " + graph_saved_directory
+          }
+        }
+      postResult( prop, false )
+    }
+}
+def getOverallPieGraph( file, host, port, user, pass, branch, type, testList, yOrN, path ){
+   return generalFuncs.basicGraphPart( file, host, port, user, pass, type, branch ) + " \"" + testList + "\" latest " + yOrN + " " + path
+}
+def sqlCommand( testName ){
+  return "\"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\" "
+}
+def graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory, graph_generator_file ){
+  return generalFuncs.basicGraphPart( graph_generator_file, host, port, user, pass, testName, prop[ "ONOSBranch" ] ) + " 20 " + graph_saved_directory
+}
+def databasePart( wikiPrefix, testName, database_command ){
+  return '''
+    sed 1d ''' + workSpace + "/" + wikiPrefix + "-" + testName + '''.csv | while read line
+    do
+    echo \$line
+    echo ''' + database_command + '''
+    done '''
+}
+return this;
\ No newline at end of file
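For reference, the per-row shell line that databaseAndGraph assembles for a non-SCPF test (generalFuncs.database_command_create plus sqlCommand) ends up as roughly the following; the leading echo pipes the password into psql's --password prompt (concrete values hypothetical):

    echo pw|psql --host=db.example --port=5432 --username=jenkins --password --dbname onostest \
      -c "INSERT INTO executed_test_tests VALUES('$DATE','executed_test_results','FUNCflow',$BUILD_NUMBER, '$ONOSBranch', $line);"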
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index 6b1da7b..fb35bbd 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -1,74 +1,80 @@
 #!groovy
-// This is a Jenkinsfile for a scripted pipeline for the SCPF tests
-// Define sets of tests
-previous_version = "1.11"
-before_previous_version = "1.10"
+
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initializeTrend( "VM" );
+previous_version = "1.12"
+before_previous_version = "1.11"
 AllTheTests=
 [
     "FUNC":[
-            "FUNCipv6Intent" : ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCoptical" :    ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCflow" :       ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCnetCfg":      ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCovsdbtest" :  ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCnetconf" :    ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCgroup" :      ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCformCluster" :["basic":false, "extra_A":false, "extra_B":false, "new_Test":true, "day":""],
-            "FUNCintent" :     ["basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCintentRest" : ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
+            "FUNCipv6Intent" : [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCoptical" :    [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCflow" :       [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCnetCfg":      [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCovsdbtest" :  [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCnetconf" :    [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCgroup" :      [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCformCluster" :[ "basic":false, "extra_A":false, "extra_B":false, "new_Test":true, "day":"" ],
+            "FUNCintent" :     [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCintentRest" : [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"" ],
     ],
     "HA":[
-            "HAsanity" :                ["basic":true, "extra_A":false, "extra_B":false, "day":""],
-            "HAclusterRestart" :        ["basic":true, "extra_A":false, "extra_B":false, "day":""],
-            "HAsingleInstanceRestart" : ["basic":true, "extra_A":false, "extra_B":false, "day":""],
-            "HAstopNodes" :             ["basic":false, "extra_A":true, "extra_B":false, "day":""],
-            "HAfullNetPartition" :      ["basic":false, "extra_A":true, "extra_B":false, "day":""],
-            "HAswapNodes" :             ["basic":false, "extra_A":false, "extra_B":true, "day":""],
-            "HAscaling" :               ["basic":false, "extra_A":false, "extra_B":true, "day":""],
-            "HAkillNodes" :             ["basic":false, "extra_A":false, "extra_B":true, "day":""]
+            "HAsanity" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "HAclusterRestart" :        [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "HAsingleInstanceRestart" : [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "HAupgrade" :               [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":true, "day":"" ],
+            "HAupgradeRollback" :       [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":true, "day":"" ],
+            "HAstopNodes" :             [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"" ],
+            "HAfullNetPartition" :      [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"" ],
+            "HAswapNodes" :             [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"" ],
+            "HAscaling" :               [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"" ],
+            "HAkillNodes" :             [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"" ]
     ],
     "SCPF":[
-            "SCPFswitchLat":                           ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFcbench":                              ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFportLat":                             ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFflowTp1g":                            ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentEventTp":                       ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFhostLat":                             ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            // batch will be on extra_A after fixing from the ONOS side.
-            "SCPFbatchFlowResp":                       ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentRerouteLat":                    ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentInstallWithdrawLat":            ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFflowTp1gWithFlowObj":                 ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentEventTpWithFlowObj":            ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentRerouteLatWithFlowObj":         ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFscalingMaxIntentsWithFlowObj":        ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFintentInstallWithdrawLatWithFlowObj": ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
-            "SCPFscaleTopo":                           ["basic":false, "extra_A":false, "extra_B":false, "extra_C":true, "extra_D":false, "new_Test":false, day:""],
-            "SCPFscalingMaxIntents":                   ["basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":true, "new_Test":false, day:""],
-            "SCPFmastershipFailoverLat":               ["basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":true, day:""]
+            "SCPFswitchLat":                           [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFcbench":                              [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFportLat":                             [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFflowTp1g":                            [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentEventTp":                       [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFhostLat":                             [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFbatchFlowResp":                       [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentRerouteLat":                    [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentInstallWithdrawLat":            [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFflowTp1gWithFlowObj":                 [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentEventTpWithFlowObj":            [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentRerouteLatWithFlowObj":         [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFscalingMaxIntentsWithFlowObj":        [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFintentInstallWithdrawLatWithFlowObj": [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFscaleTopo":                           [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":true, "extra_D":false, "new_Test":false, day:"" ],
+            "SCPFscalingMaxIntents":                   [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":true, "new_Test":false, day:"" ],
+            "SCPFmastershipFailoverLat":               [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":true, day:"" ]
     ],
     "USECASE":[
-            "FUNCvirNetNB" :                ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "FUNCbgpls" :                   ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "VPLSBasic" :                   ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "USECASE_SdnipFunction":        ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "USECASE_SdnipFunctionCluster": ["basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":""],
-            "PLATdockertest":               ["basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":""],
-            "SRSanity":                     ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SRSwitchFailure":              ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SRLinkFailure":                ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SROnosFailure":                ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SRClusterRestart":             ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SRDynamic":                    ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "SRHighAvailability":           ["basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":""],
-            "VPLSfailsafe" :                ["basic":false, "extra_A":false, "extra_B":false, "new_Test":true, "day":""]
+            "FUNCvirNetNB" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "FUNCbgpls" :                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "VPLSBasic" :                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "USECASE_SdnipFunction":        [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "USECASE_SdnipFunctionCluster": [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "PLATdockertest":               [ "basic":true, "extra_A":true, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRSanity":                     [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRSwitchFailure":              [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRLinkFailure":                [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SROnosFailure":                [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRClusterRestart":             [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRDynamic":                    [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "SRHighAvailability":           [ "basic":false, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ],
+            "VPLSfailsafe" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ]
+    ],
+    "SR":[
+            "SRBridging":                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"" ]
     ]
 ]
 testcases = [
-    "FUNC" : [ tests : "" , nodeName : "VM"],
-    "HA" : [ tests : "" , nodeName : "VM"],
-    "SCPF" : [ tests : "" , nodeName : "BM"],
-    "USECASE" : [ tests : "" , nodeName : "BM"]
+    "FUNC" : [ tests : "" , nodeName : "VM" ],
+    "HA" : [ tests : "" , nodeName : "VM" ],
+    "SR" : [ tests : "", nodeName : "VM" ],
+    "SCPF" : [ tests : "" , nodeName : "BM" ],
+    "USECASE" : [ tests : "" , nodeName : "BM" ]
 ]
 Prefix_organizer = [
     "FU" : "FUNC",
@@ -76,48 +82,55 @@
     "PL" : "USECASE",
     "SA" : "USECASE",
     "SC" : "SCPF",
-    "SR" : "USECASE",
+    "SR" : "SR",
     "US" : "USECASE",
     "VP" : "USECASE"
 ]
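organize_tests itself is outside this hunk; from the call site it presumably tokenizes the manual test list and routes each test into testcases by its two-letter prefix, along these lines:

    def organize_tests( tests ){
        testList = tests.tokenize( "\n;, " )
        for( String test in testList )
            testcases[ Prefix_organizer[ test[ 0..1 ] ] ][ "tests" ] += test + ","
    }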
 
-onos_branch = "master"
+onos_b = "master"
 test_branch = ""
 onos_tag = params.ONOSTag
-isOldFlow = false
+isOldFlow = true
 // Set tests based on day of week
 def now = new Date()
-echo(now.toString())
-today = now[Calendar.DAY_OF_WEEK]
+echo( now.toString() )
+today = now[ Calendar.DAY_OF_WEEK ]
 day = ""
 SCPF_choices = ""
 USECASE_choices = ""
 FUNC_choices = ""
 HA_choices = ""
+SR_choices = ""
+stat_graph_generator_file = "testCategoryBuildStats.R"
+pie_graph_generator_file = "testCategoryPiePassFail.R"
+graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
 
 manually_run = params.manual_run
 post_result = params.PostResult
 if( !manually_run ){
-    sendToSlack( '#03CD9F', ":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
-                            + "Starting tests on : " + now.toString()
-                            + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
+    slackSend( color:'#03CD9F',
+               message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
+                        + "Starting tests on : " + now.toString()
+                        + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
     testDivider( today )
     FUNC_choices =  lastCommaRemover( FUNC_choices )
     HA_choices =  lastCommaRemover( HA_choices )
     SCPF_choices =  lastCommaRemover( SCPF_choices )
     USECASE_choices =  lastCommaRemover( USECASE_choices )
+    SR_choices =  lastCommaRemover( SR_choices )
 }
 
 if ( manually_run ){
     organize_tests( params.Tests )
-    onos_branch = params.ONOSVersion
+    onos_b = params.ONOSVersion
     isOldFlow = params.isOldFlow
     println "Tests to be run manually : "
 }else{
-    testcases["SCPF"]["tests"] = SCPF_choices
-    testcases["USECASE"]["tests"] = USECASE_choices
-    testcases["FUNC"]["tests"] = FUNC_choices
-    testcases["HA"]["tests"] = HA_choices
+    testcases[ "SCPF" ][ "tests" ] = SCPF_choices
+    testcases[ "USECASE" ][ "tests" ] = USECASE_choices
+    testcases[ "FUNC" ][ "tests" ] = FUNC_choices
+    testcases[ "HA" ][ "tests" ] = HA_choices
+    testcases[ "SR" ][ "tests" ] = SR_choices
     println "Defaulting to " + day + " tests:"
 }
 print_tests( testcases )
@@ -127,17 +140,19 @@
 ]
 for( String test in testcases.keySet() ){
     println test
-    if (testcases[test]["tests"] != ""){
-        runTest[testcases[test]["nodeName"]][test] = trigger_pipeline( onos_branch, testcases[test]["tests"], testcases[test]["nodeName"], test, manually_run, onos_tag )
+    if ( testcases[ test ][ "tests" ] != "" ){
+        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
     }
 }
 def finalList = [:]
-finalList["VM"] = runTestSeq( runTest["VM"] )
-finalList["BM"] = runTestSeq( runTest["BM"] )
+finalList[ "VM" ] = runTestSeq( runTest[ "VM" ] )
+finalList[ "BM" ] = runTestSeq( runTest[ "BM" ] )
 parallel finalList
+//finalList[ "BM" ].call()
+generateStatGraph()
 
 def testDivider( today ){
-    switch (today) {
+    switch ( today ) {
         case Calendar.MONDAY:
             monday( true )
             tuesday( true, false )
@@ -145,7 +160,7 @@
             thursday( true, false )
             friday( true, false )
             day = "Monday"
-            sendToSlack( '#FFD988', "Tests to be run this weekdays : \n" + printDaysForTest() )
+            slackSend( color:'#FFD988', message:"Tests to be run this week : \n" + printDaysForTest() )
             break
         case Calendar.TUESDAY:
             tuesday( true, true )
@@ -158,23 +173,23 @@
         case Calendar.THURSDAY:
             thursday( true, true )
             day = "Thursday"
-            isOldFlow = true
+            isOldFlow = false
             break
         case Calendar.FRIDAY:
             friday( true, true )
             day = "Friday"
-            isOldFlow = true
+            isOldFlow = false
             break
         case Calendar.SATURDAY:
             saturday()
-            onos_branch= previous_version
+            onos_b = previous_version
             day = "Saturday"
             break
         case Calendar.SUNDAY:
             sunday()
-            onos_branch= before_previous_version
+            onos_b = before_previous_version
             day = "Sunday"
-            isOldFlow = true
+            isOldFlow = false
             break
     }
 }
@@ -202,6 +217,8 @@
     FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
     HA_choices += adder( "HA", "basic", true, "M", getResult )
     HA_choices += adder( "HA", "extra_A", true, "M", getResult )
+    //HA_choices += adder( "HA", "new_Test", true, "M", getResult )
+    SR_choices += adder( "SR", "basic", true, "M", getResult )
     SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
     SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
 }
@@ -211,6 +228,8 @@
     FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
     HA_choices += adder( "HA", "basic", getDay, "T", getResult )
     HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
+    HA_choices += adder( "HA", "new_Test", getDay, "T", getResult )
+    SR_choices += adder( "SR", "basic", getDay, "T", getResult )
     SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
     SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
     USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
@@ -223,6 +242,8 @@
     FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
     HA_choices += adder( "HA", "basic", getDay, "W", getResult )
     HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
+    //HA_choices += adder( "HA", "new_Test", getDay, "W", getResult )
+    SR_choices += adder( "SR", "basic", getDay, "W", getResult )
     SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
     SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
     SCPF_choices += adder( "SCPF", "new_Test", getDay, "W", getResult )
@@ -233,6 +254,8 @@
     FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
     HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
     HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
+    HA_choices += adder( "HA", "new_Test", getDay, "Th", getResult )
+    SR_choices += adder( "SR", "basic", getDay, "Th", getResult )
     SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
     SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
 }
@@ -242,6 +265,8 @@
     FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
     HA_choices += adder( "HA", "basic", getDay, "F", getResult )
     HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
+    //HA_choices += adder( "HA", "new_Test", getDay, "F", getResult )
+    SR_choices += adder( "SR", "basic", getDay, "F", getResult )
     SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
     SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
     SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
@@ -250,9 +275,12 @@
     FUNC_choices += adder( "FUNC", "basic", false, "Sa", true )
     FUNC_choices += adder( "FUNC", "extra_A", false, "Sa", true )
     FUNC_choices += adder( "FUNC", "extra_B", false, "Sa", true )
+    FUNC_choices += adder( "FUNC", "new_Test", true, "Sa", true )
     HA_choices += adder( "HA", "basic", false, "Sa", true )
     HA_choices += adder( "HA", "extra_A", false, "Sa", true )
     HA_choices += adder( "HA", "extra_B", false, "Sa", true )
+    HA_choices += adder( "HA", "new_Test", false, "Sa", true )
+    SR_choices += adder( "SR", "basic", false, "Sa", true )
     SCPF_choices += adder( "SCPF", "basic", false, "Sa", true )
     SCPF_choices += adder( "SCPF", "extra_A", false, "Sa", true )
     SCPF_choices += adder( "SCPF", "extra_B", false, "Sa", true )
@@ -260,7 +288,6 @@
     SCPF_choices += adder( "SCPF", "extra_D", false, "Sa", true )
     SCPF_choices += adder( "SCPF", "new_Test", false, "Sa", true )
     USECASE_choices += adder( "USECASE", "basic", false, "Sa", true )
-    USECASE_choices += adder( "USECASE", "new_Test", false, "Sa", true )
 }
 def sunday(){
     FUNC_choices += adder( "FUNC", "basic", false, "S", true )
@@ -269,6 +296,7 @@
     HA_choices += adder( "HA", "basic", false, "S", true )
     HA_choices += adder( "HA", "extra_A", false, "S", true )
     HA_choices += adder( "HA", "extra_B", false, "S", true )
+    SR_choices += adder( "SR", "basic", false, "S", true )
     SCPF_choices += adder( "SCPF", "basic", false, "S", true )
     USECASE_choices += adder( "USECASE", "basic", false, "S", true )
 }
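Note: adder() and the AllTheTests map it reads are defined in unchanged parts of this file, so only the call sites are visible in these hunks. Judging from those calls, adder( category, subset, tagDay, dayCode, collect ) returns the subset's tests as a comma-terminated string when collect is true and, when tagDay is true, records dayCode so printDaysForTest() can report the weekly schedule. A hypothetical sketch of that shape:

    def adder( testCat, set, getDay, day, getResult ){
        // Hypothetical: assumes AllTheTests[ testCat ][ test ][ set ] flags subset membership
        // and each test carries a "day" string read by printDaysForTest().
        result = ""
        for( String test in AllTheTests[ testCat ].keySet() ){
            if( AllTheTests[ testCat ][ test ][ set ] ){
                if( getResult )
                    result += test + ","
                if( getDay )
                    AllTheTests[ testCat ][ test ][ "day" ] += day + ","
            }
        }
        return result
    }
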
@@ -290,52 +318,65 @@
 def runTestSeq( testList ){
     return{
         for ( test in testList.keySet() ){
-            testList[test].call()
+            testList[ test ].call()
         }
     }
 }
 
 def print_tests( tests ){
     for( String test in tests.keySet() ){
-        if( tests[test]["tests"] != "" ){
+        if( tests[ test ][ "tests" ] != "" ){
             println test + ":"
-            println tests[test]["tests"]
+            println tests[ test ][ "tests" ]
         }
     }
 }
 def organize_tests( tests ){
-    testList = tests.tokenize("\n;, ")
+    testList = tests.tokenize( "\n;, " )
     for( String test in testList )
         testcases [ Prefix_organizer[ ( test == "FUNCbgpls" || test == "FUNCvirNetNB" ? "US" : ( test[ 0 ] + test[ 1 ] ) ) ] ][ "tests" ] += test + ","
 }
+def borrow_mn( jobOn ){
+    result = ""
+    if( jobOn == "SR" ){
+        result = "~/cell_borrow.sh"
+    }
+    return result
+}
+def trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
+    // parentheses required: string concatenation binds tighter than '?:' in Groovy,
+    // so without them this would always print "manually"
+    println jobOn + "-pipeline-" + ( manuallyRun ? "manually" : branch )
+    wiki = branch
+    if ( branch != "master" ){
+        branch = "onos-" + branch
+    }
+    test_branch = "master"
+    node( "TestStation-" + nodeName + "s" ){
+        envSetup( branch, test_branch, onosTag, jobOn, manuallyRun )
+
+        exportEnvProperty( branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag, isOldFlow )
+    }
+
+    jobToRun = jobOn + "-pipeline-" + ( manuallyRun ? "manually" : wiki )
+    build job: jobToRun, propagate: false
+}
 def trigger_pipeline( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
 // nodeName : "BM" or "VM"
 // jobOn : "SCPF" or "USECASE" or "FUNC" or "HA"
     return{
-        if (branch == "master"){
-            onos_branch = branch
-        }else{
-            onos_branch = "onos-" + branch
+        if( jobOn == "SR" ){
+            trigger( "1.11", "SRBridging", nodeName, jobOn, manuallyRun, onosTag )
+            trigger( "1.12", "SRBridging", nodeName, jobOn, manuallyRun, onosTag )
+            trigger( "master", "SRBridging", nodeName, jobOn, manuallyRun, onosTag )
+            returnCell( nodeName )
+        }else{
+            trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag )
         }
-        wiki = branch
-        test_branch = onos_branch
-        if (onos_branch == previous_version)
-            test_branch = "master"
-        println jobOn + "_Pipeline_" + manuallyRun ? "manually" : branch
-        node("TestStation-" + nodeName + "s"){
-            envSetup(onos_branch, test_branch, onosTag, jobOn, manuallyRun )
-
-            exportEnvProperty( onos_branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag, isOldFlow )
-        }
-
-        jobToRun = jobOn + "_Pipeline_" + ( manuallyRun ? "manually" : branch )
-        build job: jobToRun, propagate: false
     }
 }
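One robustness note on the SR path above: trigger() runs for 1.11, 1.12, and master before returnCell() hands the borrowed cell back, so an exception anywhere in between would leak the cell. A hedged sketch of a safer shape, using a hypothetical wrapper rather than this change's actual code:

    def withBorrowedCell( nodeName, body ){
        // hypothetical helper: always return the hardware cell, even on failure
        try {
            body()
        } finally {
            returnCell( nodeName )
        }
    }
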
 
 // export Environment properties.
 def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run, onosTag, isOldFlow ){
-    stage("export Property"){
+    stage( "export Property" ){
         sh '''
             echo "ONOSBranch=''' + onos_branch +'''" > /var/jenkins/TestONOS.property
             echo "TestONBranch=''' + test_branch +'''" >> /var/jenkins/TestONOS.property
@@ -350,24 +391,22 @@
         '''
     }
 }
-def sendToSlack( color, message ){
-    slackSend(color:color, message: message)
-}
 // Initialize the environment Setup for the onos and OnosSystemTest
 def envSetup( onos_branch, test_branch, onos_tag, jobOn, manuallyRun ){
-    stage("envSetup") {
+    stage( "envSetup" ) {
         sh '''#!/bin/bash -l
         set +e
         . ~/.bashrc
         env
+        ''' + borrow_mn( jobOn ) + '''
         ''' + preSetup( onos_branch, test_branch, onos_tag, manuallyRun ) + '''
         ''' + oldFlowCheck( jobOn, onos_branch ) + '''
         ''' + postSetup( onos_branch, test_branch, onos_tag, manuallyRun )
     }
 }
-def tagCheck(onos_tag, onos_branch){
+def tagCheck( onos_tag, onos_branch ){
     result = "git checkout "
-    if (onos_tag == "" )
+    if ( onos_tag == "" )
         result += onos_branch //create new local branch
     else
         result += onos_tag //checkout the tag
@@ -408,7 +447,7 @@
         git fetch --all # update all caches from remotes
         git reset --hard origin/''' + onos_branch + '''  # force local index to match remote branch
         git clean -df # clean any local files
-        ''' + tagCheck(onos_tag, onos_branch) + '''
+        ''' + tagCheck( onos_tag, onos_branch ) + '''
         git branch
         git log -1 --decorate
 
@@ -428,9 +467,9 @@
 }
 def oldFlowCheck( jobOn, onos_branch ){
     result = ""
-    if( isOldFlow && jobOn == "SCPF" && onos_branch== "master" )
-        result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/DistributedFlowRuleStore.java
-        sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/ECFlowRuleStore.java'''
+    if( jobOn == "SCPF" && ( onos_branch== "master" || onos_branch=="onos-1.12" ) )
+        result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/''' + ( isOldFlow ? "DistributedFlowRuleStore" : "ECFlowRuleStore" ) + '''.java
+        sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/''' + ( isOldFlow ? "ECFlowRuleStore" : "DistributedFlowRuleStore" ) + ".java"
     return result
 }
 def postSetup( onos_branch, test_branch, onos_tag, isManual ){
@@ -450,4 +489,26 @@
         git branch'''
     }
     return result
+}
+def returnCell( nodeName ){
+    node( "TestStation-" + nodeName + "s" ){
+        sh '''#!/bin/bash -l
+            set +e
+            . ~/.bashrc
+            env
+            ~/./return_cell.sh
+            '''
+    }
+}
+
+def generateStatGraph(){
+    if( !manually_run ){
+        testListPart = funcs.createStatsList( "FUNC", AllTheTests[ "FUNC" ], true ) +
+                       funcs.createStatsList( "HA", AllTheTests[ "HA" ], true ) +
+                       funcs.createStatsList( "USECASE", AllTheTests[ "USECASE" ], false )
+        pieTestList = funcs.makeTestList( AllTheTests[ "FUNC" ], true ) +
+                      funcs.makeTestList( AllTheTests[ "HA" ], true ) +
+                      funcs.makeTestList( AllTheTests[ "USECASE" ], false )
+        funcs.generateCategoryStatsGraph( "false", "true", stat_graph_generator_file, pie_graph_generator_file, "ALL", onos_b, testListPart, graph_saved_directory, pieTestList )
+    }
 }
\ No newline at end of file
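Note on oldFlowCheck() above: for SCPF runs on master or onos-1.12 it rewrites the @Component annotations so that exactly one flow-rule store is active, and unlike the old version it always normalizes both files, choosing the direction from isOldFlow. With isOldFlow true the interpolated shell disables DistributedFlowRuleStore and re-enables ECFlowRuleStore:

    sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/DistributedFlowRuleStore.java
    sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/ECFlowRuleStore.java

With isOldFlow false the two file names swap, so DistributedFlowRuleStore becomes the active store instead.
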
diff --git a/TestON/JenkinsFile/PerformanceFuncs.groovy b/TestON/JenkinsFile/PerformanceFuncs.groovy
new file mode 100644
index 0000000..458a389
--- /dev/null
+++ b/TestON/JenkinsFile/PerformanceFuncs.groovy
@@ -0,0 +1,90 @@
+#!groovy
+//generalFuncs = evaluate readTrusted( 'GeneralFuncs.groovy' )
+def init(){
+    none = [ "" ]
+    batches = [ 1, 100, 1000 ]
+    neighbors = [ 'y', 'n' ]
+    times = [ 'y', 'n' ]
+    SCPF = [
+        SCPFcbench:                              [ flows:false, test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:[ 'Cbench Test' ], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)' ],
+        SCPFhostLat:                             [ flows:false, test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:[ 'Host Latency Test' ], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+        SCPFportLat:                             [ flows:false, test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:[ 'Port Latency Test - Port Up','Port Latency Test - Port Down' ], dbCols:[ 'up_ofp_to_dev_avg, up_dev_to_link_avg, up_link_to_graph_avg', 'down_ofp_to_dev_avg, down_dev_to_link_avg, down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+        SCPFflowTp1g:                            [ flows:true, test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors, finalResult:1, graphTitle:[ 'Flow Throughput Test - neighbors=0', 'Flow Throughput Test - neighbors=4' ], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ],  y_axis:'Throughput (,000 Flows/sec)' ],
+        SCPFflowTp1gWithFlowObj:                 [ flows:true, test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0 ],
+        SCPFscaleTopo:                           [ flows:false, test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:[ 'Scale Topology Test' ], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)' ],
+        SCPFswitchLat:                           [ flows:false, test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:[ 'Switch Latency Test - Switch Up','Switch Latency Test - Switch Down' ], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_role_request_avg,role_request_to_role_reply_avg,role_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+        SCPFbatchFlowResp:                       [ flows:true, test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:[ 'Batch Flow Test - Post', 'Batch Flow Test - Del' ], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (ms)' ],
+        SCPFintentEventTp:                       [ flows:true, test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:[ 'Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4' ], dbCols:'SUM( avg ) as avg', dbWhere:[ 'AND scale=5 AND neighbors=0 GROUP BY date','AND scale=5 AND NOT neighbors=0 GROUP BY date' ], y_axis:'Throughput (Ops/sec)' ],
+        SCPFintentRerouteLat:                    [ flows:true, test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:[ 'Intent Reroute Test' ], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)' ],
+        SCPFscalingMaxIntents:                   [ flows:true, test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:none, finalResult:0 ],
+        SCPFintentEventTpWithFlowObj:            [ flows:true, test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0 ],
+        SCPFintentInstallWithdrawLat:            [ flows:true, test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:[ 'Intent Installation Test','Intent Withdrawal Test' ], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)' ],
+        SCPFintentRerouteLatWithFlowObj:         [ flows:true, test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0 ],
+        SCPFscalingMaxIntentsWithFlowObj:        [ flows:true, test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:none, finalResult:0 ],
+        SCPFintentInstallWithdrawLatWithFlowObj: [ flows:true, test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0 ],
+        SCPFmastershipFailoverLat:               [ flows:false, test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:[ 'Mastership Failover Test' ], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
+    ]
+    graph_saved_directory = "/var/jenkins/workspace/postjob-BM/"
+}
+def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName, isOldFlow ){
+    result = ""
+    for( extra in extras ){
+        result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName, isOldFlow ) + ";"
+    }
+    return result
+}
+def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName, isOldFlow ){
+
+    return generalFuncs.basicGraphPart( generalFuncs.rScriptLocation + rFileName, host, port, user, pass, testName, branchName ) +
+           " " + batch + " " + usingOldFlow( isOldFlow, testName ) + graph_saved_directory
+}
+def generateCombinedResultGraph( host, port, user, pass, testName, branchName, isOldFlow ){
+    result = ""
+
+    for ( int i=0; i< SCPF[ testName ][ 'graphTitle' ].size(); i++ ){
+        result += generalFuncs.basicGraphPart( generalFuncs.rScriptLocation + "SCPFLineGraph.R", host, port, user, pass, "\"" + SCPF[ testName ][ 'graphTitle' ][ i ] + "\"", branchName ) +
+        " " + 50 + " \"SELECT " + checkIfList( testName, 'dbCols', i ) + " FROM " + SCPF[ testName ][ 'table' ] + " WHERE  branch=\'" + branchName + "\' " + sqlOldFlow( isOldFlow, testName ) +
+        checkIfList( testName, 'dbWhere', i ) + " ORDER BY date DESC LIMIT 50\" \"" + SCPF[ testName ][ 'y_axis' ] + "\" " + hasOldFlow( isOldFlow, testName ) + graph_saved_directory + ";"
+    }
+    return result
+}
+def checkIfList( testName, forWhich, pos ){
+    return SCPF[ testName ][ forWhich ].getClass().getName() != "java.lang.String" ? SCPF[ testName ][ forWhich ][ pos ] :  SCPF[ testName ][ forWhich ]
+}
+def sqlOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? " AND " + ( isOldFlow == "true" ? "" : "NOT " ) + "is_old_flow " : ""
+}
+def oldFlowRuleCheck( isOldFlow, branch ){
+    this.isOldFlow = isOldFlow
+    if( isOldFlow == "false" ){
+        SCPF[ 'SCPFflowTp1g' ][ 'test' ] += " --params TEST/flows=" + ( branch == "onos-1.11" ? "4000" : "3500" )
+    }
+}
+def affectedByOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? "" + isOldFlow + ", " : ""
+}
+def usingOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? ( isOldFlow == "true" ? "y" : "n" ) + " " : ""
+}
+def hasOldFlow( isOldFlow, testName ){
+    return ( SCPF[ testName ][ 'flows' ] && isOldFlow == "true" ? "y" : "n" ) + " "
+}
+def sqlCommand( testName ){
+    if ( testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat" )
+        return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
+    return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
+}
+def databasePart( testName, database_command ){
+    return '''
+    cd /tmp
+    while read line
+    do
+    echo \$line
+    echo ''' + database_command + '''
+    done< ''' + SCPF[ testName ][ 'file' ]
+}
+def getGraphGeneratingCommand( host, port, user, pass, testName, prop ){
+    return getGraphCommand( SCPF[ testName ][ 'rFile' ], SCPF[ testName ][ 'extra' ], host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) + '''
+    ''' + ( SCPF[ testName ][ 'finalResult' ] ? generateCombinedResultGraph( host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) : "" )
+}
+return this;
\ No newline at end of file
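PerformanceFuncs.groovy ends with "return this;" so that evaluate readTrusted( ... ) hands back the script object; generalFuncs, commented out at the top, is presumably supplied by funcs.initialize( "SCPF", SCPFfuncs ) rather than loaded here. checkIfList() is what lets dbCols and dbWhere hold either one String shared by every graph or a List with an entry per graphTitle; for the SCPFportLat entry above:

    // dbCols is a List (one entry per graph), dbWhere a single shared String
    assert checkIfList( 'SCPFportLat', 'dbCols', 1 ) == 'down_ofp_to_dev_avg, down_dev_to_link_avg, down_link_to_graph_avg'
    assert checkIfList( 'SCPFportLat', 'dbWhere', 1 ) == 'AND scale=5'
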
diff --git a/TestON/JenkinsFile/SCPFJenkinsFile b/TestON/JenkinsFile/SCPFJenkinsFile
index 1ca7ab2..dc4f802 100644
--- a/TestON/JenkinsFile/SCPFJenkinsFile
+++ b/TestON/JenkinsFile/SCPFJenkinsFile
@@ -1,228 +1,31 @@
 #!groovy
-import groovy.time.*
+SCPFfuncs = evaluate readTrusted( 'PerformanceFuncs.groovy' )
+SCPFfuncs.init()
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initialize( "SCPF", SCPFfuncs );
 // This is a Jenkinsfile for a scripted pipeline for the SCPF tests
-// properties([pipelineTriggers([cron('30 19 * * *')])])
-
-// TODO: Exception handling around steps
-
-none = [ "" ]
-batches = [1,100,1000]
-neighbors = ['y', 'n']
-times = [ 'y', 'n' ]
-SCPF = [
-    SCPFcbench: [ flows:false, test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:['Cbench Test'], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)'],
-    SCPFhostLat: [ flows:false, test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:['Host Latency Test'], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)'],
-    SCPFportLat: [ flows:false, test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:['Port Latency Test - Port Up','Port Latency Test - Port Down'], dbCols:[ 'up_ofp_to_dev_avg,up_dev_to_link_avg,up_link_to_graph_avg', 'down_ofp_to_dev_avg,down_dev_to_link_avg,down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-    SCPFflowTp1g: [ flows:true, test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors,finalResult:1, graphTitle:['Flow Throughput Test - neighbors=0','Flow Throughput Test - neighbors=4'], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ],  y_axis:'Throughput (,000 Flows/sec)' ],
-    SCPFflowTp1gWithFlowObj: [ flows:true, test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0],
-    SCPFscaleTopo: [ flows:false, test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:['Scale Topology Test'], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)'],
-    SCPFswitchLat: [ flows:false, test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:['Switch Latency Test - Switch Up','Switch Latency Test - Switch Down'], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_role_request_avg,role_request_to_role_reply_avg,role_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-    SCPFbatchFlowResp: [ flows:true, test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:['Batch Flow Test - Post', 'Batch Flow Test - Del'], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (ms)'],
-    SCPFintentEventTp: [ flows:true, test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:['Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4'], dbCols:'SUM( avg ) as avg', dbWhere:[ 'AND scale=5 AND neighbors=0 GROUP BY date','AND scale=5 AND NOT neighbors=0 GROUP BY date' ], y_axis:'Throughput (Ops/sec)'],
-    SCPFintentRerouteLat: [ flows:true, test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:['Intent Reroute Test'], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
-    SCPFscalingMaxIntents: [ flows:true, test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:times, finalResult:0],
-    SCPFintentEventTpWithFlowObj: [ flows:true, test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0],
-    SCPFintentInstallWithdrawLat: [ flows:true, test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:['Intent Installation Test','Intent Withdrawal Test'], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
-    SCPFintentRerouteLatWithFlowObj: [ flows:true, test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
-    SCPFscalingMaxIntentsWithFlowObj: [ flows:true, test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:times, finalResult:0],
-    SCPFintentInstallWithdrawLatWithFlowObj: [ flows:true, test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
-    SCPFmastershipFailoverLat: [ flows:false, test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:['Mastership Failover Test'], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
-]
-
-echo("Testcases:")
-graph_generator_directory = "~/OnosSystemTest/TestON/JenkinsFile/scripts/"
-graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_BM/"
-def testsToRun = null
 def prop = null
-node("TestStation-BMs"){
-    prop = readProperties(file:'/var/jenkins/TestONOS.property') // TODO set defaults
-    testsToRun = prop["Tests"].tokenize("\n;, ")
-    for ( String test : testsToRun ) {
-        println test
-    }
-}
+prop = funcs.getProperties()
+
+echo( "Testcases:" )
+def testsToRun = null
+testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
+funcs.printTestToRun( testsToRun )
+
 isOldFlow = prop[ "isOldFlow" ]
-oldFlowRuleCheck( isOldFlow )
+SCPFfuncs.oldFlowRuleCheck( isOldFlow, prop[ "ONOSBranch" ] )
 def tests = [:]
-for( String test : SCPF.keySet() ){
+for( String test : SCPFfuncs.SCPF.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = SCPFTest(test, toBeRun, prop)
+
+    pureTestName = test.replaceAll( "WithFlowObj", "" )
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false, SCPFfuncs.SCPF, "", "" )
 }
 
-def now = new Date()
+start = funcs.getCurrentTime()
 // run the tests
 for ( test in tests.keySet() ){
-    tests[test].call()
+    tests[ test ].call()
 }
-try{
-    if( prop["manualRun"] == "false" ){
-        def end = new Date()
-        TimeDuration duration = TimeCategory.minus( end, now )
-        slackSend( color:"#5816EE", message: "SCPF tests ended at: " + end.toString() + "\nTime took : " + duration )
-    }
-}
-catch(all){}
-
-// The testName should be the key from the SCPF map
-def SCPFTest( testName, toBeRun, prop ) {
-    return {
-        catchError{
-            stage(testName) {
-                if ( toBeRun ){
-                    workSpace = "/var/jenkins/workspace/"+testName
-                    node("TestStation-BMs"){
-                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
-                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
-                                 'TestONBranch='+prop["TestONBranch"],
-                                 'ONOSTag='+prop["ONOSTag"],
-                                 'WikiPrefix='+prop["WikiPrefix"],
-                                 'WORKSPACE='+workSpace]){
-                            sh '''#!/bin/bash -l
-                            set -i # interactive
-                            set +e
-                            shopt -s expand_aliases # expand alias in non-interactive mode
-                            export PYTHONUNBUFFERED=1
-
-                            ifconfig
-
-                            echo "ONOS Branch is: $ONOSBranch"
-                            echo "TestON Branch is: $TestONBranch"
-                            echo "Test date: "
-                            date
-
-                            cd ~
-                            export PATH=$PATH:onos/tools/test/bin
-
-                            timeout 240 stc shutdown | head -100
-                            timeout 240 stc teardown | head -100
-                            timeout 240 stc shutdown | head -100
-
-                            cd ~/OnosSystemTest/TestON/bin
-                            git log |head
-                            ./cleanup.sh
-                            ''' + "./cli.py run " + SCPF[testName]['test']
-
-                            // For moving results
-                            sh '''#!/bin/bash -i
-                            set +e
-                            # remove any leftover files from previous tests
-                            sudo rm ${WORKSPACE}/*Result.txt
-
-                            #copy files to workspace
-                            cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
-                            sudo cp *Result.txt ${WORKSPACE}/
-                            cd ${WORKSPACE}/
-                            ls -al
-                            cd '''
-                            // Post Results
-                            if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                                withCredentials([
-                                    string(credentialsId: 'db_pass', variable: 'pass'),
-                                    string(credentialsId: 'db_user', variable: 'user'),
-                                    string(credentialsId: 'db_host', variable: 'host'),
-                                    string(credentialsId: 'db_port', variable: 'port')]) {
-                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
-                                        if (testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat") {
-                                            database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
-                                        }
-                                        sh '''#!/bin/bash
-
-                                        export DATE=\$(date +%F_%T)
-                                        cd ~
-                                        pwd
-                                        cd /tmp
-                                        while read line
-                                        do
-
-                                        echo \$line
-                                        echo ''' + database_command + '''
-
-                                        done< ''' + SCPF[testName]['file'] + '''
-                                        ''' + getGraphCommand( SCPF[testName]['rFile'], SCPF[testName]['extra'], host, port, user, pass, testName, prop["ONOSBranch"], isOldFlow ) + '''
-                                        ''' + ( SCPF[testName]['finalResult'] ? generateCombinedResultGraph( host,port, user, pass, testName, prop["ONOSBranch"], , isOldFlow ) : "" )
-                                }
-                            }
-                            // Fetch Logs
-                            sh '''#!/bin/bash
-                            set +e
-                            cd ~/OnosSystemTest/TestON/logs
-                            echo "Job Name is: ${JOB_NAME}"
-                            TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
-                            echo "########################################################################################"
-                            echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
-                            echo "########################################################################################"
-                            cd $TestONlogDir
-                            if [ $? -eq 1 ]
-                            then
-                                echo "Job name does not match any test suite name to move log!"
-                            else
-                                pwd
-                                for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
-                            fi'''
-                        }
-                    }
-                    if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                        def post = build job: "Pipeline_postjob_BM", propagate: false
-                    }
-                    node("TestStation-BMs"){
-                        resultContents = readFile workSpace + "/" + testName.replaceAll("WithFlowObj","") + "Result.txt"
-                        resultContents = resultContents.split("\n")
-                        if( resultContents[ 0 ] == "1" ){
-                            print "All passed"
-                        }else{
-                            print "Failed"
-                            if( prop["manualRun"] == "false" )
-                                slackSend(color:"FF0000", message: "[" + prop["ONOSBranch"] + "]" + testName + " : Failed!\n"
-                                + resultContents[ 1 ] + "\n"
-                                + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
-                            Failed
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
-def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName, isOldFlow ){
-    result = ""
-    for( extra in extras ){
-        result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName, isOldFlow ) + ";"
-    }
-    return result
-}
-def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName, isOldFlow ){
-    return "Rscript " + graph_generator_directory + rFileName + " " + host + " " + port + " " + user + " " + pass + " " +
-            testName + " " + branchName +  " " + batch + " " + usingOldFlow( isOldFlow, testName ) + graph_saved_directory
-}
-def generateCombinedResultGraph( host, port, user, pass, testName, branchName, isOldFlow ){
-    result = ""
-    for ( int i=0; i< SCPF[testName]['graphTitle'].size(); i++){
-        result += "Rscript " + graph_generator_directory + "SCPFLineGraph.R " + host + " " + port + " " + user + " " + pass + " \"" + SCPF[testName]['graphTitle'][i] + "\" " +
-        branchName + " " + 50 + " \"SELECT " + checkIfList( testName, 'dbCols', i ) + " FROM " + SCPF[testName]['table'] + " WHERE  branch=\'" + branchName + "\' " + sqlOldFlow( isOldFlow, testName ) +
-        checkIfList( testName, 'dbWhere', i ) + " ORDER BY date DESC LIMIT 50\" \"" + SCPF[testName]['y_axis'] + "\" " + hasOldFlow( isOldFlow, testName ) + graph_saved_directory + ";"
-    }
-    return result
-}
-def checkIfList( testName, forWhich, pos ){
-    return SCPF[testName][forWhich].getClass().getName() != "java.lang.String" ? SCPF[testName][forWhich][pos] :  SCPF[testName][forWhich]
-}
-def sqlOldFlow( isOldFlow, testName ){
-    return SCPF[ testName ][ 'flows' ] ? " AND " + ( isOldFlow == "true" ? "" : "NOT " ) + "is_old_flow " : ""
-}
-def oldFlowRuleCheck( isOldFlow ){
-    if( isOldFlow == "false" ){
-        SCPF[ 'SCPFflowTp1g' ][ 'test' ] += " --params TEST/flows=6125"
-        SCPF[ 'SCPFbatchFlowResp' ][ 'test' ] += " --params CASE1000/batchSize=100"
-        SCPF[ 'SCPFintentEventTp' ][ 'test' ] += " --params TEST/numKeys=4000"
-    }
-}
-def affectedByOldFlow( isOldFlow, testName ){
-    return SCPF[ testName ][ 'flows' ] ? "" + isOldFlow + ", " : ""
-}
-def usingOldFlow( isOldFlow, testName ){
-    return SCPF[ testName ][ 'flows' ] ? ( isOldFlow == "true" ? "y" : "n" ) + " " : ""
-}
-def hasOldFlow( isOldFlow, testName ){
-    return ( SCPF[ testName ][ 'flows' ] && isOldFlow == "true" ? "y" : "n" ) + " "
-}
\ No newline at end of file
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
\ No newline at end of file
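The WithFlowObj variants write their results under the base test's name (the old code read testName.replaceAll("WithFlowObj","") + "Result.txt"), which is why the loop above strips the suffix before passing pureTestName to funcs.runTest(). For example:

    assert 'SCPFintentRerouteLatWithFlowObj'.replaceAll( 'WithFlowObj', '' ) == 'SCPFintentRerouteLat'
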
diff --git a/TestON/JenkinsFile/SCPF_Graph_Generator b/TestON/JenkinsFile/SCPF_Graph_Generator
new file mode 100644
index 0000000..435283c
--- /dev/null
+++ b/TestON/JenkinsFile/SCPF_Graph_Generator
@@ -0,0 +1,28 @@
+#!groovy
+SCPFfuncs = evaluate readTrusted( 'PerformanceFuncs.groovy' )
+SCPFfuncs.init()
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initialize( "SCPF", SCPFfuncs );
+
+def prop = null
+prop = funcs.getProperties()
+
+def Tests = params.Test
+isOldFlow = params.isOldFlow
+prop[ "ONOSBranch" ] = params.ONOSbranch
+
+SCPFfuncs.oldFlowRuleCheck( isOldFlow, prop[ "ONOSBranch" ] )
+
+def testsToRun = null
+testsToRun = funcs.getTestsToRun( Tests )
+
+def tests = [:]
+for( String test : testsToRun ){
+    println test
+    pureTestName = test.replaceAll( "WithFlowObj", "" )
+    tests[ test ] = funcs.runTest( test, true, prop, pureTestName, true, [], "", "" )
+}
+
+for ( test in tests.keySet() ){
+    tests[ test ].call()
+}
\ No newline at end of file
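SCPF_Graph_Generator is driven entirely by job parameters ( params.Test, params.ONOSbranch, params.isOldFlow ); the parameter definitions live on the Jenkins job itself rather than in this file. A hypothetical declaration matching those references, noting that isOldFlow is compared as a string ( isOldFlow == "true" ) in PerformanceFuncs.groovy:

    properties( [ parameters( [
        string( name: 'Test', defaultValue: '', description: 'SCPF tests to regraph (newline/;/,/space separated)' ),
        string( name: 'ONOSbranch', defaultValue: 'master', description: 'branch label used in DB queries' ),
        string( name: 'isOldFlow', defaultValue: 'true', description: 'string-compared flag selecting the flow-rule store' )
    ] ) ] )
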
diff --git a/TestON/JenkinsFile/Trend_Graph_Generator b/TestON/JenkinsFile/Trend_Graph_Generator
new file mode 100644
index 0000000..58e600b
--- /dev/null
+++ b/TestON/JenkinsFile/Trend_Graph_Generator
@@ -0,0 +1,27 @@
+#!groovy
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+//generalFuncs = evaluate readTrusted( 'GeneralFuncs.groovy' )
+nodeCluster = params.NodeCluster
+
+graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R"
+graph_saved_directory = "/var/jenkins/workspace/postjob-" + nodeCluster + "/"
+
+funcs.initializeTrend( nodeCluster );
+def prop = null
+prop = funcs.getProperties()
+
+def Tests = params.Test
+prop[ "ONOSBranch" ] = params.ONOSbranch
+
+def testsToRun = null
+testsToRun = funcs.getTestsToRun( Tests )
+
+def tests = [:]
+for( String test : testsToRun ){
+    println test
+    tests[ test ] = funcs.runTest( test, true, prop, test, true, [], graph_generator_file, graph_saved_directory )
+}
+
+for ( test in tests.keySet() ){
+    tests[ test ].call()
+}
\ No newline at end of file
diff --git a/TestON/JenkinsFile/USECASEJenkinsFile b/TestON/JenkinsFile/USECASEJenkinsFile
index bfae6c1..ae23cd5 100644
--- a/TestON/JenkinsFile/USECASEJenkinsFile
+++ b/TestON/JenkinsFile/USECASEJenkinsFile
@@ -1,202 +1,44 @@
 #!groovy
-import groovy.time.*
-// This is a Jenkinsfile for a scripted pipeline for the USECASETest tests
-
-// TODO: Exception handling around steps
-
+funcs = evaluate readTrusted( 'JenkinsCommonFuncs.groovy' )
+funcs.initialize( "USECASE" );
+// This is a Jenkinsfile for a scripted pipeline for the USECASE tests
 def prop = null
-node("TestStation-BMs"){
-    prop = readProperties(file:'/var/jenkins/TestONOS.property')
-}
+prop = funcs.getProperties()
 USECASE = [
-    "FUNCvirNetNB" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCvirNetNB", wiki_file:"FUNCvirNetNBWiki.txt"],
-    "FUNCbgpls" : [wiki_link:prop["WikiPrefix"]+"-"+"FUNCbgpls", wiki_file:"FUNCbgplsWiki.txt"],
-    "VPLSBasic" : [wiki_link:prop["WikiPrefix"]+"-"+"VPLSBasic", wiki_file:"VPLSBasicWiki.txt"],
-    "VPLSfailsafe" : [wiki_link:prop["WikiPrefix"]+"-"+"VPLSfailsafe", wiki_file:"VPLSfailsafeWiki.txt"],
-    "PLATdockertest": [wiki_link:"Docker Images sanity test", wiki_file:"PLATdockertestTableWiki.txt"],
-    "SRSanity": [wiki_link:prop["WikiPrefix"]+"-"+"SR Sanity", wiki_file:"SRSanityWiki.txt"],
-    "SRSwitchFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Switch Failure", wiki_file:"SRSwitchFailureWiki.txt"],
-    "SRLinkFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Link Failure", wiki_file:"SRLinkFailureWiki.txt"],
-    "SROnosFailure": [wiki_link:prop["WikiPrefix"]+"-"+"SR Onos node Failure", wiki_file:"SROnosFailureWiki.txt"],
-    "SRClusterRestart": [wiki_link:prop["WikiPrefix"]+"-"+"SR Cluster Restart", wiki_file:"SRClusterRestartWiki.txt"],
-    "SRDynamic": [wiki_link:prop["WikiPrefix"]+"-"+"SR Dynamic Config", wiki_file:"SRDynamicWiki.txt"],
-    "SRHighAvailability": [wiki_link:prop["WikiPrefix"]+"-"+"SR High Availability", wiki_file:"SRHighAvailabilityWiki.txt"],
-    "USECASE_SdnipFunction": [wiki_link:prop["WikiPrefix"]+"-"+"SDNIP Function", wiki_file:"USECASE_SdnipFunctionWiki.txt"],
-    "USECASE_SdnipFunctionCluster": [wiki_link:prop["WikiPrefix"]+"-"+"SDNIP Function Cluster", wiki_file:"USECASE_SdnipFunctionClusterWiki.txt"]
+    "FUNCvirNetNB" :                [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCvirNetNB", wiki_file:"FUNCvirNetNBWiki.txt" ],
+    "FUNCbgpls" :                   [ wiki_link:prop[ "WikiPrefix" ] + "-" + "FUNCbgpls", wiki_file:"FUNCbgplsWiki.txt" ],
+    "VPLSBasic" :                   [ wiki_link:prop[ "WikiPrefix" ] + "-" + "VPLSBasic", wiki_file:"VPLSBasicWiki.txt" ],
+    "VPLSfailsafe" :                [ wiki_link:prop[ "WikiPrefix" ] + "-" + "VPLSfailsafe", wiki_file:"VPLSfailsafeWiki.txt" ],
+    "PLATdockertest":               [ wiki_link:"Docker Images sanity test", wiki_file:"PLATdockertestTableWiki.txt" ],
+    "SRSanity":                     [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Sanity", wiki_file:"SRSanityWiki.txt" ],
+    "SRSwitchFailure":              [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Switch Failure", wiki_file:"SRSwitchFailureWiki.txt" ],
+    "SRLinkFailure":                [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Link Failure", wiki_file:"SRLinkFailureWiki.txt" ],
+    "SROnosFailure":                [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Onos node Failure", wiki_file:"SROnosFailureWiki.txt" ],
+    "SRClusterRestart":             [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Cluster Restart", wiki_file:"SRClusterRestartWiki.txt" ],
+    "SRDynamic":                    [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR Dynamic Config", wiki_file:"SRDynamicWiki.txt" ],
+    "SRHighAvailability":           [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SR High Availability", wiki_file:"SRHighAvailabilityWiki.txt" ],
+    "USECASE_SdnipFunction":        [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SDNIP Function", wiki_file:"USECASE_SdnipFunctionWiki.txt" ],
+    "USECASE_SdnipFunctionCluster": [ wiki_link:prop[ "WikiPrefix" ] + "-" + "SDNIP Function Cluster", wiki_file:"USECASE_SdnipFunctionClusterWiki.txt" ]
 ]
-
-table_name = "executed_test_tests"
-result_name = "executed_test_results"
 graph_generator_file = "~/OnosSystemTest/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R"
-graph_saved_directory = "/var/jenkins/workspace/Pipeline_postjob_BM/"
+graph_saved_directory = "/var/jenkins/workspace/postjob-BM/"
 
-echo("Testcases:")
-testsToRun = prop["Tests"].tokenize("\n;, ")
-for ( String test : testsToRun ) {
-    println test
-}
+echo( "Testcases:" )
+def testsToRun = null
+testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
+funcs.printTestToRun( testsToRun )
 
 def tests = [:]
 for( String test : USECASE.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = USECASETest(test, toBeRun, prop)
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false, USECASE, graph_generator_file, graph_saved_directory )
 }
 
-def now = new Date()
+start = funcs.getCurrentTime()
 // run the tests
 for ( test in tests.keySet() ){
-    tests[test].call()
+    tests[ test ].call()
 }
-try{
-    if( prop["manualRun"] == "false" ){
-        def end = new Date()
-        TimeDuration duration = TimeCategory.minus( end, now )
-        slackSend( color:"#5816EE", message: "USECASE tests ended at: " + end.toString() + "\nTime took : " + duration )
-    }
-}
-catch(all){}
-
-// The testName should be the key from the FUNC
-def USECASETest( testName, toBeRun, prop ) {
-    return {
-        catchError{
-            stage(testName) {
-                if ( toBeRun ){
-                    workSpace = "/var/jenkins/workspace/"+testName
-                    def fileContents = ""
-                    node("TestStation-BMs"){
-                        withEnv(['ONOSBranch='+prop["ONOSBranch"],
-                                 'ONOSJVMHeap='+prop["ONOSJVMHeap"],
-                                 'TestONBranch='+prop["TestONBranch"],
-                                 'ONOSTag='+prop["ONOSTag"],
-                                 'WikiPrefix='+prop["WikiPrefix"],
-                                 'WORKSPACE='+workSpace]){
-                            sh '''#!/bin/bash -l
-                            set -i # interactive
-                            set +e
-                            shopt -s expand_aliases # expand alias in non-interactive mode
-                            export PYTHONUNBUFFERED=1
-
-                            ifconfig
-
-                            echo "ONOS Branch is: $ONOSBranch"
-                            echo "TestON Branch is: $TestONBranch"
-                            echo "Test date: "
-                            date
-
-                            cd ~
-                            export PATH=$PATH:onos/tools/test/bin
-
-                            . .bash_killcmd
-                            killTestONall
-                            onos-group uninstall
-                            timeout 240 stc teardown | head -100
-
-                            cd ~/OnosSystemTest/TestON/bin
-                            git log |head
-                            ./cleanup.sh -f
-                            ''' + "./cli.py run " + testName + '''
-                            ./cleanup.sh -f
-                            cd ~/onos/tools/package/config
-                            git clean -df'''
-
-                            // For the Wiki page
-                            sh '''#!/bin/bash -i
-                            set +e
-                            echo "ONOS Branch is: ${ONOSBranch}"
-                            echo "TestON Branch is: ${TestONBranch}"
-
-                            echo "Job name is: "''' + testName + '''
-                            echo "Workspace is: ${WORKSPACE}/"
-
-                            echo "Wiki page to post is: ${WikiPrefix}-"
-
-                            # remove any leftover files from previous tests
-                            sudo rm ${WORKSPACE}/*Wiki.txt
-                            sudo rm ${WORKSPACE}/*Summary.txt
-                            sudo rm ${WORKSPACE}/*Result.txt
-                            sudo rm ${WORKSPACE}/*.csv
-
-                            #copy files to workspace
-                            cd `ls -t ~/OnosSystemTest/TestON/logs/*/ | head -1 | sed 's/://'`
-                            sudo cp *.txt ${WORKSPACE}/
-                            sudo cp *.csv ${WORKSPACE}/
-                            cd ${WORKSPACE}/
-                            for i in *.csv
-                                do mv "$i" "$WikiPrefix"-"$i"
-                            done
-                            ls -al
-                            cd '''
-
-                            if( prop["manualRun"] == "false" || prop["postResult"] == "true"  ){
-                                // Post Results
-                                withCredentials([
-                                    string(credentialsId: 'db_pass', variable: 'pass'),
-                                    string(credentialsId: 'db_user', variable: 'user'),
-                                    string(credentialsId: 'db_host', variable: 'host'),
-                                    string(credentialsId: 'db_port', variable: 'port')]) {
-                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\""
-
-                                        sh '''#!/bin/bash
-                                        export DATE=\$(date +%F_%T)
-                                        cd ~
-                                        pwd
-                                        sed 1d ''' + workSpace + "/" + prop["WikiPrefix"] + "-" + testName + '''.csv | while read line
-                                        do
-                                        echo \$line
-                                        echo ''' + database_command + '''
-
-                                        done
-                                        Rscript ''' + graph_generator_file + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + prop["ONOSBranch"] + " 20 " + graph_saved_directory
-
-                                }
-                            }
-                            // Fetch Logs
-                            sh '''#!/bin/bash
-                            set +e
-                            cd ~/OnosSystemTest/TestON/logs
-                            echo "Job Name is: " + ''' + testName + '''
-                            TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
-                            echo "########################################################################################"
-                            echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
-                            echo "########################################################################################"
-                            cd $TestONlogDir
-                            if [ $? -eq 1 ]
-                            then
-                                echo "Job name does not match any test suite name to move log!"
-                            else
-                                pwd
-                                for i in $OC{1..7}; do onos-fetch-logs $i || echo log does not exist; done
-                            fi
-                            cd'''
-                            fileContents = readFile workSpace+"/"+USECASE[testName]['wiki_file']
-
-                        }
-                    }
-                    if( prop["manualRun"] == "false" || prop["postResult"] == "true" ){
-                        def post = build job: "Pipeline_postjob_BM", propagate: false,
-                                    parameters: [
-                                        string(name: 'Wiki_Contents', value: fileContents),
-                                        string(name: 'Wiki_Link', value: USECASE[testName]['wiki_link'])
-                                    ]
-                    }
-                    node("TestStation-BMs"){
-                        resultContents = readFile workSpace + "/" + testName + "Result.txt"
-                        resultContents = resultContents.split("\n")
-                        if( resultContents[ 0 ] == "1" ){
-                            print "All passed"
-                        }else{
-                            print "Failed"
-                            if( prop["manualRun"] == "false" )
-                                slackSend(color:"FF0000", message: "[" + prop["ONOSBranch"] + "]" + testName + " : Failed!\n"
-                                                                    + resultContents[ 1 ] + "\n"
-                                                                    + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
-                            Failed
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+funcs.generateOverallGraph( prop, USECASE, graph_saved_directory )
\ No newline at end of file
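After this change all four Jenkinsfiles share one shape: build a map from stage name to a closure returned by funcs.runTest(), then call each closure in order. Reduced to its essentials (test names here are illustrative only):

    def tests = [:]
    for( String test : [ 'FUNCflow', 'FUNCintent' ] ){
        def t = test                                           // fresh binding so each closure keeps its own name
        tests[ 'Running ' + test ] = { echo 'running ' + t }   // stand-in for funcs.runTest( ... )
    }
    for ( test in tests.keySet() ){
        tests[ test ].call()                                   // sequential, in insertion order
    }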