Merge "Fix for SRMulticast"
diff --git a/TestON/JenkinsFile/CHO_Graph_Generator b/TestON/JenkinsFile/CHO_Graph_Generator
index 424f9a3..04a6f74 100644
--- a/TestON/JenkinsFile/CHO_Graph_Generator
+++ b/TestON/JenkinsFile/CHO_Graph_Generator
@@ -1,36 +1,80 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for the graph-generator-CHO Jenkins job.
+
+// Read the file that contains the dependency paths.
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+// Read the parameters passed in from Jenkins.
 branches = params.ONOSbranch
 hours = params.hours
+
+// split the branches string into a list, separating on newlines, semicolons, commas, or spaces
 branchList = branches.tokenize( "\n;, " )
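+// e.g. an ONOSbranch parameter of "master;1.13 1.12" yields [ "master", "1.13", "1.12" ].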
+
+// Initialize the paths.
+
+// script_file is the path to the R script.
 script_file = fileRelated.trendCHO
+// saving_directory is the directory where the generated graphs are saved.
 saving_directory = fileRelated.jenkinsWorkspace + "postjob-Fabric5/"
 scriptDir = fileRelated.CHOScriptDir
 
+// create a bash script that will generate the graph
 graphScript = generateGraphScript( branchList )
 
 stage( 'Generating-Graph' ){
+    // This will run on the TestStation-Fabric5s node.
     node( "TestStation-Fabric5s" ){
+        // run the bash script on this node.
         runScript( graphScript )
     }
 }
+// Stage that triggers the postjob.
+// It needs to be executed outside the current node to avoid a deadlock.
 stage( 'posting-result' ){
     postJob()
 }
 
 def generateGraphScript( branchList ){
+    // Generate the bash script that runs the Rscript to make the graphs.
     graphScript = ''''''
+
+    // There may be multiple branches to generate graphs for.
     for( branch in branchList ){
         branchDir = scriptDir + branch + "/"
         graphScript += '''export BRANCH=''' + branchDir + '''
+                          # make the branch dir if it does not exist.
                           mkdir ''' + branchDir + ''';
+                          # inside the branchDir, check whether graphs already exist
                           if [ ! -f ''' + branchDir + '''existing.txt ]; then
+                             # on the first run, copy the .csv files.
                              cp *.csv ''' + branchDir + ''';
+                             # mark that the directory has already been created.
                              echo "1" > ''' + branchDir + '''existing.txt;
                           fi;
+                          # run log-summary, which exports the status
                           bash log-summary;''' + '''
+                          # run the Rscript with its parameters.
                           Rscript ''' +  script_file + ' ' + branchDir + 'event.csv ' +
                                 branchDir + 'failure.csv ' + branchDir + 'error.csv ' +
                                 branch + ' 60 ' + hours + ' ' +  saving_directory + ''';
@@ -40,6 +84,7 @@
     return graphScript
 }
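+// As an illustrative sketch, for a single branch "master" the generated script expands roughly to:
+//   export BRANCH=<scriptDir>/master/
+//   mkdir <scriptDir>/master/; ...
+//   Rscript <script_file> <branchDir>event.csv <branchDir>failure.csv <branchDir>error.csv master 60 <hours> <saving_directory>;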
 def runScript( graphScript ){
+    // Run a bash script that initializes the environment and runs the graph generation.
     sh '''#!/bin/bash -l
           set -i
           set +e
@@ -48,6 +93,7 @@
           ''' + graphScript
 }
 def postJob(){
-        jobToRun = "postjob-Fabric5"
-        build job: jobToRun, propagate: false
+    // Trigger the Jenkins job called `postjob-Fabric5`.
+    jobToRun = "postjob-Fabric5"
+    build job: jobToRun, propagate: false
 }
diff --git a/TestON/JenkinsFile/FUNCJenkinsFile b/TestON/JenkinsFile/FUNCJenkinsFile
index c063ee2..613dbf5 100644
--- a/TestON/JenkinsFile/FUNCJenkinsFile
+++ b/TestON/JenkinsFile/FUNCJenkinsFile
@@ -1,24 +1,55 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for FUNC-pipeline-<branch>
+
+// read the dependency files
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
+// initialize funcs with the FUNC category
 funcs.initialize( "FUNC" );
 
 // This is a Jenkinsfile for a scripted pipeline for the FUNC tests
 def prop = null
+
+// Read the TestONOS.property from the VM
 prop = funcs.getProperties()
+
+// get the list of the FUNC tests for this branch.
 FUNC = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "FUNC" ]
 
+// initialize the graph file and directory paths
 graph_generator_file = fileRelated.trendIndividual
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-VM/"
 
+// get the list of tests to run
 echo( "Testcases:" )
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
 funcs.printTestToRun( testsToRun )
 
+// save the function that runs each test into the dictionary; they are run sequentially below.
 def tests = [:]
 for( String test : FUNC.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -26,10 +57,16 @@
     tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, FUNC, graph_generator_file, graph_saved_directory )
 }
 
+// get the start time of the test.
 start = funcs.getCurrentTime()
-// run the tests
+
+// run the tests sequentially.
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
+
+// generate the overall graph of the FUNC tests.
 funcs.generateOverallGraph( prop, FUNC, graph_saved_directory )
+
+// notify Slack that the FUNC tests have finished.
 funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/FabricJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
index ef805a9..6507fc1 100644
--- a/TestON/JenkinsFile/FabricJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -1,19 +1,48 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 
+// This is the Jenkins script for the fabric-pipeline-trigger
+
+// init the dependency functions
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/TriggerFuncs.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
+// set the ONOS versions.
 current_version = "master"
 previous_version = "1.13"
 before_previous_version = "1.12"
+
+// Initialize the Fabric configuration.
 funcs.initializeTrend( "Fabric" );
 funcs.initialize( "Fabric" )
 triggerFuncs.init( funcs )
 
+// wikiContents holds the contents for https://wiki.onosproject.org/display/ONOS/Automated+Test+Schedule
+// It is only used by VM_BMJenkinsfileTrigger, not here.
 wikiContents = ""
+
+// SR and SR1 are separate so that current_version and previous_version can run on the same machine.
 testcases = [
     "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
     "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
@@ -22,6 +51,8 @@
     "SR1" : [ tests : "", nodeName : [ "Fabric2", "Fabric3" ], wikiContent : "" ],
     "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
 ]
+
+// The first two letters of the test name decide which category the test goes into.
 Prefix_organizer = [
     "FU" : "FUNC",
     "HA" : "HA",
@@ -33,6 +64,7 @@
     "VP" : "USECASE"
 ]
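+// e.g. a test named "FUNCflow" starts with "FU", so it falls into the "FUNC" category.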
 
+// set some variables from the parameters
 manually_run = params.manual_run
 onos_b = current_version
 test_branch = ""
@@ -44,70 +76,104 @@
 print now.toString()
 today = now[ Calendar.DAY_OF_WEEK ]
 
+// For a manual run, set the ONOS version to the one that was passed in.
+// Currently, SR-pipeline-manually is not supported due to the special way it is executed.
 if ( manually_run ){
     onos_b = params.ONOSVersion
 }
+
+// get the list of the tests from JenkinsTestONTests.groovy
 AllTheTests = test_lists.getAllTheTests( onos_b )
 
+
 day = ""
+
+// each *_choices string holds the list of tests to run for that category.
 SCPF_choices = ""
 USECASE_choices = ""
 FUNC_choices = ""
 HA_choices = ""
 SR_choices = ""
+
+// initialize the graph-generating file paths.
 stat_graph_generator_file = fileRelated.histogramMultiple
 pie_graph_generator_file = fileRelated.pieMultiple
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-Fabric/"
 
+// get post_result; it only has an effect for manual runs.
 post_result = params.PostResult
 if( !manually_run ){
+    // For automated runs, post a starting message to the channel.
     slackSend( channel:'sr-failures', color:'#03CD9F',
                message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
                         + "Starting tests on : " + now.toString()
                         + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
 
+    // SR_choices collects the Segment Routing tests to run.
     SR_choices += adder( "SR", "basic", true )
     if ( today == Calendar.FRIDAY ){
+        // on Fridays, also run the tests in the extra_A category
         SR_choices += adder( "SR", "extra_A", true )
     } else if( today == Calendar.SATURDAY ){
+        // on Saturdays, add the tests in the extra_B category
         SR_choices += adder( "SR", "extra_B", true )
     }
+    // remove the trailing comma after the last test name.
     SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
 }
+
+
 if ( manually_run ){
     testcases = triggerFuncs.organize_tests( params.Tests, testcases )
 
     isOldFlow = params.isOldFlow
     println "Tests to be run manually : "
 }else{
+    // set the list of the tests to run.
     testcases[ "SR" ][ "tests" ] = SR_choices
     testcases[ "SR1" ][ "tests" ] = SR_choices
     println "Defaulting to " + day + " tests:"
 }
 
+// print the list of tests to run on Jenkins
 triggerFuncs.print_tests( testcases )
 
+// This holds the blocks of code to be run on each Fabric machine.
 def runTest = [
     "Fabric2" : [:],
     "Fabric3" : [:]
 ]
 if ( manually_run ){
+    // manual-run case.
     for( String test in testcases.keySet() ){
         println test
+        // Unless the category's test list is empty, save its run block into the dictionary.
         if ( testcases[ test ][ "tests" ] != "" ){
             runTest[ testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ], test, manually_run, onos_tag )
         }
     }
 }else{
+    // For automated runs, the current and previous versions go to Fabric2 and before_previous_version goes to Fabric3.
     runTest[ "Fabric2" ][ "SR1" ] = triggerFuncs.trigger_pipeline( current_version, testcases[ "SR1" ][ "tests" ], testcases[ "SR1" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
     runTest[ "Fabric2" ][ "SR" ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
     runTest[ "Fabric3" ][ "SR" ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 1 ], "SR", manually_run, onos_tag )
 }
 
 def finalList = [:]
+
+// Run each category of tests sequentially on each machine.
+// In our case, SR1 runs first and then SR on Fabric2, while Fabric3 runs just SR.
 finalList[ "Fabric2" ] = triggerFuncs.runTestSeq( runTest[ "Fabric2" ] )
 finalList[ "Fabric3" ] = triggerFuncs.runTestSeq( runTest[ "Fabric3" ] )
+
+// Then run the Fabric2 and Fabric3 sequences concurrently.
+// In our case,
+//                      ----> Fabric2 : current -> previous
+// This pipeline ----->
+//                      ----> Fabric3 : before_previous
 parallel finalList
+
+// How the pie graphs would be generated; not supported for SegmentRouting yet.
 /*
 if ( !manually_run ){
     funcs.generateStatGraph( "TestStation-Fabric2s",
@@ -123,7 +189,12 @@
                              pie_graph_generator_file,
                              graph_saved_directory )
 }*/
+
+// Adds the tests of a specific category and set to the result list.
 def adder( testCat, set, getResult ){
+    // testCat : test category ( e.g. FUNC, HA, SR ... )
+    // set : set of the tests ( e.g. basic, extra_A ... )
+    // if getResult is true, the tests are added to the result.
     result = ""
     for( String test in AllTheTests[ testCat ].keySet() ){
         if( AllTheTests[ testCat ][ test ][ set ] ){
@@ -133,6 +204,9 @@
     }
     return result
 }
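+// e.g. adder( "SR", "basic", true ) might return "SRBridging,SRRouting," -- the trailing
+// comma is removed later by triggerFuncs.lastCommaRemover.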
+
+// check which node to run on.
+// 1.12 runs on Fabric3; 1.13 and master run on Fabric2.
 def nodeOn( branch ){
     return branch == "1.12" ? 1 : 0;
 }
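+// e.g. nodeOn( "1.12" ) returns 1 ( Fabric3 ); any other branch returns 0 ( Fabric2 ).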
diff --git a/TestON/JenkinsFile/HAJenkinsFile b/TestON/JenkinsFile/HAJenkinsFile
index 7a0638d..c1bd8bf 100644
--- a/TestON/JenkinsFile/HAJenkinsFile
+++ b/TestON/JenkinsFile/HAJenkinsFile
@@ -1,23 +1,55 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for HA-pipeline-<branch>
+
+// read the dependency files
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
+// initialize funcs with the HA category
 funcs.initialize( "HA" );
-// This is a Jenkinsfile for a scripted pipeline for the HA tests
+
+// This is a Jenkinsfile for a scripted pipeline for the HA tests
 def prop = null
+
+// Read the TestONOS.property from the VM
 prop = funcs.getProperties()
+
+// get the list of the HA tests for this branch.
 HA = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "HA" ]
 
+// initialize the graph file and directory paths
 graph_generator_file = fileRelated.trendIndividual
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-VM/"
 
+// get the list of tests to run
 echo( "Testcases:" )
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
 funcs.printTestToRun( testsToRun )
 
+// save the function that runs each test into the dictionary; they are run sequentially below.
 def tests = [:]
 for( String test : HA.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -25,10 +57,15 @@
     tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, HA, graph_generator_file, graph_saved_directory )
 }
 
+// get the start time of the test.
 start = funcs.getCurrentTime()
-// run the tests
+// run the tests sequentially.
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
+
+// generate the overall graph of the HA tests.
 funcs.generateOverallGraph( prop, HA, graph_saved_directory )
+
+// notify Slack that the HA tests have finished.
 funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index 6625355..a2996df 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -1,9 +1,34 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for all-pipeline-trigger
+
+// read the dependency functions
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 
+// get the current time; for automated runs, a start notification is sent below.
 def now = funcs.getCurrentTime()
 print now.toString()
 today = now[ Calendar.DAY_OF_WEEK ]
+
+// read the parameters passed in from Jenkins.
 machines = params.machines
 manually_run = params.manual_run
 if( !manually_run ){
@@ -13,17 +38,20 @@
                         + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
 }
 
+// split the machines string into a list and set up the dictionary.
 machineList = machines.tokenize( "\n;, " )
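+// e.g. a machines parameter of "VM;BM" yields [ "VM", "BM" ], which triggers the
+// VM-pipeline-trigger and BM-pipeline-trigger jobs below.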
 machineOn = [:]
 
+// save each machine's trigger-job closure into the dictionary.
 for (machine in machineList){
     print( machine )
     machineOn[ machine ] = triggerJob( machine )
 }
 
+// run the triggering jobs concurrently.
 parallel machineOn
 
-
+// Function that triggers a specific machine's job from the current pipeline.
 def triggerJob( on ){
     return{
         jobToRun = on + "-pipeline-trigger"
diff --git a/TestON/JenkinsFile/Overall_Graph_Generator b/TestON/JenkinsFile/Overall_Graph_Generator
index f08f18a..41b1ccb 100644
--- a/TestON/JenkinsFile/Overall_Graph_Generator
+++ b/TestON/JenkinsFile/Overall_Graph_Generator
@@ -1,18 +1,45 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for manual-graph-generator-overall
+
+// read the dependency functions.
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
+// set the file and directory paths.
 stat_graph_generator_file = fileRelated.histogramMultiple
 pie_graph_generator_file = fileRelated.pieMultiple
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-VM/"
 
+// initialize the trend setup to VM, which generates the pie and histogram graphs.
 funcs.initializeTrend( "VM" )
 
 onos_branch = params.ONOSbranch
 AllTheTests = test_lists.getAllTheTests("")
 
+// generate the graphs and post the result on TestStation-VMs. Right now, all the pie and histogram
+// graphs are saved on the VM.
 funcs.generateStatGraph( "TestStation-VMs",
                          onos_branch,
                          AllTheTests,
diff --git a/TestON/JenkinsFile/README b/TestON/JenkinsFile/README
new file mode 100644
index 0000000..18307e2
--- /dev/null
+++ b/TestON/JenkinsFile/README
@@ -0,0 +1,68 @@
+Jenkins scripts for Jenkins Pipeline jobs
+*****************************************
+
+All the files under the dependencies folder are dependencies of the top-level Jenkins files.
+
+Some tips for the Jenkins files:
+
+
+1. If you want to change the branches run on weekdays and weekends (VM, BM):
+
+  1) go to VM_BMJenkinsfileTrigger
+  2) change the values on line 31:
+     current_version = "master"
+     previous_version = "1.13"
+     before_previous_version = "1.12"
+
+
+2. If you want to change the branches run for the Segment Routing tests:
+
+  1) go to the FabricJenkinsfileTrigger
+  2) change the variable values on line 32:
+     current_version = "master"
+     previous_version = "1.13"
+     before_previous_version = "1.12"
+  3) If you want a certain branch to run on a different machine, then change
+     a) line 157 - 159:
+        testcases[ "SR1" ][ "nodeName" ][ <node you want> ]  ( 0 : Fabric2, 1 : Fabric3 )
+     b) Then, go to dependencies/JenkinsCommonFuncs.groovy
+        change def fabricOn( branch ) on line 86.
+        Currently, only 1.12 runs on Fabric3 and the rest run on Fabric2.
+     c) Make sure to change the URLs on the wiki as well. If 1.12 runs on Fabric2 instead of Fabric3, then you
+        have to update the URL from
+          https://jenkins.onosproject.org/view/QA/job/postjob-Fabric3/lastSuccessfulBuild/artifact/SRBridging_onos-1.12_20-builds_graph.jpg
+
+          to
+
+          https://jenkins.onosproject.org/view/QA/job/postjob-Fabric2/lastSuccessfulBuild/artifact/SRBridging_onos-1.12_20-builds_graph.jpg
+
+
+3. If you want to upload additional logs for the Segment Routing tests:
+
+   1) go to dependencies/JenkinsCommonFuncs.groovy,
+   2) change def copyLogs( testName ) on line 174.
+   3) you may change/add/remove files there.
+
+
+4. If you want to add a new test ( non-SCPF tests like FUNC, HA, USECASE, and SR ):
+
+   1) go to dependencies/JenkinsTestONTests.groovy.
+   2) Add the new test to the list.
+   3) Make sure to also create a new wiki page for the wiki link before running the test.
+      Since publish-to-confluence does not create a new page, the page has to exist before the results get published.
+   4) If your new test's category is not scheduled:
+       a) For non-Segment Routing test :
+          i) go to VM_BMJenkinsfileTrigger
+          ii) add your new category to any day you want it run, after line 223.
+       b) For Segment Routing test:
+          i) go to FabricJenkinsfileTrigger
+          ii) add your new category after line 113.
+   5) add your new graph to the wiki page.
+
+5. If you want to add a new test ( SCPF ):
+   1) Outside of the Jenkins files: add the new table, function, view, and its rule to the database first.
+   2) go to dependencies/JenkinsTestONTests.groovy and follow the 4-step procedure for scheduling.
+   3) go to dependencies/PerformanceFuncs.groovy and add the new test to the dictionary.
+   4) The file explains the details of what to add.
+   5) Make an Rscript for that test.
+   6) add your new graph to the wiki page.
\ No newline at end of file
diff --git a/TestON/JenkinsFile/SCPFJenkinsFile b/TestON/JenkinsFile/SCPFJenkinsFile
index 5f039b5..ee242e1 100644
--- a/TestON/JenkinsFile/SCPFJenkinsFile
+++ b/TestON/JenkinsFile/SCPFJenkinsFile
@@ -1,19 +1,48 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for SCPF-pipeline-<branch>
+
+// read the dependency files
 SCPFfuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy' )
 SCPFfuncs.init()
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
+
+// init funcs with the SCPF specification
 funcs.initialize( "SCPF", SCPFfuncs );
-// This is a Jenkinsfile for a scripted pipeline for the SCPF tests
+
+// read the information from TestONOS.property on the BM
 def prop = null
 prop = funcs.getProperties()
 
+// get the list of tests to run.
 echo( "Testcases:" )
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
 funcs.printTestToRun( testsToRun )
 
+// check whether it is using the old flow.
 isOldFlow = prop[ "isOldFlow" ] == "true"
 SCPFfuncs.oldFlowRuleCheck( isOldFlow, prop[ "ONOSBranch" ] )
+
+// save each test's run function into the dictionary
 def tests = [:]
 for( String test : SCPFfuncs.SCPF.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -23,9 +52,12 @@
     tests[ stepName ] = funcs.runTest( test, toBeRun, prop, pureTestName, false, SCPFfuncs.SCPF, "", "" )
 }
 
+// get start time
 start = funcs.getCurrentTime()
-// run the tests
+// run the tests sequentially
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
+
+// send the result to Slack after the tests are done.
 funcs.sendResultToSlack( start,  prop["manualRun"], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/SCPF_Graph_Generator b/TestON/JenkinsFile/SCPF_Graph_Generator
index 938faf8..e12b9e2 100644
--- a/TestON/JenkinsFile/SCPF_Graph_Generator
+++ b/TestON/JenkinsFile/SCPF_Graph_Generator
@@ -1,4 +1,26 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for manual-graph-generator-SCPF
+
+// read and set the functions from the dependencies.
 SCPFfuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy' )
 SCPFfuncs.init()
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
@@ -7,6 +29,7 @@
 def prop = null
 prop = funcs.getProperties()
 
+// get the variables from the Jenkins parameters.
 def Tests = params.Test
 isOldFlow = params.isOldFlow
 prop[ "ONOSBranch" ] = params.ONOSbranch
@@ -16,6 +39,7 @@
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( Tests )
 
+// pureTestName is used because we don't want 'WFobj' included in the test name.
 def tests = [:]
 for( String test : testsToRun ){
     println test
@@ -23,6 +47,7 @@
     tests[ test ] = funcs.runTest( test, true, prop, pureTestName, true, [], "", "" )
 }
 
+// generate the graphs sequentially.
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
diff --git a/TestON/JenkinsFile/SRJenkinsFile b/TestON/JenkinsFile/SRJenkinsFile
index c9c00e5..fa3bb04 100644
--- a/TestON/JenkinsFile/SRJenkinsFile
+++ b/TestON/JenkinsFile/SRJenkinsFile
@@ -1,25 +1,56 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for SR-pipeline-<branch>
+
+// read dependencies.
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
+// init the configuration to SR
 fileRelated.init()
 funcs.initialize( "SR" );
+
+// get the name of the Jenkins job.
 jobName = env.JOB_NAME
+
+// additional setup for Segment Routing, because it runs multiple branches concurrently on different machines.
 funcs.additionalInitForSR( jobName )
-// This is a Jenkinsfile for a scripted pipeline for the SR tests
+
+// read TestONOS.property depending on which branch is running ( currently 1.13 and master on Fabric2, 1.12 on Fabric3 )
 def prop = null
 prop = funcs.getProperties()
 SR = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "SR" ]
 
+// set the file paths and directory
 graph_generator_file = fileRelated.trendIndividual
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-Fabric" + funcs.fabricOn( prop[ "ONOSBranch" ] ) + "/"
 
+// get the list of the tests.
 echo( "Testcases:" )
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
 funcs.printTestToRun( testsToRun )
 
+// save the functions that run the tests into the dictionary.
 def tests = [:]
 for( String test : SR.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -27,10 +58,13 @@
     tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, SR, graph_generator_file, graph_saved_directory )
 }
 
+// get start time
 start = funcs.getCurrentTime()
-// run the tests
+
+// run the tests sequentially
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
 //funcs.generateOverallGraph( prop, SR, graph_saved_directory )
+// send the end-of-test notification after the SR tests are done.
 funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/Trend_Graph_Generator b/TestON/JenkinsFile/Trend_Graph_Generator
index 3430b40..b95504d 100644
--- a/TestON/JenkinsFile/Trend_Graph_Generator
+++ b/TestON/JenkinsFile/Trend_Graph_Generator
@@ -1,26 +1,54 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for manual-graph-generator-trend
+
+// read the dependency functions
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
+// read the node cluster ( VM, BM, or Fabric ) from the Jenkins job.
 nodeCluster = params.NodeCluster
 
 funcs.initializeTrend( nodeCluster );
+
+// do an additional check for Fabric, since the machine differs depending on which branch is running.
 if( nodeCluster == "Fabric" )
   funcs.additionalInitForSR( params.ONOSbranch )
 def prop = null
 prop = funcs.getProperties()
 
+// get the list of the tests from the parameters.
 def Tests = params.Test
 prop[ "ONOSBranch" ] = params.ONOSbranch
 
-
+// set some of the file and directory paths
 graph_generator_file = fileRelated.trendIndividual
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-" + nodeCluster + ( nodeCluster == "Fabric" ? funcs.fabricOn( prop[ "ONOSBranch" ] ) : "" ) + "/"
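+// e.g. nodeCluster "VM" gives ".../postjob-VM/", while "Fabric" on branch 1.12 gives ".../postjob-Fabric3/".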
 
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( Tests )
 
+// save the test functions into the dictionary.
 def tests = [:]
 for( String test : testsToRun ){
     println test
diff --git a/TestON/JenkinsFile/USECASEJenkinsFile b/TestON/JenkinsFile/USECASEJenkinsFile
index 6493914..dd5cf88 100644
--- a/TestON/JenkinsFile/USECASEJenkinsFile
+++ b/TestON/JenkinsFile/USECASEJenkinsFile
@@ -1,23 +1,50 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the Jenkins script for USECASE-pipeline-<branch>
+
+// set the dependencies.
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
+// init the funcs configuration to USECASE
 fileRelated.init()
 funcs.initialize( "USECASE" );
-// This is a Jenkinsfile for a scripted pipeline for the USECASE tests
+
+// read the TestONOS.property file and save it as a dictionary
 def prop = null
 prop = funcs.getProperties()
 USECASE = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "USECASE" ]
 
+// set the graph file and directory paths.
 graph_generator_file = fileRelated.trendIndividual
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-BM/"
 
+// get tests to run.
 echo( "Testcases:" )
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( prop[ "Tests" ] )
 funcs.printTestToRun( testsToRun )
 
+// save the functions that run the tests into the dictionary.
 def tests = [:]
 for( String test : USECASE.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -25,10 +52,15 @@
     tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false, USECASE, graph_generator_file, graph_saved_directory )
 }
 
+// get start time of the test.
 start = funcs.getCurrentTime()
-// run the tests
+// run the tests sequentially
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
+
+// send the result to Slack after the USECASE tests are done.
 funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+
+// generate the overall graph for USECASE.
 funcs.generateOverallGraph( prop, USECASE, graph_saved_directory )
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
index 119b427..729cb89 100644
--- a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -1,17 +1,47 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 
+// This is the Jenkins script for vm-pipeline-trigger or bm-pipeline-trigger
+
+// set the dependency functions.
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 test_lists = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy' )
 triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/TriggerFuncs.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
+// set the ONOS versions
 fileRelated.init()
 current_version = "master"
 previous_version = "1.13"
 before_previous_version = "1.12"
+
+// init the trend graphs to be on the VM.
 funcs.initializeTrend( "VM" );
 triggerFuncs.init( funcs )
+
+// contents for the page https://wiki.onosproject.org/display/ONOS/Automated+Test+Schedule
+// which shows the list of scheduled tests for each day.
 wikiContents = ""
+
+// FUNC and HA default to the VM node; SCPF and USECASE to the BM node.
+// SR is not used here.
 testcases = [
     "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
     "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
@@ -19,6 +49,8 @@
     "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
     "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
 ]
+
+// The first two characters of the test name decide which category the test goes into.
 Prefix_organizer = [
     "FU" : "FUNC",
     "HA" : "HA",
@@ -30,7 +62,10 @@
     "VP" : "USECASE"
 ]
 
+// read the parameters from Jenkins
 manually_run = params.manual_run
+
+// default onos_b to current_version.
 onos_b = current_version
 test_branch = ""
 onos_tag = params.ONOSTag
@@ -41,28 +76,39 @@
 print now.toString()
 today = now[ Calendar.DAY_OF_WEEK ]
 
+// get the branch from the parameter if it is manually run
 if ( manually_run ){
     onos_b = params.ONOSVersion
 } else {
+    // otherwise, the version differs over the weekend;
+    // on weekdays, it defaults to current_version.
     if ( today == Calendar.SATURDAY ){
         onos_b = previous_version
     } else if( today == Calendar.SUNDAY ){
         onos_b = before_previous_version
     }
 }
+
+// Get the full list of tests from JenkinsTestONTests.groovy
 AllTheTests = test_lists.getAllTheTests( onos_b )
 
+// the list of tests to run is saved in each *_choices string.
 day = ""
 SCPF_choices = ""
 USECASE_choices = ""
 FUNC_choices = ""
 HA_choices = ""
 SR_choices = ""
+
+// init some file and directory paths.
 stat_graph_generator_file = fileRelated.histogramMultiple
 pie_graph_generator_file = fileRelated.pieMultiple
 graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-VM/"
 
+// get post_result from the params; it only matters for manual runs.
 post_result = params.PostResult
+
+// For automated runs, divide the tests and remove the trailing comma from each list.
 if( !manually_run ){
     testDivider( today )
     FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
@@ -72,6 +118,7 @@
     SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
 }
 
+
 if ( manually_run ){
     testcases = triggerFuncs.organize_tests( params.Tests, testcases )
 
@@ -92,6 +139,8 @@
     "VM" : [:],
     "BM" : [:]
 ]
+
+// save each test-running function into the dictionary.
 for( String test in testcases.keySet() ){
     println test
     if ( testcases[ test ][ "tests" ] != "" ){
@@ -99,15 +148,22 @@
     }
 }
 def finalList = [:]
+
+// get the name of the job.
 jobName = env.JOB_NAME
+
+// first set the list of the functions to be run.
 finalList[ "VM" ] = triggerFuncs.runTestSeq( runTest[ "VM" ] )
 finalList[ "BM" ] = triggerFuncs.runTestSeq( runTest[ "BM" ] )
-//parallel finalList
+
+// if the first two characters of the job name are "vm", only call VM;
+// otherwise, only call BM.
 if( jobName.take( 2 ) == "vm" )
     finalList[ "VM" ].call()
 else
     finalList[ "BM" ].call()
 
+// For automated runs, generate the stats graph on the VM.
 if ( !manually_run ){
     funcs.generateStatGraph( "TestStation-VMs",
                              funcs.branchWithPrefix( onos_b ),
@@ -117,9 +173,13 @@
                              graph_saved_directory )
 }
 
+// function that divides the tests depending on the day.
 def testDivider( today ){
     switch ( today ) {
         case Calendar.MONDAY:
+            // The reason Monday calls all the days is that we want to post the test schedule to the wiki
+            // and the Slack channel every Monday.
+            // Only Monday's list of tests is actually generated to run.
             initHtmlForWiki()
             monday( true )
             tuesday( true, false )
@@ -159,8 +219,13 @@
             break
     }
 }
+
+// function for Monday.
 def monday( getResult ){
+    // add header for wiki page script.
     addingHeader( "FUNC" )
+    // add the basic and extra_A categories of the FUNC tests.
+    // put "M" ( Monday ) into the dictionary.
     FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
     FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
     closingHeader( "FUNC" )
@@ -178,6 +243,9 @@
     addingHeader( "USECASE" )
     closingHeader( "USECASE" )
 }
+
+// If getResult is false, the tests are not added to the *_choices lists, but the
+// header and days are still generated.
 def tuesday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
@@ -309,7 +377,15 @@
     USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
     closingHeader( "USECASE" )
 }
+
+// adder returns the list of tests as a comma-separated string.
 def adder( testCat, set, dayAdding, day, getResult ){
+    // testCat : the category of the test ( FUNC, HA, SR ... )
+    // set : the set of tests to run ( basic, extra_A, extra_B ... )
+    // dayAdding : boolean, whether to add the day to the list or not
+    // day : the day being added ( M, T, W, Th ... )
+    // getResult : whether to get the list of tests to run. False returns an empty list,
+    //             and once the list is empty, the tests will not be run.
     result = ""
     for( String test in AllTheTests[ testCat ].keySet() ){
         if( AllTheTests[ testCat ][ test ][ set ] ){
@@ -317,11 +393,14 @@
                 result += test + ","
             if( dayAdding )
                 dayAdder( testCat, test, day )
+            // make the HTML columns for the schedule wiki page.
             makeHtmlColList( testCat, test )
         }
     }
     return result
 }
+
+// Initial part of the wiki page.
 def initHtmlForWiki(){
     wikiContents = '''
     <table class="wrapped confluenceTable">
@@ -352,21 +431,29 @@
                 <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
     }
 }
+
+// open the header cell for a test category.
 def addingHeader( testCategory ){
     testcases[ testCategory ][ 'wikiContent' ] += '''
                 <td class="confluenceTd">
                     <ul>'''
 }
+
+// add a list item for a test to the HTML column.
 def makeHtmlColList( testCategory, testName ){
     testcases[ testCategory ][ 'wikiContent' ] += '''
                         <li>'''+ testName +'''</li>'''
 
 }
+
+// close the category cell in the HTML.
 def closingHeader( testCategory ){
     testcases[ testCategory ][ 'wikiContent' ] += '''
                     </ul>
                 </td>'''
 }
+
+// close the html for the wiki page.
 def closeHtmlForWiki(){
     for( String test in testcases.keySet() ){
         wikiContents += testcases[ test ][ 'wikiContent' ]
@@ -381,6 +468,8 @@
     <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
     <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
 }
+
+// post the result to the wiki page using publish-to-confluence.
 def postToWiki( contents ){
     node( testMachine ){
         workspace = fileRelated.jenkinsWorkspace + "all-pipeline-trigger/"
@@ -391,6 +480,8 @@
                                    workspace + filename )
     }
 }
+
+// append the day to the test's "day" entry in the dictionary.
 def dayAdder( testCat, testName, dayOfWeek ){
     AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
 }
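+// e.g. a test run on Monday and Wednesday ends up with a "day" value like "M,W,".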
diff --git a/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy b/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
index a0c70c9..cae7f6f 100644
--- a/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
@@ -1,11 +1,37 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 
+// This is a dependency Jenkins script.
+// It provides general functionality: building the database command, the basic graph part of the Rscript command, and getting the list of tests.
+
+// make the initial part of the database command
 def database_command_create( pass, host, port, user ){
   return pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c "
 }
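+// e.g. with illustrative arguments ( "pass", "localhost", "5432", "admin" ) this returns:
+//   pass|psql --host=localhost --port=5432 --username=admin --password --dbname onostest -c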
+
+// make the basic graph part for the Rscript
 def basicGraphPart( rFileName, host, port, user, pass, subject, branchName ){
   return " Rscript " + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + subject + " " + branchName
 }
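+// e.g. basicGraphPart( "trend.R", "localhost", "5432", "admin", "pw", "FUNCflow", "master" )
+// returns " Rscript trend.R localhost 5432 admin pw FUNCflow master" ( illustrative values ).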
+
+// take the dictionary of tests and return the list of test names as a string
 def getTestList( tests ){
     list = ""
     for( String test : tests.keySet() )
diff --git a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
index 8c480d3..0b03c69 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
@@ -1,4 +1,26 @@
 #!groovy
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is a dependency Jenkins script.
+// It has the common functions that run the tests and generate the graphs.
+
 import groovy.time.*
 generalFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/GeneralFuncs.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
@@ -6,6 +28,9 @@
 fileRelated.init()
 
 def initializeTrend( machine ){
+  // For initializing any trend graph jobs.
+  // machine : either VM, BM, or Fabric#
+
   table_name = "executed_test_tests"
   result_name = "executed_test_results"
   testMachine = "TestStation-" + machine + "s";
@@ -14,12 +39,19 @@
   isTrend = true
 }
 def initialize( type, SCPFfuncs ){
+  // Initializing for SCPF tests.
+  // type : type of the test ( SR, FUNC, SCPF ... )
+  // SCPFfuncs : the SCPF functions, i.e. PerformanceFuncs.groovy
+
   init( type )
   SCPFfunc = SCPFfuncs
   isSCPF = true
   machine = machineType[ type ]
 }
 def initialize( type ){
+  // initializing for FUNC, HA, SR, and USECASE
+  // type : type of the test ( SR, FUNC, SCPF ... )
+
   init( type )
   SCPFfunc = null
   table_name = "executed_test_tests"
@@ -29,6 +61,8 @@
   isSCPF = false
 }
 def init( type ){
+  // type : type of the test ( SR, FUNC, SCPF ... )
+
   machineType = [ "FUNC"    : "VM",
                   "HA"      : "VM",
                   "SR"      : "Fabric",
@@ -39,6 +73,9 @@
   isTrend = false
 }
 def additionalInitForSR( branch ){
+  // additional setup for SegmentRouting tests; determines the machine depending on the branch being run.
+  // branch : ONOS branch ( master, 1.12, 1.13 ... )
+
   testMachine = ( ( new StringBuilder( testMachine ) ).insert( testMachine.size()-1, fabricOn( branch ) ) ).toString()
   if( isTrend )
     machine += fabricOn( branch )
@@ -47,33 +84,53 @@
   print testMachine
 }
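+// e.g. a testMachine of "TestStation-Fabrics" on branch "master" becomes "TestStation-Fabric2s".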
 def fabricOn( branch ){
+  // get the Fabric machine number for the given ONOS branch.
+  // branch : master, 1.12, 1.13 ...
+
   return branch.reverse().take(4).reverse() == "1.12" ? '3' : '2'
 }
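+// e.g. fabricOn( "onos-1.12" ) returns '3', since the last four characters are "1.12"; any other branch returns '2'.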
 def printType(){
+  // print the test type and test machine that was initialized.
+
   echo testType;
   echo testMachine;
 }
 def getProperties(){
+  // get the properties of the test by reading the TestONOS.property
+
   node( testMachine ){
     return readProperties( file:'/var/jenkins/TestONOS.property' );
   }
 }
 def getTestsToRun( testList ){
+  // get the tests to run by tokenizing the list.
+
   testList.tokenize("\n;, ")
 }
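+// e.g. a Tests property of "FUNCflow, FUNCintent" yields [ "FUNCflow", "FUNCintent" ].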
 def getCurrentTime(){
+  // get the current time in the PST zone.
+
   TimeZone.setDefault( TimeZone.getTimeZone('PST') )
   return new Date();
 }
 def getTotalTime( start, end ){
+  // get the total test time from the start and end times.
+
   return TimeCategory.minus( end, start );
 }
 def printTestToRun( testList ){
+  // print each test in the list.
+
   for ( String test : testList ) {
       println test;
   }
 }
 def sendResultToSlack( start, isManualRun, branch ){
+  // Sends the result of the test to Slack when the test is not run manually.
+  // start : start time of the test
+  // isManualRun : string that is either "false" or "true"
+  // branch : branch of ONOS
+
   try{
     if( isManualRun == "false" ){
         end = getCurrentTime();
@@ -85,7 +142,11 @@
   catch( all ){}
 }
 def initAndRunTest( testName, testCategory ){
-  // after ifconfig : ''' + borrowCell( testName ) + '''
+  // Returns the bash script that
+  // initializes the environment on the machine and runs the test.
+  // testName : name of the test
+  // testCategory : category of the test ( SR, FUNC... )
+
   return '''#!/bin/bash -l
         set -i # interactive
         set +e
@@ -111,6 +172,9 @@
         git clean -df'''
 }
 def copyLogs( testName ){
+  // Bash script fragment that copies the logs and other necessary artifacts for SR tests.
+  // testName : name of the test
+
   result = ""
     if( testType == "SR" ){
       result = '''
@@ -125,6 +189,9 @@
   return result
 }
 def cleanAndCopyFiles( testName ){
+  // Cleans up stale files in the folder and copies the new files from the logs.
+  // testName : name of the test
+
   return '''#!/bin/bash -i
         set +e
         echo "ONOS Branch is: ${ONOSBranch}"
@@ -150,6 +217,9 @@
         cd '''
 }
 def fetchLogs( testName ){
+  // Fetches the ONOS logs from the ONOS nodes into the ONOS System Test logs.
+  // testName : name of the test
+
   return '''#!/bin/bash
   set +e
   cd ~/OnosSystemTest/TestON/logs
@@ -169,14 +239,25 @@
   cd'''
 }
 def isPostingResult( manual, postresult ){
+  // Checks whether the result should be posted:
+  // post when the run is automatic, or when a manual run sets the postResult condition.
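+  // e.g. isPostingResult( "false", "false" ) is true ( an automatic run always posts ),
+  // while isPostingResult( "true", "false" ) is false ( a manual run without postResult does not ).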
+
   return manual == "false" || postresult == "true"
 }
 def postResult( prop, graphOnly ){
+  // Posts the result by triggering the postjob.
+  // prop : property dictionary that was read from the machine
+  // graphOnly : whether this is a graph-generating job
+
   if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
     def post = build job: "postjob-" + ( graphOnly ? machine : machineType[ testType ] ), propagate: false
   }
 }
 def postLogs( testName, prefix ){
+  // Posts the logs of the ONOS jobs, specifically for SR tests.
+  // testName : name of the test
+  // prefix : branch prefix ( master, 1.12, 1.13... )
+
   resultURL = ""
   if( testType == "SR" ){
     def post = build job: "SR-log-" + prefix, propagate: false
@@ -185,9 +266,21 @@
   return resultURL
 }
 def getSlackChannel(){
+  // Gets the name of the Slack channel:
+  // #sr-failures for SR tests, #jenkins-related otherwise.
+
   return "#" + ( testType == "SR" ? "sr-failures" : "jenkins-related" )
 }
 def analyzeResult( prop, workSpace, testName, otherTestName, resultURL, wikiLink, isSCPF ){
+  // Analyzes the result of the test and sends it to Slack if the test failed.
+  // prop : property dictionary
+  // workSpace : workspace where the result file is saved
+  // testName : real name of the test
+  // otherTestName : alternate name of the test for SCPF tests ( e.g. SCPFflowTPFobj )
+  // resultURL : URL of the logs for SR tests; not posted if empty
+  // wikiLink : link to the wiki page where the result was posted
+  // isSCPF : whether this is an SCPF test; if so, the wiki link is not posted
+
   node( testMachine ){
     resultContents = readFile workSpace + "/" + testName + "Result.txt"
     resultContents = resultContents.split("\n")
@@ -209,6 +302,12 @@
   }
 }
 def publishToConfluence( isManualRun, isPostResult, wikiLink, file ){
+  // Publishes the HTML result file to the Confluence wiki.
+  // isManualRun : string "true" or "false"
+  // isPostResult : string "true" or "false"
+  // wikiLink : link of the wiki page to publish to
+  // file : name of the file to be published
+
   if( isPostingResult( isManualRun, isPostResult ) ){
     publishConfluence siteName: 'wiki.onosproject.org', pageName: wikiLink, spaceName: 'ONOS',
                   attachArchivedArtifacts: true, buildIfUnstable: true,
@@ -219,6 +318,16 @@
 
 }
 def runTest( testName, toBeRun, prop, pureTestName, graphOnly, testCategory, graph_generator_file, graph_saved_directory ) {
+  // Runs the test on the machine; contains all the steps : init and run test, copy files, publish result...
+  // testName : name of the test
+  // toBeRun : boolean for whether the test will be run. If not, it is skipped but shows up with an empty result in the pipeline view
+  // prop : property dictionary from the machine
+  // pureTestName : pure name of the test ( e.g. the pureTestName of SCPFflowTpFobj is SCPFflowTp )
+  // graphOnly : whether this is a graph-generating job. If so, only the graph-generating part runs
+  // testCategory : category of the test ( SCPF, SR, FUNC... )
+  // graph_generator_file : Rscript file with the full path.
+  // graph_saved_directory : where the generated graph will be saved to.
+
   return {
       catchError{
           stage( testName ) {
@@ -259,17 +368,15 @@
       }
   }
 }
-def borrowCell( testName ){
-  result = ""
-  if( testType == "SR" ){
-      result = '''
-      cd
-      source ~/borrow.cell
-      '''
-  }
-  return result
-}
 def databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory ){
+  // Inserts the data into the database.
+  // It uses the predefined encrypted variables from Jenkins.
+  // prop : property dictionary that was read from the machine
+  // testName : name of the test
+  // graphOnly : boolean for whether this is a graph-only job
+  // graph_generator_file : Rscript file with the full path.
+  // graph_saved_directory : where the generated graph will be saved to.
+
   if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
       // Post Results
       withCredentials( [
@@ -288,6 +395,17 @@
   }
 }
 def generateCategoryStatsGraph( testMachineOn, manualRun, postresult, stat_file, pie_file, type, branch, testListPart, save_path, pieTestListPart ){
+  // Generates the category stats graphs for the overall tests.
+  // testMachineOn : the machine the graphs will be generated on; TestStation-VMs in most cases
+  // manualRun : string "true" or "false"
+  // postresult : string "true" or "false"
+  // stat_file : file name with the full path of the Rscript for the stats graph
+  // pie_file : file name with the full path of the Rscript for the pie graph
+  // type : type of the test ( USECASE, FUNC, HA )
+  // branch : branch of the test ( master, onos-1.12, onos-1.13 )
+  // testListPart : list of the tests to be included
+  // save_path : path the graphs will be saved to
+  // pieTestListPart : list of the tests for the pie graph
 
   if( isPostingResult( manualRun, postresult ) ){
     node( testMachineOn ){
@@ -307,12 +425,22 @@
     }
 }
 def makeTestList( list, commaNeeded ){
+  // Makes the list of tests into a single string.
+  // list : list of the tests
+  // commaNeeded : whether a trailing comma is needed in the string
+
   return generalFuncs.getTestList( list ) + ( commaNeeded ? "," : "" )
 }
 def createStatsList( testCategory, list, semiNeeded ){
+  // Makes the list for the stats graphs.
+  // testCategory : category of the test
+  // list : list of the tests
+  // semiNeeded : whether a trailing semicolon is needed
+
   return testCategory + "-" + generalFuncs.getTestList( list ) + ( semiNeeded ? ";" : "" )
 }
 def generateOverallGraph( prop, testCategory, graph_saved_directory ){
+  // Generates the overall graph for the tests.
 
   if( isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
     node( testMachine ){
@@ -331,15 +459,23 @@
     }
 }
 def getOverallPieGraph( file, host, port, user, pass, branch, type, testList, yOrN, path ){
+   // Returns the R command for the pie graph.
+
    return generalFuncs.basicGraphPart( file, host, port, user, pass, type, branch ) + " \"" + testList + "\" latest " + yOrN + " " + path
 }
 def sqlCommand( testName ){
+  // Returns the SQL INSERT command for non-SCPF tests.
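+  // For example, with table_name = "executed_test_tests" and result_name = "executed_test_results",
+  // sqlCommand( "FUNCipv6Intent" ) returns ( before the shell substitutes $DATE and the other variables ):
+  //   "INSERT INTO executed_test_tests VALUES('$DATE','executed_test_results','FUNCipv6Intent',$BUILD_NUMBER, '$ONOSBranch', $line);"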
+
   return "\"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\" "
 }
 def graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory, graph_generator_file ){
+  // Gets the graph-generating R command for non-SCPF tests.
+
   return generalFuncs.basicGraphPart( graph_generator_file, host, port, user, pass, testName, prop[ "ONOSBranch" ] ) + " 20 " + graph_saved_directory
 }
 def databasePart( wikiPrefix, testName, database_command ){
+  // Reads the data from the .csv file and inserts it into the database.
+
   return '''
     sed 1d ''' + workSpace + "/" + wikiPrefix + "-" + testName + '''.csv | while read line
     do
@@ -348,6 +484,8 @@
     done '''
 }
 def generateStatGraph( testMachineOn, onos_branch, AllTheTests, stat_graph_generator_file, pie_graph_generator_file, graph_saved_directory ){
+    // Generates the stats graphs for the FUNC, HA, and USECASE categories.
+
     testListPart = createStatsList( "FUNC", AllTheTests[ "FUNC" ], true ) +
                    createStatsList( "HA", AllTheTests[ "HA" ], true ) +
                    createStatsList( "USECASE", AllTheTests[ "USECASE" ], false )
@@ -357,6 +495,7 @@
     generateCategoryStatsGraph( testMachineOn, "false", "true", stat_graph_generator_file, pie_graph_generator_file, "ALL", onos_branch, testListPart, graph_saved_directory, pieTestList )
 }
 def branchWithPrefix( branch ){
+    // Returns the branch name with the "onos-" prefix; master is left as-is.
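+    // e.g. branchWithPrefix( "1.12" ) returns "onos-1.12", while branchWithPrefix( "master" ) returns "master".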
     return ( ( branch != "master" ) ? "onos-" : "" ) + branch
 }
 return this;
diff --git a/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy b/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
index fc89730..f8700c9 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
@@ -1,5 +1,28 @@
 #!groovy
 
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the dependency Jenkins script.
+// It initializes the paths of the Jenkins files and directories.
+
+// Init the directory paths.
 def initLocation(){
     jenkinsFolder = "~/OnosSystemTest/TestON/JenkinsFile/"
     rScriptLocation = jenkinsFolder + "wikiGraphRScripts/"
@@ -7,6 +30,8 @@
     SCPFSpecificLocation = rScriptLocation + "SCPFspecificGraphRScripts/"
     CHOScriptDir = "~/CHO_Jenkins_Scripts/"
 }
+
+// Init the file paths.
 def initFiles(){
     trendIndividual = rScriptLocation + "trendIndividualTest.R"
     trendMultiple = rScriptLocation + "trendMultipleTests.R"
@@ -15,6 +40,8 @@
     histogramMultiple = rScriptLocation + "histogramMultipleTestGroups.R"
     pieMultiple = rScriptLocation + "pieMultipleTests.R"
 }
+
+// Init both the directory and file paths.
 def init(){
     initLocation()
     initFiles()
diff --git a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
index ac27a0c..b23591b 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
@@ -1,6 +1,38 @@
 #!groovy
 
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the dependency Jenkins script.
+// It provides the basic information about the tests for scheduling.
+// Any new test should be added here.
+
+
 def getAllTheTests( wikiPrefix ){
+    // Returns the dictionary of the tests and their categories.
+    // wikiPrefix : master, 1.13, 1.12 ...
+
+    // category : used to distinguish which category runs on which days ( basic, extra_A, extra_B ... )
+    // day : used to display the schedule of the test runs on Slack. It is empty at first but is
+    //     filled out every Monday.
+    // wiki_link : link of the wiki page that is used to publish to Confluence later on. SCPF tests don't need one.
+
     return [
         "FUNC":[
                 "FUNCipv6Intent" : [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCipv6Intent", wiki_file:"FUNCipv6IntentWiki.txt" ],
diff --git a/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy b/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
index a759051..c2dfc5d 100644
--- a/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
@@ -1,12 +1,55 @@
 #!groovy
+
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the dependency Jenkins script.
+// It provides the SCPF-specific functions.
+
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
 def init(){
+    // Init step for the SCPF functions. It performs the mandatory initialization.
+
+    // none, batches, neighbors, times : used as extra parameters when generating graphs.
     none = [ "" ]
     batches = [ 1, 100, 1000 ]
     neighbors = [ 'y', 'n' ]
     times = [ 'y', 'n' ]
+
+    // flows : whether the test is affected by oldFlow or newFlow
+    // test : command of the test to be executed when running the test
+    // table : name of the view table in the database
+    // result : name of the actual table in the database
+    // file : name of the file that contains the test result, used to insert data into the database
+    // rFile : specific Rscript file name used to generate each SCPF graph. For graphs with flowObj, append 'n' or 'y' after the test name
+    // extra : extra conditions used as Rscript parameters. Some of the Rscripts require extra parameters,
+    //         such as neighbors or batches. In that case, one Rscript run is generated per value in extra,
+    //         e.g. one with neighbors = y and another with neighbors = n
+    // finalResult : set if you want to generate the overall graph for the front page
+    // graphTitle : titles for the graphs. It should contain one title per graph being generated.
+    // [Optional]
+    // dbCols : specific database columns to choose for the 50-data overall graph, if there is one
+    // dbWhere : specific WHERE statement with conditions for the 50-data overall graph, if there is one
+    // y_axis : title of the y-axis shown on the 50-data overall graph, if there is one
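+    //
+    // As an annotated illustration, the SCPFcbench entry below reads: the test is not
+    // affected by the flow rule store ( flows:false ), it uses the cbench_bm_tests view
+    // and the cbench_bm_results table, its graph comes from SCPFcbench.R with no extra
+    // parameters ( extra:none ), and finalResult:1 adds a front-page graph titled 'Cbench Test'.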
+
     SCPF = [
         SCPFcbench:                              [ flows:false, test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:[ 'Cbench Test' ], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)' ],
         SCPFhostLat:                             [ flows:false, test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:[ 'Host Latency Test' ], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
@@ -29,6 +72,8 @@
     graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-BM/"
 }
 def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName, isOldFlow ){
+    // Generates the list of Rscript commands for the individual graphs.
+
     result = ""
     for( extra in extras ){
         result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName, isOldFlow ) + ";"
@@ -36,11 +81,13 @@
     return result
 }
 def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName, isOldFlow ){
+    // Generates the Rscript command for an individual graph.
 
     return generalFuncs.basicGraphPart( fileRelated.SCPFSpecificLocation + rFileName, host, port, user, pass, testName, branchName ) +
            " " + batch + " " + usingOldFlow( isOldFlow, testName ) + graph_saved_directory
 }
 def generateCombinedResultGraph( host, port, user, pass, testName, branchName, isOldFlow ){
+    // Generates the Rscript command for the overall front-page graph.
     result = ""
 
     for ( int i=0; i< SCPF[ testName ][ 'graphTitle' ].size(); i++ ){
@@ -51,32 +98,47 @@
     return result
 }
 def checkIfList( testName, forWhich, pos ){
+    // Checks whether the dictionary field is a list or a string; if it is a list, returns the element at pos.
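+    // e.g. checkIfList( 'SCPFcbench', 'graphTitle', 0 ) returns 'Cbench Test', because
+    // graphTitle is a list; a plain string field is returned unchanged regardless of pos.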
+
     return SCPF[ testName ][ forWhich ].getClass().getName() != "java.lang.String" ? SCPF[ testName ][ forWhich ][ pos ] :  SCPF[ testName ][ forWhich ]
 }
 def sqlOldFlow( isOldFlow, testName ){
+    // SQL WHERE clause fragment for checking old flows.
+
     return SCPF[ testName ][ 'flows' ] ? " AND " + ( isOldFlow ? "" : "NOT " ) + "is_old_flow " : ""
 }
 def oldFlowRuleCheck( isOldFlow, branch ){
+    // Records whether the old flow rule store is used; with the new store, adds the flow count parameter to SCPFflowTp1g.
+
     this.isOldFlow = isOldFlow
     if( !isOldFlow ){
         SCPF[ 'SCPFflowTp1g' ][ 'test' ] += " --params TEST/flows=" + ( branch == "onos-1.11" ? "4000" : "3500" )
     }
 }
 def affectedByOldFlow( isOldFlow, testName ){
+    // For the SQL command : if the test is affected by old flow, it returns the old-flow parameter.
     return SCPF[ testName ][ 'flows' ] ? "" + isOldFlow + ", " : ""
 }
 def usingOldFlow( isOldFlow, testName ){
+    // For the Rscript command : whether the test is using old flow.
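+    // e.g. for SCPFcbench ( flows:false ) this returns "", while for a flows:true test it
+    // returns "y " with the old flow store and "n " without it.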
+
     return SCPF[ testName ][ 'flows' ] ? ( isOldFlow ? "y" : "n" ) + " " : ""
 }
 def hasOldFlow( isOldFlow, testName ){
+    // For the Rscript command of the 50-data overall graph.
+
     return ( SCPF[ testName ][ 'flows' ] && isOldFlow ? "y" : "n" ) + " "
 }
 def sqlCommand( testName ){
+    // SQL command for inserting data into the database.
+
     if ( testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat" )
         return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
     return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
 }
 def databasePart( testName, database_command ){
+    // Reads the result file on the machine and inserts it into the database.
+
     return '''
     cd /tmp
     while read line
@@ -86,6 +148,8 @@
     done< ''' + SCPF[ testName ][ 'file' ]
 }
 def getGraphGeneratingCommand( host, port, user, pass, testName, prop ){
+    // returns the combined Rscript command for each test.
+
     return getGraphCommand( SCPF[ testName ][ 'rFile' ], SCPF[ testName ][ 'extra' ], host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) + '''
     ''' + ( SCPF[ testName ][ 'finalResult' ] ? generateCombinedResultGraph( host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) : "" )
 }
diff --git a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
index 96c0855..238d7bc 100644
--- a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
@@ -1,15 +1,42 @@
 #!groovy
 
+// Copyright 2017 Open Networking Foundation (ONF)
+//
+// Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+// the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+// or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+//
+//     TestON is free software: you can redistribute it and/or modify
+//     it under the terms of the GNU General Public License as published by
+//     the Free Software Foundation, either version 2 of the License, or
+//     (at your option) any later version.
+//
+//     TestON is distributed in the hope that it will be useful,
+//     but WITHOUT ANY WARRANTY; without even the implied warranty of
+//     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//     GNU General Public License for more details.
+//
+//     You should have received a copy of the GNU General Public License
+//     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+
+// This is the dependency Jenkins script.
+// It provides the functions that set up the environment of the machine
+//      and trigger the corresponding jobs.
+
 def init( commonFuncs ){
     funcs = commonFuncs
 }
 def lastCommaRemover( str ){
+    // Removes the trailing comma from the string, if present.
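+    // e.g. lastCommaRemover( "FUNCipv6Intent,SCPFcbench," ) returns "FUNCipv6Intent,SCPFcbench".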
+
     if ( str.size() > 0 && str[ str.size() - 1 ] == ',' ){
         str = str.substring( 0,str.size() - 1 )
     }
     return str
 }
 def printDaysForTest( AllTheTests ){
+    // Prints the days on which each test is scheduled to run.
+
     result = ""
     for ( String test in AllTheTests.keySet() ){
         result += test + " : \n"
@@ -22,6 +49,7 @@
     return result
 }
 def runTestSeq( testList ){
+    // Runs the tests sequentially.
     return{
         for ( test in testList.keySet() ){
             testList[ test ].call()
@@ -29,6 +57,8 @@
     }
 }
 def print_tests( tests ){
+    // Prints the list of the tests to be run.
+
     for( String test in tests.keySet() ){
         if( tests[ test ][ "tests" ] != "" ){
             println test + ":"
@@ -37,19 +67,18 @@
     }
 }
 def organize_tests( tests, testcases ){
+    // Organizes each test into its category using its name.
+    // Most of the time this uses the first two characters of the test name,
+    // but there are exceptions like FUNCbgpls or FUNCvirNetNB, since they are now under USECASE.
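+    // e.g. "SCPFcbench" is keyed by the prefix "SC" and "FUNCipv6Intent" by "FU",
+    // while "FUNCbgpls" is forced under the "US" key despite its FUNC prefix.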
+
     testList = tests.tokenize( "\n;, " )
     for( String test in testList )
         testcases [ Prefix_organizer[ ( test == "FUNCbgpls" || test == "FUNCvirNetNB" ? "US" : ( test[ 0 ] + test[ 1 ] ) ) ] ][ "tests" ] += test + ","
     return testcases
 }
-def borrow_mn( jobOn ){
-    result = ""
-    if( jobOn == "SR" ){
-        result = "~/cell_borrow.sh"
-    }
-    return result
-}
 def trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
+    // Triggering function that sets up the environment and determines which pipeline to trigger.
+
     println jobOn + "-pipeline-" + ( manuallyRun ? "manually" : branch )
     def wiki = branch
     branch = funcs.branchWithPrefix( branch )
@@ -64,14 +93,18 @@
     build job: jobToRun, propagate: false
 }
 def trigger_pipeline( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
-// nodeName : "BM" or "VM"
-// jobOn : "SCPF" or "USECASE" or "FUNC" or "HA"
+    // nodeName : "BM" or "VM"
+    // jobOn : "SCPF" or "USECASE" or "FUNC" or "HA"
+    // This returns the trigger wrapped in a closure ( return{ ... } ) so that it is not
+    // executed immediately when this function is called to assign it to a variable.
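+    //
+    // A minimal sketch of the intended use ( argument values assumed for illustration ):
+    //   pipeline = trigger_pipeline( "master", tests, "VM", "FUNC", false, "" )
+    //   pipeline.call()   // only now does trigger() actually run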
     return{
         trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag )
     }
 }
 // export Environment properties.
 def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run, onosTag, isOldFlow ){
+    // Exports the environment properties to the machine.
+
     stage( "export Property" ){
         sh '''
             echo "ONOSBranch=''' + onos_branch +'''" > /var/jenkins/TestONOS.property
@@ -88,6 +121,8 @@
 }
 // Initialize the environment Setup for the onos and OnosSystemTest
 def envSetup( onos_branch, test_branch, onos_tag, jobOn, manuallyRun ){
+    // Sets up the environment using the bash script.
+
     stage( "envSetup" ) {
-        // after env: ''' + borrow_mn( jobOn ) + '''
         sh '''#!/bin/bash -l
@@ -101,6 +136,8 @@
     }
 }
 def tagCheck( onos_tag, onos_branch ){
+    // Checks the ONOS tag; checks out the branch when the tag is empty, otherwise the tag.
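+    // e.g. tagCheck( "", "master" ) returns "git checkout master", since an empty tag falls back to the branch.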
+
     result = "git checkout "
     if ( onos_tag == "" )
         result += onos_branch //create new local branch
@@ -109,6 +146,8 @@
     return result
 }
 def preSetup( onos_branch, test_branch, onos_tag, isManual ){
+    // Pre-setup that cleans up and checks out the corresponding branch.
+
     result = ""
     if( !isManual ){
         result = '''echo -e "\n#####  Set TestON Branch #####"
@@ -152,6 +191,8 @@
     return result
 }
 def oldFlowCheck( jobOn, onos_branch ){
+    // Checks whether the old flow rule store should be used; if so, switches to it. Only affects SCPF.
+
     result = ""
     if( jobOn == "SCPF" && ( onos_branch== "master" || onos_branch=="onos-1.12" ) )
         result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/''' + ( isOldFlow ? "DistributedFlowRuleStore" : "ECFlowRuleStore" ) + '''.java
@@ -159,6 +200,8 @@
     return result
 }
 def postSetup( onos_branch, test_branch, onos_tag, isManual ){
+    // Post-setup that builds ONOS using buck.
+
     result = ""
     if( !isManual ){
         result = '''echo -e "\n##### build ONOS skip unit tests ######"
@@ -176,6 +219,8 @@
     return result
 }
 def generateKey(){
+    // Generates the ONOS cluster key.
+
     try{
         sh '''
         #!/bin/bash -l
@@ -187,15 +232,5 @@
         '''
     }catch( all ){}
 }
-def returnCell( nodeName ){
-    node( "TestStation-" + nodeName + "s" ){
-        sh '''#!/bin/bash -l
-            set +e
-            . ~/.bashrc
-            env
-            ~/./return_cell.sh
-            '''
-    }
-}
 
 return this;
diff --git a/TestON/JenkinsFile/scripts/README.md b/TestON/JenkinsFile/scripts/README.md
deleted file mode 100644
index dab3f68..0000000
--- a/TestON/JenkinsFile/scripts/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-<h1>Wiki Graph Scripts</h1>
-
-The scripts that generate the graphs are written in the R programming language.
-
-The scripts are structured in the following format:
-1. Data Management
-    * Data is obtained from the databases through SQL. CLI arguments, filename, and titles are also handled here.
-        1. Importing libraries
-        2. Command line arguments
-        3. Title of the graph
-        4. Filename
-        5. SQL Initialization and Data Gathering
-2. Organize Data
-    * Raw data is sorted into a data frame.  The data frame is used in generating the graph.
-        1. Combining data into a single list.
-        2. Using the list to construct a data frame
-        3. Adding data as columns to the data frame
-3. Generate Graphs
-    * The graphs are formatted and constructed here.
-        1. Main plot generated
-        2. Fundamental variables assigned
-        3. Generate specific graph format
-        4. Exporting graph to file
diff --git a/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R b/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R
deleted file mode 100644
index 6f67b0d..0000000
--- a/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R
+++ /dev/null
@@ -1,386 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-has_flow_obj = 1
-database_host = 2
-database_port = 3
-database_u_id = 4
-database_pw = 5
-test_name = 6
-branch_name = 7
-batch_size = 8
-old_flow = 9
-save_directory = 10
-
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFIntentInstallWithdrawRerouteLat.R",
-                                  "<isFlowObj>" ,
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<batch-size>",
-                                  "<using-old-flow>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------------------------
-# Create File Name and Title of Graph
-# -----------------------------------
-
-print( "Creating filename and title of graph." )
-
-chartTitle <- "Intent Install, Withdraw, & Reroute Latencies"
-flowObjFileModifier <- ""
-errBarOutputFile <- paste( args[ save_directory ],
-                    "SCPFIntentInstallWithdrawRerouteLat_",
-                    args[ branch_name ],
-                    sep="" )
-
-if ( args[ has_flow_obj ] == "y" ){
-    errBarOutputFile <- paste( errBarOutputFile, "_fobj", sep="" )
-    flowObjFileModifier <- "fobj_"
-    chartTitle <- paste( chartTitle, "w/ FlowObj" )
-}
-if ( args[ old_flow ] == "y" ){
-    errBarOutputFile <- paste( errBarOutputFile, "_OldFlow", sep="" )
-    chartTitle <- paste( chartTitle,
-                         "With Eventually Consistent Flow Rule Store",
-                         sep="\n" )
-}
-errBarOutputFile <- paste( errBarOutputFile,
-                           "_",
-                           args[ batch_size ],
-                           "-batchSize_graph.jpg",
-                           sep="" )
-
-chartTitle <- paste( chartTitle,
-                     "\nBatch Size =",
-                     args[ batch_size ],
-                     sep=" " )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ---------------------------------------
-# Intent Install and Withdraw SQL Command
-# ---------------------------------------
-print( "Generating Intent Install and Withdraw SQL Command" )
-
-installWithdrawSQLCommand <- paste( "SELECT * FROM intent_latency_",
-                                    flowObjFileModifier,
-                                    "tests WHERE batch_size=",
-                                    args[ batch_size ],
-                                    " AND branch = '",
-                                    args[ branch_name ],
-                                    "' AND date IN ( SELECT MAX( date ) FROM intent_latency_",
-                                    flowObjFileModifier,
-                                    "tests WHERE branch='",
-                                    args[ branch_name ],
-                                    "' AND ",
-                                    ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
-                                    "is_old_flow",
-                                    ")",
-                                    sep="" )
-
-print( "Sending Intent Install and Withdraw SQL command:" )
-print( installWithdrawSQLCommand )
-installWithdrawData <- dbGetQuery( con, installWithdrawSQLCommand )
-
-# --------------------------
-# Intent Reroute SQL Command
-# --------------------------
-
-print( "Generating Intent Reroute SQL Command" )
-
-rerouteSQLCommand <- paste( "SELECT * FROM intent_reroute_latency_",
-                            flowObjFileModifier,
-                            "tests WHERE batch_size=",
-                            args[ batch_size ],
-                            " AND branch = '",
-                            args[ branch_name ],
-                            "' AND date IN ( SELECT MAX( date ) FROM intent_reroute_latency_",
-                            flowObjFileModifier,
-                            "tests WHERE branch='",
-                            args[ branch_name ],
-                            "' AND ",
-                            ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
-                            "is_old_flow",
-                            ")",
-                            sep="" )
-
-print( "Sending Intent Reroute SQL command:" )
-print( rerouteSQLCommand )
-rerouteData <- dbGetQuery( con, rerouteSQLCommand )
-
-# **********************************************************
-# STEP 2: Organize Data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# -------------------------------------------------------
-# Combining Install, Withdraw, and Reroute Latencies Data
-# -------------------------------------------------------
-
-print( "Combining Install, Withdraw, and Reroute Latencies Data" )
-
-if ( ncol( rerouteData ) == 0 ){  # Checks if rerouteData exists, so we can exclude it if necessary
-
-    requiredColumns <- c( "install_avg",
-                          "withdraw_avg"  )
-
-    tryCatch( avgs <- c( installWithdrawData[ requiredColumns] ),
-              error = function( e ) {
-                  print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-                  print( "Required columns: " )
-                  print( requiredColumns )
-                  print( "Actual columns: " )
-                  print( names( fileData ) )
-                  print( "Error dump:" )
-                  print( e )
-                  quit( status = 1 )
-              }
-             )
-} else{
-    colnames( rerouteData ) <- c( "date",
-                                  "name",
-                                  "date",
-                                  "branch",
-                                  "is_old_flow",
-                                  "commit",
-                                  "scale",
-                                  "batch_size",
-                                  "reroute_avg",
-                                  "reroute_std" )
-
-    tryCatch( avgs <- c( installWithdrawData[ 'install_avg' ],
-                         installWithdrawData[ 'withdraw_avg' ],
-                         rerouteData[ 'reroute_avg' ] ),
-              error = function( e ) {
-                  print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-                  print( "Required columns: " )
-                  print( requiredColumns )
-                  print( "Actual columns: " )
-                  print( names( fileData ) )
-                  print( "Error dump:" )
-                  print( e )
-                  quit( status = 1 )
-              }
-             )
-
-}
-
-# Combine lists into data frames.
-dataFrame <- melt( avgs )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing data frame." )
-
-if ( ncol( rerouteData ) == 0 ){  # Checks if rerouteData exists (due to batch size) for the dataFrame this time
-    dataFrame$scale <- c( installWithdrawData$scale,
-                          installWithdrawData$scale )
-
-    dataFrame$stds <- c( installWithdrawData$install_std,
-                         installWithdrawData$withdraw_std )
-} else{
-    dataFrame$scale <- c( installWithdrawData$scale,
-                          installWithdrawData$scale,
-                          rerouteData$scale )
-
-    dataFrame$stds <- c( installWithdrawData$install_std,
-                         installWithdrawData$withdraw_std,
-                         rerouteData$reroute_std )
-}
-
-colnames( dataFrame ) <- c( "ms",
-                            "type",
-                            "scale",
-                            "stds" )
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-dataFrame$type <- as.character( dataFrame$type )
-dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
-
-dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graph.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating the main plot." )
-
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = ms,
-                                           ymin = ms,
-                                           ymax = ms + stds,
-                                           fill = type ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 22 ) )
-barWidth <- 1.3
-xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9) )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (ms)" )
-fillLabel <- labs( fill="Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
-
-theme <- theme( plot.title=element_text( hjust = 0.5, size = 32, face='bold' ),
-                legend.position="bottom",
-                legend.text=element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-colors <- scale_fill_manual( values=c( "#F77670",
-                                       "#619DFA",
-                                       "#18BA48" ) )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title +
-                        colors
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph with error bars." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = barWidth,
-                            position = "dodge" )
-
-errorBarFormat <- geom_errorbar( width = barWidth,
-                                 position = position_dodge( barWidth ),
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$ms + 0.035 * max( dataFrame$ms ),
-                          label = format( dataFrame$ms,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          position = position_dodge( width = barWidth ),
-                          size = 5.5,
-                          fontface = "bold" )
-
-wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values +
-          wrapLegend
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFLineGraph.R b/TestON/JenkinsFile/scripts/SCPFLineGraph.R
deleted file mode 100644
index 8c59c0d..0000000
--- a/TestON/JenkinsFile/scripts/SCPFLineGraph.R
+++ /dev/null
@@ -1,303 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# This is the R script that generates the SCPF front page graphs.
-
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-graph_title = 5
-branch_name = 6
-num_dates = 7
-sql_commands = 8
-y_axis = 9
-old_flow = 10
-save_directory = 11
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# Check if sufficient args are provided.
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript testresultgraph.R",
-                                    "<database-host>",
-                                    "<database-port>",
-                                    "<database-user-id>",
-                                    "<database-password>",
-                                    "<graph-title>",    # part of the output filename as well
-                                    "<branch-name>",    # part of the output filename
-                                    "<#-dates>",        # part of the output filename
-                                    "<SQL-command>",
-                                    "<y-axis-title>",   # y-axis may be different among other SCPF graphs (ie: batch size, latency, etc. )
-                                    "<using-old-flow>",
-                                    "<directory-to-save-graph>",
-                  sep = " " ) )
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -------------------------------
-# Create Title and Graph Filename
-# -------------------------------
-
-print( "Creating title of graph" )
-
-# Title of graph based on command line args.
-
-title <- args[ graph_title ]
-title <- paste( title, if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "" )
-
-print( "Creating graph filename." )
-
-# Filenames for the output graph include the testname, branch, and the graph type.
-outputFile <- paste( args[ save_directory ],
-                    "SCPF_Front_Page_",
-                    gsub( " ", "_", args[ graph_title ] ),
-                    "_",
-                    args[ branch_name ],
-                    "_",
-                    args[ num_dates ],
-                    "-dates",
-                    if( args[ old_flow ] == "y" ) "_OldFlow" else "",
-                    "_graph.jpg",
-                    sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-print( "Sending SQL command:" )
-print( args[ sql_commands ] )
-
-fileData <- dbGetQuery( con, args[ sql_commands ] )
-
-# Check if data has been received
-if ( nrow( fileData ) == 0 ){
-    print( "[ERROR]: No data received from the databases. Please double check this by manually running the SQL command." )
-    quit( status = 1 )
-}
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# Create lists c() and organize data into their corresponding list.
-print( "Combine data retrieved from databases into a list." )
-
-buildNums <- fileData$build
-fileData$build <- c()
-print( fileData )
-
-if ( ncol( fileData ) > 1 ){
-    for ( i in 2:ncol( fileData ) ){
-        fileData[ i ] <- fileData[ i - 1 ] + fileData[ i ]
-    }
-}
-
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing data frame from combined data." )
-
-dataFrame <- melt( fileData )
-dataFrame$date <- fileData$date
-
-colnames( dataFrame ) <- c( "Legend",
-                            "Values" )
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-dataFrame$Legend <- as.character( dataFrame$Legend )
-dataFrame$Legend <- factor( dataFrame$Legend, levels=unique( dataFrame$Legend ) )
-dataFrame$build <- buildNums
-
-# Adding a temporary iterative list to the dataFrame so that there are no gaps in-between date numbers.
-dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating main plot." )
-# Create the primary plot here.
-# ggplot contains the following arguments:
-#     - data: the data frame that the graph will be based off of
-#    - aes: the asthetics of the graph which require:
-#        - x: x-axis values (usually iterative, but it will become date # later)
-#        - y: y-axis values (usually tests)
-#        - color: the category of the colored lines (usually legend of test)
-
-mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
-                                           y = Values,
-                                           color = Legend ) )
-
-# -------------------
-# Main Plot Formatted
-# -------------------
-
-print( "Formatting main plot." )
-
-limitExpansion <- expand_limits( y = 0 )
-
-tickLength <- 3
-breaks <- seq( max( dataFrame$iterative ) %% tickLength, max( dataFrame$iterative ), by = tickLength )
-breaks <- breaks[ which( breaks != 0 ) ]
-
-maxYDisplay <- max( dataFrame$Values ) * 1.05
-yBreaks <- ceiling( max( dataFrame$Values ) / 10 )
-yScaleConfig <- scale_y_continuous( breaks = seq( 0, maxYDisplay, by = yBreaks ) )
-xScaleConfig <- scale_x_continuous( breaks = breaks, label = rev( dataFrame$build )[ breaks ] )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-xLabel <- xlab( "Build" )
-yLabel <- ylab( args[ y_axis ] )
-
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-# Set other graph configurations here.
-theme <- theme( axis.text.x = element_text( angle = 0, size = 13 ),
-                plot.title = element_text( size = 32, face='bold', hjust = 0.5 ),
-                legend.position = "bottom",
-                legend.text = element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                legend.direction = 'horizontal',
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = title, subtitle = subtitle )
-
-# Colors used for the lines.
-# Note: graphs that have X lines will use the first X colors in this list.
-colors <- scale_color_manual( values=c( "#111111",   # black
-                                        "#008CFF",   # blue
-                                        "#FF3700",   # red
-                                        "#00E043",   # green
-                                        "#EEB600",   # yellow
-                                        "#E500FF") ) # purple (not used)
-
-wrapLegend <- guides( color = guide_legend( nrow = 2, byrow = TRUE ) )
-
-fundamentalGraphData <- mainPlot +
-                        limitExpansion +
-                        xScaleConfig +
-                        yScaleConfig +
-                        xLabel +
-                        yLabel +
-                        theme +
-                        colors +
-                        wrapLegend +
-                        title
-
-# ----------------------------
-# Generating Line Graph Format
-# ----------------------------
-
-print( "Generating line graph." )
-
-lineGraphFormat <- geom_line( size = 0.75 )
-pointFormat <- geom_point( size = 1.75 )
-
-result <- fundamentalGraphData +
-          lineGraphFormat +
-          pointFormat
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving result graph to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote result graph out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R b/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R
deleted file mode 100644
index dd0868e..0000000
--- a/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R
+++ /dev/null
@@ -1,418 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-old_flow = 7
-save_directory = 8
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFbatchFlowResp.R",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<using-old-flow>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
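-
-# For reference, a hypothetical invocation ( host, credentials and paths are
-# placeholders, not real values ):
-#     Rscript SCPFbatchFlowResp.R localhost 5432 dbuser dbpass \
-#             SCPFbatchFlowResp master y /path/to/graphs/
-# commandArgs( trailingOnly=TRUE ) then returns these eight values as a
-# character vector, indexed by the constants defined above.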
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-postOutputFile <- paste( args[ save_directory ],
-                         args[ test_name ],
-                         "_",
-                         args[ branch_name ],
-                         if( args[ old_flow ] == "y" ) "_OldFlow" else "",
-                         "_PostGraph.jpg",
-                         sep="" )
-
-delOutputFile <- paste( args[ save_directory ],
-                        args[ test_name ],
-                        "_",
-                        args[ branch_name ],
-                        if( args[ old_flow ] == "y" ) "_OldFlow" else "",
-                        "_DelGraph.jpg",
-                        sep="" )
-
-postChartTitle <- paste( "Single Bench Flow Latency - Post\n",
-                         "Last 3 Builds",
-                         if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "",
-                         sep = "" )
-delChartTitle <- paste( "Single Bench Flow Latency - Del\n",
-                        "Last 3 Builds",
-                        if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "",
-                        sep = "" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ---------------------------
-# Batch Flow Resp SQL Command
-# ---------------------------
-
-print( "Generating Batch Flow Resp SQL Command" )
-
-command <- paste( "SELECT * FROM batch_flow_tests WHERE branch='",
-                  args[ branch_name ],
-                  "' AND " ,
-                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
-                  "is_old_flow",
-                  " ORDER BY date DESC LIMIT 3",
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
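-
-# ( The NOT prefix above toggles between rows recorded with the old and the
-#   new flow rule store, so the same script can graph either variant. )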
-
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# -----------------
-# Post Data Sorting
-# -----------------
-
-print( "Sorting data for Post." )
-
-requiredColumns <- c( "posttoconfrm", "elapsepost" )
-
-tryCatch( postAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# -------------------------
-# Post Construct Data Frame
-# -------------------------
-
-postDataFrame <- melt( postAvgs )
-postDataFrame$scale <- fileData$scale
-postDataFrame$date <- fileData$date
-postDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
-
-colnames( postDataFrame ) <- c( "ms",
-                                "type",
-                                "scale",
-                                "date",
-                                "iterative" )
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-postDataFrame$type <- as.character( postDataFrame$type )
-postDataFrame$type <- factor( postDataFrame$type,
-                              levels = unique( postDataFrame$type ) )
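-# ( For example, factor( c( "b", "a" ) ) would sort its levels alphabetically;
-#   levels = unique( ... ) keeps them in first-appearance order instead, so the
-#   stacking order matches the column order of the query results. )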
-
-postDataFrame <- na.omit( postDataFrame )   # Omit any data that doesn't exist
-
-print( "Post Data Frame Results:" )
-print( postDataFrame )
-
-# ----------------
-# Del Data Sorting
-# ----------------
-
-requiredColumns <- c( "deltoconfrm", "elapsedel" )
-
-tryCatch( delAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-
-# ------------------------
-# Del Construct Data Frame
-# ------------------------
-
-delDataFrame <- melt( delAvgs )
-delDataFrame$scale <- fileData$scale
-delDataFrame$date <- fileData$date
-delDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
-
-colnames( delDataFrame ) <- c( "ms",
-                               "type",
-                               "scale",
-                               "date",
-                               "iterative" )
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-delDataFrame$type <- as.character( delDataFrame$type )
-delDataFrame$type <- factor( delDataFrame$type,
-                             levels = unique( delDataFrame$type ) )
-
-delDataFrame <- na.omit( delDataFrame )   # Omit any data that doesn't exist
-
-print( "Del Data Frame Results:" )
-print( delDataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------------------------------
-# Initializing variables used in both graphs
-# ------------------------------------------
-
-print( "Initializing variables used in both graphs." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-xLabel <- xlab( "Build Date" )
-yLabel <- ylab( "Latency (s)" )
-fillLabel <- labs( fill="Type" )
-colors <- scale_fill_manual( values=c( "#F77670", "#619DFA" ) )
-wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
-barWidth <- 0.3
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = barWidth )
-
-# -----------------------
-# Post Generate Main Plot
-# -----------------------
-
-print( "Creating main plot for Post graph." )
-
-mainPlot <- ggplot( data = postDataFrame, aes( x = iterative,
-                                               y = ms,
-                                               fill = type ) )
-
-# -----------------------------------
-# Post Fundamental Variables Assigned
-# -----------------------------------
-
-print( "Generating fundamental graph data for Post graph." )
-
-xScaleConfig <- scale_x_continuous( breaks = postDataFrame$iterative,
-                                    labels = postDataFrame$date )
-
-title <- labs( title = postChartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        wrapLegend +
-                        colors +
-                        title
-
-# --------------------------------
-# Post Generating Bar Graph Format
-# --------------------------------
-
-print( "Generating bar graph for Post graph." )
-
-# Name the column total "total" rather than "sum" to avoid shadowing base R's sum().
-total <- fileData[ 'posttoconfrm' ] +
-         fileData[ 'elapsepost' ]
-
-values <- geom_text( aes( x = postDataFrame$iterative,
-                          y = total + 0.03 * max( total ),
-                          label = format( total,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          values
-
-# ----------------------------
-# Post Exporting Graph to File
-# ----------------------------
-
-print( paste( "Saving Post bar chart to", postOutputFile ) )
-
-tryCatch( ggsave( postOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote stacked bar chart out to", postOutputFile ) )
-
-# ----------------------
-# Del Generate Main Plot
-# ----------------------
-
-print( "Creating main plot for Del graph." )
-
-mainPlot <- ggplot( data = delDataFrame, aes( x = iterative,
-                                              y = ms,
-                                              fill = type ) )
-
-# ----------------------------------
-# Del Fundamental Variables Assigned
-# ----------------------------------
-
-print( "Generating fundamental graph data for Del graph." )
-
-xScaleConfig <- scale_x_continuous( breaks = delDataFrame$iterative,
-                                    labels = delDataFrame$date )
-
-title <- labs( title = delChartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        wrapLegend +
-                        colors +
-                        title
-
-# -------------------------------
-# Del Generating Bar Graph Format
-# -------------------------------
-
-print( "Generating bar graph for Del graph." )
-
-# As above, use "total" to avoid shadowing base R's sum().
-total <- fileData[ 'deltoconfrm' ] +
-         fileData[ 'elapsedel' ]
-
-values <- geom_text( aes( x = delDataFrame$iterative,
-                          y = total + 0.03 * max( total ),
-                          label = format( total,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          values
-
-# ---------------------------
-# Del Exporting Graph to File
-# ---------------------------
-
-print( paste( "Saving Del bar chart to", delOutputFile ) )
-
-tryCatch( ggsave( delOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote stacked bar chart out to", delOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFcbench.R b/TestON/JenkinsFile/scripts/SCPFcbench.R
deleted file mode 100644
index 9d1972a..0000000
--- a/TestON/JenkinsFile/scripts/SCPFcbench.R
+++ /dev/null
@@ -1,267 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFcbench",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-errBarOutputFile <- paste( args[ save_directory ],
-                           args[ test_name ],
-                           "_",
-                           args[ branch_name ],
-                           "_errGraph.jpg",
-                           sep="" )
-
-chartTitle <- paste( "Single-Node CBench Throughput", "Last 3 Builds", sep = "\n" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ------------------
-# Cbench SQL Command
-# ------------------
-
-print( "Generating Scale Topology SQL Command" )
-
-command <- paste( "SELECT * FROM cbench_bm_tests WHERE branch='",
-                  args[ branch_name ],
-                  "' ORDER BY date DESC LIMIT 3",
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data." )
-
-requiredColumns <- c( "avg" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame" )
-
-dataFrame <- melt( avgs )
-dataFrame$std <- c( fileData$std )
-dataFrame$date <- c( fileData$date )
-dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
-
-colnames( dataFrame ) <- c( "ms",
-                            "type",
-                            "std",
-                            "date",
-                            "iterative" )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
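-
-# ( As a small illustration of the reshape step: melt( list( avg = c( 1, 2 ) ) )
-#   returns a long-format data frame with columns "value" and "L1", which the
-#   colnames() call above renames to "ms" and "type". )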
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Creating main plot." )
-
-mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
-                                           y = ms,
-                                           ymin = ms,
-                                           ymax = ms + std ) )
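-# ( Setting ymin = ms pins the lower whisker at the top of each bar, so the
-#   geom_errorbar() added below draws only the +1 std range above the mean. )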
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-barWidth <- 0.3
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
-                                    labels = dataFrame$date )
-xLabel <- xlab( "Build Date" )
-yLabel <- ylab( "Responses / sec" )
-fillLabel <- labs( fill = "Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-errorBarColor <- rgb( 140,140,140, maxColorValue=255 )
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 18, face = "bold" ),
-                legend.title = element_blank(),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph with error bars." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            position = position_dodge(),
-                            width = barWidth,
-                            fill = "#00AA13" )
-
-errorBarFormat <- geom_errorbar( width = barWidth,
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = dataFrame$iterative,
-                          y = fileData$avg + 0.025 * max( fileData$avg ),
-                          label = format( fileData$avg,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
diff --git a/TestON/JenkinsFile/scripts/SCPFflowTp1g.R b/TestON/JenkinsFile/scripts/SCPFflowTp1g.R
deleted file mode 100644
index ffb91a9..0000000
--- a/TestON/JenkinsFile/scripts/SCPFflowTp1g.R
+++ /dev/null
@@ -1,327 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-has_flow_obj = 1
-database_host = 2
-database_port = 3
-database_u_id = 4
-database_pw = 5
-test_name = 6
-branch_name = 7
-has_neighbors = 8
-old_flow = 9
-save_directory = 10
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFflowTp1g.R",
-                                  "<has-flow-obj>",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<has-neighbors>",
-                                  "<using-old-flow>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-chartTitle <- "Flow Throughput Test"
-fileNeighborsModifier <- "no"
-commandNeighborModifier <- ""
-fileFlowObjModifier <- ""
-sqlFlowObjModifier <- ""
-if ( args[ has_flow_obj ] == 'y' ){
-    fileFlowObjModifier <- "_flowObj"
-    sqlFlowObjModifier <- "_fobj"
-    chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
-}
-
-chartTitle <- paste( chartTitle, "\nNeighbors =", sep="" )
-
-fileOldFlowModifier <- ""
-if ( args[ has_neighbors ] == 'y' ){
-    fileNeighborsModifier <- "all"
-    commandNeighborModifier <- "scale=1 OR NOT "
-    chartTitle <- paste( chartTitle, "Cluster Size - 1" )
-} else {
-    chartTitle <- paste( chartTitle, "0" )
-}
-if ( args[ old_flow ] == 'y' ){
-    fileOldFlowModifier <- "_OldFlow"
-    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
-}
-errBarOutputFile <- paste( args[ save_directory ],
-                           args[ test_name ],
-                           "_",
-                           args[ branch_name ],
-                           "_",
-                           fileNeighborsModifier,
-                           "-neighbors",
-                           fileFlowObjModifier,
-                           fileOldFlowModifier,
-                           "_graph.jpg",
-                           sep="" )
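-
-# ( For example, with has-flow-obj = "y", has-neighbors = "n" and
-#   using-old-flow = "n", the title reads "Flow Throughput Test with Flow
-#   Objectives" over "Neighbors = 0", and the file name ends in
-#   "_no-neighbors_flowObj_graph.jpg". )
-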
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ---------------------------
-# Flow Throughput SQL Command
-# ---------------------------
-
-print( "Generating Flow Throughput SQL command." )
-
-command <- paste( "SELECT scale, avg( avg ), avg( std ) FROM flow_tp",
-                  sqlFlowObjModifier,
-                  "_tests WHERE (",
-                  commandNeighborModifier,
-                  "neighbors = 0 ) AND branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT max( date ) FROM flow_tp",
-                  sqlFlowObjModifier,
-                  "_tests WHERE branch='",
-                  args[ branch_name ],
-                  "' AND ",
-                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
-                  "is_old_flow",
-                  " ) GROUP BY scale ORDER BY scale",
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
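-
-# ( For instance, with has-flow-obj = "n", has-neighbors = "n",
-#   using-old-flow = "n" and branch "master", the assembled query reads
-#   roughly:
-#       SELECT scale, avg( avg ), avg( std ) FROM flow_tp_tests
-#       WHERE ( neighbors = 0 ) AND branch = 'master'
-#       AND date IN ( SELECT max( date ) FROM flow_tp_tests
-#                     WHERE branch='master' AND NOT is_old_flow )
-#       GROUP BY scale ORDER BY scale )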
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data for Flow Throughput." )
-
-colnames( fileData ) <- c( "scale",
-                           "avg",
-                           "std" )
-
-requiredColumns <- c( "avg" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-
-# ----------------------------
-# Flow TP Construct Data Frame
-# ----------------------------
-
-print( "Constructing Flow TP data frame." )
-
-dataFrame <- melt( avgs )              # This is where reshape2 comes in. The avgs list is converted to a long-format data frame.
-dataFrame$scale <- fileData$scale      # Add node scaling to the data frame.
-dataFrame$std <- fileData$std
-
-colnames( dataFrame ) <- c( "throughput",
-                            "type",
-                            "scale",
-                            "std" )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Generating main plot." )
-# Create the primary plot here.
-# ggplot contains the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: x-axis values (usually node scaling)
-#         - y: y-axis values (usually time in milliseconds)
-#         - fill: the category of the colored side-by-side bars (usually type)
-
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = throughput,
-                                           ymin = throughput,
-                                           ymax = throughput + std,
-                                           fill = type ) )
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-# Formatting the plot
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-width <- 0.7  # Width of the bars.
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale,
-                                    labels = dataFrame$scale )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Throughput (,000 Flows/sec)" )
-fillLabel <- labs( fill="Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
-
-theme <- theme( plot.title = element_text( hjust = 0.5,
-                                           size = 32,
-                                           face = 'bold' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-# Create the stacked bar graph with error bars.
-# geom_bar contains:
-#    - stat: data formatting (usually "identity")
-#    - width: the width of the bar types (declared above)
-# geom_errorbar contains similar arguments as geom_bar.
-print( "Generating bar graph with error bars." )
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = width,
-                            fill = "#FFAA3C" )
-
-errorBarFormat <- geom_errorbar( width = width,
-                                 position = position_dodge(),
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
-                          label = format( dataFrame$throughput,
-                                          digits=3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFhostLat.R b/TestON/JenkinsFile/scripts/SCPFhostLat.R
deleted file mode 100644
index b291551..0000000
--- a/TestON/JenkinsFile/scripts/SCPFhostLat.R
+++ /dev/null
@@ -1,263 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFhostLat",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-errBarOutputFile <- paste( args[ save_directory ],
-                           args[ test_name ],
-                           "_",
-                           args[ branch_name ],
-                           "_errGraph.jpg",
-                           sep="" )
-
-chartTitle <- "Host Latency"
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ------------------------
-# Host Latency SQL Command
-# ------------------------
-
-print( "Generating Host Latency SQL Command" )
-
-command  <- paste( "SELECT * FROM host_latency_tests WHERE branch = '",
-                   args[ branch_name ],
-                   "' AND date IN ( SELECT MAX( date ) FROM host_latency_tests WHERE branch = '",
-                   args[ branch_name ],
-                   "' ) ",
-                   sep = "" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
-
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data." )
-
-requiredColumns <- c( "avg" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame" )
-
-dataFrame <- melt( avgs )
-dataFrame$scale <- fileData$scale
-dataFrame$std <- fileData$std
-
-colnames( dataFrame ) <- c( "ms",
-                            "type",
-                            "scale",
-                            "std" )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Creating main plot." )
-
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = ms,
-                                           ymin = ms,
-                                           ymax = ms + std ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
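-# ( The hard-coded breaks presumably match the cluster scales exercised by this
-#   test; tick labels default to the break values themselves. )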
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (ms)" )
-fillLabel <- labs( fill="Type" )
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face ='bold' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-errorBarColor <- rgb( 140, 140, 140, maxColorValue = 255 )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph with error bars." )
-
-barWidth <- 0.9
-barGraphFormat <- geom_bar( stat = "identity",
-                            position = position_dodge(),
-                            width = barWidth,
-                            fill = "#A700EF" )
-
-errorBarFormat <- geom_errorbar( position = position_dodge(),
-                                 width = barWidth,
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$ms + 0.06 * max( dataFrame$ms ),
-                          label = format( dataFrame$ms,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFintentEventTp.R b/TestON/JenkinsFile/scripts/SCPFintentEventTp.R
deleted file mode 100644
index e9a9dc4..0000000
--- a/TestON/JenkinsFile/scripts/SCPFintentEventTp.R
+++ /dev/null
@@ -1,310 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-has_flow_obj = 1
-database_host = 2
-database_port = 3
-database_u_id = 4
-database_pw = 5
-test_name = 6
-branch_name = 7
-has_neighbors = 8
-old_flow = 9
-save_directory = 10
-
-# Command line arguments are read.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFIntentEventTp.R",
-                                  "<has-flow-obj>",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<has-neighbors>",
-                                  "<using-old-flow>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-chartTitle <- "Intent Event Throughput"
-fileNeighborsModifier <- "no"
-commandNeighborModifier <- ""
-fileFlowObjModifier <- ""
-sqlFlowObjModifier <- ""
-
-if ( args[ has_flow_obj ] == 'y' ){
-    fileFlowObjModifier <- "_flowObj"
-    sqlFlowObjModifier <- "_fobj"
-    chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
-}
-
-chartTitle <- paste( chartTitle, "\nevents/second with Neighbors =", sep="" )
-
-fileOldFlowModifier <- ""
-if ( args[ has_neighbors ] == 'y' ){
-    fileNeighborsModifier <- "all"
-    commandNeighborModifier <- "scale=1 OR NOT "
-    chartTitle <- paste( chartTitle, "all" )
-} else {
-    chartTitle <- paste( chartTitle, "0" )
-}
-if ( args[ old_flow ] == 'y' ){
-    fileOldFlowModifier <- "_OldFlow"
-    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
-}
-
-errBarOutputFile <- paste( args[ save_directory ],
-                           args[ test_name ],
-                           "_",
-                           args[ branch_name ],
-                           "_",
-                           fileNeighborsModifier,
-                           "-neighbors",
-                           fileFlowObjModifier,
-                           fileOldFlowModifier,
-                           "_graph.jpg",
-                           sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# -----------------------------------
-# Intent Event Throughput SQL Command
-# -----------------------------------
-
-print( "Generating Intent Event Throughput SQL command." )
-
-command <- paste( "SELECT scale, SUM( avg ) as avg FROM intent_tp",
-                  sqlFlowObjModifier,
-                  "_tests WHERE (",
-                  commandNeighborModifier,
-                  "neighbors = 0 ) AND branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT max( date ) FROM intent_tp",
-                  sqlFlowObjModifier,
-                  "_tests WHERE branch='",
-                  args[ branch_name ],
-                  "' AND ",
-                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
-                  "is_old_flow",
-                  " ) GROUP BY scale ORDER BY scale",
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
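-
-# ( Unlike the flow-throughput query, this one uses SUM( avg ) GROUP BY scale,
-#   presumably summing per-node averages into a single cluster-wide
-#   events/second figure for the latest run. )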
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data." )
-
-requiredColumns <- c( "avg" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing data frame." )
-dataFrame <- melt( avgs )              # This is where reshape2 comes in. The avgs list is converted to a long-format data frame.
-dataFrame$scale <- fileData$scale          # Add node scaling to the data frame.
-
-colnames( dataFrame ) <- c( "throughput",
-                            "type",
-                            "scale" )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Generating main plot." )
-# Create the primary plot here.
-# ggplot contains the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: x-axis values (usually node scaling)
-#         - y: y-axis values (usually time in milliseconds)
-#         - fill: the category of the colored side-by-side bars (usually type)
-
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = throughput,
-                                           fill = type ) )
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-# Formatting the plot
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-width <- 0.7  # Width of the bars.
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale, labels = dataFrame$scale )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Throughput (events/second)" )
-fillLabel <- labs( fill="Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 18, face = "bold" ),
-                legend.title = element_blank(),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
-                          label = format( dataFrame$throughput,
-                                          digits=3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7,
-                          fontface = "bold" )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        values
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph." )
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = width,
-                            fill = "#169EFF" )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          title
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R b/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R
deleted file mode 100644
index 9a61592..0000000
--- a/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R
+++ /dev/null
@@ -1,353 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-
-# Command line arguments are read.
-print( "Reading command-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-if ( is.na( args[ save_directory ] ) ){
-    print( paste( "Usage: Rscript SCPFmastershipFailoverLat",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
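-
-# Example invocation (illustrative values only):
-#   Rscript SCPFmastershipFailoverLat localhost 5432 user pw \
-#           SCPFmastershipFailoverLat master /tmp/graphs/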
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-chartTitle <- "Mastership Failover Latency"
-
-errBarOutputFile <- paste( args[ save_directory ],
-                           args[ test_name ],
-                           "_",
-                           args[ branch_name ],
-                           "_errGraph.jpg",
-                           sep="" )
-
-stackedBarOutputFile <- paste( args[ save_directory ],
-                               args[ test_name ],
-                               "_",
-                               args[ branch_name ],
-                               "_stackedGraph.jpg",
-                               sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ---------------------------------------
-# Mastership Failover Latency SQL Command
-# ---------------------------------------
-
-print( "Generating Mastership Failover Latency SQL command" )
-
-command  <- paste( "SELECT * FROM mastership_failover_tests WHERE branch = '",
-                   args[ branch_name ],
-                   "' AND date IN ( SELECT MAX( date ) FROM mastership_failover_tests WHERE branch = '",
-                   args[ branch_name ],
-                   "' ) ",
-                   sep = "" )
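-# The nested MAX( date ) subquery restricts the result set to the most recent
-# test run recorded for the requested branch.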
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Combining averages into a list." )
-
-requiredColumns <- c( "kill_deact_avg", "deact_role_avg" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame from list." )
-
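-# melt() from reshape2 turns the named list of average columns into long
-# format: one row per ( value, source-column ) pair, so each latency type
-# becomes its own fill group in the plots below.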
-dataFrame <- melt( avgs )
-dataFrame$scale <- fileData$scale
-dataFrame$stds <- c( fileData$kill_deact_std,
-                     fileData$deact_role_std )
-
-colnames( dataFrame ) <- c( "ms",
-                            "type",
-                            "scale",
-                            "stds" )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-sum <- fileData[ 'deact_role_avg' ] +
-       fileData[ 'kill_deact_avg' ]
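-# This total ( deact_role_avg + kill_deact_avg ) is only used to position the
-# value labels above the stacked bars; note the name masks base::sum here.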
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------------------------
-# Initialize Variables for Both Graphs
-# ------------------------------------
-
-print( "Initializing variables used in both graphs." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (ms)" )
-fillLabel <- labs( fill = "Type" )
-barWidth <- 0.9
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face='bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-barColors <- scale_fill_manual( values=c( "#F77670",
-                                          "#619DFA" ) )
-
-wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
-
-# ----------------------------------
-# Error Bar Graph Generate Main Plot
-# ----------------------------------
-
-print( "Creating main plot." )
-
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = ms,
-                                           ymin = ms,
-                                           ymax = ms + stds,
-                                           fill = type ) )
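-# ymin/ymax map each bar top and bar top plus one standard deviation;
-# geom_errorbar further below reads these aesthetics to draw the upper
-# error bars.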
-
-# ----------------------------------------------
-# Error Bar Graph Fundamental Variables Assigned
-# ----------------------------------------------
-
-print( "Generating fundamental graph data for the error bar graph." )
-
-errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title +
-                        wrapLegend
-
-# -------------------------------------------
-# Error Bar Graph Generating Bar Graph Format
-# -------------------------------------------
-
-print( "Generating bar graph with error bars." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            position = position_dodge(),
-                            width = barWidth )
-
-errorBarFormat <- geom_errorbar( width = barWidth,
-                                 position = position_dodge(),
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$ms + 0.02 * max( dataFrame$ms ),
-                          label = format( dataFrame$ms,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold",
-                          position = position_dodge( 0.9 ) )
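-# position_dodge( 0.9 ) matches the dodge width of the bars so each label
-# lands above its own bar instead of the group midpoint.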
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          barColors +
-          errorBarFormat +
-          values
-
-# ---------------------------------------
-# Error Bar Graph Exporting Graph to File
-# ---------------------------------------
-
-print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
-
-tryCatch( ggsave( errBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
-
-# ------------------------------------------------
-# Stacked Bar Graph Fundamental Variables Assigned
-# ------------------------------------------------
-
-print( "Generating fundamental graph data for the stacked bar graph." )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        title +
-                        wrapLegend
-
-# ---------------------------------------------
-# Stacked Bar Graph Generating Bar Graph Format
-# ---------------------------------------------
-
-print( "Generating stacked bar chart." )
-stackedBarFormat <- geom_bar( stat = "identity",
-                              width = barWidth )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = sum + 0.02 * max( sum ),
-                          label = format( sum,
-                                          digits = 3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          stackedBarFormat +
-          barColors +
-          title +
-          values
-
-# -----------------------------------------
-# Stacked Bar Graph Exporting Graph to File
-# -----------------------------------------
-
-print( paste( "Saving stacked bar chart to", stackedBarOutputFile ) )
-
-tryCatch( ggsave( stackedBarOutputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote stacked bar chart out to", stackedBarOutputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFportLat.R b/TestON/JenkinsFile/scripts/SCPFportLat.R
deleted file mode 100644
index 3398b0b..0000000
--- a/TestON/JenkinsFile/scripts/SCPFportLat.R
+++ /dev/null
@@ -1,409 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-
-# Command line arguments are read.
-print( "Reading command-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFportLat",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-errBarOutputFileUp <- paste( args[ save_directory ],
-                             "SCPFportLat_",
-                             args[ branch_name ],
-                             "_UpErrBarWithStack.jpg",
-                             sep = "" )
-
-errBarOutputFileDown <- paste( args[ save_directory ],
-                               "SCPFportLat_",
-                               args[ branch_name ],
-                               "_DownErrBarWithStack.jpg",
-                               sep = "" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# ------------------------
-# Port Latency SQL Command
-# ------------------------
-
-print( "Generating Port Latency SQL Command" )
-
-command <- paste( "SELECT * FROM port_latency_details WHERE branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT MAX( date ) FROM port_latency_details WHERE branch = '",
-                  args[ branch_name ],
-                  "' ) ",
-                  sep = "" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# -----------------------------
-# Port Up Averages Data Sorting
-# -----------------------------
-
-print( "Sorting data for Port Up Averages." )
-
-requiredColumns <- c( "up_ofp_to_dev_avg", "up_dev_to_link_avg", "up_link_to_graph_avg" )
-
-tryCatch( upAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-
-# ----------------------------
-# Port Up Construct Data Frame
-# ----------------------------
-
-print( "Constructing Port Up data frame." )
-
-upAvgsDataFrame <- melt( upAvgs )
-upAvgsDataFrame$scale <- fileData$scale
-upAvgsDataFrame$up_std <- fileData$up_std
-
-colnames( upAvgsDataFrame ) <- c( "ms",
-                                  "type",
-                                  "scale",
-                                  "stds" )
-
-upAvgsDataFrame <- na.omit( upAvgsDataFrame )
-
-upAvgsDataFrame$type <- as.character( upAvgsDataFrame$type )
-upAvgsDataFrame$type <- factor( upAvgsDataFrame$type, levels=unique( upAvgsDataFrame$type ) )
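-# Re-leveling in order of appearance keeps the stacking order of the latency
-# phases as they occur in the pipeline; ggplot would otherwise order the types
-# alphabetically.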
-
-sumOfUpAvgs <- fileData[ 'up_ofp_to_dev_avg' ] +
-               fileData[ 'up_dev_to_link_avg' ] +
-               fileData[ 'up_link_to_graph_avg' ]
-
-print( "Up Averages Results:" )
-print( upAvgsDataFrame )
-
-# -------------------------------
-# Port Down Averages Data Sorting
-# -------------------------------
-
-print( "Sorting data for Port Down Averages." )
-
-requiredColumns <- c( "down_ofp_to_dev_avg", "down_dev_to_link_avg", "down_link_to_graph_avg" )
-
-tryCatch( downAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# ------------------------------
-# Port Down Construct Data Frame
-# ------------------------------
-
-print( "Constructing Port Down data frame." )
-
-downAvgsDataFrame <- melt( downAvgs )
-downAvgsDataFrame$scale <- fileData$scale
-downAvgsDataFrame$down_std <- fileData$down_std
-
-colnames( downAvgsDataFrame ) <- c( "ms",
-                                    "type",
-                                    "scale",
-                                    "stds" )
-
-downAvgsDataFrame <- na.omit( downAvgsDataFrame )
-
-downAvgsDataFrame$type <- as.character( downAvgsDataFrame$type )
-downAvgsDataFrame$type <- factor( downAvgsDataFrame$type, levels=unique( downAvgsDataFrame$type ) )
-
-sumOfDownAvgs <- fileData[ 'down_ofp_to_dev_avg' ] +
-                 fileData[ 'down_dev_to_link_avg' ] +
-                 fileData[ 'down_link_to_graph_avg' ]
-
-print( "Down Averages Results:" )
-print( downAvgsDataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------------------------
-# Initialize Variables For Both Graphs
-# ------------------------------------
-
-print( "Initializing variables used in both graphs." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-barWidth <- 1
-xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (ms)" )
-fillLabel <- labs( fill="Type" )
-wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
-
-theme <- theme( plot.title=element_text( hjust = 0.5, size = 32, face='bold' ),
-                legend.position="bottom",
-                legend.text=element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-colors <- scale_fill_manual( values=c( "#F77670",
-                                       "#619DFA",
-                                       "#18BA48" ) )
-
-# --------------------------
-# Port Up Generate Main Plot
-# --------------------------
-
-print( "Generating main plot (Port Up Latency)." )
-
-mainPlot <- ggplot( data = upAvgsDataFrame, aes( x = scale,
-                                                 y = ms,
-                                                 fill = type,
-                                                 ymin = fileData[ 'up_end_to_end_avg' ],
-                                                 ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
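-# Here ymin is the end-to-end average itself, so the error bar spans from the
-# end-to-end mean up to mean plus one standard deviation over the stacked bars.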
-
-# --------------------------------------
-# Port Up Fundamental Variables Assigned
-# --------------------------------------
-
-print( "Generating fundamental graph data (Port Up Latency)." )
-
-title <- labs( title = "Port Up Latency", subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        wrapLegend +
-                        title +
-                        colors
-
-# -----------------------------------
-# Port Up Generating Bar Graph Format
-# -----------------------------------
-
-print( "Generating bar graph with error bars (Port Up Latency)." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = barWidth )
-errorBarFormat <- geom_errorbar( width = barWidth,
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = upAvgsDataFrame$scale,
-                          y = sumOfUpAvgs + 0.03 * max( sumOfUpAvgs ),
-                          label = format( sumOfUpAvgs,
-                                          digits=3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values
-
-# -------------------------------
-# Port Up Exporting Graph to File
-# -------------------------------
-
-print( paste( "Saving bar chart with error bars (Port Up Latency) to", errBarOutputFileUp ) )
-
-tryCatch( ggsave( errBarOutputFileUp,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars (Port Up Latency) out to", errBarOutputFileUp ) )
-
-# ----------------------------
-# Port Down Generate Main Plot
-# ----------------------------
-
-print( "Generating main plot (Port Down Latency)." )
-
-mainPlot <- ggplot( data = downAvgsDataFrame, aes( x = scale,
-                                                   y = ms,
-                                                   fill = type,
-                                                   ymin = fileData[ 'down_end_to_end_avg' ],
-                                                   ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
-
-# ----------------------------------------
-# Port Down Fundamental Variables Assigned
-# ----------------------------------------
-
-print( "Generating fundamental graph data (Port Down Latency)." )
-
-title <- labs( title = "Port Down Latency", subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        wrapLegend +
-                        title +
-                        colors
-
-# -------------------------------------
-# Port Down Generating Bar Graph Format
-# -------------------------------------
-
-print( "Generating bar graph with error bars (Port Down Latency)." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            width = barWidth )
-errorBarFormat <- geom_errorbar( width = barWidth,
-                                 color = errorBarColor )
-
-values <- geom_text( aes( x = downAvgsDataFrame$scale,
-                          y = sumOfDownAvgs + 0.03 * max( sumOfDownAvgs ),
-                          label = format( sumOfDownAvgs,
-                                          digits=3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          values
-
-# ---------------------------------
-# Port Down Exporting Graph to File
-# ---------------------------------
-
-print( paste( "Saving bar chart with error bars (Port Down Latency) to", errBarOutputFileDown ) )
-
-tryCatch( ggsave( errBarOutputFileDown,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars (Port Down Latency) out to", errBarOutputFileDown ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFscaleTopo.R b/TestON/JenkinsFile/scripts/SCPFscaleTopo.R
deleted file mode 100644
index f6da0d2..0000000
--- a/TestON/JenkinsFile/scripts/SCPFscaleTopo.R
+++ /dev/null
@@ -1,268 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-
-# Command line arguments are read.
-print( "Reading command-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFscaleTopo",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-outputFile <- paste( args[ save_directory ],
-                     args[ test_name ],
-                     "_",
-                     args[ branch_name ],
-                     "_graph.jpg",
-                     sep="" )
-
-chartTitle <- "Scale Topology Latency Test"
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# --------------------------
-# Scale Topology SQL Command
-# --------------------------
-
-print( "Generating Scale Topology SQL Command" )
-
-command <- paste( "SELECT * FROM scale_topo_latency_details WHERE branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT MAX( date ) FROM scale_topo_latency_details WHERE branch = '",
-                  args[ branch_name ],
-                  "' ) ",
-                  sep = "" )
-
-print( "Sending SQL command:" )
-print( command )
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data." )
-
-requiredColumns <- c( "last_role_request_to_last_topology", "last_connection_to_last_role_request", "first_connection_to_last_connection" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame" )
-
-# Parse lists into data frames.
-dataFrame <- melt( avgs )
-dataFrame$scale <- fileData$scale
-colnames( dataFrame ) <- c( "s",
-                            "type",
-                            "scale" )
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-dataFrame$type <- as.character( dataFrame$type )
-dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
-dataFrame$iterative <- seq( 1, nrow( fileData ), by = 1 )
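-# 'iterative' gives each run an evenly spaced x position; the real scale values
-# are applied as axis labels through xScaleConfig below.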
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-sum <- fileData[ 'last_role_request_to_last_topology' ] +
-       fileData[ 'last_connection_to_last_role_request' ] +
-       fileData[ 'first_connection_to_last_connection' ]
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Creating main plot." )
-
-mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
-                                           y = s,
-                                           fill = type ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 20 ) )   # set the default text size of the graph.
-width <- 0.6  # Width of the bars.
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
-                                    label = dataFrame$scale )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (s)" )
-fillLabel <- labs( fill="Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-values <- geom_text( aes( x = dataFrame$iterative,
-                          y = sum + 0.02 * max( sum ),
-                          label = format( sum,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 7.0,
-                          fontface = "bold" )
-
-wrapLegend <- guides( fill = guide_legend( nrow=2, byrow=TRUE ) )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        values +
-                        wrapLegend +
-                        title
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph." )
-
-barGraphFormat <- geom_bar( stat = "identity", width = width )
-
-result <- fundamentalGraphData +
-          barGraphFormat
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R b/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R
deleted file mode 100644
index 045f5e7..0000000
--- a/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R
+++ /dev/null
@@ -1,290 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-has_flow_obj = 1
-database_host = 2
-database_port = 3
-database_u_id = 4
-database_pw = 5
-test_name = 6
-branch_name = 7
-old_flow = 8
-save_directory = 9
-
-print( "Reading command-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-    print( paste( "Usage: Rscript SCPFscalingMaxIntents",
-                                  "<has-flowObj>",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",
-                                  "<branch-name>",
-                                  "<using-old-flow>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-fileFlowObjModifier <- ""
-sqlFlowObjModifier <- ""
-chartTitle <- "Number of Installed Intents & Flows"
-
-if ( args[ has_flow_obj ] == "y" ){
-    fileFlowObjModifier <- "_flowObj"
-    sqlFlowObjModifier <- "fobj_"
-    chartTitle <- "Number of Installed Intents & Flows\n with Flow Objectives"
-}
-fileOldFlowModifier <- ""
-if ( args[ old_flow ] == 'y' ){
-    fileOldFlowModifier <- "_OldFlow"
-    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
-}
-
-outputFile <- paste( args[ save_directory ],
-                     args[ test_name ],
-                     fileFlowObjModifier,
-                     fileOldFlowModifier,
-                     "_",
-                     args[ branch_name ],
-                     "_errGraph.jpg",
-                     sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# -------------------------------
-# Scaling Max Intents SQL Command
-# -------------------------------
-
-print( "Scaling Max Intents SQL Command" )
-
-command <- paste( "SELECT * FROM max_intents_",
-                  sqlFlowObjModifier,
-                  "tests WHERE branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT MAX( date ) FROM max_intents_",
-                  sqlFlowObjModifier,
-                  "tests WHERE branch = '",
-                  args[ branch_name ],
-                  "' AND ",
-                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
-                  "is_old_flow",
-                  " ) ",
-                  sep="" )
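-# The NOT/"" toggle above filters on the is_old_flow flag so that runs using
-# the eventually consistent ( old ) flow rule store are graphed separately.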
-
-print( "Sending SQL command:" )
-print( command )
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# ------------
-# Data Sorting
-# ------------
-
-print( "Sorting data." )
-
-requiredColumns <- c( "max_intents_ovs", "max_flows_ovs" )
-
-tryCatch( avgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame" )
-
-dataFrame <- melt( avgs )
-dataFrame$scale <- fileData$scale
-
-colnames( dataFrame ) <- c( "ms", "type", "scale" )
-
-dataFrame$type <- as.character( dataFrame$type )
-dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
-
-dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------
-# Generate Main Plot
-# ------------------
-
-print( "Creating main plot." )
-mainPlot <- ggplot( data = dataFrame, aes( x = scale,
-                                           y = ms,
-                                           fill = type ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-barWidth <- 1.3
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
-xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Max Number of Intents/Flow Rules" )
-fillLabel <- labs( fill="Type" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size=22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-colors <- scale_fill_manual( values = c( "#F77670",
-                                         "#619DFA" ) )
-
-wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
-
-title <- labs( title = chartTitle, subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        fillLabel +
-                        theme +
-                        wrapLegend +
-                        title +
-                        colors
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph." )
-
-barGraphFormat <- geom_bar( stat = "identity",
-                            position = position_dodge(),
-                            width = barWidth )
-
-values <- geom_text( aes( x = dataFrame$scale,
-                          y = dataFrame$ms + 0.015 * max( dataFrame$ms ),
-                          label = format( dataFrame$ms,
-                                          digits=3,
-                                          big.mark = ",",
-                                          scientific = FALSE ) ),
-                          size = 5.2,
-                          fontface = "bold",
-                          position = position_dodge( width = 1.25 ) )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          values
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving bar chart to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/SCPFswitchLat.R b/TestON/JenkinsFile/scripts/SCPFswitchLat.R
deleted file mode 100644
index 86290db..0000000
--- a/TestON/JenkinsFile/scripts/SCPFswitchLat.R
+++ /dev/null
@@ -1,407 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-database_host = 1
-database_port = 2
-database_u_id = 3
-database_pw = 4
-test_name = 5
-branch_name = 6
-save_directory = 7
-
-# Command line arguments are read.
-print( "Reading command-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )    # For databases
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ save_directory ] ) ){
-
-    print( paste( "Usage: Rscript SCPFswitchLat",
-                            "<database-host>",
-                            "<database-port>",
-                            "<database-user-id>",
-                            "<database-password>",
-                            "<test-name>",
-                            "<branch-name>",
-                            "<directory-to-save-graphs>",
-                            sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -----------------
-# Create File Names
-# -----------------
-
-print( "Creating filenames and title of graph." )
-
-errBarOutputFileUp <- paste( args[ save_directory ],
-                             "SCPFswitchLat_",
-                             args[ branch_name ],
-                             "_UpErrBarWithStack.jpg",
-                             sep="" )
-
-errBarOutputFileDown <- paste( args[ save_directory ],
-                               "SCPFswitchLat_",
-                               args[ branch_name ],
-                               "_DownErrBarWithStack.jpg",
-                               sep="" )
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ database_host ],
-                  port = strtoi( args[ database_port ] ),
-                  user = args[ database_u_id ],
-                  password = args[ database_pw ] )
-
-# --------------------------
-# Switch Latency SQL Command
-# --------------------------
-
-print( "Generating Switch Latency SQL Command" )
-
-command <- paste( "SELECT * FROM switch_latency_details WHERE branch = '",
-                  args[ branch_name ],
-                  "' AND date IN ( SELECT MAX( date ) FROM switch_latency_details WHERE branch='",
-                  args[ branch_name ],
-                  "' )",
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-
-fileData <- dbGetQuery( con, command )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# -------------------------------
-# Switch Up Averages Data Sorting
-# -------------------------------
-
-print( "Sorting data for Switch Up Averages." )
-
-requiredColumns <- c( "up_device_to_graph_avg",
-                      "feature_reply_to_device_avg",
-                      "tcp_to_feature_reply_avg" )
-
-tryCatch( upAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# ------------------------------
-# Switch Up Construct Data Frame
-# ------------------------------
-
-print( "Constructing Switch Up data frame." )
-
-upAvgsData <- melt( upAvgs )
-upAvgsData$scale <- fileData$scale
-upAvgsData$up_std <- fileData$up_std
-upAvgsData <- na.omit( upAvgsData )
-
-colnames( upAvgsData ) <- c( "ms",
-                             "type",
-                             "scale",
-                             "stds" )
-
-upAvgsData$type <- as.character( upAvgsData$type )
-upAvgsData$type <- factor( upAvgsData$type, levels=unique( upAvgsData$type ) )
-
-sumOfUpAvgs <- fileData[ 'up_device_to_graph_avg' ] +
-               fileData[ 'feature_reply_to_device_avg' ] +
-               fileData[ 'tcp_to_feature_reply_avg' ]
-
-print( "Up Averages Results:" )
-print( upAvgsData )
-
-# ---------------------------------
-# Switch Down Averages Data Sorting
-# ---------------------------------
-
-print( "Sorting data for Switch Down Averages." )
-
-requiredColumns <- c( "down_device_to_graph_avg",
-                      "ack_to_device_avg",
-                      "fin_ack_to_ack_avg" )
-
-tryCatch( downAvgs <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------------------
-# Switch Down Construct Data Frame
-# --------------------------------
-
-print( "Constructing Switch Down data frame." )
-
-downAvgsData <- melt( downAvgs )
-downAvgsData$scale <- fileData$scale
-downAvgsData$down_std <- fileData$down_std
-
-colnames( downAvgsData ) <- c( "ms",
-                               "type",
-                               "scale",
-                               "stds" )
-
-downAvgsData$type <- as.character( downAvgsData$type )
-downAvgsData$type <- factor( downAvgsData$type, levels=unique( downAvgsData$type ) )
-
-downAvgsData <- na.omit( downAvgsData )
-
-sumOfDownAvgs <- fileData[ 'down_device_to_graph_avg' ] +
-                 fileData[ 'ack_to_device_avg' ] +
-                 fileData[ 'fin_ack_to_ack_avg' ]
-
-print( "Down Averages Results:" )
-print( downAvgsData )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# ------------------------------------
-# Initialize Variables For Both Graphs
-# ------------------------------------
-
-print( "Initializing variables used in both graphs." )
-
-theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graphs
-xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
-xLabel <- xlab( "Scale" )
-yLabel <- ylab( "Latency (ms)" )
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-errorBarColor <- rgb( 140, 140, 140, maxColorValue = 255 )
-barWidth <- 1
-
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-# ----------------------------
-# Switch Up Generate Main Plot
-# ----------------------------
-
-print( "Creating main plot (Switch Up Latency)." )
-
-mainPlot <- ggplot( data = upAvgsData, aes( x = scale,
-                                            y = ms,
-                                            fill = type,
-                                            ymin = fileData[ 'up_end_to_end_avg' ],
-                                            ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
-
-# ----------------------------------------
-# Switch Up Fundamental Variables Assigned
-# ----------------------------------------
-
-print( "Generating fundamental graph data (Switch Up Latency)." )
-
-title <- labs( title = "Switch Up Latency", subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        theme +
-                        title
-
-# -------------------------------------
-# Switch Up Generating Bar Graph Format
-# -------------------------------------
-
-print( "Generating bar graph with error bars (Switch Up Latency)." )
-
-barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
-errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
-
-# Each total label floats 4% of the tallest bar above its bar's top.
-barGraphValues <- geom_text( aes( x = upAvgsData$scale,
-                                  y = sumOfUpAvgs + 0.04 * max( sumOfUpAvgs ),
-                                  label = format( sumOfUpAvgs,
-                                                  digits = 3,
-                                                  big.mark = ",",
-                                                  scientific = FALSE ) ),
-                             size = 7.0,
-                             fontface = "bold" )
-
-wrapLegend <- guides( fill = guide_legend( nrow = 2, byrow = TRUE ) )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          errorBarFormat +
-          barGraphValues +
-          wrapLegend
-
-# ---------------------------------
-# Switch Up Exporting Graph to File
-# ---------------------------------
-
-print( paste( "Saving bar chart with error bars (Switch Up Latency) to", errBarOutputFileUp ) )
-
-tryCatch( ggsave( errBarOutputFileUp,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars (Switch Up Latency) out to", errBarOutputFileUp ) )
-
-# ------------------------------
-# Switch Down Generate Main Plot
-# ------------------------------
-
-print( "Creating main plot (Switch Down Latency)." )
-
-mainPlot <- ggplot( data = downAvgsData, aes( x = scale,
-                                              y = ms,
-                                              fill = type,
-                                              ymin = fileData[ 'down_end_to_end_avg' ],
-                                              ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
-
-# ------------------------------------------
-# Switch Down Fundamental Variables Assigned
-# ------------------------------------------
-
-print( "Generating fundamental graph data (Switch Down Latency)." )
-
-colors <- scale_fill_manual( values=c( "#F77670",       # Red
-                                       "#619DFA",       # Blue
-                                       "#18BA48" ) )    # Green
-
-title <- labs( title = "Switch Down Latency", subtitle = subtitle )
-
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        xLabel +
-                        yLabel +
-                        theme +
-                        title
-
-# ---------------------------------------
-# Switch Down Generating Bar Graph Format
-# ---------------------------------------
-
-print( "Generating bar graph with error bars (Switch Down Latency)." )
-barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
-errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
-
-barGraphValues <- geom_text( aes( x = downAvgsData$scale,
-                                  y = sumOfDownAvgs + 0.04 * max( sumOfDownAvgs ),
-                                  label = format( sumOfDownAvgs,
-                                                  digits = 3,
-                                                  big.mark = ",",
-                                                  scientific = FALSE ) ),
-                             size = 7.0,
-                             fontface = "bold" )
-
-wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
-
-result <- fundamentalGraphData +
-          barGraphFormat +
-          colors +
-          errorBarFormat +
-          barGraphValues +
-          wrapLegend
-
-# -----------------------------------
-# Switch Down Exporting Graph to File
-# -----------------------------------
-
-print( paste( "Saving bar chart with error bars (Switch Down Latency) to", errBarOutputFileDown ) )
-
-tryCatch( ggsave( errBarOutputFileDown,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote bar chart with error bars (Switch Down Latency) out to", errBarOutputFileDown ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R b/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R
deleted file mode 100644
index 5671a38..0000000
--- a/TestON/JenkinsFile/scripts/testCaseGraphGenerator.R
+++ /dev/null
@@ -1,323 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# This is the R script that generates the FUNC, HA, and various USECASE result graphs.
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ 8 ] ) ){
-
-    print( paste( "Usage: Rscript testCaseGraphGenerator.R",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-name>",                      # part of the output filename
-                                  "<branch-name>",                    # for sql and output filename
-                                  "<#-builds-to-show>",               # for sql and output filename
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -------------------------------
-# Create Title and Graph Filename
-# -------------------------------
-
-print( "Creating title of graph." )
-
-title <- paste( args[ 5 ],
-                " - ",
-                args[ 6 ],
-                " \n Results of Last ",
-                args[ 7 ],
-                " Builds",
-                sep="" )
-
-print( "Creating graph filename." )
-
-outputFile <- paste( args[ 8 ],
-                     args[ 5 ],
-                     "_",
-                     args[ 6 ],
-                     "_",
-                     args[ 7 ],
-                     "-builds_graph.jpg",
-                     sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
-
-# ---------------------
-# Test Case SQL Command
-# ---------------------
-print( "Generating Test Case SQL command." )
-
-command <- paste( "SELECT * FROM executed_test_tests WHERE actual_test_name='",
-                  args[ 5 ],
-                  "' AND branch='",
-                  args[ 6 ],
-                  "' ORDER BY date DESC LIMIT ",
-                  args[ 7 ],
-                  sep="" )
-
-print( "Sending SQL command:" )
-print( command )
-fileData <- dbGetQuery( con, command )
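-
-# For illustration only (hypothetical arguments): with test name "FUNCflow",
-# branch "master" and 20 builds, the assembled command reads:
-#     SELECT * FROM executed_test_tests WHERE actual_test_name='FUNCflow'
-#     AND branch='master' ORDER BY date DESC LIMIT 20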
-
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-# -------------------------------------------------------
-# Combining Passed, Failed, and Planned Data
-# -------------------------------------------------------
-
-print( "Combining Passed, Failed, and Planned Data." )
-
-requiredColumns <- c( "num_failed", "num_passed", "num_planned" )
-
-tryCatch( categories <- c( fileData[ requiredColumns ] ),
-          error = function( e ) {
-              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
-              print( "Required columns: " )
-              print( requiredColumns )
-              print( "Actual columns: " )
-              print( names( fileData ) )
-              print( "Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-         )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing data frame from combined data." )
-
-dataFrame <- melt( categories )
-
-# Rename column names in dataFrame
-colnames( dataFrame ) <- c( "Tests",
-                            "Status" )
-
-# Add build dates to the dataFrame
-dataFrame$build <- fileData$build
-
-# Format data frame so that the data is in the same order as it appeared in the file.
-dataFrame$Status <- as.character( dataFrame$Status )
-dataFrame$Status <- factor( dataFrame$Status, levels = unique( dataFrame$Status ) )
-
-# Add planned, passed, and failed results to the dataFrame (for the fill below the lines)
-dataFrame$num_planned <- fileData$num_planned
-dataFrame$num_passed <- fileData$num_passed
-dataFrame$num_failed <- fileData$num_failed
-
-# Adding a temporary reversed iterative list to the dataFrame so that there are no gaps between build numbers.
-dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
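-
-# A minimal sketch (hypothetical build numbers, not from the database) of why
-# the sequence is reversed: the SQL query returns rows newest-first, so
-# reversing 1..n puts the oldest build at x = 1 and keeps the x positions
-# contiguous even when build numbers have gaps.
-exampleBuilds <- c( 305, 304, 301 )                              # newest first; 302-303 missing
-exampleX <- rev( seq( 1, length( exampleBuilds ), by = 1 ) )     # 3 2 1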
-
-# Omit any data that doesn't exist
-dataFrame <- na.omit( dataFrame )
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating main plot." )
-# Create the primary plot here.
-# ggplot takes the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: x-axis values (usually iterative, but relabeled with build # later)
-#         - y: y-axis values (usually tests)
-#         - color: the category of the colored lines (usually status of test)
-
-mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
-                                           y = Tests,
-                                           color = Status ) )
-
-# -------------------
-# Main Plot Formatted
-# -------------------
-
-print( "Formatting main plot." )
-
-# geom_ribbon is used so that there is a colored fill below the lines. These values shouldn't be changed.
-failedColor <- geom_ribbon( aes( ymin = 0,
-                                 ymax = dataFrame$num_failed ),
-                                 fill = "red",
-                                 linetype = 0,
-                                 alpha = 0.07 )
-
-passedColor <- geom_ribbon( aes( ymin = 0,
-                                 ymax = dataFrame$num_passed ),
-                                 fill = "green",
-                                 linetype = 0,
-                                 alpha = 0.05 )
-
-plannedColor <- geom_ribbon( aes( ymin = 0,
-                                  ymax = dataFrame$num_planned ),
-                                  fill = "blue",
-                                  linetype = 0,
-                                  alpha = 0.01 )
-
-# Colors for the lines
-lineColors <- scale_color_manual( values = c( "#E80000",      # red
-                                              "#00B208",      # green
-                                              "#00A5FF" ) )   # blue
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
-
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
-                                    label = dataFrame$build )
-yScaleConfig <- scale_y_continuous( breaks = seq( 0, max( dataFrame$Tests ),
-                                    by = ceiling( max( dataFrame$Tests ) / 10 ) ) )
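-
-# The breaks/label pair re-titles the contiguous x positions with the real
-# build numbers, so gaps in build numbering never leave holes in the plot;
-# e.g. positions 3 2 1 from the sketch above display as 305, 304 and 301.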
-
-xLabel <- xlab( "Build Number" )
-yLabel <- ylab( "Test Cases" )
-
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-legendLabels <- scale_colour_discrete( labels = c( "Failed Cases",
-                                                   "Passed Cases",
-                                                   "Planned Cases" ) )
-
-# Set other graph configurations here.
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                axis.text.x = element_text( angle = 0, size = 14 ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = title, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        plannedColor +
-                        passedColor +
-                        failedColor +
-                        xScaleConfig +
-                        yScaleConfig +
-                        xLabel +
-                        yLabel +
-                        lineColors +
-                        legendLabels +
-                        theme +
-                        title
-
-# ----------------------------
-# Generating Line Graph Format
-# ----------------------------
-
-print( "Generating line graph." )
-
-lineGraphFormat <- geom_line( size = 1.1 )
-pointFormat <- geom_point( size = 3 )
-
-result <- fundamentalGraphData +
-           lineGraphFormat +
-           pointFormat
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving result graph to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote result graph out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/testCategoryBuildStats.R b/TestON/JenkinsFile/scripts/testCategoryBuildStats.R
deleted file mode 100644
index 94c3572..0000000
--- a/TestON/JenkinsFile/scripts/testCategoryBuildStats.R
+++ /dev/null
@@ -1,399 +0,0 @@
-# Copyright 2018 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-databaseHost <- 1
-databasePort <- 2
-databaseUserID <- 3
-databasePassword <- 4
-testSuiteName <- 5
-branchName <- 6
-testsToInclude <- 7
-buildToShow <- 8
-saveDirectory <- 9
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ saveDirectory ] ) ){
-
-    print( paste( "Usage: Rscript testCategoryBuildStats.R",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-suite-name>",
-                                  "<branch-name>",
-                                  "<tests-to-include-(as-one-string-sep-groups-by-semicolon-title-as-first-group-item-sep-by-dash)>",
-                                  "<build-to-show>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ databaseHost ],
-                  port = strtoi( args[ databasePort ] ),
-                  user = args[ databaseUserID ],
-                  password = args[ databasePassword ] )
-
-# ---------------------
-# Test Case SQL Command
-# ---------------------
-
-print( "Generating Test Case SQL command." )
-
-tests <- "'"
-for ( test in as.list( strsplit( args[ testsToInclude ], "," )[[1]] ) ){
-    tests <- paste( tests, test, "','", sep="" )
-}
-tests <- substr( tests, 0, nchar( tests ) - 2 )
-
-fileBuildToShow <- args[ buildToShow ]
-operator <- "= "
-buildTitle <- ""
-if ( args[ buildToShow ] == "latest" ){
-    buildTitle <- "\nLatest Test Results"
-    operator <- ">= "
-    # 1000 matches the lowest build number of the current pipeline (see pipelineMinValue in testCategoryTrend.R).
-    args[ buildToShow ] <- "1000"
-} else {
-    buildTitle <- paste( " \n Build #", args[ buildToShow ], sep="" )
-}
-
-tests <- strsplit( args[ testsToInclude ], ";" )
-dbResults <- list()
-titles <- list()
-
-for ( i in 1:length( tests[[1]] ) ){
-    splitTestList <- strsplit( tests[[1]][ i ], "-" )
-    testList <- splitTestList[[1]][2]
-    titles[[i]] <- splitTestList[[1]][1]
-
-    testsCommand <- "'"
-    for ( test in as.list( strsplit( testList, "," )[[1]] ) ){
-        testsCommand <- paste( testsCommand, test, "','", sep="" )
-    }
-    testsCommand <- substr( testsCommand, 0, nchar( testsCommand ) - 2 )
-
-    command <- paste( "SELECT * ",
-                      "FROM executed_test_tests a ",
-                      "WHERE ( SELECT COUNT( * ) FROM executed_test_tests b ",
-                      "WHERE b.branch='",
-                      args[ branchName ],
-                      "' AND b.actual_test_name IN (",
-                      testsCommand,
-                      ") AND a.actual_test_name = b.actual_test_name AND a.date <= b.date AND b.build ", operator,
-                      args[ buildToShow ],
-                      " ) = ",
-                      1,
-                      " AND a.branch='",
-                      args[ branchName ],
-                      "' AND a.actual_test_name IN (",
-                      testsCommand,
-                      ") AND a.build ", operator,
-                      args[ buildToShow ],
-                      " ORDER BY a.actual_test_name DESC, a.date DESC",
-                      sep="")
-    print( "Sending SQL command:" )
-    print( command )
-    dbResults[[i]] <- dbGetQuery( con, command )
-}
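-
-# The correlated subquery above keeps, for each test, only its single newest
-# row among builds matching the operator (= for one build, >= for "latest"):
-# COUNT( * ) over rows at least as recent equals 1 exactly at the newest
-# matching row.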
-
-print( "dbResult:" )
-print( dbResults )
-
-# -------------------------------
-# Create Title and Graph Filename
-# -------------------------------
-
-print( "Creating title of graph." )
-
-titlePrefix <- paste( args[ testSuiteName ], " ", sep="" )
-if ( args[ testSuiteName ] == "ALL" ){
-    titlePrefix <- ""
-}
-
-title <- paste( titlePrefix,
-                "Summary of Test Suites - ",
-                args[ branchName ],
-                buildTitle,
-                sep="" )
-
-print( "Creating graph filename." )
-
-outputFile <- paste( args[ saveDirectory ],
-                     args[ testSuiteName ],
-                     "_",
-                     args[ branchName ],
-                     "_build-",
-                     fileBuildToShow,
-                     "_test-suite-summary.jpg",
-                     sep="" )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-passNum <- list()
-failNum <- list()
-exeNum <- list()
-skipNum <- list()
-totalNum <- list()
-
-passPercent <- list()
-failPercent <- list()
-exePercent <- list()
-nonExePercent <- list()
-
-actualPassPercent <- list()
-actualFailPercent <- list()
-
-appName <- c()
-afpName <- c()
-nepName <- c()
-
-tmpPos <- c()
-tmpCases <- c()
-
-for ( i in 1:length( dbResults ) ){
-    t <- dbResults[[i]]
-
-    passNum[[i]] <- sum( t$num_passed )
-    failNum[[i]] <- sum( t$num_failed )
-    exeNum[[i]] <- passNum[[i]] + failNum[[i]]
-    totalNum[[i]] <- sum( t$num_planned )
-    skipNum[[i]] <- totalNum[[i]] - exeNum[[i]]
-
-    passPercent[[i]] <- passNum[[i]] / exeNum[[i]]
-    failPercent[[i]] <- failNum[[i]] / exeNum[[i]]
-    exePercent[[i]] <- exeNum[[i]] / totalNum[[i]]
-    nonExePercent[[i]] <- ( 1 - exePercent[[i]] ) * 100
-
-    actualPassPercent[[i]] <- passPercent[[i]] * exePercent[[i]] * 100
-    actualFailPercent[[i]] <- failPercent[[i]] * exePercent[[i]] * 100
-
-    appName <- c( appName, "Passed" )
-    afpName <- c( afpName, "Failed" )
-    nepName <- c( nepName, "Skipped/Unexecuted" )
-
-    tmpPos <- c( tmpPos, 100 - ( nonExePercent[[i]] / 2 ), actualPassPercent[[i]] + actualFailPercent[[i]] - ( actualFailPercent[[i]] / 2 ), actualPassPercent[[i]] - ( actualPassPercent[[i]] / 2 ) )
-    tmpCases <- c( tmpCases, skipNum[[i]], failNum[[i]], passNum[[i]] )
-}
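-
-# Worked example with hypothetical counts: 18 passed, 2 failed, 25 planned
-# gives exePercent = 20 / 25 = 0.8, actualPassPercent = ( 18 / 20 ) * 0.8 * 100 = 72,
-# actualFailPercent = ( 2 / 20 ) * 0.8 * 100 = 8 and nonExePercent = 20,
-# so the three segments of each bar sum to 100.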
-
-relativePosLength <- length( dbResults ) * 3
-
-relativePos <- c()
-relativeCases <- c()
-
-for ( i in 1:3 ){
-    relativePos <- c( relativePos, tmpPos[ seq( i, relativePosLength, 3 ) ] )
-    relativeCases <- c( relativeCases, tmpCases[ seq( i, relativePosLength, 3 ) ] )
-}
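-
-# A small sketch (hypothetical values) of the regrouping above: with two
-# suites the per-suite triplets arrive interleaved as
-#     tmpCases <- c( 5, 2, 18,     # suite 1: skipped, failed, passed
-#                    0, 1, 24 )    # suite 2: skipped, failed, passed
-# and come out grouped by status as c( 5, 0, 2, 1, 18, 24 ), matching the
-# stacking order of the melted data frame.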
-names( actualPassPercent ) <- appName
-names( actualFailPercent ) <- afpName
-names( nonExePercent ) <- nepName
-
-labels <- paste( titles, "\n", totalNum, " Test Cases", sep="" )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame" )
-
-dataFrame <- melt( c( nonExePercent, actualFailPercent, actualPassPercent ) )
-dataFrame$title <- seq( 1, length( dbResults ), by = 1 )
-colnames( dataFrame ) <- c( "perc", "key", "suite" )
-
-dataFrame$xtitles <- labels
-dataFrame$relativePos <- relativePos
-dataFrame$relativeCases <- relativeCases
-dataFrame$valueDisplay <- c( paste( round( dataFrame$perc, digits = 2 ), "% - ", relativeCases, " Tests", sep="" ) )
-
-dataFrame$key <- factor( dataFrame$key, levels=unique( dataFrame$key ) )
-
-dataFrame$willDisplayValue <- dataFrame$perc > 15.0 / length( dbResults )
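-
-# Percentage labels are dropped from segments too thin to hold them; the
-# cutoff shrinks as more suites share the plot width, e.g. 15 / 3 = 5
-# percent when three suites are drawn.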
-
-for ( i in 1:nrow( dataFrame ) ){
-    if ( relativeCases[[i]] == "1" ){
-        dataFrame[ i, "valueDisplay" ] <- c( paste( round( dataFrame$perc[[i]], digits = 2 ), "% - ", relativeCases[[i]], " Test", sep="" ) )
-    }
-    if ( !dataFrame[ i, "willDisplayValue" ] ){
-        dataFrame[ i, "valueDisplay" ] <- ""
-    }
-}
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating main plot." )
-# Create the primary plot here.
-# ggplot takes the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: the test suite
-#         - y: the percentage of test cases in each status
-#         - fill: the status category of each stacked segment
-
-# -------------------
-# Main Plot Formatted
-# -------------------
-
-print( "Formatting main plot." )
-mainPlot <- ggplot( data = dataFrame, aes( x = suite,
-                                           y = perc,
-                                           fill = key ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
-
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$suite,
-                                    label = dataFrame$xtitles )
-yScaleConfig <- scale_y_continuous( breaks = seq( 0, 100,
-                                    by = 10 ) )
-
-xLabel <- xlab( "" )
-yLabel <- ylab( "Total Test Cases (%)" )
-
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-# Set other graph configurations here.
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                axis.text.x = element_text( angle = 0, size = 25 - 1.25 * length( dbResults ) ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = title, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        xScaleConfig +
-                        yScaleConfig +
-                        xLabel +
-                        yLabel +
-                        theme +
-                        title
-
-# ---------------------------
-# Generating Bar Graph Format
-# ---------------------------
-
-print( "Generating bar graph." )
-
-unexecutedColor <- "#CCCCCC"    # Gray
-failedColor <- "#E02020"        # Red
-passedColor <- "#16B645"        # Green
-
-colors <- scale_fill_manual( values=c( if ( "Skipped/Unexecuted" %in% dataFrame$key ){ unexecutedColor },
-                                       if ( "Failed" %in% dataFrame$key ){ failedColor },
-                                       if ( "Passed" %in% dataFrame$key ){ passedColor } ) )
-
-barGraphFormat <- geom_bar( stat = "identity", width = 0.8 )
-
-barGraphValues <- geom_text( aes( x = dataFrame$suite,
-                                  y = dataFrame$relativePos,
-                                  label = format( paste( dataFrame$valueDisplay ) ) ),
-                             size = 15.50 / length( dbResults ) + 2.33,
-                             fontface = "bold" )
-
-result <- fundamentalGraphData +
-          colors +
-          barGraphFormat +
-          barGraphValues
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving result graph to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote result graph out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/testCategoryPiePassFail.R b/TestON/JenkinsFile/scripts/testCategoryPiePassFail.R
deleted file mode 100644
index 0b731b5..0000000
--- a/TestON/JenkinsFile/scripts/testCategoryPiePassFail.R
+++ /dev/null
@@ -1,341 +0,0 @@
-# Copyright 2018 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-databaseHost <- 1
-databasePort <- 2
-databaseUserID <- 3
-databasePassword <- 4
-testSuiteName <- 5
-branchName <- 6
-testsToInclude <- 7
-buildToShow <- 8
-isDisplayingPlan <- 9
-saveDirectory <- 10
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ saveDirectory ] ) ){
-
-    print( paste( "Usage: Rscript testCategoryPiePassFail.R",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-suite-name>",
-                                  "<branch-name>",
-                                  "<tests-to-include-(as-one-string)>",
-                                  "<build-to-show>",
-                                  "<is-displaying-plan>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ databaseHost ],
-                  port = strtoi( args[ databasePort ] ),
-                  user = args[ databaseUserID ],
-                  password = args[ databasePassword ] )
-
-# ---------------------
-# Test Case SQL Command
-# ---------------------
-
-print( "Generating Test Case SQL command." )
-
-tests <- "'"
-for ( test in as.list( strsplit( args[ testsToInclude ], "," )[[1]] ) ){
-    tests <- paste( tests, test, "','", sep="" )
-}
-tests <- substr( tests, 0, nchar( tests ) - 2 )
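-
-# Worked example (hypothetical input): "FUNCflow,FUNCintent" becomes
-# "'FUNCflow','FUNCintent'", ready for the SQL IN ( ... ) clause below;
-# substr() trims the trailing ",'" left by the final loop iteration.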
-
-fileBuildToShow <- args[ buildToShow ]
-operator <- "= "
-buildTitle <- ""
-if ( args[ buildToShow ] == "latest" ){
-    buildTitle <- "\nLatest Test Results"
-    operator <- ">= "
-    # 1000 matches the lowest build number of the current pipeline (see pipelineMinValue in testCategoryTrend.R).
-    args[ buildToShow ] <- "1000"
-} else {
-    buildTitle <- paste( " \n Build #", args[ buildToShow ], sep="" )
-}
-
-command <- paste( "SELECT * ",
-                  "FROM executed_test_tests a ",
-                  "WHERE ( SELECT COUNT( * ) FROM executed_test_tests b ",
-                  "WHERE b.branch='",
-                  args[ branchName ],
-                  "' AND b.actual_test_name IN (",
-                  tests,
-                  ") AND a.actual_test_name = b.actual_test_name AND a.date <= b.date AND b.build ", operator,
-                  args[ buildToShow ],
-                  " ) = ",
-                  1,
-                  " AND a.branch='",
-                  args[ branchName ],
-                  "' AND a.actual_test_name IN (",
-                  tests,
-                  ") AND a.build ", operator,
-                  args[ buildToShow ],
-                  " ORDER BY a.actual_test_name DESC, a.date DESC",
-                  sep="")
-
-print( "Sending SQL command:" )
-print( command )
-
-dbResult <- dbGetQuery( con, command )
-
-print( "dbResult:" )
-print( dbResult )
-
-# -------------------------------
-# Create Title and Graph Filename
-# -------------------------------
-
-print( "Creating title of graph." )
-
-typeOfPieTitle <- "Executed Results"
-typeOfPieFile <- "_passfail"
-isPlannedPie <- FALSE
-if ( args[ isDisplayingPlan ] == "y" ){
-    typeOfPieTitle <- "Test Execution"
-    typeOfPieFile <- "_executed"
-    isPlannedPie <- TRUE
-}
-
-title <- paste( args[ testSuiteName ],
-                " Tests: Summary of ",
-                typeOfPieTitle,
-                " - ",
-                args[ branchName ],
-                buildTitle,
-                sep="" )
-
-print( "Creating graph filename." )
-
-outputFile <- paste( args[ saveDirectory ],
-                     args[ testSuiteName ],
-                     "_",
-                     args[ branchName ],
-                     "_build-",
-                     fileBuildToShow,
-                     typeOfPieFile,
-                     "_pieChart.jpg",
-                     sep="" )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-t <- subset( dbResult, select=c( "actual_test_name", "num_passed", "num_failed", "num_planned" ) )
-
-executedTests <- sum( t$num_passed ) + sum( t$num_failed )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-print( "Constructing Data Frame." )
-
-if ( isPlannedPie ){
-
-    nonExecutedTests <- sum( t$num_planned ) - executedTests
-    totalTests <- sum( t$num_planned )
-
-    executedPercent <- round( executedTests / totalTests * 100, digits = 2 )
-    nonExecutedPercent <- 100 - executedPercent
-
-    dfData <- c( nonExecutedPercent, executedPercent )
-
-    labels <- c( "Executed Test Cases", "Skipped Test Cases" )
-
-    # Note: <- (rather than =) inside data.frame() also creates rawData,
-    # displayedData and names in the calling environment; the aes() mappings
-    # further below resolve against those objects rather than column names.
-    dataFrame <- data.frame(
-        rawData <- dfData,
-        displayedData <- c( paste( nonExecutedPercent, "%\n", nonExecutedTests, " / ", totalTests, " Tests", sep="" ), paste( executedPercent, "%\n", executedTests, " / ", totalTests," Tests", sep="" ) ),
-        names <- factor( rev( labels ), levels = labels ) )
-} else {
-
-    sumPassed <- sum( t$num_passed )
-    sumFailed <- sum( t$num_failed )
-    sumExecuted <- sumPassed + sumFailed
-
-    percentPassed <- sumPassed / sumExecuted
-    percentFailed <- sumFailed / sumExecuted
-
-    dfData <- c( percentFailed, percentPassed )
-    labels <- c( "Failed Test Cases", "Passed Test Cases" )
-
-    dataFrame <- data.frame(
-        rawData <- dfData,
-        displayedData <- c( paste( round( percentFailed * 100, 2 ), "%\n", sumFailed, " / ", sumExecuted, " Tests", sep="" ), paste( round( percentPassed * 100, 2 ), "%\n", sumPassed, " / ", sumExecuted, " Tests", sep="" ) ),
-        names <- factor( labels, levels = rev( labels ) ) )
-}
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating main plot." )
-# Create the primary plot here.
-# ggplot takes the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: a single dummy category (the bar is bent into a pie later)
-#         - y: the percentage taken up by each slice
-#         - fill: the category of each slice
-
-mainPlot <- ggplot( data = dataFrame,
-                    aes( x = "", y=rawData, fill = names ) )
-
-# -------------------
-# Main Plot Formatted
-# -------------------
-
-print( "Formatting main plot." )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
-
-imageWidth <- 12
-imageHeight <- 10
-imageDPI <- 200
-
-# Set other graph configurations here.
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 30, face = 'bold' ),
-                axis.text.x = element_blank(),
-                axis.title.x = element_blank(),
-                axis.title.y = element_blank(),
-                axis.ticks = element_blank(),
-                panel.border = element_blank(),
-                panel.grid=element_blank(),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = title, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        theme +
-                        title
-
-# ---------------------------
-# Generating Pie Chart Format
-# ---------------------------
-
-print( "Generating pie chart." )
-
-if ( isPlannedPie ){
-    executedColor <- "#00A5FF"      # Blue
-    nonExecutedColor <- "#CCCCCC"   # Gray
-    pieColors <- scale_fill_manual( values = c( executedColor, nonExecutedColor ) )
-} else {
-    passColor <- "#16B645"          # Green
-    failColor <- "#E02020"          # Red
-    pieColors <- scale_fill_manual( values = c( passColor, failColor ) )
-}
-
-pieFormat <- geom_bar( width = 1, stat = "identity" )
-pieLabels <- geom_text( aes( y = rawData / length( rawData ) + c( 0, cumsum( rawData )[ -length( rawData ) ] ) ),
-                        label = dataFrame$displayedData,
-                        size = 7,
-                        fontface = "bold" )
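-
-# A quick check of the label placement (hypothetical slice sizes): with two
-# slices, rawData / length( rawData ) is half of each slice, so
-# rawData = c( 30, 70 ) puts labels at 30 / 2 = 15 and 70 / 2 + 30 = 65,
-# the midpoints of the two stacked segments.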
-
-result <- fundamentalGraphData +
-          pieFormat +
-          coord_polar( "y" ) +
-          pieLabels +
-          pieColors
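-
-# coord_polar( "y" ) bends the single stacked bar into a pie: each segment's
-# share of the bar height becomes its share of the circle.
-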
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving result graph to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote result graph out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/scripts/testCategoryTrend.R b/TestON/JenkinsFile/scripts/testCategoryTrend.R
deleted file mode 100644
index 33664b0..0000000
--- a/TestON/JenkinsFile/scripts/testCategoryTrend.R
+++ /dev/null
@@ -1,325 +0,0 @@
-# Copyright 2017 Open Networking Foundation (ONF)
-#
-# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
-# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
-# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
-#
-#     TestON is free software: you can redistribute it and/or modify
-#     it under the terms of the GNU General Public License as published by
-#     the Free Software Foundation, either version 2 of the License, or
-#     (at your option) any later version.
-#
-#     TestON is distributed in the hope that it will be useful,
-#     but WITHOUT ANY WARRANTY; without even the implied warranty of
-#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#     GNU General Public License for more details.
-#
-#     You should have received a copy of the GNU General Public License
-#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
-#
-# If you have any questions, or if you don't understand R,
-# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
-
-# Builds at or above this number belong to the current Jenkins pipeline.
-pipelineMinValue <- 1000
-
-# **********************************************************
-# STEP 1: Data management.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 1: Data management." )
-print( "**********************************************************" )
-
-# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
-print( "Reading commmand-line args." )
-args <- commandArgs( trailingOnly=TRUE )
-
-databaseHost <- 1
-databasePort <- 2
-databaseUserID <- 3
-databasePassword <- 4
-testSuiteName <- 5
-branchName <- 6
-testsToInclude <- 7
-buildsToShow <- 8
-saveDirectory <- 9
-
-# ----------------
-# Import Libraries
-# ----------------
-
-print( "Importing libraries." )
-library( ggplot2 )
-library( reshape2 )
-library( RPostgreSQL )
-
-# -------------------
-# Check CLI Arguments
-# -------------------
-
-print( "Verifying CLI args." )
-
-if ( is.na( args[ saveDirectory ] ) ){
-
-    print( paste( "Usage: Rscript testCategoryTrend.R",
-                                  "<database-host>",
-                                  "<database-port>",
-                                  "<database-user-id>",
-                                  "<database-password>",
-                                  "<test-suite-name>",
-                                  "<branch-name>",
-                                  "<tests-to-include-(as-one-string)>",
-                                  "<builds-to-show>",
-                                  "<directory-to-save-graphs>",
-                                  sep=" " ) )
-
-    quit( status = 1 )  # basically exit(), but in R
-}
-
-# -------------------------------
-# Create Title and Graph Filename
-# -------------------------------
-
-print( "Creating title of graph." )
-
-title <- paste( args[ testSuiteName ],
-                " Test Results Trend - ",
-                args[ branchName ],
-                " \n Results of Last ",
-                args[ buildsToShow ],
-                " Nightly Builds",
-                sep="" )
-
-print( "Creating graph filename." )
-
-outputFile <- paste( args[ saveDirectory ],
-                     args[ testSuiteName ],
-                     "_",
-                     args[ branchName ],
-                     "_overview.jpg",
-                     sep="" )
-
-# ------------------
-# SQL Initialization
-# ------------------
-
-print( "Initializing SQL" )
-
-con <- dbConnect( dbDriver( "PostgreSQL" ),
-                  dbname = "onostest",
-                  host = args[ databaseHost ],
-                  port = strtoi( args[ databasePort ] ),
-                  user = args[ databaseUserID ],
-                  password = args[ databasePassword ] )
-
-# ---------------------
-# Test Case SQL Command
-# ---------------------
-print( "Generating Test Case SQL command." )
-
-tests <- "'"
-for ( test in as.list( strsplit( args[ testsToInclude ], "," )[[1]] ) ){
-    tests <- paste( tests, test, "','", sep="" )
-}
-tests <- substr( tests, 0, nchar( tests ) - 2 )
-
-command <- paste( "SELECT * ",
-                  "FROM executed_test_tests a ",
-                  "WHERE ( SELECT COUNT( * ) FROM executed_test_tests b ",
-                  "WHERE b.branch='",
-                  args[ branchName ],
-                  "' AND b.actual_test_name IN (",
-                  tests,
-                  ") AND a.actual_test_name = b.actual_test_name AND a.date <= b.date AND b.build >= ",
-                  pipelineMinValue,
-                  " ) <= ",
-                  args[ buildsToShow ],
-                  " AND a.branch='",
-                  args[ branchName ],
-                  "' AND a.actual_test_name IN (",
-                  tests,
-                  ") AND a.build >= ",
-                  pipelineMinValue,
-                  " ORDER BY a.actual_test_name DESC, a.date DESC",
-                  sep="")
-
-print( "Sending SQL command:" )
-print( command )
-dbResult <- dbGetQuery( con, command )
-maxBuild <- max( dbResult[ 'build' ] ) - strtoi( args[ buildsToShow ] )
-# Keep only the rows from the most recent <builds-to-show> builds.
-dbResult <- dbResult[ which( dbResult[ , 4 ] > maxBuild ), ]
-print( dbResult )
-
-# **********************************************************
-# STEP 2: Organize data.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 2: Organize Data." )
-print( "**********************************************************" )
-
-t <- subset( dbResult, select=c( "actual_test_name", "build", "num_failed" ) )
-# Collapse num_failed into a 0/1 flag: 0 stays 0, any positive count becomes 1.
-t$num_failed <- ceiling( t$num_failed / ( t$num_failed + 1 ) )
-t$num_planned <- 1
-
-fileData <- aggregate( t$num_failed, by=list( Category=t$build ), FUN=sum )
-colnames( fileData ) <- c( "build", "num_failed" )
-
-fileData$num_planned <- ( aggregate( t$num_planned, by=list( Category=t$build ), FUN=sum ) )$x
-fileData$num_passed <- fileData$num_planned - fileData$num_failed
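-
-# After aggregation each build contributes one row: num_planned counts the
-# tests that ran, num_failed counts tests with at least one failing case,
-# and num_passed is the remainder; e.g. 14 tests with 3 failing gives
-# num_planned / num_failed / num_passed of 14 / 3 / 11.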
-
-print( fileData )
-
-# --------------------
-# Construct Data Frame
-# --------------------
-
-dataFrame <- melt( subset( fileData, select=c( "num_failed", "num_passed", "num_planned" ) ) )
-dataFrame$build <- fileData$build
-colnames( dataFrame ) <- c( "status", "results", "build" )
-
-dataFrame$num_failed <- fileData$num_failed
-dataFrame$num_passed <- fileData$num_passed
-dataFrame$num_planned <- fileData$num_planned
-dataFrame$iterative <- seq( 1, nrow( fileData ), by = 1 )
-
-print( "Data Frame Results:" )
-print( dataFrame )
-
-# **********************************************************
-# STEP 3: Generate graphs.
-# **********************************************************
-
-print( "**********************************************************" )
-print( "STEP 3: Generate Graph." )
-print( "**********************************************************" )
-
-# -------------------
-# Main Plot Generated
-# -------------------
-
-print( "Creating main plot." )
-# Create the primary plot here.
-# ggplot takes the following arguments:
-#     - data: the data frame that the graph will be based on
-#     - aes: the aesthetics of the graph, which require:
-#         - x: x-axis values (usually iterative, but relabeled with build # later)
-#         - y: y-axis values (usually tests)
-#         - color: the category of the colored lines (usually status of test)
-
-mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
-                                           y = results,
-                                           color = status ) )
-
-# -------------------
-# Main Plot Formatted
-# -------------------
-
-print( "Formatting main plot." )
-
-# geom_ribbon is used so that there is a colored fill below the lines. These values shouldn't be changed.
-failedColor <- geom_ribbon( aes( ymin = 0,
-                                 ymax = dataFrame$num_failed ),
-                                 fill = "#ff0000",
-                                 linetype = 0,
-                                 alpha = 0.07 )
-
-passedColor <- geom_ribbon( aes( ymin = 0,
-                                 ymax = dataFrame$num_passed ),
-                                 fill = "#0083ff",
-                                 linetype = 0,
-                                 alpha = 0.05 )
-
-plannedColor <- geom_ribbon( aes( ymin = 0,
-                                  ymax = dataFrame$num_planned ),
-                                  fill = "#000000",
-                                  linetype = 0,
-                                  alpha = 0.01 )
-
-# Colors for the lines
-lineColors <- scale_color_manual( values = c( "#ff0000",      # fail
-                                              "#0083ff",      # pass
-                                              "#000000" ),    # planned
-                                  labels = c( "Containing Failures",
-                                              "No Failures",
-                                              "Total Built" ) )
-
-# ------------------------------
-# Fundamental Variables Assigned
-# ------------------------------
-
-print( "Generating fundamental graph data." )
-
-theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
-
-xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
-                                    label = dataFrame$build )
-yScaleConfig <- scale_y_continuous( breaks = seq( 0, max( dataFrame$results ),
-                                    by = ceiling( max( dataFrame$results ) / 10 ) ) )
-
-xLabel <- xlab( "Build Number" )
-yLabel <- ylab( "Tests" )
-
-imageWidth <- 15
-imageHeight <- 10
-imageDPI <- 200
-
-# Set other graph configurations here.
-theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
-                axis.text.x = element_text( angle = 0, size = 14 ),
-                legend.position = "bottom",
-                legend.text = element_text( size = 22 ),
-                legend.title = element_blank(),
-                legend.key.size = unit( 1.5, 'lines' ),
-                plot.subtitle = element_text( size=16, hjust=1.0 ) )
-
-subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
-
-title <- labs( title = title, subtitle = subtitle )
-
-# Store plot configurations as 1 variable
-fundamentalGraphData <- mainPlot +
-                        plannedColor +
-                        passedColor +
-                        failedColor +
-                        xScaleConfig +
-                        yScaleConfig +
-                        xLabel +
-                        yLabel +
-                        theme +
-                        title +
-                        lineColors
-
-# ----------------------------
-# Generating Line Graph Format
-# ----------------------------
-
-print( "Generating line graph." )
-
-lineGraphFormat <- geom_line( size = 1.1 )
-pointFormat <- geom_point( size = 3 )
-
-result <- fundamentalGraphData +
-           lineGraphFormat +
-           pointFormat
-
-# -----------------------
-# Exporting Graph to File
-# -----------------------
-
-print( paste( "Saving result graph to", outputFile ) )
-
-tryCatch( ggsave( outputFile,
-                  width = imageWidth,
-                  height = imageHeight,
-                  dpi = imageDPI ),
-          error = function( e ){
-              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
-              print( e )
-              quit( status = 1 )
-          }
-        )
-
-print( paste( "[SUCCESS] Successfully wrote result graph out to", outputFile ) )
-quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R b/TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R
index 32b11e7..b1ef7a7 100644
--- a/TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R
+++ b/TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R
@@ -18,7 +18,7 @@
 #     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 #
 # Example script:
-# Rscript trendCHO event.csv failure.csv error.csv master 60 /path/to/save/directory/
+# Rscript trendCHO event.csv failure.csv error.csv master 60 168 /path/to/save/directory/
 
 # **********************************************************
 # STEP 1: Data management.
@@ -276,10 +276,10 @@
 events_dataFrame$Type <- as.character( events_dataFrame$Type )
 events_dataFrame$Type <- factor( events_dataFrame$Type, levels = unique( events_dataFrame$Type ) )
 
-events_dataFrame$timeStamps <- rev( gsub('^(.{11})(.*)$', '\\1\n\\2', event_fileData$Time ) )
+events_dataFrame$timeStamps <- gsub('^(.{11})(.*)$', '\\1\n\\2', event_fileData$Time )
 
-# Adding a temporary reversed iterative list to the events_dataFrame so that there are no gaps in-between build numbers.
+# Adding a temporary iterative list to the events_dataFrame so that there are no gaps between build numbers.
-events_dataFrame$iterative <- rev( seq( 1, nrow( event_fileData ), by = 1 ) )
+events_dataFrame$iterative <- seq( 1, nrow( event_fileData ), by = 1 )
 
 # Omit any data that doesn't exist
 events_dataFrame <- na.omit( events_dataFrame )
@@ -315,10 +315,10 @@
 failures_dataFrame$Type <- as.character( failures_dataFrame$Type )
 failures_dataFrame$Type <- factor( failures_dataFrame$Type, levels = unique( failures_dataFrame$Type ) )
 
-failures_dataFrame$timeStamps <- rev( gsub('^(.{11})(.*)$', '\\1\n\\2', failure_fileData$Time ) )
+failures_dataFrame$timeStamps <- gsub('^(.{11})(.*)$', '\\1\n\\2', failure_fileData$Time )
 
-# Adding a temporary reversed iterative list to the failures_dataFrame so that there are no gaps in-between build numbers.
+# Adding a temporary iterative list to the failures_dataFrame so that there are no gaps between build numbers.
-failures_dataFrame$iterative <- rev( seq( 1, nrow( failure_fileData ), by = 1 ) )
+failures_dataFrame$iterative <- seq( 1, nrow( failure_fileData ), by = 1 )
 
 # Omit any data that doesn't exist
 failures_dataFrame <- na.omit( failures_dataFrame )
@@ -522,7 +522,7 @@
 
 print( "Generating 'errors' graph data." )
 
-yLabel <- ylab( "Errors" )
+yLabel <- ylab( "Warnings/Errors" )
 
 xScaleConfig <- scale_x_continuous( breaks = errors_dataFrame$iterative,
                                     label = errors_dataFrame$timeStamps )
diff --git a/TestON/drivers/common/cli/emulator/mininetclidriver.py b/TestON/drivers/common/cli/emulator/mininetclidriver.py
index c5ee51d..a0a64e9 100644
--- a/TestON/drivers/common/cli/emulator/mininetclidriver.py
+++ b/TestON/drivers/common/cli/emulator/mininetclidriver.py
@@ -473,7 +473,7 @@
                     # Current host pings all other hosts specified
                     pingCmd = str( host ) + cmd + str( temp )
                     self.handle.sendline( pingCmd )
-                    self.handle.expect( "mininet>", timeout=wait + 1 )
+                    self.handle.expect( "mininet>", timeout=wait + 5 )
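+                    # wait + 5 leaves pexpect some headroom beyond the ping wait itself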
                     response = self.handle.before
                     if re.search( ',\s0\%\spacket\sloss', response ):
                         pingResponse += str( " h" + str( temp[ 1: ] ) )
@@ -536,7 +536,7 @@
                     while failedPings <= acceptableFailed:
                         main.log.debug( "Pinging from " + str( host ) + " to " + str( temp ) )
                         self.handle.sendline( pingCmd )
-                        self.handle.expect( "mininet>", timeout=wait + 1 )
+                        self.handle.expect( "mininet>", timeout=wait + 5 )
                         response = self.handle.before
                         if re.search( ',\s0\%\spacket\sloss', response ):
                             pingResponse += " " + str( temp )
@@ -606,9 +606,9 @@
                     discoveryResult = main.FALSE
                 if cmd:
                     self.handle.sendline( "{} ip neigh flush all".format( host ) )
-                    self.handle.expect( "mininet>", timeout=wait + 1 )
+                    self.handle.expect( "mininet>" )
                     self.handle.sendline( cmd )
-                    self.handle.expect( "mininet>", timeout=wait + 1 )
+                    self.handle.expect( "mininet>", timeout=wait + 5 )
             return discoveryResult
         except pexpect.TIMEOUT:
             main.log.exception( self.name + ": TIMEOUT exception" )
@@ -655,7 +655,7 @@
                     while failedPings <= acceptableFailed:
                         main.log.debug( "Pinging from " + str( host ) + " to " + str( temp ) )
                         self.handle.sendline( pingCmd )
-                        self.handle.expect( "mininet>", timeout=wait + 1 )
+                        self.handle.expect( "mininet>", timeout=wait + 5 )
                         response = self.handle.before
                         if re.search( ',\s0\%\spacket\sloss', response ):
                             pingResponse += " " + str( temp )
@@ -707,7 +707,7 @@
             main.log.info( "Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ command, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
@@ -752,7 +752,7 @@
             main.log.info( "Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ command, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
@@ -806,7 +806,7 @@
                 i = self.handle.expect( [ self.hostPrompt,
                                           '\*\*\* Unknown command: ' + pingCmd,
                                           pexpect.TIMEOUT ],
-                                        timeout=wait + 1 )
+                                        timeout=wait + 5 )
                 # For some reason we need to send something
                 # Otherwise ping results won't be read by handle
                 self.handle.sendline( "" )
diff --git a/TestON/drivers/common/cli/emulator/mininethostdriver.py b/TestON/drivers/common/cli/emulator/mininethostdriver.py
index 408f918..360fd9a 100644
--- a/TestON/drivers/common/cli/emulator/mininethostdriver.py
+++ b/TestON/drivers/common/cli/emulator/mininethostdriver.py
@@ -145,7 +145,7 @@
             main.log.info( self.name + ": Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ self.hostPrompt, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
@@ -179,7 +179,7 @@
             main.log.info( self.name + ": Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ self.hostPrompt, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
diff --git a/TestON/drivers/common/cli/hostdriver.py b/TestON/drivers/common/cli/hostdriver.py
index 238721e..4156902 100644
--- a/TestON/drivers/common/cli/hostdriver.py
+++ b/TestON/drivers/common/cli/hostdriver.py
@@ -145,7 +145,7 @@
             main.log.info( self.name + ": Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ self.prompt, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
@@ -179,7 +179,7 @@
             main.log.info( self.name + ": Sending: " + command )
             self.handle.sendline( command )
             i = self.handle.expect( [ self.prompt, pexpect.TIMEOUT ],
-                                    timeout=wait + 1 )
+                                    timeout=wait + 5 )
             if i == 1:
                 main.log.error(
                     self.name +
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
index 9c97ebb..fc3f0b3 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
@@ -1,5 +1,6 @@
 
 class SRRouting:
+
     def __init__( self ):
         self.default = ''
 
@@ -468,6 +469,64 @@
         verifyOnosFailure( main, internal=False )
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
 
+    def CASE601( self, main ):
+        """
+        Bring down all switches
+        Verify Topology
+        Bring up all switches
+        Verify topology and connectivity
+
+        Repeat 3 times
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Bring down all switches then recover" )
+        setupTest( main, test_idx=601, external=False )
+        main.Cluster.next().CLI.balanceMasters()
+        time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+        main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+        totalSwitches = int( main.params[ 'TOPO' ][ 'switchNum' ] )
+        totalLinks = int( main.params[ 'TOPO' ][ 'linkNum' ] )
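+        # All 10 switches in the topology: 4 spines and 6 leaves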
+        switchList = [ 'spine101', 'spine102', 'spine103', 'spine104',
+                       'leaf1', 'leaf2', 'leaf3', 'leaf4', 'leaf5', 'leaf6' ]
+        verify( main, disconnected=False, external=False )
+        for i in range( 1, 4 ):
+            main.log.info( "Beginning iteration {} of stopping then starting all switches".format( i ) )
+            main.log.debug( main.Cluster.next().summary() )
+            # Bring down all switches
+            main.step( "Stopping switches - iteration " + str( i ) )
+            switchStop = main.TRUE
+            for switch in switchList:
+                switchStop = switchStop and main.Network.switch( SW=switch, OPTION="stop" )
+            utilities.assert_equals( expect=main.TRUE, actual=switchStop,
+                                     onpass="All switches stopped",
+                                     onfail="Failed to stop all switches" )
+
+            time.sleep( 60 )
+            lib.verifyTopology( main, 0, 0, main.Cluster.numCtrls )
+            # Bring up all switches
+            main.log.debug( main.Cluster.next().summary() )
+            main.step( "Starting switches - iteration " + str( i ) )
+            switchStart = main.TRUE
+            for switch in switchList:
+                switchStart = switchStart and main.Network.switch( SW=switch, OPTION="start" )
+            utilities.assert_equals( expect=main.TRUE, actual=switchStart,
+                                     onpass="All switches started",
+                                     onfail="Failed to start all switches" )
+
+            main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+            lib.verifyTopology( main, totalSwitches, totalLinks, main.Cluster.numCtrls )
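+            # Let the fabric settle, logging the cluster summary along the way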
+            main.log.debug( main.Cluster.next().summary() )
+            time.sleep( 60 )
+            main.log.debug( main.Cluster.next().summary() )
+            time.sleep( 60 )
+            main.log.debug( main.Cluster.next().summary() )
+            verifyPing( main )
+            verify( main, disconnected=False, external=False )
+        verify( main, disconnected=False, external=False )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
     def CASE603( self, main ):
         """"
         Drop HAGG-1 device and test connectivity.
@@ -476,7 +535,6 @@
 
         Repeat the same with HAGG-2 and DAAS-2.
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         main.case( "Drop hagg spine switch along with dass leaf switch." )
@@ -485,30 +543,48 @@
         main.disconnectedIpv6Hosts = []
 
         verify( main )
-        lib.killSwitch( main, "spine103", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine103",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.killSwitch( main, "leaf6", int( main.params[ "TOPO" ]["switchNum" ] ) - 2, int( main.params[ "TOPO" ][ "linkNum" ] ) - 8 )
+        lib.killSwitch( main, "leaf6",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 2,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 8 )
         main.disconnectedIpv4Hosts = [ 'h12v4', 'h13v4']
         main.disconnectedIpv6Hosts = [ 'h12v6', 'h13v6']
         verify( main )
-        lib.recoverSwitch( main, "leaf6", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6, rediscoverHosts=True)
+        lib.recoverSwitch( main, "leaf6",
+                           int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) - 6,
+                           rediscoverHosts=True)
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
         verify( main )
-        lib.recoverSwitch( main, "spine103", int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ))
+        lib.recoverSwitch( main, "spine103",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) )
         verify( main )
 
-        lib.killSwitch( main, "spine104", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine104",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.killSwitch( main, "leaf1", int( main.params[ "TOPO" ]["switchNum" ] ) - 2, int( main.params[ "TOPO" ][ "linkNum" ] ) - 8 )
+        lib.killSwitch( main, "leaf1",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 2,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 8 )
         main.disconnectedIpv4Hosts = [ 'h1v4', 'h2v4']
         main.disconnectedIpv6Hosts = [ 'h1v6', 'h2v6']
         verify( main )
-        lib.recoverSwitch( main, "leaf1", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6, rediscoverHosts=True)
+        lib.recoverSwitch( main, "leaf1",
+                           int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) - 6,
+                           rediscoverHosts=True )
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
         verify( main )
-        lib.recoverSwitch( main, "spine104", int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ))
+        lib.recoverSwitch( main, "spine104",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) )
         verify( main )
 
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
@@ -520,7 +596,6 @@
         Drop HAGG-2 device and test connectivity.
         Bring up HAGG-2 device and test connectivity
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         main.case( "Drop hagg spine switches." )
@@ -528,13 +603,21 @@
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
         verify( main )
-        lib.killSwitch( main, "spine103", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine103",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.recoverSwitch( main, "spine103", int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ))
+        lib.recoverSwitch( main, "spine103",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) )
         verify( main )
-        lib.killSwitch( main, "spine104", int( main.params[ "TOPO" ]["switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine104",
+                        int( main.params[ "TOPO" ]["switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.recoverSwitch( main, "spine104", int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ))
+        lib.recoverSwitch( main, "spine104",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) )
         verify( main )
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
 
@@ -553,18 +636,24 @@
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
         verify( main )
-        lib.killSwitch( main, "spine103", int( main.params[ "TOPO" ][ "switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine103",
+                        int( main.params[ "TOPO" ][ "switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.killSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ], int( main.params[ "TOPO" ][ "switchNum" ] ) - 5,
+        lib.killSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ],
+                        int( main.params[ "TOPO" ][ "switchNum" ] ) - 5,
                         int( main.params[ "TOPO" ][ "linkNum" ] ) - 42 )
         main.disconnectedIpv4Hosts = [ "h3v4", "h4v4", "h5v4", "h6v4", "h7v4", "h8v4", "h9v4", "h10v4", "h11v4" ]
         main.disconnectedIpv6Hosts = [ "h3v6", "h4v6", "h5v6", "h6v6", "h7v6", "h8v6", "h9v6", "h10v6", "h11v6" ]
         main.disconnectedExternalIpv4Hosts = [ "rh1v4", "rh2v4", "rh5v4" ]
         main.disconnectedExternalIpv6Hosts = [ "rh1v6", "rh11v6", "rh5v6", "rh2v6", "rh22v6" ]
         verify( main, disconnected=True )
-        lib.recoverSwitch( main, "spine103", int( main.params[ "TOPO" ][ "switchNum" ] ) - 4, int( main.params[ "TOPO" ][ "linkNum" ] ) - 36 )
+        lib.recoverSwitch( main, "spine103",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ) - 4,
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) - 36 )
         verify( main, disconnected=True )
-        lib.recoverSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+        lib.recoverSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ],
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
                            int( main.params[ "TOPO" ][ "linkNum" ] ) )
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
@@ -572,18 +661,24 @@
         main.disconnectedExternalIpv6Hosts = [ ]
         verify( main )
 
-        lib.killSwitch( main, "spine104", int( main.params[ "TOPO" ][ "switchNum" ] ) - 1, int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
+        lib.killSwitch( main, "spine104",
+                        int( main.params[ "TOPO" ][ "switchNum" ] ) - 1,
+                        int( main.params[ "TOPO" ][ "linkNum" ] ) - 6 )
         verify( main )
-        lib.killSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ], int( main.params[ "TOPO" ][ "switchNum" ] ) - 5,
+        lib.killSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ],
+                        int( main.params[ "TOPO" ][ "switchNum" ] ) - 5,
                         int( main.params[ "TOPO" ][ "linkNum" ] ) - 42 )
         main.disconnectedIpv4Hosts = [ "h3v4", "h4v4", "h5v4", "h6v4", "h7v4", "h8v4", "h9v4", "h10v4", "h11v4" ]
         main.disconnectedIpv6Hosts = [ "h3v6", "h4v6", "h5v6", "h6v6", "h7v6", "h8v6", "h9v6", "h10v6", "h11v6" ]
         main.disconnectedExternalIpv4Hosts = [ "rh1v4", "rh2v4", "rh5v4" ]
         main.disconnectedExternalIpv6Hosts = [ "rh1v6", "rh11v6", "rh5v6", "rh2v6", "rh22v6" ]
         verify( main, disconnected=True )
-        lib.recoverSwitch( main, "spine104", int( main.params[ "TOPO" ][ "switchNum" ] ) - 4, int( main.params[ "TOPO" ][ "linkNum" ] ) - 36 )
+        lib.recoverSwitch( main, "spine104",
+                           int( main.params[ "TOPO" ][ "switchNum" ] ) - 4,
+                           int( main.params[ "TOPO" ][ "linkNum" ] ) - 36 )
         verify( main, disconnected=True )
-        lib.recoverSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+        lib.recoverSwitch( main, [ "leaf2", "leaf3", "leaf4", "leaf5" ],
+                           int( main.params[ "TOPO" ][ "switchNum" ] ),
                            int( main.params[ "TOPO" ][ "linkNum" ] ) )
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
@@ -600,7 +695,6 @@
         Bring up the paired leaf and test connectivity
         Repeat above with SPINE-2 and a different paired leaf
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         main.case( "Drop spine and paired leaf" )
@@ -646,7 +740,6 @@
         check that buckets in select groups change accordingly
         Bring up links again and check that buckets in select groups change accordingly
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         main.case( "Take down one of double links towards the spine" )
@@ -719,7 +812,6 @@
         """
         Take down all uplinks from a paired leaf switch
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         from core import utilities
@@ -756,7 +848,6 @@
         Drop a device
         Bring that same instance up again and observe that this specific instance sees that the device is down.
         """
-        import time
         from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
         from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
         from core import utilities
@@ -773,6 +864,186 @@
                                  onfail="ONOS instance {} doesn't see correct device numbers".format( onosToKill ) )
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
 
+    def CASE640( self, main ):
+        """
+        A controller instance and a switch go down at the same time, then both are brought back up together
+
+        A. Instance goes down and SPINE-1 goes down
+            - All connectivity should be there
+            - Bring them up together
+            - All connectivity should be there
+        B. Instance goes down and HAGG-1 goes down
+            - All connectivity should be there
+            - Bring them up together
+            - All connectivity should be there
+        C. Instance goes down and a paired leaf switch goes down
+            - Single-homed hosts on this leaf should lose connectivity; all others should be OK
+            - Bring them up together
+            - Test connectivity
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Drop an ONOS instance and switch(es) at the same time" )
+        caseDict = { 'A': { 'switches': "spine101",
+                            'disconnectedV4': [],
+                            'disconnectedV6': [],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 30 },
+                     'B': { 'switches': "spine103",
+                            'disconnectedV4': [],
+                            'disconnectedV6': [],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 42 },
+                     'C': { 'switches': "leaf2",
+                            'disconnectedV4': [ "h3v4" ],
+                            'disconnectedV6': [ "h3v6" ],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 38 } }
+        totalSwitches = int( main.params[ 'TOPO' ][ 'switchNum' ] )
+        totalLinks = int( main.params[ 'TOPO' ][ 'linkNum' ] )
+        nodeIndex = 0
+        cases = sorted( caseDict.keys() )
+        for case in cases:
+            switches = caseDict[ case ][ 'switches' ]
+            expectedSwitches = caseDict[ case ][ 'expectedSwitches' ]
+            expectedLinks = caseDict[ case ][ 'expectedLinks' ]
+            main.step( "\n640{}: Drop ONOS{} and switch(es) {} at the same time".format( case,
+                                                                                         nodeIndex + 1,
+                                                                                         switches ) )
+            setupTest( main, test_idx=640 )
+            main.Cluster.next().CLI.balanceMasters()
+            time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+            main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+            instance = main.Cluster.controllers[ nodeIndex ]
+            verify( main, disconnected=False, external=False )
+
+            # Simultaneous failures
+            main.step( "Kill ONOS{}: {}".format( nodeIndex + 1, instance.ipAddress ) )
+            killResult = main.ONOSbench.onosDie( instance.ipAddress )
+            utilities.assert_equals( expect=main.TRUE, actual=killResult,
+                                     onpass="ONOS node killed",
+                                     onfail="Failed to kill ONOS node" )
+            instance.active = False
+            main.Cluster.reset()
+            # TODO: Remove sleeps from the concurrent events
+            lib.killSwitch( main, switches, expectedSwitches, expectedLinks )
+            main.disconnectedIpv4Hosts = caseDict[ case ][ 'disconnectedV4' ]
+            main.disconnectedIpv6Hosts = caseDict[ case ][ 'disconnectedV6' ]
+
+            # verify functionality
+            main.log.debug( main.Cluster.next().summary() )
+            main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+            main.log.debug( main.Cluster.next().summary() )
+            lib.verifyTopology( main, expectedSwitches, expectedLinks, main.Cluster.numCtrls - 1 )
+            lib.verifyNodes( main )
+            verify( main, external=False )
+
+            # Bring everything back up
+            lib.recoverSwitch( main, switches, totalSwitches, totalLinks, rediscoverHosts=True )
+            main.disconnectedIpv4Hosts = []
+            main.disconnectedIpv6Hosts = []
+            lib.recoverOnos( main, [ nodeIndex ], totalSwitches, totalLinks, main.Cluster.numCtrls )
+
+            # Verify functionality
+            lib.verifyNodes( main )
+            verify( main, disconnected=False, external=False )
+            lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+            nodeIndex = ( nodeIndex + 1 ) % main.Cluster.numCtrls
+
+    def CASE641( self, main ):
+        """
+        A controller instance goes down while a switch comes back up at the same time
+
+        A. Take down SPINE-1
+            - Test connectivity
+            - Bring up SPINE-1 and drop an instance at the same time
+            - Test connectivity
+            - Bring up the downed instance
+            - Test connectivity
+        B. Take down HAGG-1
+            - Test connectivity
+            - Bring up HAGG-1 and drop an instance at the same time
+            - Test connectivity
+            - Bring up the downed instance
+            - Test connectivity
+        C. Take down a paired leaf switch
+            - Test connectivity ( single-homed hosts on this leaf will lose it )
+            - Bring up paired leaf switch and drop a controller instance at the same time
+            - Test connectivity
+            - Bring up the instance
+            - Test connectivity
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Drop an ONOS instance and recover switch(es) at the same time" )
+        caseDict = { 'A': { 'switches': "spine101",
+                            'disconnectedV4': [],
+                            'disconnectedV6': [],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 30 },
+                     'B': { 'switches': "spine103",
+                            'disconnectedV4': [],
+                            'disconnectedV6': [],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 42 },
+                     'C': { 'switches': "leaf2",
+                            'disconnectedV4': [ "h3v4" ],
+                            'disconnectedV6': [ "h3v6" ],
+                            'expectedSwitches': 9,
+                            'expectedLinks': 38 } }
+        totalSwitches = int( main.params[ 'TOPO' ][ 'switchNum' ] )
+        totalLinks = int( main.params[ 'TOPO' ][ 'linkNum' ] )
+        nodeIndex = 0
+        cases = sorted( caseDict.keys() )
+        for case in cases:
+            switches = caseDict[ case ][ 'switches' ]
+            expectedSwitches = caseDict[ case ][ 'expectedSwitches' ]
+            expectedLinks = caseDict[ case ][ 'expectedLinks' ]
+            main.step( "\n641{}: Drop ONOS{} and recover switch(es) {} at the same time".format( case,
+                                                                                                 nodeIndex + 1,
+                                                                                                 switches ) )
+            setupTest( main, test_idx=641 )
+            main.Cluster.next().CLI.balanceMasters()
+            time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+            main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+            instance = main.Cluster.controllers[ nodeIndex ]
+            verify( main, disconnected=False, external=False )
+            # Drop the switch to setup scenario
+            lib.killSwitch( main, switches, expectedSwitches, expectedLinks )
+            main.disconnectedIpv4Hosts = caseDict[ case ][ 'disconnectedV4' ]
+            main.disconnectedIpv6Hosts = caseDict[ case ][ 'disconnectedV6' ]
+            verify( main, external=False )
+
+            # Simultaneous node failure and switch recovery
+            main.step( "Kill ONOS{}: {}".format( nodeIndex + 1, instance.ipAddress ) )
+            killResult = main.ONOSbench.onosDie( instance.ipAddress )
+            utilities.assert_equals( expect=main.TRUE, actual=killResult,
+                                     onpass="ONOS node killed",
+                                     onfail="Failed to kill ONOS node" )
+            instance.active = False
+            main.Cluster.reset()
+            # TODO: Remove sleeps from the concurrent events
+            lib.recoverSwitch( main, switches, totalSwitches, totalLinks, rediscoverHosts=True )
+            main.disconnectedIpv4Hosts = []
+            main.disconnectedIpv6Hosts = []
+
+            # verify functionality
+            main.log.debug( main.Cluster.next().summary() )
+            main.Network.discoverHosts( hostList=main.internalIpv4Hosts + main.internalIpv6Hosts )
+            main.log.debug( main.Cluster.next().summary() )
+            lib.verifyTopology( main, totalSwitches, totalLinks, main.Cluster.numCtrls - 1 )
+            lib.verifyNodes( main )
+            verify( main, disconnected=False, external=False )
+
+            # Bring everything back up and verify functionality
+            lib.recoverOnos( main, [ nodeIndex ], totalSwitches, totalLinks, main.Cluster.numCtrls )
+            lib.verifyNodes( main )
+            verify( main, external=False )
+            lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+            nodeIndex = ( nodeIndex + 1 ) % main.Cluster.numCtrls
+
     def CASE642( self, main ):
         """
         Drop one link from each double link
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
index f4b5481..90e8129 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
@@ -19,7 +19,8 @@
     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 """
 
-def setupTest( main, test_idx, onosNodes, ipv4=True, ipv6=True, external=True, static=False, countFlowsGroups=False ):
+def setupTest( main, test_idx, onosNodes=-1, ipv4=True, ipv6=True,
+               external=True, static=False, countFlowsGroups=False ):
     """
     SRRouting test setup
     """
@@ -31,6 +32,8 @@
     if not hasattr( main, 'apps' ):
         init = True
         lib.initTest( main )
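+    # onosNodes defaults to -1, meaning "use every controller in the cluster"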
+    if onosNodes < 0:
+        onosNodes = main.Cluster.numCtrls
     # Skip onos packaging if the cluster size stays the same
     if not init and onosNodes == main.Cluster.numCtrls:
         skipPackage = True
@@ -162,7 +165,8 @@
     if external:
         verifyPingExternal( main, ipv4, ipv6, disconnected )
 
-def verifyLinkFailure( main, ipv4=True, ipv6=True, disconnected=False, internal=True, external=True, countFlowsGroups=False ):
+def verifyLinkFailure( main, ipv4=True, ipv6=True, disconnected=False,
+                       internal=True, external=True, countFlowsGroups=False ):
     """
-    Kill and recover all links to spine101 and 102 sequencially and run verifications
+    Kill and recover all links to spine101 and 102 sequentially and run verifications
     """
@@ -186,7 +190,8 @@
     lib.restoreLinkBatch( main, linksToRemove, 48, 10 )
     verify( main, ipv4, ipv6, disconnected, internal, external, countFlowsGroups )
 
-def verifySwitchFailure( main, ipv4=True, ipv6=True, disconnected=False, internal=True, external=True, countFlowsGroups=False ):
+def verifySwitchFailure( main, ipv4=True, ipv6=True, disconnected=False,
+                         internal=True, external=True, countFlowsGroups=False ):
     """
-    Kill and recover spine101 and 102 sequencially and run verifications
+    Kill and recover spine101 and 102 sequentially and run verifications
     """
@@ -197,7 +202,8 @@
         lib.recoverSwitch( main, switchToKill, 10, 48 )
         verify( main, ipv4, ipv6, disconnected, internal, external, countFlowsGroups )
 
-def verifyOnosFailure( main, ipv4=True, ipv6=True, disconnected=False, internal=True, external=True, countFlowsGroups=False ):
+def verifyOnosFailure( main, ipv4=True, ipv6=True, disconnected=False,
+                       internal=True, external=True, countFlowsGroups=False ):
     """
-    Kill and recover onos nodes sequencially and run verifications
+    Kill and recover ONOS nodes sequentially and run verifications
     """
@@ -208,16 +214,17 @@
     numCtrls = len( main.Cluster.runningNodes )
     links = len( json.loads( main.Cluster.next().links() ) )
     switches = len( json.loads( main.Cluster.next().devices() ) )
+    mastershipSleep = float( main.params[ 'timers' ][ 'balanceMasterSleep' ] )
     for ctrl in xrange( numCtrls ):
         # Kill node
         lib.killOnos( main, [ ctrl ], switches, links, ( numCtrls - 1 ) )
         main.Cluster.active(0).CLI.balanceMasters()
-        time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+        time.sleep( mastershipSleep )
         verify( main, ipv4, ipv6, disconnected, internal, external, countFlowsGroups )
         # Recover node
         lib.recoverOnos( main, [ ctrl ], switches, links, numCtrls )
         main.Cluster.active(0).CLI.balanceMasters()
-        time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+        time.sleep( mastershipSleep )
         verify( main, ipv4, ipv6, disconnected, internal, external, countFlowsGroups )
 
 def verify( main, ipv4=True, ipv6=True, disconnected=True, internal=True, external=True, countFlowsGroups=False ):
@@ -230,6 +237,6 @@
     lib.verifyNetworkHostIp( main )
     # check flows / groups numbers
     if countFlowsGroups:
-        run.checkFlowsGroupsFromFile( main )
+        lib.checkFlowsGroupsFromFile( main )
     # ping hosts
     verifyPing( main, ipv4, ipv6, disconnected, internal, external )
diff --git a/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py b/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
index 0daf074..b008984 100644
--- a/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
+++ b/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
@@ -19,7 +19,6 @@
     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
 """
 import os
-import imp
 import time
 import json
 import urllib
@@ -52,7 +51,6 @@
         main.testSetUp.envSetupDescription( False )
         stepResult = main.FALSE
         try:
-            main.step( "Constructing test variables" )
             # Test variables
             main.cellName = main.params[ 'ENV' ][ 'cellName' ]
             main.apps = main.params[ 'ENV' ][ 'cellApps' ]
@@ -427,7 +425,8 @@
         return
 
     @staticmethod
-    def pingAll( main, tag="", dumpflows=True, acceptableFailed=0, basedOnIp=False, sleep=10, retryAttempts=1, skipOnFail=False ):
+    def pingAll( main, tag="", dumpflows=True, acceptableFailed=0, basedOnIp=False,
+                 sleep=10, retryAttempts=1, skipOnFail=False ):
         '''
         Verify connectivity between hosts according to the ping chart
         acceptableFailed: max number of acceptable failed pings.
@@ -507,81 +506,92 @@
                                         tag + "_GroupsOn" )
 
     @staticmethod
-    def killLink( main, end1, end2, switches, links ):
+    def killLink( main, end1, end2, switches, links, sleep=None ):
         """
         end1,end2: identify the switches, ex.: 'leaf1', 'spine1'
         switches, links: number of expected switches and links after linkDown, ex.: '4', '6'
         Kill a link and verify ONOS can see the proper link change
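+        sleep: optional wait, in seconds, for link-down discovery; defaults to the LinkDiscovery timer in the params file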
         """
-        main.linkSleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        else:
+            sleep = float( sleep )
         main.step( "Kill link between %s and %s" % ( end1, end2 ) )
-        LinkDown = main.Network.link( END1=end1, END2=end2, OPTION="down" )
-        LinkDown = main.Network.link( END2=end1, END1=end2, OPTION="down" )
+        linkDown = main.Network.link( END1=end1, END2=end2, OPTION="down" )
+        linkDown = linkDown and main.Network.link( END2=end1, END1=end2, OPTION="down" )
+        # TODO: Can remove this, since in the retry we will wait anyways if topology is incorrect
         main.log.info(
-                "Waiting %s seconds for link down to be discovered" % main.linkSleep )
-        time.sleep( main.linkSleep )
+                "Waiting %s seconds for link down to be discovered" % sleep )
+        time.sleep( sleep )
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
                                     main.FALSE,
                                     kwargs={ 'numoswitch': switches,
                                              'numolink': links },
                                     attempts=10,
-                                    sleep=main.linkSleep )
-        result = topology & LinkDown
+                                    sleep=sleep )
+        result = topology and linkDown
         utilities.assert_equals( expect=main.TRUE, actual=result,
                                  onpass="Link down successful",
                                  onfail="Failed to turn off link?" )
 
     @staticmethod
-    def killLinkBatch( main, links, linksAfter, switches ):
+    def killLinkBatch( main, links, linksAfter, switches, sleep=None ):
         """
         links = list of links (src, dst) to bring down.
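+        sleep: optional wait, in seconds, for link-down discovery; defaults to the LinkDiscovery timer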
         """
 
         main.step("Killing a batch of links {0}".format(links))
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        else:
+            sleep = float( sleep )
 
         for end1, end2 in links:
             main.Network.link( END1=end1, END2=end2, OPTION="down")
             main.Network.link( END1=end2, END2=end1, OPTION="down")
 
-        main.linkSleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        # TODO: Can remove this, since in the retry we will wait anyways if topology is incorrect
         main.log.info(
-                "Waiting %s seconds for links down to be discovered" % main.linkSleep )
-        time.sleep( main.linkSleep )
+                "Waiting %s seconds for links down to be discovered" % sleep )
+        time.sleep( sleep )
 
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
                                     main.FALSE,
                                     kwargs={ 'numoswitch': switches,
                                              'numolink': linksAfter },
                                     attempts=10,
-                                    sleep=main.linkSleep )
+                                    sleep=sleep )
 
         utilities.assert_equals( expect=main.TRUE, actual=topology,
                                  onpass="Link batch down successful",
                                  onfail="Link batch down failed" )
 
     @staticmethod
-    def restoreLinkBatch( main, links, linksAfter, switches ):
+    def restoreLinkBatch( main, links, linksAfter, switches, sleep=None ):
         """
         links = list of link (src, dst) to bring up again.
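+        sleep: optional wait, in seconds, for link-up discovery; defaults to the LinkDiscovery timer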
         """
 
         main.step("Restoring a batch of links {0}".format(links))
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        else:
+            sleep = float( sleep )
 
         for end1, end2 in links:
             main.Network.link( END1=end1, END2=end2, OPTION="up")
             main.Network.link( END1=end2, END2=end1, OPTION="up")
 
-        main.linkSleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
         main.log.info(
-                "Waiting %s seconds for links up to be discovered" % main.linkSleep )
-        time.sleep( main.linkSleep )
+                "Waiting %s seconds for links up to be discovered" % sleep )
+        time.sleep( sleep )
 
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
                                     main.FALSE,
                                     kwargs={ 'numoswitch': switches,
                                              'numolink': linksAfter },
                                     attempts=10,
-                                    sleep=main.linkSleep )
+                                    sleep=sleep )
 
         utilities.assert_equals( expect=main.TRUE, actual=topology,
                                  onpass="Link batch up successful",
@@ -635,7 +645,7 @@
 
     @staticmethod
     def restoreLink( main, end1, end2, switches, links,
-                     portUp=False, dpid1='', dpid2='', port1='', port2='' ):
+                     portUp=False, dpid1='', dpid2='', port1='', port2='', sleep=None ):
         """
         Params:
             end1,end2: identify the end switches, ex.: 'leaf1', 'spine1'
@@ -646,23 +656,28 @@
-        Kill a link and verify ONOS can see the proper link change
+        Restore a link and verify ONOS can see the proper link change
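+        sleep: optional wait, in seconds, for link-up discovery; defaults to the LinkDiscovery timer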
         """
         main.step( "Restore link between %s and %s" % ( end1, end2 ) )
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
+        else:
+            sleep = float( sleep )
         result = False
         count = 0
         while True:
             count += 1
+            ctrl = main.Cluster.next()
             main.Network.link( END1=end1, END2=end2, OPTION="up" )
             main.Network.link( END2=end1, END1=end2, OPTION="up" )
             main.log.info(
-                    "Waiting %s seconds for link up to be discovered" % main.linkSleep )
-            time.sleep( main.linkSleep )
+                    "Waiting %s seconds for link up to be discovered" % sleep )
+            time.sleep( sleep )
 
             if portUp:
                 ctrl.CLI.portstate( dpid=dpid1, port=port1, state='Enable' )
                 ctrl.CLI.portstate( dpid=dpid2, port=port2, state='Enable' )
-                time.sleep( main.linkSleep )
+                time.sleep( sleep )
 
-            result = main.Cluster.active( 0 ).CLI.checkStatus( numoswitch=switches,
-                                                               numolink=links )
+            result = ctrl.CLI.checkStatus( numoswitch=switches,
+                                           numolink=links )
             if count > 5 or result:
                 break
         utilities.assert_equals( expect=main.TRUE, actual=result,
@@ -670,58 +685,67 @@
                                  onfail="Failed to bring link up" )
 
     @staticmethod
-    def killSwitch( main, switch, switches, links ):
+    def killSwitch( main, switch, switches, links, sleep=None ):
         """
         Params: switches, links: number of expected switches and links after SwitchDown, ex.: '4', '6'
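+        sleep: optional wait, in seconds, for switch-down discovery; defaults to the SwitchDiscovery timer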
         Completely kill a switch and verify ONOS can see the proper change
         """
-        main.switchSleep = float( main.params[ 'timers' ][ 'SwitchDiscovery' ] )
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'SwitchDiscovery' ] )
+        else:
+            sleep = float( sleep )
         switch = switch if isinstance( switch, list ) else [ switch ]
         main.step( "Kill " + str( switch ) )
         for s in switch:
             main.log.info( "Stopping " + s )
             main.Network.switch( SW=s, OPTION="stop" )
         # todo make this repeatable
+
+        # TODO: Can remove this, since in the retry we will wait anyways if topology is incorrect
         main.log.info( "Waiting %s seconds for switch down to be discovered" % (
-            main.switchSleep ) )
-        time.sleep( main.switchSleep )
+            sleep ) )
+        time.sleep( sleep )
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
                                     main.FALSE,
                                     kwargs={ 'numoswitch': switches,
                                              'numolink': links },
                                     attempts=10,
-                                    sleep=main.switchSleep )
+                                    sleep=sleep )
         utilities.assert_equals( expect=main.TRUE, actual=topology,
                                  onpass="Kill switch successful",
                                  onfail="Failed to kill switch?" )
 
     @staticmethod
-    def recoverSwitch( main, switch, switches, links, rediscoverHosts=False, hostsToDiscover=[] ):
+    def recoverSwitch( main, switch, switches, links, rediscoverHosts=False, hostsToDiscover=[], sleep=None ):
         """
         Params: switches, links: number of expected switches and links after SwitchUp, ex.: '4', '6'
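+        sleep: optional wait, in seconds, for switch-up discovery; defaults to the SwitchDiscovery timer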
         Recover a switch and verify ONOS can see the proper change
         """
-        # todo make this repeatable
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'SwitchDiscovery' ] )
+        else:
+            sleep = float( sleep )
+        # TODO make this repeatable
         switch = switch if isinstance( switch, list ) else [ switch ]
         main.step( "Recovering " + str( switch ) )
         for s in switch:
             main.log.info( "Starting " + s )
             main.Network.switch( SW=s, OPTION="start" )
         main.log.info( "Waiting %s seconds for switch up to be discovered" % (
-            main.switchSleep ) )
-        time.sleep( main.switchSleep )
+            sleep ) )
+        time.sleep( sleep )
         if rediscoverHosts:
             main.Network.discoverHosts( hostList=hostsToDiscover )
             main.log.info( "Waiting %s seconds for hosts to get re-discovered" % (
-                           main.switchSleep ) )
-            time.sleep( main.switchSleep )
+                           sleep ) )
+            time.sleep( sleep )
 
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
                                     main.FALSE,
                                     kwargs={ 'numoswitch': switches,
                                              'numolink': links },
                                     attempts=10,
-                                    sleep=main.switchSleep )
+                                    sleep=sleep )
         utilities.assert_equals( expect=main.TRUE, actual=topology,
                                  onpass="Switch recovery successful",
                                  onfail="Failed to recover switch?" )
@@ -768,14 +792,62 @@
             main.ONOSbench.onosStop( ctrl.ipAddress )
 
     @staticmethod
-    def killOnos( main, nodes, switches, links, expNodes ):
+    def verifyNodes( main ):
+        """
+        Verifies that each active node in the cluster has an accurate view of the other nodes and their status
+        """
+        nodeResults = utilities.retry( main.Cluster.nodesCheck,
+                                       False,
+                                       attempts=10,
+                                       sleep=10 )
+        utilities.assert_equals( expect=True, actual=nodeResults,
+                                 onpass="Nodes check successful",
+                                 onfail="Nodes check NOT successful" )
+
+        if not nodeResults:
+            for ctrl in main.Cluster.runningNodes:
+                main.log.debug( "{} components not ACTIVE: \n{}".format(
+                    ctrl.name,
+                    ctrl.CLI.sendline( "scr:list | grep -v ACTIVE" ) ) )
+            main.log.error( "Failed to kill ONOS, stopping test" )
+            main.cleanAndExit()
+
+    @staticmethod
+    def verifyTopology( main, switches, links, expNodes ):
+        """
+        Verifies that the ONOS cluster has an accurate view of the topology
+
+        Params:
+        switches, links, expNodes: number of expected switches, links, and nodes at this point in the test, ex.: '4', '6', '2'
+        """
+        main.step( "Check number of topology elements" )
+        topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
+                                    main.FALSE,
+                                    kwargs={ 'numoswitch': switches,
+                                             'numolink': links,
+                                             'numoctrl': expNodes },
+                                    attempts=10,
+                                    sleep=12 )
+        utilities.assert_equals( expect=main.TRUE, actual=topology,
+                                 onpass="Number of topology elements are correct",
+                                 onfail="Unexpected number of links, switches, and/or controllers" )
+
+    @staticmethod
+    def killOnos( main, nodes, switches, links, expNodes, sleep=None ):
         """
         Params: nodes, integer array with position of the ONOS nodes in the CLIs array
         switches, links, nodes: number of expected switches, links and nodes after KillOnos, ex.: '4', '6'
         Completely Kill an ONOS instance and verify the ONOS cluster can see the proper change
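+        sleep: optional wait, in seconds, after killing the node(s); defaults to the OnosDiscovery timer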
         """
+        # TODO: We have enough information in the Cluster instance to remove expNodes from here and verifyTopology
         main.step( "Killing ONOS instances with index(es): {}".format( nodes ) )
-        main.onosSleep = float( main.params[ 'timers' ][ 'OnosDiscovery' ] )
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'OnosDiscovery' ] )
+        else:
+            sleep = float( sleep )
 
         for i in nodes:
             killResult = main.ONOSbench.onosDie( main.Cluster.runningNodes[ i ].ipAddress )
@@ -783,48 +855,27 @@
                                      onpass="ONOS instance Killed",
                                      onfail="Error killing ONOS instance" )
             main.Cluster.runningNodes[ i ].active = False
-        time.sleep( main.onosSleep )
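+        # Reset the Cluster helper so later active()/next() calls reflect the node(s) just marked down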
+        main.Cluster.reset()
+        time.sleep( sleep )
 
         if len( nodes ) < main.Cluster.numCtrls:
-
-            nodeResults = utilities.retry( main.Cluster.nodesCheck,
-                                           False,
-                                           attempts=10,
-                                           sleep=10 )
-            utilities.assert_equals( expect=True, actual=nodeResults,
-                                     onpass="Nodes check successful",
-                                     onfail="Nodes check NOT successful" )
-
-            if not nodeResults:
-                for i in nodes:
-                    ctrl = main.Cluster.runningNodes[ i ]
-                    main.log.debug( "{} components not ACTIVE: \n{}".format(
-                        ctrl.name,
-                        ctrl.CLI.sendline( "scr:list | grep -v ACTIVE" ) ) )
-                main.log.error( "Failed to kill ONOS, stopping test" )
-                main.cleanAndExit()
-
-            topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
-                                        main.FALSE,
-                                        kwargs={ 'numoswitch': switches,
-                                                 'numolink': links,
-                                                 'numoctrl': expNodes },
-                                        attempts=10,
-                                        sleep=12 )
-            utilities.assert_equals( expect=main.TRUE, actual=topology,
-                                     onpass="ONOS Instance down successful",
-                                     onfail="Failed to turn off ONOS Instance" )
+            Testcaselib.verifyNodes( main )
+            Testcaselib.verifyTopology( main, switches, links, expNodes )
 
     @staticmethod
-    def recoverOnos( main, nodes, switches, links, expNodes ):
+    def recoverOnos( main, nodes, switches, links, expNodes, sleep=None ):
         """
         Params: nodes, integer array with position of the ONOS nodes in the CLIs array
         switches, links, nodes: number of expected switches, links and nodes after recoverOnos, ex.: '4', '6'
         Recover an ONOS instance and verify the ONOS cluster can see the proper change
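+        sleep: optional wait, in seconds, after restarting the node(s); defaults to the OnosDiscovery timer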
         """
         main.step( "Recovering ONOS instances with index(es): {}".format( nodes ) )
+        if sleep is None:
+            sleep = float( main.params[ 'timers' ][ 'OnosDiscovery' ] )
+        else:
+            sleep = float( sleep )
         [ main.ONOSbench.onosStart( main.Cluster.runningNodes[ i ].ipAddress ) for i in nodes ]
-        time.sleep( main.onosSleep )
+        time.sleep( sleep )
         for i in nodes:
             isUp = main.ONOSbench.isup( main.Cluster.runningNodes[ i ].ipAddress )
             utilities.assert_equals( expect=main.TRUE, actual=isUp,
@@ -843,34 +894,11 @@
                                      onpass="ONOS CLI is ready",
                                      onfail="ONOS CLI is not ready" )
 
+        main.Cluster.reset()
         main.step( "Checking ONOS nodes" )
-        nodeResults = utilities.retry( main.Cluster.nodesCheck,
-                                       False,
-                                       attempts=5,
-                                       sleep=10 )
-        utilities.assert_equals( expect=True, actual=nodeResults,
-                                 onpass="Nodes check successful",
-                                 onfail="Nodes check NOT successful" )
+        Testcaselib.verifyNodes( main )
+        Testcaselib.verifyTopology( main, switches, links, expNodes )
 
-        if not nodeResults:
-            for i in nodes:
-                ctrl = main.Cluster.runningNodes[ i ]
-                main.log.debug( "{} components not ACTIVE: \n{}".format(
-                    ctrl.name,
-                    ctrl.CLI.sendline( "scr:list | grep -v ACTIVE" ) ) )
-            main.log.error( "Failed to start ONOS, stopping test" )
-            main.cleanAndExit()
-
-        topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
-                                    main.FALSE,
-                                    kwargs={ 'numoswitch': switches,
-                                             'numolink': links,
-                                             'numoctrl': expNodes },
-                                    attempts=10,
-                                    sleep=12 )
-        utilities.assert_equals( expect=main.TRUE, actual=topology,
-                                 onpass="ONOS Instance down successful",
-                                 onfail="Failed to turn off ONOS Instance" )
         ready = utilities.retry( main.Cluster.active( 0 ).CLI.summary,
                                  main.FALSE,
                                  attempts=10,
@@ -1103,7 +1131,7 @@
                 # Send packet and check received packet
                 expectedResult = expect.pop( 0 ) if isinstance( expect, list ) else expect
                 t3Cmd = "t3-troubleshoot -vv -sp {} -et ipv{} -d {} -dm {}".format( srcEntry[ "port" ], routeData[ "ipVersion" ],
-                                                                                routeData[ "group" ], srcEntry[ "Ether" ] )
+                                                                                    routeData[ "group" ], srcEntry[ "Ether" ] )
                 trafficResult = main.topo.sendScapyPackets( sender, receiver, pktFilter, pkt, sIface, dIface,
                                                             expectedResult, maxRetry, True, t3Cmd )
                 if not trafficResult:
diff --git a/TestON/tests/dependencies/ONOSSetup.py b/TestON/tests/dependencies/ONOSSetup.py
index ee339be..66eced1 100644
--- a/TestON/tests/dependencies/ONOSSetup.py
+++ b/TestON/tests/dependencies/ONOSSetup.py
@@ -35,7 +35,7 @@
             main.case( "Constructing test variables and building ONOS package" )
             main.caseExplanation = "For loading from params file, and pull" + \
                                    " and build the latest ONOS package"
-        main.step("Constructing test variables")
+        main.step( "Constructing test variables" )
         try:
             from tests.dependencies.Cluster import Cluster
         except ImportError: