Merge "[ONOS-7637] Making a script that finds the broken commit from the ONOS."
diff --git a/TestON/JenkinsFile/CHO_Graph_Generator b/TestON/JenkinsFile/CHO_Graph_Generator
new file mode 100644
index 0000000..5f45c32
--- /dev/null
+++ b/TestON/JenkinsFile/CHO_Graph_Generator
@@ -0,0 +1,47 @@
+#!groovy
+
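+// Builds and runs, once per requested ONOS branch, the trendCHO.R graph script on the
+// Fabric5 test station, then triggers the post job that publishes the results.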
+branches = params.ONOSbranch
+branchList = branches.tokenize( "\n;, " )
+script_file = "TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R"
+saving_directory = "/var/jenkins/workspace/postjob-Fabric5/"
+scriptDir = "~/CHO_Jenkins_Scripts/"
+
+graphScript = generateGraphScript( branchList )
+
+stage( 'Generating-Graph' ){
+    node( "TestStation-Fabric5s" ){
+        runScript( graphScript )
+    }
+}
+stage( 'posting-result' ){
+    postJob()
+}
+
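+// Compose one shell snippet per branch: create the branch's script directory, copy the
+// CSV logs into it, run log-summary, then invoke trendCHO.R on the resulting files.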
+def generateGraphScript( branchList ){
+    graphScript = ''
+    for( branch in branchList ){
+        branchDir = scriptDir + branch + "/"
+        graphScript += '''export BRANCH=''' + branchDir + '''
+                          mkdir ''' + branchDir + ''';
+                          cp *.csv ''' + branchDir + ''';
+                          bash log-summary;''' + '''
+                          ''' +  script_file + ' ' + branchDir + 'event.csv ' +
+                                branchDir + 'failure.csv ' + branchDir + 'error.csv ' +
+                                branch + ' 60 ' +  saving_directory + ''';
+        '''
+        print( graphScript )
+    }
+    return graphScript
+}
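+// Run the composed snippet on the test station, with TestON and sts on the PYTHONPATH.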
+def runScript( graphScript ){
+    sh '''#!/bin/bash -l
+          set +e
+          export PYTHONPATH=/home/sdn/TestON:/home/sdn/sts
+          cd ''' + scriptDir + ''';
+          ''' + graphScript
+}
+def postJob(){
+    jobToRun = "postjob-Fabric5"
+    build job: jobToRun, propagate: false
+}
diff --git a/TestON/JenkinsFile/SRJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
similarity index 77%
rename from TestON/JenkinsFile/SRJenkinsfileTrigger
rename to TestON/JenkinsFile/FabricJenkinsfileTrigger
index 98648ed..da75805 100644
--- a/TestON/JenkinsFile/SRJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -17,6 +17,7 @@
     "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
     "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
     "SR" : [ tests : "", nodeName : [ "Fabric2", "Fabric3" ], wikiContent : "" ],
+    "SR1" : [ tests : "", nodeName : [ "Fabric2", "Fabric3" ], wikiContent : "" ],
     "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
 ]
 Prefix_organizer = [
@@ -78,25 +79,29 @@
     println "Tests to be run manually : "
 }else{
     testcases[ "SR" ][ "tests" ] = SR_choices
+    testcases[ "SR1" ][ "tests" ] = SR_choices
     println "Defaulting to " + day + " tests:"
 }
 
 triggerFuncs.print_tests( testcases )
 
 def runTest = [
-    "VM" : [:],
-    "BM" : [:],
     "Fabric2" : [:],
     "Fabric3" : [:]
 ]
-for( String test in testcases.keySet() ){
-    println test
-    if ( testcases[ test ][ "tests" ] != "" ){
-        runTest[ testcases[ test ][ "nodeName" ][ 0 ] ][ test ] = triggerFuncs.trigger_pipeline( current_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 0 ], test, manually_run, onos_tag )
-        runTest[ testcases[ test ][ "nodeName" ][ 0 ] ][ test ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 0 ], test, manually_run, onos_tag )
-        runTest[ testcases[ test ][ "nodeName" ][ 1 ] ][ test ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 1 ], test, manually_run, onos_tag )
+if ( manually_run ){
+    for( String test in testcases.keySet() ){
+        println test
+        if ( testcases[ test ][ "tests" ] != "" ){
+            runTest[ testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ], test, manually_run, onos_tag )
+        }
     }
+}else{
+    runTest[ "Fabric2" ][ "SR1" ] = triggerFuncs.trigger_pipeline( current_version, testcases[ "SR1" ][ "tests" ], testcases[ "SR1" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
+    runTest[ "Fabric2" ][ "SR" ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
+    runTest[ "Fabric3" ][ "SR" ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 1 ], "SR", manually_run, onos_tag )
 }
+
 def finalList = [:]
 finalList[ "Fabric2" ] = triggerFuncs.runTestSeq( runTest[ "Fabric2" ] )
 finalList[ "Fabric3" ] = triggerFuncs.runTestSeq( runTest[ "Fabric3" ] )
@@ -126,3 +131,6 @@
     }
     return result
 }
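+// Branch 1.12 runs on the second Fabric machine ( Fabric3 ); all other branches use the first ( Fabric2 ).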
+def nodeOn( branch ){
+    return branch == "1.12" ? 1 : 0;
+}
diff --git a/TestON/JenkinsFile/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
index ea4eccd..fd7253b 100644
--- a/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
@@ -46,8 +46,7 @@
   print testMachine
 }
 def fabricOn( branch ){
-  return branch.reverse().take(4).reverse() == "1.13" ? '2' : '3'
-// Temp Fix  return branch.reverse().take(6).reverse() == "master" ? '2' : '3'
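+  // 1.12 tests run on Fabric3; every other branch runs on Fabric2.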
+  return branch.reverse().take(4).reverse() == "1.12" ? '3' : '2'
 }
 def printType(){
   echo testType;
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index 336e237..d744a67 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -1,394 +1,32 @@
 #!groovy
-
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsTestONTests.groovy' )
-triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/TriggerFuncs.groovy' )
 
-current_version = "master"
-previous_version = "1.13"
-before_previous_version = "1.12"
-funcs.initializeTrend( "VM" );
-triggerFuncs.init( funcs )
-wikiContents = ""
-testcases = [
-    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
-    "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
-    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
-]
-Prefix_organizer = [
-    "FU" : "FUNC",
-    "HA" : "HA",
-    "PL" : "USECASE",
-    "SA" : "USECASE",
-    "SC" : "SCPF",
-    "SR" : "SR",
-    "US" : "USECASE",
-    "VP" : "USECASE"
-]
-
-manually_run = params.manual_run
-onos_b = current_version
-test_branch = ""
-onos_tag = params.ONOSTag
-isOldFlow = true
-
-// Set tests based on day of week
 def now = funcs.getCurrentTime()
 print now.toString()
 today = now[ Calendar.DAY_OF_WEEK ]
-
-if ( manually_run ){
-    onos_b = params.ONOSVersion
-} else {
-    if ( today == Calendar.SATURDAY ){
-        onos_b = previous_version
-    } else if( today == Calendar.SUNDAY ){
-        onos_b = before_previous_version
-    }
-}
-AllTheTests = test_lists.getAllTheTests( onos_b )
-
-day = ""
-SCPF_choices = ""
-USECASE_choices = ""
-FUNC_choices = ""
-HA_choices = ""
-SR_choices = ""
-stat_graph_generator_file = "testCategoryBuildStats.R"
-pie_graph_generator_file = "testCategoryPiePassFail.R"
-graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
-
-post_result = params.PostResult
+machines = params.machines
+manually_run = params.manual_run
 if( !manually_run ){
     slackSend( color:'#03CD9F',
                message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
                         + "Starting tests on : " + now.toString()
                         + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
-    testDivider( today )
-    FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
-    HA_choices =  triggerFuncs.lastCommaRemover( HA_choices )
-    SCPF_choices =  triggerFuncs.lastCommaRemover( SCPF_choices )
-    USECASE_choices =  triggerFuncs.lastCommaRemover( USECASE_choices )
-    SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
 }
 
-if ( manually_run ){
-    testcases = triggerFuncs.organize_tests( params.Tests, testcases )
+machineList = machines.tokenize( "\n;, " )
+machineOn = [:]
 
-    isOldFlow = params.isOldFlow
-    println "Tests to be run manually : "
-}else{
-    testcases[ "SCPF" ][ "tests" ] = SCPF_choices
-    testcases[ "USECASE" ][ "tests" ] = USECASE_choices
-    testcases[ "FUNC" ][ "tests" ] = FUNC_choices
-    testcases[ "HA" ][ "tests" ] = HA_choices
-    testcases[ "SR" ][ "tests" ] = SR_choices
-    println "Defaulting to " + day + " tests:"
+for( machine in machineList ){
+    print( machine )
+    machineOn[ machine ] = triggerJob( machine )
 }
 
-triggerFuncs.print_tests( testcases )
+parallel machineOn
 
-def runTest = [
-    "VM" : [:],
-    "BM" : [:]
-]
-for( String test in testcases.keySet() ){
-    println test
-    if ( testcases[ test ][ "tests" ] != "" ){
-        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
+
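+// Wrap each machine's trigger job in a closure so the map can be handed to 'parallel'.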
+def triggerJob( on ){
+    return{
+        jobToRun = on + "-pipeline-trigger"
+        build job: jobToRun, propagate: false
     }
 }
-def finalList = [:]
-finalList[ "VM" ] = triggerFuncs.runTestSeq( runTest[ "VM" ] )
-finalList[ "BM" ] = triggerFuncs.runTestSeq( runTest[ "BM" ] )
-parallel finalList
-//finalList[ "BM" ].call()
-
-if ( !manually_run ){
-    funcs.generateStatGraph( "TestStation-VMs",
-                             funcs.branchWithPrefix( onos_b ),
-                             AllTheTests,
-                             stat_graph_generator_file,
-                             pie_graph_generator_file,
-                             graph_saved_directory )
-}
-
-def testDivider( today ){
-    switch ( today ) {
-        case Calendar.MONDAY:
-            initHtmlForWiki()
-            monday( true )
-            tuesday( true, false )
-            wednesday( true, false )
-            thursday( true, false )
-            friday( true, false )
-            saturday( false, false )
-            sunday( false, false )
-            day = "Monday"
-            closeHtmlForWiki()
-            postToWiki( wikiContents )
-            slackSend( color:'#FFD988', message:"Tests to be run this weekdays : \n" + triggerFuncs.printDaysForTest( AllTheTests ) )
-            break
-        case Calendar.TUESDAY:
-            tuesday( false, true )
-            day = "Tuesday"
-            break
-        case Calendar.WEDNESDAY:
-            wednesday( false, true )
-            day = "Wednesday"
-            break
-        case Calendar.THURSDAY:
-            thursday( false, true )
-            day = "Thursday"
-            break
-        case Calendar.FRIDAY:
-            friday( false, true )
-            day = "Friday"
-            break
-        case Calendar.SATURDAY:
-            saturday( false, true )
-            day = "Saturday"
-            break
-        case Calendar.SUNDAY:
-            sunday( false , true )
-            day = "Sunday"
-            break
-    }
-}
-def monday( getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", true, "M", getResult )
-    HA_choices += adder( "HA", "extra_A", true, "M", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", true, "M", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def tuesday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "T", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "T", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
-    USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
-    closingHeader( "USECASE" )
-}
-def wednesday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "W", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "W", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def thursday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "Th", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def friday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "F", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "F", false )
-    SR_choices += adder( "SR", "extra_A", getDay, "F", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def saturday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "Sa", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Sa", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "Sa", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "Sa", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "Sa", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "Sa", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "Sa", false )
-    SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
-    closingHeader( "USECASE" )
-}
-def sunday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "S", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "S", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "S", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "S", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "S", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "S", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "S", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
-    closingHeader( "USECASE" )
-}
-def adder( testCat, set, dayAdding, day, getResult ){
-    result = ""
-    for( String test in AllTheTests[ testCat ].keySet() ){
-        if( AllTheTests[ testCat ][ test ][ set ] ){
-            if( getResult )
-                result += test + ","
-            if( dayAdding )
-                dayAdder( testCat, test, day )
-            makeHtmlColList( testCat, test )
-        }
-    }
-    return result
-}
-def initHtmlForWiki(){
-    wikiContents = '''
-    <table class="wrapped confluenceTable">
-        <colgroup>
-              <col />
-              <col />
-              <col />
-              <col />
-              <col />
-              <col />
-        </colgroup>
-        <tbody>
-            <tr>
-                <th colspan="1" class="confluenceTh">
-                    <br />
-                </th>
-                <th class="confluenceTh"><p>Monday</p></th>
-                <th class="confluenceTh"><p>Tuesday</p></th>
-                <th class="confluenceTh"><p>Wednesday</p></th>
-                <th class="confluenceTh"><p>Thursday</p></th>
-                <th class="confluenceTh"><p>Friday</p></th>
-                <th class="confluenceTh"><p>Saturday</p></th>
-                <th class="confluenceTh"><p>Sunday</p></th>
-            </tr>'''
-    for( String test in testcases.keySet() ){
-        testcases[ test ][ 'wikiContent' ] = '''
-            <tr>
-                <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
-    }
-}
-def addingHeader( testCategory ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                <td class="confluenceTd">
-                    <ul>'''
-}
-def makeHtmlColList( testCategory, testName ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                        <li>'''+ testName +'''</li>'''
-
-}
-def closingHeader( testCategory ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                    </ul>
-                </td>'''
-}
-def closeHtmlForWiki(){
-    for( String test in testcases.keySet() ){
-        wikiContents += testcases[ test ][ 'wikiContent' ]
-        wikiContents += '''
-            </tr>'''
-    }
-    wikiContents += '''
-        </tbody>
-    </table>
-    <p><strong>Everyday</strong>, all SegmentRouting tests are built and run on every supported branch.</p>
-    <p>On <strong>Weekdays</strong>, all the other tests are built and run on the master branch.</p>
-    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
-    <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
-}
-def postToWiki( contents ){
-    node( testMachine ){
-        workspace = "/var/jenkins/workspace/all-pipeline-trigger/"
-        filename = "jenkinsSchedule.txt"
-        writeFile file: workspace + filename, text: contents
-        funcs.publishToConfluence( "false", "true",
-                                   "Automated Test Schedule",
-                                   workspace + filename )
-    }
-}
-def dayAdder( testCat, testName, dayOfWeek ){
-    AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
-}
diff --git a/TestON/JenkinsFile/TriggerFuncs.groovy b/TestON/JenkinsFile/TriggerFuncs.groovy
index 8cefc50..96c0855 100644
--- a/TestON/JenkinsFile/TriggerFuncs.groovy
+++ b/TestON/JenkinsFile/TriggerFuncs.groovy
@@ -97,6 +97,7 @@
         ''' + preSetup( onos_branch, test_branch, onos_tag, manuallyRun ) + '''
         ''' + oldFlowCheck( jobOn, onos_branch ) + '''
         ''' + postSetup( onos_branch, test_branch, onos_tag, manuallyRun )
+        generateKey()
     }
 }
 def tagCheck( onos_tag, onos_branch ){
@@ -169,10 +170,23 @@
         echo -e "\n##### Stop all running instances of Karaf #####"
         kill $(ps -efw | grep karaf | grep -v grep | awk '{print $2}')
         sleep 30
-        git branch'''
+        git branch
+        '''
     }
     return result
 }
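+// Push the ONOS bits through the proxy and regenerate the cluster key; any failure here is deliberately ignored.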
+def generateKey(){
+    try{
+        sh '''
+        #!/bin/bash -l
+        set +e
+        . ~/.bashrc
+        env
+        onos-push-bits-through-proxy
+        onos-gen-cluster-key -f
+        '''
+    }catch( all ){}
+}
 def returnCell( nodeName ){
     node( "TestStation-" + nodeName + "s" ){
         sh '''#!/bin/bash -l
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
new file mode 100644
index 0000000..3d03048
--- /dev/null
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -0,0 +1,394 @@
+#!groovy
+
+funcs = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsCommonFuncs.groovy' )
+test_lists = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsTestONTests.groovy' )
+triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/TriggerFuncs.groovy' )
+
+current_version = "master"
+previous_version = "1.13"
+before_previous_version = "1.12"
+funcs.initializeTrend( "VM" );
+triggerFuncs.init( funcs )
+wikiContents = ""
+testcases = [
+    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
+    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
+    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
+    "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
+    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
+]
+Prefix_organizer = [
+    "FU" : "FUNC",
+    "HA" : "HA",
+    "PL" : "USECASE",
+    "SA" : "USECASE",
+    "SC" : "SCPF",
+    "SR" : "SR",
+    "US" : "USECASE",
+    "VP" : "USECASE"
+]
+
+manually_run = params.manual_run
+onos_b = current_version
+test_branch = ""
+onos_tag = params.ONOSTag
+isOldFlow = true
+
+// Set tests based on day of week
+def now = funcs.getCurrentTime()
+print now.toString()
+today = now[ Calendar.DAY_OF_WEEK ]
+
+if ( manually_run ){
+    onos_b = params.ONOSVersion
+} else {
+    if ( today == Calendar.SATURDAY ){
+        onos_b = previous_version
+    } else if( today == Calendar.SUNDAY ){
+        onos_b = before_previous_version
+    }
+}
+AllTheTests = test_lists.getAllTheTests( onos_b )
+
+day = ""
+SCPF_choices = ""
+USECASE_choices = ""
+FUNC_choices = ""
+HA_choices = ""
+SR_choices = ""
+stat_graph_generator_file = "testCategoryBuildStats.R"
+pie_graph_generator_file = "testCategoryPiePassFail.R"
+graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
+
+post_result = params.PostResult
+if( !manually_run ){
+    testDivider( today )
+    FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
+    HA_choices =  triggerFuncs.lastCommaRemover( HA_choices )
+    SCPF_choices =  triggerFuncs.lastCommaRemover( SCPF_choices )
+    USECASE_choices =  triggerFuncs.lastCommaRemover( USECASE_choices )
+    SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
+}
+
+if ( manually_run ){
+    testcases = triggerFuncs.organize_tests( params.Tests, testcases )
+
+    isOldFlow = params.isOldFlow
+    println "Tests to be run manually : "
+}else{
+    testcases[ "SCPF" ][ "tests" ] = SCPF_choices
+    testcases[ "USECASE" ][ "tests" ] = USECASE_choices
+    testcases[ "FUNC" ][ "tests" ] = FUNC_choices
+    testcases[ "HA" ][ "tests" ] = HA_choices
+    testcases[ "SR" ][ "tests" ] = SR_choices
+    println "Defaulting to " + day + " tests:"
+}
+
+triggerFuncs.print_tests( testcases )
+
+def runTest = [
+    "VM" : [:],
+    "BM" : [:]
+]
+for( String test in testcases.keySet() ){
+    println test
+    if ( testcases[ test ][ "tests" ] != "" ){
+        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
+    }
+}
+def finalList = [:]
+jobName = env.JOB_NAME
+finalList[ "VM" ] = triggerFuncs.runTestSeq( runTest[ "VM" ] )
+finalList[ "BM" ] = triggerFuncs.runTestSeq( runTest[ "BM" ] )
+//parallel finalList
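+// Run only the sequence that belongs to this trigger job; the job name prefix selects VM or BM.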
+if( jobName.take( 2 ) == "vm" )
+    finalList[ "VM" ].call()
+else
+    finalList[ "BM" ].call()
+
+if ( !manually_run ){
+    funcs.generateStatGraph( "TestStation-VMs",
+                             funcs.branchWithPrefix( onos_b ),
+                             AllTheTests,
+                             stat_graph_generator_file,
+                             pie_graph_generator_file,
+                             graph_saved_directory )
+}
+
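+// Pick today's test sets; on Mondays, also build the full weekly schedule and post it to the wiki.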
+def testDivider( today ){
+    switch ( today ) {
+        case Calendar.MONDAY:
+            initHtmlForWiki()
+            monday( true )
+            tuesday( true, false )
+            wednesday( true, false )
+            thursday( true, false )
+            friday( true, false )
+            saturday( false, false )
+            sunday( false, false )
+            day = "Monday"
+            closeHtmlForWiki()
+            postToWiki( wikiContents )
+            slackSend( color:'#FFD988', message:"Tests to be run this weekdays : \n" + triggerFuncs.printDaysForTest( AllTheTests ) )
+            break
+        case Calendar.TUESDAY:
+            tuesday( false, true )
+            day = "Tuesday"
+            break
+        case Calendar.WEDNESDAY:
+            wednesday( false, true )
+            day = "Wednesday"
+            break
+        case Calendar.THURSDAY:
+            thursday( false, true )
+            day = "Thursday"
+            break
+        case Calendar.FRIDAY:
+            friday( false, true )
+            day = "Friday"
+            break
+        case Calendar.SATURDAY:
+            saturday( false, true )
+            day = "Saturday"
+            break
+        case Calendar.SUNDAY:
+            sunday( false , true )
+            day = "Sunday"
+            break
+    }
+}
+def monday( getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", true, "M", getResult )
+    HA_choices += adder( "HA", "extra_A", true, "M", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", true, "M", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def tuesday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "T", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "T", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
+    USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
+    closingHeader( "USECASE" )
+}
+def wednesday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "W", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "W", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def thursday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "Th", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def friday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "F", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "F", false )
+    SR_choices += adder( "SR", "extra_A", getDay, "F", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def saturday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "Sa", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Sa", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "Sa", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "Sa", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "Sa", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "Sa", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "Sa", false )
+    SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
+    closingHeader( "USECASE" )
+}
+def sunday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "S", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "S", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "S", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "S", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "S", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "S", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "S", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
+    closingHeader( "USECASE" )
+}
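+// Collect every test in the given set: optionally return them as a comma-separated list ( getResult ),
+// optionally record the day they run ( dayAdding ), and always add them to the wiki column.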
+def adder( testCat, set, dayAdding, day, getResult ){
+    result = ""
+    for( String test in AllTheTests[ testCat ].keySet() ){
+        if( AllTheTests[ testCat ][ test ][ set ] ){
+            if( getResult )
+                result += test + ","
+            if( dayAdding )
+                dayAdder( testCat, test, day )
+            makeHtmlColList( testCat, test )
+        }
+    }
+    return result
+}
+def initHtmlForWiki(){
+    wikiContents = '''
+    <table class="wrapped confluenceTable">
+        <colgroup>
+              <col />
+              <col />
+              <col />
+              <col />
+              <col />
+              <col />
+        </colgroup>
+        <tbody>
+            <tr>
+                <th colspan="1" class="confluenceTh">
+                    <br />
+                </th>
+                <th class="confluenceTh"><p>Monday</p></th>
+                <th class="confluenceTh"><p>Tuesday</p></th>
+                <th class="confluenceTh"><p>Wednesday</p></th>
+                <th class="confluenceTh"><p>Thursday</p></th>
+                <th class="confluenceTh"><p>Friday</p></th>
+                <th class="confluenceTh"><p>Saturday</p></th>
+                <th class="confluenceTh"><p>Sunday</p></th>
+            </tr>'''
+    for( String test in testcases.keySet() ){
+        testcases[ test ][ 'wikiContent' ] = '''
+            <tr>
+                <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
+    }
+}
+def addingHeader( testCategory ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                <td class="confluenceTd">
+                    <ul>'''
+}
+def makeHtmlColList( testCategory, testName ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                        <li>'''+ testName +'''</li>'''
+
+}
+def closingHeader( testCategory ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                    </ul>
+                </td>'''
+}
+def closeHtmlForWiki(){
+    for( String test in testcases.keySet() ){
+        wikiContents += testcases[ test ][ 'wikiContent' ]
+        wikiContents += '''
+            </tr>'''
+    }
+    wikiContents += '''
+        </tbody>
+    </table>
+    <p><strong>Everyday</strong>, all SegmentRouting tests are built and run on every supported branch.</p>
+    <p>On <strong>Weekdays</strong>, all the other tests are built and run on the master branch.</p>
+    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
+    <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
+}
+def postToWiki( contents ){
+    node( testMachine ){
+        workspace = "/var/jenkins/workspace/all-pipeline-trigger/"
+        filename = "jenkinsSchedule.txt"
+        writeFile file: workspace + filename, text: contents
+        funcs.publishToConfluence( "false", "true",
+                                   "Automated Test Schedule",
+                                   workspace + filename )
+    }
+}
+def dayAdder( testCat, testName, dayOfWeek ){
+    AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
+}
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/README.md b/TestON/JenkinsFile/wikiGraphRScripts/README.md
new file mode 100644
index 0000000..dab3f68
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/README.md
@@ -0,0 +1,23 @@
+<h1>Wiki Graph Scripts</h1>
+
+The scripts that generate the graphs are written in the R programming language.
+
+The scripts are structured in the following format:
+1. Data Management
+    * Data is obtained from the databases through SQL. CLI arguments, filenames, and titles are also handled here.
+        1. Importing libraries
+        2. Command line arguments
+        3. Title of the graph
+        4. Filename
+        5. SQL Initialization and Data Gathering
+2. Organize Data
+    * Raw data is sorted into a data frame. The data frame is used to generate the graph.
+        1. Combining data into a single list
+        2. Using the list to construct a data frame
+        3. Adding data as columns to the data frame
+3. Generate Graphs
+    * The graphs are formatted and constructed here.
+        1. Main plot generated
+        2. Fundamental variables assigned
+        3. Generate specific graph format
+        4. Exporting graph to file
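+
+A minimal sketch of that three-step layout ( the data, values, and output filename below are illustrative placeholders, not taken from any real test ):
+
+```r
+# STEP 1: Data management -- read CLI args ( here, only the directory to save to ).
+args <- commandArgs( trailingOnly = TRUE )
+save_directory <- if ( length( args ) >= 1 ) args[ 1 ] else "./"
+
+library( ggplot2 )
+
+# STEP 2: Organize data -- build the data frame the graph is drawn from.
+dataFrame <- data.frame( scale = c( 1, 3, 5 ),
+                         ms = c( 10, 20, 40 ) )
+
+# STEP 3: Generate graph -- construct, format, and export the plot.
+examplePlot <- ggplot( data = dataFrame, aes( x = scale, y = ms ) ) +
+               geom_bar( stat = "identity" ) +
+               ggtitle( "Example Latency Graph" )
+
+ggsave( paste( save_directory, "example_graph.jpg", sep="" ),
+        plot = examplePlot, width = 15, height = 10, dpi = 200 )
+```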
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R
new file mode 100644
index 0000000..6f67b0d
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R
@@ -0,0 +1,386 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
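+# Hypothetical example invocation ( every value below is a placeholder; the real
+# argument order is shown in the usage message further down ):
+# Rscript SCPFIntentInstallWithdrawRerouteLat.R y <database-host> <database-port> <database-user-id> <database-password> <test-name> master 100 n /path/to/save/directory/
+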
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+batch_size = 8
+old_flow = 9
+save_directory = 10
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( is.na( args[ save_directory ] ) ){
+
+    print( paste( "Usage: Rscript SCPFIntentInstallWithdrawRerouteLat.R",
+                                  "<isFlowObj>" ,
+                                  "<database-host>",
+                                  "<database-port>",
+                                  "<database-user-id>",
+                                  "<database-password>",
+                                  "<test-name>",
+                                  "<branch-name>",
+                                  "<batch-size>",
+                                  "<using-old-flow>",
+                                  "<directory-to-save-graphs>",
+                                  sep=" " ) )
+    quit( status = 1 )  # basically exit(), but in R
+}
+
+# -----------------------------------
+# Create File Name and Title of Graph
+# -----------------------------------
+
+print( "Creating filename and title of graph." )
+
+chartTitle <- "Intent Install, Withdraw, & Reroute Latencies"
+flowObjFileModifier <- ""
+errBarOutputFile <- paste( args[ save_directory ],
+                    "SCPFIntentInstallWithdrawRerouteLat_",
+                    args[ branch_name ],
+                    sep="" )
+
+if ( args[ has_flow_obj ] == "y" ){
+    errBarOutputFile <- paste( errBarOutputFile, "_fobj", sep="" )
+    flowObjFileModifier <- "fobj_"
+    chartTitle <- paste( chartTitle, "w/ FlowObj" )
+}
+if ( args[ old_flow ] == "y" ){
+    errBarOutputFile <- paste( errBarOutputFile, "_OldFlow", sep="" )
+    chartTitle <- paste( chartTitle,
+                         "With Eventually Consistent Flow Rule Store",
+                         sep="\n" )
+}
+errBarOutputFile <- paste( errBarOutputFile,
+                           "_",
+                           args[ batch_size ],
+                           "-batchSize_graph.jpg",
+                           sep="" )
+
+chartTitle <- paste( chartTitle,
+                     "\nBatch Size =",
+                     args[ batch_size ],
+                     sep=" " )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+                  dbname = "onostest",
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
+
+# ---------------------------------------
+# Intent Install and Withdraw SQL Command
+# ---------------------------------------
+print( "Generating Intent Install and Withdraw SQL Command" )
+
+installWithdrawSQLCommand <- paste( "SELECT * FROM intent_latency_",
+                                    flowObjFileModifier,
+                                    "tests WHERE batch_size=",
+                                    args[ batch_size ],
+                                    " AND branch = '",
+                                    args[ branch_name ],
+                                    "' AND date IN ( SELECT MAX( date ) FROM intent_latency_",
+                                    flowObjFileModifier,
+                                    "tests WHERE branch='",
+                                    args[ branch_name ],
+                                    "' AND ",
+                                    ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                                    "is_old_flow",
+                                    ")",
+                                    sep="" )
+
+print( "Sending Intent Install and Withdraw SQL command:" )
+print( installWithdrawSQLCommand )
+installWithdrawData <- dbGetQuery( con, installWithdrawSQLCommand )
+
+# --------------------------
+# Intent Reroute SQL Command
+# --------------------------
+
+print( "Generating Intent Reroute SQL Command" )
+
+rerouteSQLCommand <- paste( "SELECT * FROM intent_reroute_latency_",
+                            flowObjFileModifier,
+                            "tests WHERE batch_size=",
+                            args[ batch_size ],
+                            " AND branch = '",
+                            args[ branch_name ],
+                            "' AND date IN ( SELECT MAX( date ) FROM intent_reroute_latency_",
+                            flowObjFileModifier,
+                            "tests WHERE branch='",
+                            args[ branch_name ],
+                            "' AND ",
+                            ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                            "is_old_flow",
+                            ")",
+                            sep="" )
+
+print( "Sending Intent Reroute SQL command:" )
+print( rerouteSQLCommand )
+rerouteData <- dbGetQuery( con, rerouteSQLCommand )
+
+# **********************************************************
+# STEP 2: Organize Data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -------------------------------------------------------
+# Combining Install, Withdraw, and Reroute Latencies Data
+# -------------------------------------------------------
+
+print( "Combining Install, Withdraw, and Reroute Latencies Data" )
+
+if ( ncol( rerouteData ) == 0 ){  # Checks if rerouteData exists, so we can exclude it if necessary
+
+    requiredColumns <- c( "install_avg",
+                          "withdraw_avg"  )
+
+    tryCatch( avgs <- c( installWithdrawData[ requiredColumns ] ),
+              error = function( e ) {
+                  print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+                  print( "Required columns: " )
+                  print( requiredColumns )
+                  print( "Actual columns: " )
+                  print( names( installWithdrawData ) )
+                  print( "Error dump:" )
+                  print( e )
+                  quit( status = 1 )
+              }
+             )
+} else{
+    colnames( rerouteData ) <- c( "date",
+                                  "name",
+                                  "date",
+                                  "branch",
+                                  "is_old_flow",
+                                  "commit",
+                                  "scale",
+                                  "batch_size",
+                                  "reroute_avg",
+                                  "reroute_std" )
+
+    requiredColumns <- c( "install_avg",
+                          "withdraw_avg",
+                          "reroute_avg" )
+
+    tryCatch( avgs <- c( installWithdrawData[ 'install_avg' ],
+                         installWithdrawData[ 'withdraw_avg' ],
+                         rerouteData[ 'reroute_avg' ] ),
+              error = function( e ) {
+                  print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+                  print( "Required columns: " )
+                  print( requiredColumns )
+                  print( "Actual columns: " )
+                  print( c( names( installWithdrawData ), names( rerouteData ) ) )
+                  print( "Error dump:" )
+                  print( e )
+                  quit( status = 1 )
+              }
+             )
+
+}
+
+# Combine lists into data frames.
+dataFrame <- melt( avgs )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing data frame." )
+
+if ( ncol( rerouteData ) == 0 ){  # Check again whether rerouteData exists ( it may be absent for this batch size ) when building the data frame
+    dataFrame$scale <- c( installWithdrawData$scale,
+                          installWithdrawData$scale )
+
+    dataFrame$stds <- c( installWithdrawData$install_std,
+                         installWithdrawData$withdraw_std )
+} else{
+    dataFrame$scale <- c( installWithdrawData$scale,
+                          installWithdrawData$scale,
+                          rerouteData$scale )
+
+    dataFrame$stds <- c( installWithdrawData$install_std,
+                         installWithdrawData$withdraw_std,
+                         rerouteData$reroute_std )
+}
+
+colnames( dataFrame ) <- c( "ms",
+                            "type",
+                            "scale",
+                            "stds" )
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+dataFrame$type <- as.character( dataFrame$type )
+dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graph.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating the main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = ms,
+                                           ymin = ms,
+                                           ymax = ms + stds,
+                                           fill = type ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+theme_set( theme_grey( base_size = 22 ) )
+barWidth <- 1.3
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
+
+theme <- theme( plot.title=element_text( hjust = 0.5, size = 32, face='bold' ),
+                legend.position="bottom",
+                legend.text=element_text( size=22 ),
+                legend.title = element_blank(),
+                legend.key.size = unit( 1.5, 'lines' ),
+                plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+colors <- scale_fill_manual( values=c( "#F77670",
+                                       "#619DFA",
+                                       "#18BA48" ) )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title +
+                        colors
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph with error bars." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = barWidth,
+                            position = "dodge" )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+                                 position = position_dodge( barWidth ),
+                                 color = errorBarColor )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$ms + 0.035 * max( dataFrame$ms ),
+                          label = format( dataFrame$ms,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          position = position_dodge( width = barWidth ),
+                          size = 5.5,
+                          fontface = "bold" )
+
+wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values +
+          wrapLegend
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
+
+tryCatch( ggsave( errBarOutputFile,
+                  width = imageWidth,
+                  height = imageHeight,
+                  dpi = imageDPI ),
+          error = function( e ){
+              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R
new file mode 100644
index 0000000..0b68425
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R
@@ -0,0 +1,372 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+#
+# Example script:
+# Single Bench Flow Latency Graph with Eventually Consistent Flow Rule Store (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFbatchFlowResp_master_OldFlow_PostGraph.jpg):
+# Rscript SCPFbatchFlowResp.R <url> <port> <username> <pass> SCPFbatchFlowResp.R master y /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+old_flow <- 7
+save_directory <- 8
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFbatchFlowResp.R", c( "using-old-flow" ) )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+postOutputFile <- paste( args[ save_directory ],
+                         args[ graph_title ],
+                         "_",
+                         args[ branch_name ],
+                         if( args[ old_flow ] == "y" ) "_OldFlow" else "",
+                         "_PostGraph.jpg",
+                         sep="" )
+
+delOutputFile <- paste( args[ save_directory ],
+                        args[ graph_title ],
+                        "_",
+                        args[ branch_name ],
+                        if( args[ old_flow ] == "y" ) "_OldFlow" else "",
+                        "_DelGraph.jpg",
+                        sep="" )
+
+postChartTitle <- paste( "Single Bench Flow Latency - Post\n",
+                         "Last 3 Builds",
+                         if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "",
+                         sep = "" )
+delChartTitle <- paste( "Single Bench Flow Latency - Del\n",
+                        "Last 3 Builds",
+                        if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "",
+                        sep = "" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------------
+# Batch Flow Resp SQL Command
+# ---------------------------
+
+print( "Generating Batch Flow Resp SQL Command" )
+
+command <- paste( "SELECT * FROM batch_flow_tests WHERE branch='",
+                  args[ branch_name ],
+                  "' AND " ,
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                  "is_old_flow",
+                  " ORDER BY date DESC LIMIT 3",
+                  sep="" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -----------------
+# Post Data Sorting
+# -----------------
+
+print( "Sorting data for Post." )
+
+requiredColumns <- c( "posttoconfrm", "elapsepost" )
+
+tryCatch( postAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# -------------------------
+# Post Construct Data Frame
+# -------------------------
+
+postDataFrame <- melt( postAvgs )
+postDataFrame$scale <- fileData$scale
+postDataFrame$date <- fileData$date
+postDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
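+
+# melt() (from reshape2) stacks the two latency columns into long format, one
+# row per build per type; the length-3 iterative vector is recycled across the
+# melted rows and reverses the date-descending query order, so builds plot
+# left to right from oldest to newest.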
+
+colnames( postDataFrame ) <- c( "ms",
+                                "type",
+                                "scale",
+                                "date",
+                                "iterative" )
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+postDataFrame$type <- as.character( postDataFrame$type )
+postDataFrame$type <- factor( postDataFrame$type,
+                              levels = unique( postDataFrame$type ) )
+
+postDataFrame <- na.omit( postDataFrame )   # Omit any data that doesn't exist
+
+print( "Post Data Frame Results:" )
+print( postDataFrame )
+
+# ----------------
+# Del Data Sorting
+# ----------------
+
+requiredColumns <- c( "deltoconfrm", "elapsedel" )
+
+tryCatch( delAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+
+# ------------------------
+# Del Construct Data Frame
+# ------------------------
+
+delDataFrame <- melt( delAvgs )
+delDataFrame$scale <- fileData$scale
+delDataFrame$date <- fileData$date
+delDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
+
+colnames( delDataFrame ) <- c( "ms",
+                               "type",
+                               "scale",
+                               "date",
+                               "iterative" )
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+delDataFrame$type <- as.character( delDataFrame$type )
+delDataFrame$type <- factor( delDataFrame$type,
+                             levels = unique( delDataFrame$type ) )
+
+delDataFrame <- na.omit( delDataFrame )   # Omit any data that doesn't exist
+
+print( "Del Data Frame Results:" )
+print( delDataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------------
+# Initializing variables used in both graphs
+# ------------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xLabel <- xlab( "Build Date" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                       webColor( "light_blue" ) ) )
+
+wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
+
+barWidth <- 0.3
+
+theme <- graphTheme()
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = barWidth )
+
+# -----------------------
+# Post Generate Main Plot
+# -----------------------
+
+print( "Creating main plot for Post graph." )
+
+mainPlot <- ggplot( data = postDataFrame, aes( x = iterative,
+                                               y = ms,
+                                               fill = type ) )
+
+# -----------------------------------
+# Post Fundamental Variables Assigned
+# -----------------------------------
+
+print( "Generating fundamental graph data for Post graph." )
+
+xScaleConfig <- scale_x_continuous( breaks = postDataFrame$iterative,
+                                    label = postDataFrame$date )
+
+title <- labs( title = postChartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        wrapLegend +
+                        colors +
+                        title
+
+# --------------------------------
+# Post Generating Bar Graph Format
+# --------------------------------
+
+print( "Generating bar graph for Post graph." )
+
+sum <- fileData[ 'posttoconfrm' ] +
+       fileData[ 'elapsepost' ]
+
+values <- geom_text( aes( x = postDataFrame$iterative,
+                          y = sum + 0.03 * max( sum ),
+                          label = format( sum,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          values
+
+# ----------------------------
+# Post Exporting Graph to File
+# ----------------------------
+
+saveGraph( postOutputFile )
+
+# ----------------------
+# Del Generate Main Plot
+# ----------------------
+
+print( "Creating main plot for Del graph." )
+
+mainPlot <- ggplot( data = delDataFrame, aes( x = iterative,
+                                              y = ms,
+                                              fill = type ) )
+
+# ----------------------------------
+# Del Fundamental Variables Assigned
+# ----------------------------------
+
+print( "Generating fundamental graph data for Del graph." )
+
+xScaleConfig <- scale_x_continuous( breaks = delDataFrame$iterative,
+                                    label = delDataFrame$date )
+
+title <- labs( title = delChartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        wrapLegend +
+                        colors +
+                        title
+
+# -------------------------------
+# Del Generating Bar Graph Format
+# -------------------------------
+
+print( "Generating bar graph for Del graph." )
+
+sum <- fileData[ 'deltoconfrm' ] +
+       fileData[ 'elapsedel' ]
+
+values <- geom_text( aes( x = delDataFrame$iterative,
+                          y = sum + 0.03 * max( sum ),
+                          label = format( sum,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          values
+
+# ---------------------------
+# Del Exporting Graph to File
+# ---------------------------
+
+saveGraph( delOutputFile )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R
new file mode 100644
index 0000000..871000f
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R
@@ -0,0 +1,232 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Cbench Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFcbench_master_errGraph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFcbench.R <url> <port> <username> <pass> SCPFcbench master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFcbench.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ graph_title ],
+                           "_",
+                           args[ branch_name ],
+                           "_errGraph.jpg",
+                           sep="" )
+
+chartTitle <- paste( "Single-Node CBench Throughput", "Last 3 Builds", sep = "\n" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ------------------
+# Cbench SQL Command
+# ------------------
+
+print( "Generating Scale Topology SQL Command" )
+
+command <- paste( "SELECT * FROM cbench_bm_tests WHERE branch='",
+                  args[ branch_name ],
+                  "' ORDER BY date DESC LIMIT 3",
+                  sep="" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "avg" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+dataFrame <- melt( avgs )
+dataFrame$std <- c( fileData$std )
+dataFrame$date <- c( fileData$date )
+dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
+
+colnames( dataFrame ) <- c( "ms",
+                            "type",
+                            "std",
+                            "date",
+                            "iterative" )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+                                           y = ms,
+                                           ymin = ms,
+                                           ymax = ms + std ) )
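+
+# ymin/ymax span from the bar top ( ms ) to one standard deviation above it;
+# geom_errorbar below renders that interval as the error bar.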
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+barWidth <- 0.3
+
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
+                                    label = dataFrame$date )
+xLabel <- xlab( "Build Date" )
+yLabel <- ylab( "Responses / sec" )
+fillLabel <- labs( fill = "Type" )
+
+theme <- graphTheme()
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph with error bars." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            position = position_dodge(),
+                            width = barWidth,
+                            fill = webColor( "green" ) )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+                                 color = webColor( "darkerGray" ) )
+
+values <- geom_text( aes( x = dataFrame$iterative,
+                          y = fileData[ 'avg' ] + 0.025 * max( fileData[ 'avg' ] ),
+                          label = format( fileData[ 'avg' ],
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( errBarOutputFile ) # from saveGraph.R
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R
new file mode 100644
index 0000000..ffb91a9
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R
@@ -0,0 +1,327 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+has_flow_obj <- 1
+database_host <- 2
+database_port <- 3
+database_u_id <- 4
+database_pw <- 5
+test_name <- 6
+branch_name <- 7
+has_neighbors <- 8
+old_flow <- 9
+save_directory <- 10
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
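+# Indexing commandArgs() output past the supplied arguments yields NA, so
+# checking the last expected slot ( save_directory ) confirms that all ten
+# positional arguments were given.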
+if ( is.na( args[ save_directory ] ) ){
+
+    print( paste( "Usage: Rscript SCPFflowTp1g.R",
+                                  "<has-flow-obj>",
+                                  "<database-host>",
+                                  "<database-port>",
+                                  "<database-user-id>",
+                                  "<database-password>",
+                                  "<test-name>",
+                                  "<branch-name>",
+                                  "<has-neighbors>",
+                                  "<using-old-flow>",
+                                  "<directory-to-save-graphs>",
+                                  sep=" " ) )
+
+    quit( status = 1 )  # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+chartTitle <- "Flow Throughput Test"
+fileNeighborsModifier <- "no"
+commandNeighborModifier <- ""
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+if ( args[ has_flow_obj ] == 'y' ){
+    fileFlowObjModifier <- "_flowObj"
+    sqlFlowObjModifier <- "_fobj"
+    chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
+}
+
+chartTitle <- paste( chartTitle, "\nNeighbors =", sep="" )
+
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
+    fileNeighborsModifier <- "all"
+    commandNeighborModifier <- "scale=1 OR NOT "
+    chartTitle <- paste( chartTitle, "Cluster Size - 1" )
+} else {
+    chartTitle <- paste( chartTitle, "0" )
+}
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
+                           "_",
+                           args[ branch_name ],
+                           "_",
+                           fileNeighborsModifier,
+                           "-neighbors",
+                           fileFlowObjModifier,
+                           fileOldFlowModifier,
+                           "_graph.jpg",
+                           sep="" )
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+                  dbname = "onostest",
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
+
+# ---------------------------
+# Flow Throughput SQL Command
+# ---------------------------
+
+print( "Generating Flow Throughput SQL command." )
+
+command <- paste( "SELECT scale, avg( avg ), avg( std ) FROM flow_tp",
+                  sqlFlowObjModifier,
+                  "_tests WHERE (",
+                  commandNeighborModifier,
+                  "neighbors = 0 ) AND branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT max( date ) FROM flow_tp",
+                  sqlFlowObjModifier,
+                  "_tests WHERE branch='",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) GROUP BY scale ORDER BY scale",
+                  sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data for Flow Throughput." )
+
+colnames( fileData ) <- c( "scale",
+                           "avg",
+                           "std" )
+
+requiredColumns <- c( "avg" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+
+# ----------------------------
+# Flow TP Construct Data Frame
+# ----------------------------
+
+print( "Constructing Flow TP data frame." )
+
+dataFrame <- melt( avgs )              # This is where reshape2 comes in. Avgs list is converted to data frame
+dataFrame$scale <- fileData$scale      # Add node scaling to the data frame.
+dataFrame$std <- fileData$std
+
+colnames( dataFrame ) <- c( "throughput",
+                            "type",
+                            "scale",
+                            "std" )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Generating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+#     - data: the data frame that the graph will be based on
+#     - aes: the aesthetics of the graph, which require:
+#         - x: x-axis values (usually node scaling)
+#         - y: y-axis values (usually time in milliseconds)
+#         - fill: the category of the colored side-by-side bars (usually type)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = throughput,
+                                           ymin = throughput,
+                                           ymax = throughput + std,
+                                           fill = type ) )
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+# Formatting the plot
+theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
+width <- 0.7  # Width of the bars.
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale,
+                                    label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Throughput (,000 Flows/sec)" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
+
+theme <- theme( plot.title = element_text( hjust = 0.5,
+                                           size = 32,
+                                           face = 'bold' ),
+                plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+# Create the stacked bar graph with error bars.
+# geom_bar contains:
+#    - stat: data formatting (usually "identity")
+#    - width: the width of the bar types (declared above)
+# geom_errorbar contains similar arguments as geom_bar.
+print( "Generating bar graph with error bars." )
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = width,
+                            fill = "#FFAA3C" )
+
+errorBarFormat <- geom_errorbar( width = width,
+                                 position = position_dodge(),
+                                 color = errorBarColor )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
+                          label = format( dataFrame$throughput,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
+
+tryCatch( ggsave( errBarOutputFile,
+                  width = imageWidth,
+                  height = imageHeight,
+                  dpi = imageDPI ),
+          error = function( e ){
+              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R
new file mode 100644
index 0000000..c4c30b2
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R
@@ -0,0 +1,233 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFhostLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ graph_title ],
+                           "_",
+                           args[ branch_name ],
+                           "_errGraph.jpg",
+                           sep="" )
+
+chartTitle <- "Host Latency"
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ------------------------
+# Host Latency SQL Command
+# ------------------------
+
+print( "Generating Host Latency SQL Command" )
+
+command  <- paste( "SELECT * FROM host_latency_tests WHERE branch = '",
+                   args[ branch_name ],
+                   "' AND date IN ( SELECT MAX( date ) FROM host_latency_tests WHERE branch = '",
+                   args[ branch_name ],
+                   "' ) ",
+                   sep = "" )
+
+fileData <- retrieveData( con, command )
+
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "avg" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+dataFrame$std <- fileData$std
+
+colnames( dataFrame ) <- c( "ms",
+                            "type",
+                            "scale",
+                            "std" )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = ms,
+                                           ymin = ms,
+                                           ymax = ms + std ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+barWidth <- 0.9
+
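+# Explicit breaks limit the x-axis ticks to these scale values (assumed to be
+# the tested cluster sizes); otherwise ggplot2 would pick its own positions.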
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+
+theme <- graphTheme()
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph with error bars." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            position = position_dodge(),
+                            width = barWidth,
+                            fill = webColor( "purple" ) )
+
+errorBarFormat <- geom_errorbar( position = position_dodge(),
+                                 width = barWidth,
+                                 color = webColor( "darkerGray" ) )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$ms + 0.06 * max( dataFrame$ms ),
+                          label = format( dataFrame$ms,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( errBarOutputFile )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R
new file mode 100644
index 0000000..e9a9dc4
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R
@@ -0,0 +1,310 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+has_flow_obj <- 1
+database_host <- 2
+database_port <- 3
+database_u_id <- 4
+database_pw <- 5
+test_name <- 6
+branch_name <- 7
+has_neighbors <- 8
+old_flow <- 9
+save_directory <- 10
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( is.na( args[ save_directory ] ) ){
+
+    print( paste( "Usage: Rscript SCPFIntentEventTp.R",
+                                  "<has-flow-obj>",
+                                  "<database-host>",
+                                  "<database-port>",
+                                  "<database-user-id>",
+                                  "<database-password>",
+                                  "<test-name>",
+                                  "<branch-name>",
+                                  "<has-neighbors>",
+                                  "<using-old-flow>",
+                                  "<directory-to-save-graphs>",
+                                  sep=" " ) )
+
+    quit( status = 1 )  # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+chartTitle <- "Intent Event Throughput"
+fileNeighborsModifier <- "no"
+commandNeighborModifier <- ""
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+
+if ( args[ has_flow_obj ] == 'y' ){
+    fileFlowObjModifier <- "_flowObj"
+    sqlFlowObjModifier <- "_fobj"
+    chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
+}
+
+chartTitle <- paste( chartTitle, "\nevents/second with Neighbors =", sep="" )
+
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
+    fileNeighborsModifier <- "all"
+    commandNeighborModifier <- "scale=1 OR NOT "
+    chartTitle <- paste( chartTitle, "all" )
+} else {
+    chartTitle <- paste( chartTitle, "0" )
+}
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
+                           "_",
+                           args[ branch_name ],
+                           "_",
+                           fileNeighborsModifier,
+                           "-neighbors",
+                           fileFlowObjModifier,
+                           fileOldFlowModifier,
+                           "_graph.jpg",
+                           sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+                  dbname = "onostest",
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
+
+# -----------------------------------
+# Intent Event Throughput SQL Command
+# -----------------------------------
+
+print( "Generating Intent Event Throughput SQL command." )
+
+command <- paste( "SELECT scale, SUM( avg ) as avg FROM intent_tp",
+                  sqlFlowObjModifier,
+                  "_tests WHERE (",
+                  commandNeighborModifier,
+                  "neighbors = 0 ) AND branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT max( date ) FROM intent_tp",
+                  sqlFlowObjModifier,
+                  "_tests WHERE branch='",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) GROUP BY scale ORDER BY scale",
+                  sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "avg" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing data frame." )
+dataFrame <- melt( avgs )              # This is where reshape2 comes in. Avgs list is converted to data frame
+dataFrame$scale <- fileData$scale      # Add node scaling to the data frame.
+
+colnames( dataFrame ) <- c( "throughput",
+                            "type",
+                            "scale" )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Generating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+#     - data: the data frame that the graph will be based on
+#     - aes: the aesthetics of the graph, which require:
+#         - x: x-axis values (usually node scaling)
+#         - y: y-axis values (usually time in milliseconds)
+#         - fill: the category of the colored side-by-side bars (usually type)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = throughput,
+                                           fill = type ) )
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+# Formatting the plot
+theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
+width <- 0.7  # Width of the bars.
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale, label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Throughput (events/second)" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+
+theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
+                legend.position = "bottom",
+                legend.text = element_text( size = 18, face = "bold" ),
+                legend.title = element_blank(),
+                plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
+                          label = format( dataFrame$throughput,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7,
+                          fontface = "bold" )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        values
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = width,
+                            fill = "#169EFF" )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          title
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart to", errBarOutputFile ) )
+
+tryCatch( ggsave( errBarOutputFile,
+                  width = imageWidth,
+                  height = imageHeight,
+                  dpi = imageDPI ),
+          error = function( e ){
+              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R
new file mode 100644
index 0000000..2525009
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R
@@ -0,0 +1,303 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Mastership Failover Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFmastershipFailoverLat_master_errGraph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R <url> <port> <username> <pass> SCPFmastershipFailoverLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFmastershipFailoverLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+chartTitle <- "Mastership Failover Latency"
+
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ graph_title ],
+                           "_",
+                           args[ branch_name ],
+                           "_errGraph.jpg",
+                           sep="" )
+
+stackedBarOutputFile <- paste( args[ save_directory ],
+                               args[ graph_title ],
+                               "_",
+                               args[ branch_name ],
+                               "_stackedGraph.jpg",
+                               sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------------------------
+# Mastership Failover Latency SQL Command
+# ---------------------------------------
+
+print( "Generating Mastership Failover Latency SQL command" )
+
+command  <- paste( "SELECT * FROM mastership_failover_tests WHERE branch = '",
+                   args[ branch_name ],
+                   "' AND date IN ( SELECT MAX( date ) FROM mastership_failover_tests WHERE branch = '",
+                   args[ branch_name ],
+                   "' ) ",
+                   sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Combining averages into a list." )
+
+requiredColumns <- c( "kill_deact_avg", "deact_role_avg" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame from list." )
+
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+dataFrame$stds <- c( fileData$kill_deact_std,
+                     fileData$deact_role_std )
+
+colnames( dataFrame ) <- c( "ms",
+                            "type",
+                            "scale",
+                            "stds" )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+sum <- fileData[ 'deact_role_avg' ] +
+       fileData[ 'kill_deact_avg' ]
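+
+# Total latency per scale; used below to position and label the tops of the
+# stacked bars.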
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables for Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill = "Type" )
+
+barWidth <- 0.9
+
+theme <- graphTheme()
+
+barColors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                          webColor( "light_blue" ) ) )
+
+wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
+
+# ----------------------------------
+# Error Bar Graph Generate Main Plot
+# ----------------------------------
+
+print( "Creating main plot." )
+
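+# ymin/ymax feed geom_errorbar below: each whisker runs from the mean to the
+# mean plus one standard deviation.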
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = ms,
+                                           ymin = ms,
+                                           ymax = ms + stds,
+                                           fill = type ) )
+
+# ----------------------------------------------
+# Error Bar Graph Fundamental Variables Assigned
+# ----------------------------------------------
+
+print( "Generating fundamental graph data for the error bar graph." )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title +
+                        wrapLegend
+
+# -------------------------------------------
+# Error Bar Graph Generating Bar Graph Format
+# -------------------------------------------
+
+print( "Generating bar graph with error bars." )
+
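+# position_dodge() draws the two latency types side by side; the error bars
+# are dodged identically so each whisker sits on its own bar.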
+barGraphFormat <- geom_bar( stat = "identity",
+                            position = position_dodge(),
+                            width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+                                 position = position_dodge(),
+                                 color = webColor( "darkerGray" ) )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$ms + 0.02 * max( dataFrame$ms ),
+                          label = format( dataFrame$ms,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold",
+                          position = position_dodge( 0.9 ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          barColors +
+          errorBarFormat +
+          values
+
+# ---------------------------------------
+# Error Bar Graph Exporting Graph to File
+# ---------------------------------------
+
+saveGraph( errBarOutputFile )
+
+# ------------------------------------------------
+# Stacked Bar Graph Fundamental Variables Assigned
+# ------------------------------------------------
+
+print( "Generating fundamental graph data for the stacked bar graph." )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        title +
+                        wrapLegend
+
+# ---------------------------------------------
+# Stacked Bar Graph Generating Bar Graph Format
+# ---------------------------------------------
+
+print( "Generating stacked bar chart." )
+stackedBarFormat <- geom_bar( stat = "identity",
+                              width = barWidth )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = sum + 0.02 * max( sum ),
+                          label = format( sum,
+                                          digits = 3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          stackedBarFormat +
+          barColors +
+          values
+
+# -----------------------------------------
+# Stacked Bar Graph Exporting Graph to File
+# -----------------------------------------
+
+saveGraph( stackedBarOutputFile )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R
new file mode 100644
index 0000000..70d6607
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R
@@ -0,0 +1,367 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Port Latency Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFportLat_master_UpErrBarWithStack.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFportLat.R <url> <port> <username> <pass> SCPFportLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFmastershipFailoverLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+errBarOutputFileUp <- paste( args[ save_directory ],
+                             "SCPFportLat_",
+                             args[ branch_name ],
+                             "_UpErrBarWithStack.jpg",
+                             sep = "" )
+
+errBarOutputFileDown <- paste( args[ save_directory ],
+                               "SCPFportLat_",
+                               args[ branch_name ],
+                               "_DownErrBarWithStack.jpg",
+                               sep = "" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ------------------------
+# Port Latency SQL Command
+# ------------------------
+
+print( "Generating Port Latency SQL Command" )
+
+command <- paste( "SELECT * FROM port_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT MAX( date ) FROM port_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' ) ",
+                  sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -----------------------------
+# Port Up Averages Data Sorting
+# -----------------------------
+
+print( "Sorting data for Port Up Averages." )
+
+requiredColumns <- c( "up_ofp_to_dev_avg", "up_dev_to_link_avg", "up_link_to_graph_avg" )
+
+tryCatch( upAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# ----------------------------
+# Port Up Construct Data Frame
+# ----------------------------
+
+print( "Constructing Port Up data frame." )
+
+upAvgsDataFrame <- melt( upAvgs )
+upAvgsDataFrame$scale <- fileData$scale
+upAvgsDataFrame$up_std <- fileData$up_std
+
+colnames( upAvgsDataFrame ) <- c( "ms",
+                             "type",
+                             "scale",
+                             "stds" )
+
+upAvgsDataFrame <- na.omit( upAvgsDataFrame )
+
+upAvgsDataFrame$type <- as.character( upAvgsDataFrame$type )
+upAvgsDataFrame$type <- factor( upAvgsDataFrame$type, levels=unique( upAvgsDataFrame$type ) )
+
+sumOfUpAvgs <- fileData[ 'up_ofp_to_dev_avg' ] +
+               fileData[ 'up_dev_to_link_avg' ] +
+               fileData[ 'up_link_to_graph_avg' ]
+
+print( "Up Averages Results:" )
+print( upAvgsDataFrame )
+
+# -------------------------------
+# Port Down Averages Data Sorting
+# -------------------------------
+
+print( "Sorting data for Port Down Averages." )
+
+requiredColumns <- c( "down_ofp_to_dev_avg", "down_dev_to_link_avg", "down_link_to_graph_avg" )
+
+tryCatch( downAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# ------------------------------
+# Port Down Construct Data Frame
+# ------------------------------
+
+print( "Constructing Port Down data frame." )
+
+downAvgsDataFrame <- melt( downAvgs )
+downAvgsDataFrame$scale <- fileData$scale
+downAvgsDataFrame$down_std <- fileData$down_std
+
+colnames( downAvgsDataFrame ) <- c( "ms",
+                               "type",
+                               "scale",
+                               "stds" )
+
+downAvgsDataFrame <- na.omit( downAvgsDataFrame )
+
+downAvgsDataFrame$type <- as.character( downAvgsDataFrame$type )
+downAvgsDataFrame$type <- factor( downAvgsDataFrame$type, levels=unique( downAvgsDataFrame$type ) )
+
+sumOfDownAvgs <- fileData[ 'down_ofp_to_dev_avg' ] +
+                 fileData[ 'down_dev_to_link_avg' ] +
+                 fileData[ 'down_link_to_graph_avg' ]
+
+print( "Down Averages Results:" )
+print( downAvgsDataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables For Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+
+barWidth <- 1
+
+wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
+
+theme <- graphTheme()
+
+subtitle <- lastUpdatedLabel()
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                       webColor( "light_blue" ),
+                                       webColor( "green" ) ) )
+
+errorBarColor <- webColor( "darkerGray" )
+
+# --------------------------
+# Port Up Generate Main Plot
+# --------------------------
+
+print( "Generating main plot (Port Up Latency)." )
+
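+# The error bars use the end-to-end average as their base, so the whiskers
+# appear at the top of the full stacked bar rather than on each segment.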
+mainPlot <- ggplot( data = upAvgsDataFrame, aes( x = scale,
+                                                 y = ms,
+                                                 fill = type,
+                                                 ymin = fileData[ 'up_end_to_end_avg' ],
+                                                 ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
+
+# --------------------------------------
+# Port Up Fundamental Variables Assigned
+# --------------------------------------
+
+print( "Generating fundamental graph data (Port Up Latency)." )
+
+title <- labs( title = "Port Up Latency", subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        wrapLegend +
+                        title +
+                        colors
+
+# -----------------------------------
+# Port Up Generating Bar Graph Format
+# -----------------------------------
+
+print( "Generating bar graph with error bars (Port Up Latency)." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+                                 color = errorBarColor )
+
+values <- geom_text( aes( x = upAvgsDataFrame$scale,
+                          y = sumOfUpAvgs + 0.03 * max( sumOfUpAvgs ),
+                          label = format( sumOfUpAvgs,
+                                          digits=3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values
+
+# -------------------------------
+# Port Up Exporting Graph to File
+# -------------------------------
+
+saveGraph( errBarOutputFileUp )
+
+# ----------------------------
+# Port Down Generate Main Plot
+# ----------------------------
+
+print( "Generating main plot (Port Down Latency)." )
+
+mainPlot <- ggplot( data = downAvgsDataFrame, aes( x = scale,
+                                                   y = ms,
+                                                   fill = type,
+                                                   ymin = fileData[ 'down_end_to_end_avg' ],
+                                                   ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
+
+# ----------------------------------------
+# Port Down Fundamental Variables Assigned
+# ----------------------------------------
+
+print( "Generating fundamental graph data (Port Down Latency)." )
+
+title <- labs( title = "Port Down Latency", subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        wrapLegend +
+                        title +
+                        colors
+
+# -------------------------------------
+# Port Down Generating Bar Graph Format
+# -------------------------------------
+
+print( "Generating bar graph with error bars (Port Down Latency)." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+                                 color = errorBarColor )
+
+values <- geom_text( aes( x = downAvgsDataFrame$scale,
+                          y = sumOfDownAvgs + 0.03 * max( sumOfDownAvgs ),
+                          label = format( sumOfDownAvgs,
+                                          digits=3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          values
+
+# ---------------------------------
+# Port Down Exporting Graph to File
+# ---------------------------------
+
+saveGraph( errBarOutputFileDown )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R
new file mode 100644
index 0000000..2afe86e
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R
@@ -0,0 +1,238 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Scale Topology Latency Test Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFscaleTopo_master_graph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFscaleTopo.R <url> <port> <username> <pass> SCPFscaleTopo master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFscaleTopo.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+outputFile <- paste( args[ save_directory ],
+                     args[ graph_title ],
+                     "_",
+                     args[ branch_name ],
+                     "_graph.jpg",
+                     sep="" )
+
+chartTitle <- "Scale Topology Latency Test"
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# --------------------------
+# Scale Topology SQL Command
+# --------------------------
+
+print( "Generating Scale Topology SQL Command" )
+
+command <- paste( "SELECT * FROM scale_topo_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT MAX( date ) FROM scale_topo_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' ) ",
+                  sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "last_role_request_to_last_topology", "last_connection_to_last_role_request", "first_connection_to_last_connection" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+# Parse lists into data frames.
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+colnames( dataFrame ) <- c( "s",
+                            "type",
+                            "scale")
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+dataFrame$type <- as.character( dataFrame$type )
+dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
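+# 'iterative' is a positional index used as the x value; the x-axis breaks are
+# relabeled with the corresponding scale values below ( see xScaleConfig ).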
+dataFrame$iterative <- seq( 1, nrow( fileData ), by = 1 )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+sum <- fileData[ 'last_role_request_to_last_topology' ] +
+       fileData[ 'last_connection_to_last_role_request' ] +
+       fileData[ 'first_connection_to_last_connection' ]
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+                                           y = s,
+                                           fill = type ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
+                                    label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (s)" )
+fillLabel <- labs( fill="Type" )
+
+width <- 0.6  # Width of the bars.
+
+theme <- graphTheme()
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                       webColor( "green" ),
+                                       webColor( "light_blue" ) ) )
+
+values <- geom_text( aes( x = dataFrame$iterative,
+                          y = sum + 0.02 * max( sum ),
+                          label = format( sum,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 7.0,
+                          fontface = "bold" )
+
+wrapLegend <- guides( fill = guide_legend( nrow=2, byrow=TRUE ) )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        values +
+                        wrapLegend +
+                        title +
+                        colors
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+
+barGraphFormat <- geom_bar( stat = "identity", width = width )
+
+result <- fundamentalGraphData +
+          barGraphFormat
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R
new file mode 100644
index 0000000..045f5e7
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R
@@ -0,0 +1,290 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+old_flow = 8
+save_directory = 9
+
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( is.na( args[ save_directory ] ) ){
+    print( paste( "Usage: Rscript SCPFInstalledIntentsFlows",
+                                  "<has-flowObj>",
+                                  "<database-host>",
+                                  "<database-port>",
+                                  "<database-user-id>",
+                                  "<database-password>",
+                                  "<test-name>",
+                                  "<branch-name>",
+                                  "<using-old-flow>",
+                                  "<directory-to-save-graphs>",
+                                  sep=" " ) )
+
+    quit( status = 1 )  # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+chartTitle <- "Number of Installed Intents & Flows"
+
+if ( args[ has_flow_obj ] == "y" ){
+    fileFlowObjModifier <- "_flowObj"
+    sqlFlowObjModifier <- "fobj_"
+    chartTitle <- "Number of Installed Intents & Flows\n with Flow Objectives"
+}
+fileOldFlowModifier <- ""
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+
+outputFile <- paste( args[ save_directory ],
+                     args[ test_name ],
+                     fileFlowObjModifier,
+                     fileOldFlowModifier,
+                     "_",
+                     args[ branch_name ],
+                     "_errGraph.jpg",
+                     sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+                  dbname = "onostest",
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
+
+# -------------------------------
+# Scaling Max Intents SQL Command
+# -------------------------------
+
+print( "Scaling Max Intents SQL Command" )
+
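+# The fobj_ modifier selects the flow-objective variant of the results table
+# ( e.g. max_intents_fobj_tests ), and the is_old_flow predicate filters by
+# flow rule store implementation.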
+command <- paste( "SELECT * FROM max_intents_",
+                  sqlFlowObjModifier,
+                  "tests WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT MAX( date ) FROM max_intents_",
+                  sqlFlowObjModifier,
+                  "tests WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) ",
+                  sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "max_intents_ovs", "max_flows_ovs" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+
+colnames( dataFrame ) <- c( "ms", "type", "scale" )
+
+dataFrame$type <- as.character( dataFrame$type )
+dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+                                           y = ms,
+                                           fill = type ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+barWidth <- 1.3
+theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
+xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Max Number of Intents/Flow Rules" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+
+theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
+                legend.position = "bottom",
+                legend.text = element_text( size=22 ),
+                legend.title = element_blank(),
+                legend.key.size = unit( 1.5, 'lines' ),
+                plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+colors <- scale_fill_manual( values = c( "#F77670",
+                                         "#619DFA" ) )
+
+wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        fillLabel +
+                        theme +
+                        wrapLegend +
+                        title +
+                        colors
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+                            position = position_dodge(),
+                            width = barWidth )
+
+values <- geom_text( aes( x = dataFrame$scale,
+                          y = dataFrame$ms + 0.015 * max( dataFrame$ms ),
+                          label = format( dataFrame$ms,
+                                          digits=3,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                          size = 5.2,
+                          fontface = "bold",
+                          position = position_dodge( width = 1.25 ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart to", outputFile ) )
+
+tryCatch( ggsave( outputFile,
+                  width = imageWidth,
+                  height = imageHeight,
+                  dpi = imageDPI ),
+          error = function( e ){
+              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart out to", outputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R
new file mode 100644
index 0000000..192ac8f
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R
@@ -0,0 +1,361 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Switch Latency Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFswitchLat_master_UpErrBarWithStack.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFswitchLat.R <url> <port> <username> <pass> SCPFswitchLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )    # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFswitchLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+errBarOutputFileUp <- paste( args[ save_directory ],
+                             "SCPFswitchLat_",
+                             args[ branch_name ],
+                             "_UpErrBarWithStack.jpg",
+                             sep="" )
+
+errBarOutputFileDown <- paste( args[ save_directory ],
+                               "SCPFswitchLat_",
+                               args[ branch_name ],
+                               "_DownErrBarWithStack.jpg",
+                               sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# --------------------------
+# Switch Latency SQL Command
+# --------------------------
+
+print( "Generating Switch Latency SQL Command" )
+
+command <- paste( "SELECT * FROM switch_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT MAX( date ) FROM switch_latency_details WHERE branch='",
+                  args[ branch_name ],
+                  "' )",
+                  sep="" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -------------------------------
+# Switch Up Averages Data Sorting
+# -------------------------------
+
+print( "Sorting data for Switch Up Averages." )
+
+requiredColumns <- c( "up_device_to_graph_avg",
+                      "feature_reply_to_device_avg",
+                      "tcp_to_feature_reply_avg" )
+
+tryCatch( upAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# ------------------------------
+# Switch Up Construct Data Frame
+# ------------------------------
+
+print( "Constructing Switch Up data frame." )
+
+upAvgsData <- melt( upAvgs )
+upAvgsData$scale <- fileData$scale
+upAvgsData$up_std <- fileData$up_std
+upAvgsData <- na.omit( upAvgsData )
+
+colnames( upAvgsData ) <- c( "ms",
+                             "type",
+                             "scale",
+                             "stds" )
+
+upAvgsData$type <- as.character( upAvgsData$type )
+upAvgsData$type <- factor( upAvgsData$type, levels=unique( upAvgsData$type ) )
+
+sumOfUpAvgs <- fileData[ 'up_device_to_graph_avg' ] +
+               fileData[ 'feature_reply_to_device_avg' ] +
+               fileData[ 'tcp_to_feature_reply_avg' ]
+
+print( "Up Averages Results:" )
+print( upAvgsData )
+
+# ---------------------------------
+# Switch Down Averages Data Sorting
+# ---------------------------------
+
+print( "Sorting data for Switch Down Averages." )
+
+requiredColumns <- c( "down_device_to_graph_avg",
+                      "ack_to_device_avg",
+                      "fin_ack_to_ack_avg" )
+
+tryCatch( downAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------------------
+# Switch Down Construct Data Frame
+# --------------------------------
+
+print( "Constructing Switch Down data frame." )
+
+downAvgsData <- melt( downAvgs )
+downAvgsData$scale <- fileData$scale
+downAvgsData$down_std <- fileData$down_std
+
+colnames( downAvgsData ) <- c( "ms",
+                               "type",
+                               "scale",
+                               "stds" )
+
+downAvgsData$type <- as.character( downAvgsData$type )
+downAvgsData$type <- factor( downAvgsData$type, levels=unique( downAvgsData$type ) )
+
+downAvgsData <- na.omit( downAvgsData )
+
+sumOfDownAvgs <- fileData[ 'down_device_to_graph_avg' ] +
+                 fileData[ 'ack_to_device_avg' ] +
+                 fileData[ 'fin_ack_to_ack_avg' ]
+
+print( "Down Averages Results:" )
+print( downAvgsData )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables For Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+
+errorBarColor <- webColor( "darkerGray" )
+barWidth <- 1
+
+theme <- graphTheme()
+
+subtitle <- lastUpdatedLabel()
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                       webColor( "light_blue" ),
+                                       webColor( "green" ) ) )
+
+# ----------------------------
+# Switch Up Generate Main Plot
+# ----------------------------
+
+print( "Creating main plot (Switch Up Latency)." )
+
+mainPlot <- ggplot( data = upAvgsData, aes( x = scale,
+                                            y = ms,
+                                            fill = type,
+                                            ymin = fileData[ 'up_end_to_end_avg' ],
+                                            ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
+
+# ----------------------------------------
+# Switch Up Fundamental Variables Assigned
+# ----------------------------------------
+
+print( "Generating fundamental graph data (Switch Up Latency)." )
+
+title <- labs( title = "Switch Up Latency", subtitle = subtitle )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title +
+                        colors
+
+# -------------------------------------
+# Switch Up Generating Bar Graph Format
+# -------------------------------------
+
+print( "Generating bar graph with error bars (Switch Up Latency)." )
+
+barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
+errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
+
+barGraphValues <- geom_text( aes( x = upAvgsData$scale,
+                                  y = sumOfUpAvgs + 0.04 * max( sumOfUpAvgs ),
+                                  label = format( sumOfUpAvgs,
+                                                  digits = 3,
+                                                  big.mark = ",",
+                                                  scientific = FALSE ) ),
+                                  size = 7.0,
+                                  fontface = "bold" )
+
+wrapLegend <- guides( fill = guide_legend( nrow = 2, byrow = TRUE ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          barGraphValues +
+          wrapLegend
+
+# ---------------------------------
+# Switch Up Exporting Graph to File
+# ---------------------------------
+
+saveGraph( errBarOutputFileUp )
+
+# ------------------------------
+# Switch Down Generate Main Plot
+# ------------------------------
+
+print( "Creating main plot (Switch Down Latency)." )
+
+mainPlot <- ggplot( data = downAvgsData, aes( x = scale,
+                                              y = ms,
+                                              fill = type,
+                                              ymin = fileData[ 'down_end_to_end_avg' ],
+                                              ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
+
+# ------------------------------------------
+# Switch Down Fundamental Variables Assigned
+# ------------------------------------------
+
+print( "Generating fundamental graph data (Switch Down Latency)." )
+
+title <- labs( title = "Switch Down Latency", subtitle = subtitle )
+
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title +
+                        colors
+
+# ---------------------------------------
+# Switch Down Generating Bar Graph Format
+# ---------------------------------------
+
+print( "Generating bar graph with error bars (Switch Down Latency)." )
+barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
+errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
+
+barGraphValues <- geom_text( aes( x = downAvgsData$scale,
+                                  y = sumOfDownAvgs + 0.04 * max( sumOfDownAvgs ),
+                                  label = format( sumOfDownAvgs,
+                                                  digits = 3,
+                                                  big.mark = ",",
+                                                  scientific = FALSE ) ),
+                                  size = 7.0,
+                                  fontface = "bold" )
+
+wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          barGraphValues +
+          wrapLegend
+
+# -----------------------------------
+# Switch Down Exporting Graph to File
+# -----------------------------------
+
+saveGraph( errBarOutputFileDown )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/dependencies/cliArgs.R b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/cliArgs.R
new file mode 100644
index 0000000..e48a5dc
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/cliArgs.R
@@ -0,0 +1,60 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+database_host <- 1
+database_host_str <- "<database-host>"
+
+database_port <- 2
+database_port_str <- "<database-port>"
+
+database_u_id <- 3
+database_u_id_str <- "<database-user-id>"
+
+database_pw <- 4
+database_pw_str <- "<database-password>"
+
+graph_title <- 5
+graph_title_str <- "<graph-title>"
+
+branch_name <- 6
+branch_name_str <- "<branch-name>"
+
+save_directory_str <- "<directory-to-save-graph>"
+
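+# Prints a standardized usage string for the graph scripts. Entries in
+# specialArgsList are appended as extra placeholders between <branch-name>
+# and <directory-to-save-graph>. Example ( hypothetical script name ):
+#   usage( "SCPFexample.R", c( "using-old-flow" ) )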
+usage <- function( filename, specialArgsList = c() ){
+    special_args_str = ""
+    for ( a in specialArgsList ) {
+        special_args_str = paste( special_args_str, "<", a, "> ", sep="" )
+    }
+    output <- paste( "Usage: Rscript",
+                     filename,
+                     database_host_str,
+                     database_port_str,
+                     database_u_id_str,
+                     database_pw_str,
+                     graph_title_str,
+                     branch_name_str,
+                     special_args_str,
+                     sep=" " )
+    output <- paste( output, save_directory_str, sep="" )
+    print( output )
+}
\ No newline at end of file
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/dependencies/fundamentalGraphData.R b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/fundamentalGraphData.R
new file mode 100644
index 0000000..e2c4ed8
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/fundamentalGraphData.R
@@ -0,0 +1,56 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+graphTheme <- function(){
+    theme( plot.title = element_text( hjust = 0.5, size = 32, face ='bold' ),
+           axis.text.x = element_text( angle = 0, size = 14 ),
+           legend.position = "bottom",
+           legend.text = element_text( size = 22 ),
+           legend.title = element_blank(),
+           legend.key.size = unit( 1.5, 'lines' ),
+           legend.direction = 'horizontal',
+           plot.subtitle = element_text( size=16, hjust=1.0 ) )
+}
+
+webColor <- function( color ){
+    switch( color,
+            red = "#FF0000",
+            redv2 = "#FF6666", # more readable version of red
+            green = "#33CC33",
+            blue = "#0033FF",
+            light_blue = "#3399FF",
+            black = "#111111",
+            yellow = "#EEB600",
+            purple = "#9900FF",
+            gray = "#CCCCCC",
+            darkerGray = "#666666" )
+}
+
+wrapLegend <- function(){
+    guides( color = guide_legend( nrow = 2, byrow = TRUE ) )
+}
+
+lastUpdatedLabel <- function(){
+    paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+}
+
+defaultTextSize <- function(){
+    theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
+}
\ No newline at end of file
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/dependencies/initSQL.R b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/initSQL.R
new file mode 100644
index 0000000..dec5f2e
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/initSQL.R
@@ -0,0 +1,154 @@
+# Copyright 2018 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+pipelineMinValue <- 1000
+
+initSQL <- function( host, port, user, pass ) {
+    dbConnect( dbDriver( "PostgreSQL" ),
+               dbname = "onostest",
+               host = host,
+               port = strtoi( port ),
+               user = user,
+               password = pass )
+}
+
+simpleSQLCommand <- function( testName, branch, limit=0 ){
+    paste( "SELECT * FROM executed_test_tests WHERE actual_test_name='",
+           testName,
+           "' AND branch='",
+           branch,
+           "' ORDER BY date DESC ",
+           if (limit > 0) "LIMIT " else "",
+           if (limit > 0) limit else "",
+           sep="" )
+}
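+# Example ( hypothetical values ): simpleSQLCommand( "FUNCintent", "master", 20 )
+# builds a query for the 20 most recent FUNCintent runs on the master branch.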
+
+retrieveData <- function( con, sqlCommand ){
+
+    print( "Sending SQL command:" )
+    print( sqlCommand )
+
+    result <- dbGetQuery( con, sqlCommand )
+
+    # Check if data has been received
+    if ( nrow( result ) == 0 ){
+        print( "[ERROR]: No data received from the databases. Please double check this by manually running the SQL command." )
+        quit( status = 1 )
+    }
+    result
+}
+
+generateMultiTestMultiBuildSQLCommand <- function( branch, testsToInclude, buildsToShow ){
+    tests <- getTestList( testsToInclude )
+    multiTestSQLCommand( branch, tests, buildsToShow, TRUE )
+}
+
+generateMultiTestSingleBuildSQLCommand <- function( branch, testsToInclude, buildToShow ){
+    tests <- getTestList( testsToInclude )
+    operator <- "= "
+    if ( buildToShow == "latest" ){
+        operator <- ">= "
+        buildToShow <- "1000"
+    }
+
+    multiTestSQLCommand( branch, tests, buildToShow, FALSE, operator )
+}
+
+generateGroupedTestSingleBuildSQLCommand <- function( branch, groupsToInclude, buildToShow ){
+    operator <- "= "
+    if( buildToShow == "latest" ){
+        operator <- ">= "
+        buildToShow <- "1000"
+    }
+
+    tests <- strsplit( groupsToInclude, ";" )
+
+    sqlCommands <- c()
+
+    for( i in 1:length( tests[[1]] ) ){
+        splitTestList <- strsplit( tests[[1]][ i ], "-" )
+        testList <- splitTestList[[1]][2]
+
+        testsCommand <- "'"
+        for ( test in as.list( strsplit( testList, "," )[[1]] ) ){
+            testsCommand <- paste( testsCommand, test, "','", sep="" )
+        }
+        testsCommand <- substr( testsCommand, 0, nchar( testsCommand ) - 2 )
+
+        sqlCommands = c( sqlCommands, multiTestSQLCommand( branch, testsCommand, buildToShow, FALSE, operator ) )
+    }
+    sqlCommands
+}
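+
+# groupsToInclude is expected in the form "<title>-<test>,<test>;<title>-<test>,...",
+# e.g. "FUNC-FUNCflow,FUNCintent;HA-HAsanity" (a hypothetical grouping): each
+# semicolon-separated group becomes one SQL command, with its comma-separated tests
+# quoted for the IN (...) clause as 'FUNCflow','FUNCintent'.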
+
+getTitlesFromGroupTest <- function( branch, groupsToInclude ){
+    tests <- strsplit( groupsToInclude, ";" )
+    titles <- list()
+    for( i in 1:length( tests[[1]] ) ){
+        splitTestList <- strsplit( tests[[1]][ i ], "-" )
+        titles[[i]] <- splitTestList[[1]][1]
+    }
+    titles
+}
+
+getTestList <- function( testsToInclude ){
+    tests <- "'"
+    for ( test in as.list( strsplit( testsToInclude, "," )[[1]] ) ){
+        tests <- paste( tests, test, "','", sep="" )
+    }
+    tests <- substr( tests, 0, nchar( tests ) - 2 )
+    tests
+}
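+
+# e.g. getTestList( "FUNCflow,FUNCintent" ) returns the string "'FUNCflow','FUNCintent'",
+# ready to be spliced into a SQL IN (...) clause.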
+
+multiTestSQLCommand <- function( branch, tests, builds, isDisplayingMultipleBuilds, operator=">= " ){
+    if ( isDisplayingMultipleBuilds ){
+        operator2 <- "<="
+        multipleBuildsToShow <- builds
+        singleBuild <- pipelineMinValue
+    }
+    else{
+        operator2 <- "="
+        multipleBuildsToShow <- 1
+        singleBuild <- builds
+    }
+
+    paste( "SELECT * ",
+           "FROM executed_test_tests a ",
+           "WHERE ( SELECT COUNT( * ) FROM executed_test_tests b ",
+           "WHERE b.branch='",
+           branch,
+           "' AND b.actual_test_name IN (",
+           tests,
+           ") AND a.actual_test_name = b.actual_test_name AND a.date <= b.date AND b.build ", operator,
+           singleBuild,
+           " ) ",
+           operator2,
+           " ",
+           multipleBuildsToShow,
+           " AND a.branch='",
+           branch,
+           "' AND a.actual_test_name IN (",
+           tests,
+           ") AND a.build ", operator,
+           singleBuild,
+           " ORDER BY a.actual_test_name DESC, a.date DESC",
+           sep="")
+}
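+
+# The correlated subquery above counts, per test, how many qualifying rows are at least
+# as recent as row "a". Requiring that count to be <= N therefore keeps the N most
+# recent builds of each test (multi-build mode), while requiring it to be = 1 keeps a
+# single build per test (single-build mode).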
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/dependencies/saveGraph.R b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/saveGraph.R
new file mode 100644
index 0000000..257ad8d
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/dependencies/saveGraph.R
@@ -0,0 +1,42 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+
+saveGraph <- function( outputFile ){
+    print( paste( "Saving result graph to", outputFile ) )
+
+    tryCatch( ggsave( outputFile,
+                      width = imageWidth,
+                      height = imageHeight,
+                      dpi = imageDPI ),
+              error = function( e ){
+                  print( "[ERROR]: There was a problem saving the graph due to a graph formatting exception.  Error dump:" )
+                  print( e )
+                  quit( status = 1 )
+              }
+            )
+
+    print( paste( "[SUCCESS]: Successfully wrote result graph out to", outputFile ) )
+}
\ No newline at end of file
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/histogramMultipleTestGroups.R b/TestON/JenkinsFile/wikiGraphRScripts/histogramMultipleTestGroups.R
new file mode 100644
index 0000000..0143071
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/histogramMultipleTestGroups.R
@@ -0,0 +1,313 @@
+# Copyright 2018 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# ALL tests (https://jenkins.onosproject.org/view/QA/job/postjob-VM/lastSuccessfulBuild/artifact/ALL_master_build-latest_test-suite-summary.jpg):
+# Rscript histogramMultipleTestGroups.R <url> <port> <username> <pass> ALL master "FUNCbgpls,FUNCflow,FUNCformCluster,FUNCgroup,FUNCintent,FUNCintentRest,FUNCipv6Intent,FUNCnetCfg,FUNCnetconf,FUNCoptical,FUNCovsdbtest,FUNCvirNetNB,HAbackupRecover,HAclusterRestart,HAfullNetPartition,HAkillNodes,HAsanity,HAscaling,HAsingleInstanceRestart,HAstopNodes,HAswapNodes,HAupgrade,HAupgradeRollback,PLATdockertest,USECASE_SdnipFunction,USECASE_SdnipFunctionCluster,VPLSBasic,VPLSfailsafe" latest /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+testsToInclude <- 7
+build_to_show <- 8
+save_directory <- 9
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    specialArgs <- c( "tests-to-include-(as-one-string-sep-groups-by-semicolon-title-as-first-group-item-sep-by-dash)",
+                      "build-to-show" )
+    usage( "histogramMultipleTestGroups.R", specialArgs )
+    quit( status = 1 )
+}
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------
+# Test Case SQL Command
+# ---------------------
+
+print( "Generating Test Case SQL command." )
+
+sqlCommands <- generateGroupedTestSingleBuildSQLCommand( args[ branch_name ],
+                                                         args[ testsToInclude ],
+                                                         args[ build_to_show ] )
+
+titles <- getTitlesFromGroupTest( args[ branch_name ],
+                                  args[ testsToInclude ] )
+
+dbResults <- list()
+i <- 1
+for ( command in sqlCommands ){
+    dbResults[[i]] <- retrieveData( con, command )
+    i <- i + 1
+}
+
+print( "dbResult:" )
+print( dbResults )
+
+# -------------------------------
+# Create Title and Graph Filename
+# -------------------------------
+
+print( "Creating title of graph." )
+
+titlePrefix <- paste( args[ graph_title ], " ", sep="" )
+if ( args[ graph_title ] == "ALL" ){
+    titlePrefix <- ""
+}
+
+if ( args[ build_to_show ] == "latest" ){
+    buildTitle <- "\nLatest Test Results"
+    filebuild_to_show <- "latest"
+} else {
+    buildTitle <- paste( " \n Build #", args[ build_to_show ], sep="" )
+    filebuild_to_show <- args[ build_to_show ]
+}
+
+title <- paste( titlePrefix,
+                "Summary of Test Suites - ",
+                args[ branch_name ],
+                buildTitle,
+                sep="" )
+
+print( "Creating graph filename." )
+
+outputFile <- paste( args[ save_directory ],
+                     args[ graph_title ],
+                     "_",
+                     args[ branch_name ],
+                     "_build-",
+                     filebuild_to_show,
+                     "_test-suite-summary.jpg",
+                     sep="" )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+passNum <- list()
+failNum <- list()
+exeNum <- list()
+skipNum <- list()
+totalNum <- list()
+
+passPercent <- list()
+failPercent <- list()
+exePercent <- list()
+nonExePercent <- list()
+
+actualPassPercent <- list()
+actualFailPercent <- list()
+
+appName <- c()
+afpName <- c()
+nepName <- c()
+
+tmpPos <- c()
+tmpCases <- c()
+
+for ( i in 1:length( dbResults ) ){
+    t <- dbResults[[i]]
+
+    passNum[[i]] <- sum( t$num_passed )
+    failNum[[i]] <- sum( t$num_failed )
+    exeNum[[i]] <- passNum[[i]] + failNum[[i]]
+    totalNum[[i]] <- sum( t$num_planned )
+    skipNum[[i]] <- totalNum[[i]] - exeNum[[i]]
+
+    passPercent[[i]] <- passNum[[i]] / exeNum[[i]]
+    failPercent[[i]] <- failNum[[i]] / exeNum[[i]]
+    exePercent[[i]] <- exeNum[[i]] / totalNum[[i]]
+    nonExePercent[[i]] <- ( 1 - exePercent[[i]] ) * 100
+
+    actualPassPercent[[i]] <- passPercent[[i]] * exePercent[[i]] * 100
+    actualFailPercent[[i]] <- failPercent[[i]] * exePercent[[i]] * 100
+
+    appName <- c( appName, "Passed" )
+    afpName <- c( afpName, "Failed" )
+    nepName <- c( nepName, "Skipped/Unexecuted" )
+
+    tmpPos <- c( tmpPos, 100 - ( nonExePercent[[i]] / 2 ), actualPassPercent[[i]] + actualFailPercent[[i]] - ( actualFailPercent[[i]] / 2 ), actualPassPercent[[i]] - ( actualPassPercent[[i]] / 2 ) )
+    tmpCases <- c( tmpCases, skipNum[[i]], failNum[[i]], passNum[[i]] )
+}
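+
+# Worked example with hypothetical numbers: a suite with 10 planned cases, 7 passed and
+# 1 failed gives exeNum = 8 and skipNum = 2, so the stacked bar shows
+# actualPassPercent = 70%, actualFailPercent = 10% and nonExePercent = 20%.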
+
+relativePosLength <- length( dbResults ) * 3
+
+relativePos <- c()
+relativeCases <- c()
+
+for ( i in 1:3 ){
+    relativePos <- c( relativePos, tmpPos[ seq( i, relativePosLength, 3 ) ] )
+    relativeCases <- c( relativeCases, tmpCases[ seq( i, relativePosLength, 3 ) ] )
+}
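+
+# tmpPos and tmpCases were built interleaved per suite ( skip, fail, pass, skip, ... );
+# the seq( i, relativePosLength, 3 ) loop above regroups them into all-skip, all-fail,
+# all-pass order so they line up with the melted data frame constructed below.
+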
+names( actualPassPercent ) <- appName
+names( actualFailPercent ) <- afpName
+names( nonExePercent ) <- nepName
+
+labels <- paste( titles, "\n", totalNum, " Test Cases", sep="" )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+dataFrame <- melt( c( nonExePercent, actualFailPercent, actualPassPercent ) )
+dataFrame$title <- seq( 1, length( dbResults ), by = 1 )
+colnames( dataFrame ) <- c( "perc", "key", "suite" )
+
+dataFrame$xtitles <- labels
+dataFrame$relativePos <- relativePos
+dataFrame$relativeCases <- relativeCases
+dataFrame$valueDisplay <- c( paste( round( dataFrame$perc, digits = 2 ), "% - ", relativeCases, " Tests", sep="" ) )
+
+dataFrame$key <- factor( dataFrame$key, levels=unique( dataFrame$key ) )
+
+# Only label segments tall enough to fit the text; the threshold shrinks as more suites share the graph.
+dataFrame$willDisplayValue <- dataFrame$perc > 15.0 / length( dbResults )
+
+for ( i in 1:nrow( dataFrame ) ){
+    if ( relativeCases[[i]] == "1" ){
+        dataFrame[ i, "valueDisplay" ] <- c( paste( round( dataFrame$perc[[i]], digits = 2 ), "% - ", relativeCases[[i]], " Test", sep="" ) )
+    }
+    if ( !dataFrame[ i, "willDisplayValue" ] ){
+        dataFrame[ i, "valueDisplay" ] <- ""
+    }
+}
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = suite,
+                                           y = perc,
+                                           fill = key ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+theme_set( theme_grey( base_size = 26 ) )   # set the default text size of the graph.
+
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$suite,
+                                    label = dataFrame$xtitles )
+yScaleConfig <- scale_y_continuous( breaks = seq( 0, 100,
+                                    by = 10 ) )
+
+xLabel <- xlab( "" )
+yLabel <- ylab( "Total Test Cases (%)" )
+
+theme <- graphTheme() + theme( axis.text.x = element_text( angle = 0, size = 25 - 1.25 * length( dbResults ) ) )
+
+title <- labs( title = title, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        yScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+
+unexecutedColor <- webColor( "gray" )   # Gray
+failedColor <- webColor( "red" )        # Red
+passedColor <- webColor( "green" )      # Green
+
+colors <- scale_fill_manual( values=c( if ( "Skipped/Unexecuted" %in% dataFrame$key ){ unexecutedColor },
+                                       if ( "Failed" %in% dataFrame$key ){ failedColor },
+                                       if ( "Passed" %in% dataFrame$key ){ passedColor } ) )
+
+barGraphFormat <- geom_bar( stat = "identity", width = 0.8 )
+
+barGraphValues <- geom_text( aes( x = dataFrame$suite,
+                                  y = dataFrame$relativePos,
+                                  label = format( paste( dataFrame$valueDisplay ) ) ),
+                                  size = 15.50 / length( dbResults ) + 2.33, fontface = "bold" )
+
+result <- fundamentalGraphData +
+          colors +
+          barGraphFormat +
+          barGraphValues
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile ) # from saveGraph.R
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/pieMultipleTests.R b/TestON/JenkinsFile/wikiGraphRScripts/pieMultipleTests.R
new file mode 100644
index 0000000..a9a1cd4
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/pieMultipleTests.R
@@ -0,0 +1,282 @@
+# Copyright 2018 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example scripts:
+#
+# ALL tests with pass/fail (https://jenkins.onosproject.org/view/QA/job/postjob-VM/lastSuccessfulBuild/artifact/ALL_master_build-latest_executed_pieChart.jpg):
+# Rscript pieMultipleTests.R <url> <port> <username> <pass> ALL master "FUNCbgpls,FUNCflow,FUNCformCluster,FUNCgroup,FUNCintent,FUNCintentRest,FUNCipv6Intent,FUNCnetCfg,FUNCnetconf,FUNCoptical,FUNCovsdbtest,FUNCvirNetNB,HAbackupRecover,HAclusterRestart,HAfullNetPartition,HAkillNodes,HAsanity,HAscaling,HAsingleInstanceRestart,HAstopNodes,HAswapNodes,HAupgrade,HAupgradeRollback,PLATdockertest,USECASE_SdnipFunction,USECASE_SdnipFunctionCluster,VPLSBasic,VPLSfailsafe" latest y /path/to/save/directory/
+#
+# ALL tests with execution result (https://jenkins.onosproject.org/view/QA/job/postjob-VM/lastSuccessfulBuild/artifact/ALL_master_build-latest_passfail_pieChart.jpg):
+# Rscript pieMultipleTests.R <url> <port> <username> <pass> ALL master "FUNCbgpls,FUNCflow,FUNCformCluster,FUNCgroup,FUNCintent,FUNCintentRest,FUNCipv6Intent,FUNCnetCfg,FUNCnetconf,FUNCoptical,FUNCovsdbtest,FUNCvirNetNB,HAbackupRecover,HAclusterRestart,HAfullNetPartition,HAkillNodes,HAsanity,HAscaling,HAsingleInstanceRestart,HAstopNodes,HAswapNodes,HAupgrade,HAupgradeRollback,PLATdockertest,USECASE_SdnipFunction,USECASE_SdnipFunctionCluster,VPLSBasic,VPLSfailsafe" latest n /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+tests_to_include <- 7
+build_to_show <- 8
+is_displaying_plan <- 9
+save_directory <- 10
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+if ( is.na( args[ save_directory ] ) ){
+
+    # Check if sufficient args are provided.
+    if ( length( args ) != save_directory ){
+        specialArgs <- c(  "tests-to-include",
+                           "build-to-show",
+                           "is-displaying-plan" )
+        usage( "trendSCPF.R", specialArgs )
+        quit( status = 1 )
+    }
+}
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------
+# Test Case SQL Command
+# ---------------------
+
+print( "Generating Test Case SQL command." )
+
+command <- generateMultiTestSingleBuildSQLCommand( args[ branch_name ],
+                                                   args[ tests_to_include ],
+                                                   args[ build_to_show ] )
+
+dbResult <- retrieveData( con, command )
+
+print( "dbResult:" )
+print( dbResult )
+
+# -------------------------------
+# Create Title and Graph Filename
+# -------------------------------
+
+print( "Creating title of graph." )
+
+typeOfPieTitle <- "Executed Results"
+typeOfPieFile <- "_passfail"
+isPlannedPie <- FALSE
+if ( args[ is_displaying_plan ] == "y" ){
+    typeOfPieTitle <- "Test Execution"
+    typeOfPieFile <- "_executed"
+    isPlannedPie <- TRUE
+}
+
+if ( args[ build_to_show ] == "latest" ){
+    buildTitle <- "\nLatest Test Results"
+    filebuild_to_show <- "latest"
+} else {
+    buildTitle <- paste( " \n Build #", args[ build_to_show ], sep="" )
+    filebuild_to_show <- args[ build_to_show ]
+}
+
+title <- paste( args[ graph_title ],
+                " Tests: Summary of ",
+                typeOfPieTitle,
+                "",
+                " - ",
+                args[ branch_name ],
+                buildTitle,
+                sep="" )
+
+print( "Creating graph filename." )
+
+outputFile <- paste( args[ save_directory ],
+                     args[ graph_title ],
+                     "_",
+                     args[ branch_name ],
+                     "_build-",
+                     filebuild_to_show,
+                     typeOfPieFile,
+                     "_pieChart.jpg",
+                     sep="" )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+t <- subset( dbResult, select=c( "actual_test_name", "num_passed", "num_failed", "num_planned" ) )
+
+executedTests <- sum( t$num_passed ) + sum( t$num_failed )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame." )
+
+if ( isPlannedPie ){
+    nonExecutedTests <- sum( t$num_planned ) - executedTests
+    totalTests <- sum( t$num_planned )
+
+    executedPercent <- round( executedTests / totalTests * 100, digits = 2 )
+    nonExecutedPercent <- 100 - executedPercent
+
+    dfData <- c( nonExecutedPercent, executedPercent )
+
+    labels <- c( "Executed Test Cases", "Skipped Test Cases" )
+
+    dataFrame <- data.frame(
+        rawData = dfData,
+        displayedData = c( paste( nonExecutedPercent, "%\n", nonExecutedTests, " / ", totalTests, " Tests", sep="" ),
+                           paste( executedPercent, "%\n", executedTests, " / ", totalTests, " Tests", sep="" ) ),
+        names = factor( rev( labels ), levels = labels ) )
+} else {
+    sumPassed <- sum( t$num_passed )
+    sumFailed <- sum( t$num_failed )
+    sumExecuted <- sumPassed + sumFailed
+
+    percentPassed <- sumPassed / sumExecuted
+    percentFailed <- sumFailed / sumExecuted
+
+    dfData <- c( percentFailed, percentPassed )
+    labels <- c( "Failed Test Cases", "Passed Test Cases" )
+
+    dataFrame <- data.frame(
+        rawData = dfData,
+        displayedData = c( paste( round( percentFailed * 100, 2 ), "%\n", sumFailed, " / ", sumExecuted, " Tests", sep="" ),
+                           paste( round( percentPassed * 100, 2 ), "%\n", sumPassed, " / ", sumExecuted, " Tests", sep="" ) ),
+        names = factor( labels, levels = rev( labels ) ) )
+}
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+#     - data: the data frame that the graph will be based on
+#     - aes: the aesthetics of the graph, which require:
+#         - x: a single dummy category, since the pie chart is a stacked bar drawn in polar coordinates
+#         - y: the slice sizes (percentages)
+#         - fill: the category of each slice (executed/skipped or passed/failed)
+
+mainPlot <- ggplot( data = dataFrame,
+                    aes( x = "", y=rawData, fill = names ) )
+
+# -------------------
+# Main Plot Formatted
+# -------------------
+
+print( "Formatting main plot." )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+# Set other graph configurations here.
+theme <- graphTheme() +
+         theme( axis.text.x = element_blank(),
+                axis.title.x = element_blank(),
+                axis.title.y = element_blank(),
+                axis.ticks = element_blank(),
+                panel.border = element_blank(),
+                panel.grid=element_blank(),
+                legend.position = "bottom" )
+
+title <- labs( title = title, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        theme +
+                        title
+
+# ---------------------------
+# Generating Pie Chart Format
+# ---------------------------
+
+print( "Generating pie chart." )
+
+if ( isPlannedPie ){
+    executedColor <- webColor( "light_blue" )
+    nonExecutedColor <- webColor( "gray" )
+    pieColors <- scale_fill_manual( values = c( executedColor, nonExecutedColor ) )
+} else {
+    passColor <- webColor( "green" )
+    failColor <- webColor( "red" )
+    pieColors <- scale_fill_manual( values = c( passColor, failColor ) )
+}
+
+pieFormat <- geom_bar( width = 1, stat = "identity" )
+pieLabels <- geom_text( aes( y = rawData / length( rawData ) + c( 0, cumsum( rawData )[ -length( rawData ) ] ) ),
+                             label = dataFrame$displayedData,
+                             size = 7, fontface = "bold" )
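+# The y aesthetic above centers each label in its wedge: half the slice's own value
+# ( rawData / 2, as each frame has exactly two slices ) plus the cumulative height of
+# the slices stacked beneath it.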
+
+
+result <- fundamentalGraphData +
+          pieFormat + coord_polar( "y" ) + pieLabels + pieColors
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/trendIndividualTest.R b/TestON/JenkinsFile/wikiGraphRScripts/trendIndividualTest.R
new file mode 100644
index 0000000..f2f46bb
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/trendIndividualTest.R
@@ -0,0 +1,274 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# FUNCintent Results 20 Builds (https://jenkins.onosproject.org/view/QA/job/postjob-VM/lastSuccessfulBuild/artifact/FUNCintent_master_20-builds_graph.jpg):
+# Rscript trendIndividualTest.R <url> <port> <username> <pass> FUNCintent master 20 /path/to/save/directory/
+
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Command line arguments are read. Args include the database credentials, test name, branch name, and the directory to output files.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# Args 1 through 6 reside in fundamentalGraphData.R
+buildsToShow <- 7
+save_directory <- 8
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    specialArgs <- c(  "#-builds-to-show" )
+    usage( "trendIndividualTest.R", specialArgs )
+    quit( status = 1 )
+}
+
+# -------------------------------
+# Create Title and Graph Filename
+# -------------------------------
+
+print( "Creating title of graph." )
+
+title <- paste( args[ graph_title ],
+                " - ",
+                args[ branch_name ],
+                " \n Results of Last ",
+                args[ buildsToShow ],
+                " Builds",
+                sep="" )
+
+print( "Creating graph filename." )
+
+outputFile <- paste( args[ save_directory ],
+                     args[ graph_title ],
+                     "_",
+                     args[ branch_name ],
+                     "_",
+                     args[ buildsToShow ],
+                     "-builds_graph.jpg",
+                     sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------
+# Test Case SQL Command
+# ---------------------
+print( "Generating Test Case SQL command." )
+
+command <- simpleSQLCommand( args[ graph_title ], args[ branch_name ], args[ buildsToShow ] )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -------------------------------------------------------
+# Combining Passed, Failed, and Planned Data
+# -------------------------------------------------------
+
+print( "Combining Passed, Failed, and Planned Data." )
+
+requiredColumns <- c( "num_failed", "num_passed", "num_planned" )
+
+tryCatch( categories <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+         )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing data frame from combined data." )
+
+dataFrame <- melt( categories )
+
+# Rename column names in dataFrame
+colnames( dataFrame ) <- c( "Tests",
+                            "Status" )
+
+# Add build dates to the dataFrame
+dataFrame$build <- fileData$build
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+dataFrame$Status <- as.character( dataFrame$Status )
+dataFrame$Status <- factor( dataFrame$Status, levels = unique( dataFrame$Status ) )
+
+# Add planned, passed, and failed results to the dataFrame (for the fill below the lines)
+dataFrame$num_planned <- fileData$num_planned
+dataFrame$num_passed <- fileData$num_passed
+dataFrame$num_failed <- fileData$num_failed
+
+# Adding a temporary reversed iterative list to the dataFrame so that there are no gaps in-between build numbers.
+dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
+
+# Omit any data that doesn't exist
+dataFrame <- na.omit( dataFrame )
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+                                           y = Tests,
+                                           color = Status ) )
+
+# -------------------
+# Main Plot Formatted
+# -------------------
+
+print( "Formatting main plot." )
+
+# geom_ribbon is used so that there is a colored fill below the lines. These values shouldn't be changed.
+failedColor <- geom_ribbon( aes( ymin = 0,
+                                 ymax = dataFrame$num_failed ),
+                                 fill = webColor( "red" ),
+                                 linetype = 0,
+                                 alpha = 0.07 )
+
+passedColor <- geom_ribbon( aes( ymin = 0,
+                                 ymax = dataFrame$num_passed ),
+                                 fill = webColor( "green" ),
+                                 linetype = 0,
+                                 alpha = 0.05 )
+
+plannedColor <- geom_ribbon( aes( ymin = 0,
+                                  ymax = dataFrame$num_planned ),
+                                  fill = webColor( "blue" ),
+                                  linetype = 0,
+                                  alpha = 0.01 )
+
+# Colors for the lines
+lineColors <- scale_color_manual( values=c( webColor( "red" ),
+                                            webColor( "green" ),
+                                            webColor( "blue" ) ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
+                                    label = dataFrame$build )
+yScaleConfig <- scale_y_continuous( breaks = seq( 0, max( dataFrame$Tests ),
+                                    by = ceiling( max( dataFrame$Tests ) / 10 ) ) )
+
+xLabel <- xlab( "Build Number" )
+yLabel <- ylab( "Test Cases" )
+
+legendLabels <- scale_colour_discrete( labels = c( "Failed Cases",
+                                                   "Passed Cases",
+                                                   "Planned Cases" ) )
+
+title <- labs( title = title, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        plannedColor +
+                        passedColor +
+                        failedColor +
+                        xScaleConfig +
+                        yScaleConfig +
+                        xLabel +
+                        yLabel +
+                        lineColors +
+                        legendLabels +
+                        graphTheme() +  # from fundamentalGraphData.R
+                        title
+
+# ----------------------------
+# Generating Line Graph Format
+# ----------------------------
+
+print( "Generating line graph." )
+
+lineGraphFormat <- geom_line( size = 1.1 )
+pointFormat <- geom_point( size = 3 )
+
+result <- fundamentalGraphData +
+           lineGraphFormat +
+           pointFormat
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile ) # from saveGraph.R
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/trendMultipleTests.R b/TestON/JenkinsFile/wikiGraphRScripts/trendMultipleTests.R
new file mode 100644
index 0000000..524d715
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/trendMultipleTests.R
@@ -0,0 +1,257 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# FUNC Test Results Trend (https://jenkins.onosproject.org/view/QA/job/postjob-VM/lastSuccessfulBuild/artifact/FUNC_master_overview.jpg):
+# Rscript trendMultipleTests.R <url> <port> <username> <pass> FUNC master "FUNCflow,FUNCformCluster,FUNCgroup,FUNCintent,FUNCintentRest,FUNCipv6Intent,FUNCnetCfg,FUNCnetconf,FUNCoptical,FUNCovsdbtest" 20 /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+tests_to_include <- 7
+builds_to_show <- 8
+save_directory <- 9
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+args <- commandArgs( trailingOnly=TRUE )
+
+if ( length( args ) != save_directory ){
+    specialArgs <- c(  "tests-to-include-(as-one-string)",
+                       "builds-to-show" )
+    usage( "trendMultipleTests.R", specialArgs )
+    quit( status = 1 )
+}
+
+# -------------------------------
+# Create Title and Graph Filename
+# -------------------------------
+
+print( "Creating title of graph." )
+
+title <- paste( args[ graph_title ],
+                " Test Results Trend - ",
+                args[ branch_name ],
+                " \n Results of Last ",
+                args[ builds_to_show ],
+                " Nightly Builds",
+                sep="" )
+
+print( "Creating graph filename." )
+
+outputFile <- paste( args[ save_directory ],
+                     args[ graph_title ],
+                     "_",
+                     args[ branch_name ],
+                     "_overview.jpg",
+                     sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# ---------------------
+# Test Case SQL Command
+# ---------------------
+
+print( "Generating Test Case SQL command." )
+
+command <- generateMultiTestMultiBuildSQLCommand( args[ branch_name ],
+                                                  args[ tests_to_include ],
+                                                  args[ builds_to_show ] )
+
+dbResult <- retrieveData( con, command )
+maxBuild <- max( dbResult[ 'build' ] ) - strtoi( args[ builds_to_show ] )
+dbResult <- dbResult[ which( dbResult$build > maxBuild ), ]
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+t <- subset( dbResult, select=c( "actual_test_name", "build", "num_failed" ) )
+t$num_failed <- ceiling( t$num_failed / ( t$num_failed + 1 ) )   # collapse counts to a 0/1 flag: any build run with failures counts once
+t$num_planned <- 1
+
+fileData <- aggregate( t$num_failed, by=list( Category=t$build ), FUN=sum )
+colnames( fileData ) <- c( "build", "num_failed" )
+
+fileData$num_planned <- ( aggregate( t$num_planned, by=list( Category=t$build ), FUN=sum ) )$x
+fileData$num_passed <- fileData$num_planned - fileData$num_failed
+
+print( fileData )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+dataFrame <- melt( subset( fileData, select=c( "num_failed", "num_passed", "num_planned" ) ) )
+dataFrame$build <- fileData$build
+colnames( dataFrame ) <- c( "status", "results", "build" )
+
+dataFrame$num_failed <- fileData$num_failed
+dataFrame$num_passed <- fileData$num_passed
+dataFrame$num_planned <- fileData$num_planned
+dataFrame$iterative <- seq( 1, nrow( fileData ), by = 1 )
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+#     - data: the data frame that the graph will be based on
+#     - aes: the aesthetics of the graph, which require:
+#         - x: x-axis values (usually iterative, but relabeled with the build # later)
+#         - y: y-axis values (usually tests)
+#         - color: the category of the colored lines (usually the status of the test)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+                                           y = results,
+                                           color = status ) )
+
+# -------------------
+# Main Plot Formatted
+# -------------------
+
+print( "Formatting main plot." )
+
+# geom_ribbon is used so that there is a colored fill below the lines. These values shouldn't be changed.
+failedColor <- geom_ribbon( aes( ymin = 0,
+                                 ymax = dataFrame$num_failed ),
+                                 fill = webColor( "red" ),
+                                 linetype = 0,
+                                 alpha = 0.07 )
+
+passedColor <- geom_ribbon( aes( ymin = 0,
+                                 ymax = dataFrame$num_passed ),
+                                 fill = webColor( "light_blue" ),
+                                 linetype = 0,
+                                 alpha = 0.05 )
+
+plannedColor <- geom_ribbon( aes( ymin = 0,
+                                  ymax = dataFrame$num_planned ),
+                                  fill = webColor( "black" ),
+                                  linetype = 0,
+                                  alpha = 0.01 )
+
+# Colors for the lines
+lineColors <- scale_color_manual( values=c( webColor( "red" ),
+                                            webColor( "light_blue" ),
+                                            webColor( "black" )),
+                                  labels = c( "Containing Failures",
+                                              "No Failures",
+                                              "Total Built" ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
+                                    label = dataFrame$build )
+yScaleConfig <- scale_y_continuous( breaks = seq( 0, max( dataFrame$results ),
+                                    by = ceiling( max( dataFrame$results ) / 10 ) ) )
+
+xLabel <- xlab( "Build Number" )
+yLabel <- ylab( "Tests" )
+
+theme <- graphTheme()
+
+title <- labs( title = title, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+                        plannedColor +
+                        passedColor +
+                        failedColor +
+                        xScaleConfig +
+                        yScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title +
+                        lineColors
+
+# ----------------------------
+# Generating Line Graph Format
+# ----------------------------
+
+print( "Generating line graph." )
+
+lineGraphFormat <- geom_line( size = 1.1 )
+pointFormat <- geom_point( size = 3 )
+
+result <- fundamentalGraphData +
+           lineGraphFormat +
+           pointFormat
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/trendSCPF.R b/TestON/JenkinsFile/wikiGraphRScripts/trendSCPF.R
new file mode 100644
index 0000000..b2c68c6
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/trendSCPF.R
@@ -0,0 +1,251 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# This is the R script that generates the SCPF front page graphs.
+
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+# Args 1 through 6 reside in fundamentalGraphData.R
+num_dates = 7
+sql_commands = 8
+y_axis = 9
+old_flow = 10
+save_directory = 11
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL )
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+args <- commandArgs( trailingOnly=TRUE )
+
+# Check if sufficient args are provided.
+if ( length( args ) != save_directory ){
+    specialArgs <- c(  "#-dates",
+                       "SQL-command",
+                       "y-axis-title",
+                       "using-old-flow" )
+    usage( "trendSCPF.R", specialArgs )
+    quit( status = 1 )
+}
+
+# -------------------------------
+# Create Title and Graph Filename
+# -------------------------------
+
+print( "Creating title of graph" )
+
+# Title of graph based on command line args.
+
+title <- args[ graph_title ]
+title <- paste( title, if( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else "" )
+
+print( "Creating graph filename." )
+
+# Filenames for the output graph include the testname, branch, and the graph type.
+outputFile <- paste( args[ save_directory ],
+                    "SCPF_Front_Page_",
+                    gsub( " ", "_", args[ graph_title ] ),
+                    "_",
+                    args[ branch_name ],
+                    "_",
+                    args[ num_dates ],
+                    "-dates",
+                    if( args[ old_flow ] == "y" ) "_OldFlow" else "",
+                    "_graph.jpg",
+                    sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+con <- initSQL( args[ database_host ], args[ database_port ], args[ database_u_id ], args[ database_pw ] )
+
+fileData <- retrieveData( con, args[ sql_commands ] )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# Create lists c() and organize data into their corresponding list.
+print( "Combine data retrieved from databases into a list." )
+
+buildNums <- fileData$build
+fileData$build <- c()
+print( fileData )
+
+if ( ncol( fileData ) > 1 ){
+    for ( i in 2:ncol( fileData ) ){
+        fileData[ i ] <- fileData[ i - 1 ] + fileData[ i ]
+    }
+}
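+# The loop above turns the per-category columns into running totals across columns
+# ( e.g. columns a, b, c become a, a+b, a+b+c ) so the plotted trend lines stack.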
+
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing data frame from combined data." )
+
+dataFrame <- melt( fileData )
+dataFrame$date <- fileData$date
+
+colnames( dataFrame ) <- c( "Legend",
+                            "Values" )
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+dataFrame$Legend <- as.character( dataFrame$Legend )
+dataFrame$Legend <- factor( dataFrame$Legend, levels=unique( dataFrame$Legend ) )
+dataFrame$build <- buildNums
+
+# Adding a temporary iterative list to the dataFrame so that there are no gaps in-between date numbers.
+dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )
+
+dataFrame <- na.omit( dataFrame )   # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# -------------------
+# Main Plot Generated
+# -------------------
+
+print( "Creating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+#     - data: the data frame that the graph will be based on
+#     - aes: the aesthetics of the graph, which require:
+#         - x: x-axis values (usually iterative, but relabeled with the date # later)
+#         - y: y-axis values (usually tests)
+#         - color: the category of the colored lines (usually the legend of the test)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+                                           y = Values,
+                                           color = Legend ) )
+
+# -------------------
+# Main Plot Formatted
+# -------------------
+
+print( "Formatting main plot." )
+
+limitExpansion <- expand_limits( y = 0 )
+
+tickLength <- 3
+breaks <- seq( max( dataFrame$iterative ) %% tickLength, max( dataFrame$iterative ), by = tickLength )
+breaks <- breaks[ which( breaks != 0 ) ]
+
+maxYDisplay <- max( dataFrame$Values ) * 1.05
+yBreaks <- ceiling( max( dataFrame$Values ) / 10 )
+yScaleConfig <- scale_y_continuous( breaks = seq( 0, maxYDisplay, by = yBreaks ) )
+xScaleConfig <- scale_x_continuous( breaks = breaks, label = rev( dataFrame$build )[ breaks ] )
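+# Only every tickLength-th build gets an x-axis label: with, say, 20 builds, breaks is
+# 2, 5, ..., 20, and rev( dataFrame$build )[ breaks ] maps those positions back to the
+# matching build numbers (oldest build on the left).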
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+xLabel <- xlab( "Build" )
+yLabel <- ylab( args[ y_axis ] )
+
+# Set other graph configurations here.
+theme <- graphTheme()
+
+title <- labs( title = title, subtitle = lastUpdatedLabel() )
+
+# Colors used for the lines.
+# Note: graphs that have X lines will use the first X colors in this list.
+colors <- scale_color_manual( values=c( webColor( "black" ),   # black
+                                        webColor( "blue" ),   # blue
+                                        webColor( "red" ),   # red
+                                        webColor( "green" ),   # green
+                                        webColor( "yellow" ),   # yellow
+                                        webColor( "purple" ) ) ) # purple (not used)
+
+wrapLegend <- wrapLegend()
+
+fundamentalGraphData <- mainPlot +
+                        limitExpansion +
+                        xScaleConfig +
+                        yScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        colors +
+                        wrapLegend +
+                        title
+
+# ----------------------------
+# Generating Line Graph Format
+# ----------------------------
+
+print( "Generating line graph." )
+
+lineGraphFormat <- geom_line( size = 0.75 )
+pointFormat <- geom_point( size = 1.75 )
+
+result <- fundamentalGraphData +
+          lineGraphFormat +
+          pointFormat
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile ) # from saveGraph.R
+quit( status = 0 )
diff --git a/TestON/drivers/common/cli/emulator/mininetclidriver.py b/TestON/drivers/common/cli/emulator/mininetclidriver.py
index 54d9747..c5ee51d 100644
--- a/TestON/drivers/common/cli/emulator/mininetclidriver.py
+++ b/TestON/drivers/common/cli/emulator/mininetclidriver.py
@@ -574,55 +574,42 @@
 
             main.cleanAndExit()
 
-    def discoverIpv4Hosts( self, hostList, wait=1 , dstIp="6.6.6.6"):
+    def discoverHosts( self, hostList=None, wait=1, dstIp="6.6.6.6", dstIp6="1020::3fe" ):
         '''
-        Can only be used if hosts already have ipv4 addresses.
-
-        Hosts in hostList will do a single ping to a non-existent (dstIp) address for ONOS
-        to discover them again.
+        Hosts in hostList send a single ping to a non-existent address so that ONOS
+        discovers them. A host uses ping or ping6 depending on whether it has an IPv4
+        or an IPv6 address configured.
+        Optional:
+            hostList: a list of names of the hosts to be discovered. If not specified,
+                      Mininet sends pings from all hosts
+            wait: timeout for IPv4/IPv6 echo requests
+            dstIp: destination address used by IPv4 hosts
+            dstIp6: destination address used by IPv6 hosts
+        Returns:
+            main.TRUE if all ping packets were successfully sent, main.FALSE otherwise
         '''
         try:
-            main.log.info( "Issuing dumb pings for ipv6 hosts to be discovered" )
-            cmd = " ping -c 1 -i 1 -W " + str( wait ) + " "
+            if not hostList:
+                hosts = self.getHosts( getInterfaces=False )
+                hostList = hosts.keys()
+            discoveryResult = main.TRUE
             for host in hostList:
-                pingCmd = str( host ) + cmd + dstIp
-                self.handle.sendline( pingCmd )
-                self.handle.expect( "mininet>", timeout=wait + 1 )
-
-        except pexpect.TIMEOUT:
-            main.log.exception( self.name + ": TIMEOUT exception" )
-            response = self.handle.before
-            # NOTE: Send ctrl-c to make sure command is stopped
-            self.handle.send( "\x03" )
-            self.handle.expect( "Interrupt" )
-            response += self.handle.before + self.handle.after
-            self.handle.expect( "mininet>" )
-            response += self.handle.before + self.handle.after
-            main.log.debug( response )
-            return main.FALSE
-        except pexpect.EOF:
-            main.log.error( self.name + ": EOF exception found" )
-            main.log.error( self.name + ":     " + self.handle.before )
-            main.cleanAndExit()
-        except Exception:
-            main.log.exception( self.name + ": Uncaught exception!" )
-            main.cleanAndExit()
-
-    def discoverIpv6Hosts( self, hostList, wait=1, dstIp="1020::3fe" ):
-        '''
-        Can only be used if hosts already have ipv6 addresses.
-
-        Hosts in hostList will do a single ping to a non-existent address (dstIp) for ONOS
-        to discover them again.
-        '''
-        try:
-            main.log.info( "Issuing dump pings for ipv6 hosts to be discovered" )
-            cmd = " ping6 -c 1 -i 1 -W " + str( wait ) + " "
-            for host in hostList:
-                pingCmd = str( host ) + cmd + dstIp
-                self.handle.sendline( pingCmd )
-                self.handle.expect( "mininet>", timeout=wait + 1 )
-
+                cmd = ""
+                if self.getIPAddress( host ):
+                    cmd = "{} ping -c 1 -i 1 -W {} {}".format( host, wait, dstIp )
+                    main.log.debug( "Sending IPv4 probe ping from host {}".format( host ) )
+                elif self.getIPAddress( host, proto='IPV6' ):
+                    cmd = "{} ping6 -c 1 -i 1 -W {} {}".format( host, wait, dstIp6 )
+                    main.log.debug( "Sending IPv6 probe ping from host {}".format( host ) )
+                else:
+                    main.log.warn( "No IP addresses configured on host {}, skipping discovery".format( host ) )
+                    discoveryResult = main.FALSE
+                if cmd:
+                    self.handle.sendline( "{} ip neigh flush all".format( host ) )
+                    self.handle.expect( "mininet>", timeout=wait + 1 )
+                    self.handle.sendline( cmd )
+                    self.handle.expect( "mininet>", timeout=wait + 1 )
+            return discoveryResult
         except pexpect.TIMEOUT:
             main.log.exception( self.name + ": TIMEOUT exception" )
             response = self.handle.before
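A minimal usage sketch for the unified discoverHosts (assuming a running TestON "main" with a Mininet-backed Network component; host names are illustrative):

    # Probe every known host; IPv4 hosts use ping, IPv6 hosts use ping6.
    allProbed = main.Network.discoverHosts()
    # Probe only selected hosts, with a longer echo-request timeout.
    probed = main.Network.discoverHosts( hostList=[ "h4v4", "h1v6" ], wait=3 )
    if probed == main.FALSE:
        main.log.warn( "Some hosts had no IP configured and were skipped" )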
diff --git a/TestON/drivers/common/cli/onosclidriver.py b/TestON/drivers/common/cli/onosclidriver.py
index 7af7e4f..439ba80 100755
--- a/TestON/drivers/common/cli/onosclidriver.py
+++ b/TestON/drivers/common/cli/onosclidriver.py
@@ -1173,15 +1173,16 @@
             for host in hosts:
                 if hostIp in host[ "ipAddresses" ]:
                     targetHost = host
-            assert host, "Not able to find host with IP {}".format( hostIp )
+            assert targetHost, "Not able to find host with IP {}".format( hostIp )
             result = main.TRUE
             locationsDiscovered = [ loc[ "elementId" ] + "/" + loc[ "port" ] for loc in targetHost[ "locations" ] ]
             for loc in locations:
                 discovered = False
                 for locDiscovered in locationsDiscovered:
-                    if loc in locDiscovered:
-                        discovered = True
+                    locToMatch = locDiscovered if "/" in loc else locDiscovered.split( "/" )[ 0 ]
+                    if loc == locToMatch:
                         main.log.debug( "Host {} discovered with location {}".format( hostIp, loc ) )
+                        discovered = True
                         break
                 if discovered:
                     locationsDiscovered.remove( locDiscovered )
@@ -6253,7 +6254,7 @@
         Create a multicast route by calling 'mcast-host-join' command
         sAddr: we can provide * for ASM or a specific address for SSM
         gAddr: specifies multicast group address
-        srcs: a list of the source connect points e.g. ["of:0000000000000003/12"]
+        srcs: a list of HostId of the sources e.g. ["00:AA:00:00:00:01/None"]
         sinks: a list of HostId of the sinks e.g. ["00:AA:00:00:01:05/40"]
         Returns main.TRUE if mcast route is added; Otherwise main.FALSE
         """
@@ -6330,12 +6331,52 @@
             main.log.exception( self.name + ": Uncaught exception!" )
             main.cleanAndExit()
 
+    def mcastSinkDelete( self, sAddr, gAddr, sink=None ):
+        """
+        Delete multicast sink(s) by calling 'mcast-sink-delete' command
+        sAddr: we can provide * for ASM or a specific address for SSM
+        gAddr: specifies multicast group address
+        sink: HostId of the sink e.g. "00:AA:00:00:01:05/40",
+              will delete the route if not specified
+        Returns main.TRUE if the mcast sink is deleted; Otherwise main.FALSE
+        """
+        try:
+            cmdStr = "mcast-sink-delete"
+            cmdStr += " -sAddr " + str( sAddr )
+            cmdStr += " -gAddr " + str( gAddr )
+            if sink:
+                cmdStr += " -s " + str( sink )
+            handle = self.sendline( cmdStr )
+            assert handle is not None, "Error in sendline"
+            assert "Command not found:" not in handle, handle
+            assert "Unsupported command:" not in handle, handle
+            assert "Error executing command" not in handle, handle
+            if "Updated the mcast route" in handle:
+                return main.TRUE
+            elif "Deleted the mcast route" in handle:
+                return main.TRUE
+            else:
+                return main.FALSE
+        except AssertionError:
+            main.log.exception( "" )
+            return None
+        except TypeError:
+            main.log.exception( self.name + ": Object not as expected" )
+            return None
+        except pexpect.EOF:
+            main.log.error( self.name + ": EOF exception found" )
+            main.log.error( self.name + ":    " + self.handle.before )
+            main.cleanAndExit()
+        except Exception:
+            main.log.exception( self.name + ": Uncaught exception!" )
+            main.cleanAndExit()
+
     def mcastSourceDelete( self, sAddr, gAddr, srcs=None ):
         """
         Delete multicast src(s) by calling 'mcast-source-delete' command
         sAddr: we can provide * for ASM or a specific address for SSM
         gAddr: specifies multicast group address
-        srcs: a list of connect points of the sources e.g. ["00:AA:00:00:01:05/40"],
+        srcs: a list of host IDs of the sources e.g. ["00:AA:00:00:01:05/40"],
               will delete the route if not specified
         Returns main.TRUE if the mcast source is deleted; Otherwise main.FALSE
         """
diff --git a/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py b/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
index f3e686f..2ae8b62 100644
--- a/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
+++ b/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
@@ -131,7 +131,10 @@
 
         time.sleep( main.startUpSleep )
         main.step( 'Starting mininet topology' )
-        mnStatus = main.Mininet1.startNet( args='--topo=linear,1' )
+        mnCmd = '--topo=linear,1'
+        for ctrl in main.Cluster.active():
+            mnCmd += ' --controller remote,ip=' + ctrl.ipAddress
+        mnStatus = main.Mininet1.startNet( args=mnCmd )
         utilities.assert_equals( expect=main.TRUE,
                                  actual=mnStatus,
                                  onpass="Successfully started Mininet",
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py b/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
index 5849a6f..866b273 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
@@ -307,7 +307,10 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=202, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf2", 10, { "ipv4": False, "ipv6": False } )
+        verifySwitchDown( main, "leaf2", 10,
+                          { "ipv4": False, "ipv6": False },
+                          [ "h4v4" ],
+                          { "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -327,7 +330,9 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=203, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf5", 10 )
+        verifySwitchDown( main, "leaf5", 10,
+                          hostsToDiscover=[ "h10v4" ],
+                          hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -347,7 +352,11 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=204, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf4", 10, { "ipv4": [ True, False, True ], "ipv6": True } )
+        verifySwitchDown( main, "leaf4", 10,
+                          { "ipv4": [ True, False, True ], "ipv6": True },
+                          [ "h8v4", "h10v4" ],
+                          { "h8v4": ["of:0000000000000004/9"],
+                            "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -367,7 +376,13 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=205, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, [ "leaf1", "leaf3", "leaf4", "leaf5" ], 32, { "ipv4": [ True, False, False ], "ipv6": False } )
+        verifySwitchDown( main, [ "leaf1", "leaf3", "leaf4", "leaf5" ], 32,
+                          { "ipv4": [ True, False, False ], "ipv6": False },
+                          [ "h4v4", "h8v4", "h10v4", "h1v6"],
+                          { "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"],
+                            "h8v4": ["of:0000000000000004/9"],
+                            "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"],
+                            "h1v6": ["of:0000000000000001/3"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -411,14 +426,15 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=401, onosNodes=3 )
         verifyMcastRoutes( main )
-        #TODO: Verify host has both locations
         # Verify killing one link of dual-homed host h4
-        verifyLinkDown( main, [ "leaf2", "h4v4" ], 0 )
-        verifyLinkDown( main, [ "leaf3", "h4v4" ], 0 )
+        verifyPortDown( main, "of:0000000000000002", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
+        verifyPortDown( main, "of:0000000000000003", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         # Verify killing one link of dual-homed host h10
-        verifyLinkDown( main, [ "leaf4", "h10v4" ], 0 )
-        verifyLinkDown( main, [ "leaf5", "h10v4" ], 0 )
-        verifySwitchDown( main, "leaf3", 10 )
+        verifyPortDown( main, "of:0000000000000004", 11, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
+        verifyPortDown( main, "of:0000000000000005", 10, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
+        verifySwitchDown( main, "leaf3", 10,
+                          hostsToDiscover=[ "h4v4" ],
+                          hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -464,11 +480,11 @@
         setupTest( main, test_idx=403, onosNodes=3 )
         verifyMcastRoutes( main )
         # Verify killing one link of dual-homed host h4
-        verifyLinkDown( main, [ "leaf2", "h4v4" ], 0 )
-        verifyLinkDown( main, [ "leaf3", "h4v4" ], 0 )
+        verifyPortDown( main, "of:0000000000000002", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
+        verifyPortDown( main, "of:0000000000000003", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         # Verify killing one link of dual-homed host h10
-        verifyLinkDown( main, [ "leaf4", "h10v4" ], 0 )
-        verifyLinkDown( main, [ "leaf5", "h10v4" ], 0 )
+        verifyPortDown( main, "of:0000000000000004", 11, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
+        verifyPortDown( main, "of:0000000000000005", 10, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyLinkDown( main, [ [ "leaf3", "spine101" ], [ "leaf3", "spine102" ] ], 8 )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
@@ -494,11 +510,11 @@
         setupTest( main, test_idx=404, onosNodes=3 )
         verifyMcastRoutes( main )
         # Verify killing one link of dual-homed host h4
-        verifyLinkDown( main, [ "leaf2", "h4v4" ], 0 )
-        verifyLinkDown( main, [ "leaf3", "h4v4" ], 0 )
+        verifyPortDown( main, "of:0000000000000002", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
+        verifyPortDown( main, "of:0000000000000003", 10, hostsToDiscover=[ "h4v4" ], hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         # Verify killing one link of dual-homed host h10
-        verifyLinkDown( main, [ "leaf4", "h10v4" ], 0 )
-        verifyLinkDown( main, [ "leaf5", "h10v4" ], 0 )
+        verifyPortDown( main, "of:0000000000000004", 11, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
+        verifyPortDown( main, "of:0000000000000005", 10, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyLinkDown( main, [ [ "leaf3", "spine101" ], [ "leaf2", "spine102" ] ], 8 )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
index 30b7e92..be8e4c9 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
@@ -42,6 +42,7 @@
     time.sleep( float( main.params[ "timers" ][ "loadNetcfgSleep" ] ) )
     main.cfgName = "common"
     lib.loadMulticastConfig( main )
+    lib.loadHost( main )
 
     if hasattr( main, "Mininet1" ):
         # Run the test with Mininet
@@ -56,6 +57,9 @@
 
     # Create scapy components
     lib.startScapyHosts( main )
+    # Verify host IP assignment
+    lib.verifyOnosHostIp( main )
+    lib.verifyNetworkHostIp( main )
 
 def verifyMcastRoutes( main ):
     """
@@ -75,7 +79,7 @@
     src = main.mcastRoutes[ routeName ][ "src" ]
     dst = main.mcastRoutes[ routeName ][ "dst" ]
     main.Cluster.active( 0 ).CLI.mcastHostJoin( routeData[ "src" ][ src[ 0 ] ][ "ip" ], routeData[ "group" ],
-                                                [ routeData[ "src" ][ i ][ "port" ] for i in src ],
+                                                [ routeData[ "src" ][ i ][ "id" ] for i in src ],
                                                 [ routeData[ "dst" ][ i ][ "id" ] for i in dst ] )
     time.sleep( float( main.params[ "timers" ][ "mcastSleep" ] ) )
 
@@ -85,7 +89,7 @@
     """
     routeData = main.multicastConfig[ routeName ]
     main.step( "Verify removal of {} route".format( routeName ) )
-    main.Cluster.active( 0 ).CLI.mcastHostDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ] )
+    main.Cluster.active( 0 ).CLI.mcastSinkDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ] )
     # TODO: verify the deletion
 
 def verifyMcastSinkRemoval( main, routeName, sinkIndex, expect ):
@@ -96,7 +100,7 @@
     routeData = main.multicastConfig[ routeName ]
     sinkId = routeData[ "dst" ][ sinkIndex ][ "id" ]
     main.step( "Verify removal of {} sink {}".format( routeName, sinkId ) )
-    main.Cluster.active( 0 ).CLI.mcastHostDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ], sinkId )
+    main.Cluster.active( 0 ).CLI.mcastSinkDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ], sinkId )
     time.sleep( float( main.params[ "timers" ][ "mcastSleep" ] ) )
     lib.verifyMulticastTraffic( main, routeName, expect )
 
@@ -106,9 +110,9 @@
     """
     from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
     routeData = main.multicastConfig[ routeName ]
-    sourcePort = [ routeData[ "src" ][ sourceIndex ][ "port" ] ]
-    main.step( "Verify removal of {} source {}".format( routeName, sourcePort ) )
-    main.Cluster.active( 0 ).CLI.mcastSourceDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ], sourcePort )
+    sourceId = [ routeData[ "src" ][ sourceIndex ][ "id" ] ]
+    main.step( "Verify removal of {} source {}".format( routeName, sourceId ) )
+    main.Cluster.active( 0 ).CLI.mcastSourceDelete( routeData[ "src" ][ 0 ][ "ip" ], routeData[ "group" ], sourceId )
     time.sleep( float( main.params[ "timers" ][ "mcastSleep" ] ) )
     lib.verifyMulticastTraffic( main, routeName, expect )
 
@@ -126,7 +130,7 @@
         verifyMcastSinkRemoval( main, "ipv4", 1, [ True, False, False ] )
     verifyMcastSourceRemoval( main, "ipv4", 0, False )
 
-def verifyLinkDown( main, link, affectedLinkNum, expectList={ "ipv4": True, "ipv6": True } ):
+def verifyLinkDown( main, link, affectedLinkNum, expectList={ "ipv4": True, "ipv6": True }, hostsToDiscover=[], hostLocations={} ):
     """
     Kill a batch of links and verify traffic
     Restore the links and verify traffic
@@ -139,10 +143,34 @@
         lib.verifyMulticastTraffic( main, routeName, expectList[ routeName ] )
     # Restore the link(s)
     lib.restoreLinkBatch( main, link, int( main.params[ "TOPO" ][ "linkNum" ] ), int( main.params[ "TOPO" ][ "switchNum" ] ) )
+    if hostsToDiscover:
+        main.Network.discoverHosts( hostList=hostsToDiscover )
+    for host, loc in hostLocations.items():
+        lib.verifyHostLocation( main, host, loc, retry=5 )
     for routeName in expectList.keys():
         lib.verifyMulticastTraffic( main, routeName, True )
 
-def verifySwitchDown( main, switchName, affectedLinkNum, expectList={ "ipv4": True, "ipv6": True } ):
+def verifyPortDown( main, dpid, port, expectList={ "ipv4": True, "ipv6": True }, hostsToDiscover=[], hostLocations={} ):
+    """
+    Disable a port and verify traffic
+    Reenable the port and verify traffic
+    """
+    from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+    main.step( "Disable port {}/{}".format( dpid, port ) )
+    main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+    time.sleep( 10 )
+    for routeName in expectList.keys():
+        lib.verifyMulticastTraffic( main, routeName, expectList[ routeName ] )
+    # Re-enable the port
+    main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+    if hostsToDiscover:
+        main.Network.discoverHosts( hostList=hostsToDiscover )
+    for host, loc in hostLocations.items():
+        lib.verifyHostLocation( main, host, loc, retry=5 )
+    for routeName in expectList.keys():
+        lib.verifyMulticastTraffic( main, routeName, True )
+
+def verifySwitchDown( main, switchName, affectedLinkNum, expectList={ "ipv4": True, "ipv6": True }, hostsToDiscover=[], hostLocations={} ):
     """
     Kill a batch of switches and verify traffic
     Recover the switches and verify traffic
@@ -154,7 +182,9 @@
     for routeName in expectList.keys():
         lib.verifyMulticastTraffic( main, routeName, expectList[ routeName ] )
     # Recover the switch(es)
-    lib.recoverSwitch( main, switchName, int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ) )
+    lib.recoverSwitch( main, switchName, int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ), bool( hostsToDiscover ), hostsToDiscover )
+    for host, loc in hostLocations.items():
+        lib.verifyHostLocation( main, host, loc, retry=5 )
     for routeName in expectList.keys():
         lib.verifyMulticastTraffic( main, routeName, True )
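A usage sketch for verifyPortDown, mirroring the dual-homed-host calls in SRMulticast.py (DPID, port, and host names are illustrative):

    verifyPortDown( main, "of:0000000000000002", 10,
                    expectList={ "ipv4": True, "ipv6": True },
                    hostsToDiscover=[ "h4v4" ],
                    hostLocations={ "h4v4": [ "of:0000000000000002/10",
                                              "of:0000000000000003/10" ] } )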
 
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host
new file mode 100644
index 0000000..c0dd913
--- /dev/null
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host
@@ -0,0 +1,20 @@
+{
+    "onos":
+    {
+        "00:AA:00:00:00:02/None": "10.2.0.1",
+        "00:AA:00:00:00:03/None": "10.2.30.1",
+        "00:AA:00:00:00:06/None": "10.3.0.1",
+        "00:AA:00:00:00:08/40": "10.3.30.1",
+        "00:BB:00:00:00:01/None": "1000::3fe",
+        "00:BB:00:00:00:02/None": "1002::3fe"
+    },
+    "network":
+    {
+        "h3v4": "10.2.0.1",
+        "h4v4": "10.2.30.1",
+        "h8v4": "10.3.0.1",
+        "h10v4": "10.3.30.1",
+        "h1v6": "1000::3fe",
+        "h3v6": "1002::3fe"
+    }
+}
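A hypothetical standalone check of this file, mirroring what the verifyOnosHostIp/verifyNetworkHostIp helpers consume (file path assumed):

    import json

    with open( "common.host" ) as f:
        expected = json.load( f )
    # "onos" maps HostId -> IP as ONOS should discover it;
    # "network" maps Mininet host name -> IP as configured in the emulator.
    for hostId, ip in expected[ "onos" ].items():
        print( "ONOS should report {} at {}".format( ip, hostId ) )
    for name, ip in expected[ "network" ].items():
        print( "Mininet host {} should own {}".format( name, ip ) )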
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/multicast/common.multicastConfig b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/multicast/common.multicastConfig
index 842d540..4d06609 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/multicast/common.multicastConfig
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/multicast/common.multicastConfig
@@ -6,6 +6,7 @@
             {
                 "host": "h3v4",
                 "ip": "10.2.0.1",
+                "id": "00:AA:00:00:00:02/None",
                 "port": "of:0000000000000002/9",
                 "interface": "h3v4-eth0",
                 "Ether": "01:00:5e:02:00:01",
@@ -40,6 +41,7 @@
             {
                 "host": "h3v6",
                 "ip": "1002::3fe",
+                "id": "00:BB:00:00:00:02/None",
                 "port": "of:0000000000000002/6",
                 "interface": "h3v6-eth0",
                 "Ether": "33:33:00:00:03:fe",
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
index e699d81..52a05fd 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
@@ -36,10 +36,10 @@
     <timers>
         <LinkDiscovery>30</LinkDiscovery>
         <SwitchDiscovery>30</SwitchDiscovery>
-        <OnosDiscovery>30</OnosDiscovery>
+        <OnosDiscovery>45</OnosDiscovery>
         <loadNetcfgSleep>5</loadNetcfgSleep>
         <startMininetSleep>25</startMininetSleep>
-        <dhcpSleep>30</dhcpSleep>
+        <dhcpSleep>60</dhcpSleep>
         <balanceMasterSleep>10</balanceMasterSleep>
     </timers>
 
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
index c60eda7..2952614 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
@@ -606,6 +606,7 @@
         main.case( "Drop spine and paired leaf" )
         setupTest( main, test_idx=606, onosNodes=3 )
         verify( main, disconnected=False )
+        # Drop spine101 and leaf-2/3
         lib.killSwitch( main, "spine101", 9, 30 )
         verify( main, disconnected=False )
         lib.killSwitch( main, "leaf2", 8, 24 )
@@ -616,8 +617,276 @@
         lib.recoverSwitch( main, "spine101", 8, 30 )
         verify( main )
         lib.recoverSwitch( main, "leaf3", 9, 38 )
-        lib.recoverSwitch( main, "leaf2", 10, 48, rediscoverHosts=True )
+        lib.recoverSwitch( main, "leaf2", 10, 48, rediscoverHosts=True,
+                           hostsToDiscover=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
+        verify( main, disconnected=False )
+        # Drop spine102 and leaf-4/5
+        lib.killSwitch( main, "spine102", 9, 30 )
+        verify( main, disconnected=False )
+        lib.killSwitch( main, "leaf4", 8, 24 )
+        lib.killSwitch( main, "leaf5", 7, 20 )
+        main.disconnectedIpv4Hosts = [ "h8v4", "h9v4", "h10v4", "h11v4" ]
+        main.disconnectedIpv6Hosts = [ "h8v6", "h9v6", "h10v6", "h11v6" ]
+        verify( main, external=False )
+        lib.recoverSwitch( main, "spine102", 8, 30 )
+        verify( main, external=False )
+        lib.recoverSwitch( main, "leaf5", 9, 38 )
+        lib.recoverSwitch( main, "leaf4", 10, 48, rediscoverHosts=True,
+                           hostsToDiscover=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
+        main.disconnectedIpv4Hosts = []
+        main.disconnectedIpv6Hosts = []
+        verify( main, disconnected=False )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE620( self, main ):
+        """
+        Take down one of double links towards the spine from all leaf switches and
+        check that buckets in select groups change accordingly
+        Bring up links again and check that buckets in select groups change accordingly
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Take down one of double links towards the spine" )
+        setupTest( main, test_idx=620, onosNodes=3 )
+        verify( main, disconnected=False )
+        portsToDisable = [ [ "of:0000000000000002", 1 ], [ "of:0000000000000002", 3 ],
+                           [ "of:0000000000000003", 1 ], [ "of:0000000000000003", 3 ],
+                           [ "of:0000000000000004", 1 ], [ "of:0000000000000004", 3 ],
+                           [ "of:0000000000000005", 1 ], [ "of:0000000000000005", 3 ] ]
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+        # TODO: check buckets in groups
+        verify( main, disconnected=False )
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+        # TODO: check buckets in groups
+        verify( main, disconnected=False )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE621( self, main ):
+        """
+        Remove all the links in the network and restore all links (repeat 3 times)
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Remove all the links in the network and restore all Links" )
+        setupTest( main, test_idx=621, onosNodes=3 )
+        verify( main, disconnected=False )
+        linksToRemove = [ ["spine101", "spine103"], ["spine102", "spine104"],
+                          ["spine103", "leaf6"], ["spine103", "leaf1"],
+                          ["spine104", "leaf6"], ["spine104", "leaf1"],
+                          ["spine101", "leaf2"], ["spine101", "leaf3"], ["spine101", "leaf4"], ["spine101", "leaf5"],
+                          ["spine102", "leaf2"], ["spine102", "leaf3"], ["spine102", "leaf4"], ["spine102", "leaf5"],
+                          ["leaf2", "leaf3"], ["leaf4", "leaf5"] ]
+        portsToDisable = [ [ "of:0000000000000001", 3 ], [ "of:0000000000000001", 4 ],
+                           [ "of:0000000000000001", 5 ], [ "of:0000000000000001", 6 ],
+                           [ "of:0000000000000002", 6 ], [ "of:0000000000000002", 7 ],
+                           [ "of:0000000000000002", 8 ], [ "of:0000000000000002", 9 ],
+                           [ "of:0000000000000002", 10 ], [ "of:0000000000000002", 11 ],
+                           [ "of:0000000000000003", 6 ], [ "of:0000000000000003", 7 ],
+                           [ "of:0000000000000003", 8 ], [ "of:0000000000000003", 9 ],
+                           [ "of:0000000000000003", 10 ], [ "of:0000000000000003", 11 ],
+                           [ "of:0000000000000003", 12 ], [ "of:0000000000000003", 13 ],
+                           [ "of:0000000000000004", 6 ], [ "of:0000000000000004", 7 ],
+                           [ "of:0000000000000004", 8 ], [ "of:0000000000000004", 9 ],
+                           [ "of:0000000000000004", 10 ], [ "of:0000000000000004", 11 ],
+                           [ "of:0000000000000004", 12 ], [ "of:0000000000000004", 13 ], [ "of:0000000000000004", 14 ],
+                           [ "of:0000000000000005", 6 ], [ "of:0000000000000005", 7 ],
+                           [ "of:0000000000000005", 8 ], [ "of:0000000000000005", 9 ],
+                           [ "of:0000000000000005", 10 ], [ "of:0000000000000005", 11 ],
+                           [ "of:0000000000000005", 12 ], [ "of:0000000000000005", 13 ],
+                           [ "of:0000000000000005", 14 ], [ "of:0000000000000005", 15 ],
+                           [ "of:0000000000000006", 3 ], [ "of:0000000000000006", 4 ],
+                           [ "of:0000000000000006", 5 ], [ "of:0000000000000006", 6 ] ]
+        for i in range( 0, 3 ):
+            lib.killLinkBatch( main, linksToRemove, 0, 10 )
+            for dpid, port in portsToDisable:
+                main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+            time.sleep( 10 )
+            main.disconnectedIpv4Hosts = main.internalIpv4Hosts
+            main.disconnectedIpv6Hosts = main.internalIpv6Hosts
+            verify( main )
+            lib.restoreLinkBatch( main, linksToRemove, 48, 10 )
+            for dpid, port in portsToDisable:
+                main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+            time.sleep( 30 )
+            main.Network.discoverHosts( hostList=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
+            time.sleep( 10 )
+            main.disconnectedIpv4Hosts = []
+            main.disconnectedIpv6Hosts = []
+            verify( main )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE622( self, main ):
+        """
+        Take down all uplinks from a paired leaf switch
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        from core import utilities
+        main.case( "Take down all uplinks from a paired leaf switch" )
+        setupTest( main, test_idx=622, onosNodes=3 )
+        verify( main, disconnected=False )
+        ctrl = main.Cluster.active( 0 )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe",
+                                               [ "of:0000000000000002/7", "of:0000000000000003/6" ] )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe",
+                                               [ "of:0000000000000002/8", "of:0000000000000003/7" ] )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1",
+                                               [ "of:0000000000000002/10", "of:0000000000000003/10" ] )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1",
+                                               [ "of:0000000000000002/11", "of:0000000000000003/11" ] )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        linksToRemove = [ ["spine101", "leaf2"], ["spine102", "leaf2"] ]
+        lib.killLinkBatch( main, linksToRemove, 40, 10 )
+        # TODO: more verifications are required
+        verify( main )
+        main.step( "Verify some dual-homed hosts become single-homed" )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe", "of:0000000000000003/6" )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe", "of:0000000000000003/7" )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1", "of:0000000000000003/10" )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1", "of:0000000000000003/11" )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        lib.restoreLinkBatch( main, linksToRemove, 48, 10 )
+        verify( main )
+        main.step( "Verify the hosts changed back to be dual-homed" )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe",
+                                               [ "of:0000000000000002/7", "of:0000000000000003/6" ] )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe",
+                                               [ "of:0000000000000002/8", "of:0000000000000003/7" ] )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1",
+                                               [ "of:0000000000000002/10", "of:0000000000000003/10" ] )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1",
+                                               [ "of:0000000000000002/11", "of:0000000000000003/11" ] )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE630( self, main ):
+        """
+        Bring an instance down
+        Drop a device
+        Bring the same instance up again and verify that it sees the device as down.
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        from core import utilities
+        main.case( "Bring an instance down and drop a device" )
+        setupTest( main, test_idx=630, onosNodes=3 )
+        onosToKill = 0
+        deviceToDrop = "spine101"
+        lib.killOnos( main, [ onosToKill ], 10, 48, 2 )
+        lib.killSwitch( main, deviceToDrop, 9, 30 )
+        lib.recoverOnos( main, [ onosToKill ], 9, 30, 3 )
+        result = main.Cluster.runningNodes[ onosToKill ].CLI.checkStatus( 9, 30, 3 )
+        utilities.assert_equals( expect=main.TRUE, actual=result,
+                                 onpass="ONOS instance {} sees correct device numbers".format( onosToKill ),
+                                 onfail="ONOS instance {} doesn't see correct device numbers".format( onosToKill ) )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE642( self, main ):
+        """
+        Drop one link from each double link
+        Drop a link between DAAS-1 and HAGG-1
+        Drop a link between HAGG-2 and SPINE-2
+        Drop one ONOS instance
+        Test connectivity (expect no failure)
+        Bring up all links and ONOS instance
+        Test connectivity (expect no failure)
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Drop ONOS instance and links at the same time" )
+        setupTest( main, test_idx=642, onosNodes=3 )
+        main.Cluster.active( 0 ).CLI.balanceMasters()
+        time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+        verify( main )
+
+        portsToDisable = [ [ "of:0000000000000001", 1 ], [ "of:0000000000000103", 1 ],
+                           [ "of:0000000000000006", 1 ], [ "of:0000000000000103", 2 ],
+                           [ "of:0000000000000101", 9 ], [ "of:0000000000000103", 3 ],
+                           [ "of:0000000000000002", 1 ], [ "of:0000000000000101", 1 ],
+                           [ "of:0000000000000003", 1 ], [ "of:0000000000000101", 3 ],
+                           [ "of:0000000000000004", 1 ], [ "of:0000000000000101", 5 ],
+                           [ "of:0000000000000005", 1 ], [ "of:0000000000000101", 7 ],
+                           [ "of:0000000000000002", 3 ], [ "of:0000000000000102", 1 ],
+                           [ "of:0000000000000003", 3 ], [ "of:0000000000000102", 3 ],
+                           [ "of:0000000000000004", 3 ], [ "of:0000000000000102", 5 ],
+                           [ "of:0000000000000005", 3 ], [ "of:0000000000000102", 7 ] ]
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+        lib.killOnos( main, [ 2, ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+                      int( main.params[ "TOPO" ][ "linkNum" ] ) - len( portsToDisable ), 2 )
+        verify( main )
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+        lib.recoverOnos( main, [ 2, ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+                         int( main.params[ "TOPO" ][ "linkNum" ] ), 3 )
+        verify( main )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE643( self, main ):
+        """
+        Drop one link from each double link
+        Drop a link between DAAS-1 and HAGG-1
+        Drop a link between HAGG-2 and SPINE-2
+        Test connectivity (expect no failure)
+        Bring up all links
+        Drop one ONOS instance
+        Test connectivity (expect no failure)
+        Bring up ONOS instance
+        Test connectivity (expect no failure)
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Drop ONOS instances and bring up links at the same time" )
+        setupTest( main, test_idx=643, onosNodes=3 )
+        main.Cluster.active( 0 ).CLI.balanceMasters()
+        time.sleep( float( main.params[ 'timers' ][ 'balanceMasterSleep' ] ) )
+        verify( main )
+
+        portsToDisable = [ [ "of:0000000000000001", 1 ], [ "of:0000000000000103", 1 ],
+                           [ "of:0000000000000006", 1 ], [ "of:0000000000000103", 2 ],
+                           [ "of:0000000000000101", 9 ], [ "of:0000000000000103", 3 ],
+                           [ "of:0000000000000002", 1 ], [ "of:0000000000000101", 1 ],
+                           [ "of:0000000000000003", 1 ], [ "of:0000000000000101", 3 ],
+                           [ "of:0000000000000004", 1 ], [ "of:0000000000000101", 5 ],
+                           [ "of:0000000000000005", 1 ], [ "of:0000000000000101", 7 ],
+                           [ "of:0000000000000002", 3 ], [ "of:0000000000000102", 1 ],
+                           [ "of:0000000000000003", 3 ], [ "of:0000000000000102", 3 ],
+                           [ "of:0000000000000004", 3 ], [ "of:0000000000000102", 5 ],
+                           [ "of:0000000000000005", 3 ], [ "of:0000000000000102", 7 ] ]
+        for dpid, port in portsToDisable[ : -1 ]:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+        # To trigger sleep for link down discovery and topology check
+        lib.portstate( main, portsToDisable[ -1 ][ 0 ], portsToDisable[ -1 ][ 1 ], "disable",
+                       int( main.params[ "TOPO" ][ "switchNum" ] ),
+                       int( main.params[ "TOPO" ][ "linkNum" ] ) - len( portsToDisable ) )
+
+        verify( main )
+        for dpid, port in portsToDisable[ : -1 ]:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+        # To trigger sleep for link up discovery and topology check
+        lib.portstate( main, portsToDisable[ -1 ][ 0 ], portsToDisable[ -1 ][ 1 ], "enable",
+                       int( main.params[ "TOPO" ][ "switchNum" ] ),
+                       int( main.params[ "TOPO" ][ "linkNum" ] ) )
+        lib.killOnos( main, [ 2, ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+                      int( main.params[ "TOPO" ][ "linkNum" ] ), 2 )
+        verify( main )
+        lib.recoverOnos( main, [ 2, ], int( main.params[ "TOPO" ][ "switchNum" ] ),
+                         int( main.params[ "TOPO" ][ "linkNum" ] ), 3 )
         verify( main )
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
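The port-based cases above share one pattern; a condensed sketch (portsToDisable as defined in each case):

    ctrl = main.Cluster.active( 0 )
    for dpid, port in portsToDisable:
        ctrl.CLI.portstate( dpid=dpid, port=port, state="disable" )
    verify( main, disconnected=False )  # traffic should survive on remaining links
    for dpid, port in portsToDisable:
        ctrl.CLI.portstate( dpid=dpid, port=port, state="enable" )
    verify( main, disconnected=False )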
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
index 5d6f64d..0d7c6f8 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
@@ -113,7 +113,7 @@
     """
     from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
     # Verify connected hosts
-    main.step("Verify reachability of from connected internal hosts to external hosts")
+    main.step("Verify reachability from connected internal hosts to external hosts")
     if ipv4:
         lib.verifyPing( main,
                         [ h for h in main.internalIpv4Hosts if h not in main.disconnectedIpv4Hosts ],
diff --git a/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py b/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
index 44055d3..752296a 100644
--- a/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
+++ b/TestON/tests/USECASE/SegmentRouting/dependencies/Testcaselib.py
@@ -573,7 +573,7 @@
 
         main.linkSleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
         main.log.info(
-                "Waiting %s seconds for links down to be discovered" % main.linkSleep )
+                "Waiting %s seconds for links up to be discovered" % main.linkSleep )
         time.sleep( main.linkSleep )
 
         topology = utilities.retry( main.Cluster.active( 0 ).CLI.checkStatus,
@@ -650,7 +650,7 @@
                                  onfail="Failed to kill switch?" )
 
     @staticmethod
-    def recoverSwitch( main, switch, switches, links, rediscoverHosts=False ):
+    def recoverSwitch( main, switch, switches, links, rediscoverHosts=False, hostsToDiscover=[] ):
         """
         Params: switches, links: number of expected switches and links after SwitchUp, ex.: '4', '6'
         Recover a switch and verify ONOS can see the proper change
@@ -665,8 +665,7 @@
             main.switchSleep ) )
         time.sleep( main.switchSleep )
         if rediscoverHosts:
-            main.Network.discoverIpv4Hosts( main.internalIpv4Hosts )
-            main.Network.discoverIpv6Hosts( main.internalIpv6Hosts )
+            main.Network.discoverHosts( hostList=hostsToDiscover )
             main.log.info( "Waiting %s seconds for hosts to get re-discovered" % (
                            main.switchSleep ) )
             time.sleep( main.switchSleep )
@@ -681,6 +680,7 @@
                                  onpass="Switch recovery successful",
                                  onfail="Failed to recover switch?" )
 
+    @staticmethod
     def portstate( main, dpid, port, state, switches, links ):
         """
         Disable/enable a switch port using 'portstate' and verify ONOS can see the proper link change
@@ -691,6 +691,7 @@
             switches, links: number of expected switches and links after link change, ex.: '4', '6'
         """
         main.step( "Port %s on %s:%s" % ( state, dpid, port ) )
+        main.linkSleep = float( main.params[ 'timers' ][ 'LinkDiscovery' ] )
         main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state=state )
         main.log.info( "Waiting %s seconds for port %s to be discovered" % ( main.linkSleep, state ) )
         time.sleep( main.linkSleep )
@@ -1037,7 +1038,7 @@
             main.log.debug( host.hostMac )
 
     @staticmethod
-    def verifyMulticastTraffic( main, routeName, expect, skipOnFail=True, maxRetry=0 ):
+    def verifyMulticastTraffic( main, routeName, expect, skipOnFail=True, maxRetry=1 ):
         """
         Verify multicast traffic using scapy
         """
@@ -1104,7 +1105,7 @@
             main.skipCase()
 
     @staticmethod
-    def verifyHostLocation( main, hostName, locations, ipv6=False ):
+    def verifyHostLocation( main, hostName, locations, ipv6=False, retry=0 ):
         """
         Verify if the specified host is discovered by ONOS on the given locations
         Required:
@@ -1116,7 +1117,13 @@
         Returns:
             main.TRUE if host is discovered on all locations provided, otherwise main.FALSE
         """
-        main.step( "Verify host {} is discovered at {}".format( hostName, locations ) )
+        main.log.info( "Verify host {} is discovered at {}".format( hostName, locations ) )
         hostIp = main.Network.getIPAddress( hostName, proto='IPV6' if ipv6 else 'IPV4' )
-        result = main.Cluster.active( 0 ).CLI.verifyHostLocation( hostIp, locations )
-        return result
+        result = utilities.retry( main.Cluster.active( 0 ).CLI.verifyHostLocation,
+                                  main.FALSE,
+                                  args=( hostIp, locations ),
+                                  attempts=retry + 1,
+                                  sleep=10 )
+        utilities.assert_equals( expect=main.TRUE, actual=result,
+                                 onpass="Location verification for Host {} passed".format( hostName ),
+                                 onfail="Location verification for Host {} failed".format( hostName ) )
+        return result
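A usage sketch for the retry-enabled verifyHostLocation (host and locations are illustrative; retry=5 means up to 6 attempts, 10 seconds apart):

    lib.verifyHostLocation( main, "h4v4",
                            [ "of:0000000000000002/10", "of:0000000000000003/10" ],
                            retry=5 )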