[ONOS-7196] Make a separate database, graph, and wiki page for the old flow rule results
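
The SCPF pipeline now threads an isOldFlow flag from JenkinsfileTrigger
through environment setup, the database inserts, and the R graph scripts.
When the flag is set, the SCPF environment setup switches the build to
ECFlowRuleStore (disabling DistributedFlowRuleStore), results for the
affected tests are inserted with an is_old_flow value, and the graph
scripts filter on is_old_flow and write separate *_OldFlow graph files,
keeping old and new flow rule results apart. The affected R scripts take
a new <using-old-flow> argument before the save directory, e.g.
(illustrative placeholder values):

    Rscript SCPFbatchFlowResp.R <host> <port> <user> <password> SCPFbatchFlowResp master y /path/to/graphs/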

Change-Id: Ifeeaa0d725d2cd39c9f276d5383f562f2a67be34
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index ecfd64a..c1d5f10 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -1,6 +1,8 @@
 #!groovy
 // This is a Jenkinsfile for a scripted pipeline for the SCPF tests
 // Define sets of tests
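+// ONOS branches of the two previous releases; used for the Saturday and Sunday runs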
+previous_version = "1.11"
+before_previous_version = "1.10"
 AllTheTests=
 [
     "FUNC":[
@@ -32,7 +34,7 @@
             "SCPFintentEventTp":                       ["basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
             "SCPFhostLat":                             ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
             // batch will be on extra_A after fixing from the ONOS side.
-            "SCPFbatchFlowResp":                       ["basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
+            "SCPFbatchFlowResp":                       ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
             "SCPFintentRerouteLat":                    ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
             "SCPFintentInstallWithdrawLat":            ["basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
             "SCPFflowTp1gWithFlowObj":                 ["basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:""],
@@ -79,6 +81,7 @@
 ]
 
 onos_branch = "master"
+test_branch = ""
 onos_tag = params.ONOSTag
 isOldFlow = false
 // Set tests based on day of week
@@ -107,7 +110,7 @@
 if ( manually_run ){
     organize_tests( params.Tests )
     onos_branch = params.ONOSVersion
-    test_branch = params.TestONBranch
+    isOldFlow = params.isOldFlow
     println "Tests to be run manually : "
 }else{
     testcases["SCPF"]["tests"] = SCPF_choices
@@ -152,22 +155,25 @@
             day = "Wednesday"
             break
         case Calendar.THURSDAY:
-            thursday( true, true)
+            thursday( true, true )
             day = "Thursday"
+            isOldFlow = true
             break
         case Calendar.FRIDAY:
-            friday( true, true)
+            friday( true, true )
             day = "Friday"
+            isOldFlow = true
             break
         case Calendar.SATURDAY:
             saturday()
-            onos_branch= "1.11"
+            onos_branch= previous_version
             day = "Saturday"
             break
         case Calendar.SUNDAY:
             sunday()
-            onos_branch= "1.10"
+            onos_branch= before_previous_version
             day = "Sunday"
+            isOldFlow = true
             break
     }
 }
@@ -216,7 +222,6 @@
     SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
     SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
     SCPF_choices += adder( "SCPF", "new_Test", getDay, "W", getResult )
-    isOldFlow = true
 }
 def thursday( getDay, getResult ){
     FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
@@ -225,7 +230,6 @@
     HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
     SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
     SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
-    isOldFlow = true
 }
 def friday( getDay, getResult ){
     FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
@@ -308,16 +312,14 @@
             onos_branch = "onos-" + branch
         }
         wiki = branch
-        if ( !manuallyRun )
-            test_branch = onos_branch
-        if (onos_branch == "onos-1.11")
+        test_branch = onos_branch
+        if (onos_branch == "onos-" + previous_version)
             test_branch = "master"
         println jobOn + "_Pipeline_" + manuallyRun ? "manually" : branch
         node("TestStation-" + nodeName + "s"){
-            if (!manuallyRun)
-                envSetup(onos_branch, test_branch, onosTag)
+            envSetup(onos_branch, test_branch, onosTag, jobOn, manuallyRun )
 
-            exportEnvProperty( onos_branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag )
+            exportEnvProperty( onos_branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag, isOldFlow )
         }
 
         jobToRun = jobOn + "_Pipeline_" + ( manuallyRun ? "manually" : branch )
@@ -326,7 +328,7 @@
 }
 
 // export Environment properties.
-def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run, onosTag ){
+def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run, onosTag, isOldFlow ){
     stage("export Property"){
         sh '''
             echo "ONOSBranch=''' + onos_branch +'''" > /var/jenkins/TestONOS.property
@@ -337,6 +339,8 @@
             echo "Tests=''' + tests +'''" >> /var/jenkins/TestONOS.property
             echo "postResult=''' + postResult +'''" >> /var/jenkins/TestONOS.property
             echo "manualRun=''' + manually_run +'''" >> /var/jenkins/TestONOS.property
+            echo "isOldFlow=''' + isOldFlow +'''" >> /var/jenkins/TestONOS.property
+
         '''
     }
 }
@@ -344,14 +348,29 @@
     slackSend(color:color, message: message)
 }
 // Initialize the environment Setup for the onos and OnosSystemTest
-def envSetup( onos_branch, test_branch, onos_tag ){
+def envSetup( onos_branch, test_branch, onos_tag, jobOn, manuallyRun ){
     stage("envSetup") {
         sh '''#!/bin/bash -l
         set +e
         . ~/.bashrc
         env
-
-        echo -e "\n#####  Set TestON Branch #####"
+        ''' + preSetup( onos_branch, test_branch, onos_tag, manuallyRun ) + '''
+        ''' + oldFlowCheck( jobOn ) + '''
+        ''' + postSetup( onos_branch, test_branch, onos_tag, manuallyRun )
+    }
+}
+def tagCheck(onos_tag, onos_branch){
+    result = "git checkout "
+    if (onos_tag == "" )
+        result += onos_branch //create new local branch
+    else
+        result += onos_tag //checkout the tag
+    return result
+}
+def preSetup( onos_branch, test_branch, onos_tag, isManual ){
+    result = ""
+    if( !isManual ){
+        result = '''echo -e "\n#####  Set TestON Branch #####"
         echo "TestON Branch is set on: ''' + test_branch + '''"
 
         cd ~/OnosSystemTest/
@@ -397,9 +416,21 @@
         echo "##### Check onos-service setting..... #####"
         cat ~/onos/tools/package/bin/onos-service
 
-        export JAVA_HOME=/usr/lib/jvm/java-8-oracle
-        ''' + oldFlowCheck() + '''
-        echo -e "\n##### build ONOS skip unit tests ######"
+        export JAVA_HOME=/usr/lib/jvm/java-8-oracle'''
+    }
+    return result
+}
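+// Setup commands (TestON/ONOS branch checkout and environment prep) run only for scheduled, non-manual builds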
+def oldFlowCheck( jobOn ){
+    result = ""
+    if( isOldFlow && jobOn == "SCPF" )
+        result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/DistributedFlowRuleStore.java
+        sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/ECFlowRuleStore.java'''
+    return result
+}
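+// ONOS build commands (buck build, unit tests skipped) run after the old-flow check, only for non-manual builds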
+def postSetup( onos_branch, test_branch, onos_tag, isManual ){
+    result = ""
+    if( !isManual ){
+        result = '''echo -e "\n##### build ONOS skip unit tests ######"
         #mvn clean install -DskipTests
         # Force buck update
         rm -f ~/onos/bin/buck
@@ -412,19 +443,5 @@
 
         git branch'''
     }
-}
-def tagCheck(onos_tag, onos_branch){
-    result = "git checkout "
-    if (onos_tag == "" )
-        result += onos_branch //create new local branch
-    else
-        result += onos_tag //checkout the tag
-    return result
-}
-def oldFlowCheck(){
-    result = ""
-    if( isOldFlow )
-        result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/DistributedFlowRuleStore.java
-        sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/ECFlowRuleStore.java'''
     return result
 }
\ No newline at end of file
diff --git a/TestON/JenkinsFile/SCPFJenkinsFile b/TestON/JenkinsFile/SCPFJenkinsFile
index fa75de2..e59ccc2 100644
--- a/TestON/JenkinsFile/SCPFJenkinsFile
+++ b/TestON/JenkinsFile/SCPFJenkinsFile
@@ -10,23 +10,23 @@
 neighbors = ['y', 'n']
 times = [ 'y', 'n' ]
 SCPF = [
-    SCPFcbench: [ test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:['Cbench Test'], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)'],
-    SCPFhostLat: [ test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:['Host Latency Test'], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)'],
-    SCPFportLat: [ test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:['Port Latency Test - Port Up','Port Latency Test - Port Down'], dbCols:[ 'up_ofp_to_dev_avg,up_dev_to_link_avg,up_link_to_graph_avg', 'down_ofp_to_dev_avg,down_dev_to_link_avg,down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-    SCPFflowTp1g: [ test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors,finalResult:1, graphTitle:['Flow Throughput Test - neighbors=0','Flow Throughput Test - neighbors=4'], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ],  y_axis:'Throughput (,000 Flows/sec)' ],
-    SCPFflowTp1gWithFlowObj: [ test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0],
-    SCPFscaleTopo: [ test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:['Scale Topology Test'], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)'],
-    SCPFswitchLat: [ test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:['Switch Latency Test - Switch Up','Switch Latency Test - Switch Down'], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_role_request_avg,role_request_to_role_reply_avg,role_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-    SCPFbatchFlowResp: [ test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:['Batch Flow Test - Post', 'Batch Flow Test - Del'], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (ms)'],
-    SCPFintentEventTp: [ test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:['Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4'], dbCols:'SUM( avg ) as avg', dbWhere:[ 'AND scale=5 AND neighbors=0 GROUP BY date','AND scale=5 AND NOT neighbors=0 GROUP BY date' ], y_axis:'Throughput (Ops/sec)'],
-    SCPFintentRerouteLat: [ test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:['Intent Reroute Test'], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
-    SCPFscalingMaxIntents: [ test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:times, finalResult:0],
-    SCPFintentEventTpWithFlowObj: [ test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0],
-    SCPFintentInstallWithdrawLat: [ test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:['Intent Installation Test','Intent Withdrawal Test'], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
-    SCPFintentRerouteLatWithFlowObj: [ test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
-    SCPFscalingMaxIntentsWithFlowObj: [ test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:times, finalResult:0],
-    SCPFintentInstallWithdrawLatWithFlowObj: [ test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
-    SCPFmastershipFailoverLat: [test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:['Mastership Failover Test'], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
+    SCPFcbench: [ flows:false, test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:['Cbench Test'], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)'],
+    SCPFhostLat: [ flows:false, test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:['Host Latency Test'], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)'],
+    SCPFportLat: [ flows:false, test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:['Port Latency Test - Port Up','Port Latency Test - Port Down'], dbCols:[ 'up_ofp_to_dev_avg,up_dev_to_link_avg,up_link_to_graph_avg', 'down_ofp_to_dev_avg,down_dev_to_link_avg,down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+    SCPFflowTp1g: [ flows:true, test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors,finalResult:1, graphTitle:['Flow Throughput Test - neighbors=0','Flow Throughput Test - neighbors=4'], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ],  y_axis:'Throughput (,000 Flows/sec)' ],
+    SCPFflowTp1gWithFlowObj: [ flows:true, test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0],
+    SCPFscaleTopo: [ flows:false, test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:['Scale Topology Test'], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)'],
+    SCPFswitchLat: [ flows:false, test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:['Switch Latency Test - Switch Up','Switch Latency Test - Switch Down'], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_role_request_avg,role_request_to_role_reply_avg,role_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
+    SCPFbatchFlowResp: [ flows:true, test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:['Batch Flow Test - Post', 'Batch Flow Test - Del'], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (ms)'],
+    SCPFintentEventTp: [ flows:true, test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:['Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4'], dbCols:'SUM( avg ) as avg', dbWhere:[ 'AND scale=5 AND neighbors=0 GROUP BY date','AND scale=5 AND NOT neighbors=0 GROUP BY date' ], y_axis:'Throughput (Ops/sec)'],
+    SCPFintentRerouteLat: [ flows:true, test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:['Intent Reroute Test'], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
+    SCPFscalingMaxIntents: [ flows:true, test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:times, finalResult:0],
+    SCPFintentEventTpWithFlowObj: [ flows:true, test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0],
+    SCPFintentInstallWithdrawLat: [ flows:true, test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:['Intent Installation Test','Intent Withdrawal Test'], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)'],
+    SCPFintentRerouteLatWithFlowObj: [ flows:true, test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
+    SCPFscalingMaxIntentsWithFlowObj: [ flows:true, test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:times, finalResult:0],
+    SCPFintentInstallWithdrawLatWithFlowObj: [ flows:true, test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0],
+    SCPFmastershipFailoverLat: [ flows:false, test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:['Mastership Failover Test'], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
 ]
 
 echo("Testcases:")
@@ -41,7 +41,8 @@
         println test
     }
 }
-
+isOldFlow = prop[ "isOldFlow" ]
+oldFlowRuleCheck( isOldFlow )
 def tests = [:]
 for( String test : SCPF.keySet() ){
     toBeRun = testsToRun.contains( test )
@@ -121,7 +122,7 @@
                                     string(credentialsId: 'db_user', variable: 'user'),
                                     string(credentialsId: 'db_host', variable: 'host'),
                                     string(credentialsId: 'db_port', variable: 'port')]) {
-                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', '\$ONOSBranch', \$line);\""
+                                        def database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
                                         if (testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat") {
                                             database_command = pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c \"INSERT INTO " + SCPF[testName]['table'] + " VALUES('\$DATE','" + SCPF[testName]['results'] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
                                         }
@@ -138,8 +139,8 @@
                                         echo ''' + database_command + '''
 
                                         done< ''' + SCPF[testName]['file'] + '''
-                                        ''' + getGraphCommand( SCPF[testName]['rFile'], SCPF[testName]['extra'], host, port, user, pass, testName, prop["ONOSBranch"] ) + '''
-                                        ''' + ( SCPF[testName]['finalResult'] ? generateCombinedResultGraph( host,port, user, pass, testName, prop["ONOSBranch"] ) : "" )
+                                        ''' + getGraphCommand( SCPF[testName]['rFile'], SCPF[testName]['extra'], host, port, user, pass, testName, prop["ONOSBranch"], isOldFlow ) + '''
+                                        ''' + ( SCPF[testName]['finalResult'] ? generateCombinedResultGraph( host, port, user, pass, testName, prop["ONOSBranch"], isOldFlow ) : "" )
                                 }
                             }
                             // Fetch Logs
@@ -165,7 +166,7 @@
                         def post = build job: "Pipeline_postjob_BM", propagate: false
                     }
                     node("TestStation-BMs"){
-                        resultContents = readFile workSpace + "/" + testName + "Result.txt"
+                        resultContents = readFile workSpace + "/" + testName.replaceAll("WithFlowObj","") + "Result.txt"
                         resultContents = resultContents.split("\n")
                         if( resultContents[ 0 ] == "1" ){
                             print "All passed"
@@ -173,8 +174,8 @@
                             print "Failed"
                             if( prop["manualRun"] == "false" )
                                 slackSend(color:"FF0000", message: "[" + prop["ONOSBranch"] + "]" + testName + " : Failed!\n"
-                                                                    + resultContents[ 1 ] + "\n"
-                                                                    + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
+                                + resultContents[ 1 ] + "\n"
+                                + "https://onos-jenkins.onlab.us/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline" )
                             Failed
                         }
                     }
@@ -183,25 +184,45 @@
         }
     }
 }
-def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName ){
+def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName, isOldFlow ){
     result = ""
     for( extra in extras ){
-        result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName ) + ";"
+        result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName, isOldFlow ) + ";"
     }
     return result
 }
-def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName){
-    return "Rscript " + graph_generator_directory + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + testName + " " + branchName +  " " + batch + " " + graph_saved_directory
+def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName, isOldFlow ){
+    return "Rscript " + graph_generator_directory + rFileName + " " + host + " " + port + " " + user + " " + pass + " " +
+            testName + " " + branchName +  " " + batch + " " + usingOldFlow( isOldFlow, testName ) + graph_saved_directory
 }
-def generateCombinedResultGraph( host, port, user, pass, testName, branchName ){
+def generateCombinedResultGraph( host, port, user, pass, testName, branchName, isOldFlow ){
     result = ""
     for ( int i=0; i< SCPF[testName]['graphTitle'].size(); i++){
-        result += "Rscript " + graph_generator_directory + "SCPFLineGraph.R " + host + " " + port + " " + user + " " + pass + " \"" + SCPF[testName]['graphTitle'][i] + "\" " + branchName + " " + 50 + " \"SELECT " +
-        checkIfList( testName, 'dbCols', i ) + " FROM " + SCPF[testName]['table'] + " WHERE  branch=\'" + branchName + "\' " + checkIfList( testName, 'dbWhere', i ) +
-        " ORDER BY date DESC LIMIT 50\" \"" + SCPF[testName]['y_axis'] + "\" " + graph_saved_directory + ";"
+        result += "Rscript " + graph_generator_directory + "SCPFLineGraph.R " + host + " " + port + " " + user + " " + pass + " \"" + SCPF[testName]['graphTitle'][i] + "\" " +
+        branchName + " " + 50 + " \"SELECT " + checkIfList( testName, 'dbCols', i ) + " FROM " + SCPF[testName]['table'] + " WHERE  branch=\'" + branchName + "\' " + sqlOldFlow( isOldFlow, testName ) +
+        checkIfList( testName, 'dbWhere', i ) + " ORDER BY date DESC LIMIT 50\" \"" + SCPF[testName]['y_axis'] + "\" " + hasOldFlow( isOldFlow, testName ) + graph_saved_directory + ";"
     }
     return result
 }
 def checkIfList( testName, forWhich, pos ){
     return SCPF[testName][forWhich].getClass().getName() != "java.lang.String" ? SCPF[testName][forWhich][pos] :  SCPF[testName][forWhich]
+}
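+// SQL clause restricting results to the old or new flow rule store, applied only to tests marked flows:true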
+def sqlOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? " AND " + ( isOldFlow == "true" ? "" : "NOT " ) + "is_old_flow" : ""
+}
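+// When running the new flow rule store, override the flow, batch-size, and intent-key parameters of the affected tests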
+def oldFlowRuleCheck( isOldFlow ){
+    if( isOldFlow == "false" ){
+        SCPF[ 'SCPFflowTp1g' ][ 'test' ] += " --params TEST/flows=6125"
+        SCPF[ 'SCPFbatchFlowResp' ][ 'test' ] += " --params CASE1000/batchSize=100"
+        SCPF[ 'SCPFintentEventTp' ][ 'test' ] += " --params TEST/numKeys=4000"
+    }
+}
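+// Extra is_old_flow value prepended to the database INSERT for flow-affected tests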
+def affectedByOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? "" + isOldFlow + ", " : ""
+}
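+// "y"/"n" old-flow flag passed to the per-test R script for flow-affected tests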
+def usingOldFlow( isOldFlow, testName ){
+    return SCPF[ testName ][ 'flows' ] ? ( isOldFlow == "true" ? "y" : "n" ) + " " : ""
+}
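+// "y"/"n" old-flow flag passed to SCPFLineGraph.R for the front-page trend graphs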
+def hasOldFlow( isOldFlow, testName ){
+    return ( SCPF[ testName ][ 'flows' ] && isOldFlow == "true" ? "y" : "n" ) + " "
 }
\ No newline at end of file
diff --git a/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R b/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R
index 93e9e00..4a406cb 100644
--- a/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R
+++ b/TestON/JenkinsFile/scripts/SCPFIntentInstallWithdrawRerouteLat.R
@@ -23,10 +23,19 @@
 # **********************************************************
 # STEP 1: Data management.
 # **********************************************************
-
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+batch_size = 8
+old_flow = 9
+save_directory = 10
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +56,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 9 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFIntentInstallWithdrawRerouteLat.R",
                                   "<isFlowObj>" ,
@@ -58,9 +67,9 @@
                                   "<test-name>",
                                   "<branch-name>",
                                   "<batch-size>",
+                                  "<using-old-flow>",
                                   "<directory-to-save-graphs>",
                                   sep=" " ) )
-
     q()  # basically exit(), but in R
 }
 
@@ -72,26 +81,31 @@
 
 chartTitle <- "Intent Install, Withdraw, & Reroute Latencies"
 flowObjFileModifier <- ""
-errBarOutputFile <- paste( args[ 9 ],
+errBarOutputFile <- paste( args[ save_directory ],
                     "SCPFIntentInstallWithdrawRerouteLat_",
-                    args[ 7 ],
+                    args[ branch_name ],
                     sep="" )
 
-if ( args[ 1 ] == "y" ){
+if ( args[ has_flow_obj ] == "y" ){
     errBarOutputFile <- paste( errBarOutputFile, "_fobj", sep="" )
     flowObjFileModifier <- "fobj_"
     chartTitle <- paste( chartTitle, "w/ FlowObj" )
 }
-
+if ( args[ old_flow ] == "y" ){
+    errBarOutputFile <- paste( errBarOutputFile, "_OldFlow", sep="" )
+    chartTitle <- paste( chartTitle,
+                         "With Old Flow",
+                         sep="\n" )
+}
 errBarOutputFile <- paste( errBarOutputFile,
                            "_",
-                           args[ 8 ],
+                           args[ batch_size ],
                            "-batchSize_graph.jpg",
                            sep="" )
 
 chartTitle <- paste( chartTitle,
                      "\nBatch Size =",
-                     args[ 8 ],
+                     args[ batch_size ],
                      sep=" " )
 
 # ------------------
@@ -102,10 +116,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 2 ],
-                  port = strtoi( args[ 3 ] ),
-                  user = args[ 4 ],
-                  password = args[ 5 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ---------------------------------------
 # Intent Install and Withdraw SQL Command
@@ -115,14 +129,17 @@
 installWithdrawSQLCommand <- paste( "SELECT * FROM intent_latency_",
                                     flowObjFileModifier,
                                     "tests WHERE batch_size=",
-                                    args[ 8 ],
+                                    args[ batch_size ],
                                     " AND branch = '",
-                                    args[ 7 ],
+                                    args[ branch_name ],
                                     "' AND date IN ( SELECT MAX( date ) FROM intent_latency_",
                                     flowObjFileModifier,
                                     "tests WHERE branch='",
-                                    args[ 7 ],
-                                    "')",
+                                    args[ branch_name ],
+                                    "' AND ",
+                                    ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                                    "is_old_flow",
+                                    ")",
                                     sep="" )
 
 print( "Sending Intent Install and Withdraw SQL command:" )
@@ -138,14 +155,17 @@
 rerouteSQLCommand <- paste( "SELECT * FROM intent_reroute_latency_",
                             flowObjFileModifier,
                             "tests WHERE batch_size=",
-                            args[ 8 ],
+                            args[ batch_size ],
                             " AND branch = '",
-                            args[ 7 ],
+                            args[ branch_name ],
                             "' AND date IN ( SELECT MAX( date ) FROM intent_reroute_latency_",
                             flowObjFileModifier,
                             "tests WHERE branch='",
-                            args[ 7 ],
-                            "')",
+                            args[ branch_name ],
+                            "' AND ",
+                            ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                            "is_old_flow",
+                            ")",
                             sep="" )
 
 print( "Sending Intent Reroute SQL command:" )
@@ -174,6 +194,7 @@
                                   "name",
                                   "date",
                                   "branch",
+                                  "is_old_flow",
                                   "commit",
                                   "scale",
                                   "batch_size",
diff --git a/TestON/JenkinsFile/scripts/SCPFLineGraph.R b/TestON/JenkinsFile/scripts/SCPFLineGraph.R
index f080a4d..93b3060 100644
--- a/TestON/JenkinsFile/scripts/SCPFLineGraph.R
+++ b/TestON/JenkinsFile/scripts/SCPFLineGraph.R
@@ -27,6 +27,18 @@
 # STEP 1: Data management.
 # **********************************************************
 
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+graph_title = 5
+branch_name = 6
+num_dates = 7
+sql_commands = 8
+y_axis = 9
+old_flow = 10
+save_directory = 11
+
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
@@ -51,7 +63,7 @@
 args <- commandArgs( trailingOnly=TRUE )
 
 # Check if sufficient args are provided.
-if ( is.na( args[ 10 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript testresultgraph.R",
                                     "<database-host>",
@@ -63,9 +75,9 @@
                                     "<#-dates>",        # part of the output filename
                                     "<SQL-command>",
                                     "<y-axis-title>",   # y-axis may be different among other SCPF graphs (ie: batch size, latency, etc. )
+                                    "<using-old-flow>",
                                     "<directory-to-save-graph>",
                   sep = " " ) )
-
     q()  # basically exit(), but in R
 }
 
@@ -76,19 +88,23 @@
 print( "Creating title of graph" )
 
 # Title of graph based on command line args.
-title <- args[ 5 ]
+
+title <- args[ graph_title ]
+title <- paste( title, if( args[ old_flow ] == "y" ) "\nWith Old Flow" else "" )
 
 print( "Creating graph filename." )
 
 # Filenames for the output graph include the testname, branch, and the graph type.
-outputFile <- paste( args[ 10 ],
+outputFile <- paste( args[ save_directory ],
                     "SCPF_Front_Page_",
-                    gsub( " ", "_", args[ 5 ] ),
+                    gsub( " ", "_", args[ graph_title ] ),
                     "_",
-                    args[ 6 ],
+                    args[ branch_name ],
                     "_",
-                    args[ 7 ],
-                    "-dates_graph.jpg",
+                    args[ num_dates ],
+                    "-dates",
+                    if( args[ old_flow ] == "y" ) "_OldFlow" else "",
+                    "_graph.jpg",
                     sep="" )
 
 # ------------------
@@ -98,15 +114,14 @@
 print( "Initializing SQL" )
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 print( "Sending SQL command:" )
-print( args[ 8 ] )
-fileData <- dbGetQuery( con, args[ 8 ] )
-
+print( args[ sql_commands ] )
+fileData <- dbGetQuery( con, args[ sql_commands ] )
 
 # **********************************************************
 # STEP 2: Organize data.
@@ -194,7 +209,7 @@
 
 theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
 xLabel <- xlab( "Build" )
-yLabel <- ylab( args[ 9 ] )
+yLabel <- ylab( args[ y_axis ] )
 
 imageWidth <- 15
 imageHeight <- 10
diff --git a/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R b/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R
index d63bce3..19e4a1f 100644
--- a/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R
+++ b/TestON/JenkinsFile/scripts/SCPFbatchFlowResp.R
@@ -23,6 +23,14 @@
 # **********************************************************
 # STEP 1: Data management.
 # **********************************************************
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+old_flow = 7
+save_directory = 8
 
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
@@ -47,7 +55,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFbatchFlowResp",
                                   "<database-host>",
@@ -56,6 +64,7 @@
                                   "<database-password>",
                                   "<test-name>",
                                   "<branch-name>",
+                                  "<using-old-flow>",
                                   "<directory-to-save-graphs>",
                                   sep=" " ) )
 
@@ -68,22 +77,30 @@
 
 print( "Creating filenames and title of graph." )
 
-postOutputFile <- paste( args[ 7 ],
-                         args[ 5 ],
+postOutputFile <- paste( args[ save_directory ],
+                         args[ test_name ],
                          "_",
-                         args[ 6 ],
+                         args[ branch_name ],
+                         if( args[ old_flow ] == "y" ) "_OldFlow" else "",
                          "_PostGraph.jpg",
                          sep="" )
 
-delOutputFile <- paste( args[ 7 ],
-                        args[ 5 ],
+delOutputFile <- paste( args[ save_directory ],
+                        args[ test_name ],
                         "_",
-                        args[ 6 ],
+                        args[ branch_name ],
+                        if( args[ old_flow ] == "y" ) "_OldFlow" else "",
                         "_DelGraph.jpg",
                         sep="" )
 
-postChartTitle <- paste( "Single Bench Flow Latency - Post", "Last 3 Builds", sep = "\n" )
-delChartTitle <- paste( "Single Bench Flow Latency - Del", "Last 3 Builds", sep = "\n" )
+postChartTitle <- paste( "Single Bench Flow Latency - Post\n",
+                         "Last 3 Builds",
+                         if( args[ old_flow ] == "y" ) " With Old Flow" else "",
+                         sep = "" )
+delChartTitle <- paste( "Single Bench Flow Latency - Del\n",
+                        "Last 3 Builds",
+                        if( args[ old_flow ] == "y" ) " With Old Flow" else "",
+                        sep = "" )
 
 # ------------------
 # SQL Initialization
@@ -93,10 +110,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ---------------------------
 # Batch Flow Resp SQL Command
@@ -105,8 +122,11 @@
 print( "Generating Batch Flow Resp SQL Command" )
 
 command <- paste( "SELECT * FROM batch_flow_tests WHERE branch='",
-                  args[ 6 ],
-                  "' ORDER BY date DESC LIMIT 3",
+                  args[ branch_name ],
+                  "' AND " ,
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ) ,
+                  "is_old_flow",
+                  " ORDER BY date DESC LIMIT 3",
                   sep="" )
 
 print( "Sending SQL command:" )
@@ -354,4 +374,4 @@
         height = imageHeight,
         dpi = imageDPI )
 
-print( paste( "[SUCCESS] Successfully wrote stacked bar chart out to", delOutputFile ) )
\ No newline at end of file
+print( paste( "[SUCCESS] Successfully wrote stacked bar chart out to", delOutputFile ) )
diff --git a/TestON/JenkinsFile/scripts/SCPFcbench.R b/TestON/JenkinsFile/scripts/SCPFcbench.R
index fa59c55..b62fa0f 100644
--- a/TestON/JenkinsFile/scripts/SCPFcbench.R
+++ b/TestON/JenkinsFile/scripts/SCPFcbench.R
@@ -27,7 +27,13 @@
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
-
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 # Command line arguments are read.
 print( "Reading commmand-line args." )
 args <- commandArgs( trailingOnly=TRUE )
@@ -47,7 +53,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFcbench",
                                   "<database-host>",
@@ -68,10 +74,10 @@
 
 print( "Creating filenames and title of graph." )
 
-errBarOutputFile <- paste( args[ 7 ],
-                           args[ 5 ],
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
                            "_",
-                           args[ 6 ],
+                           args[ branch_name ],
                            "_errGraph.jpg",
                            sep="" )
 
@@ -85,10 +91,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ------------------
 # Cbench SQL Command
@@ -97,7 +103,7 @@
 print( "Generating Scale Topology SQL Command" )
 
 command <- paste( "SELECT * FROM cbench_bm_tests WHERE branch='",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' ORDER BY date DESC LIMIT 3",
                   sep="" )
 
diff --git a/TestON/JenkinsFile/scripts/SCPFflowTp1g.R b/TestON/JenkinsFile/scripts/SCPFflowTp1g.R
index 9c79ac8..140d5f8 100644
--- a/TestON/JenkinsFile/scripts/SCPFflowTp1g.R
+++ b/TestON/JenkinsFile/scripts/SCPFflowTp1g.R
@@ -23,6 +23,16 @@
 # **********************************************************
 # STEP 1: Data management.
 # **********************************************************
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+has_neighbors = 8
+old_flow = 9
+save_directory = 10
 
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
@@ -47,7 +57,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 9 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFflowTp1g.R",
                                   "<has-flow-obj>",
@@ -58,6 +68,7 @@
                                   "<test-name>",
                                   "<branch-name>",
                                   "<has-neighbors>",
+                                  "<using-old-flow>",
                                   "<directory-to-save-graphs>",
                                   sep=" " ) )
 
@@ -75,7 +86,7 @@
 commandNeighborModifier <- ""
 fileFlowObjModifier <- ""
 sqlFlowObjModifier <- ""
-if ( args[ 1 ] == 'y' ){
+if ( args[ has_flow_obj ] == 'y' ){
     fileFlowObjModifier <- "_flowObj"
     sqlFlowObjModifier <- "_fobj"
     chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
@@ -83,22 +94,27 @@
 
 chartTitle <- paste( chartTitle, "\nNeighbors =", sep="" )
 
-if ( args[ 8 ] == 'y' ){
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
     fileNeighborsModifier <- "all"
     commandNeighborModifier <- "scale=1 OR NOT "
     chartTitle <- paste( chartTitle, "Cluster Size - 1" )
 } else {
     chartTitle <- paste( chartTitle, "0" )
 }
-
-errBarOutputFile <- paste( args[ 9 ],
-                           args[ 6 ],
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Old Flow", sep="\n" )
+}
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
                            "_",
-                           args[ 7 ],
+                           args[ branch_name ],
                            "_",
                            fileNeighborsModifier,
                            "-neighbors",
                            fileFlowObjModifier,
+                           fileOldFlowModifier,
                            "_graph.jpg",
                            sep="" )
 # ------------------
@@ -109,10 +125,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 2 ],
-                  port = strtoi( args[ 3 ] ),
-                  user = args[ 4 ],
-                  password = args[ 5 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ---------------------------
 # Flow Throughput SQL Command
@@ -125,12 +141,15 @@
                   "_tests WHERE (",
                   commandNeighborModifier,
                   "neighbors = 0 ) AND branch = '",
-                  args[ 7 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT max( date ) FROM flow_tp",
                   sqlFlowObjModifier,
                   "_tests WHERE branch='",
-                  args[ 7 ],
-                  "' ) GROUP BY scale ORDER BY scale",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) GROUP BY scale ORDER BY scale",
                   sep="" )
 
 print( "Sending SQL command:" )
diff --git a/TestON/JenkinsFile/scripts/SCPFhostLat.R b/TestON/JenkinsFile/scripts/SCPFhostLat.R
index 0ae64cf..56d0f11 100644
--- a/TestON/JenkinsFile/scripts/SCPFhostLat.R
+++ b/TestON/JenkinsFile/scripts/SCPFhostLat.R
@@ -22,11 +22,17 @@
 
 # **********************************************************
 # STEP 1: Data management.
-# **********************************************************
-
+# **********************************************************
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +53,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFhostLat",
                                   "<database-host>",
@@ -68,15 +74,14 @@
 
 print( "Creating filenames and title of graph." )
 
-errBarOutputFile <- paste( args[ 7 ],
-                           args[ 5 ],
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
                            "_",
-                           args[ 6 ],
+                           args[ branch_name ],
                            "_errGraph.jpg",
                            sep="" )
 
 chartTitle <- "Host Latency"
-
 # ------------------
 # SQL Initialization
 # ------------------
@@ -85,10 +90,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ------------------------
 # Host Latency SQL Command
@@ -97,9 +102,9 @@
 print( "Generating Host Latency SQL Command" )
 
 command  <- paste( "SELECT * FROM host_latency_tests WHERE branch = '",
-                   args[ 6 ],
+                   args[ branch_name ],
                    "' AND date IN ( SELECT MAX( date ) FROM host_latency_tests WHERE branch = '",
-                   args[ 6 ],
+                   args[ branch_name ],
                    "' ) ",
                    sep = "" )
 
@@ -230,4 +235,4 @@
         height = imageHeight,
         dpi = imageDPI )
 
-print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
\ No newline at end of file
+print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
diff --git a/TestON/JenkinsFile/scripts/SCPFintentEventTp.R b/TestON/JenkinsFile/scripts/SCPFintentEventTp.R
index 53fe2d4..c7578f5 100644
--- a/TestON/JenkinsFile/scripts/SCPFintentEventTp.R
+++ b/TestON/JenkinsFile/scripts/SCPFintentEventTp.R
@@ -27,6 +27,16 @@
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+has_neighbors = 8
+old_flow = 9
+save_directory = 10
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +57,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 9 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFIntentEventTp.R",
                                   "<has-flow-obj>",
@@ -58,6 +68,7 @@
                                   "<test-name>",
                                   "<branch-name>",
                                   "<has-neighbors>",
+                                  "<using-old-flow>",
                                   "<directory-to-save-graphs>",
                                   sep=" " ) )
 
@@ -76,7 +87,7 @@
 fileFlowObjModifier <- ""
 sqlFlowObjModifier <- ""
 
-if ( args[ 1 ] == 'y' ){
+if ( args[ has_flow_obj ] == 'y' ){
     fileFlowObjModifier <- "_flowObj"
     sqlFlowObjModifier <- "_fobj"
     chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
@@ -84,22 +95,28 @@
 
 chartTitle <- paste( chartTitle, "\nevents/second with Neighbors =", sep="" )
 
-if ( args[ 8 ] == 'y' ){
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
     fileNeighborsModifier <- "all"
     commandNeighborModifier <- "scale=1 OR NOT "
     chartTitle <- paste( chartTitle, "all" )
 } else {
     chartTitle <- paste( chartTitle, "0" )
 }
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Old Flow", sep="\n" )
+}
 
-errBarOutputFile <- paste( args[ 9 ],
-                           args[ 6 ],
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
                            "_",
-                           args[ 7 ],
+                           args[ branch_name ],
                            "_",
                            fileNeighborsModifier,
                            "-neighbors",
                            fileFlowObjModifier,
+                           fileOldFlowModifier,
                            "_graph.jpg",
                            sep="" )
 
@@ -111,10 +128,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 2 ],
-                  port = strtoi( args[ 3 ] ),
-                  user = args[ 4 ],
-                  password = args[ 5 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # -----------------------------------
 # Intent Event Throughput SQL Command
@@ -127,12 +144,15 @@
                   "_tests WHERE (",
                   commandNeighborModifier,
                   "neighbors = 0 ) AND branch = '",
-                  args[ 7 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT max( date ) FROM intent_tp",
                   sqlFlowObjModifier,
                   "_tests WHERE branch='",
-                  args[ 7 ],
-                  "' ) GROUP BY scale ORDER BY scale",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) GROUP BY scale ORDER BY scale",
                   sep="" )
 
 print( "Sending SQL command:" )
diff --git a/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R b/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R
index 82638dc..8681f29 100644
--- a/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R
+++ b/TestON/JenkinsFile/scripts/SCPFmastershipFailoverLat.R
@@ -27,6 +27,13 @@
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -46,9 +53,7 @@
 # -------------------
 
 print( "Verifying CLI args." )
-
-if ( is.na( args[ 7 ] ) ){
-
+if ( is.na( args[ save_directory ] ) ){
     print( paste( "Usage: Rscript SCPFmastershipFailoverLat",
                                   "<database-host>",
                                   "<database-port>",
@@ -70,17 +75,17 @@
 
 chartTitle <- "Mastership Failover Latency"
 
-errBarOutputFile <- paste( args[ 7 ],
-                           args[ 5 ],
+errBarOutputFile <- paste( args[ save_directory ],
+                           args[ test_name ],
                            "_",
-                           args[ 6 ],
+                           args[ branch_name ],
                            "_errGraph.jpg",
                            sep="" )
 
-stackedBarOutputFile <- paste( args[ 7 ],
-                        args[ 5 ],
+stackedBarOutputFile <- paste( args[ save_directory ],
+                        args[ test_name ],
                         "_",
-                        args[ 6 ],
+                        args[ branch_name ],
                         "_stackedGraph.jpg",
                         sep="" )
 
@@ -92,10 +97,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ---------------------------------------
 # Mastership Failover Latency SQL Command
@@ -104,9 +109,9 @@
 print( "Generating Mastership Failover Latency SQL command" )
 
 command  <- paste( "SELECT * FROM mastership_failover_tests WHERE branch = '",
-                   args[ 6 ],
+                   args[ branch_name ],
                    "' AND date IN ( SELECT MAX( date ) FROM mastership_failover_tests WHERE branch = '",
-                   args[ 6 ],
+                   args[ branch_name ],
                    "' ) ",
                    sep = "" )
 
diff --git a/TestON/JenkinsFile/scripts/SCPFportLat.R b/TestON/JenkinsFile/scripts/SCPFportLat.R
index bb43248..254b718 100644
--- a/TestON/JenkinsFile/scripts/SCPFportLat.R
+++ b/TestON/JenkinsFile/scripts/SCPFportLat.R
@@ -23,10 +23,16 @@
 # **********************************************************
 # STEP 1: Data management.
 # **********************************************************
-
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +53,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFportLat",
                                   "<database-host>",
@@ -67,31 +73,29 @@
 # -----------------
 
 print( "Creating filenames and title of graph." )
-
-errBarOutputFileUp <- paste( args[ 7 ],
+errBarOutputFileUp <- paste( args[ save_directory ],
                              "SCPFportLat_",
-                             args[ 6 ],
+                             args[ branch_name ],
                              "_UpErrBarWithStack.jpg",
                              sep = "" )
 
-errBarOutputFileDown <- paste( args[ 7 ],
+errBarOutputFileDown <- paste( args[ save_directory ],
                              "SCPFportLat_",
-                             args[ 6 ],
+                             args[ branch_name ],
                              "_DownErrBarWithStack.jpg",
                              sep = "" )
 
 # ------------------
 # SQL Initialization
 # ------------------
-
 print( "Initializing SQL" )
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # ------------------------
 # Port Latency SQL Command
@@ -100,9 +104,9 @@
 print( "Generating Port Latency SQL Command" )
 
 command <- paste( "SELECT * FROM port_latency_details WHERE branch = '",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT MAX( date ) FROM port_latency_details WHERE branch = '",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' ) ",
                   sep = "" )
 
diff --git a/TestON/JenkinsFile/scripts/SCPFscaleTopo.R b/TestON/JenkinsFile/scripts/SCPFscaleTopo.R
index 8344efb..fdb39e4 100644
--- a/TestON/JenkinsFile/scripts/SCPFscaleTopo.R
+++ b/TestON/JenkinsFile/scripts/SCPFscaleTopo.R
@@ -23,10 +23,16 @@
 # **********************************************************
 # STEP 1: Data management.
 # **********************************************************
-
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +53,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFgraphGenerator",
                                   "<database-host>",
@@ -68,10 +74,10 @@
 
 print( "Creating filenames and title of graph." )
 
-outputFile <- paste( args[ 7 ],
-                     args[ 5 ],
+outputFile <- paste( args[ save_directory ],
+                     args[ test_name ],
                      "_",
-                     args[ 6 ],
+                     args[ branch_name ],
                      "_graph.jpg",
                      sep="" )
 
@@ -85,10 +91,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # --------------------------
 # Scale Topology SQL Command
@@ -97,9 +103,9 @@
 print( "Generating Scale Topology SQL Command" )
 
 command <- paste( "SELECT * FROM scale_topo_latency_details WHERE branch = '",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT MAX( date ) FROM scale_topo_latency_details WHERE branch = '",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' ) ",
                   sep = "" )
 
diff --git a/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R b/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R
index 2ca0627..130e2dd 100644
--- a/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R
+++ b/TestON/JenkinsFile/scripts/SCPFscalingMaxIntents.R
@@ -27,6 +27,15 @@
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+old_flow = 8
+save_directory = 9
 
 print( "Reading commmand-line args." )
 args <- commandArgs( trailingOnly=TRUE )
@@ -46,8 +55,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 8 ] ) ){
-
+if ( is.na( args[ save_directory ] ) ){
     print( paste( "Usage: Rscript SCPFInstalledIntentsFlows",
                                   "<has-flowObj>",
                                   "<database-host>",
@@ -56,6 +64,7 @@
                                   "<database-password>",
                                   "<test-name>",
                                   "<branch-name>",
+                                  "<using-old-flow>",
                                   "<directory-to-save-graphs>",
                                   sep=" " ) )
 
@@ -72,17 +81,23 @@
 sqlFlowObjModifier <- ""
 chartTitle <- "Number of Installed Intents & Flows"
 
-if ( args[ 1 ] == "y" ){
+if ( args[ has_flow_obj ] == "y" ){
     fileFlowObjModifier <- "_flowObj"
     sqlFlowObjModifier <- "fobj_"
     chartTitle <- "Number of Installed Intents & Flows\n with Flow Objectives"
 }
+fileOldFlowModifier <- ""
+if ( args[ old_flow ] == 'y' ){
+    fileOldFlowModifier <- "_OldFlow"
+    chartTitle <- paste( chartTitle, "With Old Flow", sep="\n" )
+}
 
-outputFile <- paste( args[ 8 ],
-                     args[ 6 ],
+outputFile <- paste( args[ save_directory ],
+                     args[ test_name ],
                      fileFlowObjModifier,
+                     fileOldFlowModifier,
                      "_",
-                     args[ 7 ],
+                     args[ branch_name ],
                      "_errGraph.jpg",
                      sep="" )
 
@@ -94,10 +109,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 2 ],
-                  port = strtoi( args[ 3 ] ),
-                  user = args[ 4 ],
-                  password = args[ 5 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # -------------------------------
 # Scaling Max Intents SQL Command
@@ -108,12 +123,15 @@
 command <- paste( "SELECT * FROM max_intents_",
                   sqlFlowObjModifier,
                   "tests WHERE branch = '",
-                  args[ 7 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT MAX( date ) FROM max_intents_",
                   sqlFlowObjModifier,
                   "tests WHERE branch = '",
-                  args[ 7 ],
-                  "' ) ",
+                  args[ branch_name ],
+                  "' AND ",
+                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+                  "is_old_flow",
+                  " ) ",
                   sep="" )
 
 print( "Sending SQL command:" )
diff --git a/TestON/JenkinsFile/scripts/SCPFswitchLat.R b/TestON/JenkinsFile/scripts/SCPFswitchLat.R
index 97b8d44..8a68e08 100644
--- a/TestON/JenkinsFile/scripts/SCPFswitchLat.R
+++ b/TestON/JenkinsFile/scripts/SCPFswitchLat.R
@@ -27,6 +27,13 @@
 print( "**********************************************************" )
 print( "STEP 1: Data management." )
 print( "**********************************************************" )
+database_host = 1
+database_port = 2
+database_u_id = 3
+database_pw = 4
+test_name = 5
+branch_name = 6
+save_directory = 7
 
 # Command line arguments are read.
 print( "Reading commmand-line args." )
@@ -47,7 +54,7 @@
 
 print( "Verifying CLI args." )
 
-if ( is.na( args[ 7 ] ) ){
+if ( is.na( args[ save_directory ] ) ){
 
     print( paste( "Usage: Rscript SCPFswitchLat",
                             "<database-host>",
@@ -68,15 +75,15 @@
 
 print( "Creating filenames and title of graph." )
 
-errBarOutputFileUp <- paste( args[ 7 ],
+errBarOutputFileUp <- paste( args[ save_directory ],
                              "SCPFswitchLat_",
-                             args[ 6 ],
+                             args[ branch_name ],
                              "_UpErrBarWithStack.jpg",
                              sep="" )
 
-errBarOutputFileDown <- paste( args[ 7 ],
+errBarOutputFileDown <- paste( args[ save_directory ],
                                "SCPFswitchLat_",
-                               args[ 6 ],
+                               args[ branch_name ],
                                "_DownErrBarWithStack.jpg",
                                sep="" )
 # ------------------
@@ -87,10 +94,10 @@
 
 con <- dbConnect( dbDriver( "PostgreSQL" ),
                   dbname = "onostest",
-                  host = args[ 1 ],
-                  port = strtoi( args[ 2 ] ),
-                  user = args[ 3 ],
-                  password = args[ 4 ] )
+                  host = args[ database_host ],
+                  port = strtoi( args[ database_port ] ),
+                  user = args[ database_u_id ],
+                  password = args[ database_pw ] )
 
 # --------------------------
 # Switch Latency SQL Command
@@ -99,9 +106,9 @@
 print( "Generating Switch Latency SQL Command" )
 
 command <- paste( "SELECT * FROM switch_latency_details WHERE branch = '",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' AND date IN ( SELECT MAX( date ) FROM switch_latency_details WHERE branch='",
-                  args[ 6 ],
+                  args[ branch_name ],
                   "' )",
                   sep="" )