Merge "Enforce code style for groovy files"
diff --git a/TestON/JenkinsFile/CHO_Graph_Generator b/TestON/JenkinsFile/CHO_Graph_Generator
index 04a6f74..91ad9e5 100644
--- a/TestON/JenkinsFile/CHO_Graph_Generator
+++ b/TestON/JenkinsFile/CHO_Graph_Generator
@@ -42,16 +42,16 @@
 // create a bash script that will generate the graph
 graphScript = generateGraphScript( branchList )
 
-stage( 'Generating-Graph' ){
+stage( 'Generating-Graph' ) {
     // This will run on the TestStation-Fabric5s node.
-    node( "TestStation-Fabric5s" ){
+    node( "TestStation-Fabric5s" ) {
         // run the bash script on this node.
         runScript( graphScript )
     }
 }
 // stage that will trigger postjob.
 // Needs to be executed outside the current node to avoid deadlock.
-stage( 'posting-result' ){
+stage( 'posting-result' ) {
     postJob()
 }
 
@@ -60,7 +60,7 @@
     graphScript = ''''''
 
     // In case there are multiple branches running.
-    for( branch in branchList ){
+    for ( branch in branchList ){
         branchDir = scriptDir + branch + "/"
         graphScript += '''export BRANCH=''' + branchDir + '''
                           # make branch dir if not existing.
@@ -75,14 +75,15 @@
                           # run the log-summary that will export status
                           bash log-summary;''' + '''
                           # run Rscript with its parameters.
-                          Rscript ''' +  script_file + ' ' + branchDir + 'event.csv ' +
-                                branchDir + 'failure.csv ' + branchDir + 'error.csv ' +
-                                branch + ' 60 ' + hours + ' ' +  saving_directory + ''';
+                          Rscript ''' + script_file + ' ' + branchDir + 'event.csv ' +
+                          branchDir + 'failure.csv ' + branchDir + 'error.csv ' +
+                          branch + ' 60 ' + hours + ' ' + saving_directory + ''';
         '''
         print( graphScript )
     }
     return graphScript
 }
+
 def runScript( graphScript ){
     // run bash script that will init the environment and run the graph generating part.
     sh '''#!/bin/bash -l
@@ -92,6 +93,7 @@
           cd ''' + scriptDir + ''';
           ''' + graphScript
 }
+
 def postJob(){
     // Triggering jenkins job called `postjob-Fabric5`
     jobToRun = "postjob-Fabric5"
diff --git a/TestON/JenkinsFile/FUNCJenkinsFile b/TestON/JenkinsFile/FUNCJenkinsFile
index 613dbf5..419f35d 100644
--- a/TestON/JenkinsFile/FUNCJenkinsFile
+++ b/TestON/JenkinsFile/FUNCJenkinsFile
@@ -50,11 +50,12 @@
 funcs.printTestToRun( testsToRun )
 
 // run the test sequentially and save the function into the dictionary.
-def tests = [:]
-for( String test : FUNC.keySet() ){
-    toBeRun = testsToRun.contains( test )
+def tests = [ : ]
+for ( String test : FUNC.keySet() ){
+    def toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, FUNC, graph_generator_file, graph_saved_directory )
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false,
+                                       FUNC, graph_generator_file, graph_saved_directory )
 }
 
 // get the start time of the test.
@@ -69,4 +70,4 @@
 funcs.generateOverallGraph( prop, FUNC, graph_saved_directory )
 
 // send the notification to Slack that running FUNC tests was ended.
-funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/FabricJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
index 21f5ec4..04695f7 100644
--- a/TestON/JenkinsFile/FabricJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -43,23 +43,11 @@
 wikiContents = ""
 
 testcases = [
-    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
-    "SR" : [ tests : "", nodeName : [ "Fabric2", "Fabric3", "Fabric4" ], wikiContent : "" ],
-    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
-]
-
-// depends on the First two letter of the testname, it will decide which category to put test.
-Prefix_organizer = [
-    "FU" : "FUNC",
-    "HA" : "HA",
-    "PL" : "USECASE",
-    "SA" : "USECASE",
-    "SC" : "SCPF",
-    "SR" : "SR",
-    "US" : "USECASE",
-    "VP" : "USECASE"
+        "FUNC": [ tests: "", nodeName: "VM", wikiContent: "" ],
+        "HA": [ tests: "", nodeName: "VM", wikiContent: "" ],
+        "SCPF": [ tests: "", nodeName: "BM", wikiContent: "" ],
+        "SR": [ tests: "", nodeName: [ "Fabric2", "Fabric3", "Fabric4" ], wikiContent: "" ],
+        "USECASE": [ tests: "", nodeName: "BM", wikiContent: "" ]
 ]
 
 // set some variables from the parameter
@@ -100,19 +88,20 @@
 
 // get the post_result. This will have an effect only for the manual runs.
 post_result = params.PostResult
-if( !manually_run ){
+if ( !manually_run ){
     // If it is an automated run, it will post the beginning message to the channel.
-    slackSend( channel:'sr-failures', color:'#03CD9F',
-               message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
-                        + "Starting tests on : " + now.toString()
-                        + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
+    slackSend( channel: 'sr-failures', color: '#03CD9F',
+               message: ":sparkles:" * 16 + "\n" +
+                        "Starting tests on : " + now.toString() +
+                        "\n" + ":sparkles:" * 16 )
 
-    // Choices will get the list of the test with Segment Rounting type tests.
+    // Choices will get the list of the Segment Routing type tests.
     SR_choices += adder( "SR", "basic", true )
     if ( today == Calendar.FRIDAY ){
         // if today is Friday, it will also test tests with extra_A category
         SR_choices += adder( "SR", "extra_A", true )
-    } else if( today == Calendar.SATURDAY ){
+    }
+    else if ( today == Calendar.SATURDAY ){
         // if today is Saturday, it will add the test with extra_B category
         SR_choices += adder( "SR", "extra_B", true )
     }
@@ -126,7 +115,8 @@
 
     isOldFlow = params.isOldFlow
     println "Tests to be run manually : "
-}else{
+}
+else {
     // set the list of the tests to run.
     testcases[ "SR" ][ "tests" ] = SR_choices
     println "Defaulting to " + day + " tests:"
@@ -137,27 +127,46 @@
 
 // This will hold the block of code to be run.
 def runTest = [
-    "Fabric2" : [:],
-    "Fabric3" : [:],
-    "Fabric4" : [:]
+        "Fabric2": [ : ],
+        "Fabric3": [ : ],
+        "Fabric4": [ : ]
 ]
 if ( manually_run ){
     // for manual run situation.
-    for( String test in testcases.keySet() ){
+    for ( String test in testcases.keySet() ){
         println test
         // Unless the list of the tests in the test category is empty, it will save the block of code to run in the dictionary.
         if ( testcases[ test ][ "tests" ] != "" ){
-            runTest[ testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ], test, manually_run, onos_tag )
+            runTest[ testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ] ][ test ] = triggerFuncs.
+                    trigger_pipeline( onos_b,
+                                      testcases[ test ][ "tests" ],
+                                      testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ],
+                                      test,
+                                      manually_run,
+                                      onos_tag )
         }
     }
-}else{
+}
+else {
     // for the automated situation, it will save the current version to Fabric4, the previous version to Fabric2 and before_previous_version to Fabric3.
-    runTest[ "Fabric4" ][ "SR" ] = triggerFuncs.trigger_pipeline( current_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 2 ], "SR", manually_run, onos_tag )
-    runTest[ "Fabric2" ][ "SR" ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
-    runTest[ "Fabric3" ][ "SR" ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 1 ], "SR", manually_run, onos_tag )
+    runTest[ "Fabric4" ][ "SR" ] = triggerFuncs.trigger_pipeline( current_version,
+                                                                  testcases[ "SR" ][ "tests" ],
+                                                                  testcases[ "SR" ][ "nodeName" ][ 2 ],
+                                                                  "SR",
+                                                                  manually_run, onos_tag )
+    runTest[ "Fabric2" ][ "SR" ] = triggerFuncs.trigger_pipeline( previous_version,
+                                                                  testcases[ "SR" ][ "tests" ],
+                                                                  testcases[ "SR" ][ "nodeName" ][ 0 ],
+                                                                  "SR",
+                                                                  manually_run, onos_tag )
+    runTest[ "Fabric3" ][ "SR" ] = triggerFuncs.trigger_pipeline( before_previous_version,
+                                                                  testcases[ "SR" ][ "tests" ],
+                                                                  testcases[ "SR" ][ "nodeName" ][ 1 ],
+                                                                  "SR",
+                                                                  manually_run, onos_tag )
 }
 
-def finalList = [:]
+def finalList = [ : ]
 
 // It will run each category of tests sequentially on each branch.
 finalList[ "Fabric2" ] = triggerFuncs.runTestSeq( runTest[ "Fabric2" ] )
@@ -200,10 +209,11 @@
     // set : set of the test ( Eg. basic, extra_A ... )
     // if getResult == true, it will add the result.
     result = ""
-    for( String test in AllTheTests[ testCat ].keySet() ){
-        if( AllTheTests[ testCat ][ test ][ set ] ){
-            if( getResult )
+    for ( String test in AllTheTests[ testCat ].keySet() ){
+        if ( AllTheTests[ testCat ][ test ][ set ] ){
+            if ( getResult ){
                 result += test + ","
+            }
         }
     }
     return result
@@ -211,7 +221,7 @@
 
 // check which node is on.
 def nodeOn( branch ){
-    switch( branch ) {
+    switch ( branch ){
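+        // returns the index into testcases[ test ][ "nodeName" ] for the given branch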
         case current_version: return 2
         case previous_version: return 0
         case before_previous_version: return 1
diff --git a/TestON/JenkinsFile/HAJenkinsFile b/TestON/JenkinsFile/HAJenkinsFile
index c1bd8bf..cb407ab 100644
--- a/TestON/JenkinsFile/HAJenkinsFile
+++ b/TestON/JenkinsFile/HAJenkinsFile
@@ -50,11 +50,12 @@
 funcs.printTestToRun( testsToRun )
 
 // run the test sequentially and save the function into the dictionary.
-def tests = [:]
-for( String test : HA.keySet() ){
+def tests = [ : ]
+for ( String test : HA.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, HA, graph_generator_file, graph_saved_directory )
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false,
+                                       HA, graph_generator_file, graph_saved_directory )
 }
 
 // get the start time of the test.
@@ -68,4 +69,4 @@
 funcs.generateOverallGraph( prop, HA, graph_saved_directory )
 
 // send the notification to Slack that running HA tests was ended.
-funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index a2996df..0836a97 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -31,19 +31,19 @@
 // read the passed parameters from the Jenkins.
 machines = params.machines
 manually_run = params.manual_run
-if( !manually_run ){
-    slackSend( color:'#03CD9F',
-               message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
-                        + "Starting tests on : " + now.toString()
-                        + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
+if ( !manually_run ){
+    slackSend( color: '#03CD9F',
+               message: ":sparkles:" * 16 + "\n" +
+                        "Starting tests on : " + now.toString() +
+                        "\n" + ":sparkles:" * 16 )
 }
 
 // store the list of the machines into the dictionary.
 machineList = machines.tokenize( "\n;, " )
-machineOn = [:]
+machineOn = [ : ]
 
 // save the triggering job function as a dictionary.
-for (machine in machineList){
+for ( machine in machineList ){
     print( machine )
     machineOn[ machine ] = triggerJob( machine )
 }
@@ -53,7 +53,7 @@
 
 // function that will trigger the specific jobs from the current pipeline.
 def triggerJob( on ){
-    return{
+    return {
         jobToRun = on + "-pipeline-trigger"
         build job: jobToRun, propagate: false
     }
diff --git a/TestON/JenkinsFile/Overall_Graph_Generator b/TestON/JenkinsFile/Overall_Graph_Generator
index 41b1ccb..012b11d 100644
--- a/TestON/JenkinsFile/Overall_Graph_Generator
+++ b/TestON/JenkinsFile/Overall_Graph_Generator
@@ -36,7 +36,7 @@
 funcs.initializeTrend( "VM" )
 
 onos_branch = params.ONOSbranch
-AllTheTests = test_lists.getAllTheTests("")
+AllTheTests = test_lists.getAllTheTests( "" )
 
 // generate the graph and post the result on TestStation-VMs. Right now, all the pie charts and histograms are saved
 // on the VM.
diff --git a/TestON/JenkinsFile/SCPFJenkinsFile b/TestON/JenkinsFile/SCPFJenkinsFile
index ee242e1..9d1122e 100644
--- a/TestON/JenkinsFile/SCPFJenkinsFile
+++ b/TestON/JenkinsFile/SCPFJenkinsFile
@@ -25,8 +25,8 @@
 SCPFfuncs.init()
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
 
-// init funcs with SCPF specificaton
-funcs.initialize( "SCPF", SCPFfuncs );
+// init funcs with SCPF specification
+funcs.initialize( "SCPF", SCPFfuncs )
 
 // read the information from TestON.property on BM
 def prop = null
@@ -43,8 +43,8 @@
 SCPFfuncs.oldFlowRuleCheck( isOldFlow, prop[ "ONOSBranch" ] )
 
 // set the tests to run as a list of functions
-def tests = [:]
-for( String test : SCPFfuncs.SCPF.keySet() ){
+def tests = [ : ]
+for ( String test : SCPFfuncs.SCPF.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
 
@@ -60,4 +60,4 @@
 }
 
 // send result to slack after running test is done.
-funcs.sendResultToSlack( start,  prop["manualRun"], prop[ "WikiPrefix" ] )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/SCPF_Graph_Generator b/TestON/JenkinsFile/SCPF_Graph_Generator
index e12b9e2..da57cd0 100644
--- a/TestON/JenkinsFile/SCPF_Graph_Generator
+++ b/TestON/JenkinsFile/SCPF_Graph_Generator
@@ -20,7 +20,7 @@
 
 // This is the Jenkins script for manual-graph-generator-SCPF
 
-// read and set the functions from dependcies.
+// read and set the functions from dependencies.
 SCPFfuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy' )
 SCPFfuncs.init()
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy' )
@@ -40,11 +40,11 @@
 testsToRun = funcs.getTestsToRun( Tests )
 
 // pureTestName is used because we don't want 'WFobj' to be used for the test name.
-def tests = [:]
-for( String test : testsToRun ){
+def tests = [ : ]
+for ( String test : testsToRun ){
     println test
     pureTestName = test.replaceAll( "WithFlowObj", "" )
-    tests[ test ] = funcs.runTest( test, true, prop, pureTestName, true, [], "", "" )
+    tests[ test ] = funcs.runTest( test, true, prop, pureTestName, true, [ ], "", "" )
 }
 
 // generate the graphs sequentially.
diff --git a/TestON/JenkinsFile/SRJenkinsFile b/TestON/JenkinsFile/SRJenkinsFile
index 3d3f309..8bee5b8 100644
--- a/TestON/JenkinsFile/SRJenkinsFile
+++ b/TestON/JenkinsFile/SRJenkinsFile
@@ -35,7 +35,8 @@
 // additional setup for Segment routing because it is running multiple branch concurrently on different machines.
 funcs.additionalInitForSR( jobName )
 
-// read the TestON.property depends on which branch it is running. ( currently master on Fabric4, 1.13 on Fabric2 and 1.12 on Fabric3 )
+// read the TestON.property depending on which branch it is running.
+// ( currently master on Fabric4, 1.13 on Fabric2 and 1.12 on Fabric3 )
 def prop = null
 prop = funcs.getProperties()
 SR = test_lists.getAllTheTests( prop[ "WikiPrefix" ] )[ "SR" ]
@@ -51,11 +52,12 @@
 funcs.printTestToRun( testsToRun )
 
 // save the functions to run test in the dictionary.
-def tests = [:]
-for( String test : SR.keySet() ){
+def tests = [ : ]
+for ( String test : SR.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[stepName] = funcs.runTest( test, toBeRun, prop, test, false, SR, graph_generator_file, graph_saved_directory )
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false,
+                                       SR, graph_generator_file, graph_saved_directory )
 }
 
 // get start time
@@ -67,4 +69,4 @@
 }
 //funcs.generateOverallGraph( prop, SR, graph_saved_directory )
 // send the notification of ending test after SR tests is done.
-funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
diff --git a/TestON/JenkinsFile/Trend_Graph_Generator b/TestON/JenkinsFile/Trend_Graph_Generator
index b95504d..4c51964 100644
--- a/TestON/JenkinsFile/Trend_Graph_Generator
+++ b/TestON/JenkinsFile/Trend_Graph_Generator
@@ -29,11 +29,12 @@
 // read the node cluster ( VM or BM or Fabrics ) from the Jenkins job.
 nodeCluster = params.NodeCluster
 
-funcs.initializeTrend( nodeCluster );
+funcs.initializeTrend( nodeCluster )
 
 // do an additional check for Fabric since it will differ depending on which branch it is running on.
-if( nodeCluster == "Fabric" )
-  funcs.additionalInitForSR( params.ONOSbranch )
+if ( nodeCluster == "Fabric" ){
+    funcs.additionalInitForSR( params.ONOSbranch )
+}
 def prop = null
 prop = funcs.getProperties()
 
@@ -43,16 +44,17 @@
 
 // set some of the paths of the file and directory
 graph_generator_file = fileRelated.trendIndividual
-graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-" + nodeCluster + ( nodeCluster == "Fabric" ? funcs.fabricOn( prop[ "ONOSBranch" ] ) : "" ) + "/"
+graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-" + nodeCluster +
+                        ( nodeCluster == "Fabric" ? funcs.fabricOn( prop[ "ONOSBranch" ] ) : "" ) + "/"
 
 def testsToRun = null
 testsToRun = funcs.getTestsToRun( Tests )
 
 // set functions into the dictionary.
-def tests = [:]
-for( String test : testsToRun ){
+def tests = [ : ]
+for ( String test : testsToRun ){
     println test
-    tests[ test ] = funcs.runTest( test, true, prop, test, true, [], graph_generator_file, graph_saved_directory )
+    tests[ test ] = funcs.runTest( test, true, prop, test, true, [ ], graph_generator_file, graph_saved_directory )
 }
 
 for ( test in tests.keySet() ){
diff --git a/TestON/JenkinsFile/USECASEJenkinsFile b/TestON/JenkinsFile/USECASEJenkinsFile
index dd5cf88..7d55330 100644
--- a/TestON/JenkinsFile/USECASEJenkinsFile
+++ b/TestON/JenkinsFile/USECASEJenkinsFile
@@ -45,22 +45,23 @@
 funcs.printTestToRun( testsToRun )
 
 // save the function of the test running into the dictionary.
-def tests = [:]
-for( String test : USECASE.keySet() ){
+def tests = [ : ]
+for ( String test : USECASE.keySet() ){
     toBeRun = testsToRun.contains( test )
     def stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false, USECASE, graph_generator_file, graph_saved_directory )
+    tests[ stepName ] = funcs.runTest( test, toBeRun, prop, test, false,
+                                       USECASE, graph_generator_file, graph_saved_directory )
 }
 
 // get start time of the test.
 start = funcs.getCurrentTime()
-// run the tests sequntially
+// run the tests sequentially
 for ( test in tests.keySet() ){
     tests[ test ].call()
 }
 
 // send the result to slack after USECASE test is done.
-funcs.sendResultToSlack( start,  prop[ "manualRun" ], prop[ "WikiPrefix" ] )
+funcs.sendResultToSlack( start, prop[ "manualRun" ], prop[ "WikiPrefix" ] )
 
 // generate the overall graph for USECASE.
 funcs.generateOverallGraph( prop, USECASE, graph_saved_directory )
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
index 729cb89..d203f07 100644
--- a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -43,23 +43,11 @@
 // default FUNC,HA to be VM, SCPF,USECASE to be BM.
 // SR will not be used in here.
 testcases = [
-    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
-    "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
-    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
-]
-
-// depends on the first two characters of the test name, it will be divided.
-Prefix_organizer = [
-    "FU" : "FUNC",
-    "HA" : "HA",
-    "PL" : "USECASE",
-    "SA" : "USECASE",
-    "SC" : "SCPF",
-    "SR" : "SR",
-    "US" : "USECASE",
-    "VP" : "USECASE"
+        "FUNC": [ tests: "", nodeName: "VM", wikiContent: "" ],
+        "HA": [ tests: "", nodeName: "VM", wikiContent: "" ],
+        "SCPF": [ tests: "", nodeName: "BM", wikiContent: "" ],
+        "SR": [ tests: "", nodeName: "Fabric", wikiContent: "" ],
+        "USECASE": [ tests: "", nodeName: "BM", wikiContent: "" ]
 ]
 
 // read the parameters from the Jenkins
@@ -79,12 +67,14 @@
 // get branch from parameter if it is manually running
 if ( manually_run ){
     onos_b = params.ONOSVersion
-} else {
+}
+else {
     // otherwise, the version would be different over the weekend.
     // If today is a weekday, it will default to current_version.
     if ( today == Calendar.SATURDAY ){
         onos_b = previous_version
-    } else if( today == Calendar.SUNDAY ){
+    }
+    else if ( today == Calendar.SUNDAY ){
         onos_b = before_previous_version
     }
 }
@@ -109,13 +99,13 @@
 post_result = params.PostResult
 
 // if automatically run, it will remove the comma at the end after dividing the tests.
-if( !manually_run ){
+if ( !manually_run ){
     testDivider( today )
-    FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
-    HA_choices =  triggerFuncs.lastCommaRemover( HA_choices )
-    SCPF_choices =  triggerFuncs.lastCommaRemover( SCPF_choices )
-    USECASE_choices =  triggerFuncs.lastCommaRemover( USECASE_choices )
-    SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
+    FUNC_choices = triggerFuncs.lastCommaRemover( FUNC_choices )
+    HA_choices = triggerFuncs.lastCommaRemover( HA_choices )
+    SCPF_choices = triggerFuncs.lastCommaRemover( SCPF_choices )
+    USECASE_choices = triggerFuncs.lastCommaRemover( USECASE_choices )
+    SR_choices = triggerFuncs.lastCommaRemover( SR_choices )
 }
 
 
@@ -124,7 +114,8 @@
 
     isOldFlow = params.isOldFlow
     println "Tests to be run manually : "
-}else{
+}
+else {
     testcases[ "SCPF" ][ "tests" ] = SCPF_choices
     testcases[ "USECASE" ][ "tests" ] = USECASE_choices
     testcases[ "FUNC" ][ "tests" ] = FUNC_choices
@@ -136,18 +127,20 @@
 triggerFuncs.print_tests( testcases )
 
 def runTest = [
-    "VM" : [:],
-    "BM" : [:]
+        "VM": [ : ],
+        "BM": [ : ]
 ]
 
 // set the test running function into the dictionary.
-for( String test in testcases.keySet() ){
+for ( String test in testcases.keySet() ){
     println test
     if ( testcases[ test ][ "tests" ] != "" ){
-        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
+        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.
+                trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test,
+                                  manually_run, onos_tag )
     }
 }
-def finalList = [:]
+def finalList = [ : ]
 
 // get the name of the job.
 jobName = env.JOB_NAME
@@ -158,10 +151,12 @@
 
 // if the first two characters of the job name are vm, only call VM.
 // else, only on BM
-if( jobName.take( 2 ) == "vm" )
+if ( jobName.take( 2 ) == "vm" ){
     finalList[ "VM" ].call()
-else
+}
+else {
     finalList[ "BM" ].call()
+}
 
 // If it is an automated run, it will generate the stats graph on the VM.
 if ( !manually_run ){
@@ -175,7 +170,7 @@
 
 // function that will divide tests depending on which day it is.
 def testDivider( today ){
-    switch ( today ) {
+    switch ( today ){
         case Calendar.MONDAY:
             // The reason Monday calls all the days is that we want to post the test schedules on the wiki
             // and Slack channel every Monday.
@@ -191,7 +186,9 @@
             day = "Monday"
             closeHtmlForWiki()
             postToWiki( wikiContents )
-            slackSend( color:'#FFD988', message:"Tests to be run this weekdays : \n" + triggerFuncs.printDaysForTest( AllTheTests ) )
+            slackSend( color: '#FFD988',
+                       message: "Tests to be run this weekdays : \n" +
+                                triggerFuncs.printDaysForTest( AllTheTests ) )
             break
         case Calendar.TUESDAY:
             tuesday( false, true )
@@ -214,7 +211,7 @@
             day = "Saturday"
             break
         case Calendar.SUNDAY:
-            sunday( false , true )
+            sunday( false, true )
             day = "Sunday"
             break
     }
@@ -267,6 +264,7 @@
     USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
     closingHeader( "USECASE" )
 }
+
 def wednesday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
@@ -286,6 +284,7 @@
     addingHeader( "USECASE" )
     closingHeader( "USECASE" )
 }
+
 def thursday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
@@ -305,6 +304,7 @@
     addingHeader( "USECASE" )
     closingHeader( "USECASE" )
 }
+
 def friday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
@@ -326,6 +326,7 @@
     addingHeader( "USECASE" )
     closingHeader( "USECASE" )
 }
+
 def saturday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
@@ -352,6 +353,7 @@
     USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
     closingHeader( "USECASE" )
 }
+
 def sunday( getDay, getResult ){
     addingHeader( "FUNC" )
     FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
@@ -386,13 +388,15 @@
     // day : the day you are trying to add (m,t,w,th... )
     // getResult : if we want to get the list of the tests to be run. False will return an empty list.
     //             And once the list is empty, it will not be run.
-    result = ""
-    for( String test in AllTheTests[ testCat ].keySet() ){
-        if( AllTheTests[ testCat ][ test ][ set ] ){
-            if( getResult )
+    def result = ""
+    for ( String test in AllTheTests[ testCat ].keySet() ){
+        if ( AllTheTests[ testCat ][ test ][ set ] ){
+            if ( getResult ){
                 result += test + ","
-            if( dayAdding )
+            }
+            if ( dayAdding ){
                 dayAdder( testCat, test, day )
+            }
             // make HTML columns for wiki page on schedule.
             makeHtmlColList( testCat, test )
         }
@@ -425,7 +429,7 @@
                 <th class="confluenceTh"><p>Saturday</p></th>
                 <th class="confluenceTh"><p>Sunday</p></th>
             </tr>'''
-    for( String test in testcases.keySet() ){
+    for ( String test in testcases.keySet() ){
         testcases[ test ][ 'wikiContent' ] = '''
             <tr>
                 <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
@@ -442,7 +446,7 @@
 // making column list for html
 def makeHtmlColList( testCategory, testName ){
     testcases[ testCategory ][ 'wikiContent' ] += '''
-                        <li>'''+ testName +'''</li>'''
+                        <li>''' + testName + '''</li>'''
 
 }
 
@@ -455,7 +459,7 @@
 
 // close the html for the wiki page.
 def closeHtmlForWiki(){
-    for( String test in testcases.keySet() ){
+    for ( String test in testcases.keySet() ){
         wikiContents += testcases[ test ][ 'wikiContent' ]
         wikiContents += '''
             </tr>'''
@@ -465,13 +469,15 @@
     </table>
     <p><strong>Everyday</strong>, all SegmentRouting tests are built and run on every supported branch.</p>
     <p>On <strong>Weekdays</strong>, all the other tests are built and run on the master branch.</p>
-    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
-    <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
+    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the ''' +
+                    funcs.branchWithPrefix( previous_version ) + ''' branch.</p>
+    <p>On <strong>Sundays</strong>, all the other tests are built and run on the ''' +
+                    funcs.branchWithPrefix( before_previous_version ) + ''' branch.</p>'''
 }
 
 // post the result to wiki page using publish to confluence.
 def postToWiki( contents ){
-    node( testMachine ){
+    node( testMachine ) {
         workspace = fileRelated.jenkinsWorkspace + "all-pipeline-trigger/"
         filename = "jenkinsSchedule.txt"
         writeFile file: workspace + filename, text: contents
diff --git a/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy b/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
index cae7f6f..4c6f021 100644
--- a/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/GeneralFuncs.groovy
@@ -23,19 +23,21 @@
 
 // make the init part of the database command
 def database_command_create( pass, host, port, user ){
-  return pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c "
+    return pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c "
 }
 
 // make the basic graph part for the Rscript
 def basicGraphPart( rFileName, host, port, user, pass, subject, branchName ){
-  return " Rscript " + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + subject + " " + branchName
+    return " Rscript " + rFileName + " " + host + " " + port + " " + user + " " + pass + " " + subject + " " + branchName
 }
 
 // get the list of the tests as a dictionary, then return it as a string
 def getTestList( tests ){
-    list = ""
-    for( String test : tests.keySet() )
+    def list = ""
+    for ( String test : tests.keySet() ){
         list += test + ","
+    }
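+    // list[ 0..-2 ] drops the trailing comma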
     return list[ 0..-2 ]
 }
-return this;
+
+return this
diff --git a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
index b7c9c2e..9cf5e7b 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsCommonFuncs.groovy
@@ -21,138 +21,158 @@
 // This is the dependency Jenkins script.
 // it has some common functions that run tests and generate graphs.
 
-import groovy.time.*
+import groovy.time.TimeCategory
+import groovy.time.TimeDuration
+
 generalFuncs = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/GeneralFuncs.groovy' )
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
 
 def initializeTrend( machine ){
-  // For initializing any trend graph jobs
-  // machine : Either VM,BM, or Fabric#
+    // For initializing any trend graph jobs
+    // machine : Either VM, BM, or Fabric#
 
-  table_name = "executed_test_tests"
-  result_name = "executed_test_results"
-  testMachine = "TestStation-" + machine + "s";
-  this.machine = machine
-  isSCPF = false
-  isTrend = true
+    table_name = "executed_test_tests"
+    result_name = "executed_test_results"
+    testMachine = "TestStation-" + machine + "s"
+    this.machine = machine
+    isSCPF = false
+    isTrend = true
 }
+
 def initialize( type, SCPFfuncs ){
-  // Initializing for SCPF tests
-  // type : type of the test ( SR,FUNC,SCPF... )
-  // Passing the SCPFfunction which will be PerformanceFuncs.groovy
+    // Initializing for SCPF tests
+    // type : type of the test ( SR,FUNC,SCPF... )
+    // Passing the SCPFfunction which will be PerformanceFuncs.groovy
 
-  init( type )
-  SCPFfunc = SCPFfuncs
-  isSCPF = true
-  machine = machineType[ type ]
+    init( type )
+    SCPFfunc = SCPFfuncs
+    isSCPF = true
+    machine = machineType[ type ]
 }
+
 def initialize( type ){
-  // initializing for FUNC,HA,SR, and USECASE
-  // type : type of the test ( SR,FUNC,SCPF... )
+    // initializing for FUNC,HA,SR, and USECASE
+    // type : type of the test ( SR,FUNC,SCPF... )
 
-  init( type )
-  SCPFfunc = null
-  table_name = "executed_test_tests"
-  result_name = "executed_test_results"
-  trend_generator_file = fileRelated.trendMultiple
-  build_stats_generator_file = fileRelated.histogramMultiple
-  isSCPF = false
+    init( type )
+    SCPFfunc = null
+    table_name = "executed_test_tests"
+    result_name = "executed_test_results"
+    trend_generator_file = fileRelated.trendMultiple
+    build_stats_generator_file = fileRelated.histogramMultiple
+    isSCPF = false
 }
+
 def init( type ){
-  // type : type of the test ( SR,FUNC,SCPF... )
+    // type : type of the test ( SR,FUNC,SCPF... )
 
-  machineType = [ "FUNC"    : "VM",
-                  "HA"      : "VM",
-                  "SR"      : "Fabric",
-                  "SCPF"    : "BM",
-                  "USECASE" : "BM" ]
-  testType = type;
-  testMachine = "TestStation-" + machineType[ type ] + "s";
-  isTrend = false
+    machineType = [ "FUNC": "VM",
+                    "HA": "VM",
+                    "SR": "Fabric",
+                    "SCPF": "BM",
+                    "USECASE": "BM" ]
+    testType = type
+    testMachine = "TestStation-" + machineType[ type ] + "s"
+    isTrend = false
 }
+
 def additionalInitForSR( branch ){
-  // additional setup for SegmentRouting tests to determine the machine depends on the branch it is running.
-  // branch : branch of the onos. ( master, 1.12, 1.13... )
+    // additional setup for SegmentRouting tests to determine the machine depending on the branch it is running.
+    // branch : branch of the onos. ( master, 1.12, 1.13... )
 
-  testMachine = ( ( new StringBuilder( testMachine ) ).insert( testMachine.size()-1, fabricOn( branch ) ) ).toString()
-  if( isTrend )
-    machine += fabricOn( branch )
-  else
-    machineType[ testType ] += fabricOn( branch )
-  print testMachine
-}
-def fabricOn( branch ){
-  // gets the fabric machines with the branch of onos.
-  // branch : master, 1.12, 1.13...
-  // branch.reverse().take(4).reverse() will get last 4 characters of the string.
-  switch( branch.reverse().take(4).reverse() ) {
-    case "ster": return "4"
-    case "1.13": return "2"
-    case "1.12": return "3"
-    default: return "4"
-  }
-}
-def printType(){
-  // print the test type and test machine that was initialized.
-
-  echo testType;
-  echo testMachine;
-}
-def getProperties(){
-  // get the properties of the test by reading the TestONOS.property
-
-  node( testMachine ){
-    return readProperties( file:'/var/jenkins/TestONOS.property' );
-  }
-}
-def getTestsToRun( testList ){
-  // get test to run by tokenizing the list.
-
-  testList.tokenize("\n;, ")
-}
-def getCurrentTime(){
-  // get time of the PST zone.
-
-  TimeZone.setDefault( TimeZone.getTimeZone('PST') )
-  return new Date();
-}
-def getTotalTime( start, end ){
-  // get total time of the test using start and end time.
-
-  return TimeCategory.minus( end, start );
-}
-def printTestToRun( testList ){
-  // printout the list of the test in the list.
-
-  for ( String test : testList ) {
-      println test;
-  }
-}
-def sendResultToSlack( start, isManualRun, branch ){
-  // send the result of the test to the slack when it is not manually running.
-  // start : start time of the test
-  // isManualRun : string that is whether "false" or "true"
-  // branch : branch of the onos.
-
-  try{
-    if( isManualRun == "false" ){
-        end = getCurrentTime();
-        TimeDuration duration = TimeCategory.minus( end , start );
-        slackSend( color:"#5816EE",
-                   message: testType + "-" + branch + " tests ended at: " + end.toString() + "\nTime took : " + duration )
+    testMachine = ( ( new StringBuilder( testMachine ) ).insert( testMachine.size() - 1, fabricOn( branch ) ) ).
+            toString()
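+    // e.g. "TestStation-Fabrics" becomes "TestStation-Fabric4s" when running the master branch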
+    if ( isTrend ){
+        machine += fabricOn( branch )
     }
-  }
-  catch( all ){}
+    else {
+        machineType[ testType ] += fabricOn( branch )
+    }
+    print testMachine
 }
-def initAndRunTest( testName, testCategory ){
-  // Bash script that will
-  // Initialize the environment to the machine and run the test.
-  // testName : name of the test
-  // testCategory : (SR,FUNC ... )
 
-  return '''#!/bin/bash -l
+def fabricOn( branch ){
+    // gets the fabric machine number for the given onos branch.
+    // branch : master, 1.12, 1.13...
+    // branch.reverse().take(4).reverse() will get the last 4 characters of the string.
+    switch ( branch.reverse().take( 4 ).reverse() ){
+        case "ster": return "4"
+        case "1.13": return "2"
+        case "1.12": return "3"
+        default: return "4"
+    }
+}
+
+def printType(){
+    // print the test type and test machine that was initialized.
+
+    echo testType
+    echo testMachine
+}
+
+def getProperties(){
+    // get the properties of the test by reading the TestONOS.property
+
+    node( testMachine ) {
+        return readProperties( file: '/var/jenkins/TestONOS.property' )
+    }
+}
+
+def getTestsToRun( testList ){
+    // get test to run by tokenizing the list.
+
+    testList.tokenize( "\n;, " )
+}
+
+def getCurrentTime(){
+    // get time of the PST zone.
+
+    TimeZone.setDefault( TimeZone.getTimeZone( 'PST' ) )
+    return new Date()
+}
+
+def getTotalTime( start, end ){
+    // get total time of the test using start and end time.
+
+    return TimeCategory.minus( end, start )
+}
+
+def printTestToRun( testList ){
+    // print out the list of the tests.
+
+    for ( String test : testList ){
+        println test
+    }
+}
+
+def sendResultToSlack( start, isManualRun, branch ){
+    // send the result of the test to the slack when it is not manually running.
+    // start : start time of the test
+    // isManualRun : string that is either "false" or "true"
+    // branch : branch of the onos.
+
+    try {
+        if ( isManualRun == "false" ){
+            end = getCurrentTime()
+            TimeDuration duration = TimeCategory.minus( end, start )
+            slackSend( color: "#5816EE",
+                       message: testType + "-" + branch + " tests ended at: " + end.toString() +
+                                "\nTime took : " + duration )
+        }
+    }
+    catch ( all ){
+    }
+}
+
+def initAndRunTest( testName, testCategory ){
+    // Bash script that will
+    // initialize the environment on the machine and run the test.
+    // testName : name of the test
+    // testCategory : (SR,FUNC ... )
+
+    return '''#!/bin/bash -l
         set -i # interactive
         set +e
         shopt -s expand_aliases # expand alias in non-interactive mode
@@ -170,19 +190,22 @@
         cd ~/OnosSystemTest/TestON/bin
         git log |head
         ./cleanup.sh -f
-        ''' + "./cli.py run " + ( !isSCPF ? testName : testCategory[ testName ][ 'test' ] )  + " --params GRAPH/nodeCluster=" + machineType[ testType ]  + '''
+        ''' + "./cli.py run " +
+           ( !isSCPF ? testName : testCategory[ testName ][ 'test' ] ) +
+           " --params GRAPH/nodeCluster=" + machineType[ testType ] + '''
         ./cleanup.sh -f
         # cleanup config changes
         cd ~/onos/tools/package/config
         git clean -df'''
 }
-def copyLogs( testName ){
-  // bash script part for copy the logs and other neccessary element for SR tests.
-  // testName : name of the test.
 
-  result = ""
-    if( testType == "SR" ){
-      result = '''
+def copyLogs( testName ){
+    // bash script part for copying the logs and other necessary elements for SR tests.
+    // testName : name of the test.
+
+    result = ""
+    if ( testType == "SR" ){
+        result = '''
       sudo rm /var/jenkins/workspace/SR-log-${WikiPrefix}/*
       sudo cp *karaf.log.* /var/jenkins/workspace/SR-log-${WikiPrefix}/
       sudo cp *Flows* /var/jenkins/workspace/SR-log-${WikiPrefix}/
@@ -190,14 +213,15 @@
       sudo cp *.tar.gz /var/jenkins/workspace/SR-log-${WikiPrefix}/
       sudo cp t3-* /var/jenkins/workspace/SR-log-${WikiPrefix}/
       '''
-  }
-  return result
+    }
+    return result
 }
-def cleanAndCopyFiles( testName ){
-  // clean up some files that were in the folder and copy the new files from the log
-  // testName : name of the test
 
-  return '''#!/bin/bash -i
+def cleanAndCopyFiles( testName ){
+    // clean up some files that were in the folder and copy the new files from the log
+    // testName : name of the test
+
+    return '''#!/bin/bash -i
         set +e
         echo "ONOS Branch is: ${ONOSBranch}"
         echo "TestON Branch is: ${TestONBranch}"
@@ -221,11 +245,12 @@
         ls -al
         cd '''
 }
-def fetchLogs( testName ){
-  // fetch the logs of onos from onos nodes to onos System Test logs
-  // testName: name of the test
 
-  return '''#!/bin/bash
+def fetchLogs( testName ){
+    // fetch the logs of onos from onos nodes to onos System Test logs
+    // testName: name of the test
+
+    return '''#!/bin/bash
   set +e
   cd ~/OnosSystemTest/TestON/logs
   echo "Job Name is: " + ''' + testName + '''
@@ -243,252 +268,291 @@
   fi
   cd'''
 }
+
 def isPostingResult( manual, postresult ){
-  // check if it is posting the result.
-  // posting when it is automatically running or has postResult condition from the manual run
+    // check if it is posting the result.
+    // posts when it is automatically running or when the manual run has the postResult condition
 
-  return manual == "false" || postresult == "true"
+    return manual == "false" || postresult == "true"
 }
+
 def postResult( prop, graphOnly ){
-  // post the result by triggering postjob.
-  // prop : property dictionary that was read from the machine.
-  // graphOnly : if it is graph generating job
+    // post the result by triggering postjob.
+    // prop : property dictionary that was read from the machine.
+    // graphOnly : if it is a graph generating job
 
-  if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
-    def post = build job: "postjob-" + ( graphOnly ? machine : machineType[ testType ] ), propagate: false
-  }
-}
-def postLogs( testName, prefix ){
-  // posting logs of the onos jobs specifically SR tests
-  // testName : name of the test
-  // prefix : branch prefix ( master, 1.12, 1.13 ... )
-
-  resultURL = ""
-  if( testType == "SR" ){
-    def post = build job: "SR-log-" + prefix, propagate: false
-    resultURL = post.getAbsoluteUrl()
-  }
-  return resultURL
-}
-def getSlackChannel(){
-  // get name of the slack channel.
-  // if the test is SR, it will return sr-failures
-
-  return "#" + ( testType == "SR" ? "sr-failures" : "jenkins-related" )
-}
-def analyzeResult( prop, workSpace, testName, otherTestName, resultURL, wikiLink, isSCPF ){
-  // analyzing the result of the test and send to slack if the test was failed.
-  // prop : property dictionary
-  // workSpace : workSpace where the result file is saved
-  // testName : real name of the test
-  // otherTestName : other name of the test for SCPF tests ( SCPFflowTPFobj )
-  // resultURL : url for the logs for SR tests. Will not be posted if it is empty
-  // wikiLink : link of the wiki page where the result was posted
-  // isSCPF : Check if it is SCPF. If so, it won't post the wiki link.
-
-  node( testMachine ){
-    resultContents = readFile workSpace + "/" + testName + "Result.txt"
-    resultContents = resultContents.split("\n")
-    if( resultContents[ 0 ] == "1" ){
-        print "All passed"
-    }else{
-        print "Failed"
-      if( prop[ "manualRun" ] == "false" ){
-        slackSend( channel:getSlackChannel(), color:"FF0000", message: "[" + prop[ "ONOSBranch" ] + "]"
-                                            + otherTestName + " : Failed!\n" + resultContents[ 1 ] + "\n"
-                                            + "[TestON log] : \n"
-                                            + "https://jenkins.onosproject.org/blue/organizations/jenkins/${env.JOB_NAME}/detail/${env.JOB_NAME}/${env.BUILD_NUMBER}/pipeline"
-                                            + ( isSCPF ? "" : ( "\n[Result on Wiki] : \n" + "https://wiki.onosproject.org/display/ONOS/" + wikiLink.replaceAll( "\\s","+" ) ) )
-                                            + ( resultURL != "" ? ( "\n[Karaf log] : \n" + resultURL + "artifact/" ) : "" ),
-                   teamDomain: 'onosproject' )
-      }
-        Failed
+    if ( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+        def post = build job: "postjob-" + ( graphOnly ? machine : machineType[ testType ] ), propagate: false
     }
-  }
 }
+
+def postLogs( testName, prefix ){
+    // posting logs of the onos jobs specifically SR tests
+    // testName : name of the test
+    // prefix : branch prefix ( master, 1.12, 1.13 ... )
+
+    resultURL = ""
+    if ( testType == "SR" ){
+        def post = build job: "SR-log-" + prefix, propagate: false
+        resultURL = post.getAbsoluteUrl()
+    }
+    return resultURL
+}
+
+def getSlackChannel(){
+    // get name of the slack channel.
+    // if the test is SR, it will return sr-failures
+
+    return "#" + ( testType == "SR" ? "sr-failures" : "jenkins-related" )
+}
+
+def analyzeResult( prop, workSpace, testName, otherTestName, resultURL, wikiLink, isSCPF ){
+    // analyzing the result of the test and sending it to slack if the test failed.
+    // prop : property dictionary
+    // workSpace : workSpace where the result file is saved
+    // testName : real name of the test
+    // otherTestName : other name of the test for SCPF tests ( SCPFflowTPFobj )
+    // resultURL : url for the logs for SR tests. Will not be posted if it is empty
+    // wikiLink : link of the wiki page where the result was posted
+    // isSCPF : Check if it is SCPF. If so, it won't post the wiki link.
+
+    node( testMachine ) {
+        def resultContents = readFile( workSpace + "/" + testName + "Result.txt" )
+        resultContents = resultContents.split( "\n" )
+        if ( resultContents[ 0 ] == "1" ){
+            print "All passed"
+        }
+        else {
+            print "Failed"
+            if ( prop[ "manualRun" ] == "false" ){
+                slackSend( channel: getSlackChannel(),
+                           color: "FF0000",
+                           message: "[" + prop[ "ONOSBranch" ] + "]" + otherTestName + " : Failed!\n" +
+                                    resultContents[ 1 ] + "\n" +
+                                    "[TestON log] : \n" +
+                                    "https://jenkins.onosproject.org/blue/organizations/jenkins/${ env.JOB_NAME }/detail/${ env.JOB_NAME }/${ env.BUILD_NUMBER }/pipeline" +
+                                    ( isSCPF ? "" : ( "\n[Result on Wiki] : \n" +
+                                                      "https://wiki.onosproject.org/display/ONOS/" +
+                                                      wikiLink.replaceAll( "\\s", "+" ) ) ) +
+                                    ( resultURL != "" ? ( "\n[Karaf log] : \n" +
+                                                          resultURL + "artifact/" ) : "" ),
+                           teamDomain: 'onosproject' )
+            }
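+            // NOTE: the bare 'Failed' reference below throws a MissingPropertyException,
+            // presumably to mark the stage as failed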
+            Failed
+        }
+    }
+}
+
 def publishToConfluence( isManualRun, isPostResult, wikiLink, file ){
-  // publish HTML script to wiki confluence
-  // isManualRun : string "true" "false"
-  // isPostResult : string "true" "false"
-  // wikiLink : link of the wiki page to publish
-  // file : name of the file to be published
+    // publish HTML script to wiki confluence
+    // isManualRun : string "true" "false"
+    // isPostResult : string "true" "false"
+    // wikiLink : link of the wiki page to publish
+    // file : name of the file to be published
 
-  if( isPostingResult( isManualRun, isPostResult ) ){
-    publishConfluence siteName: 'wiki.onosproject.org', pageName: wikiLink, spaceName: 'ONOS',
-                  attachArchivedArtifacts: true, buildIfUnstable: true,
-                  editorList: [
-                      confluenceWritePage( confluenceFile( file ) )
-                  ]
-  }
+    if ( isPostingResult( isManualRun, isPostResult ) ){
+        publishConfluence siteName: 'wiki.onosproject.org', pageName: wikiLink, spaceName: 'ONOS',
+                          attachArchivedArtifacts: true, buildIfUnstable: true,
+                          editorList: [ confluenceWritePage( confluenceFile( file ) ) ]
+    }
 
 }
-def runTest( testName, toBeRun, prop, pureTestName, graphOnly, testCategory, graph_generator_file, graph_saved_directory ) {
-  // run the test on the machine that contains all the steps : init and run test, copy files, publish result ...
-  // testName : name of the test
-  // toBeRun : boolean value whether the test will be run or not. If not, it won't be run but shows up with empty result on pipeline view
-  // prop : dictionary property on the machine
-  // pureTestName : Pure name of the test. ( ex. pureTestName of SCPFflowTpFobj will be SCPFflowTp )
-  // graphOnly : check if it is generating graph job. If so, it will only generate the generating graph part
-  // testCategory : category of the test ( SCPF, SR, FUNC ... )
-  // graph_generator_file : Rscript file with the full path.
-  // graph_saved_directory : where the generated graph will be saved to.
 
-  return {
-      catchError{
-          stage( testName ) {
-              if ( toBeRun ){
-                  workSpace = "/var/jenkins/workspace/" + testName
-                  def fileContents = ""
-                  node( testMachine ){
-                      withEnv( [ 'ONOSBranch=' + prop[ "ONOSBranch" ],
-                                 'ONOSJVMHeap=' + prop[ "ONOSJVMHeap" ],
-                                 'TestONBranch=' + prop[ "TestONBranch" ],
-                                 'ONOSTag=' + prop[ "ONOSTag" ],
-                                 'WikiPrefix=' + prop[ "WikiPrefix" ],
-                                 'WORKSPACE=' + workSpace ] ){
-                        if( ! graphOnly ){
-                          sh initAndRunTest( testName, testCategory )
-                          // For the Wiki page
-                          sh cleanAndCopyFiles( pureTestName )
+def runTest( testName, toBeRun, prop, pureTestName, graphOnly, testCategory, graph_generator_file,
+             graph_saved_directory ){
+    // run the test on the machine that contains all the steps : init and run test, copy files, publish result ...
+    // testName : name of the test
+    // toBeRun : boolean value for whether the test will be run or not. If not, it won't be run but will show up
+    //           with an empty result on the pipeline view
+    // prop : dictionary property on the machine
+    // pureTestName : Pure name of the test. ( ex. pureTestName of SCPFflowTpFobj will be SCPFflowTp )
+    // graphOnly : check if it is a graph generating job. If so, it will only run the graph generating part
+    // testCategory : category of the test ( SCPF, SR, FUNC ... )
+    // graph_generator_file : Rscript file with the full path.
+    // graph_saved_directory : where the generated graph will be saved to.
+
+    return {
+        catchError {
+            stage( testName ) {
+                if ( toBeRun ){
+                    def workSpace = "/var/jenkins/workspace/" + testName
+                    def fileContents = ""
+                    node( testMachine ) {
+                        withEnv( [ 'ONOSBranch=' + prop[ "ONOSBranch" ],
+                                   'ONOSJVMHeap=' + prop[ "ONOSJVMHeap" ],
+                                   'TestONBranch=' + prop[ "TestONBranch" ],
+                                   'ONOSTag=' + prop[ "ONOSTag" ],
+                                   'WikiPrefix=' + prop[ "WikiPrefix" ],
+                                   'WORKSPACE=' + workSpace ] ) {
+                            if ( !graphOnly ){
+                                sh initAndRunTest( testName, testCategory )
+                                // For the Wiki page
+                                sh cleanAndCopyFiles( pureTestName )
+                            }
+                            databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory )
+                            if ( !graphOnly ){
+                                sh fetchLogs( pureTestName )
+                                if ( !isSCPF ){
+                                    publishToConfluence( prop[ "manualRun" ], prop[ "postResult" ],
+                                                         testCategory[ testName ][ 'wiki_link' ],
+                                                         workSpace + "/" + testCategory[ testName ][ 'wiki_file' ] )
+                                }
+                            }
                         }
-                          databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory )
-                        if( ! graphOnly ){
-                          sh fetchLogs( pureTestName )
-                          if( !isSCPF )
-                            publishToConfluence( prop[ "manualRun" ], prop[ "postResult" ],
-                                                 testCategory[ testName ][ 'wiki_link' ],
-                                                 workSpace + "/" + testCategory[ testName ][ 'wiki_file' ] )
-                        }
-                      }
-
-
-                  }
+                    }
                     postResult( prop, graphOnly )
-                  if( ! graphOnly ){
-                    resultURL = postLogs( testName, prop[ "WikiPrefix" ] )
-                    analyzeResult( prop, workSpace, pureTestName, testName, resultURL, isSCPF ? "" : testCategory[ testName ][ 'wiki_link' ], isSCPF )
-                  }
-              }
-          }
-      }
-  }
+                    if ( !graphOnly ){
+                        resultURL = postLogs( testName, prop[ "WikiPrefix" ] )
+                        analyzeResult( prop, workSpace, pureTestName, testName, resultURL,
+                                       isSCPF ? "" : testCategory[ testName ][ 'wiki_link' ],
+                                       isSCPF )
+                    }
+                }
+            }
+        }
+    }
 }
-def databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory ){
-  // part where it insert the data into the database.
-  // It will use the predefined encrypted variables from the Jenkins.
-  // prop : property dictionary that was read from the machine
-  // testName : name of the test
-  // graphOnly : boolean whether it is graph only or not
-  // graph_generator_file : Rscript file with the full path.
-  // graph_saved_directory : where the generated graph will be saved to.
 
-  if( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
-      // Post Results
-      withCredentials( [
-          string( credentialsId: 'db_pass', variable: 'pass' ),
-          string( credentialsId: 'db_user', variable: 'user' ),
-          string( credentialsId: 'db_host', variable: 'host' ),
-          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
-              def database_command =  generalFuncs.database_command_create( pass, host, port, user ) + ( !isSCPF ? sqlCommand( testName ) : SCPFfunc.sqlCommand( testName ) )
-              sh '''#!/bin/bash
+def databaseAndGraph( prop, testName, graphOnly, graph_generator_file, graph_saved_directory ){
+    // part where it inserts the data into the database.
+    // It will use the predefined encrypted variables from Jenkins.
+    // prop : property dictionary that was read from the machine
+    // testName : name of the test
+    // graphOnly : boolean whether it is graph only or not
+    // graph_generator_file : Rscript file with the full path.
+    // graph_saved_directory : where the generated graph will be saved to.
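+    // Note: when graphOnly is true, the database-insert part below is skipped and
+    // only the graph-generating command runs.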
+
+    if ( graphOnly || isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+        // Post Results
+        withCredentials( [
+                string( credentialsId: 'db_pass', variable: 'pass' ),
+                string( credentialsId: 'db_user', variable: 'user' ),
+                string( credentialsId: 'db_host', variable: 'host' ),
+                string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+            def database_command = generalFuncs.database_command_create( pass, host, port, user ) +
+                                   ( !isSCPF ? sqlCommand( testName ) : SCPFfunc.sqlCommand( testName ) )
+            sh '''#!/bin/bash
               export DATE=\$(date +%F_%T)
               cd ~
-              pwd ''' + ( graphOnly ? "" : ( !isSCPF ? databasePart( prop[ "WikiPrefix" ], testName, database_command )  :
-                         SCPFfunc.databasePart( testName, database_command ) ) ) + '''
-              ''' + ( !isSCPF ? graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory, graph_generator_file ) : SCPFfunc.getGraphGeneratingCommand( host, port, user, pass, testName, prop ) )
-      }
-  }
-}
-def generateCategoryStatsGraph( testMachineOn, manualRun, postresult, stat_file, pie_file, type, branch, testListPart, save_path, pieTestListPart ){
-  // function that will generate the category stat graphs for the overall test.
-  // testMachineOn : the machine the graph will be generated. It will be TestStation-VMs for the most cases
-  // manualRun : string of "true" or "false"
-  // postresult : string of "true" or "false"
-  // stat_file : file name with full path for Rscript for the stat graph
-  // pie_file : file name with full path for Rscript for the pie graph
-  // type : type of the test ( USECASE, FUNC, HA )
-  // branch : branch of the test ( master, onos-1.12, onos-1.13 )
-  // testListPart : list of the test to be included
-  // save_path : path that will save the graphs to
-  // pieTestListPart : list of the test for pie graph
-
-  if( isPostingResult( manualRun, postresult ) ){
-    node( testMachineOn ){
-
-      withCredentials( [
-          string( credentialsId: 'db_pass', variable: 'pass' ),
-          string( credentialsId: 'db_user', variable: 'user' ),
-          string( credentialsId: 'db_host', variable: 'host' ),
-          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
-              sh '''#!/bin/bash
-              ''' + generalFuncs.basicGraphPart( stat_file, host, port, user, pass, type, branch ) + " \"" + testListPart + "\" latest " + save_path + '''
-              ''' + getOverallPieGraph( pie_file, host, port, user, pass, branch, type, pieTestListPart, 'y', save_path ) + '''
-              ''' + getOverallPieGraph( pie_file, host, port, user, pass, branch, type, pieTestListPart, 'n', save_path )
-          }
+              pwd ''' + ( graphOnly ? "" :
+                          ( !isSCPF ? databasePart( prop[ "WikiPrefix" ], testName, database_command ) :
+                            SCPFfunc.databasePart( testName, database_command ) ) ) + '''
+              ''' + ( !isSCPF ? graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory,
+                                                 graph_generator_file ) :
+                      SCPFfunc.getGraphGeneratingCommand( host, port, user, pass, testName, prop ) )
         }
-      postResult( [], true )
     }
 }
+
+def generateCategoryStatsGraph( testMachineOn, manualRun, postresult, stat_file, pie_file, type, branch, testListPart,
+                                save_path, pieTestListPart ){
+    // function that will generate the category stat graphs for the overall test.
+    // testMachineOn : the machine where the graph will be generated. It will be TestStation-VMs in most cases
+    // manualRun : string of "true" or "false"
+    // postresult : string of "true" or "false"
+    // stat_file : file name with full path for Rscript for the stat graph
+    // pie_file : file name with full path for Rscript for the pie graph
+    // type : type of the test ( USECASE, FUNC, HA )
+    // branch : branch of the test ( master, onos-1.12, onos-1.13 )
+    // testListPart : list of the test to be included
+    // save_path : path that will save the graphs to
+    // pieTestListPart : list of the test for pie graph
+
+    if ( isPostingResult( manualRun, postresult ) ){
+        node( testMachineOn ) {
+
+            withCredentials( [
+                    string( credentialsId: 'db_pass', variable: 'pass' ),
+                    string( credentialsId: 'db_user', variable: 'user' ),
+                    string( credentialsId: 'db_host', variable: 'host' ),
+                    string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+                sh '''#!/bin/bash
+              ''' + generalFuncs.basicGraphPart( stat_file, host, port, user, pass, type,
+                                                 branch ) + " \"" + testListPart + "\" latest " + save_path + '''
+              ''' + getOverallPieGraph( pie_file, host, port, user, pass, branch, type, pieTestListPart, 'y',
+                                        save_path ) + '''
+              ''' +
+                   getOverallPieGraph( pie_file, host, port, user, pass, branch, type, pieTestListPart, 'n', save_path )
+            }
+        }
+        postResult( [ ], true )
+    }
+}
+
 def makeTestList( list, commaNeeded ){
-  // make the list of the test in to a string.
-  // list : list of the test
-  // commaNeeded : if comma is needed for the string
+    // turn the list of tests into a string.
+    // list : list of the tests
+    // commaNeeded : whether a trailing comma is needed for the string
 
-  return generalFuncs.getTestList( list ) + ( commaNeeded ? "," : "" )
+    return generalFuncs.getTestList( list ) + ( commaNeeded ? "," : "" )
 }
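+
+// Illustration only ( hypothetical input; assumes generalFuncs.getTestList joins
+// the map's test names with commas ):
+//     makeTestList( [ "FUNCflow": [ : ], "FUNCgroup": [ : ] ], true )
+//     // -> "FUNCflow,FUNCgroup,"  ( trailing comma so lists can be concatenated )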
+
 def createStatsList( testCategory, list, semiNeeded ){
-  // make the list for stats
-  // testCategory : category of the test
-  // list : list of the test
-  // semiNeeded: if semi colon is needed
+    // make the list for stats
+    // testCategory : category of the test
+    // list : list of the tests
+    // semiNeeded : whether a trailing semicolon is needed
 
-  return testCategory + "-" + generalFuncs.getTestList( list ) + ( semiNeeded ? ";" : "" )
+    return testCategory + "-" + generalFuncs.getTestList( list ) + ( semiNeeded ? ";" : "" )
 }
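+
+// Illustration only ( same getTestList assumption as above ):
+//     createStatsList( "FUNC", [ "FUNCflow": [ : ] ], true )
+//     // -> "FUNC-FUNCflow;"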
+
 def generateOverallGraph( prop, testCategory, graph_saved_directory ){
-  // generate the overall graph for the test
+    // generate the overall graph for the test
 
-  if( isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
-    node( testMachine ){
+    if ( isPostingResult( prop[ "manualRun" ], prop[ "postResult" ] ) ){
+        node( testMachine ) {
 
-      withCredentials( [
-          string( credentialsId: 'db_pass', variable: 'pass' ),
-          string( credentialsId: 'db_user', variable: 'user' ),
-          string( credentialsId: 'db_host', variable: 'host' ),
-          string( credentialsId: 'db_port', variable: 'port' ) ] ) {
-              testList = generalFuncs.getTestList( testCategory )
-              sh '''#!/bin/bash
-              ''' + generalFuncs.basicGraphPart( trend_generator_file, host, port, user, pass, testType, prop[ "ONOSBranch" ] ) + " " + testList + " 20 " + graph_saved_directory
-          }
+            withCredentials( [
+                    string( credentialsId: 'db_pass', variable: 'pass' ),
+                    string( credentialsId: 'db_user', variable: 'user' ),
+                    string( credentialsId: 'db_host', variable: 'host' ),
+                    string( credentialsId: 'db_port', variable: 'port' ) ] ) {
+                testList = generalFuncs.getTestList( testCategory )
+                sh '''#!/bin/bash
+                   ''' +
+                   generalFuncs.basicGraphPart( trend_generator_file, host, port,
+                                                user, pass, testType,
+                                                prop[ "ONOSBranch" ] ) + " " + testList + " 20 " + graph_saved_directory
+            }
         }
-      postResult( prop, false )
+        postResult( prop, false )
     }
 }
+
 def getOverallPieGraph( file, host, port, user, pass, branch, type, testList, yOrN, path ){
-   // Rcommand for the pie graph
+    // R command for the pie graph
 
-   return generalFuncs.basicGraphPart( file, host, port, user, pass, type, branch ) + " \"" + testList + "\" latest " + yOrN + " " + path
+    return generalFuncs.basicGraphPart( file, host, port, user, pass, type, branch ) +
+           " \"" + testList + "\" latest " + yOrN + " " + path
 }
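+
+// Illustration only: whatever generalFuncs.basicGraphPart produces, the command
+// above appends the quoted test list plus fixed arguments, e.g. ( hypothetical
+// values ):
+//     ... "FUNCflow,FUNCgroup" latest y /path/to/saved/graphs/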
+
 def sqlCommand( testName ){
-  // get the inserting sqlCommand for non-SCPF tests.
+    // get the SQL insert command for non-SCPF tests.
 
-  return "\"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" + testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\" "
+    return "\"INSERT INTO " + table_name + " VALUES('\$DATE','" + result_name + "','" +
+           testName + "',\$BUILD_NUMBER, '\$ONOSBranch', \$line);\" "
 }
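+
+// Illustration only ( table_name and result_name are defined elsewhere in this
+// script; the \$ variables are left for bash to expand ):
+//     INSERT INTO <table_name> VALUES('$DATE','<result_name>','FUNCflow',$BUILD_NUMBER, '$ONOSBranch', $line);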
+
 def graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory, graph_generator_file ){
-  // get the graphGenerating R command for non-SCPF tests
+    // get the graphGenerating R command for non-SCPF tests
 
-  return generalFuncs.basicGraphPart( graph_generator_file, host, port, user, pass, testName, prop[ "ONOSBranch" ] ) + " 20 " + graph_saved_directory
+    return generalFuncs.basicGraphPart( graph_generator_file, host, port, user, pass, testName,
+                                        prop[ "ONOSBranch" ] ) + " 20 " + graph_saved_directory
 }
-def databasePart( wikiPrefix, testName, database_command ){
-  // to read and insert the data from .csv to the database
 
-  return '''
+def databasePart( wikiPrefix, testName, database_command ){
+    // read the data from the .csv file and insert it into the database
+
+    return '''
     sed 1d ''' + workSpace + "/" + wikiPrefix + "-" + testName + '''.csv | while read line
     do
     echo \$line
     echo ''' + database_command + '''
     done '''
 }
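+
+// The returned snippet is bash along these lines ( illustration only ):
+//     sed 1d <workSpace>/<wikiPrefix>-<testName>.csv | while read line; do ... ; done
+// `sed 1d` drops the CSV header row before the data rows are processed.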
-def generateStatGraph( testMachineOn, onos_branch, AllTheTests, stat_graph_generator_file, pie_graph_generator_file, graph_saved_directory ){
+
+def generateStatGraph( testMachineOn, onos_branch, AllTheTests, stat_graph_generator_file, pie_graph_generator_file,
+                       graph_saved_directory ){
     // Will generate the stats graph.
 
     testListPart = createStatsList( "FUNC", AllTheTests[ "FUNC" ], true ) +
@@ -497,10 +561,13 @@
     pieTestList = makeTestList( AllTheTests[ "FUNC" ], true ) +
                   makeTestList( AllTheTests[ "HA" ], true ) +
                   makeTestList( AllTheTests[ "USECASE" ], false )
-    generateCategoryStatsGraph( testMachineOn, "false", "true", stat_graph_generator_file, pie_graph_generator_file, "ALL", onos_branch, testListPart, graph_saved_directory, pieTestList )
+    generateCategoryStatsGraph( testMachineOn, "false", "true", stat_graph_generator_file, pie_graph_generator_file,
+                                "ALL", onos_branch, testListPart, graph_saved_directory, pieTestList )
 }
+
 def branchWithPrefix( branch ){
     // get the branch with the prefix ( "onos-" )
     return ( ( branch != "master" ) ? "onos-" : "" ) + branch
 }
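+
+// Illustration only:
+//     branchWithPrefix( "master" )   // -> "master"
+//     branchWithPrefix( "1.13" )     // -> "onos-1.13"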
-return this;
+
+return this
diff --git a/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy b/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
index f8700c9..6eb1061 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy
@@ -46,5 +46,6 @@
     initLocation()
     initFiles()
 }
-return this;
+
+return this
 
diff --git a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
index b23591b..d5c5166 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsTestONTests.groovy
@@ -34,74 +34,473 @@
     // wiki_link : link of the wiki page that will be used to publish to confluence later on. SCPF tests don't need one.
 
     return [
-        "FUNC":[
-                "FUNCipv6Intent" : [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCipv6Intent", wiki_file:"FUNCipv6IntentWiki.txt" ],
-                "FUNCoptical" :    [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCoptical", wiki_file:"FUNCopticalWiki.txt" ],
-                "FUNCflow" :       [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCflow", wiki_file:"FUNCflowWiki.txt" ],
-                "FUNCnetCfg":      [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCnetCfg", wiki_file:"FUNCnetCfgWiki.txt" ],
-                "FUNCovsdbtest" :  [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCovsdbtest", wiki_file:"FUNCovsdbtestWiki.txt" ],
-                "FUNCnetconf" :    [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCnetconf", wiki_file:"FUNCnetconfWiki.txt" ],
-                "FUNCgroup" :      [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCgroup", wiki_file:"FUNCgroupWiki.txt" ],
-                "FUNCintent" :     [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCintent", wiki_file:"FUNCintentWiki.txt" ],
-                "FUNCintentRest" : [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCintentRest", wiki_file:"FUNCintentRestWiki.txt" ],
-                "FUNCformCluster" :[ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCformCluster", wiki_file:"FUNCformClusterWiki.txt" ]
-        ],
-        "HA":[
-                "HAsanity" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Sanity", wiki_file:"HAsanityWiki.txt"  ],
-                "HAclusterRestart" :        [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Cluster Restart", wiki_file:"HAclusterRestartWiki.txt"  ],
-                "HAsingleInstanceRestart" : [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Single Instance Restart", wiki_file:"HAsingleInstanceRestartWiki.txt"  ],
-                "HAstopNodes" :             [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Stop Nodes", wiki_file:"HAstopNodes.txt"  ],
-                "HAfullNetPartition" :      [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Full Network Partition", wiki_file:"HAfullNetPartitionWiki.txt"  ],
-                "HAswapNodes" :             [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Swap Nodes", wiki_file:"HAswapNodesWiki.txt"  ],
-                "HAscaling" :               [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Scaling", wiki_file:"HAscalingWiki.txt"  ],
-                "HAkillNodes" :             [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Kill Nodes", wiki_file:"HAkillNodes.txt" ],
-                "HAbackupRecover" :         [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Backup Recover", wiki_file:"HAbackupRecoverWiki.txt"  ],
-                "HAupgrade" :               [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Upgrade", wiki_file:"HAupgradeWiki.txt"  ],
-                "HAupgradeRollback" :       [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "HA Upgrade Rollback", wiki_file:"HAupgradeRollbackWiki.txt" ]
-        ],
-        "SCPF":[
-                "SCPFswitchLat":                           [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFcbench":                              [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFportLat":                             [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFflowTp1g":                            [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentEventTp":                       [ "basic":true, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFhostLat":                             [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFbatchFlowResp":                       [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentRerouteLat":                    [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentInstallWithdrawLat":            [ "basic":false, "extra_A":true, "extra_B":false, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFflowTp1gWithFlowObj":                 [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentEventTpWithFlowObj":            [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentRerouteLatWithFlowObj":         [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFscalingMaxIntentsWithFlowObj":        [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFintentInstallWithdrawLatWithFlowObj": [ "basic":false, "extra_A":false, "extra_B":true, "extra_C":false, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFscaleTopo":                           [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":true, "extra_D":false, "new_Test":false, day:"" ],
-                "SCPFscalingMaxIntents":                   [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":true, "new_Test":false, day:"" ],
-                "SCPFmastershipFailoverLat":               [ "basic":false, "extra_A":false, "extra_B":false, "extra_C":false, "extra_D":true, "new_Test":false, day:"" ]
-        ],
-        "USECASE":[
-                "FUNCvirNetNB" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCvirNetNB", wiki_file:"FUNCvirNetNBWiki.txt"  ],
-                "FUNCbgpls" :                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "FUNCbgpls", wiki_file:"FUNCbgplsWiki.txt"  ],
-                "VPLSBasic" :                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "VPLSBasic", wiki_file:"VPLSBasicWiki.txt"  ],
-                "VPLSfailsafe" :                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "VPLSfailsafe", wiki_file:"VPLSfailsafeWiki.txt"  ],
-                "USECASE_SdnipFunction":        [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SDNIP Function", wiki_file:"USECASE_SdnipFunctionWiki.txt"  ],
-                "USECASE_SdnipFunctionCluster": [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SDNIP Function Cluster", wiki_file:"USECASE_SdnipFunctionClusterWiki.txt" ],
-                "PLATdockertest":               [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:"Docker Images sanity test", wiki_file:"PLATdockertestTableWiki.txt"  ]
-        ],
-        "SR":[
-                "SRBridging":                   [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Bridging", wiki_file:"SRBridgingWiki.txt" ],
-                "SRRouting":                    [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Routing", wiki_file:"SRRoutingWiki.txt" ],
-                "SRDhcprelay":                  [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Dhcp Relay", wiki_file:"SRDhcprelayWiki.txt" ],
-                "SRDynamicConf":                [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Dynamic Config", wiki_file:"SRDynamicConfWiki.txt" ],
-                "SRMulticast":                  [ "basic":true, "extra_A":false, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Multi Cast", wiki_file:"SRMulticastWiki.txt" ],
-                "SRSanity":                     [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Sanity", wiki_file:"SRSanityWiki.txt"  ],
-                "SRSwitchFailure":              [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Switch Failure", wiki_file:"SRSwitchFailureWiki.txt"  ],
-                "SRLinkFailure":                [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Link Failure", wiki_file:"SRLinkFailureWiki.txt"  ],
-                "SROnosFailure":                [ "basic":false, "extra_A":true, "extra_B":false, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Onos node Failure", wiki_file:"SROnosFailureWiki.txt"  ],
-                "SRClusterRestart":             [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Cluster Restart", wiki_file:"SRClusterRestartWiki.txt"  ],
-                "SRDynamic":                    [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR Dynamic", wiki_file:"SRDynamicWiki.txt"  ],
-                "SRHighAvailability":           [ "basic":false, "extra_A":false, "extra_B":true, "new_Test":false, "day":"", wiki_link:wikiPrefix + "-" + "SR High Availability", wiki_file:"SRHighAvailabilityWiki.txt"  ]
-        ]
-    ];
+            "FUNC": [
+                    "FUNCipv6Intent": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCipv6Intent",
+                            wiki_file: "FUNCipv6IntentWiki.txt" ],
+                    "FUNCoptical": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCoptical",
+                            wiki_file: "FUNCopticalWiki.txt" ],
+                    "FUNCflow": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCflow",
+                            wiki_file: "FUNCflowWiki.txt" ],
+                    "FUNCnetCfg": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCnetCfg",
+                            wiki_file: "FUNCnetCfgWiki.txt" ],
+                    "FUNCovsdbtest": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCovsdbtest",
+                            wiki_file: "FUNCovsdbtestWiki.txt" ],
+                    "FUNCnetconf": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCnetconf",
+                            wiki_file: "FUNCnetconfWiki.txt" ],
+                    "FUNCgroup": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCgroup",
+                            wiki_file: "FUNCgroupWiki.txt" ],
+                    "FUNCintent": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCintent",
+                            wiki_file: "FUNCintentWiki.txt" ],
+                    "FUNCintentRest": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "FUNCintentRest",
+                            wiki_file: "FUNCintentRestWiki.txt" ],
+                    "FUNCformCluster": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " FUNCformCluster ",
+                            wiki_file: "FUNCformClusterWiki.txt" ]
+            ],
+            "HA": [
+                    "HAsanity": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Sanity",
+                            wiki_file: "HAsanityWiki.txt" ],
+                    "HAclusterRestart": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Cluster Restart",
+                            wiki_file: "HAclusterRestartWiki.txt" ],
+                    "HAsingleInstanceRestart": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Single Instance Restart",
+                            wiki_file: "HAsingleInstanceRestartWiki.txt" ],
+                    "HAstopNodes": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Stop Nodes",
+                            wiki_file: "HAstopNodes.txt" ],
+                    "HAfullNetPartition": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Full Network Partition",
+                            wiki_file: "HAfullNetPartitionWiki.txt" ],
+                    "HAswapNodes": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Swap Nodes",
+                            wiki_file: "HAswapNodesWiki.txt" ],
+                    "HAscaling": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Scaling",
+                            wiki_file: "HAscalingWiki.txt" ],
+                    "HAkillNodes": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Kill Nodes",
+                            wiki_file: "HAkillNodes.txt" ],
+                    "HAbackupRecover": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Backup Recover",
+                            wiki_file: "HAbackupRecoverWiki.txt" ],
+                    "HAupgrade": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Upgrade",
+                            wiki_file: "HAupgradeWiki.txt" ],
+                    "HAupgradeRollback": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + "-" + "HA Upgrade Rollback",
+                            wiki_file: "HAupgradeRollbackWiki.txt" ]
+            ],
+            "SCPF": [
+                    "SCPFswitchLat": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": "" ],
+                    "SCPFcbench": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFportLat": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFflowTp1g": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentEventTp": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFhostLat": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFbatchFlowResp": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentRerouteLat": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentInstallWithdrawLat": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFflowTp1gWithFlowObj": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentEventTpWithFlowObj": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentRerouteLatWithFlowObj": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFscalingMaxIntentsWithFlowObj": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFintentInstallWithdrawLatWithFlowObj": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "extra_C": false,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFscaleTopo": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": true,
+                            "extra_D": false,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFscalingMaxIntents": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": true,
+                            "new_Test": false,
+                            "day": " " ],
+                    "SCPFmastershipFailoverLat": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "extra_C": false,
+                            "extra_D": true,
+                            "new_Test": false,
+                            "day": " " ]
+            ],
+            "USECASE": [
+                    "FUNCvirNetNB": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " FUNCvirNetNB ",
+                            wiki_file: " FUNCvirNetNBWiki.txt " ],
+                    "FUNCbgpls": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " FUNCbgpls ",
+                            wiki_file: " FUNCbgplsWiki.txt " ],
+                    "VPLSBasic": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " VPLSBasic ",
+                            wiki_file: " VPLSBasicWiki.txt " ],
+                    "VPLSfailsafe": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " VPLSfailsafe ",
+                            wiki_file: " VPLSfailsafeWiki.txt " ],
+                    "USECASE_SdnipFunction": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SDNIP Function ",
+                            wiki_file: " USECASE_SdnipFunctionWiki.txt " ],
+                    "USECASE_SdnipFunctionCluster": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SDNIP Function Cluster ",
+                            wiki_file: " USECASE_SdnipFunctionClusterWiki.txt " ],
+                    "PLATdockertest": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: " Docker Images sanity test ",
+                            wiki_file: " PLATdockertestTableWiki.txt " ]
+            ],
+            "SR": [
+                    "SRBridging": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Bridging ",
+                            wiki_file: " SRBridgingWiki.txt " ],
+                    "SRRouting": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Routing ",
+                            wiki_file: " SRRoutingWiki.txt " ],
+                    "SRDhcprelay": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Dhcp Relay ",
+                            wiki_file: " SRDhcprelayWiki.txt " ],
+                    "SRDynamicConf": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Dynamic Config ",
+                            wiki_file: " SRDynamicConfWiki.txt " ],
+                    "SRMulticast": [
+                            "basic": true,
+                            "extra_A": false,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Multi Cast ",
+                            wiki_file: " SRMulticastWiki.txt " ],
+                    "SRSanity": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Sanity ",
+                            wiki_file: " SRSanityWiki.txt " ],
+                    "SRSwitchFailure": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Switch Failure ",
+                            wiki_file: " SRSwitchFailureWiki.txt " ],
+                    "SRLinkFailure": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Link Failure ",
+                            wiki_file: " SRLinkFailureWiki.txt " ],
+                    "SROnosFailure": [
+                            "basic": false,
+                            "extra_A": true,
+                            "extra_B": false,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Onos node Failure ",
+                            wiki_file: " SROnosFailureWiki.txt " ],
+                    "SRClusterRestart": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Cluster Restart ",
+                            wiki_file: " SRClusterRestartWiki.txt " ],
+                    "SRDynamic": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR Dynamic ",
+                            wiki_file: " SRDynamicWiki.txt " ],
+                    "SRHighAvailability": [
+                            "basic": false,
+                            "extra_A": false,
+                            "extra_B": true,
+                            "new_Test": false,
+                            "day": "",
+                            wiki_link: wikiPrefix + " - " + " SR High Availability ",
+                            wiki_file: " SRHighAvailabilityWiki.txt " ]
+            ]
+    ]
 }
 
-return this;
+return this
diff --git a/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy b/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
index c2dfc5d..2c86f43 100644
--- a/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/PerformanceFuncs.groovy
@@ -25,14 +25,15 @@
 fileRelated = evaluate readTrusted( 'TestON/JenkinsFile/dependencies/JenkinsPathAndFiles.groovy' )
 
 fileRelated.init()
+
 def init(){
-    // init step for SCPFfunctions. It has some mandatory init steps
+    // init step for SCPF functions. It runs some mandatory initialization steps.
 
     // none, batches, neighbors, times : to be used for extra parameters for generating graphs.
-    none = [ "" ]
-    batches = [ 1, 100, 1000 ]
-    neighbors = [ 'y', 'n' ]
-    times = [ 'y', 'n' ]
+    def none = [ "" ]
+    def batches = [ 1, 100, 1000 ]
+    def neighbors = [ 'y', 'n' ]
+    def times = [ 'y', 'n' ]
 
     //flows : whether the test is affected by oldFlow or newFlow
     // test : command of the test to be executed when running the test
@@ -51,91 +52,293 @@
     // y_axis : title of the y_axis to be shown for 50 data overall graph if there is one.
 
     SCPF = [
-        SCPFcbench:                              [ flows:false, test:'SCPFcbench', table:'cbench_bm_tests', results:'cbench_bm_results', file:'CbenchDB', rFile:'SCPFcbench.R', extra:none, finalResult:1, graphTitle:[ 'Cbench Test' ], dbCols:'avg', dbWhere:'', y_axis:'Throughput (Responses/sec)' ],
-        SCPFhostLat:                             [ flows:false, test:'SCPFhostLat', table:'host_latency_tests', results:'host_latency_results', file:'HostAddLatency', rFile:'SCPFhostLat.R', extra:none,finalResult:1, graphTitle:[ 'Host Latency Test' ], dbCols:'avg', dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-        SCPFportLat:                             [ flows:false, test:'SCPFportLat', table:'port_latency_details', results:'port_latency_results', file:'/tmp/portEventResultDb', rFile:'SCPFportLat.R', extra:none, finalResult:1, graphTitle:[ 'Port Latency Test - Port Up','Port Latency Test - Port Down' ], dbCols:[ 'up_ofp_to_dev_avg, up_dev_to_link_avg, up_link_to_graph_avg', 'down_ofp_to_dev_avg, down_dev_to_link_avg, down_link_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-        SCPFflowTp1g:                            [ flows:true, test:'SCPFflowTp1g', table:'flow_tp_tests', results:'flow_tp_results', file:'flowTP1gDB', rFile:'SCPFflowTp1g.R n', extra:neighbors, finalResult:1, graphTitle:[ 'Flow Throughput Test - neighbors=0', 'Flow Throughput Test - neighbors=4' ], dbCols:'avg', dbWhere:[ 'AND scale=5 AND neighbors=0 ','AND scale=5 AND NOT neighbors=0' ],  y_axis:'Throughput (,000 Flows/sec)' ],
-        SCPFflowTp1gWithFlowObj:                 [ flows:true, test:'SCPFflowTp1g --params TEST/flowObj=True', table:'flow_tp_fobj_tests', results:'flow_tp_fobj_results', file:'flowTP1gDBFlowObj', rFile:'SCPFflowTp1g.R y', extra:neighbors, finalResult:0 ],
-        SCPFscaleTopo:                           [ flows:false, test:'SCPFscaleTopo', table:'scale_topo_latency_details', results:'scale_topo_latency_results', file:'/tmp/scaleTopoResultDb', rFile:'SCPFscaleTopo.R', extra:none, finalResult:1, graphTitle:[ 'Scale Topology Test' ], dbCols:[ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ], dbWhere:'AND scale=20' , y_axis:'Latency (s)' ],
-        SCPFswitchLat:                           [ flows:false, test:'SCPFswitchLat', table:'switch_latency_details', results:'switch_latency_results', file:'/tmp/switchEventResultDb', rFile:'SCPFswitchLat.R', extra:none, finalResult:1, graphTitle:[ 'Switch Latency Test - Switch Up','Switch Latency Test - Switch Down' ], dbCols:[ 'tcp_to_feature_reply_avg,feature_reply_to_device_avg,up_device_to_graph_avg', 'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ],
-        SCPFbatchFlowResp:                       [ flows:true, test:'SCPFbatchFlowResp', table:'batch_flow_tests', results:'batch_flow_results', file:'SCPFbatchFlowRespData', rFile:'SCPFbatchFlowResp.R', extra:none, finalResult:1, graphTitle:[ 'Batch Flow Test - Post', 'Batch Flow Test - Del' ], dbCols:[ 'elapsepost, posttoconfrm', 'elapsedel, deltoconfrm' ], dbWhere:'', y_axis:'Latency (s)' ],
-        SCPFintentEventTp:                       [ flows:true, test:'SCPFintentEventTp', table:'intent_tp_tests', results:'intent_tp_results', file:'IntentEventTPDB', rFile:'SCPFintentEventTp.R n', extra:neighbors, finalResult:1, graphTitle:[ 'Intent Throughput Test - neighbors=0','Intent Throughput Test - neighbors=4' ], dbCols:'SUM( avg ) as avg', dbWhere:[ 'AND scale=5 AND neighbors=0 GROUP BY date,build','AND scale=5 AND NOT neighbors=0 GROUP BY date,build' ], y_axis:'Throughput (Ops/sec)' ],
-        SCPFintentRerouteLat:                    [ flows:true, test:'SCPFintentRerouteLat', table:'intent_reroute_latency_tests', results:'intent_reroute_latency_results', file:'IntentRerouteLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches, finalResult:1, graphTitle:[ 'Intent Reroute Test' ], dbCols:'avg', dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)' ],
-        SCPFscalingMaxIntents:                   [ flows:true, test:'SCPFscalingMaxIntents', table:'max_intents_tests', results:'max_intents_results', file:'ScalingMaxIntentDB', rFile:'SCPFscalingMaxIntents.R n', extra:none, finalResult:0 ],
-        SCPFintentEventTpWithFlowObj:            [ flows:true, test:'SCPFintentEventTp --params TEST/flowObj=True', table:'intent_tp_fobj_tests', results:'intent_tp_fobj_results', file:'IntentEventTPflowObjDB', rFile:'SCPFintentEventTp.R y', extra:neighbors,finalResult:0 ],
-        SCPFintentInstallWithdrawLat:            [ flows:true, test:'SCPFintentInstallWithdrawLat', table:'intent_latency_tests', results:'intent_latency_results', file:'IntentInstallWithdrawLatDB', rFile:'SCPFIntentInstallWithdrawRerouteLat.R n', extra:batches,finalResult:1, graphTitle:[ 'Intent Installation Test','Intent Withdrawal Test' ], dbCols:[ 'install_avg','withdraw_avg' ], dbWhere:'AND scale=5 AND batch_size=100', y_axis:'Latency (ms)' ],
-        SCPFintentRerouteLatWithFlowObj:         [ flows:true, test:'SCPFintentRerouteLat --params TEST/flowObj=True', table:'intent_reroute_latency_fobj_tests', results:'intent_reroute_latency_fobj_results', file:'IntentRerouteLatDBWithFlowObj', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0 ],
-        SCPFscalingMaxIntentsWithFlowObj:        [ flows:true, test:'SCPFscalingMaxIntents --params TEST/flowObj=True', table:'max_intents_fobj_tests', results:'max_intents_fobj_results', file:'ScalingMaxIntentDBWFO', rFile:'SCPFscalingMaxIntents.R y', extra:none, finalResult:0 ],
-        SCPFintentInstallWithdrawLatWithFlowObj: [ flows:true, test:'SCPFintentInstallWithdrawLat --params TEST/flowObj=True', table:'intent_latency_fobj_tests', results:'intent_latency_fobj_results', file:'IntentInstallWithdrawLatDBWFO', rFile:'SCPFIntentInstallWithdrawRerouteLat.R y', extra:batches, finalResult:0 ],
-        SCPFmastershipFailoverLat:               [ flows:false, test:'SCPFmastershipFailoverLat', table:'mastership_failover_tests', results:'mastership_failover_results', file:'mastershipFailoverLatDB', rFile:'SCPFmastershipFailoverLat.R', extra:none, finalResult:1, graphTitle:[ 'Mastership Failover Test' ], dbCols:[ 'kill_deact_avg,deact_role_avg' ], dbWhere:'AND scale=5', y_axis:'Latency (ms)' ]
+            SCPFcbench: [ flows: false,
+                          test: 'SCPFcbench',
+                          table: 'cbench_bm_tests',
+                          results: 'cbench_bm_results',
+                          file: 'CbenchDB',
+                          rFile: 'SCPFcbench.R',
+                          extra: none,
+                          finalResult: 1,
+                          graphTitle: [ 'Cbench Test' ],
+                          dbCols: 'avg',
+                          dbWhere: '',
+                          y_axis: 'Throughput (Responses/sec)' ],
+            SCPFhostLat: [ flows: false,
+                           test: 'SCPFhostLat',
+                           table: 'host_latency_tests',
+                           results: 'host_latency_results',
+                           file: 'HostAddLatency',
+                           rFile: 'SCPFhostLat.R',
+                           extra: none,
+                           finalResult: 1,
+                           graphTitle: [ 'Host Latency Test' ],
+                           dbCols: 'avg',
+                           dbWhere: 'AND scale=5',
+                           y_axis: 'Latency (ms)' ],
+            SCPFportLat: [ flows: false,
+                           test: 'SCPFportLat',
+                           table: 'port_latency_details',
+                           results: 'port_latency_results',
+                           file: '/tmp/portEventResultDb',
+                           rFile: 'SCPFportLat.R',
+                           extra: none,
+                           finalResult: 1,
+                           graphTitle: [ 'Port Latency Test - Port Up', 'Port Latency Test - Port Down' ],
+                           dbCols: [ 'up_ofp_to_dev_avg, up_dev_to_link_avg, up_link_to_graph_avg',
+                                     'down_ofp_to_dev_avg, down_dev_to_link_avg, down_link_to_graph_avg' ],
+                           dbWhere: 'AND scale=5',
+                           y_axis: 'Latency (ms)' ],
+            SCPFflowTp1g: [ flows: true,
+                            test: 'SCPFflowTp1g',
+                            table: 'flow_tp_tests',
+                            results: 'flow_tp_results',
+                            file: 'flowTP1gDB',
+                            rFile: 'SCPFflowTp1g.R n',
+                            extra: neighbors,
+                            finalResult: 1,
+                            graphTitle: [ 'Flow Throughput Test - neighbors=0',
+                                          'Flow Throughput Test - neighbors=4' ],
+                            dbCols: 'avg',
+                            dbWhere: [ 'AND scale=5 AND neighbors=0 ',
+                                       'AND scale=5 AND NOT neighbors=0' ],
+                            y_axis: 'Throughput (,000 Flows/sec)' ],
+            SCPFflowTp1gWithFlowObj: [ flows: true,
+                                       test: 'SCPFflowTp1g --params TEST/flowObj=True',
+                                       table: 'flow_tp_fobj_tests',
+                                       results: 'flow_tp_fobj_results',
+                                       file: 'flowTP1gDBFlowObj',
+                                       rFile: 'SCPFflowTp1g.R y',
+                                       extra: neighbors,
+                                       finalResult: 0 ],
+            SCPFscaleTopo: [ flows: false,
+                             test: 'SCPFscaleTopo',
+                             table: 'scale_topo_latency_details',
+                             results: 'scale_topo_latency_results',
+                             file: '/tmp/scaleTopoResultDb',
+                             rFile: 'SCPFscaleTopo.R',
+                             extra: none,
+                             finalResult: 1,
+                             graphTitle: [ 'Scale Topology Test' ],
+                             dbCols: [ 'first_connection_to_last_connection, last_connection_to_last_role_request, last_role_request_to_last_topology' ],
+                             dbWhere: 'AND scale=20',
+                             y_axis: 'Latency (s)' ],
+            SCPFswitchLat: [ flows: false,
+                             test: 'SCPFswitchLat',
+                             table: 'switch_latency_details',
+                             results: 'switch_latency_results',
+                             file: '/tmp/switchEventResultDb',
+                             rFile: 'SCPFswitchLat.R',
+                             extra: none,
+                             finalResult: 1,
+                             graphTitle: [ 'Switch Latency Test - Switch Up',
+                                           'Switch Latency Test - Switch Down' ],
+                             dbCols: [ 'tcp_to_feature_reply_avg,feature_reply_to_device_avg,up_device_to_graph_avg',
+                                       'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg' ],
+                             dbWhere: 'AND scale=5',
+                             y_axis: 'Latency (ms)' ],
+            SCPFbatchFlowResp: [ flows: true,
+                                 test: 'SCPFbatchFlowResp',
+                                 table: 'batch_flow_tests',
+                                 results: 'batch_flow_results',
+                                 file: 'SCPFbatchFlowRespData',
+                                 rFile: 'SCPFbatchFlowResp.R',
+                                 extra: none,
+                                 finalResult: 1,
+                                 graphTitle: [ 'Batch Flow Test - Post',
+                                               'Batch Flow Test - Del' ],
+                                 dbCols: [ 'elapsepost, posttoconfrm',
+                                           'elapsedel, deltoconfrm' ],
+                                 dbWhere: '',
+                                 y_axis: 'Latency (s)' ],
+            SCPFintentEventTp: [ flows: true,
+                                 test: 'SCPFintentEventTp',
+                                 table: 'intent_tp_tests',
+                                 results: 'intent_tp_results',
+                                 file: 'IntentEventTPDB',
+                                 rFile: 'SCPFintentEventTp.R n',
+                                 extra: neighbors,
+                                 finalResult: 1,
+                                 graphTitle: [ 'Intent Throughput Test - neighbors=0',
+                                               'Intent Throughput Test - neighbors=4' ],
+                                 dbCols: 'SUM( avg ) as avg',
+                                 dbWhere: [ 'AND scale=5 AND neighbors=0 GROUP BY date,build',
+                                            'AND scale=5 AND NOT neighbors=0 GROUP BY date,build' ],
+                                 y_axis: 'Throughput (Ops/sec)' ],
+            SCPFintentRerouteLat: [ flows: true,
+                                    test: 'SCPFintentRerouteLat',
+                                    table: 'intent_reroute_latency_tests',
+                                    results: 'intent_reroute_latency_results',
+                                    file: 'IntentRerouteLatDB',
+                                    rFile: 'SCPFIntentInstallWithdrawRerouteLat.R n',
+                                    extra: batches,
+                                    finalResult: 1,
+                                    graphTitle: [ 'Intent Reroute Test' ],
+                                    dbCols: 'avg',
+                                    dbWhere: 'AND scale=5 AND batch_size=100',
+                                    y_axis: 'Latency (ms)' ],
+            SCPFscalingMaxIntents: [ flows: true,
+                                     test: 'SCPFscalingMaxIntents',
+                                     table: 'max_intents_tests',
+                                     results: 'max_intents_results',
+                                     file: 'ScalingMaxIntentDB',
+                                     rFile: 'SCPFscalingMaxIntents.R n',
+                                     extra: none,
+                                     finalResult: 0 ],
+            SCPFintentEventTpWithFlowObj: [ flows: true,
+                                            test: 'SCPFintentEventTp --params TEST/flowObj=True',
+                                            table: 'intent_tp_fobj_tests',
+                                            results: 'intent_tp_fobj_results',
+                                            file: 'IntentEventTPflowObjDB',
+                                            rFile: 'SCPFintentEventTp.R y',
+                                            extra: neighbors,
+                                            finalResult: 0 ],
+            SCPFintentInstallWithdrawLat: [ flows: true,
+                                            test: 'SCPFintentInstallWithdrawLat',
+                                            table: 'intent_latency_tests',
+                                            results: 'intent_latency_results',
+                                            file: 'IntentInstallWithdrawLatDB',
+                                            rFile: 'SCPFIntentInstallWithdrawRerouteLat.R n',
+                                            extra: batches,
+                                            finalResult: 1,
+                                            graphTitle: [ 'Intent Installation Test',
+                                                          'Intent Withdrawal Test' ],
+                                            dbCols: [ 'install_avg', 'withdraw_avg' ],
+                                            dbWhere: 'AND scale=5 AND batch_size=100',
+                                            y_axis: 'Latency (ms)' ],
+            SCPFintentRerouteLatWithFlowObj: [ flows: true,
+                                               test: 'SCPFintentRerouteLat --params TEST/flowObj=True',
+                                               table: 'intent_reroute_latency_fobj_tests',
+                                               results: 'intent_reroute_latency_fobj_results',
+                                               file: 'IntentRerouteLatDBWithFlowObj',
+                                               rFile: 'SCPFIntentInstallWithdrawRerouteLat.R y',
+                                               extra: batches,
+                                               finalResult: 0 ],
+            SCPFscalingMaxIntentsWithFlowObj: [ flows: true,
+                                                test: 'SCPFscalingMaxIntents --params TEST/flowObj=True',
+                                                table: 'max_intents_fobj_tests',
+                                                results: 'max_intents_fobj_results',
+                                                file: 'ScalingMaxIntentDBWFO',
+                                                rFile: 'SCPFscalingMaxIntents.R y',
+                                                extra: none,
+                                                finalResult: 0 ],
+            SCPFintentInstallWithdrawLatWithFlowObj: [ flows: true,
+                                                       test: 'SCPFintentInstallWithdrawLat --params TEST/flowObj=True',
+                                                       table: 'intent_latency_fobj_tests',
+                                                       results: 'intent_latency_fobj_results',
+                                                       file: 'IntentInstallWithdrawLatDBWFO',
+                                                       rFile: 'SCPFIntentInstallWithdrawRerouteLat.R y',
+                                                       extra: batches,
+                                                       finalResult: 0 ],
+            SCPFmastershipFailoverLat: [ flows: false,
+                                         test: 'SCPFmastershipFailoverLat',
+                                         table: 'mastership_failover_tests',
+                                         results: 'mastership_failover_results',
+                                         file: 'mastershipFailoverLatDB',
+                                         rFile: 'SCPFmastershipFailoverLat.R',
+                                         extra: none,
+                                         finalResult: 1,
+                                         graphTitle: [ 'Mastership Failover Test' ],
+                                         dbCols: [ 'kill_deact_avg,deact_role_avg' ],
+                                         dbWhere: 'AND scale=5',
+                                         y_axis: 'Latency (ms)' ]
     ]
     graph_saved_directory = fileRelated.jenkinsWorkspace + "postjob-BM/"
 }
+
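Each entry in the reformatted SCPF map above carries everything needed to post and plot one test's results: the database table and results name, the R script to invoke, and optional per-graph columns and WHERE fragments. A minimal, illustrative lookup against that map (field values taken from the entries above):

    // Illustrative sketch: reading fields from the SCPF map defined above.
    def entry = SCPF[ 'SCPFcbench' ]
    assert entry[ 'table' ] == 'cbench_bm_tests'
    assert entry[ 'rFile' ] == 'SCPFcbench.R'
    assert entry[ 'finalResult' ] == 1    // 1 means a front-page trend graph is generated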
 def getGraphCommand( rFileName, extras, host, port, user, pass, testName, branchName, isOldFlow ){
     // generate the list of Rscript commands for individual graphs
 
     result = ""
-    for( extra in extras ){
+    for ( extra in extras ){
         result += generateGraph( rFileName, " " + extra, host, port, user, pass, testName, branchName, isOldFlow ) + ";"
     }
     return result
 }
+
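getGraphCommand joins one generateGraph invocation per entry in extras with semicolons, so a test whose extra list is neighbors produces one Rscript command per neighbor count. A sketch of the join pattern in isolation (the script name is a placeholder):

    // Sketch of the semicolon-join pattern used by getGraphCommand above.
    def commands = ''
    for ( extra in [ '0', '4' ] ){
        commands += 'Rscript some.R ' + extra + ';'
    }
    assert commands == 'Rscript some.R 0;Rscript some.R 4;'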
 def generateGraph( rFileName, batch, host, port, user, pass, testName, branchName, isOldFlow ){
     // generate the Rscript command for individual graphs
 
-    return generalFuncs.basicGraphPart( fileRelated.SCPFSpecificLocation + rFileName, host, port, user, pass, testName, branchName ) +
+    return generalFuncs.basicGraphPart( fileRelated.SCPFSpecificLocation + rFileName,
+                                        host, port, user, pass, testName, branchName ) +
            " " + batch + " " + usingOldFlow( isOldFlow, testName ) + graph_saved_directory
 }
+
 def generateCombinedResultGraph( host, port, user, pass, testName, branchName, isOldFlow ){
     // generate the Rscript command for the overall trend graph on the front page.
-    result = ""
+    def result = ""
 
-    for ( int i=0; i< SCPF[ testName ][ 'graphTitle' ].size(); i++ ){
-        result += generalFuncs.basicGraphPart(  fileRelated.trendSCPF, host, port, user, pass, "\"" + SCPF[ testName ][ 'graphTitle' ][ i ] + "\"", branchName ) +
-        " " + 50 + " \"SELECT " + checkIfList( testName, 'dbCols', i ) + ", build FROM " + SCPF[ testName ][ 'table' ] + " WHERE  branch=\'" + branchName + "\' " + sqlOldFlow( isOldFlow, testName ) +
-        checkIfList( testName, 'dbWhere', i ) + " ORDER BY date DESC LIMIT 50\" \"" + SCPF[ testName ][ 'y_axis' ] + "\" " + hasOldFlow( isOldFlow, testName ) + graph_saved_directory + ";"
+    for ( int i = 0; i < SCPF[ testName ][ 'graphTitle' ].size(); i++ ){
+        result += generalFuncs.basicGraphPart( fileRelated.trendSCPF,
+                                               host,
+                                               port,
+                                               user,
+                                               pass,
+                                               "\"" + SCPF[ testName ][ 'graphTitle' ][ i ] + "\"",
+                                               branchName ) +
+                  " " + 50 + " \"SELECT " +
+                  checkIfList( testName, 'dbCols', i ) +
+                  ", build FROM " + SCPF[ testName ][ 'table' ] + " WHERE  branch=\'" + branchName + "\' " +
+                  sqlOldFlow( isOldFlow, testName ) +
+                  checkIfList( testName, 'dbWhere', i ) +
+                  " ORDER BY date DESC LIMIT 50\" \"" +
+                  SCPF[ testName ][ 'y_axis' ] + "\" " +
+                  hasOldFlow( isOldFlow, testName ) +
+                  graph_saved_directory + ";"
     }
     return result
 }
+
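The string assembled above embeds a SELECT that the trend R script runs; for SCPFcbench on branch master (flows:false, empty dbWhere) the embedded query reconstructs roughly as:

    // Reconstructed embedded query (illustrative, not captured output):
    //   SELECT avg, build FROM cbench_bm_tests WHERE  branch='master'  ORDER BY date DESC LIMIT 50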
 def checkIfList( testName, forWhich, pos ){
     // check whether a dictionary field holds a list or a plain string.
 
-    return SCPF[ testName ][ forWhich ].getClass().getName() != "java.lang.String" ? SCPF[ testName ][ forWhich ][ pos ] :  SCPF[ testName ][ forWhich ]
+    return SCPF[ testName ][ forWhich ].getClass().getName() != "java.lang.String" ?
+           SCPF[ testName ][ forWhich ][ pos ] :
+           SCPF[ testName ][ forWhich ]
 }
+
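checkIfList is what lets fields such as dbCols and dbWhere hold either a single string (one graph) or a list (one entry per graph panel). A sketch of both cases against the map above:

    // String field: the position argument is ignored.
    assert checkIfList( 'SCPFcbench', 'dbCols', 0 ) == 'avg'
    // List field: the position selects the entry for the i-th graph.
    assert checkIfList( 'SCPFswitchLat', 'dbCols', 1 ) ==
           'fin_ack_to_ack_avg,ack_to_device_avg,down_device_to_graph_avg'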
 def sqlOldFlow( isOldFlow, testName ){
     // SQL WHERE clause fragment for checking old flows.
 
     return SCPF[ testName ][ 'flows' ] ? " AND " + ( isOldFlow ? "" : "NOT " ) + "is_old_flow " : ""
 }
+
 def oldFlowRuleCheck( isOldFlow, branch ){
     // record whether old flow is in use; when it is not, raise the flow count for SCPFflowTp1g
 
     this.isOldFlow = isOldFlow
-    if( !isOldFlow ){
+    if ( !isOldFlow ){
         SCPF[ 'SCPFflowTp1g' ][ 'test' ] += " --params TEST/flows=" + ( branch == "onos-1.11" ? "4000" : "3500" )
     }
 }
+
 def affectedByOldFlow( isOldFlow, testName ){
     // For the SQL command: if the test is affected by old flow, return the old-flow parameter
     return SCPF[ testName ][ 'flows' ] ? "" + isOldFlow + ", " : ""
 }
+
 def usingOldFlow( isOldFlow, testName ){
     // For the Rscript command: whether the test is using old flow.
 
     return SCPF[ testName ][ 'flows' ] ? ( isOldFlow ? "y" : "n" ) + " " : ""
 }
+
 def hasOldFlow( isOldFlow, testName ){
     // For the Rscript command for the 50-data-point trend graph
 
     return ( SCPF[ testName ][ 'flows' ] && isOldFlow ? "y" : "n" ) + " "
 }
+
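The three old-flow helpers feed different consumers: sqlOldFlow emits a WHERE fragment, while usingOldFlow and hasOldFlow emit the y/n flags the R scripts expect. Expected values derived from the code above, using a flows:true test:

    assert sqlOldFlow( true, 'SCPFflowTp1g' ) == ' AND is_old_flow '
    assert sqlOldFlow( false, 'SCPFflowTp1g' ) == ' AND NOT is_old_flow '
    assert usingOldFlow( false, 'SCPFflowTp1g' ) == 'n '
    assert hasOldFlow( true, 'SCPFflowTp1g' ) == 'y '
    // For flows:false tests, sqlOldFlow and usingOldFlow return '' and hasOldFlow returns 'n '.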
 def sqlCommand( testName ){
     // SQL command for inserting data into the database
 
-    if ( testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat" )
-        return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
-    return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
+    if ( testName == "SCPFscaleTopo" || testName == "SCPFswitchLat" || testName == "SCPFportLat" ){
+        return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" +
+               SCPF[ testName ][ 'results' ] + "','\$BUILD_NUMBER', \$line, '\$ONOSBranch');\""
+    }
+    return "\"INSERT INTO " + SCPF[ testName ][ 'table' ] + " VALUES( '\$DATE','" + SCPF[ testName ][ 'results' ] +
+           "','\$BUILD_NUMBER', '\$ONOSBranch', " + affectedByOldFlow( isOldFlow, testName ) + "\$line);\""
 }
+
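The two INSERT shapes built by sqlCommand differ only in where the shell variable $line lands relative to the branch column. Reconstructions of both, with the shell variables left unexpanded:

    // Special-cased tests (e.g. SCPFswitchLat):
    //   "INSERT INTO switch_latency_details VALUES( '$DATE','switch_latency_results','$BUILD_NUMBER', $line, '$ONOSBranch');"
    // Default shape (e.g. SCPFcbench; flows:false, so no old-flow parameter):
    //   "INSERT INTO cbench_bm_tests VALUES( '$DATE','cbench_bm_results','$BUILD_NUMBER', '$ONOSBranch', $line);"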
 def databasePart( testName, database_command ){
     // read the result file from the machine and insert it into the database
 
@@ -147,10 +350,16 @@
     echo ''' + database_command + '''
     done< ''' + SCPF[ testName ][ 'file' ]
 }
+
 def getGraphGeneratingCommand( host, port, user, pass, testName, prop ){
     // returns the combined Rscript command for each test.
 
-    return getGraphCommand( SCPF[ testName ][ 'rFile' ], SCPF[ testName ][ 'extra' ], host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) + '''
-    ''' + ( SCPF[ testName ][ 'finalResult' ] ? generateCombinedResultGraph( host, port, user, pass, testName, prop[ "ONOSBranch" ], , isOldFlow ) : "" )
+    return getGraphCommand( SCPF[ testName ][ 'rFile' ],
+                            SCPF[ testName ][ 'extra' ],
+                            host, port, user, pass, testName,
+                            prop[ "ONOSBranch" ], isOldFlow ) + '''
+    ''' + ( SCPF[ testName ][ 'finalResult' ] ?
+            generateCombinedResultGraph( host, port, user, pass, testName, prop[ "ONOSBranch" ], isOldFlow ) : "" )
 }
-return this;
+
+return this
diff --git a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
index 238d7bc..07ea98b 100644
--- a/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
+++ b/TestON/JenkinsFile/dependencies/TriggerFuncs.groovy
@@ -23,24 +23,27 @@
 // This will provide the portion that will set up the environment of the machine
 //      and trigger the corresponding jobs.
 
+
 def init( commonFuncs ){
     funcs = commonFuncs
 }
+
 def lastCommaRemover( str ){
     // remove the trailing comma from the string, if present
 
     if ( str.size() > 0 && str[ str.size() - 1 ] == ',' ){
-        str = str.substring( 0,str.size() - 1 )
+        str = str.substring( 0, str.size() - 1 )
     }
     return str
 }
+
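A quick sketch of lastCommaRemover: only a single trailing comma is stripped, and interior commas are untouched:

    assert lastCommaRemover( 'mon,wed,' ) == 'mon,wed'
    assert lastCommaRemover( 'mon,wed' ) == 'mon,wed'
    assert lastCommaRemover( '' ) == ''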
 def printDaysForTest( AllTheTests ){
     // Print the days on which each test is scheduled.
 
     result = ""
     for ( String test in AllTheTests.keySet() ){
         result += test + " : \n"
-        for( String each in AllTheTests[ test ].keySet() ){
+        for ( String each in AllTheTests[ test ].keySet() ){
             AllTheTests[ test ][ each ][ "day" ] = lastCommaRemover( AllTheTests[ test ][ each ][ "day" ] )
             result += "    " + each + ":[" + AllTheTests[ test ][ each ][ "day" ] + "]\n"
         }
@@ -48,34 +51,52 @@
     }
     return result
 }
+
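The resulting string groups tests by category and shows each test's trimmed day list; illustratively (test names and days are placeholders):

    // FUNC :
    //     FUNCflow:[mon,wed]
    //     FUNCintent:[tue]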
 def runTestSeq( testList ){
     // Return a closure that runs the tests sequentially
-    return{
+    return {
         for ( test in testList.keySet() ){
             testList[ test ].call()
         }
     }
 }
+
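Because runTestSeq returns a closure rather than executing immediately, the caller can store the sequence or hand it to a parallel step and invoke it later. A hypothetical usage sketch:

    def testList = [ first: { println 'first' }, second: { println 'second' } ]
    def seq = runTestSeq( testList )
    seq()    // prints "first" then "second", in insertion order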
 def print_tests( tests ){
     // print the list of the tests to be run
 
-    for( String test in tests.keySet() ){
-        if( tests[ test ][ "tests" ] != "" ){
+    for ( String test in tests.keySet() ){
+        if ( tests[ test ][ "tests" ] != "" ){
             println test + ":"
             println tests[ test ][ "tests" ]
         }
     }
 }
+
 def organize_tests( tests, testcases ){
     // organize each test into its category using its name.
     // most of the time this uses the first two characters of the test name,
     // but there are exceptions, like FUNCbgpls or FUNCvirNetNB, which are now under USECASE
 
-    testList = tests.tokenize( "\n;, " )
-    for( String test in testList )
-        testcases [ Prefix_organizer[ ( test == "FUNCbgpls" || test == "FUNCvirNetNB" ? "US" : ( test[ 0 ] + test[ 1 ] ) ) ] ][ "tests" ] += test + ","
+    // Based on the first two letters of the test name, decide which category to put the test into.
+    def prefixes = [
+            "FU": "FUNC",
+            "HA": "HA",
+            "PL": "USECASE",
+            "SA": "USECASE",
+            "SC": "SCPF",
+            "SR": "SR",
+            "US": "USECASE",
+            "VP": "USECASE"
+    ]
+
+    def testList = tests.tokenize( "\n;, " )
+    for ( String test in testList ){
+        String prefix = ( test == "FUNCbgpls" || test == "FUNCvirNetNB" ) ? "US" : ( test[ 0..1 ] )
+        testcases[ prefixes[ prefix ] ][ "tests" ] += test + ","
+    }
     return testcases
 }
+
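A sketch of how organize_tests buckets names with the prefix table above, assuming each testcases entry starts with an empty "tests" string:

    def testcases = [ FUNC: [ tests: '' ], SCPF: [ tests: '' ], USECASE: [ tests: '' ] ]
    organize_tests( 'FUNCflow\nSCPFcbench, FUNCbgpls', testcases )
    assert testcases[ 'FUNC' ][ 'tests' ] == 'FUNCflow,'
    assert testcases[ 'SCPF' ][ 'tests' ] == 'SCPFcbench,'
    assert testcases[ 'USECASE' ][ 'tests' ] == 'FUNCbgpls,'    // exception routed to USECASE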
 def trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
     // triggering function that will set up the environment and determine which pipeline to trigger
 
@@ -83,7 +104,7 @@
     def wiki = branch
     branch = funcs.branchWithPrefix( branch )
     test_branch = "master"
-    node( "TestStation-" + nodeName + "s" ){
+    node( "TestStation-" + nodeName + "s" ) {
         envSetup( branch, test_branch, onosTag, jobOn, manuallyRun )
 
         exportEnvProperty( branch, test_branch, wiki, tests, post_result, manuallyRun, onosTag, isOldFlow )
@@ -92,33 +113,36 @@
     jobToRun = jobOn + "-pipeline-" + ( manuallyRun ? "manually" : wiki )
     build job: jobToRun, propagate: false
 }
+
 def trigger_pipeline( branch, tests, nodeName, jobOn, manuallyRun, onosTag ){
     // nodeName : "BM" or "VM"
     // jobOn : "SCPF" or "USECASE" or "FUNC" or "HA"
     // this wraps the trigger call in a closure ( return { ... } ) so that it is not
     // executed immediately when this function is called to assign it to a variable.
-    return{
+    return {
         trigger( branch, tests, nodeName, jobOn, manuallyRun, onosTag )
     }
 }
+
 // export Environment properties.
 def exportEnvProperty( onos_branch, test_branch, wiki, tests, postResult, manually_run, onosTag, isOldFlow ){
     // export environment properties to the machine.
 
-    stage( "export Property" ){
+    stage( "export Property" ) {
         sh '''
-            echo "ONOSBranch=''' + onos_branch +'''" > /var/jenkins/TestONOS.property
-            echo "TestONBranch=''' + test_branch +'''" >> /var/jenkins/TestONOS.property
-            echo "ONOSTag='''+ onosTag +'''" >> /var/jenkins/TestONOS.property
-            echo "WikiPrefix=''' + wiki +'''" >> /var/jenkins/TestONOS.property
-            echo "ONOSJVMHeap='''+ env.ONOSJVMHeap +'''" >> /var/jenkins/TestONOS.property
-            echo "Tests=''' + tests +'''" >> /var/jenkins/TestONOS.property
-            echo "postResult=''' + postResult +'''" >> /var/jenkins/TestONOS.property
-            echo "manualRun=''' + manually_run +'''" >> /var/jenkins/TestONOS.property
-            echo "isOldFlow=''' + isOldFlow +'''" >> /var/jenkins/TestONOS.property
+            echo "ONOSBranch=''' + onos_branch + '''" > /var/jenkins/TestONOS.property
+            echo "TestONBranch=''' + test_branch + '''" >> /var/jenkins/TestONOS.property
+            echo "ONOSTag=''' + onosTag + '''" >> /var/jenkins/TestONOS.property
+            echo "WikiPrefix=''' + wiki + '''" >> /var/jenkins/TestONOS.property
+            echo "ONOSJVMHeap=''' + env.ONOSJVMHeap + '''" >> /var/jenkins/TestONOS.property
+            echo "Tests=''' + tests + '''" >> /var/jenkins/TestONOS.property
+            echo "postResult=''' + postResult + '''" >> /var/jenkins/TestONOS.property
+            echo "manualRun=''' + manually_run + '''" >> /var/jenkins/TestONOS.property
+            echo "isOldFlow=''' + isOldFlow + '''" >> /var/jenkins/TestONOS.property
         '''
     }
 }
+
 // Initialize the environment Setup for the onos and OnosSystemTest
 def envSetup( onos_branch, test_branch, onos_tag, jobOn, manuallyRun ){
     // set up the environment using the bash script
@@ -135,30 +159,36 @@
         generateKey()
     }
 }
+
 def tagCheck( onos_tag, onos_branch ){
     // build the checkout command: check out the ONOS tag if it is not empty, otherwise the branch
 
     result = "git checkout "
-    if ( onos_tag == "" )
-        result += onos_branch //create new local branch
-    else
-        result += onos_tag //checkout the tag
+    if ( onos_tag == "" ){
+        // create new local branch
+        result += onos_branch
+    }
+    else {
+        // check out the tag
+        result += onos_tag
+    }
     return result
 }
+
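tagCheck only decides the checkout target; a sketch (the tag value is hypothetical):

    assert tagCheck( '', 'onos-1.13' ) == 'git checkout onos-1.13'
    assert tagCheck( '1.13.2', 'onos-1.13' ) == 'git checkout 1.13.2'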
 def preSetup( onos_branch, test_branch, onos_tag, isManual ){
     // pre-setup part that cleans up and checks out the corresponding branch.
 
     result = ""
-    if( !isManual ){
+    if ( !isManual ){
         result = '''echo -e "\n#####  Set TestON Branch #####"
         echo "TestON Branch is set on: ''' + test_branch + '''"
         cd ~/OnosSystemTest/
         git checkout HEAD~1      # Make sure you aren't on a branch
         git branch | grep -v "detached from" | xargs git branch -d # delete all local branches merged with remote
-        git branch -D ''' + test_branch + ''' # just incase there are local changes. This will normally result in a branch not found error
+        git branch -D ''' + test_branch + ''' # just in case there are local changes. This will normally result in a branch not found error
         git clean -df # clean any local files
         git fetch --all # update all caches from remotes
-        git reset --hard origin/''' + test_branch +'''  # force local index to match remote branch
+        git reset --hard origin/''' + test_branch + '''  # force local index to match remote branch
         git clean -df # clean any local files
         git checkout ''' + test_branch + ''' #create new local branch
         git branch
@@ -190,20 +220,22 @@
     }
     return result
 }
+
 def oldFlowCheck( jobOn, onos_branch ){
     // check whether it is oldFlow; if so, switch to using old flow. Only affects SCPF.
 
     result = ""
-    if( jobOn == "SCPF" && ( onos_branch== "master" || onos_branch=="onos-1.12" ) )
+    if ( jobOn == "SCPF" && ( onos_branch == "master" || onos_branch == "onos-1.12" ) )
         result = '''sed -i -e 's/@Component(immediate = true)/@Component(enabled = false)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/''' + ( isOldFlow ? "DistributedFlowRuleStore" : "ECFlowRuleStore" ) + '''.java
         sed -i -e 's/@Component(enabled = false)/@Component(immediate = true)/g' ~/onos/core/store/dist/src/main/java/org/onosproject/store/flow/impl/''' + ( isOldFlow ? "ECFlowRuleStore" : "DistributedFlowRuleStore" ) + ".java"
     return result
 }
+
 def postSetup( onos_branch, test_branch, onos_tag, isManual ){
     // setup that will build ONOS using buck.
 
     result = ""
-    if( !isManual ){
+    if ( !isManual ){
         result = '''echo -e "\n##### build ONOS skip unit tests ######"
         #mvn clean install -DskipTests
         # Force buck update
@@ -218,10 +250,11 @@
     }
     return result
 }
+
 def generateKey(){
     // generate the cluster key for ONOS
 
-    try{
+    try {
         sh '''
         #!/bin/bash -l
         set +e
@@ -230,7 +263,8 @@
         onos-push-bits-through-proxy
         onos-gen-cluster-key -f
         '''
-    }catch( all ){}
+    } catch ( all ){
+    }
 }
 
-return this;
+return this