Merge "[ONOS-7670]: Create Graphs for CHO Tests"
diff --git a/TestON/JenkinsFile/CHO_Graph_Generator b/TestON/JenkinsFile/CHO_Graph_Generator
new file mode 100644
index 0000000..5f45c32
--- /dev/null
+++ b/TestON/JenkinsFile/CHO_Graph_Generator
@@ -0,0 +1,47 @@
+#!groovy
+
+branches = params.ONOSbranch
+branchList = branches.tokenize( "\n;, " )
+script_file = "TestON/JenkinsFile/wikiGraphRScripts/trendCHO.R" // NOTE(review): repo-relative path, but runScript() cd's to scriptDir (~/CHO_Jenkins_Scripts/) before invoking it — confirm trendCHO.R actually resolves from there
+saving_directory = "/var/jenkins/workspace/postjob-Fabric5/"
+scriptDir = "~/CHO_Jenkins_Scripts/"
+
+graphScript = generateGraphScript( branchList )
+
+stage( 'Generating-Graph' ){
+    node( "TestStation-Fabric5s" ){
+        runScript( graphScript )
+    }
+}
+stage( 'posting-result' ){
+    postJob()
+}
+
+def generateGraphScript( branchList ){
+    graphScript = ''''''
+    for( branch in branchList ){
+        branchDir = scriptDir + branch + "/"
+        graphScript += '''export BRANCH=''' + branchDir + '''
+                          mkdir ''' + branchDir + ''';
+                          cp *.csv ''' + branchDir + ''';
+                          bash log-summary;''' + '''
+                          ''' +  script_file + ' ' + branchDir + 'event.csv ' +
+                                branchDir + 'failure.csv ' + branchDir + 'error.csv ' +
+                                branch + ' 60 ' +  saving_directory + ''';
+        '''
+        print( graphScript )
+    }
+    return graphScript
+}
+def runScript( graphScript ){
+    sh '''#!/bin/bash -l
+          set -i  # NOTE(review): 'set -i' is not a valid option for non-interactive bash 'set' — presumably a typo (perhaps 'set -e'); confirm intent
+          set +e
+          export PYTHONPATH=/home/sdn/TestON:/home/sdn/sts
+          cd ''' + scriptDir + ''';
+          ''' + graphScript
+}
+def postJob(){
+        jobToRun = "postjob-Fabric5"
+        build job: jobToRun, propagate: false
+}
diff --git a/TestON/JenkinsFile/SRJenkinsfileTrigger b/TestON/JenkinsFile/FabricJenkinsfileTrigger
similarity index 77%
rename from TestON/JenkinsFile/SRJenkinsfileTrigger
rename to TestON/JenkinsFile/FabricJenkinsfileTrigger
index 98648ed..da75805 100644
--- a/TestON/JenkinsFile/SRJenkinsfileTrigger
+++ b/TestON/JenkinsFile/FabricJenkinsfileTrigger
@@ -17,6 +17,7 @@
     "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
     "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
     "SR" : [ tests : "", nodeName : [ "Fabric2", "Fabric3" ], wikiContent : "" ],
+    "SR1" : [ tests : "", nodeName : [ "Fabric2", "Fabric3" ], wikiContent : "" ],
     "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
 ]
 Prefix_organizer = [
@@ -78,25 +79,29 @@
     println "Tests to be run manually : "
 }else{
     testcases[ "SR" ][ "tests" ] = SR_choices
+    testcases[ "SR1" ][ "tests" ] = SR_choices
     println "Defaulting to " + day + " tests:"
 }
 
 triggerFuncs.print_tests( testcases )
 
 def runTest = [
-    "VM" : [:],
-    "BM" : [:],
     "Fabric2" : [:],
     "Fabric3" : [:]
 ]
-for( String test in testcases.keySet() ){
-    println test
-    if ( testcases[ test ][ "tests" ] != "" ){
-        runTest[ testcases[ test ][ "nodeName" ][ 0 ] ][ test ] = triggerFuncs.trigger_pipeline( current_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 0 ], test, manually_run, onos_tag )
-        runTest[ testcases[ test ][ "nodeName" ][ 0 ] ][ test ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 0 ], test, manually_run, onos_tag )
-        runTest[ testcases[ test ][ "nodeName" ][ 1 ] ][ test ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ 1 ], test, manually_run, onos_tag )
+if ( manually_run ){
+    for( String test in testcases.keySet() ){
+        println test
+        if ( testcases[ test ][ "tests" ] != "" ){
+            runTest[ testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ][ nodeOn( onos_b ) ], test, manually_run, onos_tag )
+        }
     }
+}else{
+    runTest[ "Fabric2" ][ "SR1" ] = triggerFuncs.trigger_pipeline( current_version, testcases[ "SR1" ][ "tests" ], testcases[ "SR1" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
+    runTest[ "Fabric2" ][ "SR" ] = triggerFuncs.trigger_pipeline( previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 0 ], "SR", manually_run, onos_tag )
+    runTest[ "Fabric3" ][ "SR" ] = triggerFuncs.trigger_pipeline( before_previous_version, testcases[ "SR" ][ "tests" ], testcases[ "SR" ][ "nodeName" ][ 1 ], "SR", manually_run, onos_tag )
 }
+
 def finalList = [:]
 finalList[ "Fabric2" ] = triggerFuncs.runTestSeq( runTest[ "Fabric2" ] )
 finalList[ "Fabric3" ] = triggerFuncs.runTestSeq( runTest[ "Fabric3" ] )
@@ -126,3 +131,6 @@
     }
     return result
 }
+def nodeOn( branch ){
+    return branch == "1.12" ? 1 : 0;
+}
diff --git a/TestON/JenkinsFile/JenkinsCommonFuncs.groovy b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
index ea4eccd..fd7253b 100644
--- a/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
+++ b/TestON/JenkinsFile/JenkinsCommonFuncs.groovy
@@ -46,8 +46,7 @@
   print testMachine
 }
 def fabricOn( branch ){
-  return branch.reverse().take(4).reverse() == "1.13" ? '2' : '3'
-// Temp Fix  return branch.reverse().take(6).reverse() == "master" ? '2' : '3'
+  return branch.reverse().take(4).reverse() == "1.12" ? '3' : '2'
 }
 def printType(){
   echo testType;
diff --git a/TestON/JenkinsFile/JenkinsfileTrigger b/TestON/JenkinsFile/JenkinsfileTrigger
index 336e237..d744a67 100644
--- a/TestON/JenkinsFile/JenkinsfileTrigger
+++ b/TestON/JenkinsFile/JenkinsfileTrigger
@@ -1,394 +1,32 @@
 #!groovy
-
 funcs = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsCommonFuncs.groovy' )
-test_lists = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsTestONTests.groovy' )
-triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/TriggerFuncs.groovy' )
 
-current_version = "master"
-previous_version = "1.13"
-before_previous_version = "1.12"
-funcs.initializeTrend( "VM" );
-triggerFuncs.init( funcs )
-wikiContents = ""
-testcases = [
-    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
-    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
-    "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
-    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
-]
-Prefix_organizer = [
-    "FU" : "FUNC",
-    "HA" : "HA",
-    "PL" : "USECASE",
-    "SA" : "USECASE",
-    "SC" : "SCPF",
-    "SR" : "SR",
-    "US" : "USECASE",
-    "VP" : "USECASE"
-]
-
-manually_run = params.manual_run
-onos_b = current_version
-test_branch = ""
-onos_tag = params.ONOSTag
-isOldFlow = true
-
-// Set tests based on day of week
 def now = funcs.getCurrentTime()
 print now.toString()
 today = now[ Calendar.DAY_OF_WEEK ]
-
-if ( manually_run ){
-    onos_b = params.ONOSVersion
-} else {
-    if ( today == Calendar.SATURDAY ){
-        onos_b = previous_version
-    } else if( today == Calendar.SUNDAY ){
-        onos_b = before_previous_version
-    }
-}
-AllTheTests = test_lists.getAllTheTests( onos_b )
-
-day = ""
-SCPF_choices = ""
-USECASE_choices = ""
-FUNC_choices = ""
-HA_choices = ""
-SR_choices = ""
-stat_graph_generator_file = "testCategoryBuildStats.R"
-pie_graph_generator_file = "testCategoryPiePassFail.R"
-graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
-
-post_result = params.PostResult
+machines = params.machines
+manually_run = params.manual_run
 if( !manually_run ){
     slackSend( color:'#03CD9F',
                message:":sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:\n"
                         + "Starting tests on : " + now.toString()
                         + "\n:sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles::sparkles:" )
-    testDivider( today )
-    FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
-    HA_choices =  triggerFuncs.lastCommaRemover( HA_choices )
-    SCPF_choices =  triggerFuncs.lastCommaRemover( SCPF_choices )
-    USECASE_choices =  triggerFuncs.lastCommaRemover( USECASE_choices )
-    SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
 }
 
-if ( manually_run ){
-    testcases = triggerFuncs.organize_tests( params.Tests, testcases )
+machineList = machines.tokenize( "\n;, " )
+machineOn = [:]
 
-    isOldFlow = params.isOldFlow
-    println "Tests to be run manually : "
-}else{
-    testcases[ "SCPF" ][ "tests" ] = SCPF_choices
-    testcases[ "USECASE" ][ "tests" ] = USECASE_choices
-    testcases[ "FUNC" ][ "tests" ] = FUNC_choices
-    testcases[ "HA" ][ "tests" ] = HA_choices
-    testcases[ "SR" ][ "tests" ] = SR_choices
-    println "Defaulting to " + day + " tests:"
+for (machine in machineList){
+    print( machine )
+    machineOn[ machine ] = triggerJob( machine )
 }
 
-triggerFuncs.print_tests( testcases )
+parallel machineOn
 
-def runTest = [
-    "VM" : [:],
-    "BM" : [:]
-]
-for( String test in testcases.keySet() ){
-    println test
-    if ( testcases[ test ][ "tests" ] != "" ){
-        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
+
+def triggerJob( on ){
+    return{
+        jobToRun = on + "-pipeline-trigger"
+        build job: jobToRun, propagate: false
     }
 }
-def finalList = [:]
-finalList[ "VM" ] = triggerFuncs.runTestSeq( runTest[ "VM" ] )
-finalList[ "BM" ] = triggerFuncs.runTestSeq( runTest[ "BM" ] )
-parallel finalList
-//finalList[ "BM" ].call()
-
-if ( !manually_run ){
-    funcs.generateStatGraph( "TestStation-VMs",
-                             funcs.branchWithPrefix( onos_b ),
-                             AllTheTests,
-                             stat_graph_generator_file,
-                             pie_graph_generator_file,
-                             graph_saved_directory )
-}
-
-def testDivider( today ){
-    switch ( today ) {
-        case Calendar.MONDAY:
-            initHtmlForWiki()
-            monday( true )
-            tuesday( true, false )
-            wednesday( true, false )
-            thursday( true, false )
-            friday( true, false )
-            saturday( false, false )
-            sunday( false, false )
-            day = "Monday"
-            closeHtmlForWiki()
-            postToWiki( wikiContents )
-            slackSend( color:'#FFD988', message:"Tests to be run this weekdays : \n" + triggerFuncs.printDaysForTest( AllTheTests ) )
-            break
-        case Calendar.TUESDAY:
-            tuesday( false, true )
-            day = "Tuesday"
-            break
-        case Calendar.WEDNESDAY:
-            wednesday( false, true )
-            day = "Wednesday"
-            break
-        case Calendar.THURSDAY:
-            thursday( false, true )
-            day = "Thursday"
-            break
-        case Calendar.FRIDAY:
-            friday( false, true )
-            day = "Friday"
-            break
-        case Calendar.SATURDAY:
-            saturday( false, true )
-            day = "Saturday"
-            break
-        case Calendar.SUNDAY:
-            sunday( false , true )
-            day = "Sunday"
-            break
-    }
-}
-def monday( getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", true, "M", getResult )
-    HA_choices += adder( "HA", "extra_A", true, "M", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", true, "M", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def tuesday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "T", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "T", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
-    USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
-    closingHeader( "USECASE" )
-}
-def wednesday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "W", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "W", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def thursday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "Th", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def friday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "F", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "F", false )
-    SR_choices += adder( "SR", "extra_A", getDay, "F", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    closingHeader( "USECASE" )
-}
-def saturday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "Sa", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Sa", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "Sa", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "Sa", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "Sa", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "Sa", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "Sa", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "Sa", false )
-    SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
-    closingHeader( "USECASE" )
-}
-def sunday( getDay, getResult ){
-    addingHeader( "FUNC" )
-    FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
-    FUNC_choices += adder( "FUNC", "extra_A", getDay, "S", getResult )
-    FUNC_choices += adder( "FUNC", "extra_B", getDay, "S", getResult )
-    closingHeader( "FUNC" )
-    addingHeader( "HA" )
-    HA_choices += adder( "HA", "basic", getDay, "S", getResult )
-    HA_choices += adder( "HA", "extra_A", getDay, "S", getResult )
-    HA_choices += adder( "HA", "extra_B", getDay, "S", getResult )
-    closingHeader( "HA" )
-    addingHeader( "SCPF" )
-    SCPF_choices += adder( "SCPF", "basic", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_A", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_B", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_C", getDay, "S", getResult )
-    SCPF_choices += adder( "SCPF", "extra_D", getDay, "S", getResult )
-    closingHeader( "SCPF" )
-    addingHeader( "SR" )
-    SR_choices += adder( "SR", "basic", getDay, "S", false )
-    closingHeader( "SR" )
-    addingHeader( "USECASE" )
-    USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
-    closingHeader( "USECASE" )
-}
-def adder( testCat, set, dayAdding, day, getResult ){
-    result = ""
-    for( String test in AllTheTests[ testCat ].keySet() ){
-        if( AllTheTests[ testCat ][ test ][ set ] ){
-            if( getResult )
-                result += test + ","
-            if( dayAdding )
-                dayAdder( testCat, test, day )
-            makeHtmlColList( testCat, test )
-        }
-    }
-    return result
-}
-def initHtmlForWiki(){
-    wikiContents = '''
-    <table class="wrapped confluenceTable">
-        <colgroup>
-              <col />
-              <col />
-              <col />
-              <col />
-              <col />
-              <col />
-        </colgroup>
-        <tbody>
-            <tr>
-                <th colspan="1" class="confluenceTh">
-                    <br />
-                </th>
-                <th class="confluenceTh"><p>Monday</p></th>
-                <th class="confluenceTh"><p>Tuesday</p></th>
-                <th class="confluenceTh"><p>Wednesday</p></th>
-                <th class="confluenceTh"><p>Thursday</p></th>
-                <th class="confluenceTh"><p>Friday</p></th>
-                <th class="confluenceTh"><p>Saturday</p></th>
-                <th class="confluenceTh"><p>Sunday</p></th>
-            </tr>'''
-    for( String test in testcases.keySet() ){
-        testcases[ test ][ 'wikiContent' ] = '''
-            <tr>
-                <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
-    }
-}
-def addingHeader( testCategory ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                <td class="confluenceTd">
-                    <ul>'''
-}
-def makeHtmlColList( testCategory, testName ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                        <li>'''+ testName +'''</li>'''
-
-}
-def closingHeader( testCategory ){
-    testcases[ testCategory ][ 'wikiContent' ] += '''
-                    </ul>
-                </td>'''
-}
-def closeHtmlForWiki(){
-    for( String test in testcases.keySet() ){
-        wikiContents += testcases[ test ][ 'wikiContent' ]
-        wikiContents += '''
-            </tr>'''
-    }
-    wikiContents += '''
-        </tbody>
-    </table>
-    <p><strong>Everyday</strong>, all SegmentRouting tests are built and run on every supported branch.</p>
-    <p>On <strong>Weekdays</strong>, all the other tests are built and run on the master branch.</p>
-    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
-    <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
-}
-def postToWiki( contents ){
-    node( testMachine ){
-        workspace = "/var/jenkins/workspace/all-pipeline-trigger/"
-        filename = "jenkinsSchedule.txt"
-        writeFile file: workspace + filename, text: contents
-        funcs.publishToConfluence( "false", "true",
-                                   "Automated Test Schedule",
-                                   workspace + filename )
-    }
-}
-def dayAdder( testCat, testName, dayOfWeek ){
-    AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
-}
diff --git a/TestON/JenkinsFile/VM_BMJenkinsfileTrigger b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
new file mode 100644
index 0000000..3d03048
--- /dev/null
+++ b/TestON/JenkinsFile/VM_BMJenkinsfileTrigger
@@ -0,0 +1,394 @@
+#!groovy
+
+funcs = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsCommonFuncs.groovy' )
+test_lists = evaluate readTrusted( 'TestON/JenkinsFile/JenkinsTestONTests.groovy' )
+triggerFuncs = evaluate readTrusted( 'TestON/JenkinsFile/TriggerFuncs.groovy' )
+
+current_version = "master"
+previous_version = "1.13"
+before_previous_version = "1.12"
+funcs.initializeTrend( "VM" );
+triggerFuncs.init( funcs )
+wikiContents = ""
+testcases = [
+    "FUNC" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
+    "HA" : [ tests : "" , nodeName : "VM", wikiContent : "" ],
+    "SCPF" : [ tests : "" , nodeName : "BM", wikiContent : "" ],
+    "SR" : [ tests : "", nodeName : "Fabric", wikiContent : "" ],
+    "USECASE" : [ tests : "" , nodeName : "BM", wikiContent : "" ]
+]
+Prefix_organizer = [
+    "FU" : "FUNC",
+    "HA" : "HA",
+    "PL" : "USECASE",
+    "SA" : "USECASE",
+    "SC" : "SCPF",
+    "SR" : "SR",
+    "US" : "USECASE",
+    "VP" : "USECASE"
+]
+
+manually_run = params.manual_run
+onos_b = current_version
+test_branch = ""
+onos_tag = params.ONOSTag
+isOldFlow = true
+
+// Set tests based on day of week
+def now = funcs.getCurrentTime()
+print now.toString()
+today = now[ Calendar.DAY_OF_WEEK ]
+
+if ( manually_run ){
+    onos_b = params.ONOSVersion
+} else {
+    if ( today == Calendar.SATURDAY ){
+        onos_b = previous_version
+    } else if( today == Calendar.SUNDAY ){
+        onos_b = before_previous_version
+    }
+}
+AllTheTests = test_lists.getAllTheTests( onos_b )
+
+day = ""
+SCPF_choices = ""
+USECASE_choices = ""
+FUNC_choices = ""
+HA_choices = ""
+SR_choices = ""
+stat_graph_generator_file = "testCategoryBuildStats.R"
+pie_graph_generator_file = "testCategoryPiePassFail.R"
+graph_saved_directory = "/var/jenkins/workspace/postjob-VM/"
+
+post_result = params.PostResult
+if( !manually_run ){
+    testDivider( today )
+    FUNC_choices =  triggerFuncs.lastCommaRemover( FUNC_choices )
+    HA_choices =  triggerFuncs.lastCommaRemover( HA_choices )
+    SCPF_choices =  triggerFuncs.lastCommaRemover( SCPF_choices )
+    USECASE_choices =  triggerFuncs.lastCommaRemover( USECASE_choices )
+    SR_choices =  triggerFuncs.lastCommaRemover( SR_choices )
+}
+
+if ( manually_run ){
+    testcases = triggerFuncs.organize_tests( params.Tests, testcases )
+
+    isOldFlow = params.isOldFlow
+    println "Tests to be run manually : "
+}else{
+    testcases[ "SCPF" ][ "tests" ] = SCPF_choices
+    testcases[ "USECASE" ][ "tests" ] = USECASE_choices
+    testcases[ "FUNC" ][ "tests" ] = FUNC_choices
+    testcases[ "HA" ][ "tests" ] = HA_choices
+    testcases[ "SR" ][ "tests" ] = SR_choices
+    println "Defaulting to " + day + " tests:"
+}
+
+triggerFuncs.print_tests( testcases )
+
+def runTest = [
+    "VM" : [:],
+    "BM" : [:]
+]
+for( String test in testcases.keySet() ){
+    println test
+    if ( testcases[ test ][ "tests" ] != "" ){
+        runTest[ testcases[ test ][ "nodeName" ] ][ test ] = triggerFuncs.trigger_pipeline( onos_b, testcases[ test ][ "tests" ], testcases[ test ][ "nodeName" ], test, manually_run, onos_tag )
+    }
+}
+def finalList = [:]
+jobName = env.JOB_NAME
+finalList[ "VM" ] = triggerFuncs.runTestSeq( runTest[ "VM" ] )
+finalList[ "BM" ] = triggerFuncs.runTestSeq( runTest[ "BM" ] )
+//parallel finalList
+if( jobName.take( 2 ) == "vm" ) // NOTE(review): case-sensitive prefix match on env.JOB_NAME — any job not starting with "vm" falls through to the BM sequence
+    finalList[ "VM" ].call()
+else
+    finalList[ "BM" ].call()
+
+if ( !manually_run ){
+    funcs.generateStatGraph( "TestStation-VMs", // NOTE(review): station is hard-coded to the VM machine even when this trigger ran the BM sequence — confirm intended
+                             funcs.branchWithPrefix( onos_b ),
+                             AllTheTests,
+                             stat_graph_generator_file,
+                             pie_graph_generator_file,
+                             graph_saved_directory )
+}
+
+def testDivider( today ){
+    switch ( today ) {
+        case Calendar.MONDAY:
+            initHtmlForWiki()
+            monday( true )
+            tuesday( true, false )
+            wednesday( true, false )
+            thursday( true, false )
+            friday( true, false )
+            saturday( false, false )
+            sunday( false, false )
+            day = "Monday"
+            closeHtmlForWiki()
+            postToWiki( wikiContents )
+            slackSend( color:'#FFD988', message:"Tests to be run this weekdays : \n" + triggerFuncs.printDaysForTest( AllTheTests ) )
+            break
+        case Calendar.TUESDAY:
+            tuesday( false, true )
+            day = "Tuesday"
+            break
+        case Calendar.WEDNESDAY:
+            wednesday( false, true )
+            day = "Wednesday"
+            break
+        case Calendar.THURSDAY:
+            thursday( false, true )
+            day = "Thursday"
+            break
+        case Calendar.FRIDAY:
+            friday( false, true )
+            day = "Friday"
+            break
+        case Calendar.SATURDAY:
+            saturday( false, true )
+            day = "Saturday"
+            break
+        case Calendar.SUNDAY:
+            sunday( false , true )
+            day = "Sunday"
+            break
+    }
+}
+def monday( getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", true, "M", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", true, "M", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", true, "M", getResult )
+    HA_choices += adder( "HA", "extra_A", true, "M", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", true, "M", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", true, "M", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", true, "M", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def tuesday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "T", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "T", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "T", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "T", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "T", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "T", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "T", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "T", getResult )
+    USECASE_choices += adder( "USECASE", "extra_A", getDay, "T", getResult )
+    closingHeader( "USECASE" )
+}
+def wednesday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "W", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "W", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "W", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "W", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "W", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "W", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "W", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def thursday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "Th", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Th", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "Th", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "Th", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "Th", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Th", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "Th", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def friday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "F", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "F", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "F", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "F", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "F", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "F", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "F", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "F", false )
+    SR_choices += adder( "SR", "extra_A", getDay, "F", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    closingHeader( "USECASE" )
+}
+def saturday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "Sa", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "Sa", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "Sa", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "Sa", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "Sa", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "Sa", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "Sa", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "Sa", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "Sa", false )
+    SR_choices += adder( "SR", "extra_B", getDay, "Sa", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "Sa", getResult )
+    closingHeader( "USECASE" )
+}
+def sunday( getDay, getResult ){
+    addingHeader( "FUNC" )
+    FUNC_choices += adder( "FUNC", "basic", getDay, "S", getResult )
+    FUNC_choices += adder( "FUNC", "extra_A", getDay, "S", getResult )
+    FUNC_choices += adder( "FUNC", "extra_B", getDay, "S", getResult )
+    closingHeader( "FUNC" )
+    addingHeader( "HA" )
+    HA_choices += adder( "HA", "basic", getDay, "S", getResult )
+    HA_choices += adder( "HA", "extra_A", getDay, "S", getResult )
+    HA_choices += adder( "HA", "extra_B", getDay, "S", getResult )
+    closingHeader( "HA" )
+    addingHeader( "SCPF" )
+    SCPF_choices += adder( "SCPF", "basic", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_A", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_B", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_C", getDay, "S", getResult )
+    SCPF_choices += adder( "SCPF", "extra_D", getDay, "S", getResult )
+    closingHeader( "SCPF" )
+    addingHeader( "SR" )
+    SR_choices += adder( "SR", "basic", getDay, "S", false )
+    closingHeader( "SR" )
+    addingHeader( "USECASE" )
+    USECASE_choices += adder( "USECASE", "basic", getDay, "S", getResult )
+    closingHeader( "USECASE" )
+}
+def adder( testCat, set, dayAdding, day, getResult ){
+    result = ""
+    for( String test in AllTheTests[ testCat ].keySet() ){
+        if( AllTheTests[ testCat ][ test ][ set ] ){
+            if( getResult )
+                result += test + ","
+            if( dayAdding )
+                dayAdder( testCat, test, day )
+            makeHtmlColList( testCat, test )
+        }
+    }
+    return result
+}
+def initHtmlForWiki(){
+    wikiContents = '''
+    <table class="wrapped confluenceTable">
+        <colgroup>
+              <col />
+              <col />
+              <col />
+              <col />
+              <col />
+              <col />
+        </colgroup>
+        <tbody>
+            <tr>
+                <th colspan="1" class="confluenceTh">
+                    <br />
+                </th>
+                <th class="confluenceTh"><p>Monday</p></th>
+                <th class="confluenceTh"><p>Tuesday</p></th>
+                <th class="confluenceTh"><p>Wednesday</p></th>
+                <th class="confluenceTh"><p>Thursday</p></th>
+                <th class="confluenceTh"><p>Friday</p></th>
+                <th class="confluenceTh"><p>Saturday</p></th>
+                <th class="confluenceTh"><p>Sunday</p></th>
+            </tr>'''
+    for( String test in testcases.keySet() ){
+        testcases[ test ][ 'wikiContent' ] = '''
+            <tr>
+                <th colspan="1" class="confluenceTh">''' + test + '''</th>'''
+    }
+}
+def addingHeader( testCategory ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                <td class="confluenceTd">
+                    <ul>'''
+}
+def makeHtmlColList( testCategory, testName ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                        <li>'''+ testName +'''</li>'''
+
+}
+def closingHeader( testCategory ){
+    testcases[ testCategory ][ 'wikiContent' ] += '''
+                    </ul>
+                </td>'''
+}
+def closeHtmlForWiki(){
+    for( String test in testcases.keySet() ){
+        wikiContents += testcases[ test ][ 'wikiContent' ]
+        wikiContents += '''
+            </tr>'''
+    }
+    wikiContents += '''
+        </tbody>
+    </table>
+    <p><strong>Everyday</strong>, all SegmentRouting tests are built and run on every supported branch.</p>
+    <p>On <strong>Weekdays</strong>, all the other tests are built and run on the master branch.</p>
+    <p>On <strong>Saturdays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( previous_version ) +''' branch.</p>
+    <p>On <strong>Sundays</strong>, all the other tests are built and run on the '''+ funcs.branchWithPrefix( before_previous_version ) +''' branch.</p>'''
+}
+def postToWiki( contents ){
+    node( testMachine ){
+        workspace = "/var/jenkins/workspace/all-pipeline-trigger/"
+        filename = "jenkinsSchedule.txt"
+        writeFile file: workspace + filename, text: contents
+        funcs.publishToConfluence( "false", "true",
+                                   "Automated Test Schedule",
+                                   workspace + filename )
+    }
+}
+def dayAdder( testCat, testName, dayOfWeek ){
+    AllTheTests[ testCat ][ testName ][ "day" ] += dayOfWeek + ","
+}
diff --git a/TestON/bin/findPatchScript.sh b/TestON/bin/findPatchScript.sh
new file mode 100755
index 0000000..f88f528
--- /dev/null
+++ b/TestON/bin/findPatchScript.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+
+# Copyright 2015 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+#     TestON is free software: you can redistribute it and/or modify
+#     it under the terms of the GNU General Public License as published by
+#     the Free Software Foundation, either version 2 of the License, or
+#     (at your option) any later version.
+#
+#     TestON is distributed in the hope that it will be useful,
+#     but WITHOUT ANY WARRANTY; without even the implied warranty of
+#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#     GNU General Public License for more details.
+#
+#     You should have received a copy of the GNU General Public License
+#     along with TestON.  If not, see <http://www.gnu.org/licenses/>.
+#
+#    Usage : ./findPatchScript.sh -t <testName> (optional choices : -n <# : number of runs on each commit>
+#                                                                   -s <# : number of commits to skip every iteration> )
+
+#source $ONOS_ROOT/tools/dev/bash_profile
+
+#!/bin/bash
+usage() { echo "Usage:$0 [-t <test_name>] Optional:{ -n <number_of_running_test_on_each_commit>
+                                                       -s <number_of_skipping_commit> }"; 1>&2; exit 1;  }
+
+NUM_RUN=1
+SKIP_COMMIT=1
+LOG_FILE="/home/sdn/OnosSystemTest/TestON/logs/findCommitLog.txt"
+while getopts ":t:n:s:" ARGS; do
+    case $ARGS in
+        t)
+            TESTNAME=${OPTARG}
+            ;;
+        n)
+            NUM_RUN=${OPTARG}
+            ((NUM_RUN > 0)) || usage
+            ;;
+        s)
+            SKIP_COMMIT=${OPTARG}
+            ((SKIP_COMMIT > 0)) || usage
+            ;;
+        *)
+            usage
+            ;;
+    esac
+done
+
+if [ -z "${TESTNAME}" ]; then
+    usage
+fi
+
+exportMsg() {
+    echo "Log exported to $LOG_FILE"
+}
+runScript() {
+    echo -n > "$LOG_FILE"
+    PREVIOUS_COMMIT=""
+    while true; do
+        TEST_RESULT="1"
+        for ((i=0; i < NUM_RUN; i++))
+        do
+            cd ~/onos
+            COMMIT=$(git log -1 --pretty=fuller | grep -m1 -Po "(?<=commit\s)\w+")
+            echo "Current Commit : $COMMIT"
+            echo "Current Commit : $COMMIT" >> "$LOG_FILE"
+            echo "1" > /tmp/findPatchResult.txt
+            cd ~/OnosSystemTest/TestON/bin
+            ./cleanup.sh
+            ./cli.py run $TESTNAME
+            TEST_RESULT=$(cat /tmp/findPatchResult.txt)
+            echo $TEST_RESULT
+            if [ "$TEST_RESULT" == "0" ]; then
+                break
+            fi
+        done
+        if [ "$TEST_RESULT" == "1" ]; then
+            echo "Found the commit that has no problem : $(tput setaf 2)$COMMIT$(tput sgr 0)"
+            echo "Found the commit that has no problem : $COMMIT" >> $LOG_FILE
+            echo "Last commit that had a problem : $(tput setaf 1)$PREVIOUS_COMMIT$(tput sgr 0)"
+            echo "Last commit that had a problem : $PREVIOUS_COMMIT" >> $LOG_FILE
+            break
+        fi
+
+        cd ~/onos
+        COMMIT=$(git log -1 --skip $SKIP_COMMIT --pretty=fuller | grep -m1 -Po "(?<=commit\s)\w+")
+        echo "New commit to be tested : $COMMIT"
+        echo "New commit to be tested : $COMMIT" >> $LOG_FILE
+        PREVIOUS_COMMIT=$COMMIT
+        STASH_RESULT=$(git stash)
+        git checkout $COMMIT
+        if [ "$STASH_RESULT" != "No local changes to save" ]; then
+            git stash pop
+        fi
+    done
+}
+
+runScript
+echo >> $LOG_FILE
+echo >> $LOG_FILE
+exportMsg
diff --git a/TestON/core/teston.py b/TestON/core/teston.py
index 19449f4..978c206 100644
--- a/TestON/core/teston.py
+++ b/TestON/core/teston.py
@@ -474,6 +474,26 @@
         for driver in self.driversList:
             vars( self )[ driver ].write( stepHeader + "\n" + caseFooter )
 
+    def setCheckingPoint( self ):
+        '''
+        Used when running findPatchScript.sh. This function needs to be placed
+        at the point in the code where the problem occurs.
+
+        For example, if you see unusual behavior in some portion of the code,
+        this is where you should call it, guarded by a conditional statement.
+        If a latency result is much greater than usual, add an if statement
+        that checks whether the result exceeds some threshold and then calls this function.
+
+        This will write 0 to findPatchResult.txt in /tmp/ and exit the test.
+        Then findPatchScript will move on to the next commit and re-run the
+        test.
+        '''
+        self.log.error( "Reached to the checking point. Will mark the result and exit the test" )
+        resultFile = open( "/tmp/findPatchResult.txt", "w" )
+        resultFile.write( "0" )
+        resultFile.close()
+        self.cleanAndExit()
+
     def cleanup( self ):
         '''
         Print a summary of the current test's results then attempt to release
diff --git a/TestON/drivers/common/cli/onosclidriver.py b/TestON/drivers/common/cli/onosclidriver.py
index d806da6..439ba80 100755
--- a/TestON/drivers/common/cli/onosclidriver.py
+++ b/TestON/drivers/common/cli/onosclidriver.py
@@ -1173,7 +1173,7 @@
             for host in hosts:
                 if hostIp in host[ "ipAddresses" ]:
                     targetHost = host
-            assert host, "Not able to find host with IP {}".format( hostIp )
+            assert targetHost, "Not able to find host with IP {}".format( hostIp )
             result = main.TRUE
             locationsDiscovered = [ loc[ "elementId" ] + "/" + loc[ "port" ] for loc in targetHost[ "locations" ] ]
             for loc in locations:
diff --git a/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py b/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
index f3e686f..2ae8b62 100644
--- a/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
+++ b/TestON/tests/SCPF/SCPFhostLat/SCPFhostLat.py
@@ -131,7 +131,10 @@
 
         time.sleep( main.startUpSleep )
         main.step( 'Starting mininet topology' )
-        mnStatus = main.Mininet1.startNet( args='--topo=linear,1' )
+        mnCmd = '--topo=linear,1 '
+        for ctrl in main.Cluster.active():
+            mnCmd += " --controller remote,ip=" + ctrl.ipAddress
+        mnStatus = main.Mininet1.startNet( args=mnCmd )
         utilities.assert_equals( expect=main.TRUE,
                                  actual=mnStatus,
                                  onpass="Successfully started Mininet",
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py b/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
index 144b4cf..866b273 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/SRMulticast.py
@@ -307,7 +307,10 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=202, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf2", 10, { "ipv4": False, "ipv6": False }, [ "h4v4" ] )
+        verifySwitchDown( main, "leaf2", 10,
+                          { "ipv4": False, "ipv6": False },
+                          [ "h4v4" ],
+                          { "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -327,7 +330,9 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=203, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf5", 10, hostsToDiscover=[ "h10v4" ] )
+        verifySwitchDown( main, "leaf5", 10,
+                          hostsToDiscover=[ "h10v4" ],
+                          hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -347,7 +352,11 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=204, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, "leaf4", 10, { "ipv4": [ True, False, True ], "ipv6": True }, [ "h8v4", "h10v4" ] )
+        verifySwitchDown( main, "leaf4", 10,
+                          { "ipv4": [ True, False, True ], "ipv6": True },
+                          [ "h8v4", "h10v4" ],
+                          { "h8v4": ["of:0000000000000004/9"],
+                            "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -367,7 +376,13 @@
         main.mcastRoutes = { "ipv4": { "src": [ 0 ], "dst": [ 0, 1, 2 ] }, "ipv6": { "src": [ 0 ], "dst": [ 0 ] } }
         setupTest( main, test_idx=205, onosNodes=3 )
         verifyMcastRoutes( main )
-        verifySwitchDown( main, [ "leaf1", "leaf3", "leaf4", "leaf5" ], 32, { "ipv4": [ True, False, False ], "ipv6": False }, [ "h4v4", "h8v4", "h10v4", "h1v6"] )
+        verifySwitchDown( main, [ "leaf1", "leaf3", "leaf4", "leaf5" ], 32,
+                          { "ipv4": [ True, False, False ], "ipv6": False },
+                          [ "h4v4", "h8v4", "h10v4", "h1v6"],
+                          { "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"],
+                            "h8v4": ["of:0000000000000004/9"],
+                            "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"],
+                            "h1v6": ["of:0000000000000001/3"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
@@ -417,7 +432,9 @@
         # Verify killing one link of dual-homed host h10
         verifyPortDown( main, "of:0000000000000004", 11, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
         verifyPortDown( main, "of:0000000000000005", 10, hostsToDiscover=[ "h10v4" ], hostLocations={ "h10v4": ["of:0000000000000004/11", "of:0000000000000005/10"] } )
-        verifySwitchDown( main, "leaf3", 10, hostsToDiscover=[ "h4v4" ] )
+        verifySwitchDown( main, "leaf3", 10,
+                          hostsToDiscover=[ "h4v4" ],
+                          hostLocations={ "h4v4": ["of:0000000000000002/10", "of:0000000000000003/10"] } )
         verifyMcastRemoval( main, removeDHT1=False )
         lib.cleanup( main, copyKarafLog=False )
 
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
index f598f96..be8e4c9 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/SRMulticastTest.py
@@ -42,6 +42,7 @@
     time.sleep( float( main.params[ "timers" ][ "loadNetcfgSleep" ] ) )
     main.cfgName = "common"
     lib.loadMulticastConfig( main )
+    lib.loadHost( main )
 
     if hasattr( main, "Mininet1" ):
         # Run the test with Mininet
@@ -56,6 +57,9 @@
 
     # Create scapy components
     lib.startScapyHosts( main )
+    # Verify host IP assignment
+    lib.verifyOnosHostIp( main )
+    lib.verifyNetworkHostIp( main )
 
 def verifyMcastRoutes( main ):
     """
@@ -180,7 +184,7 @@
     # Recover the switch(es)
     lib.recoverSwitch( main, switchName, int( main.params[ "TOPO" ][ "switchNum" ] ), int( main.params[ "TOPO" ][ "linkNum" ] ), True if hostsToDiscover else False, hostsToDiscover )
     for host, loc in hostLocations.items():
-        lib.verifyHostLocation( host, loc, retry=5 )
+        lib.verifyHostLocation( main, host, loc, retry=5 )
     for routeName in expectList.keys():
         lib.verifyMulticastTraffic( main, routeName, True )
 
diff --git a/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host
new file mode 100644
index 0000000..c0dd913
--- /dev/null
+++ b/TestON/tests/USECASE/SegmentRouting/SRMulticast/dependencies/host/common.host
@@ -0,0 +1,20 @@
+{
+    "onos":
+    {
+        "00:AA:00:00:00:02/None": "10.2.0.1",
+        "00:AA:00:00:00:03/None": "10.2.30.1",
+        "00:AA:00:00:00:06/None": "10.3.0.1",
+        "00:AA:00:00:00:08/40": "10.3.30.1",
+        "00:BB:00:00:00:01/None": "1000::3fe",
+        "00:BB:00:00:00:02/None": "1002::3fe"
+    },
+    "network":
+    {
+        "h3v4": "10.2.0.1",
+        "h4v4": "10.2.30.1",
+        "h8v4": "10.3.0.1",
+        "h10v4": "10.3.30.1",
+        "h1v6": "1000::3fe",
+        "h3v6": "1002::3fe"
+    }
+}
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
index e699d81..52a05fd 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.params
@@ -36,10 +36,10 @@
     <timers>
         <LinkDiscovery>30</LinkDiscovery>
         <SwitchDiscovery>30</SwitchDiscovery>
-        <OnosDiscovery>30</OnosDiscovery>
+        <OnosDiscovery>45</OnosDiscovery>
         <loadNetcfgSleep>5</loadNetcfgSleep>
         <startMininetSleep>25</startMininetSleep>
-        <dhcpSleep>30</dhcpSleep>
+        <dhcpSleep>60</dhcpSleep>
         <balanceMasterSleep>10</balanceMasterSleep>
     </timers>
 
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
index bf03f06..2952614 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/SRRouting.py
@@ -606,6 +606,7 @@
         main.case( "Drop spine and paired leaf" )
         setupTest( main, test_idx=606, onosNodes=3 )
         verify( main, disconnected=False )
+        # Drop spine101 and leaf-2/3
         lib.killSwitch( main, "spine101", 9, 30 )
         verify( main, disconnected=False )
         lib.killSwitch( main, "leaf2", 8, 24 )
@@ -616,10 +617,182 @@
         lib.recoverSwitch( main, "spine101", 8, 30 )
         verify( main )
         lib.recoverSwitch( main, "leaf3", 9, 38 )
-        lib.recoverSwitch( main, "leaf2", 10, 48, rediscoverHosts=True )
+        lib.recoverSwitch( main, "leaf2", 10, 48, rediscoverHosts=True,
+                           hostsToDiscover=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
         main.disconnectedIpv4Hosts = []
         main.disconnectedIpv6Hosts = []
+        verify( main, disconnected=False )
+        # Drop spine102 and leaf-4/5
+        lib.killSwitch( main, "spine102", 9, 30 )
+        verify( main, disconnected=False )
+        lib.killSwitch( main, "leaf4", 8, 24 )
+        lib.killSwitch( main, "leaf5", 7, 20 )
+        main.disconnectedIpv4Hosts = [ "h8v4", "h9v4", "h10v4", "h11v4" ]
+        main.disconnectedIpv6Hosts = [ "h8v6", "h9v6", "h10v6", "h11v6" ]
+        verify( main, external=False )
+        lib.recoverSwitch( main, "spine102", 8, 30 )
+        verify( main, external=False )
+        lib.recoverSwitch( main, "leaf5", 9, 38 )
+        lib.recoverSwitch( main, "leaf4", 10, 48, rediscoverHosts=True,
+                           hostsToDiscover=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
+        main.disconnectedIpv4Hosts = []
+        main.disconnectedIpv6Hosts = []
+        verify( main, disconnected=False )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE620( self, main ):
+        """
+        Take down one of double links towards the spine from all leaf switches and
+        check that buckets in select groups change accordingly
+        Bring up links again and check that buckets in select groups change accordingly
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Take down one of double links towards the spine" )
+        setupTest( main, test_idx=620, onosNodes=3 )
+        verify( main, disconnected=False )
+        portsToDisable = [ [ "of:0000000000000002", 1 ], [ "of:0000000000000002", 3 ],
+                           [ "of:0000000000000003", 1 ], [ "of:0000000000000003", 3 ],
+                           [ "of:0000000000000004", 1 ], [ "of:0000000000000004", 3 ],
+                           [ "of:0000000000000005", 1 ], [ "of:0000000000000005", 3 ] ]
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+        # TODO: check buckets in groups
+        verify( main, disconnected=False )
+        for dpid, port in portsToDisable:
+            main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+        # TODO: check buckets in groups
+        verify( main, disconnected=False )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE621( self, main ):
+        """
+        Remove all the links in the network and restore all Links (repeat x3)
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        main.case( "Remove all the links in the network and restore all Links" )
+        setupTest( main, test_idx=621, onosNodes=3 )
+        verify( main, disconnected=False )
+        linksToRemove = [ ["spine101", "spine103"], ["spine102", "spine104"],
+                          ["spine103", "leaf6"], ["spine103", "leaf1"],
+                          ["spine104", "leaf6"], ["spine104", "leaf1"],
+                          ["spine101", "leaf2"], ["spine101", "leaf3"], ["spine101", "leaf4"], ["spine101", "leaf5"],
+                          ["spine102", "leaf2"], ["spine102", "leaf3"], ["spine102", "leaf4"], ["spine102", "leaf5"],
+                          ["leaf2", "leaf3"], ["leaf4", "leaf5"] ]
+        portsToDisable = [ [ "of:0000000000000001", 3 ], [ "of:0000000000000001", 4 ],
+                           [ "of:0000000000000001", 5 ], [ "of:0000000000000001", 6 ],
+                           [ "of:0000000000000002", 6 ], [ "of:0000000000000002", 7 ],
+                           [ "of:0000000000000002", 8 ], [ "of:0000000000000002", 9 ],
+                           [ "of:0000000000000002", 10 ], [ "of:0000000000000002", 11 ],
+                           [ "of:0000000000000003", 6 ], [ "of:0000000000000003", 7 ],
+                           [ "of:0000000000000003", 8 ], [ "of:0000000000000003", 9 ],
+                           [ "of:0000000000000003", 10 ], [ "of:0000000000000003", 11 ],
+                           [ "of:0000000000000003", 12 ], [ "of:0000000000000003", 13 ],
+                           [ "of:0000000000000004", 6 ], [ "of:0000000000000004", 7 ],
+                           [ "of:0000000000000004", 8 ], [ "of:0000000000000004", 9 ],
+                           [ "of:0000000000000004", 10 ], [ "of:0000000000000004", 11 ],
+                           [ "of:0000000000000004", 12 ], [ "of:0000000000000004", 13 ], [ "of:0000000000000004", 14 ],
+                           [ "of:0000000000000005", 6 ], [ "of:0000000000000005", 7 ],
+                           [ "of:0000000000000005", 8 ], [ "of:0000000000000005", 9 ],
+                           [ "of:0000000000000005", 10 ], [ "of:0000000000000005", 11 ],
+                           [ "of:0000000000000005", 12 ], [ "of:0000000000000005", 13 ],
+                           [ "of:0000000000000005", 14 ], [ "of:0000000000000005", 15 ],
+                           [ "of:0000000000000006", 3 ], [ "of:0000000000000006", 4 ],
+                           [ "of:0000000000000006", 5 ], [ "of:0000000000000006", 6 ] ]
+        for i in range( 0, 3 ):
+            lib.killLinkBatch( main, linksToRemove, 0, 10 )
+            for dpid, port in portsToDisable:
+                main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="disable" )
+            time.sleep( 10 )
+            main.disconnectedIpv4Hosts = main.internalIpv4Hosts
+            main.disconnectedIpv6Hosts = main.internalIpv6Hosts
+            verify( main )
+            lib.restoreLinkBatch( main, linksToRemove, 48, 10 )
+            for dpid, port in portsToDisable:
+                main.Cluster.active( 0 ).CLI.portstate( dpid=dpid, port=port, state="enable" )
+            time.sleep( 30 )
+            main.Network.discoverHosts( hostList=main.disconnectedIpv4Hosts + main.disconnectedIpv6Hosts )
+            time.sleep( 10 )
+            main.disconnectedIpv4Hosts = []
+            main.disconnectedIpv6Hosts = []
+            verify( main )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE622( self, main ):
+        """
+        Take down all uplinks from a paired leaf switch
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        from core import utilities
+        main.case( "Take down all uplinks from a paired leaf switch" )
+        setupTest( main, test_idx=622, onosNodes=3 )
+        verify( main, disconnected=False )
+        ctrl = main.Cluster.active( 0 )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe",
+                                               [ "of:0000000000000002/7", "of:0000000000000003/6" ] )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe",
+                                               [ "of:0000000000000002/8", "of:0000000000000003/7" ] )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1",
+                                               [ "of:0000000000000002/10", "of:0000000000000003/10" ] )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1",
+                                               [ "of:0000000000000002/11", "of:0000000000000003/11" ] )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        linksToRemove = [ ["spine101", "leaf2"], ["spine102", "leaf2"] ]
+        lib.killLinkBatch( main, linksToRemove, 40, 10 )
+        # TODO: more verifications are required
         verify( main )
+        main.step( "Verify some dual-homed hosts become single-homed" )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe", "of:0000000000000003/6" )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe", "of:0000000000000003/7" )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1", "of:0000000000000003/10" )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1", "of:0000000000000003/11" )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        lib.restoreLinkBatch( main, linksToRemove, 48, 10 )
+        verify( main )
+        main.step( "Verify the hosts changed back to be dual-homed" )
+        result1 = ctrl.CLI.verifyHostLocation( "1003::3fe",
+                                               [ "of:0000000000000002/7", "of:0000000000000003/6" ] )
+        result2 = ctrl.CLI.verifyHostLocation( "1004::3fe",
+                                               [ "of:0000000000000002/8", "of:0000000000000003/7" ] )
+        result3 = ctrl.CLI.verifyHostLocation( "10.2.30.1",
+                                               [ "of:0000000000000002/10", "of:0000000000000003/10" ] )
+        result4 = ctrl.CLI.verifyHostLocation( "10.2.20.1",
+                                               [ "of:0000000000000002/11", "of:0000000000000003/11" ] )
+        utilities.assert_equals( expect=main.TRUE, actual=result1 and result2 and result3 and result4,
+                                 onpass="Host locations are correct",
+                                 onfail="Not all host locations are correct" )
+        lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
+
+    def CASE630( self, main ):
+        """
+        Bring an instance down
+        Drop a device
+        Bring that same instance up again and observe that this specific instance sees that the device is down.
+        """
+        import time
+        from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import *
+        from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
+        from core import utilities
+        main.case( "Bring an instance down and drop a device" )
+        setupTest( main, test_idx=630, onosNodes=3 )
+        onosToKill = 0
+        deviceToDrop = "spine101"
+        lib.killOnos( main, [ onosToKill ], 10, 48, 2 )
+        lib.killSwitch( main, deviceToDrop, 9, 30 )
+        lib.recoverOnos( main, [ onosToKill ], 9, 30, 3 )
+        result = main.Cluster.runningNodes[ onosToKill ].CLI.checkStatus( 9, 30, 3 )
+        utilities.assert_equals( expect=main.TRUE, actual=result,
+                                 onpass="ONOS instance {} sees correct device numbers".format( onosToKill ),
+                                 onfail="ONOS instance {} doesn't see correct device numbers".format( onosToKill ) )
         lib.cleanup( main, copyKarafLog=False, removeHostComponent=True )
 
     def CASE642( self, main ):
diff --git a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
index 5d6f64d..0d7c6f8 100644
--- a/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
+++ b/TestON/tests/USECASE/SegmentRouting/SRRouting/dependencies/SRRoutingTest.py
@@ -113,7 +113,7 @@
     """
     from tests.USECASE.SegmentRouting.dependencies.Testcaselib import Testcaselib as lib
     # Verify connected hosts
-    main.step("Verify reachability of from connected internal hosts to external hosts")
+    main.step("Verify reachability from connected internal hosts to external hosts")
     if ipv4:
         lib.verifyPing( main,
                         [ h for h in main.internalIpv4Hosts if h not in main.disconnectedIpv4Hosts ],