Refactor testName and pureTestName

- Renamed testName to JenkinsLabel
- Renamed pureTestName to TestONTest
- Added arguments field to tests.json for runtime test arguments
- Refactored existing tests that use parameters or where JenkinsLabel !=
  TestONTest
- This should fix a bug in some post build steps as well as clarify some
  variables

Change-Id: I1f653fa072b623fa7e1de01331322dee1a804bae
diff --git a/TestON/JenkinsFile/CommonJenkinsFile.groovy b/TestON/JenkinsFile/CommonJenkinsFile.groovy
index f7e2dcb..348cc77 100644
--- a/TestON/JenkinsFile/CommonJenkinsFile.groovy
+++ b/TestON/JenkinsFile/CommonJenkinsFile.groovy
@@ -137,21 +137,16 @@
 
 def runTests(){
     // run the test sequentially and save the function into the dictionary.
-    for ( String test : testsFromList.keySet() ){
-        toBeRun = testsToRun.keySet().contains( test )
-        stepName = ( toBeRun ? "" : "Not " ) + "Running $test"
-        // pureTestName is what is passed to the cli, here we check to  see if there are any params to pass as well
-        if ( test.contains( "WithFlowObj" ) ){
-            pureTestName = test - "WithFlowObj"
-        } else if ( toBeRun && testsToRun[ test ].keySet().contains( "test" )  ){
-            pureTestName = testsToRun[ test ][ "test" ]
-        } else {
-            pureTestName = test
-        }
-        pipeline[ stepName ] = runTest( test,
+    for ( String JenkinsLabel : testsFromList.keySet() ){
+        toBeRun = testsToRun.keySet().contains( JenkinsLabel )
+        stepName = ( toBeRun ? "" : "Not " ) + "Running $JenkinsLabel"
+
+        TestONTest = testsToRun[ JenkinsLabel ].keySet().contains( "test" ) ? testsToRun[ JenkinsLabel ][ "test" ] : JenkinsLabel
+
+        pipeline[ stepName ] = runTest( JenkinsLabel,
                                         toBeRun,
                                         prop,
-                                        pureTestName,
+                                        TestONTest,
                                         isGraphOnly,
                                         testsToRun,
                                         graphPaths[ "trendIndividual" ],
@@ -162,8 +157,8 @@
     start = getCurrentTime()
 
     // run the tests sequentially.
-    for ( test in pipeline.keySet() ){
-        pipeline[ test ].call()
+    for ( JenkinsLabel in pipeline.keySet() ){
+        pipeline[ JenkinsLabel ].call()
     }
 }
 
@@ -204,18 +199,13 @@
         '''
 }
 
-def runTestCli_py( testName, pureTestName, testCategory ){
+def runTestCli_py( testName, testArguments ){
     // Bash script that will run the test.
-    // testName : name of the test
-    // testCategory : (SR,FUNC ... )
-    flowObjFlag = false
-
-    if ( isSCPF && testName.contains( "WithFlowObj" ) ){
-        flowObjFlag = true
-    }
+    // testName : name of the test in TestON
+    // testArguments : Arguments to be passed to the test framework
 
     command = '''cd ~/OnosSystemTest/TestON/bin
-                 ./cli.py run ''' + pureTestName + ''' --params''' + ( flowObjFlag ? '''TEST/flowObj=True ''' : ''' ''' ) + '''GRAPH/nodeCluster=''' + graphs.getPostjobType( nodeLabel ) + ''' '''
+                 ./cli.py run ''' + testName + ''' --params GRAPH/nodeCluster=''' + graphs.getPostjobType( nodeLabel ) + ''' ''' + testArguments
     echo command
 
     return command
@@ -224,6 +214,7 @@
 }
 
 def concludeRunTest(){
+    // TODO: Add cleanup for if we use docker containers
     return '''cd ~/OnosSystemTest/TestON/bin
               ./cleanup.sh -f || true
               # cleanup config changes
@@ -248,15 +239,15 @@
     return result
 }
 
-def cleanAndCopyFiles( testName ){
+def cleanAndCopyFiles( JenkinsLabel ){
     // clean up some files that were in the folder and copy the new files from the log
-    // testName : name of the test
+    // JenkinsLabel : name of the test in Jenkins
 
     return '''#!/bin/bash -i
         set +e
         echo "ONOS Branch is: ${ONOSBranch}"
         echo "TestON Branch is: ${TestONBranch}"
-        echo "Job name is: "''' + testName + '''
+        echo "Job name is: "''' + JenkinsLabel + '''
         echo "Workspace is: ${WORKSPACE}/"
         echo "Wiki page to post is: ${WikiPrefix}-"
         # remove any leftover files from previous tests
@@ -278,14 +269,14 @@
         cd '''
 }
 
-def fetchLogs( testName ){
+def fetchLogs( JenkinsLabel ){
     // fetch the logs of onos from onos nodes to onos System Test logs
-    // testName: name of the test
+    // JenkinsLabel : name of the test in Jenkins
 
     return '''#!/bin/bash
   set +e
   cd ~/OnosSystemTest/TestON/logs
-  echo "TestON test name is: "''' + testName + '''
+  echo "TestON test name is: "''' + JenkinsLabel + '''
   TestONlogDir=$(ls -t | grep ${TEST_NAME}_  |head -1)
   echo "########################################################################################"
   echo "#####  copying ONOS logs from all nodes to TestON/logs directory: ${TestONlogDir}"
@@ -316,9 +307,8 @@
     }
 }
 
-def postLogs( testName, prefix ){
+def postLogs( prefix ){
     // posting logs of the onos jobs specifically SR tests
-    // testName : name of the test
     // prefix : branch prefix ( master, 2.1, 1.15 ... )
 
     resultURL = ""
@@ -329,23 +319,23 @@
     return resultURL
 }
 
-def analyzeResult( prop, workSpace, pureTestName, testName, resultURL, wikiLink, isSCPF ){
+def analyzeResult( prop, workSpace, TestONTest, JenkinsLabel, resultURL, wikiLink, isSCPF ){
     // analyzing the result of the test and send to slack if any abnormal result is logged.
     // prop : property dictionary
     // workSpace : workSpace where the result file is saved
-    // pureTestName : TestON name of the test
-    // testName : Jenkins name of the test. Example: SCPFflowTPFobj
+    // TestONTest : TestON name of the test
+    // JenkinsLabel : Jenkins name of the test. Example: SCPFflowTPFobj
     // resultURL : url for the logs for SR tests. Will not be posted if it is empty
     // wikiLink : link of the wiki page where the result was posted
     // isSCPF : Check if it is SCPF. If so, it won't post the wiki link.
 
     node( testStation ) {
-        def alarmFile = workSpace + "/" + pureTestName + "Alarm.txt"
+        def alarmFile = workSpace + "/" + TestONTest + "Alarm.txt"
         if ( fileExists( alarmFile ) ) {
             def alarmContents = readFile( alarmFile )
             slackSend( channel: "#jenkins-related",
                        color: "FF0000",
-                       message: "[" + prop[ "ONOSBranch" ] + "]" + testName + " : triggered alarms:\n" +
+                       message: "[" + prop[ "ONOSBranch" ] + "] " + JenkinsLabel + " : triggered alarms:\n" +
                                 alarmContents + "\n" +
                                 "[TestON log] : \n" +
                                 "https://jenkins.onosproject.org/blue/organizations/jenkins/${ env.JOB_NAME }/detail/${ env.JOB_NAME }/${ env.BUILD_NUMBER }/pipeline" +
@@ -364,14 +354,14 @@
     }
 }
 
-def runTest( testName, toBeRun, prop, pureTestName, graphOnly, testCategory, graph_generator_file,
-             graph_saved_directory ){
+def runTest( JenkinsLabel, toBeRun, prop, TestONTest, graphOnly, testCategory,
+             graph_generator_file, graph_saved_directory ){
     // run the test on the machine that contains all the steps : init and run test, copy files, publish result ...
-    // testName : name of the test in Jenkins
+    // JenkinsLabel : name of the test in Jenkins
     // toBeRun : boolean value whether the test will be run or not. If not, it won't be run but shows up with empty
     //           result on pipeline view
     // prop : dictionary property on the machine
-    // pureTestName : Pure name of the test. ( ex. pureTestName of SCPFflowTpFobj will be SCPFflowTp )
+    // TestONTest : Pure name of the test. ( ex. TestONTest of SCPFflowTpFobj will be SCPFflowTp )
     // graphOnly : check if it is generating graph job. If so, it will only generate the generating graph part
     // testCategory : Map for the test suit ( SCPF, SR, FUNC, ... ) which contains information about the tests
     // graph_generator_file : Rscript file with the full path.
@@ -379,10 +369,11 @@
 
     return {
         catchError {
-            stage( testName ) {
+            stage( JenkinsLabel ) {
                 if ( toBeRun ){
-                    def workSpace = "/var/jenkins/workspace/" + testName
+                    def workSpace = "/var/jenkins/workspace/" + JenkinsLabel
                     def fileContents = ""
+                    testArguments = testsToRun[ JenkinsLabel ].keySet().contains( "arguments" ) ? testsToRun[ JenkinsLabel ][ "arguments" ] : ""
                     node( testStation ) {
                         withEnv( [ 'ONOSBranch=' + prop[ "ONOSBranch" ],
                                    'ONOSJAVAOPTS=' + prop[ "ONOSJAVAOPTS" ],
@@ -393,42 +384,42 @@
                             if ( !graphOnly ){
                                 if ( isSCPF ){
                                     // Remove the old database file
-                                    sh SCPFfuncs.cleanupDatabaseFile( testName )
+                                    sh SCPFfuncs.cleanupDatabaseFile( JenkinsLabel )
                                 }
                                 sh script: configureJavaVersion(), label: "Configure Java Version"
                                 sh script: initTest(), label: "Test Initialization: stc shutdown; stc teardown; ./cleanup.sh"
                                 catchError{
-                                    sh script: runTestCli_py( testName, pureTestName, testCategory ), label: ( "Run Test: ./cli.py run " + testName )
+                                    sh script: runTestCli_py( TestONTest, testArguments ), label: ( "Run Test: ./cli.py run " + TestONTest + " " + testArguments )
                                 }
                                 catchError{
                                     sh script: concludeRunTest(), label: "Conclude Running Test: ./cleanup.sh; git clean -df"
                                 }
                                 catchError{
                                     // For the Wiki page
-                                    sh script: cleanAndCopyFiles( pureTestName ), label: "Clean and Copy Files"
+                                    sh script: cleanAndCopyFiles( TestONTest ), label: "Clean and Copy Files"
                                 }
                             }
-                            graphs.databaseAndGraph( prop, testName, pureTestName, graphOnly,
-                                                    graph_generator_file, graph_saved_directory )
+                            graphs.databaseAndGraph( prop, JenkinsLabel, TestONTest, graphOnly,
+                                                     graph_generator_file, graph_saved_directory )
                             if ( !graphOnly ){
-                                sh script: fetchLogs( pureTestName ), label: "Fetch Logs"
+                                sh script: fetchLogs( TestONTest ), label: "Fetch Logs"
                                 if ( !isSCPF ){
                                     publishToConfluence( prop[ "manualRun" ], prop[ "postResult" ],
-                                                         prop[ "WikiPrefix" ] + "-" + testCategory[ testName ][ 'wikiName' ],
-                                                         workSpace + "/" + testCategory[ testName ][ 'wikiFile' ] )
+                                                         prop[ "WikiPrefix" ] + "-" + testCategory[ JenkinsLabel ][ 'wikiName' ],
+                                                         workSpace + "/" + testCategory[ JenkinsLabel ][ 'wikiFile' ] )
                                 }
                             }
                         }
                     }
                     graphs.postResult( prop, graphOnly, nodeLabel )
                     if ( !graphOnly ){
-                        def resultURL = postLogs( testName, prop[ "WikiPrefix" ] )
-                        analyzeResult( prop, workSpace, pureTestName, testName, resultURL,
-                                       isSCPF ? "" : testCategory[ testName ][ 'wikiName' ],
+                        def resultURL = postLogs( prop[ "WikiPrefix" ] )
+                        analyzeResult( prop, workSpace, TestONTest, JenkinsLabel, resultURL,
+                                       isSCPF ? "" : testCategory[ JenkinsLabel ][ 'wikiName' ],
                                        isSCPF )
                     }
                 } else {
-                    echo testName + " is not being run today. Leaving the rest of stage contents blank."
+                    echo JenkinsLabel + " is not being run today. Leaving the rest of stage contents blank."
                 }
             }
         }
diff --git a/TestON/JenkinsFile/dependencies/JenkinsGraphs.groovy b/TestON/JenkinsFile/dependencies/JenkinsGraphs.groovy
index b39c2b4..c2c03f4 100644
--- a/TestON/JenkinsFile/dependencies/JenkinsGraphs.groovy
+++ b/TestON/JenkinsFile/dependencies/JenkinsGraphs.groovy
@@ -76,12 +76,12 @@
     return pass + "|psql --host=" + host + " --port=" + port + " --username=" + user + " --password --dbname onostest -c "
 }
 
-def databaseAndGraph( prop, testName, pureTestName, graphOnly, graph_generator_file, graph_saved_directory ){
+def databaseAndGraph( prop, JenkinsLabel, TestONTest, graphOnly, graph_generator_file, graph_saved_directory ){
     // part where it insert the data into the database.
     // It will use the predefined encrypted variables from the Jenkins.
     // prop : property dictionary that was read from the machine
-    // testName : Jenkins name for the test
-    // pureTestName : TestON name for the test
+    // JenkinsLabel : Jenkins name for the test
+    // TestONTest : TestON name for the test
     // graphOnly : boolean whether it is graph only or not
     // graph_generator_file : Rscript file with the full path.
     // graph_saved_directory : where the generated graph will be saved to.
@@ -93,16 +93,16 @@
                 string( credentialsId: 'db_host', variable: 'host' ),
                 string( credentialsId: 'db_port', variable: 'port' ) ] ) {
             def database_command = database_command_create( pass, host, port, user ) +
-                                   ( !isSCPF ? sqlCommand( testName ) : SCPFfunc.sqlCommand( testName ) )
+                                   ( !isSCPF ? sqlCommand( JenkinsLabel ) : SCPFfunc.sqlCommand( JenkinsLabel ) )
             sh script: '''#!/bin/bash
               export DATE=\$(date +%F_%T)
               cd ~
               pwd ''' + ( graphOnly ? "" :
-                          ( !isSCPF ? databasePart( prop[ "WikiPrefix" ], pureTestName, database_command ) :
-                            SCPFfunc.databasePart( testName, database_command ) ) ), label: "Database"
-            sh script: ( !isSCPF ? graphGenerating( host, port, user, pass, testName, prop, graph_saved_directory,
+                          ( !isSCPF ? databasePart( prop[ "WikiPrefix" ], TestONTest, database_command ) :
+                            SCPFfunc.databasePart( JenkinsLabel, database_command ) ) ), label: "Database"
+            sh script: ( !isSCPF ? graphGenerating( host, port, user, pass, JenkinsLabel, prop, graph_saved_directory,
                                                  graph_generator_file ) :
-                      SCPFfunc.getGraphGeneratingCommand( host, port, user, pass, testName, prop ) ), label: "Generate Test Graph"
+                      SCPFfunc.getGraphGeneratingCommand( host, port, user, pass, JenkinsLabel, prop ) ), label: "Generate Test Graph"
         }
     }
 }
@@ -179,6 +179,8 @@
 
 def sqlCommand( testName ){
     // get the inserting sqlCommand for non-SCPF tests.
+    // testName : the name the the test results are stored under in the db.
+    //            This is usually the same as the Jenkins Test name
     table_name = "executed_test_tests"
     result_name = "executed_test_results"
 
@@ -193,19 +195,19 @@
                                         prop[ "ONOSBranch" ] ) + " 20 " + graph_saved_directory
 }
 
-def databasePart( wikiPrefix, testName, database_command ){
+def databasePart( wikiPrefix, TestONTest, database_command ){
     // to read and insert the data from .csv to the database
 
     return '''
-    sed 1d ''' + workSpace + "/" + wikiPrefix + "-" + testName + '''.csv | while read line
+    sed 1d ''' + workSpace + "/" + wikiPrefix + "-" + TestONTest + '''.csv | while read line
     do
     echo \$line
     echo ''' + database_command + '''
     done '''
 }
 
-def generateStatGraph( testMachineOn, onos_branch, stat_graph_generator_file, pie_graph_generator_file,
-                       graph_saved_directory, nodeLabel ){
+def generateStatGraph( testMachineOn, onos_branch, stat_graph_generator_file,
+                       pie_graph_generator_file, graph_saved_directory, nodeLabel ){
 
     table_name = "executed_test_tests"
     result_name = "executed_test_results"
diff --git a/TestON/JenkinsFile/dependencies/tests.json b/TestON/JenkinsFile/dependencies/tests.json
index a73f72d..215a3e1 100644
--- a/TestON/JenkinsFile/dependencies/tests.json
+++ b/TestON/JenkinsFile/dependencies/tests.json
@@ -516,6 +516,8 @@
         "supportedBranches": [ "all" ]
     },
     "SCPFintentInstallWithdrawLatWithFlowObj": {
+        "test": "SCPFintentInstallWithdrawLat",
+        "arguments": "--params TEST/flowObj=True",
         "schedules": [
             {
                 "branch": "master",
@@ -558,6 +560,8 @@
         "supportedBranches": [ "all" ]
     },
     "SCPFintentRerouteLatWithFlowObj": {
+        "test": "SCPFintentRerouteLat",
+        "arguments": "--params TEST/flowObj=True",
         "schedules": [
             {
                 "branch": "master",
@@ -600,6 +604,8 @@
         "supportedBranches": [ "all" ]
     },
     "SCPFintentEventTpWithFlowObj": {
+        "test": "SCPFintentEventTp",
+        "arguments": "--params TEST/flowObj=True",
         "schedules": [
             {
                 "branch": "master",
@@ -663,6 +669,8 @@
         "supportedBranches": [ "all" ]
     },
     "SCPFflowTp1gWithFlowObj": {
+        "test": "SCPFflowTp1g",
+        "arguments": "--params TEST/flowObj=True",
         "schedules": [
             {
                 "branch": "master",
@@ -711,6 +719,8 @@
         "supportedBranches": [ "all" ]
     },
     "SCPFscalingMaxIntentsWithFlowObj": {
+        "test": "SCPFscalingMaxIntents",
+        "arguments": "--params TEST/flowObj=True",
         "schedules": [
         ],
         "category": "SCPF",
@@ -1056,7 +1066,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAsanity": {
-        "test": "HAsanity --params-file HAsanity.params.fabric",
+        "test": "HAsanity",
+        "arguments": "--params-file HAsanity.params.fabric",
         "wikiName": "SR HA Sanity",
         "wikiFile": "HAsanityWiki.txt",
         "schedules": [
@@ -1080,7 +1091,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAclusterRestart": {
-        "test": "HAclusterRestart --params-file HAclusterRestart.params.fabric",
+        "test": "HAclusterRestart",
+        "arguments": "--params-file HAclusterRestart.params.fabric",
         "wikiName": "SR HA Cluster Restart",
         "schedules": [
             {
@@ -1103,7 +1115,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAsingleInstanceRestart": {
-        "test": "HAsingleInstanceRestart --params-file HAsingleInstanceRestart.params.fabric",
+        "test": "HAsingleInstanceRestart",
+        "arguments": "--params-file HAsingleInstanceRestart.params.fabric",
         "wikiName": "SR HA Single Instance Restart",
         "wikiFile": "HAsingleInstanceRestartWiki.txt",
         "schedules": [
@@ -1127,7 +1140,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAstopNodes": {
-        "test": "HAstopNodes --params-file HAstopNodes.params.fabric",
+        "test": "HAstopNodes",
+        "arguments": "--params-file HAstopNodes.params.fabric",
         "wikiName": "SR HA Stop Nodes",
         "wikiFile": "HAstopNodes.txt",
         "schedules": [
@@ -1151,7 +1165,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAfullNetPartition": {
-        "test": "HAfullNetPartition --params-file HAfullNetPartition.params.fabric",
+        "test": "HAfullNetPartition",
+        "arguments": "--params-file HAfullNetPartition.params.fabric",
         "wikiName": "SR HA Full Network Partition",
         "wikiFile": "HAfullNetPartitionWiki.txt",
         "schedules": [
@@ -1175,7 +1190,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAswapNodes": {
-        "test": "HAswapNodes --params-file HAswapNodes.params.fabric",
+        "test": "HAswapNodes",
+        "arguments": "--params-file HAswapNodes.params.fabric",
         "wikiName": "SR HA Swap Nodes",
         "wikiFile": "HAswapNodesWiki.txt",
         "schedules": [
@@ -1199,7 +1215,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAscaling": {
-        "test": "HAscaling --params-file HAscaling.params.fabric",
+        "test": "HAscaling",
+        "arguments": "--params-file HAscaling.params.fabric",
         "wikiName": "SR HA Scaling",
         "wikiFile": "HAscalingWiki.txt",
         "schedules": [
@@ -1223,7 +1240,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAkillNodes": {
-        "test": "HAkillNodes --params-file HAkillNodes.params.fabric",
+        "test": "HAkillNodes",
+        "arguments": "--params-file HAkillNodes.params.fabric",
         "wikiName": "SR HA Kill Nodes",
         "wikiFile": "HAkillNodes.txt",
         "schedules": [
@@ -1247,7 +1265,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAbackupRecover": {
-        "test": "HAbackupRecover --params-file HAbackupRecover.params.fabric",
+        "test": "HAbackupRecover",
+        "arguments": "--params-file HAbackupRecover.params.fabric",
         "wikiName": "SR HA Backup Recover",
         "wikiFile": "HAbackupRecoverWiki.txt",
         "schedules": [
@@ -1271,7 +1290,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAupgrade": {
-        "test": "HAupgrade --params-file HAupgrade.params.fabric",
+        "test": "HAupgrade",
+        "arguments": "--params-file HAupgrade.params.fabric",
         "wikiName": "SR HA Upgrade",
         "wikiFile": "HAupgradeWiki.txt",
         "schedules": [
@@ -1280,7 +1300,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRHAupgradeRollback": {
-        "test": "HAupgradeRollback --params-file HAupgradeRollback.params.fabric",
+        "test": "HAupgradeRollback",
+        "arguments": "--params-file HAupgradeRollback.params.fabric",
         "wikiName": "SR HA Upgrade Rollback",
         "wikiFile": "HAupgradeRollbackWiki.txt",
         "schedule": [
@@ -1289,7 +1310,8 @@
         "supportedBranches": [ "all" ]
     },
     "SRDhcprelay-stratum": {
-        "test": "SRDhcprelay --params-file SRDhcprelay.params.stratum",
+        "test": "SRDhcprelay",
+        "arguments": "--params-file SRDhcprelay.params.stratum",
         "wikiName": "SR Dhcp Relay with Stratum BMv2 switches",
         "wikiFile": "SRDhcprelayWiki.txt",
         "schedules": [
@@ -1308,7 +1330,8 @@
         "supportedBranches": [ "onos-2.x", "master" ]
     },
     "SRRouting-stratum": {
-        "test": "SRRouting --params-file SRRouting.params.stratum",
+        "test": "SRRouting",
+        "arguments": "--params-file SRRouting.params.stratum",
         "wikiName": "SR Routing with Stratum BMv2 switches",
         "wikiFile": "SRRoutingWiki.txt",
         "schedules": [
@@ -1327,7 +1350,8 @@
         "supportedBranches": [ "onos-2.x", "master" ]
     },
     "SRBridging-stratum": {
-        "test": "SRBridging --params-file SRBridging.params.stratum",
+        "test": "SRBridging",
+        "arguments": "--params-file SRBridging.params.stratum",
         "wikiName": "SR Bridging with Stratum BMv2 switches",
         "wikiFile": "SRBridgingWiki.txt",
         "schedules": [
@@ -1346,7 +1370,8 @@
         "supportedBranches": [ "onos-2.x", "master" ]
     },
     "SRBridging-tofino": {
-        "test": "SRBridging --params-file SRBridging.params.tofino --topo-file SRBridging.topo.0x1.physical",
+        "test": "SRBridging",
+        "arguments": "--params-file SRBridging.params.tofino --topo-file SRBridging.topo.0x1.physical",
         "wikiName": "SR Bridging with Tofino switches",
         "wikiFile": "SRBridgingWiki.txt",
         "schedules": [