[ONOS-7495]: Refactor Wiki Test Result Graph R Scripts
Change-Id: Iccbe89838bba21af276463e73091341063dc7b39
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R
new file mode 100644
index 0000000..6f67b0d
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFIntentInstallWithdrawRerouteLat.R
@@ -0,0 +1,386 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
# **********************************************************
# STEP 1: Data management.
# **********************************************************
print( "**********************************************************" )
print( "STEP 1: Data management." )
print( "**********************************************************" )

# Positional indices of the expected command-line arguments (1-based).
has_flow_obj <- 1
database_host <- 2
database_port <- 3
database_u_id <- 4
database_pw <- 5
test_name <- 6
branch_name <- 7
batch_size <- 8
old_flow <- 9
save_directory <- 10

# Command line arguments are read.
print( "Reading command-line args." )  # fixed typo: "commmand"
args <- commandArgs( trailingOnly = TRUE )
+
# ----------------
# Import Libraries
# ----------------

print( "Importing libraries." )
library( ggplot2 )      # plotting
library( reshape2 )     # melt() for reshaping query results
library( RPostgreSQL )  # For databases

# -------------------
# Check CLI Arguments
# -------------------

print( "Verifying CLI args." )

# The last expected argument is the save directory; if it is missing,
# too few arguments were supplied, so print usage and abort.
if ( is.na( args[ save_directory ] ) ){
    usageParts <- c( "Usage: Rscript SCPFIntentInstallWithdrawRerouteLat.R",
                     "<isFlowObj>",
                     "<database-host>",
                     "<database-port>",
                     "<database-user-id>",
                     "<database-password>",
                     "<test-name>",
                     "<branch-name>",
                     "<batch-size>",
                     "<using-old-flow>",
                     "<directory-to-save-graphs>" )
    print( paste( usageParts, collapse = " " ) )
    quit( status = 1 ) # basically exit(), but in R
}
+
# -----------------------------------
# Create File Name and Title of Graph
# -----------------------------------

print( "Creating filename and title of graph." )

chartTitle <- "Intent Install, Withdraw, & Reroute Latencies"
flowObjFileModifier <- ""

# Base output path: <save-dir>SCPFIntentInstallWithdrawRerouteLat_<branch>
errBarOutputFile <- paste0( args[ save_directory ],
                            "SCPFIntentInstallWithdrawRerouteLat_",
                            args[ branch_name ] )

# Flow-objective runs get their own file-name suffix, table modifier,
# and title suffix.
if ( args[ has_flow_obj ] == "y" ){
    errBarOutputFile <- paste0( errBarOutputFile, "_fobj" )
    flowObjFileModifier <- "fobj_"
    chartTitle <- paste( chartTitle, "w/ FlowObj" )
}

# Old-flow (eventually consistent flow rule store) runs likewise.
if ( args[ old_flow ] == "y" ){
    errBarOutputFile <- paste0( errBarOutputFile, "_OldFlow" )
    chartTitle <- paste( chartTitle,
                         "With Eventually Consistent Flow Rule Store",
                         sep = "\n" )
}

errBarOutputFile <- paste0( errBarOutputFile,
                            "_",
                            args[ batch_size ],
                            "-batchSize_graph.jpg" )

chartTitle <- paste( chartTitle,
                     "\nBatch Size =",
                     args[ batch_size ] )
+
# ------------------
# SQL Initialization
# ------------------

print( "Initializing SQL" )

con <- dbConnect( dbDriver( "PostgreSQL" ),
                  dbname = "onostest",
                  host = args[ database_host ],
                  port = strtoi( args[ database_port ] ),
                  user = args[ database_u_id ],
                  password = args[ database_pw ] )

# Build the SELECT for the latest run of a latency table family.
# tableBase is e.g. "intent_latency_" or "intent_reroute_latency_"; the
# flow-objective modifier and old-flow filter come from the
# script-level variables assigned earlier.
latencySQLCommand <- function( tableBase ){
    oldFlowFilter <- if ( args[ old_flow ] == 'y' ) "" else "NOT "
    paste0( "SELECT * FROM ", tableBase,
            flowObjFileModifier,
            "tests WHERE batch_size=",
            args[ batch_size ],
            " AND branch = '",
            args[ branch_name ],
            "' AND date IN ( SELECT MAX( date ) FROM ", tableBase,
            flowObjFileModifier,
            "tests WHERE branch='",
            args[ branch_name ],
            "' AND ",
            oldFlowFilter,
            "is_old_flow)" )
}

# ---------------------------------------
# Intent Install and Withdraw SQL Command
# ---------------------------------------
print( "Generating Intent Install and Withdraw SQL Command" )

installWithdrawSQLCommand <- latencySQLCommand( "intent_latency_" )

print( "Sending Intent Install and Withdraw SQL command:" )
print( installWithdrawSQLCommand )
installWithdrawData <- dbGetQuery( con, installWithdrawSQLCommand )

# --------------------------
# Intent Reroute SQL Command
# --------------------------

print( "Generating Intent Reroute SQL Command" )

rerouteSQLCommand <- latencySQLCommand( "intent_reroute_latency_" )

print( "Sending Intent Reroute SQL command:" )
print( rerouteSQLCommand )
rerouteData <- dbGetQuery( con, rerouteSQLCommand )
+
# **********************************************************
# STEP 2: Organize Data.
# **********************************************************

print( "**********************************************************" )
print( "STEP 2: Organize Data." )
print( "**********************************************************" )

# -------------------------------------------------------
# Combining Install, Withdraw, and Reroute Latencies Data
# -------------------------------------------------------

print( "Combining Install, Withdraw, and Reroute Latencies Data" )

# Shared handler for missing-column failures: report what was expected
# vs. what the query actually returned, then abort.
# (Fixed: the handlers previously printed the undefined variable
# "fileData", and the reroute branch printed "requiredColumns" which
# was never assigned on that path.)
missingColumnsError <- function( e, requiredColumns, actualData ) {
    print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
    print( "Required columns: " )
    print( requiredColumns )
    print( "Actual columns: " )
    print( names( actualData ) )
    print( "Error dump:" )
    print( e )
    quit( status = 1 )
}

if ( ncol( rerouteData ) == 0 ){ # Checks if rerouteData exists, so we can exclude it if necessary

    requiredColumns <- c( "install_avg",
                          "withdraw_avg" )

    tryCatch( avgs <- c( installWithdrawData[ requiredColumns ] ),
              error = function( e ) missingColumnsError( e, requiredColumns, installWithdrawData )
              )
} else{
    # NOTE(review): two columns are both named "date" here; this mirrors
    # the column layout returned by the query -- confirm against the
    # intent_reroute_latency_* table schema before changing.
    colnames( rerouteData ) <- c( "date",
                                  "name",
                                  "date",
                                  "branch",
                                  "is_old_flow",
                                  "commit",
                                  "scale",
                                  "batch_size",
                                  "reroute_avg",
                                  "reroute_std" )

    requiredColumns <- c( "install_avg",
                          "withdraw_avg" )

    tryCatch( avgs <- c( installWithdrawData[ 'install_avg' ],
                         installWithdrawData[ 'withdraw_avg' ],
                         rerouteData[ 'reroute_avg' ] ),
              error = function( e ) missingColumnsError( e, requiredColumns, installWithdrawData )
              )
}

# Combine lists into data frames.
dataFrame <- melt( avgs )
+
# --------------------
# Construct Data Frame
# --------------------

print( "Constructing data frame." )

# Attach scale and standard-deviation columns; the reroute series is
# only present when the reroute query returned rows.
if ( ncol( rerouteData ) == 0 ){ # Checks if rerouteData exists (due to batch size) for the dataFrame this time
    dataFrame$scale <- rep( installWithdrawData$scale, 2 )
    dataFrame$stds <- c( installWithdrawData$install_std,
                         installWithdrawData$withdraw_std )
} else {
    dataFrame$scale <- c( rep( installWithdrawData$scale, 2 ),
                          rerouteData$scale )
    dataFrame$stds <- c( installWithdrawData$install_std,
                         installWithdrawData$withdraw_std,
                         rerouteData$reroute_std )
}

colnames( dataFrame ) <- c( "ms", "type", "scale", "stds" )

# Keep the series in file order rather than alphabetical factor order.
dataFrame$type <- as.character( dataFrame$type )
dataFrame$type <- factor( dataFrame$type, levels = unique( dataFrame$type ) )

dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist

print( "Data Frame Results:" )
print( dataFrame )
+
# **********************************************************
# STEP 3: Generate graph.
# **********************************************************

print( "**********************************************************" )
print( "STEP 3: Generate Graph." )
print( "**********************************************************" )

# -------------------
# Main Plot Generated
# -------------------

print( "Creating the main plot." )

# Bars show the mean latency; error bars extend one standard deviation
# above the mean.
mainPlot <- ggplot( data = dataFrame, aes( x = scale,
                                           y = ms,
                                           ymin = ms,
                                           ymax = ms + stds,
                                           fill = type ) )

# ------------------------------
# Fundamental Variables Assigned
# ------------------------------

print( "Generating fundamental graph data." )

theme_set( theme_grey( base_size = 22 ) )

# Shared with the bar-graph/export section below.
barWidth <- 1.3
imageWidth <- 15
imageHeight <- 10
imageDPI <- 200
errorBarColor <- rgb( 140, 140, 140, maxColorValue = 255 )

subtitle <- paste0( "Last Updated: ",
                    format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ) )

# All static plot configuration combined into one object; the
# single-use label/theme/color pieces are added inline.
fundamentalGraphData <- mainPlot +
    scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) ) +
    xlab( "Scale" ) +
    ylab( "Latency (ms)" ) +
    labs( fill = "Type" ) +
    theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
           legend.position = "bottom",
           legend.text = element_text( size = 22 ),
           legend.title = element_blank(),
           legend.key.size = unit( 1.5, 'lines' ),
           plot.subtitle = element_text( size = 16, hjust = 1.0 ) ) +
    labs( title = chartTitle, subtitle = subtitle ) +
    scale_fill_manual( values = c( "#F77670",
                                   "#619DFA",
                                   "#18BA48" ) )
+
# ---------------------------
# Generating Bar Graph Format
# ---------------------------

print( "Generating bar graph with error bars." )

# Dodged bars with matching error bars, plus bold value labels placed
# just above each bar.
barGraphFormat <- geom_bar( stat = "identity",
                            width = barWidth,
                            position = "dodge" )

errorBarFormat <- geom_errorbar( width = barWidth,
                                 position = position_dodge( barWidth ),
                                 color = errorBarColor )

values <- geom_text( aes( x = dataFrame$scale,
                          y = dataFrame$ms + 0.035 * max( dataFrame$ms ),
                          label = format( dataFrame$ms,
                                          digits = 3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     position = position_dodge( width = barWidth ),
                     size = 5.5,
                     fontface = "bold" )

result <- fundamentalGraphData +
    barGraphFormat +
    errorBarFormat +
    values +
    guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )

# -----------------------
# Exporting Graph to File
# -----------------------

print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )

tryCatch( ggsave( errBarOutputFile,
                  width = imageWidth,
                  height = imageHeight,
                  dpi = imageDPI ),
          error = function( e ){
              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
              print( e )
              quit( status = 1 )
          }
          )

print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R
new file mode 100644
index 0000000..0b68425
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFbatchFlowResp.R
@@ -0,0 +1,372 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+#
+# Example script:
+# Single Bench Flow Latency Graph with Eventually Consistent Flow Rule Store (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFbatchFlowResp_master_OldFlow_PostGraph.jpg):
+# Rscript SCPFbatchFlowResp.R <url> <port> <username> <pass> SCPFbatchFlowResp.R master y /path/to/save/directory/
+
# **********************************************************
# STEP 1: Data management.
# **********************************************************

# Positional indices of the script-specific command-line arguments; the
# shared indices (database_*, branch_name, graph_title, ...) come from
# dependencies/cliArgs.R, sourced below.
old_flow <- 7
save_directory <- 8

print( "**********************************************************" )
print( "STEP 1: Data management." )
print( "**********************************************************" )

# Command line arguments are read.
print( "Reading command-line args." )  # fixed typo: "commmand"
args <- commandArgs( trailingOnly = TRUE )
+
# ----------------
# Import Libraries
# ----------------

print( "Importing libraries." )
library( ggplot2 )      # plotting
library( reshape2 )     # melt() for reshaping query results
library( RPostgreSQL )  # For databases

# Shared helpers: graph saving, theming, SQL setup, and CLI parsing.
source( "dependencies/saveGraph.R" )
source( "dependencies/fundamentalGraphData.R" )
source( "dependencies/initSQL.R" )
source( "dependencies/cliArgs.R" )

# -------------------
# Check CLI Arguments
# -------------------

print( "Verifying CLI args." )

# Expect exactly save_directory (8) arguments; otherwise show the usage
# message (from cliArgs.R) with the extra "using-old-flow" arg and abort.
if ( length( args ) != save_directory ){
    usage( "SCPFbatchFlowResp.R", c( "using-old-flow" ) )
    quit( status = 1 )
}
+
# -----------------
# Create File Names
# -----------------

print( "Creating filenames and title of graph." )

# Common file-name stem: <save-dir><test-name>_<branch>[_OldFlow]
oldFlowNameSuffix <- if ( args[ old_flow ] == "y" ) "_OldFlow" else ""
outputFileStem <- paste0( args[ save_directory ],
                          args[ graph_title ],
                          "_",
                          args[ branch_name ],
                          oldFlowNameSuffix )

postOutputFile <- paste0( outputFileStem, "_PostGraph.jpg" )
delOutputFile <- paste0( outputFileStem, "_DelGraph.jpg" )

# Old-flow runs get an extra title line.
oldFlowTitleSuffix <- if ( args[ old_flow ] == "y" ) "\nWith Eventually Consistent Flow Rule Store" else ""
postChartTitle <- paste0( "Single Bench Flow Latency - Post\n",
                          "Last 3 Builds",
                          oldFlowTitleSuffix )
delChartTitle <- paste0( "Single Bench Flow Latency - Del\n",
                         "Last 3 Builds",
                         oldFlowTitleSuffix )

# ------------------
# SQL Initialization
# ------------------

print( "Initializing SQL" )

con <- initSQL( args[ database_host ],
                args[ database_port ],
                args[ database_u_id ],
                args[ database_pw ] )

# ---------------------------
# Batch Flow Resp SQL Command
# ---------------------------

print( "Generating Batch Flow Resp SQL Command" )

# Latest 3 runs on this branch, filtered on the is_old_flow flag.
command <- paste0( "SELECT * FROM batch_flow_tests WHERE branch='",
                   args[ branch_name ],
                   "' AND ",
                   if ( args[ old_flow ] == 'y' ) "" else "NOT ",
                   "is_old_flow",
                   " ORDER BY date DESC LIMIT 3" )

fileData <- retrieveData( con, command )
+
# **********************************************************
# STEP 2: Organize data.
# **********************************************************

print( "**********************************************************" )
print( "STEP 2: Organize Data." )
print( "**********************************************************" )

# -----------------
# Post Data Sorting
# -----------------

print( "Sorting data for Post." )

# Columns required for the Post graph: time-to-confirm and elapsed time.
requiredColumns <- c( "posttoconfrm", "elapsepost" )

tryCatch( postAvgs <- c( fileData[ requiredColumns ] ),
          error = function( e ) {
              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
              print( "Required columns: " )
              print( requiredColumns )
              print( "Actual columns: " )
              print( names( fileData ) )
              print( "Error dump:" )
              print( e )
              quit( status = 1 )
          }
          )

# -------------------------
# Post Construct Data Frame
# -------------------------

# melt() stacks the two series; "iterative" gives the newest build the
# highest x position.
postDataFrame <- melt( postAvgs )
postDataFrame$scale <- fileData$scale
postDataFrame$date <- fileData$date
postDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )

colnames( postDataFrame ) <- c( "ms", "type", "scale", "date", "iterative" )

# Keep the series in file order rather than alphabetical factor order.
postDataFrame$type <- as.character( postDataFrame$type )
postDataFrame$type <- factor( postDataFrame$type,
                              levels = unique( postDataFrame$type ) )

postDataFrame <- na.omit( postDataFrame ) # Omit any data that doesn't exist

print( "Post Data Frame Results:" )
print( postDataFrame )
+
# ----------------
# Del Data Sorting
# ----------------

# Columns required for the Del graph: time-to-confirm and elapsed time.
requiredColumns <- c( "deltoconfrm", "elapsedel" )

tryCatch( delAvgs <- c( fileData[ requiredColumns ] ),
          error = function( e ) {
              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
              print( "Required columns: " )
              print( requiredColumns )
              print( "Actual columns: " )
              print( names( fileData ) )
              print( "Error dump:" )
              print( e )
              quit( status = 1 )
          }
          )

# ------------------------
# Del Construct Data Frame
# ------------------------

# Same layout as the Post data frame above.
delDataFrame <- melt( delAvgs )
delDataFrame$scale <- fileData$scale
delDataFrame$date <- fileData$date
delDataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )

colnames( delDataFrame ) <- c( "ms", "type", "scale", "date", "iterative" )

# Keep the series in file order rather than alphabetical factor order.
delDataFrame$type <- as.character( delDataFrame$type )
delDataFrame$type <- factor( delDataFrame$type,
                             levels = unique( delDataFrame$type ) )

delDataFrame <- na.omit( delDataFrame ) # Omit any data that doesn't exist

print( "Del Data Frame Results:" )
print( delDataFrame )
+
# **********************************************************
# STEP 3: Generate graphs.
# **********************************************************

print( "**********************************************************" )
print( "STEP 3: Generate Graph." )
print( "**********************************************************" )

# ------------------------------------------
# Initializing variables used in both graphs
# ------------------------------------------

print( "Initializing variables used in both graphs." )

defaultTextSize()
xLabel <- xlab( "Build Date" )
yLabel <- ylab( "Latency (ms)" )
fillLabel <- labs( fill="Type" )

# One color per series (confirm / elapsed).
colors <- scale_fill_manual( values=c( webColor( "redv2" ),
                                       webColor( "light_blue" ) ) )

wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )

barWidth <- 0.3

theme <- graphTheme()

barGraphFormat <- geom_bar( stat = "identity",
                            width = barWidth )

# -----------------------
# Post Generate Main Plot
# -----------------------

print( "Creating main plot for Post graph." )

mainPlot <- ggplot( data = postDataFrame, aes( x = iterative,
                                               y = ms,
                                               fill = type ) )

# -----------------------------------
# Post Fundamental Variables Assigned
# -----------------------------------

print( "Generating fundamental graph data for Post graph." )

xScaleConfig <- scale_x_continuous( breaks = postDataFrame$iterative,
                                    label = postDataFrame$date )

title <- labs( title = postChartTitle, subtitle = lastUpdatedLabel() )

fundamentalGraphData <- mainPlot +
                        xScaleConfig +
                        xLabel +
                        yLabel +
                        fillLabel +
                        theme +
                        wrapLegend +
                        colors +
                        title

# --------------------------------
# Post Generating Bar Graph Format
# --------------------------------

print( "Generating bar graph for Post graph." )

# Total latency per build = confirm + elapsed; the label sits just
# above each stacked bar.
postTotal <- fileData[ 'posttoconfrm' ] +
             fileData[ 'elapsepost' ]

values <- geom_text( aes( x = postDataFrame$iterative,
                          y = postTotal + 0.03 * max( postTotal ),
                          label = format( postTotal,
                                          digits = 3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     size = 7.0,
                     fontface = "bold" )

result <- fundamentalGraphData +
          barGraphFormat +
          values

# ----------------------------
# Post Exporting Graph to File
# ----------------------------

saveGraph( postOutputFile )
+
# ----------------------
# Del Generate Main Plot
# ----------------------

print( "Creating main plot for Del graph." )

mainPlot <- ggplot( data = delDataFrame, aes( x = iterative,
                                              y = ms,
                                              fill = type ) )

# ----------------------------------
# Del Fundamental Variables Assigned
# ----------------------------------

print( "Generating fundamental graph data for Del graph." )

xScaleConfig <- scale_x_continuous( breaks = delDataFrame$iterative,
                                    label = delDataFrame$date )

title <- labs( title = delChartTitle, subtitle = lastUpdatedLabel() )

fundamentalGraphData <- mainPlot +
                        xScaleConfig +
                        xLabel +
                        yLabel +
                        fillLabel +
                        theme +
                        wrapLegend +
                        colors +
                        title

# -------------------------------
# Del Generating Bar Graph Format
# -------------------------------

print( "Generating bar graph for Del graph." )

# Total latency per build = confirm + elapsed.
sum <- fileData[ 'deltoconfrm' ] +
       fileData[ 'elapsedel' ]

values <- geom_text( aes( x = delDataFrame$iterative,
                          y = sum + 0.03 * max( sum ),
                          label = format( sum,
                                          digits = 3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     size = 7.0,
                     fontface = "bold" )

# Fixed: "title" was previously added a second time here even though it
# is already part of fundamentalGraphData (the Post graph above does
# not re-add it either).
result <- fundamentalGraphData +
          barGraphFormat +
          values

# ---------------------------
# Del Exporting Graph to File
# ---------------------------

saveGraph( delOutputFile )
quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R
new file mode 100644
index 0000000..871000f
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFcbench.R
@@ -0,0 +1,232 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Cbench Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFcbench_master_errGraph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFcbench.R <url> <port> <username> <pass> SCPFcbench master /path/to/save/directory/
+
# **********************************************************
# STEP 1: Data management.
# **********************************************************

print( "**********************************************************" )
print( "STEP 1: Data management." )
print( "**********************************************************" )

# Index of the last expected CLI argument (the save directory); the
# shared indices come from dependencies/cliArgs.R, sourced below.
save_directory <- 7

# Command line arguments are read.
print( "Reading command-line args." )  # fixed typo: "commmand"
args <- commandArgs( trailingOnly = TRUE )
+
# ----------------
# Import Libraries
# ----------------

print( "Importing libraries." )
library( ggplot2 )      # plotting
library( reshape2 )     # melt() for reshaping query results
library( RPostgreSQL )  # For databases

# Shared helpers: graph saving, theming, SQL setup, and CLI parsing.
source( "dependencies/saveGraph.R" )
source( "dependencies/fundamentalGraphData.R" )
source( "dependencies/initSQL.R" )
source( "dependencies/cliArgs.R" )

# -------------------
# Check CLI Arguments
# -------------------

print( "Verifying CLI args." )

# Expect exactly save_directory (7) arguments; otherwise show the usage
# message (from cliArgs.R) and abort.
if ( length( args ) != save_directory ){
    usage( "SCPFcbench.R" )
    quit( status = 1 )
}
+
# -----------------
# Create File Names
# -----------------

print( "Creating filenames and title of graph." )

# Output: <save-dir><test-name>_<branch>_errGraph.jpg
errBarOutputFile <- paste0( args[ save_directory ],
                            args[ graph_title ],
                            "_",
                            args[ branch_name ],
                            "_errGraph.jpg" )

chartTitle <- paste( "Single-Node CBench Throughput", "Last 3 Builds", sep = "\n" )

# ------------------
# SQL Initialization
# ------------------

print( "Initializing SQL" )

con <- initSQL( args[ database_host ],
                args[ database_port ],
                args[ database_u_id ],
                args[ database_pw ] )

# ------------------
# Cbench SQL Command
# ------------------

print( "Generating Scale Topology SQL Command" )

# Latest 3 runs on this branch.
command <- paste0( "SELECT * FROM cbench_bm_tests WHERE branch='",
                   args[ branch_name ],
                   "' ORDER BY date DESC LIMIT 3" )

fileData <- retrieveData( con, command )
+
# **********************************************************
# STEP 2: Organize data.
# **********************************************************

print( "**********************************************************" )
print( "STEP 2: Organize Data." )
print( "**********************************************************" )

# ------------
# Data Sorting
# ------------

print( "Sorting data." )

# Only the per-build average is graphed; "std" feeds the error bars.
requiredColumns <- c( "avg" )

tryCatch( avgs <- c( fileData[ requiredColumns ] ),
          error = function( e ) {
              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
              print( "Required columns: " )
              print( requiredColumns )
              print( "Actual columns: " )
              print( names( fileData ) )
              print( "Error dump:" )
              print( e )
              quit( status = 1 )
          }
          )

# --------------------
# Construct Data Frame
# --------------------

print( "Constructing Data Frame" )

# "iterative" gives the newest build the highest x position.
dataFrame <- melt( avgs )
dataFrame$std <- fileData$std
dataFrame$date <- fileData$date
dataFrame$iterative <- rev( seq( 1, nrow( fileData ), by = 1 ) )

colnames( dataFrame ) <- c( "ms", "type", "std", "date", "iterative" )

dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist

print( "Data Frame Results:" )
print( dataFrame )
+
# **********************************************************
# STEP 3: Generate graphs.
# **********************************************************

print( "**********************************************************" )
print( "STEP 3: Generate Graph." )
print( "**********************************************************" )

# ------------------
# Generate Main Plot
# ------------------

print( "Creating main plot." )

# Bars show the mean responses/sec; error bars extend one standard
# deviation above the mean.
mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
                                           y = ms,
                                           ymin = ms,
                                           ymax = ms + std ) )

# ------------------------------
# Fundamental Variables Assigned
# ------------------------------

print( "Generating fundamental graph data." )

defaultTextSize()

barWidth <- 0.3

xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
                                    label = dataFrame$date )
xLabel <- xlab( "Build Date" )
yLabel <- ylab( "Responses / sec" )
fillLabel <- labs( fill = "Type" )

theme <- graphTheme()

title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )

fundamentalGraphData <- mainPlot +
                        xScaleConfig +
                        xLabel +
                        yLabel +
                        fillLabel +
                        theme +
                        title

# ---------------------------
# Generating Bar Graph Format
# ---------------------------

print( "Generating bar graph with error bars." )

barGraphFormat <- geom_bar( stat = "identity",
                            position = position_dodge(),
                            width = barWidth,
                            fill = webColor( "green" ) )

errorBarFormat <- geom_errorbar( width = barWidth,
                                 color = webColor( "darkerGray" ) )

# NOTE(review): fileData[ 'avg' ] is a one-column data frame rather
# than a vector; dataFrame$ms looks like the cleaner equivalent --
# confirm before changing.
values <- geom_text( aes( x = dataFrame$iterative,
                          y = fileData[ 'avg' ] + 0.025 * max( fileData[ 'avg' ] ),
                          label = format( fileData[ 'avg' ],
                                          digits = 3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     size = 7.0,
                     fontface = "bold" )

result <- fundamentalGraphData +
          barGraphFormat +
          errorBarFormat +
          values

# -----------------------
# Exporting Graph to File
# -----------------------

saveGraph( errBarOutputFile ) # from saveGraph.R
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R
new file mode 100644
index 0000000..ffb91a9
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFflowTp1g.R
@@ -0,0 +1,327 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+# Positional indices of the command-line arguments (see usage message below).
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+has_neighbors = 8
+old_flow = 9
+save_directory = 10
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Command line arguments are read.
+print( "Reading command-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+# The last positional argument must be present; otherwise print usage and abort.
+if ( is.na( args[ save_directory ] ) ){
+
+ print( paste( "Usage: Rscript SCPFflowTp1g.R",
+ "<has-flow-obj>",
+ "<database-host>",
+ "<database-port>",
+ "<database-user-id>",
+ "<database-password>",
+ "<test-name>",
+ "<branch-name>",
+ "<has-neighbors>",
+ "<using-old-flow>",
+ "<directory-to-save-graphs>",
+ sep=" " ) )
+
+ quit( status = 1 ) # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+# The output filename and chart title are built up from the test options:
+# flow objectives, neighbor count, and old (eventually consistent) flow store.
+chartTitle <- "Flow Throughput Test"
+fileNeighborsModifier <- "no"
+commandNeighborModifier <- ""
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+if ( args[ has_flow_obj ] == 'y' ){
+ fileFlowObjModifier <- "_flowObj"
+ sqlFlowObjModifier <- "_fobj" # flow-objective results live in the *_fobj_tests tables
+ chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
+}
+
+chartTitle <- paste( chartTitle, "\nNeighbors =", sep="" )
+
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
+ fileNeighborsModifier <- "all"
+ commandNeighborModifier <- "scale=1 OR NOT " # widens the SQL WHERE clause to include all-neighbor rows
+ chartTitle <- paste( chartTitle, "Cluster Size - 1" )
+} else {
+ chartTitle <- paste( chartTitle, "0" )
+}
+if ( args[ old_flow ] == 'y' ){
+ fileOldFlowModifier <- "_OldFlow"
+ chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+# e.g. <save-dir>/SCPFflowTp1g_master_no-neighbors_graph.jpg
+errBarOutputFile <- paste( args[ save_directory ],
+ args[ test_name ],
+ "_",
+ args[ branch_name ],
+ "_",
+ fileNeighborsModifier,
+ "-neighbors",
+ fileFlowObjModifier,
+ fileOldFlowModifier,
+ "_graph.jpg",
+ sep="" )
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+# Open a connection to the central test-results database.
+# NOTE(review): the connection is never explicitly dbDisconnect()-ed;
+# it is only released when the Rscript process exits.
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+ dbname = "onostest",
+ host = args[ database_host ],
+ port = strtoi( args[ database_port ] ),
+ user = args[ database_u_id ],
+ password = args[ database_pw ] )
+
+# ---------------------------
+# Flow Throughput SQL Command
+# ---------------------------
+
+print( "Generating Flow Throughput SQL command." )
+
+# Average throughput (and std) per cluster scale for the most recent run
+# on the selected branch / flow-store combination.
+command <- paste( "SELECT scale, avg( avg ), avg( std ) FROM flow_tp",
+ sqlFlowObjModifier,
+ "_tests WHERE (",
+ commandNeighborModifier,
+ "neighbors = 0 ) AND branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT max( date ) FROM flow_tp",
+ sqlFlowObjModifier,
+ "_tests WHERE branch='",
+ args[ branch_name ],
+ "' AND ",
+ ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+ "is_old_flow",
+ " ) GROUP BY scale ORDER BY scale",
+ sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data for Flow Throughput." )
+
+colnames( fileData ) <- c( "scale",
+ "avg",
+ "std" )
+
+requiredColumns <- c( "avg" )
+
+# Subsetting a data frame by name errors if a column is missing; the
+# handler turns that into a readable message and aborts the script.
+tryCatch( avgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+
+# ----------------------------
+# Flow TP Construct Data Frame
+# ----------------------------
+
+print( "Constructing Flow TP data frame." )
+
+dataFrame <- melt( avgs ) # This is where reshape2 comes in. Avgs list is converted to data frame
+dataFrame$scale <- fileData$scale # Add node scaling to the data frame.
+dataFrame$std <- fileData$std
+
+colnames( dataFrame ) <- c( "throughput",
+ "type",
+ "scale",
+ "std" )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Generating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+# - data: the data frame that the graph will be based off of
+# - aes: the aesthetics of the graph which require:
+# - x: x-axis values (usually node scaling)
+# - y: y-axis values (usually time in milliseconds)
+# - fill: the category of the colored side-by-side bars (usually type)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+ y = throughput,
+ ymin = throughput,
+ ymax = throughput + std,
+ fill = type ) )
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+# Formatting the plot
+theme_set( theme_grey( base_size = 22 ) ) # set the default text size of the graph.
+width <- 0.7 # Width of the bars.
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale,
+ label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Throughput (,000 Flows/sec)" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15 # output size in inches
+imageHeight <- 10
+imageDPI <- 200
+errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 ) # medium gray
+
+# NOTE(review): this binding shadows ggplot2::theme() for the rest of the
+# script, so keep it as the last theme() call.
+theme <- theme( plot.title = element_text( hjust = 0.5,
+ size = 32,
+ face = 'bold' ),
+ plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+# Create the stacked bar graph with error bars.
+# geom_bar contains:
+# - stat: data formatting (usually "identity")
+# - width: the width of the bar types (declared above)
+# geom_errorbar contains similar arguments as geom_bar.
+print( "Generating bar graph with error bars." )
+barGraphFormat <- geom_bar( stat = "identity",
+ width = width,
+ fill = "#FFAA3C" )
+
+errorBarFormat <- geom_errorbar( width = width,
+ position = position_dodge(),
+ color = errorBarColor )
+
+# Numeric value labels, drawn slightly above each bar.
+values <- geom_text( aes( x = dataFrame$scale,
+ y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
+ label = format( dataFrame$throughput,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold" )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ errorBarFormat +
+ values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )
+
+tryCatch( ggsave( errBarOutputFile,
+ width = imageWidth,
+ height = imageHeight,
+ dpi = imageDPI ),
+ error = function( e ){
+ print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R
new file mode 100644
index 0000000..c4c30b2
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFhostLat.R
@@ -0,0 +1,233 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory = 7 # total number of positional args; the last one is the save directory
+
+# Command line arguments are read.
+print( "Reading command-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" ) # defines the shared arg indices ( database_host, graph_title, ... ) and usage()
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+# Exactly save_directory (7) arguments are expected.
+if ( length( args ) != save_directory ){
+ usage( "SCPFhostLat.R" )
+ quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+# graph_title and branch_name indices are defined in dependencies/cliArgs.R.
+errBarOutputFile <- paste( args[ save_directory ],
+ args[ graph_title ],
+ "_",
+ args[ branch_name ],
+ "_errGraph.jpg",
+ sep="" )
+
+chartTitle <- "Host Latency"
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+ args[ database_port ],
+ args[ database_u_id ],
+ args[ database_pw ] )
+
+# ------------------------
+# Host Latency SQL Command
+# ------------------------
+
+print( "Generating Host Latency SQL Command" )
+
+# Fetch all rows from the most recent run on the selected branch.
+command <- paste( "SELECT * FROM host_latency_tests WHERE branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT MAX( date ) FROM host_latency_tests WHERE branch = '",
+ args[ branch_name ],
+ "' ) ",
+ sep = "" )
+
+fileData <- retrieveData( con, command )
+
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "avg" )
+
+# Subsetting a data frame by name errors if a column is missing; the
+# handler turns that into a readable message and aborts the script.
+tryCatch( avgs <- c( fileData[ requiredColumns ] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+# melt() (reshape2) converts the avgs list into long format ( value, variable ).
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+dataFrame$std <- fileData$std
+
+colnames( dataFrame ) <- c( "ms",
+ "type",
+ "scale",
+ "std" )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+
+# Base plot: latency per scale, with error bars spanning [ ms, ms + std ].
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+ y = ms,
+ ymin = ms,
+ ymax = ms + std ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+
+barWidth <- 0.9
+
+xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+
+theme <- graphTheme()
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+# ( removed unused errorBarColor; the error bars use webColor( "darkerGray" ) below )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ title
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph with error bars." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+ position = position_dodge(),
+ width = barWidth,
+ fill = webColor( "purple" ) )
+
+errorBarFormat <- geom_errorbar( position = position_dodge(),
+ width = barWidth,
+ color = webColor( "darkerGray" ) )
+
+# Numeric value labels, drawn slightly above each bar.
+values <- geom_text( aes( x=dataFrame$scale,
+ y=dataFrame$ms + 0.06 * max( dataFrame$ms ),
+ label = format( dataFrame$ms,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold" )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ errorBarFormat +
+ values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( errBarOutputFile ) # from saveGraph.R
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R
new file mode 100644
index 0000000..e9a9dc4
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFintentEventTp.R
@@ -0,0 +1,310 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+# Positional indices of the command-line arguments (see usage message below).
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+has_neighbors = 8
+old_flow = 9
+save_directory = 10
+
+# Command line arguments are read.
+print( "Reading command-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+# The last positional argument must be present; otherwise print usage and abort.
+if ( is.na( args[ save_directory ] ) ){
+
+ print( paste( "Usage: Rscript SCPFintentEventTp.R",
+ "<has-flow-obj>",
+ "<database-host>",
+ "<database-port>",
+ "<database-user-id>",
+ "<database-password>",
+ "<test-name>",
+ "<branch-name>",
+ "<has-neighbors>",
+ "<using-old-flow>",
+ "<directory-to-save-graphs>",
+ sep=" " ) )
+
+ quit( status = 1 ) # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+# The output filename and chart title are built up from the test options:
+# flow objectives, neighbor count, and old (eventually consistent) flow store.
+chartTitle <- "Intent Event Throughput"
+fileNeighborsModifier <- "no"
+commandNeighborModifier <- ""
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+
+if ( args[ has_flow_obj ] == 'y' ){
+ fileFlowObjModifier <- "_flowObj"
+ sqlFlowObjModifier <- "_fobj" # flow-objective results live in the *_fobj_tests tables
+ chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
+}
+
+chartTitle <- paste( chartTitle, "\nevents/second with Neighbors =", sep="" )
+
+fileOldFlowModifier <- ""
+if ( args[ has_neighbors ] == 'y' ){
+ fileNeighborsModifier <- "all"
+ commandNeighborModifier <- "scale=1 OR NOT " # widens the SQL WHERE clause to include all-neighbor rows
+ chartTitle <- paste( chartTitle, "all" )
+} else {
+ chartTitle <- paste( chartTitle, "0" )
+}
+if ( args[ old_flow ] == 'y' ){
+ fileOldFlowModifier <- "_OldFlow"
+ chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+
+# e.g. <save-dir>/SCPFintentEventTp_master_no-neighbors_graph.jpg
+errBarOutputFile <- paste( args[ save_directory ],
+ args[ test_name ],
+ "_",
+ args[ branch_name ],
+ "_",
+ fileNeighborsModifier,
+ "-neighbors",
+ fileFlowObjModifier,
+ fileOldFlowModifier,
+ "_graph.jpg",
+ sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+# NOTE(review): the connection is never explicitly dbDisconnect()-ed;
+# it is only released when the Rscript process exits.
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+ dbname = "onostest",
+ host = args[ database_host ],
+ port = strtoi( args[ database_port ] ),
+ user = args[ database_u_id ],
+ password = args[ database_pw ] )
+
+# -----------------------------------
+# Intent Event Throughput SQL Command
+# -----------------------------------
+
+print( "Generating Intent Event Throughput SQL command." )
+
+# Total throughput per cluster scale for the most recent run on the
+# selected branch / flow-store combination.
+command <- paste( "SELECT scale, SUM( avg ) as avg FROM intent_tp",
+ sqlFlowObjModifier,
+ "_tests WHERE (",
+ commandNeighborModifier,
+ "neighbors = 0 ) AND branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT max( date ) FROM intent_tp",
+ sqlFlowObjModifier,
+ "_tests WHERE branch='",
+ args[ branch_name ],
+ "' AND ",
+ ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+ "is_old_flow",
+ " ) GROUP BY scale ORDER BY scale",
+ sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "avg" )
+
+# Subsetting a data frame by name errors if a column is missing; the
+# handler turns that into a readable message and aborts the script.
+tryCatch( avgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing data frame." )
+dataFrame <- melt( avgs ) # This is where reshape2 comes in. Avgs list is converted to data frame
+dataFrame$scale <- fileData$scale # Add node scaling to the data frame.
+
+colnames( dataFrame ) <- c( "throughput",
+ "type",
+ "scale" )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Generating main plot." )
+# Create the primary plot here.
+# ggplot contains the following arguments:
+# - data: the data frame that the graph will be based off of
+# - aes: the aesthetics of the graph which require:
+# - x: x-axis values (usually node scaling)
+# - y: y-axis values (usually time in milliseconds)
+# - fill: the category of the colored side-by-side bars (usually type)
+
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+ y = throughput,
+ fill = type ) )
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+# Formatting the plot
+theme_set( theme_grey( base_size = 22 ) ) # set the default text size of the graph.
+width <- 0.7 # Width of the bars.
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale, label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Throughput (events/second)" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15 # output size in inches
+imageHeight <- 10
+imageDPI <- 200
+
+# NOTE(review): this binding shadows ggplot2::theme() for the rest of the
+# script, so keep it as the last theme() call.
+theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
+ legend.position = "bottom",
+ legend.text = element_text( size = 18, face = "bold" ),
+ legend.title = element_blank(),
+ plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+# Numeric value labels, drawn slightly above each bar.
+values <- geom_text( aes( x = dataFrame$scale,
+ y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
+ label = format( dataFrame$throughput,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7,
+ fontface = "bold" )
+
+# Store plot configurations as 1 variable.
+# ggplot2 draws layers in the order they are added, so the text labels
+# ( values ) must be added AFTER the bars; adding them here, before
+# barGraphFormat, would paint the solid bars over the labels.
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+barGraphFormat <- geom_bar( stat = "identity",
+ width = width,
+ fill = "#169EFF" )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+# Bars first, then the value labels on top of them.
+result <- fundamentalGraphData +
+ barGraphFormat +
+ title +
+ values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart to", errBarOutputFile ) )
+
+tryCatch( ggsave( errBarOutputFile,
+ width = imageWidth,
+ height = imageHeight,
+ dpi = imageDPI ),
+ error = function( e ){
+ print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart out to", errBarOutputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R
new file mode 100644
index 0000000..2525009
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R
@@ -0,0 +1,303 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Mastership Failover Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFmastershipFailoverLat_master_errGraph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFmastershipFailoverLat.R <url> <port> <username> <pass> SCPFmastershipFailoverLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory <- 7 # total number of positional args; the last one is the save directory
+
+# Command line arguments are read.
+print( "Reading command-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" ) # defines the shared arg indices ( database_host, graph_title, ... ) and usage()
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+# Exactly save_directory (7) arguments are expected.
+if ( length( args ) != save_directory ){
+ usage( "SCPFmastershipFailoverLat.R" )
+ quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+chartTitle <- "Mastership Failover Latency"
+
+# graph_title and branch_name indices are defined in dependencies/cliArgs.R.
+errBarOutputFile <- paste( args[ save_directory ],
+ args[ graph_title ],
+ "_",
+ args[ branch_name ],
+ "_errGraph.jpg",
+ sep="" )
+
+stackedBarOutputFile <- paste( args[ save_directory ],
+ args[ graph_title ],
+ "_",
+ args[ branch_name ],
+ "_stackedGraph.jpg",
+ sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+ args[ database_port ],
+ args[ database_u_id ],
+ args[ database_pw ] )
+
+# ---------------------------------------
+# Mastership Failover Latency SQL Command
+# ---------------------------------------
+
+print( "Generating Mastership Failover Latency SQL command" )
+
+# Fetch all rows from the most recent run on the selected branch.
+command <- paste( "SELECT * FROM mastership_failover_tests WHERE branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT MAX( date ) FROM mastership_failover_tests WHERE branch = '",
+ args[ branch_name ],
+ "' ) ",
+ sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Combining averages into a list." )
+
+requiredColumns <- c( "kill_deact_avg", "deact_role_avg" )
+
+# Subsetting a data frame by name errors if a column is missing; the
+# handler turns that into a readable message and aborts the script.
+tryCatch( avgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame from list." )
+
+# melt() stacks kill_deact_avg rows first, then deact_role_avg, so the
+# stds vector must be concatenated in the same order.
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+dataFrame$stds <- c( fileData$kill_deact_std,
+ fileData$deact_role_std )
+
+colnames( dataFrame ) <- c( "ms",
+ "type",
+ "scale",
+ "stds" )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+# Per-scale total latency, used to label the stacked bar chart.
+# NOTE(review): this binding shadows base::sum() for the rest of the script.
+sum <- fileData[ 'deact_role_avg' ] +
+ fileData[ 'kill_deact_avg' ]
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables for Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill = "Type" )
+
+barWidth <- 0.9
+
+theme <- graphTheme()
+
+# One fill color per latency type.
+barColors <- scale_fill_manual( values=c( webColor( "redv2" ),
+ webColor( "light_blue" ) ) )
+
+# Keep the legend on a single row.
+wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
+
+# ----------------------------------
+# Error Bar Graph Generate Main Plot
+# ----------------------------------
+
+print( "Creating main plot." )
+
+# Shared base plot: latency per scale with error bars [ ms, ms + stds ],
+# grouped by latency type. Also reused by the stacked graph below.
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+ y = ms,
+ ymin = ms,
+ ymax = ms + stds,
+ fill = type ) )
+
+# ----------------------------------------------
+# Error Bar Graph Fundamental Variables Assigned
+# ----------------------------------------------
+
+print( "Generating fundamental graph data for the error bar graph." )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ title +
+ wrapLegend
+
+# -------------------------------------------
+# Error Bar Graph Generating Bar Graph Format
+# -------------------------------------------
+
+print( "Generating bar graph with error bars." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+ position = position_dodge(),
+ width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+ position = position_dodge(),
+ color = webColor( "darkerGray" ) )
+
+# position_dodge( 0.9 ) matches the default dodge width of the bars so
+# each label sits over its own bar.
+values <- geom_text( aes( x = dataFrame$scale,
+ y = dataFrame$ms + 0.02 * max( dataFrame$ms ),
+ label = format( dataFrame$ms,
+ digits = 3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold",
+ position = position_dodge( 0.9 ) )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ barColors +
+ errorBarFormat +
+ values
+
+# ---------------------------------------
+# Error Bar Graph Exporting Graph to File
+# ---------------------------------------
+
+saveGraph( errBarOutputFile )
+
+# ------------------------------------------------
+# Stacked Bar Graph Fundamental Variables Assigned
+# ------------------------------------------------
+
+print( "Generating fundamental graph data for the stacked bar graph." )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ title +
+ wrapLegend
+
+# ---------------------------------------------
+# Stacked Bar Graph Generating Bar Graph Format
+# ---------------------------------------------
+
+print( "Generating stacked bar chart." )
+# No position argument: geom_bar defaults to stacking the two types.
+stackedBarFormat <- geom_bar( stat = "identity",
+ width = barWidth )
+
+# Label each stack with the per-scale total of both phases.
+# NOTE(review): sum here is the data frame computed in STEP 2 (it shadows
+# base::sum); presumably its row count lines up with the plotted scales —
+# verify if the aesthetics ever mismatch in length.
+values <- geom_text( aes( x = dataFrame$scale,
+ y = sum + 0.02 * max( sum ),
+ label = format( sum,
+ digits = 3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold" )
+
+# title is already part of fundamentalGraphData; adding it again below is
+# redundant but harmless (the second labs() overwrites the first).
+result <- fundamentalGraphData +
+ stackedBarFormat +
+ barColors +
+ title +
+ values
+
+# -----------------------------------------
+# Stacked Bar Graph Exporting Graph to File
+# -----------------------------------------
+
+saveGraph( stackedBarOutputFile )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R
new file mode 100644
index 0000000..70d6607
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFportLat.R
@@ -0,0 +1,367 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Port Latency Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFportLat_master_UpErrBarWithStack.jpg):
+# Rscript SCPFportLat.R <url> <port> <username> <pass> SCPFportLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory = 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+    usage( "SCPFportLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+errBarOutputFileUp <- paste( args[ save_directory ],
+ "SCPFportLat_",
+ args[ branch_name ],
+ "_UpErrBarWithStack.jpg",
+ sep = "" )
+
+errBarOutputFileDown <- paste( args[ save_directory ],
+ "SCPFportLat_",
+ args[ branch_name ],
+ "_DownErrBarWithStack.jpg",
+ sep = "" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+ args[ database_port ],
+ args[ database_u_id ],
+ args[ database_pw ] )
+
+# ------------------------
+# Port Latency SQL Command
+# ------------------------
+
+print( "Generating Port Latency SQL Command" )
+
+command <- paste( "SELECT * FROM port_latency_details WHERE branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT MAX( date ) FROM port_latency_details WHERE branch = '",
+ args[ branch_name ],
+ "' ) ",
+ sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -----------------------------
+# Port Up Averages Data Sorting
+# -----------------------------
+
+print( "Sorting data for Port Up Averages." )
+
+requiredColumns <- c( "up_ofp_to_dev_avg", "up_dev_to_link_avg", "up_link_to_graph_avg" )
+
+tryCatch( upAvgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# ----------------------------
+# Port Up Construct Data Frame
+# ----------------------------
+
+print( "Constructing Port Up data frame." )
+
+upAvgsDataFrame <- melt( upAvgs )
+upAvgsDataFrame$scale <- fileData$scale
+upAvgsDataFrame$up_std <- fileData$up_std
+
+colnames( upAvgsDataFrame ) <- c( "ms",
+ "type",
+ "scale",
+ "stds" )
+
+upAvgsDataFrame <- na.omit( upAvgsDataFrame )
+
+upAvgsDataFrame$type <- as.character( upAvgsDataFrame$type )
+upAvgsDataFrame$type <- factor( upAvgsDataFrame$type, levels=unique( upAvgsDataFrame$type ) )
+
+sumOfUpAvgs <- fileData[ 'up_ofp_to_dev_avg' ] +
+ fileData[ 'up_dev_to_link_avg' ] +
+ fileData[ 'up_link_to_graph_avg' ]
+
+print( "Up Averages Results:" )
+print( upAvgsDataFrame )
+
+# -------------------------------
+# Port Down Averages Data Sorting
+# -------------------------------
+
+print( "Sorting data for Port Down Averages." )
+
+requiredColumns <- c( "down_ofp_to_dev_avg", "down_dev_to_link_avg", "down_link_to_graph_avg" )
+
+tryCatch( downAvgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# ------------------------------
+# Port Down Construct Data Frame
+# ------------------------------
+
+print( "Constructing Port Down data frame." )
+
+downAvgsDataFrame <- melt( downAvgs )
+downAvgsDataFrame$scale <- fileData$scale
+downAvgsDataFrame$down_std <- fileData$down_std
+
+colnames( downAvgsDataFrame ) <- c( "ms",
+ "type",
+ "scale",
+ "stds" )
+
+downAvgsDataFrame <- na.omit( downAvgsDataFrame )
+
+downAvgsDataFrame$type <- as.character( downAvgsDataFrame$type )
+downAvgsDataFrame$type <- factor( downAvgsDataFrame$type, levels=unique( downAvgsDataFrame$type ) )
+
+sumOfDownAvgs <- fileData[ 'down_ofp_to_dev_avg' ] +
+ fileData[ 'down_dev_to_link_avg' ] +
+ fileData[ 'down_link_to_graph_avg' ]
+
+print( "Down Averages Results:" )
+print( downAvgsDataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables For Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+fillLabel <- labs( fill="Type" )
+
+barWidth <- 1
+
+wrapLegend <- guides( fill=guide_legend( nrow=1, byrow=TRUE ) )
+
+theme <- graphTheme()
+
+subtitle <- lastUpdatedLabel()
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+ webColor( "light_blue" ),
+ webColor( "green" ) ) )
+
+errorBarColor <- webColor( "darkerGray" )
+
+# --------------------------
+# Port Up Generate Main Plot
+# --------------------------
+
+print( "Generating main plot (Port Up Latency)." )
+
+mainPlot <- ggplot( data = upAvgsDataFrame, aes( x = scale,
+ y = ms,
+ fill = type,
+ ymin = fileData[ 'up_end_to_end_avg' ],
+ ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
+
+# --------------------------------------
+# Port Up Fundamental Variables Assigned
+# --------------------------------------
+
+print( "Generating fundamental graph data (Port Up Latency)." )
+
+title <- labs( title = "Port Up Latency", subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ wrapLegend +
+ title +
+ colors
+
+# -----------------------------------
+# Port Up Generating Bar Graph Format
+# -----------------------------------
+
+print( "Generating bar graph with error bars (Port Up Latency)." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+ width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+ color = errorBarColor )
+
+values <- geom_text( aes( x = upAvgsDataFrame$scale,
+ y = sumOfUpAvgs + 0.03 * max( sumOfUpAvgs ),
+ label = format( sumOfUpAvgs,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold" )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ errorBarFormat +
+ values
+
+# -------------------------------
+# Port Up Exporting Graph to File
+# -------------------------------
+
+saveGraph( errBarOutputFileUp )
+
+# ----------------------------
+# Port Down Generate Main Plot
+# ----------------------------
+
+print( "Generating main plot (Port Down Latency)." )
+
+mainPlot <- ggplot( data = downAvgsDataFrame, aes( x = scale,
+ y = ms,
+ fill = type,
+ ymin = fileData[ 'down_end_to_end_avg' ],
+ ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
+
+# ----------------------------------------
+# Port Down Fundamental Variables Assigned
+# ----------------------------------------
+
+print( "Generating fundamental graph data (Port Down Latency)." )
+
+title <- labs( title = "Port Down Latency", subtitle = lastUpdatedLabel() )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ wrapLegend +
+ title +
+ colors
+
+# -------------------------------------
+# Port Down Generating Bar Graph Format
+# -------------------------------------
+
+print( "Generating bar graph with error bars (Port Down Latency)." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+ width = barWidth )
+
+errorBarFormat <- geom_errorbar( width = barWidth,
+ color = errorBarColor )
+
+values <- geom_text( aes( x = downAvgsDataFrame$scale,
+ y = sumOfDownAvgs + 0.03 * max( sumOfDownAvgs ),
+ label = format( sumOfDownAvgs,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 7.0,
+ fontface = "bold" )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ errorBarFormat +
+ values
+
+# ---------------------------------
+# Port Down Exporting Graph to File
+# ---------------------------------
+
+saveGraph( errBarOutputFileDown )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R
new file mode 100644
index 0000000..2afe86e
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscaleTopo.R
@@ -0,0 +1,238 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Scale Topology Latency Test Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFscaleTopo_master_graph.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFscaleTopo.R <url> <port> <username> <pass> SCPFscaleTopo master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+save_directory = 7
+
+# Command line arguments are read.
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( length( args ) != save_directory ){
+ usage( "SCPFscaleTopo.R" )
+ quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+outputFile <- paste( args[ save_directory ],
+ args[ graph_title ],
+ "_",
+ args[ branch_name ],
+ "_graph.jpg",
+ sep="" )
+
+chartTitle <- "Scale Topology Latency Test"
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- initSQL( args[ database_host ],
+ args[ database_port ],
+ args[ database_u_id ],
+ args[ database_pw ] )
+
+# --------------------------
+# Scale Topology SQL Command
+# --------------------------
+
+print( "Generating Scale Topology SQL Command" )
+
+command <- paste( "SELECT * FROM scale_topo_latency_details WHERE branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT MAX( date ) FROM scale_topo_latency_details WHERE branch = '",
+ args[ branch_name ],
+ "' ) ",
+ sep = "" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "last_role_request_to_last_topology", "last_connection_to_last_role_request", "first_connection_to_last_connection" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+# Parse lists into data frames.
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+colnames( dataFrame ) <- c( "s",
+ "type",
+ "scale")
+
+# Format data frame so that the data is in the same order as it appeared in the file.
+dataFrame$type <- as.character( dataFrame$type )
+dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
+dataFrame$iterative <- seq( 1, nrow( fileData ), by = 1 )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+sum <- fileData[ 'last_role_request_to_last_topology' ] +
+ fileData[ 'last_connection_to_last_role_request' ] +
+ fileData[ 'first_connection_to_last_connection' ]
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+
+mainPlot <- ggplot( data = dataFrame, aes( x = iterative,
+ y = s,
+ fill = type ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+defaultTextSize()
+xScaleConfig <- scale_x_continuous( breaks = dataFrame$iterative,
+ label = dataFrame$scale )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (s)" )
+fillLabel <- labs( fill="Type" )
+
+width <- 0.6 # Width of the bars.
+
+theme <- graphTheme()
+
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+ webColor( "green" ),
+ webColor( "light_blue" ) ) )
+
+values <- geom_text( aes( x = dataFrame$iterative,
+                          y = sum + 0.02 * max( sum ),
+                          label = format( sum,
+                                          big.mark = ",",
+                                          scientific = FALSE ) ),
+                     fontface = "bold",
+                     size = 7.0 )
+
+wrapLegend <- guides( fill = guide_legend( nrow=2, byrow=TRUE ) )
+
+title <- labs( title = chartTitle, subtitle = lastUpdatedLabel() )
+
+# Store plot configurations as 1 variable
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ values +
+ wrapLegend +
+ title +
+ colors
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+
+barGraphFormat <- geom_bar( stat = "identity", width = width )
+
+result <- fundamentalGraphData +
+ barGraphFormat
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+saveGraph( outputFile )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R
new file mode 100644
index 0000000..045f5e7
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFscalingMaxIntents.R
@@ -0,0 +1,290 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# If you have any questions, or if you don't understand R,
+# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+has_flow_obj = 1
+database_host = 2
+database_port = 3
+database_u_id = 4
+database_pw = 5
+test_name = 6
+branch_name = 7
+old_flow = 8
+save_directory = 9
+
+print( "Reading commmand-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+if ( is.na( args[ save_directory ] ) ){
+ print( paste( "Usage: Rscript SCPFInstalledIntentsFlows",
+ "<has-flowObj>",
+ "<database-host>",
+ "<database-port>",
+ "<database-user-id>",
+ "<database-password>",
+ "<test-name>",
+ "<branch-name>",
+ "<using-old-flow>",
+ "<directory-to-save-graphs>",
+ sep=" " ) )
+
+ quit( status = 1 ) # basically exit(), but in R
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+fileFlowObjModifier <- ""
+sqlFlowObjModifier <- ""
+chartTitle <- "Number of Installed Intents & Flows"
+
+if ( args[ has_flow_obj ] == "y" ){
+ fileFlowObjModifier <- "_flowObj"
+ sqlFlowObjModifier <- "fobj_"
+ chartTitle <- "Number of Installed Intents & Flows\n with Flow Objectives"
+}
+fileOldFlowModifier <- ""
+if ( args[ old_flow ] == 'y' ){
+ fileOldFlowModifier <- "_OldFlow"
+ chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
+}
+
+outputFile <- paste( args[ save_directory ],
+ args[ test_name ],
+ fileFlowObjModifier,
+ fileOldFlowModifier,
+ "_",
+ args[ branch_name ],
+ "_errGraph.jpg",
+ sep="" )
+
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+con <- dbConnect( dbDriver( "PostgreSQL" ),
+ dbname = "onostest",
+ host = args[ database_host ],
+ port = strtoi( args[ database_port ] ),
+ user = args[ database_u_id ],
+ password = args[ database_pw ] )
+
+# -------------------------------
+# Scaling Max Intents SQL Command
+# -------------------------------
+
+print( "Scaling Max Intents SQL Command" )
+
+command <- paste( "SELECT * FROM max_intents_",
+ sqlFlowObjModifier,
+ "tests WHERE branch = '",
+ args[ branch_name ],
+ "' AND date IN ( SELECT MAX( date ) FROM max_intents_",
+ sqlFlowObjModifier,
+ "tests WHERE branch = '",
+ args[ branch_name ],
+ "' AND ",
+ ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
+ "is_old_flow",
+ " ) ",
+ sep="" )
+
+print( "Sending SQL command:" )
+print( command )
+fileData <- dbGetQuery( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# ------------
+# Data Sorting
+# ------------
+
+print( "Sorting data." )
+
+requiredColumns <- c( "max_intents_ovs", "max_flows_ovs" )
+
+tryCatch( avgs <- c( fileData[ requiredColumns] ),
+ error = function( e ) {
+ print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+ print( "Required columns: " )
+ print( requiredColumns )
+ print( "Actual columns: " )
+ print( names( fileData ) )
+ print( "Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+# --------------------
+# Construct Data Frame
+# --------------------
+
+print( "Constructing Data Frame" )
+
+dataFrame <- melt( avgs )
+dataFrame$scale <- fileData$scale
+
+colnames( dataFrame ) <- c( "ms", "type", "scale" )
+
+dataFrame$type <- as.character( dataFrame$type )
+dataFrame$type <- factor( dataFrame$type, levels=unique( dataFrame$type ) )
+
+dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist
+
+print( "Data Frame Results:" )
+print( dataFrame )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------
+# Generate Main Plot
+# ------------------
+
+print( "Creating main plot." )
+mainPlot <- ggplot( data = dataFrame, aes( x = scale,
+ y = ms,
+ fill = type ) )
+
+# ------------------------------
+# Fundamental Variables Assigned
+# ------------------------------
+
+print( "Generating fundamental graph data." )
+
+barWidth <- 1.3
+theme_set( theme_grey( base_size = 22 ) ) # set the default text size of the graph.
+xScaleConfig <- scale_x_continuous( breaks=c( 1, 3, 5, 7, 9 ) )
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Max Number of Intents/Flow Rules" )
+fillLabel <- labs( fill="Type" )
+imageWidth <- 15
+imageHeight <- 10
+imageDPI <- 200
+
+theme <- theme( plot.title = element_text( hjust = 0.5, size = 32, face = 'bold' ),
+ legend.position = "bottom",
+ legend.text = element_text( size=22 ),
+ legend.title = element_blank(),
+ legend.key.size = unit( 1.5, 'lines' ),
+ plot.subtitle = element_text( size=16, hjust=1.0 ) )
+
+subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )
+
+colors <- scale_fill_manual( values = c( "#F77670",
+ "#619DFA" ) )
+
+wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
+
+title <- labs( title = chartTitle, subtitle = subtitle )
+
+fundamentalGraphData <- mainPlot +
+ xScaleConfig +
+ xLabel +
+ yLabel +
+ fillLabel +
+ theme +
+ wrapLegend +
+ title +
+ colors
+
+# ---------------------------
+# Generating Bar Graph Format
+# ---------------------------
+
+print( "Generating bar graph." )
+
+barGraphFormat <- geom_bar( stat = "identity",
+ position = position_dodge(),
+ width = barWidth )
+
+values <- geom_text( aes( x = dataFrame$scale,
+ y = dataFrame$ms + 0.015 * max( dataFrame$ms ),
+ label = format( dataFrame$ms,
+ digits=3,
+ big.mark = ",",
+ scientific = FALSE ) ),
+ size = 5.2,
+ fontface = "bold",
+ position = position_dodge( width = 1.25 ) )
+
+result <- fundamentalGraphData +
+ barGraphFormat +
+ values
+
+# -----------------------
+# Exporting Graph to File
+# -----------------------
+
+print( paste( "Saving bar chart to", outputFile ) )
+
+tryCatch( ggsave( outputFile,
+ width = imageWidth,
+ height = imageHeight,
+ dpi = imageDPI ),
+ error = function( e ){
+ print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
+ print( e )
+ quit( status = 1 )
+ }
+ )
+
+print( paste( "[SUCCESS] Successfully wrote bar chart out to", outputFile ) )
+quit( status = 0 )
diff --git a/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R
new file mode 100644
index 0000000..192ac8f
--- /dev/null
+++ b/TestON/JenkinsFile/wikiGraphRScripts/SCPFspecificGraphRScripts/SCPFswitchLat.R
@@ -0,0 +1,361 @@
+# Copyright 2017 Open Networking Foundation (ONF)
+#
+# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
+# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
+# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
+#
+# TestON is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# TestON is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with TestON. If not, see <http://www.gnu.org/licenses/>.
+#
+# Example script:
+# Switch Latency Graph (https://jenkins.onosproject.org/view/QA/job/postjob-BM/lastSuccessfulBuild/artifact/SCPFswitchLat_master_UpErrBarWithStack.jpg):
+# Rscript SCPFspecificGraphRScripts/SCPFswitchLat.R <url> <port> <username> <pass> SCPFswitchLat master /path/to/save/directory/
+
+# **********************************************************
+# STEP 1: Data management.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 1: Data management." )
+print( "**********************************************************" )
+
+# Index of the save-directory argument; also the expected total number
+# of CLI arguments. The earlier indices (database credentials, test
+# name, branch) are presumably defined by dependencies/cliArgs.R --
+# verify against that helper.
+save_directory <- 7
+
+# Command line arguments are read.
+print( "Reading command-line args." )
+args <- commandArgs( trailingOnly=TRUE )
+
+# ----------------
+# Import Libraries
+# ----------------
+
+print( "Importing libraries." )
+library( ggplot2 )
+library( reshape2 )
+library( RPostgreSQL ) # For databases
+# Shared helper scripts; presumably these define usage(), initSQL(),
+# retrieveData(), saveGraph(), the styling helpers, and the argument
+# index constants (branch_name, database_*) used below -- verify.
+source( "dependencies/saveGraph.R" )
+source( "dependencies/fundamentalGraphData.R" )
+source( "dependencies/initSQL.R" )
+source( "dependencies/cliArgs.R" )
+
+# -------------------
+# Check CLI Arguments
+# -------------------
+
+print( "Verifying CLI args." )
+
+# Exactly save_directory (7) arguments are required; anything else
+# prints the usage text and aborts with a nonzero status.
+if ( length( args ) != save_directory ){
+    usage( "SCPFswitchLat.R" )
+    quit( status = 1 )
+}
+
+# -----------------
+# Create File Names
+# -----------------
+
+print( "Creating filenames and title of graph." )
+
+# Both output images share the common prefix
+# <save dir>SCPFswitchLat_<branch>; build it once and append the
+# graph-specific suffixes.
+outputFilePrefix <- paste( args[ save_directory ],
+                           "SCPFswitchLat_",
+                           args[ branch_name ],
+                           sep="" )
+
+errBarOutputFileUp <- paste( outputFilePrefix, "_UpErrBarWithStack.jpg", sep="" )
+
+errBarOutputFileDown <- paste( outputFilePrefix, "_DownErrBarWithStack.jpg", sep="" )
+# ------------------
+# SQL Initialization
+# ------------------
+
+print( "Initializing SQL" )
+
+# Open the PostgreSQL connection from the credential CLI arguments.
+con <- initSQL( args[ database_host ],
+                args[ database_port ],
+                args[ database_u_id ],
+                args[ database_pw ] )
+
+# --------------------------
+# Switch Latency SQL Command
+# --------------------------
+
+print( "Generating Switch Latency SQL Command" )
+
+# Fetch all rows belonging to the most recent run on the given branch.
+# NOTE(review): the branch name is interpolated directly into the SQL
+# string; acceptable for trusted Jenkins parameters, but not
+# injection-safe for arbitrary input.
+command <- paste( "SELECT * FROM switch_latency_details WHERE branch = '",
+                  args[ branch_name ],
+                  "' AND date IN ( SELECT MAX( date ) FROM switch_latency_details WHERE branch='",
+                  args[ branch_name ],
+                  "' )",
+                  sep="" )
+
+fileData <- retrieveData( con, command )
+
+# **********************************************************
+# STEP 2: Organize data.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 2: Organize Data." )
+print( "**********************************************************" )
+
+# -------------------------------
+# Switch Up Averages Data Sorting
+# -------------------------------
+
+print( "Sorting data for Switch Up Averages." )
+
+# Per-phase latency averages that make up the stacked "Switch Up" bars.
+requiredColumns <- c( "up_device_to_graph_avg",
+                      "feature_reply_to_device_avg",
+                      "tcp_to_feature_reply_avg" )
+
+# Subsetting errors (e.g. a column missing from the query result) abort
+# here with a diagnostic dump instead of failing later in melt().
+tryCatch( upAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+# ------------------------------
+# Switch Up Construct Data Frame
+# ------------------------------
+
+print( "Constructing Switch Up data frame." )
+
+# Reshape the three per-phase averages into long format (one row per
+# scale/phase pair), attach scale and standard deviation, and drop
+# incomplete rows before renaming.
+upAvgsData <- melt( upAvgs )
+upAvgsData$scale <- fileData$scale
+upAvgsData$up_std <- fileData$up_std
+upAvgsData <- na.omit( upAvgsData )
+
+colnames( upAvgsData ) <- c( "ms",
+                             "type",
+                             "scale",
+                             "stds" )
+
+# Freeze the stacking order of the phases to first-appearance order.
+upAvgsData$type <- as.character( upAvgsData$type )
+upAvgsData$type <- factor( upAvgsData$type, levels=unique( upAvgsData$type ) )
+
+# Total (stacked) bar height per scale; used later for label placement.
+sumOfUpAvgs <- fileData[ 'up_device_to_graph_avg' ] +
+               fileData[ 'feature_reply_to_device_avg' ] +
+               fileData[ 'tcp_to_feature_reply_avg' ]
+
+print( "Up Averages Results:" )
+print( upAvgsData )
+
+# ---------------------------------
+# Switch Down Averages Data Sorting
+# ---------------------------------
+
+print( "Sorting data for Switch Down Averages." )
+
+# Per-phase latency averages that make up the stacked "Switch Down" bars.
+requiredColumns <- c( "down_device_to_graph_avg",
+                      "ack_to_device_avg",
+                      "fin_ack_to_ack_avg" )
+
+# Subsetting errors abort here with a diagnostic dump (mirrors the
+# Switch Up handling above).
+tryCatch( downAvgs <- c( fileData[ requiredColumns ] ),
+          error = function( e ) {
+              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
+              print( "Required columns: " )
+              print( requiredColumns )
+              print( "Actual columns: " )
+              print( names( fileData ) )
+              print( "Error dump:" )
+              print( e )
+              quit( status = 1 )
+          }
+        )
+
+# --------------------------------
+# Switch Down Construct Data Frame
+# --------------------------------
+
+print( "Constructing Switch Down data frame." )
+
+# Reshape the three per-phase averages into long format (one row per
+# scale/phase pair) and attach scale and standard deviation.
+downAvgsData <- melt( downAvgs )
+downAvgsData$scale <- fileData$scale
+downAvgsData$down_std <- fileData$down_std
+# Drop incomplete rows before renaming/refactoring, matching the order
+# used for the Switch Up data frame above.
+downAvgsData <- na.omit( downAvgsData )
+
+colnames( downAvgsData ) <- c( "ms",
+                               "type",
+                               "scale",
+                               "stds" )
+
+# Freeze the stacking order of the phases to first-appearance order.
+downAvgsData$type <- as.character( downAvgsData$type )
+downAvgsData$type <- factor( downAvgsData$type, levels=unique( downAvgsData$type ) )
+
+# Total (stacked) bar height per scale; used later for label placement.
+sumOfDownAvgs <- fileData[ 'down_device_to_graph_avg' ] +
+                 fileData[ 'ack_to_device_avg' ] +
+                 fileData[ 'fin_ack_to_ack_avg' ]
+
+print( "Down Averages Results:" )
+print( downAvgsData )
+
+# **********************************************************
+# STEP 3: Generate graphs.
+# **********************************************************
+
+print( "**********************************************************" )
+print( "STEP 3: Generate Graph." )
+print( "**********************************************************" )
+
+# ------------------------------------
+# Initialize Variables For Both Graphs
+# ------------------------------------
+
+print( "Initializing variables used in both graphs." )
+
+# Styling helpers (defaultTextSize, webColor, graphTheme,
+# lastUpdatedLabel) are presumably provided by the sourced dependency
+# scripts -- verify.
+defaultTextSize()
+# Only odd scale values are labeled on the x axis.
+xScaleConfig <- scale_x_continuous( breaks = c( 1, 3, 5, 7, 9 ) )
+
+xLabel <- xlab( "Scale" )
+yLabel <- ylab( "Latency (ms)" )
+
+errorBarColor <- webColor( "darkerGray" )
+barWidth <- 1
+
+theme <- graphTheme()
+
+subtitle <- lastUpdatedLabel()
+
+# One fill color per latency phase (three stacked segments).
+colors <- scale_fill_manual( values=c( webColor( "redv2" ),
+                                       webColor( "light_blue" ),
+                                       webColor( "green" ) ) )
+
+# ----------------------------
+# Switch Up Generate Main Plot
+# ----------------------------
+
+print( "Creating main plot (Switch Up Latency)." )
+
+# Stacked bars of the three up-phases; the error bar spans from the
+# end-to-end average up to that average plus one standard deviation.
+# NOTE(review): fileData[ 'up_end_to_end_avg' ] has one row per scale
+# while upAvgsData has three; this relies on ggplot recycling the
+# shorter column -- confirm it still works on newer ggplot2 versions.
+mainPlot <- ggplot( data = upAvgsData, aes( x = scale,
+                                            y = ms,
+                                            fill = type,
+                                            ymin = fileData[ 'up_end_to_end_avg' ],
+                                            ymax = fileData[ 'up_end_to_end_avg' ] + stds ) )
+
+# ----------------------------------------
+# Switch Up Fundamental Variables Assigned
+# ----------------------------------------
+
+print( "Generating fundamental graph data (Switch Up Latency)." )
+
+title <- labs( title = "Switch Up Latency", subtitle = subtitle )
+
+# Combine the base plot with the shared display settings; bar, error-bar
+# and label layers are added below.
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title +
+                        colors
+
+# -------------------------------------
+# Switch Up Generating Bar Graph Format
+# -------------------------------------
+
+print( "Generating bar graph with error bars (Switch Up Latency)." )
+
+# Stacked bars (default position) plus one error bar per scale.
+barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
+errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
+
+# Total-latency labels drawn 4% above the top of each stack.
+# NOTE(review): upAvgsData$... inside aes() bypasses data masking, and
+# sumOfUpAvgs is shorter than the layer data -- relies on recycling.
+barGraphValues <- geom_text( aes( x = upAvgsData$scale,
+                                  y = sumOfUpAvgs + 0.04 * max( sumOfUpAvgs ),
+                                  label = format( sumOfUpAvgs,
+                                                  digits = 3,
+                                                  big.mark = ",",
+                                                  scientific = FALSE ) ),
+                             size = 7.0,
+                             fontface = "bold" )
+
+# Legend wrapped onto two rows for this graph.
+wrapLegend <- guides( fill = guide_legend( nrow = 2, byrow = TRUE ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          barGraphValues +
+          wrapLegend
+
+# ---------------------------------
+# Switch Up Exporting Graph to File
+# ---------------------------------
+
+saveGraph( errBarOutputFileUp )
+
+# ------------------------------
+# Switch Down Generate Main Plot
+# ------------------------------
+
+print( "Creating main plot (Switch Down Latency)." )
+
+# Stacked bars of the three down-phases; the error bar spans from the
+# end-to-end average up to that average plus one standard deviation.
+# NOTE(review): fileData[ 'down_end_to_end_avg' ] has one row per scale
+# while downAvgsData has three; this relies on ggplot recycling the
+# shorter column -- confirm it still works on newer ggplot2 versions.
+mainPlot <- ggplot( data = downAvgsData, aes( x = scale,
+                                              y = ms,
+                                              fill = type,
+                                              ymin = fileData[ 'down_end_to_end_avg' ],
+                                              ymax = fileData[ 'down_end_to_end_avg' ] + stds ) )
+
+# ------------------------------------------
+# Switch Down Fundamental Variables Assigned
+# ------------------------------------------
+
+print( "Generating fundamental graph data (Switch Down Latency)." )
+
+title <- labs( title = "Switch Down Latency", subtitle = subtitle )
+
+# Combine the base plot with the shared display settings; bar, error-bar
+# and label layers are added below.
+fundamentalGraphData <- mainPlot +
+                        xScaleConfig +
+                        xLabel +
+                        yLabel +
+                        theme +
+                        title +
+                        colors
+
+# ---------------------------------------
+# Switch Down Generating Bar Graph Format
+# ---------------------------------------
+
+print( "Generating bar graph with error bars (Switch Down Latency)." )
+# Stacked bars (default position) plus one error bar per scale.
+barGraphFormat <- geom_bar( stat = "identity", width = barWidth )
+errorBarFormat <- geom_errorbar( width = barWidth, color = errorBarColor )
+
+# Total-latency labels drawn 4% above the top of each stack.
+# NOTE(review): downAvgsData$... inside aes() bypasses data masking, and
+# sumOfDownAvgs is shorter than the layer data -- relies on recycling.
+barGraphValues <- geom_text( aes( x = downAvgsData$scale,
+                                  y = sumOfDownAvgs + 0.04 * max( sumOfDownAvgs ),
+                                  label = format( sumOfDownAvgs,
+                                                  digits = 3,
+                                                  big.mark = ",",
+                                                  scientific = FALSE ) ),
+                             size = 7.0,
+                             fontface = "bold" )
+
+# Legend kept on one row here; the Switch Up graph uses nrow = 2 --
+# confirm the difference is intentional.
+wrapLegend <- guides( fill = guide_legend( nrow = 1, byrow = TRUE ) )
+
+result <- fundamentalGraphData +
+          barGraphFormat +
+          errorBarFormat +
+          barGraphValues +
+          wrapLegend
+
+# -----------------------------------
+# Switch Down Exporting Graph to File
+# -----------------------------------
+
+saveGraph( errBarOutputFileDown )
+quit( status = 0 )