blob: ffb91a9f52d585312db156ef5814fcae53b5d205 [file] [log] [blame]
1# Copyright 2017 Open Networking Foundation (ONF)
2#
3# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
4# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
5# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
6#
7# TestON is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation, either version 2 of the License, or
10# (at your option) any later version.
11#
12# TestON is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License
18# along with TestON. If not, see <http://www.gnu.org/licenses/>.
19#
20# If you have any questions, or if you don't understand R,
21# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu
22
# **********************************************************
# STEP 1: Data management.
# **********************************************************

# Named indices into the command-line argument vector (R vectors are
# 1-based). Used everywhere below instead of magic numbers.
has_flow_obj <- 1
database_host <- 2
database_port <- 3
database_u_id <- 4
database_pw <- 5
test_name <- 6
branch_name <- 7
has_neighbors <- 8
old_flow <- 9
save_directory <- 10

print( "**********************************************************" )
print( "STEP 1: Data management." )
print( "**********************************************************" )

# Command line arguments are read.
# FIX: corrected the "commmand" typo in the status message.
print( "Reading command-line args." )
args <- commandArgs( trailingOnly=TRUE )
44
45# ----------------
46# Import Libraries
47# ----------------
48
49print( "Importing libraries." )
50library( ggplot2 )
51library( reshape2 )
52library( RPostgreSQL ) # For databases
53
# -------------------
# Check CLI Arguments
# -------------------

print( "Verifying CLI args." )

# All ten arguments are mandatory; if the final one (the save directory)
# is absent, print the usage string and abort.
if ( is.na( args[ save_directory ] ) ) {

    usage <- paste( "Usage: Rscript SCPFflowTp1g.R",
                    "<has-flow-obj>",
                    "<database-host>",
                    "<database-port>",
                    "<database-user-id>",
                    "<database-password>",
                    "<test-name>",
                    "<branch-name>",
                    "<has-neighbors>",
                    "<using-old-flow>",
                    "<directory-to-save-graphs>" )
    print( usage )

    quit( status = 1 ) # basically exit(), but in R
}
77
# -----------------
# Create File Names
# -----------------

print( "Creating filenames and title of graph." )

# Decode the yes/no CLI flags once, up front.
usingFlowObj <- args[ has_flow_obj ] == 'y'
usingNeighbors <- args[ has_neighbors ] == 'y'
usingOldFlow <- args[ old_flow ] == 'y'

# Modifiers woven into the output filename and the SQL table names.
fileFlowObjModifier <- if ( usingFlowObj ) "_flowObj" else ""
sqlFlowObjModifier <- if ( usingFlowObj ) "_fobj" else ""
fileNeighborsModifier <- if ( usingNeighbors ) "all" else "no"
commandNeighborModifier <- if ( usingNeighbors ) "scale=1 OR NOT " else ""
fileOldFlowModifier <- if ( usingOldFlow ) "_OldFlow" else ""

# Assemble the chart title piece by piece, mirroring the flag decoding.
chartTitle <- "Flow Throughput Test"
if ( usingFlowObj ) {
    chartTitle <- paste0( chartTitle, " with Flow Objectives" )
}
chartTitle <- paste0( chartTitle, "\nNeighbors =" )
chartTitle <- paste( chartTitle, if ( usingNeighbors ) "Cluster Size - 1" else "0" )
if ( usingOldFlow ) {
    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
}

# Full path of the graph image this run will produce.
errBarOutputFile <- paste0( args[ save_directory ],
                            args[ test_name ],
                            "_",
                            args[ branch_name ],
                            "_",
                            fileNeighborsModifier,
                            "-neighbors",
                            fileFlowObjModifier,
                            fileOldFlowModifier,
                            "_graph.jpg" )
# ------------------
# SQL Initialization
# ------------------

print( "Initializing SQL" )

# Open a connection to the central test-results PostgreSQL database using
# the credentials passed on the command line. The port arrives as a
# string, so strtoi() converts it to the integer dbConnect() expects.
# NOTE(review): the connection is never explicitly dbDisconnect()-ed;
# it is only released when the script process exits.
con <- dbConnect( dbDriver( "PostgreSQL" ),
                  dbname = "onostest",
                  host = args[ database_host ],
                  port = strtoi( args[ database_port ] ),
                  user = args[ database_u_id ],
                  password = args[ database_pw ] )
132
# ---------------------------
# Flow Throughput SQL Command
# ---------------------------

print( "Generating Flow Throughput SQL command." )

# Build the query: per node scale, average the per-run throughput means
# and standard deviations from the most recent test date on this branch.
# "sqlFlowObjModifier" selects the flow-objective variant of the table;
# "commandNeighborModifier" widens the neighbor filter when neighbors
# were used; the old_flow flag selects rows by the is_old_flow column.
# NOTE(review): the branch name is interpolated directly into the SQL
# string (no parameterization) -- acceptable only because the arguments
# come from the trusted test harness, not end users.
command <- paste( "SELECT scale, avg( avg ), avg( std ) FROM flow_tp",
                  sqlFlowObjModifier,
                  "_tests WHERE (",
                  commandNeighborModifier,
                  "neighbors = 0 ) AND branch = '",
                  args[ branch_name ],
                  "' AND date IN ( SELECT max( date ) FROM flow_tp",
                  sqlFlowObjModifier,
                  "_tests WHERE branch='",
                  args[ branch_name ],
                  "' AND ",
                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
                  "is_old_flow",
                  " ) GROUP BY scale ORDER BY scale",
                  sep="" )

print( "Sending SQL command:" )
print( command )

# Run the query; fileData becomes a data frame of the result rows.
fileData <- dbGetQuery( con, command )
159
# **********************************************************
# STEP 2: Organize data.
# **********************************************************

print( "**********************************************************" )
print( "STEP 2: Organize Data." )
print( "**********************************************************" )

# ------------
# Data Sorting
# ------------

print( "Sorting data for Flow Throughput." )

# The query returns, per node scale: the mean of per-run averages and
# the mean of per-run standard deviations.
colnames( fileData ) <- c( "scale", "avg", "std" )

requiredColumns <- c( "avg" )

# Extract the required columns as a list; abort with a diagnostic dump
# if any of them is missing from the query result.
avgs <- tryCatch(
    c( fileData[ requiredColumns ] ),
    error = function( e ) {
        print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
        print( "Required columns: " )
        print( requiredColumns )
        print( "Actual columns: " )
        print( names( fileData ) )
        print( "Error dump:" )
        print( e )
        quit( status = 1 )
    }
)
192
193
# ----------------------------
# Flow TP Construct Data Frame
# ----------------------------

print( "Constructing Flow TP data frame." )

# reshape2::melt() flattens the "avgs" list into a long-format data
# frame; the node-scale and std-dev columns are then attached to it.
dataFrame <- melt( avgs ) # This is where reshape2 comes in. Avgs list is converted to data frame
dataFrame$scale <- fileData$scale # Add node scaling to the data frame.
dataFrame$std <- fileData$std # Std-dev column drives the error bars later.

# NOTE(review): the rename below assumes melt() emits its value column
# first and its label column second -- confirm if reshape2 is upgraded.
colnames( dataFrame ) <- c( "throughput",
                            "type",
                            "scale",
                            "std" )

dataFrame <- na.omit( dataFrame ) # Omit any data that doesn't exist

print( "Data Frame Results:" )
print( dataFrame )
213
# **********************************************************
# STEP 3: Generate graphs.
# **********************************************************

print( "**********************************************************" )
print( "STEP 3: Generate Graph." )
print( "**********************************************************" )

# ------------------
# Generate Main Plot
# ------------------

print( "Generating main plot." )
# Create the primary plot here.
# ggplot contains the following arguments:
#    - data: the data frame that the graph will be based off of
#    - aes: the aesthetics of the graph which require:
#        - x: x-axis values (usually node scaling)
#        - y: y-axis values (usually time in milliseconds)
#        - ymin / ymax: bottom and top of the error bars
#          (here: throughput up to throughput + std)
#        - fill: the category of the colored side-by-side bars (usually type)

mainPlot <- ggplot( data = dataFrame, aes( x = scale,
                                           y = throughput,
                                           ymin = throughput,
                                           ymax = throughput + std,
                                           fill = type ) )
# ------------------------------
# Fundamental Variables Assigned
# ------------------------------

print( "Generating fundamental graph data." )

# Formatting the plot.
theme_set( theme_grey( base_size = 22 ) ) # set the default text size of the graph.
width <- 0.7 # Width of the bars.

# Put a tick/label at every node-scale value present in the data.
# FIX: the parameter is "labels"; the previous "label" worked only via
# R's partial argument matching.
xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale,
                                    labels = dataFrame$scale )
xLabel <- xlab( "Scale" )
yLabel <- ylab( "Throughput (,000 Flows/sec)" )
fillLabel <- labs( fill="Type" )

# Output-image geometry and error-bar color.
imageWidth <- 15   # inches
imageHeight <- 10  # inches
imageDPI <- 200
errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 ) # medium gray

theme <- theme( plot.title = element_text( hjust = 0.5,
                                           size = 32,
                                           face = 'bold' ),
                plot.subtitle = element_text( size=16, hjust=1.0 ) )

# Timestamp subtitle records when the graph was generated.
subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )

title <- labs( title = chartTitle, subtitle = subtitle )

# Store plot configurations as 1 variable
fundamentalGraphData <- mainPlot +
                        xScaleConfig +
                        xLabel +
                        yLabel +
                        fillLabel +
                        theme +
                        title
276
# ---------------------------
# Generating Bar Graph Format
# ---------------------------

# Create the stacked bar graph with error bars.
# geom_bar contains:
#    - stat: data formatting (usually "identity")
#    - width: the width of the bar types (declared above)
# geom_errorbar contains similar arguments as geom_bar.
print( "Generating bar graph with error bars." )
barGraphFormat <- geom_bar( stat = "identity",
                            width = width,
                            fill = "#FFAA3C" ) # single orange fill for all bars

# Error bars inherit ymin/ymax from the aes() set on mainPlot.
errorBarFormat <- geom_errorbar( width = width,
                                 position = position_dodge(),
                                 color = errorBarColor )

# Numeric labels placed 3% of the max throughput above each bar.
# NOTE(review): referencing dataFrame$... inside aes() bypasses ggplot's
# data binding; it works here because it is the same data frame the plot
# was built from, but bare column names would be safer.
values <- geom_text( aes( x = dataFrame$scale,
                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
                          label = format( dataFrame$throughput,
                                          digits=3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     size = 7.0,
                     fontface = "bold" )

# Compose the final plot: shared base configuration plus the layers above.
result <- fundamentalGraphData +
          barGraphFormat +
          errorBarFormat +
          values
308
# -----------------------
# Exporting Graph to File
# -----------------------

print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )

# ggsave() with no plot argument writes ggplot2's "last plot".
# NOTE(review): this appears to pick up `result` because each `+` on a
# ggplot updates the last-plot register -- passing `result` explicitly
# would be clearer; confirm before relying on it.
# Width/height are in inches at imageDPI dots per inch.
tryCatch( ggsave( errBarOutputFile,
                  width = imageWidth,
                  height = imageHeight,
                  dpi = imageDPI ),
          error = function( e ){
              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
              print( e )
              quit( status = 1 )
          }
        )

print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
quit( status = 0 )