# Copyright 2017 Open Networking Foundation (ONF)
#
# Please refer questions to either the onos test mailing list at <onos-test@onosproject.org>,
# the System Testing Plans and Results wiki page at <https://wiki.onosproject.org/x/voMg>,
# or the System Testing Guide page at <https://wiki.onosproject.org/x/WYQg>
#
# TestON is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# TestON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TestON. If not, see <http://www.gnu.org/licenses/>.
#
# If you have any questions, or if you don't understand R,
# please contact Jeremy Ronquillo: j_ronquillo@u.pacific.edu

# **********************************************************
# STEP 1: Data management.
# **********************************************************
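# Each constant below is the 1-based position of the corresponding command-line
# argument in the vector returned by commandArgs( trailingOnly=TRUE ), so that
# arguments can be referenced by name ( e.g. args[ test_name ] ) instead of by index.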
has_flow_obj = 1
database_host = 2
database_port = 3
database_u_id = 4
database_pw = 5
test_name = 6
branch_name = 7
has_neighbors = 8
old_flow = 9
save_directory = 10

print( "**********************************************************" )
print( "STEP 1: Data management." )
print( "**********************************************************" )

# Command line arguments are read.
print( "Reading command-line args." )
args <- commandArgs( trailingOnly=TRUE )

# ----------------
# Import Libraries
# ----------------

print( "Importing libraries." )
library( ggplot2 )
library( reshape2 )
library( RPostgreSQL ) # For databases

# -------------------
# Check CLI Arguments
# -------------------

print( "Verifying CLI args." )

if ( is.na( args[ save_directory ] ) ){

    print( paste( "Usage: Rscript SCPFflowTp1g.R",
                  "<has-flow-obj>",
                  "<database-host>",
                  "<database-port>",
                  "<database-user-id>",
                  "<database-password>",
                  "<test-name>",
                  "<branch-name>",
                  "<has-neighbors>",
                  "<using-old-flow>",
                  "<directory-to-save-graphs>",
                  sep=" " ) )

    quit( status = 1 )  # basically exit(), but in R
}
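
# Example invocation (illustrative placeholder values only):
#   Rscript SCPFflowTp1g.R y localhost 5432 onos_user onos_pw SCPFflowTp1g master n n /tmp/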

# -----------------
# Create File Names
# -----------------

print( "Creating filenames and title of graph." )

chartTitle <- "Flow Throughput Test"
fileNeighborsModifier <- "no"
commandNeighborModifier <- ""
fileFlowObjModifier <- ""
sqlFlowObjModifier <- ""
if ( args[ has_flow_obj ] == 'y' ){
    fileFlowObjModifier <- "_flowObj"
    sqlFlowObjModifier <- "_fobj"
    chartTitle <- paste( chartTitle, " with Flow Objectives", sep="" )
}

chartTitle <- paste( chartTitle, "\nNeighbors =", sep="" )

fileOldFlowModifier <- ""
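
# When <has-neighbors> is 'y', commandNeighborModifier turns the SQL filter built
# below into "( scale=1 OR NOT neighbors = 0 )", selecting runs where every node
# installs flows toward all of its peers ( neighbors = cluster size - 1 );
# otherwise the filter remains "( neighbors = 0 )".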
if ( args[ has_neighbors ] == 'y' ){
    fileNeighborsModifier <- "all"
    commandNeighborModifier <- "scale=1 OR NOT "
    chartTitle <- paste( chartTitle, "Cluster Size - 1" )
} else {
    chartTitle <- paste( chartTitle, "0" )
}
if ( args[ old_flow ] == 'y' ){
    fileOldFlowModifier <- "_OldFlow"
    chartTitle <- paste( chartTitle, "With Eventually Consistent Flow Rule Store", sep="\n" )
}
errBarOutputFile <- paste( args[ save_directory ],
                           args[ test_name ],
                           "_",
                           args[ branch_name ],
                           "_",
                           fileNeighborsModifier,
                           "-neighbors",
                           fileFlowObjModifier,
                           fileOldFlowModifier,
                           "_graph.jpg",
                           sep="" )
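# Illustrative example of the resulting path ( hypothetical values: save directory
# "/tmp/", test name "SCPFflowTp1g", branch "master", no flow objectives,
# no neighbors, new flow rule store ):
#   /tmp/SCPFflowTp1g_master_no-neighbors_graph.jpg
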
# ------------------
# SQL Initialization
# ------------------

print( "Initializing SQL" )

con <- dbConnect( dbDriver( "PostgreSQL" ),
                  dbname = "onostest",
                  host = args[ database_host ],
                  port = strtoi( args[ database_port ] ),
                  user = args[ database_u_id ],
                  password = args[ database_pw ] )

# ---------------------------
# Flow Throughput SQL Command
# ---------------------------

print( "Generating Flow Throughput SQL command." )

command <- paste( "SELECT scale, avg( avg ), avg( std ) FROM flow_tp",
                  sqlFlowObjModifier,
                  "_tests WHERE (",
                  commandNeighborModifier,
                  "neighbors = 0 ) AND branch = '",
                  args[ branch_name ],
                  "' AND date IN ( SELECT max( date ) FROM flow_tp",
                  sqlFlowObjModifier,
                  "_tests WHERE branch='",
                  args[ branch_name ],
                  "' AND ",
                  ( if( args[ old_flow ] == 'y' ) "" else "NOT " ),
                  "is_old_flow",
                  " ) GROUP BY scale ORDER BY scale",
                  sep="" )
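
# Illustrative example of the generated query ( assuming has-flow-obj = 'n',
# has-neighbors = 'n', using-old-flow = 'n', and branch "master" ):
#   SELECT scale, avg( avg ), avg( std ) FROM flow_tp_tests
#   WHERE ( neighbors = 0 ) AND branch = 'master'
#   AND date IN ( SELECT max( date ) FROM flow_tp_tests
#                 WHERE branch='master' AND NOT is_old_flow )
#   GROUP BY scale ORDER BY scale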

print( "Sending SQL command:" )
print( command )

fileData <- dbGetQuery( con, command )

# **********************************************************
# STEP 2: Organize data.
# **********************************************************

print( "**********************************************************" )
print( "STEP 2: Organize Data." )
print( "**********************************************************" )

# ------------
# Data Sorting
# ------------

print( "Sorting data for Flow Throughput." )

colnames( fileData ) <- c( "scale",
                           "avg",
                           "std" )

requiredColumns <- c( "avg" )

tryCatch( avgs <- c( fileData[ requiredColumns ] ),
          error = function( e ) {
              print( "[ERROR] One or more expected columns are missing from the data. Please check that the data and SQL command are valid, then try again." )
              print( "Required columns: " )
              print( requiredColumns )
              print( "Actual columns: " )
              print( names( fileData ) )
              print( "Error dump:" )
              print( e )
              quit( status = 1 )
          }
        )

# ----------------------------
# Flow TP Construct Data Frame
# ----------------------------

print( "Constructing Flow TP data frame." )

dataFrame <- melt( avgs )           # melt() (reshape2) converts the avgs list into a long-format data frame.
dataFrame$scale <- fileData$scale   # Add node scaling to the data frame.
dataFrame$std <- fileData$std       # Add standard deviations to the data frame.

colnames( dataFrame ) <- c( "throughput",
                            "type",
                            "scale",
                            "std" )
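
# After renaming, each row of dataFrame holds: throughput ( mean throughput for
# that scale ), type ( the melted list element name, always "avg" here ),
# scale ( ONOS cluster size ) and std ( mean standard deviation ).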

dataFrame <- na.omit( dataFrame )   # Drop any rows that contain missing (NA) values.

print( "Data Frame Results:" )
print( dataFrame )

# **********************************************************
# STEP 3: Generate graphs.
# **********************************************************

print( "**********************************************************" )
print( "STEP 3: Generate Graph." )
print( "**********************************************************" )

# ------------------
# Generate Main Plot
# ------------------

print( "Generating main plot." )
# Create the primary plot here.
# ggplot contains the following arguments:
#    - data: the data frame that the graph will be based on
#    - aes: the aesthetics of the graph, which require:
#       - x: x-axis values (node scaling here)
#       - y: y-axis values (throughput here)
#       - fill: the category of the colored bars (type here)

mainPlot <- ggplot( data = dataFrame, aes( x = scale,
                                           y = throughput,
                                           ymin = throughput,
                                           ymax = throughput + std,
                                           fill = type ) )
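# Note: ymin/ymax are not drawn by the bars themselves; they are inherited by
# geom_errorbar() further below, so each error bar spans from the mean throughput
# to the mean plus one standard deviation. The fill aesthetic maps to the "type"
# column created by melt().
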
# ------------------------------
# Fundamental Variables Assigned
# ------------------------------

print( "Generating fundamental graph data." )

# Formatting the plot
theme_set( theme_grey( base_size = 22 ) )   # set the default text size of the graph.
width <- 0.7                                # Width of the bars.
xScaleConfig <- scale_x_continuous( breaks = dataFrame$scale,
                                    label = dataFrame$scale )
xLabel <- xlab( "Scale" )
yLabel <- ylab( "Throughput (,000 Flows/sec)" )
fillLabel <- labs( fill="Type" )
imageWidth <- 15
imageHeight <- 10
imageDPI <- 200
errorBarColor <- rgb( 140, 140, 140, maxColorValue=255 )
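
# ggsave() interprets width and height as inches by default, so 15 x 10 at
# 200 DPI yields roughly a 3000 x 2000 pixel image.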

theme <- theme( plot.title = element_text( hjust = 0.5,
                                           size = 32,
                                           face = 'bold' ),
                plot.subtitle = element_text( size=16, hjust=1.0 ) )

subtitle <- paste( "Last Updated: ", format( Sys.time(), format = "%b %d, %Y at %I:%M %p %Z" ), sep="" )

title <- labs( title = chartTitle, subtitle = subtitle )

# Store all plot configurations in one variable.
fundamentalGraphData <- mainPlot +
                        xScaleConfig +
                        xLabel +
                        yLabel +
                        fillLabel +
                        theme +
                        title

# ---------------------------
# Generating Bar Graph Format
# ---------------------------

# Create the bar graph with error bars.
# geom_bar contains:
#    - stat: data formatting ("identity" plots the values as-is)
#    - width: the width of the bars (declared above)
# geom_errorbar takes arguments similar to geom_bar.
print( "Generating bar graph with error bars." )
barGraphFormat <- geom_bar( stat = "identity",
                            width = width,
                            fill = "#FFAA3C" )

errorBarFormat <- geom_errorbar( width = width,
                                 position = position_dodge(),
                                 color = errorBarColor )

values <- geom_text( aes( x = dataFrame$scale,
                          y = dataFrame$throughput + 0.03 * max( dataFrame$throughput ),
                          label = format( dataFrame$throughput,
                                          digits=3,
                                          big.mark = ",",
                                          scientific = FALSE ) ),
                     size = 7.0,
                     fontface = "bold" )
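
# geom_text() prints each bar's throughput value ( formatted to 3 significant
# digits with comma thousands separators ) just above the bar, offset upward by
# 3% of the maximum throughput.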

result <- fundamentalGraphData +
          barGraphFormat +
          errorBarFormat +
          values

# -----------------------
# Exporting Graph to File
# -----------------------

print( paste( "Saving bar chart with error bars to", errBarOutputFile ) )

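# Note: ggsave() is called without an explicit plot argument, so it saves
# ggplot2's last_plot(), which at this point should be the "result" object
# assembled above.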
tryCatch( ggsave( errBarOutputFile,
                  width = imageWidth,
                  height = imageHeight,
                  dpi = imageDPI ),
          error = function( e ){
              print( "[ERROR] There was a problem saving the graph due to a graph formatting exception. Error dump:" )
              print( e )
              quit( status = 1 )
          }
        )

print( paste( "[SUCCESS] Successfully wrote bar chart with error bars out to", errBarOutputFile ) )
quit( status = 0 )