'''
Description: This test is to determine if a single
             instance ONOS 'cluster' can handle a restart

List of test cases:
CASE1: Compile ONOS and push it to the test machines
CASE2: Assign mastership to controllers
CASE3: Assign intents
CASE4: Ping across added host intents
CASE5: Reading state of ONOS
CASE6: The Failure case. For this test, we restart the single ONOS node.
CASE7: Check state after control plane failure
CASE8: Compare topo
CASE9: Link s3-s28 down
CASE10: Link s3-s28 up
CASE11: Switch down
CASE12: Switch up
CASE13: Clean up
'''
class SingleInstanceHATestRestart:

    def __init__(self):
        self.default = ''

    def CASE1(self, main):
        '''
        CASE1 is to compile ONOS and push it to the test machines

        Startup sequence:
        git pull
        mvn clean install
        onos-package
        cell <name>
        onos-verify-cell
        NOTE: temporary - onos-remove-raft-logs
        onos-install -f
        onos-wait-for-start
        '''
        import time
        main.log.report("ONOS Single node cluster restart HA test - initialization")
        main.case("Setting up test environment")

        # Load some variables from the params file
        PULL_CODE = False
        if main.params['Git'] == 'True':
            PULL_CODE = True
        cell_name = main.params['ENV']['cellName']

        # Set global variables
        global ONOS1_ip
        global ONOS1_port
        global ONOS2_ip
        global ONOS2_port
        global ONOS3_ip
        global ONOS3_port
        global ONOS4_ip
        global ONOS4_port
        global ONOS5_ip
        global ONOS5_port
        global ONOS6_ip
        global ONOS6_port
        global ONOS7_ip
        global ONOS7_port

        ONOS1_ip = main.params['CTRL']['ip1']
        ONOS1_port = main.params['CTRL']['port1']
        ONOS2_ip = main.params['CTRL']['ip2']
        ONOS2_port = main.params['CTRL']['port2']
        ONOS3_ip = main.params['CTRL']['ip3']
        ONOS3_port = main.params['CTRL']['port3']
        ONOS4_ip = main.params['CTRL']['ip4']
        ONOS4_port = main.params['CTRL']['port4']
        ONOS5_ip = main.params['CTRL']['ip5']
        ONOS5_port = main.params['CTRL']['port5']
        ONOS6_ip = main.params['CTRL']['ip6']
        ONOS6_port = main.params['CTRL']['port6']
        ONOS7_ip = main.params['CTRL']['ip7']
        ONOS7_port = main.params['CTRL']['port7']

        main.step("Applying cell variable to environment")
        cell_result = main.ONOSbench.set_cell(cell_name)
        verify_result = main.ONOSbench.verify_cell()

        #FIXME: this is a short term fix
        main.log.report("Removing raft logs")
        main.ONOSbench.onos_remove_raft_logs()
        main.log.report("Uninstalling ONOS")
        main.ONOSbench.onos_uninstall(ONOS1_ip)
        main.ONOSbench.onos_uninstall(ONOS2_ip)
        main.ONOSbench.onos_uninstall(ONOS3_ip)
        main.ONOSbench.onos_uninstall(ONOS4_ip)
        main.ONOSbench.onos_uninstall(ONOS5_ip)
        main.ONOSbench.onos_uninstall(ONOS6_ip)
        main.ONOSbench.onos_uninstall(ONOS7_ip)

        clean_install_result = main.TRUE
        git_pull_result = main.TRUE

        main.step("Compiling the latest version of ONOS")
        if PULL_CODE:
            main.step("Git checkout and pull master")
            main.ONOSbench.git_checkout("master")
            git_pull_result = main.ONOSbench.git_pull()

            main.step("Using mvn clean & install")
            clean_install_result = main.TRUE
            if git_pull_result == main.TRUE:
                clean_install_result = main.ONOSbench.clean_install()
            else:
                main.log.warn("Did not pull new code so skipping mvn " +
                              "clean install")
        main.ONOSbench.get_version(report=True)

        main.step("Creating ONOS package")
        package_result = main.ONOSbench.onos_package()

        main.step("Installing ONOS package")
        onos1_install_result = main.ONOSbench.onos_install(options="-f",
                                                           node=ONOS1_ip)

        main.step("Checking if ONOS is up yet")
        #TODO: Refactor
        #      check bundle:list?
        # This should be enough time for ONOS to start
        time.sleep(60)
        onos1_isup = main.ONOSbench.isup(ONOS1_ip)
        if not onos1_isup:
            main.log.report("ONOS1 didn't start!")
        #TODO: if it becomes an issue, we can retry this step a few times
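        # A minimal sketch of such a retry, reusing the same driver call (the
        # attempt count and sleep below are arbitrary choices, not values taken
        # from this test):
        #
        #   for attempt in range(5):
        #       onos1_isup = main.ONOSbench.isup(ONOS1_ip)
        #       if onos1_isup == main.TRUE:
        #           break
        #       time.sleep(10)
        #
        # CASE6 uses a similar count-bounded loop after the restart.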

        cli_result1 = main.ONOScli1.start_onos_cli(ONOS1_ip)

        main.step("Start Packet Capture MN")
        main.Mininet2.start_tcpdump(
            str(main.params['MNtcpdump']['folder']) + str(main.TEST) + "-MN.pcap",
            intf=main.params['MNtcpdump']['intf'],
            port=main.params['MNtcpdump']['port'])

        case1_result = (clean_install_result and package_result and
                        cell_result and verify_result and onos1_install_result and
                        onos1_isup and cli_result1)

        utilities.assert_equals(expect=main.TRUE, actual=case1_result,
                                onpass="Test startup successful",
                                onfail="Test startup NOT successful")

        if case1_result == main.FALSE:
            main.cleanup()
            main.exit()

    def CASE2(self, main):
        '''
        Assign mastership to controllers
        '''
        import time
        import json
        import re

        main.log.report("Assigning switches to controllers")
        main.case("Assigning Controllers")
        main.step("Assign switches to controllers")

        for i in range(1, 29):
            main.Mininet1.assign_sw_controller(sw=str(i),
                                               ip1=ONOS1_ip, port1=ONOS1_port)

        mastership_check = main.TRUE
        for i in range(1, 29):
            response = main.Mininet1.get_sw_controller("s" + str(i))
            try:
                main.log.info(str(response))
            except:
                main.log.info(repr(response))
            if re.search("tcp:" + ONOS1_ip, response):
                mastership_check = mastership_check and main.TRUE
            else:
                mastership_check = main.FALSE
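        # NOTE (format assumed, for clarity): get_sw_controller() is expected to
        # return the OVS controller target for the switch, e.g. a string such as
        # "tcp:10.128.20.11:6633", which is why the loop above only searches for
        # "tcp:" + ONOS1_ip instead of parsing the whole response.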
        if mastership_check == main.TRUE:
            main.log.report("Switch mastership assigned correctly")
        utilities.assert_equals(expect=main.TRUE, actual=mastership_check,
                                onpass="Switch mastership assigned correctly",
                                onfail="Switches not assigned correctly to controllers")

        #TODO: If assign roles is working reliably then manually
        #      assign mastership to the controller we want

    def CASE3(self, main):
        """
        Assign intents
        """
        import time
        import json
        import re
        main.log.report("Adding host intents")
        main.case("Adding host Intents")

        main.step("Discovering Hosts (via pingall for now)")
        #FIXME: Once we have a host discovery mechanism, use that instead

        # REACTIVE FWD test
        ping_result = main.FALSE
        time1 = time.time()
        ping_result = main.Mininet1.pingall()
        time2 = time.time()
        main.log.info("Time for pingall: %2f seconds" % (time2 - time1))

        # Uninstall onos-app-fwd
        main.log.info("Uninstall reactive forwarding app")
        main.ONOScli1.feature_uninstall("onos-app-fwd")

        main.step("Add host intents")
        #TODO: move the host numbers to params
        hosts_json = json.loads(main.ONOScli1.hosts())
        intent_add_result = main.TRUE
        for i in range(8, 18):
            main.log.info("Adding host intent between h" + str(i) +
                          " and h" + str(i + 10))
            host1 = "00:00:00:00:00:" + str(hex(i)[2:]).zfill(2).upper()
            host2 = "00:00:00:00:00:" + str(hex(i + 10)[2:]).zfill(2).upper()
            #NOTE: get_host can return None
            #TODO: handle this
            host1_id = main.ONOScli1.get_host(host1)['id']
            host2_id = main.ONOScli1.get_host(host2)['id']
            tmp_result = main.ONOScli1.add_host_intent(host1_id, host2_id)
            intent_add_result = intent_add_result and tmp_result
        #TODO: Check if intents all exist in datastore
        #NOTE: Do we need to print this once the test is working?
        #main.log.info(json.dumps(json.loads(main.ONOScli1.intents(json_format=True)),
        #              sort_keys=True, indent=4, separators=(',', ': ')))
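        # Illustrative note on the MAC mapping used above: the loop assumes the
        # Mininet MAC for host h<i> encodes the host number in hex, so i=8 gives
        # "00:00:00:00:00:08" and i=18 gives "00:00:00:00:00:12". A hypothetical
        # helper with the same logic (shown only as an example, not used here):
        #
        #   def mn_host_mac(index):
        #       return "00:00:00:00:00:" + str(hex(index)[2:]).zfill(2).upper()
        #
        #   mn_host_mac(8)   # -> '00:00:00:00:00:08'
        #   mn_host_mac(18)  # -> '00:00:00:00:00:12'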

    def CASE4(self, main):
        """
        Ping across added host intents
        """
        description = "Ping across added host intents"
        main.log.report(description)
        main.case(description)
        Ping_Result = main.TRUE
        for i in range(8, 18):
            ping = main.Mininet1.pingHost(src="h" + str(i), target="h" + str(i + 10))
            Ping_Result = Ping_Result and ping
            if ping == main.FALSE:
                main.log.warn("Ping failed between h" + str(i) + " and h" + str(i + 10))
            elif ping == main.TRUE:
                main.log.info("Ping test passed!")
        if Ping_Result == main.FALSE:
            main.log.report("Intents have not been installed correctly, pings failed.")
        if Ping_Result == main.TRUE:
            main.log.report("Intents have been installed correctly and verified by pings")
        utilities.assert_equals(expect=main.TRUE, actual=Ping_Result,
                                onpass="Intents have been installed correctly and pings work",
                                onfail="Intents have not been installed correctly, pings failed.")

    def CASE5(self, main):
        '''
        Reading state of ONOS
        '''
        import time
        import json
        from subprocess import Popen, PIPE
        from sts.topology.teston_topology import TestONTopology  # assumes sts is already in your PYTHONPATH

        main.log.report("Setting up and gathering data for current state")
        main.case("Setting up and gathering data for current state")
        # The general idea for this test case is to pull the state (intents,
        # flows, topology, ...) from each ONOS node. We can then compare them
        # with each other and also with past states.

        main.step("Get the Mastership of each switch from each controller")
        global mastership_state
        ONOS1_mastership = main.ONOScli1.roles()
        #print json.dumps(json.loads(ONOS1_mastership), sort_keys=True, indent=4, separators=(',', ': '))
        #TODO: Make this a meaningful check
        if "Error" in ONOS1_mastership or not ONOS1_mastership:
            main.log.report("Error in getting ONOS roles")
            main.log.warn("ONOS1 mastership response: " + repr(ONOS1_mastership))
            consistent_mastership = main.FALSE
        else:
            mastership_state = ONOS1_mastership
            consistent_mastership = main.TRUE
            main.log.report("Switch roles are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=consistent_mastership,
                                onpass="Switch roles are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of switch roles")

        main.step("Get the intents from each controller")
        global intent_state
        ONOS1_intents = main.ONOScli1.intents(json_format=True)
        intent_check = main.FALSE
        if "Error" in ONOS1_intents or not ONOS1_intents:
            main.log.report("Error in getting ONOS intents")
            main.log.warn("ONOS1 intents response: " + repr(ONOS1_intents))
        else:
            intent_state = ONOS1_intents
            intent_check = main.TRUE
            main.log.report("Intents are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=intent_check,
                                onpass="Intents are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of intents")

        main.step("Get the flows from each controller")
        global flow_state
        ONOS1_flows = main.ONOScli1.flows(json_format=True)
        flow_check = main.FALSE
        if "Error" in ONOS1_flows or not ONOS1_flows:
            main.log.report("Error in getting ONOS flows")
            main.log.warn("ONOS1 flows response: " + repr(ONOS1_flows))
        else:
            #TODO: Do a better check, maybe compare flows on switches?
            flow_state = ONOS1_flows
            flow_check = main.TRUE
            main.log.report("Flow count is consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=flow_check,
                                onpass="The flow count is consistent across all ONOS nodes",
                                onfail="ONOS nodes have different flow counts")

        main.step("Get the OF Table entries")
        global flows
        flows = []
        for i in range(1, 29):
            flows.append(main.Mininet2.get_flowTable("s" + str(i), 1.0))

        #TODO: Compare switch flow tables with ONOS flow tables

        main.step("Start continuous pings")
        main.Mininet2.pingLong(src=main.params['PING']['source1'],
                               target=main.params['PING']['target1'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source2'],
                               target=main.params['PING']['target2'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source3'],
                               target=main.params['PING']['target3'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source4'],
                               target=main.params['PING']['target4'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source5'],
                               target=main.params['PING']['target5'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source6'],
                               target=main.params['PING']['target6'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source7'],
                               target=main.params['PING']['target7'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source8'],
                               target=main.params['PING']['target8'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source9'],
                               target=main.params['PING']['target9'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source10'],
                               target=main.params['PING']['target10'], pingTime=500)

        main.step("Create TestONTopology object")
        ctrls = []
        count = 1
        temp = ()
        temp = temp + (getattr(main, ('ONOS' + str(count))),)
        temp = temp + ("ONOS" + str(count),)
        temp = temp + (main.params['CTRL']['ip' + str(count)],)
        temp = temp + (eval(main.params['CTRL']['port' + str(count)]),)
        ctrls.append(temp)
        MNTopo = TestONTopology(main.Mininet1, ctrls)  # can also add Intent API info for intent operations
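        # NOTE (reading of the tuple built above): each ctrls entry handed to
        # TestONTopology is a 4-tuple of (node handle, node name, IP, port), e.g.
        # (main.ONOS1, "ONOS1", <ip1 from params>, <port1 from params>). Only one
        # entry is built here since this is the single-instance test.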

        main.step("Collecting topology information from ONOS")
        devices = []
        devices.append(main.ONOScli1.devices())
        '''
        hosts = []
        hosts.append( main.ONOScli1.hosts() )
        '''
        ports = []
        ports.append(main.ONOScli1.ports())
        links = []
        links.append(main.ONOScli1.links())

        main.step("Comparing ONOS topology to MN")
        devices_results = main.TRUE
        ports_results = main.TRUE
        links_results = main.TRUE
        for controller in range(1):  #TODO: parameterize the number of controllers
            if devices[controller] and "Error" not in devices[controller]:
                current_devices_result = main.Mininet1.compare_switches(
                    MNTopo, json.loads(devices[controller]))
            else:
                current_devices_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_devices_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " Switches view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " Switches view is incorrect")

            if ports[controller] and "Error" not in ports[controller]:
                current_ports_result = main.Mininet1.compare_ports(
                    MNTopo, json.loads(ports[controller]))
            else:
                current_ports_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_ports_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " ports view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " ports view is incorrect")

            if links[controller] and "Error" not in links[controller]:
                current_links_result = main.Mininet1.compare_links(
                    MNTopo, json.loads(links[controller]))
            else:
                current_links_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_links_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " links view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " links view is incorrect")

            devices_results = devices_results and current_devices_result
            ports_results = ports_results and current_ports_result
            links_results = links_results and current_links_result

        topo_result = devices_results and ports_results and links_results
        utilities.assert_equals(expect=main.TRUE, actual=topo_result,
                                onpass="Topology Check Test successful",
                                onfail="Topology Check Test NOT successful")

        final_assert = main.TRUE
        final_assert = (final_assert and topo_result and flow_check
                        and intent_check and consistent_mastership)
        utilities.assert_equals(expect=main.TRUE, actual=final_assert,
                                onpass="State check successful",
                                onfail="State check NOT successful")

    def CASE6(self, main):
        '''
        The Failure case.
        '''
        import time

        main.log.report("Restart ONOS node")
        main.case("Restart ONOS node")
        main.ONOSbench.onos_kill(ONOS1_ip)
        start = time.time()

        main.step("Checking if ONOS is up yet")
        count = 0
        elapsed = 0
        while count < 10:
            onos1_isup = main.ONOSbench.isup(ONOS1_ip)
            if onos1_isup == main.TRUE:
                elapsed = time.time() - start
                break
            else:
                count = count + 1

        cli_result = main.ONOScli1.start_onos_cli(ONOS1_ip)

        case_results = main.TRUE and onos1_isup and cli_result
        utilities.assert_equals(expect=main.TRUE, actual=case_results,
                                onpass="ONOS restart successful",
                                onfail="ONOS restart NOT successful")
        main.log.info("ONOS took %s seconds to restart" % str(elapsed))

    def CASE7(self, main):
        '''
        Check state after ONOS failure
        '''
        import os
        import json
        main.case("Running ONOS Constant State Tests")

        main.step("Check if switch roles are consistent across all nodes")
        ONOS1_mastership = main.ONOScli1.roles()
        #FIXME: Refactor this whole case for single instance
        #print json.dumps(json.loads(ONOS1_mastership), sort_keys=True, indent=4, separators=(',', ': '))
        if "Error" in ONOS1_mastership or not ONOS1_mastership:
            main.log.report("Error in getting ONOS mastership")
            main.log.warn("ONOS1 mastership response: " + repr(ONOS1_mastership))
            consistent_mastership = main.FALSE
        else:
            consistent_mastership = main.TRUE
            main.log.report("Switch roles are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=consistent_mastership,
                                onpass="Switch roles are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of switch roles")

        description2 = "Compare switch roles from before failure"
        main.step(description2)

        current_json = json.loads(ONOS1_mastership)
        old_json = json.loads(mastership_state)
        mastership_check = main.TRUE
        for i in range(1, 29):
            switchDPID = str(main.Mininet1.getSwitchDPID(switch="s" + str(i)))

            current = [switch['master'] for switch in current_json
                       if switchDPID in switch['id']]
            old = [switch['master'] for switch in old_json
                   if switchDPID in switch['id']]
            if current == old:
                mastership_check = mastership_check and main.TRUE
            else:
                main.log.warn("Mastership of switch %s changed" % switchDPID)
                mastership_check = main.FALSE
        if mastership_check == main.TRUE:
            main.log.report("Mastership of Switches was not changed")
        utilities.assert_equals(expect=main.TRUE, actual=mastership_check,
                                onpass="Mastership of Switches was not changed",
                                onfail="Mastership of some switches changed")
        mastership_check = mastership_check and consistent_mastership

        main.step("Get the intents and compare across all nodes")
        ONOS1_intents = main.ONOScli1.intents(json_format=True)
        intent_check = main.FALSE
        if "Error" in ONOS1_intents or not ONOS1_intents:
            main.log.report("Error in getting ONOS intents")
            main.log.warn("ONOS1 intents response: " + repr(ONOS1_intents))
        else:
            intent_check = main.TRUE
            main.log.report("Intents are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=intent_check,
                                onpass="Intents are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of intents")

        main.step("Compare current intents with intents before the failure")
        #TODO: possibly the states have changed? we may need to figure out what the acceptable states are
        if intent_state == ONOS1_intents:
            same_intents = main.TRUE
            main.log.report("Intents are consistent with before failure")
        else:
            same_intents = main.FALSE
        utilities.assert_equals(expect=main.TRUE, actual=same_intents,
                                onpass="Intents are consistent with before failure",
                                onfail="The Intents changed during failure")
        intent_check = intent_check and same_intents

        main.step("Get the OF Table entries and compare to before component failure")
        Flow_Tables = main.TRUE
        flows2 = []
        for i in range(28):
            main.log.info("Checking flow table on s" + str(i + 1))
            tmp_flows = main.Mininet2.get_flowTable("s" + str(i + 1), 1.0)
            flows2.append(tmp_flows)
            Flow_Tables = Flow_Tables and main.Mininet2.flow_comp(flow1=flows[i],
                                                                  flow2=tmp_flows)
            if Flow_Tables == main.FALSE:
                main.log.info("Differences in flow table for switch: " + str(i + 1))
                break
        if Flow_Tables == main.TRUE:
            main.log.report("No changes were found in the flow tables")
        utilities.assert_equals(expect=main.TRUE, actual=Flow_Tables,
                                onpass="No changes were found in the flow tables",
                                onfail="Changes were found in the flow tables")

        main.step("Check the continuous pings to ensure that no packets were dropped during component failure")
        #FIXME: This check is always failing. Investigate cause
        #NOTE: this may be something to do with file permissions
        #      or a slight change in format
        main.Mininet2.pingKill(main.params['TESTONUSER'], main.params['TESTONIP'])
        Loss_In_Pings = main.FALSE
        #NOTE: checkForLoss returns main.FALSE with 0% packet loss
        for i in range(8, 18):
            main.log.info("Checking for a loss in pings along the flow from h" + str(i))
            Loss_In_Pings = Loss_In_Pings or main.Mininet2.checkForLoss("/tmp/ping.h" + str(i))
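        # NOTE (assumption about file locations): this check reads /tmp/ping.h8
        # through /tmp/ping.h17, which presumes the continuous pings started in
        # CASE5 (pingLong with source1..source10 from the .params file) write
        # their output to /tmp/ping.<source host> and that those sources are
        # h8-h17. If the params use different hosts, these paths will not match.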
        if Loss_In_Pings == main.TRUE:
            main.log.info("Loss in ping detected")
        elif Loss_In_Pings == main.ERROR:
            main.log.info("There are multiple mininet processes running")
        elif Loss_In_Pings == main.FALSE:
            main.log.info("No Loss in the pings")
            main.log.report("No loss of dataplane connectivity")
        utilities.assert_equals(expect=main.FALSE, actual=Loss_In_Pings,
                                onpass="No Loss of connectivity",
                                onfail="Loss of dataplane connectivity detected")

        #TODO: add topology to this or leave as a separate case?
        result = mastership_check and intent_check and Flow_Tables and (not Loss_In_Pings)
        result = int(result)
        if result == main.TRUE:
            main.log.report("Constant State Tests Passed")
        utilities.assert_equals(expect=main.TRUE, actual=result,
                                onpass="Constant State Tests Passed",
                                onfail="Constant state tests failed")

    def CASE8(self, main):
        '''
        Compare topo
        '''
        import sys
        sys.path.append("/home/admin/sts")  # Trying to remove some dependencies, #FIXME add this path to params
        from sts.topology.teston_topology import TestONTopology  # assumes sts is already in your PYTHONPATH
        import json
        import time

        description = "Compare ONOS Topology view to Mininet topology"
        main.case(description)
        main.log.report(description)
        main.step("Create TestONTopology object")
        ctrls = []
        count = 1
        temp = ()
        temp = temp + (getattr(main, ('ONOS' + str(count))),)
        temp = temp + ("ONOS" + str(count),)
        temp = temp + (main.params['CTRL']['ip' + str(count)],)
        temp = temp + (eval(main.params['CTRL']['port' + str(count)]),)
        ctrls.append(temp)
        MNTopo = TestONTopology(main.Mininet1, ctrls)  # can also add Intent API info for intent operations

        main.step("Comparing ONOS topology to MN")
        devices_results = main.TRUE
        ports_results = main.TRUE
        links_results = main.TRUE
        topo_result = main.FALSE
        current_devices_result = main.FALSE
        current_ports_result = main.FALSE
        current_links_result = main.FALSE
        start_time = time.time()
        elapsed = 0
        count = 0
        while topo_result == main.FALSE and elapsed < 120:
            count = count + 1
            try:
                main.step("Collecting topology information from ONOS")
                devices = []
                devices.append(main.ONOScli1.devices())
                '''
                hosts = []
                hosts.append( main.ONOScli1.hosts() )
                '''
                ports = []
                ports.append(main.ONOScli1.ports())
                links = []
                links.append(main.ONOScli1.links())
                for controller in range(1):  #TODO: parameterize the number of controllers
                    if devices[controller] and "Error" not in devices[controller]:
                        current_devices_result = main.Mininet1.compare_switches(
                            MNTopo, json.loads(devices[controller]))
                    else:
                        current_devices_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_devices_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " Switches view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " Switches view is incorrect")

                    if ports[controller] and "Error" not in ports[controller]:
                        current_ports_result = main.Mininet1.compare_ports(
                            MNTopo, json.loads(ports[controller]))
                    else:
                        current_ports_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_ports_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " ports view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " ports view is incorrect")

                    if links[controller] and "Error" not in links[controller]:
                        current_links_result = main.Mininet1.compare_links(
                            MNTopo, json.loads(links[controller]))
                    else:
                        current_links_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_links_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " links view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " links view is incorrect")
            except:
                main.log.error("Something went wrong in the topology comparison")
                main.log.warn(repr(devices))
                main.log.warn(repr(ports))
                main.log.warn(repr(links))

            devices_results = devices_results and current_devices_result
            ports_results = ports_results and current_ports_result
            links_results = links_results and current_links_result
            elapsed = time.time() - start_time
            time_threshold = elapsed < 1
            topo_result = (devices_results and ports_results and links_results
                           and time_threshold)
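            # NOTE on the loop exit condition above: topo_result only becomes TRUE
            # when the device/port/link comparisons all pass AND elapsed is still
            # under 1 second (time_threshold), while the while loop itself gives up
            # after 120 seconds. elapsed and count are then reported below as a
            # rough estimate of topology discovery/convergence time.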
        #TODO: make sure this step is non-blocking. IE add a timeout
        main.log.report("Very crass estimate for topology discovery/convergence: " +
                        str(elapsed) + " seconds, " + str(count) + " tries")
        utilities.assert_equals(expect=main.TRUE, actual=topo_result,
                                onpass="Topology Check Test successful",
                                onfail="Topology Check Test NOT successful")
        if topo_result == main.TRUE:
            main.log.report("ONOS topology view matches Mininet topology")

    def CASE9(self, main):
        '''
        Link s3-s28 down
        '''
        import time
        #NOTE: You should probably run a topology check after this

        link_sleep = int(main.params['timers']['LinkDiscovery'])

        description = "Turn off a link to ensure that Link Discovery is working properly"
        main.log.report(description)
        main.case(description)

        main.step("Kill Link between s3 and s28")
        Link_Down = main.Mininet1.link(END1="s3", END2="s28", OPTION="down")
        main.log.info("Waiting " + str(link_sleep) + " seconds for link down to be discovered")
        time.sleep(link_sleep)
        utilities.assert_equals(expect=main.TRUE, actual=Link_Down,
                                onpass="Link down successful",
                                onfail="Failed to bring link down")
        #TODO: do some sort of check here

    def CASE10(self, main):
        '''
        Link s3-s28 up
        '''
        import time
        #NOTE: You should probably run a topology check after this

        link_sleep = int(main.params['timers']['LinkDiscovery'])

        description = "Restore a link to ensure that Link Discovery is working properly"
        main.log.report(description)
        main.case(description)

        main.step("Bring link between s3 and s28 back up")
        Link_Up = main.Mininet1.link(END1="s3", END2="s28", OPTION="up")
        main.log.info("Waiting " + str(link_sleep) + " seconds for link up to be discovered")
        time.sleep(link_sleep)
        utilities.assert_equals(expect=main.TRUE, actual=Link_Up,
                                onpass="Link up successful",
                                onfail="Failed to bring link up")
        #TODO: do some sort of check here

    def CASE11(self, main):
        '''
        Switch Down
        '''
        #NOTE: You should probably run a topology check after this
        import time

        switch_sleep = int(main.params['timers']['SwitchDiscovery'])

        description = "Killing a switch to ensure it is discovered correctly"
        main.log.report(description)
        main.case(description)

        #TODO: Make this switch parameterizable
        main.step("Kill s28")
        main.log.report("Deleting s28")
        #FIXME: use new dynamic topo functions
        main.Mininet1.del_switch("s28")
        main.log.info("Waiting " + str(switch_sleep) + " seconds for switch down to be discovered")
        time.sleep(switch_sleep)
        # Peek at the deleted switch
        main.log.warn(main.ONOScli1.get_device(dpid="0028"))
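        # NOTE (illustrative): the peek above assumes the ONOS device ID for s28
        # contains the digits "0028" (CASE12 re-adds the switch with DPID
        # '0000000000002800'); get_device() here only logs what ONOS still reports
        # for the deleted switch and is not used as a pass/fail check.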
        #TODO: do some sort of check here

    def CASE12(self, main):
        '''
        Switch Up
        '''
        #NOTE: You should probably run a topology check after this
        import time
        #FIXME: use new dynamic topo functions
        switch_sleep = int(main.params['timers']['SwitchDiscovery'])

        description = "Adding a switch to ensure it is discovered correctly"
        main.log.report(description)
        main.case(description)

        main.step("Add back s28")
        main.log.report("Adding back s28")
        main.Mininet1.add_switch("s28", dpid='0000000000002800')
        #TODO: New dpid or same? Ask Thomas?
        main.Mininet1.add_link('s28', 's3')
        main.Mininet1.add_link('s28', 's6')
        main.Mininet1.add_link('s28', 'h28')
        main.Mininet1.assign_sw_controller(sw="28",
                                           ip1=ONOS1_ip, port1=ONOS1_port)
        main.log.info("Waiting " + str(switch_sleep) + " seconds for switch up to be discovered")
        time.sleep(switch_sleep)
        # Peek at the added switch
        main.log.warn(main.ONOScli1.get_device(dpid="0028"))
        #TODO: do some sort of check here

    def CASE13(self, main):
        '''
        Clean up
        '''
        import os
        import time
        description = "Test Cleanup"
        main.log.report(description)
        main.case(description)
        main.step("Killing tcpdumps")
        main.Mininet2.stop_tcpdump()

        main.step("Copying MN pcap and ONOS log files to test station")
        testname = main.TEST
        #NOTE: MN pcap file is being saved to ~/packet_captures
        #      scp this file as MN and TestON aren't necessarily the same vm
        #FIXME: scp
        ##### MN files
        #TODO: Load these from params
        #NOTE: must end in /
        log_folder = "/opt/onos/log/"
        log_files = ["karaf.log", "karaf.log.1"]
        #NOTE: must end in /
        dst_dir = "~/packet_captures/"
        for f in log_files:
            main.ONOSbench.secureCopy("sdn", ONOS1_ip, log_folder + f, "rocks",
                                      dst_dir + str(testname) + "-ONOS1-" + f)

        # std*.log's
        #NOTE: must end in /
        log_folder = "/opt/onos/var/"
        log_files = ["stderr.log", "stdout.log"]
        #NOTE: must end in /
        dst_dir = "~/packet_captures/"
        for f in log_files:
            main.ONOSbench.secureCopy("sdn", ONOS1_ip, log_folder + f, "rocks",
                                      dst_dir + str(testname) + "-ONOS1-" + f)

        # Sleep so scp can finish
        time.sleep(10)
        main.step("Packing and rotating pcap archives")
        os.system("~/TestON/dependencies/rotate.sh " + str(testname))

        #TODO: actually check something here
        utilities.assert_equals(expect=main.TRUE, actual=main.TRUE,
                                onpass="Test cleanup successful",
                                onfail="Test cleanup NOT successful")