'''
Description: This test is to determine if a single
    instance ONOS 'cluster' can handle a restart

List of test cases:
CASE1: Compile ONOS and push it to the test machines
CASE2: Assign mastership to controllers
CASE3: Assign intents
CASE4: Ping across added host intents
CASE5: Reading state of ONOS
CASE6: The Failure case. Here we restart the single ONOS node
CASE7: Check state after control plane failure
CASE8: Compare topo
CASE9: Link s3-s28 down
CASE10: Link s3-s28 up
CASE11: Switch down
CASE12: Switch up
CASE13: Clean up
'''
class SingleInstanceHATestRestart:

    def __init__(self):
        self.default = ''
    def CASE1(self, main):
        '''
        CASE1 is to compile ONOS and push it to the test machines

        Startup sequence:
        git pull
        mvn clean install
        onos-package
        cell <name>
        onos-verify-cell
        NOTE: temporary - onos-remove-raft-logs
        onos-install -f
        onos-wait-for-start
        '''
        import time
        main.log.report("ONOS Single node cluster restart HA test - initialization")
        main.case("Setting up test environment")

        # load some variables from the params file
        PULL_CODE = False
        if main.params['Git'] == 'True':
            PULL_CODE = True
        cell_name = main.params['ENV']['cellName']

        # set global variables
        global ONOS1_ip
        global ONOS1_port
        global ONOS2_ip
        global ONOS2_port
        global ONOS3_ip
        global ONOS3_port
        global ONOS4_ip
        global ONOS4_port
        global ONOS5_ip
        global ONOS5_port
        global ONOS6_ip
        global ONOS6_port
        global ONOS7_ip
        global ONOS7_port

        ONOS1_ip = main.params['CTRL']['ip1']
        ONOS1_port = main.params['CTRL']['port1']
        ONOS2_ip = main.params['CTRL']['ip2']
        ONOS2_port = main.params['CTRL']['port2']
        ONOS3_ip = main.params['CTRL']['ip3']
        ONOS3_port = main.params['CTRL']['port3']
        ONOS4_ip = main.params['CTRL']['ip4']
        ONOS4_port = main.params['CTRL']['port4']
        ONOS5_ip = main.params['CTRL']['ip5']
        ONOS5_port = main.params['CTRL']['port5']
        ONOS6_ip = main.params['CTRL']['ip6']
        ONOS6_port = main.params['CTRL']['port6']
        ONOS7_ip = main.params['CTRL']['ip7']
        ONOS7_port = main.params['CTRL']['port7']


        main.step("Applying cell variable to environment")
        cell_result = main.ONOSbench.set_cell(cell_name)
        verify_result = main.ONOSbench.verify_cell()

        # FIXME: this is a short term fix
        main.log.report("Removing raft logs")
        main.ONOSbench.onos_remove_raft_logs()
        main.log.report("Uninstalling ONOS")
        main.ONOSbench.onos_uninstall(ONOS1_ip)
        main.ONOSbench.onos_uninstall(ONOS2_ip)
        main.ONOSbench.onos_uninstall(ONOS3_ip)
        main.ONOSbench.onos_uninstall(ONOS4_ip)
        main.ONOSbench.onos_uninstall(ONOS5_ip)
        main.ONOSbench.onos_uninstall(ONOS6_ip)
        main.ONOSbench.onos_uninstall(ONOS7_ip)

        clean_install_result = main.TRUE
        git_pull_result = main.TRUE

        main.step("Compiling the latest version of ONOS")
        if PULL_CODE:
            main.step("Git checkout and pull master")
            main.ONOSbench.git_checkout("master")
            git_pull_result = main.ONOSbench.git_pull()

            main.step("Using mvn clean & install")
            clean_install_result = main.TRUE
            if git_pull_result == main.TRUE:
                clean_install_result = main.ONOSbench.clean_install()
            else:
                main.log.warn("Did not pull new code so skipping mvn " +
                              "clean install")
        main.ONOSbench.get_version(report=True)

        main.step("Creating ONOS package")
        package_result = main.ONOSbench.onos_package()

        main.step("Installing ONOS package")
        onos1_install_result = main.ONOSbench.onos_install(options="-f",
                                                           node=ONOS1_ip)


        main.step("Checking if ONOS is up yet")
        # TODO: Refactor
        # check bundle:list?
        # this should be enough for ONOS to start
        time.sleep(60)
        onos1_isup = main.ONOSbench.isup(ONOS1_ip)
        if not onos1_isup:
            main.log.report("ONOS1 didn't start!")
        # TODO: if it becomes an issue, we can retry this step a few times
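        # NOTE: a minimal retry sketch for the check above, assuming polling
        #       isup() a few more times before giving up is acceptable
        #       (kept commented out; the single check above is what the test
        #       currently relies on):
        # for attempt in range(3):
        #     if onos1_isup:
        #         break
        #     main.log.info("Retrying ONOS1 startup check, attempt " +
        #                   str(attempt + 1))
        #     time.sleep(30)
        #     onos1_isup = main.ONOSbench.isup(ONOS1_ip)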

        cli_result1 = main.ONOScli1.start_onos_cli(ONOS1_ip)

        main.step("Start Packet Capture MN")
        main.Mininet2.start_tcpdump(
            str(main.params['MNtcpdump']['folder']) + str(main.TEST) + "-MN.pcap",
            intf=main.params['MNtcpdump']['intf'],
            port=main.params['MNtcpdump']['port'])


        case1_result = (clean_install_result and package_result and
                        cell_result and verify_result and onos1_install_result and
                        onos1_isup and cli_result1)

        utilities.assert_equals(expect=main.TRUE, actual=case1_result,
                                onpass="Test startup successful",
                                onfail="Test startup NOT successful")


        if case1_result == main.FALSE:
            main.cleanup()
            main.exit()

    def CASE2(self, main):
        '''
        Assign mastership to controllers
        '''
        import time
        import json
        import re

        main.log.report("Assigning switches to controllers")
        main.case("Assigning Controllers")
        main.step("Assign switches to controllers")

        for i in range(1, 29):
            main.Mininet1.assign_sw_controller(sw=str(i),
                                               ip1=ONOS1_ip, port1=ONOS1_port)

        mastership_check = main.TRUE
        for i in range(1, 29):
            response = main.Mininet1.get_sw_controller("s" + str(i))
            main.log.info(repr(response))
            if re.search("tcp:" + ONOS1_ip, response):
                mastership_check = mastership_check and main.TRUE
            else:
                mastership_check = main.FALSE
        if mastership_check == main.TRUE:
            main.log.report("Switch mastership assigned correctly")
        utilities.assert_equals(expect=main.TRUE, actual=mastership_check,
                                onpass="Switch mastership assigned correctly",
                                onfail="Switches not assigned correctly to controllers")

        # TODO: If assign roles is working reliably then manually
        #       assign mastership to the controller we want
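        # NOTE: a sketch of a possible cross-check against the controller's own
        #       view, assuming roles() returns the same JSON list of
        #       {'id': ..., 'master': ...} entries that CASE7 parses
        #       (kept commented out, not part of the current check):
        # roles_json = json.loads(main.ONOScli1.roles())
        # for device in roles_json:
        #     main.log.info(str(device['id']) + " is mastered by " +
        #                   str(device['master']))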


    def CASE3(self, main):
        """
        Assign intents

        """
        import time
        import json
        import re
        main.log.report("Adding host intents")
        main.case("Adding host Intents")

        main.step("Discovering Hosts( Via pingall for now)")
        # FIXME: Once we have a host discovery mechanism, use that instead

        # REACTIVE FWD test
        ping_result = main.FALSE
        time1 = time.time()
        ping_result = main.Mininet1.pingall()
        time2 = time.time()
        main.log.info("Time for pingall: %2f seconds" % (time2 - time1))

        # uninstall onos-app-fwd
        main.log.info("Uninstall reactive forwarding app")
        main.ONOScli1.feature_uninstall("onos-app-fwd")

        main.step("Add host intents")
        # TODO: move the host numbers to params
        intents_json = json.loads(main.ONOScli1.hosts())
        intent_add_result = main.FALSE
        for i in range(8, 18):
            main.log.info("Adding host intent between h" + str(i) + " and h" + str(i + 10))
            host1 = "00:00:00:00:00:" + str(hex(i)[2:]).zfill(2).upper()
            host2 = "00:00:00:00:00:" + str(hex(i + 10)[2:]).zfill(2).upper()
            # NOTE: get_host can return None
            # TODO: handle this
            host1_id = main.ONOScli1.get_host(host1)['id']
            host2_id = main.ONOScli1.get_host(host2)['id']
            tmp_result = main.ONOScli1.add_host_intent(host1_id, host2_id)
            intent_add_result = intent_add_result and tmp_result
        # TODO: Check if intents all exist in datastore
        # NOTE: Do we need to print this once the test is working?
        # main.log.info(json.dumps(json.loads(main.ONOScli1.intents(json_format=True)),
        #               sort_keys=True, indent=4, separators=(',', ': ')))
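        # NOTE: a sketch of how the None case noted above could be handled,
        #       assuming we simply want to skip a pair when get_host() returns
        #       nothing. Hosts are addressed by MAC here, e.g. h8 maps to
        #       00:00:00:00:00:08 and h18 to 00:00:00:00:00:12.
        #       (kept commented out; would replace the two ['id'] lookups in
        #       the loop above):
        # host1_dict = main.ONOScli1.get_host(host1)
        # host2_dict = main.ONOScli1.get_host(host2)
        # if host1_dict is None or host2_dict is None:
        #     main.log.warn("Could not find one of the hosts for h" + str(i))
        #     intent_add_result = main.FALSE
        #     continue
        # tmp_result = main.ONOScli1.add_host_intent(host1_dict['id'],
        #                                            host2_dict['id'])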

    def CASE4(self, main):
        """
        Ping across added host intents
        """
        description = "Ping across added host intents"
        main.log.report(description)
        main.case(description)
        Ping_Result = main.TRUE
        for i in range(8, 18):
            ping = main.Mininet1.pingHost(src="h" + str(i), target="h" + str(i + 10))
            Ping_Result = Ping_Result and ping
            if ping == main.FALSE:
                main.log.warn("Ping failed between h" + str(i) + " and h" + str(i + 10))
            elif ping == main.TRUE:
                main.log.info("Ping test passed!")
        if Ping_Result == main.FALSE:
            main.log.report("Intents have not been installed correctly, pings failed.")
        if Ping_Result == main.TRUE:
            main.log.report("Intents have been installed correctly and verified by pings")
        utilities.assert_equals(expect=main.TRUE, actual=Ping_Result,
                                onpass="Intents have been installed correctly and pings work",
                                onfail="Intents have not been installed correctly, pings failed.")

    def CASE5(self, main):
        '''
        Reading state of ONOS
        '''
        import time
        import json
        from subprocess import Popen, PIPE
        from sts.topology.teston_topology import TestONTopology  # assumes that sts is already in your PYTHONPATH

        main.log.report("Setting up and gathering data for current state")
        main.case("Setting up and gathering data for current state")
        # The general idea for this test case is to pull the state of
        # (intents, flows, topology, ...) from each ONOS node.
        # We can then compare them with each other and also with past states

        main.step("Get the Mastership of each switch from each controller")
        global mastership_state
        ONOS1_mastership = main.ONOScli1.roles()
        # print json.dumps(json.loads(ONOS1_mastership), sort_keys=True, indent=4, separators=(',', ': '))
        # TODO: Make this a meaningful check
        if "Error" in ONOS1_mastership or not ONOS1_mastership:
            main.log.report("Error in getting ONOS roles")
            main.log.warn("ONOS1 mastership response: " + repr(ONOS1_mastership))
            consistent_mastership = main.FALSE
        else:
            mastership_state = ONOS1_mastership
            consistent_mastership = main.TRUE
            main.log.report("Switch roles are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=consistent_mastership,
                                onpass="Switch roles are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of switch roles")


        main.step("Get the intents from each controller")
        global intent_state
        ONOS1_intents = main.ONOScli1.intents(json_format=True)
        intent_check = main.FALSE
        if "Error" in ONOS1_intents or not ONOS1_intents:
            main.log.report("Error in getting ONOS intents")
            main.log.warn("ONOS1 intents response: " + repr(ONOS1_intents))
        else:
            intent_state = ONOS1_intents
            intent_check = main.TRUE
            main.log.report("Intents are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=intent_check,
                                onpass="Intents are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of intents")


        main.step("Get the flows from each controller")
        global flow_state
        ONOS1_flows = main.ONOScli1.flows(json_format=True)
        flow_check = main.FALSE
        if "Error" in ONOS1_flows or not ONOS1_flows:
            main.log.report("Error in getting ONOS flows")
            main.log.warn("ONOS1 flows response: " + ONOS1_flows)
        else:
            # TODO: Do a better check, maybe compare flows on switches?
            flow_state = ONOS1_flows
            flow_check = main.TRUE
            main.log.report("Flow count is consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=flow_check,
                                onpass="The flow count is consistent across all ONOS nodes",
                                onfail="ONOS nodes have different flow counts")


        main.step("Get the OF Table entries")
        global flows
        flows = []
        for i in range(1, 29):
            flows.append(main.Mininet2.get_flowTable("s" + str(i), 1.0))
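        # NOTE: these per-switch flow tables are kept in the global 'flows'
        #       list so that CASE7 can diff them against the tables collected
        #       after the ONOS restart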

        # TODO: Compare switch flow tables with ONOS flow tables

        main.step("Start continuous pings")
        main.Mininet2.pingLong(src=main.params['PING']['source1'],
                               target=main.params['PING']['target1'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source2'],
                               target=main.params['PING']['target2'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source3'],
                               target=main.params['PING']['target3'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source4'],
                               target=main.params['PING']['target4'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source5'],
                               target=main.params['PING']['target5'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source6'],
                               target=main.params['PING']['target6'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source7'],
                               target=main.params['PING']['target7'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source8'],
                               target=main.params['PING']['target8'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source9'],
                               target=main.params['PING']['target9'], pingTime=500)
        main.Mininet2.pingLong(src=main.params['PING']['source10'],
                               target=main.params['PING']['target10'], pingTime=500)

        main.step("Create TestONTopology object")
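        # NOTE: each entry in ctrls is a tuple of
        #       (controller handle, controller name, ip, port)
        #       as passed to TestONTopology below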
        ctrls = []
        count = 1
        temp = ()
        temp = temp + (getattr(main, ('ONOS' + str(count))),)
        temp = temp + ("ONOS" + str(count),)
        temp = temp + (main.params['CTRL']['ip' + str(count)],)
        temp = temp + (eval(main.params['CTRL']['port' + str(count)]),)
        ctrls.append(temp)
        MNTopo = TestONTopology(main.Mininet1, ctrls)  # can also add Intent API info for intent operations

        main.step("Collecting topology information from ONOS")
        devices = []
        devices.append(main.ONOScli1.devices())
        '''
        hosts = []
        hosts.append( main.ONOScli1.hosts() )
        '''
        ports = []
        ports.append(main.ONOScli1.ports())
        links = []
        links.append(main.ONOScli1.links())


        main.step("Comparing ONOS topology to MN")
        devices_results = main.TRUE
        ports_results = main.TRUE
        links_results = main.TRUE
        for controller in range(1):  # TODO: parameterize the number of controllers
            if devices[controller] and "Error" not in devices[controller]:
                current_devices_result = main.Mininet1.compare_switches(MNTopo, json.loads(devices[controller]))
            else:
                current_devices_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_devices_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " Switches view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " Switches view is incorrect")

            if ports[controller] and "Error" not in ports[controller]:
                current_ports_result = main.Mininet1.compare_ports(MNTopo, json.loads(ports[controller]))
            else:
                current_ports_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_ports_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " ports view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " ports view is incorrect")

            if links[controller] and "Error" not in links[controller]:
                current_links_result = main.Mininet1.compare_links(MNTopo, json.loads(links[controller]))
            else:
                current_links_result = main.FALSE
            utilities.assert_equals(expect=main.TRUE, actual=current_links_result,
                                    onpass="ONOS" + str(int(controller + 1)) + " links view is correct",
                                    onfail="ONOS" + str(int(controller + 1)) + " links view is incorrect")

            devices_results = devices_results and current_devices_result
            ports_results = ports_results and current_ports_result
            links_results = links_results and current_links_result

        topo_result = devices_results and ports_results and links_results
        utilities.assert_equals(expect=main.TRUE, actual=topo_result,
                                onpass="Topology Check Test successful",
                                onfail="Topology Check Test NOT successful")

        final_assert = main.TRUE
        final_assert = final_assert and topo_result and flow_check \
                       and intent_check and consistent_mastership
        utilities.assert_equals(expect=main.TRUE, actual=final_assert,
                                onpass="State check successful",
                                onfail="State check NOT successful")


    def CASE6(self, main):
        '''
        The Failure case. Here we restart the single ONOS node.
        '''

        main.log.report("Restart ONOS node")
        main.case("Restart ONOS node")
        main.ONOSbench.onos_kill(ONOS1_ip)

        main.step("Checking if ONOS is up yet")
        onos1_isup = main.ONOSbench.isup(ONOS1_ip)
        # TODO: if it becomes an issue, we can retry this step a few times
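        # NOTE: a minimal sketch of retrying the liveness check after the kill,
        #       assuming the same polling approach as in CASE1 is acceptable
        #       (kept commented out):
        # import time
        # for attempt in range(3):
        #     if onos1_isup:
        #         break
        #     main.log.info("Waiting for ONOS1 to come back up, attempt " +
        #                   str(attempt + 1))
        #     time.sleep(30)
        #     onos1_isup = main.ONOSbench.isup(ONOS1_ip)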

        cli_result1 = main.ONOScli1.start_onos_cli(ONOS1_ip)

        case_results = main.TRUE and onos1_isup and cli_result1
        utilities.assert_equals(expect=main.TRUE, actual=case_results,
                                onpass="ONOS restart successful",
                                onfail="ONOS restart NOT successful")

    def CASE7(self, main):
        '''
        Check state after ONOS failure
        '''
        import os
        import json
        main.case("Running ONOS Constant State Tests")

        main.step("Check if switch roles are consistent across all nodes")
        ONOS1_mastership = main.ONOScli1.roles()
        # print json.dumps(json.loads(ONOS1_mastership), sort_keys=True, indent=4, separators=(',', ': '))
        if "Error" in ONOS1_mastership or not ONOS1_mastership:
            main.log.error("Error in getting ONOS mastership")
            main.log.warn("ONOS1 mastership response: " + repr(ONOS1_mastership))
            consistent_mastership = main.FALSE
        else:
            consistent_mastership = main.TRUE
            main.log.report("Switch roles are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=consistent_mastership,
                                onpass="Switch roles are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of switch roles")


        description2 = "Compare switch roles from before failure"
        main.step(description2)

        current_json = json.loads(ONOS1_mastership)
        old_json = json.loads(mastership_state)
        mastership_check = main.TRUE
        for i in range(1, 29):
            switchDPID = str(main.Mininet1.getSwitchDPID(switch="s" + str(i)))

            current = [switch['master'] for switch in current_json if switchDPID in switch['id']]
            old = [switch['master'] for switch in old_json if switchDPID in switch['id']]
            if current == old:
                mastership_check = mastership_check and main.TRUE
            else:
                main.log.warn("Mastership of switch %s changed" % switchDPID)
                mastership_check = main.FALSE
        if mastership_check == main.TRUE:
            main.log.report("Mastership of Switches was not changed")
        utilities.assert_equals(expect=main.TRUE, actual=mastership_check,
                                onpass="Mastership of Switches was not changed",
                                onfail="Mastership of some switches changed")
        mastership_check = mastership_check and consistent_mastership


        main.step("Get the intents and compare across all nodes")
        ONOS1_intents = main.ONOScli1.intents(json_format=True)
        intent_check = main.FALSE
        if "Error" in ONOS1_intents or not ONOS1_intents:
            main.log.report("Error in getting ONOS intents")
            main.log.warn("ONOS1 intents response: " + repr(ONOS1_intents))
        else:
            intent_check = main.TRUE
            main.log.report("Intents are consistent across all ONOS nodes")
        utilities.assert_equals(expect=main.TRUE, actual=intent_check,
                                onpass="Intents are consistent across all ONOS nodes",
                                onfail="ONOS nodes have different views of intents")

        main.step("Compare current intents with intents before the failure")
        # NOTE: intent_state was saved by CASE5 before the restart
        if intent_state == ONOS1_intents:
            same_intents = main.TRUE
            main.log.report("Intents are consistent with before failure")
            # TODO: possibly the states have changed? we may need to figure out what the acceptable states are
        else:
            same_intents = main.FALSE
        utilities.assert_equals(expect=main.TRUE, actual=same_intents,
                                onpass="Intents are consistent with before failure",
                                onfail="The Intents changed during failure")
        intent_check = intent_check and same_intents


        main.step("Get the OF Table entries and compare to before component failure")
        Flow_Tables = main.TRUE
        flows2 = []
        for i in range(28):
            main.log.info("Checking flow table on s" + str(i + 1))
            tmp_flows = main.Mininet2.get_flowTable("s" + str(i + 1), 1.0)
            flows2.append(tmp_flows)
            Flow_Tables = Flow_Tables and main.Mininet2.flow_comp(flow1=flows[i], flow2=tmp_flows)
            if Flow_Tables == main.FALSE:
                main.log.info("Differences in flow table for switch: " + str(i + 1))
                break
        if Flow_Tables == main.TRUE:
            main.log.report("No changes were found in the flow tables")
        utilities.assert_equals(expect=main.TRUE, actual=Flow_Tables,
                                onpass="No changes were found in the flow tables",
                                onfail="Changes were found in the flow tables")

        main.step("Check the continuous pings to ensure that no packets were dropped during component failure")
        # FIXME: This check is always failing. Investigate cause
        # NOTE: this may be something to do with file permissions
        #       or slight change in format
        main.Mininet2.pingKill(main.params['TESTONUSER'], main.params['TESTONIP'])
        Loss_In_Pings = main.FALSE
        # NOTE: checkForLoss returns main.FALSE with 0% packet loss
        for i in range(8, 18):
            main.log.info("Checking for a loss in pings along flow from s" + str(i))
            Loss_In_Pings = Loss_In_Pings or main.Mininet2.checkForLoss("/tmp/ping.h" + str(i))
        if Loss_In_Pings == main.TRUE:
            main.log.info("Loss in ping detected")
        elif Loss_In_Pings == main.ERROR:
            main.log.info("There are multiple mininet process running")
        elif Loss_In_Pings == main.FALSE:
            main.log.info("No Loss in the pings")
            main.log.report("No loss of dataplane connectivity")
        utilities.assert_equals(expect=main.FALSE, actual=Loss_In_Pings,
                                onpass="No Loss of connectivity",
                                onfail="Loss of dataplane connectivity detected")


        # TODO: add topology to this or leave as a separate case?
        result = mastership_check and intent_check and Flow_Tables and (not Loss_In_Pings)
        result = int(result)
        if result == main.TRUE:
            main.log.report("Constant State Tests Passed")
        utilities.assert_equals(expect=main.TRUE, actual=result,
                                onpass="Constant State Tests Passed",
                                onfail="Constant state tests failed")

    def CASE8(self, main):
        '''
        Compare topo
        '''
        import sys
        sys.path.append("/home/admin/sts")  # Trying to remove some dependencies, #FIXME add this path to params
        from sts.topology.teston_topology import TestONTopology  # assumes that sts is already in your PYTHONPATH
        import json
        import time

        description = "Compare ONOS Topology view to Mininet topology"
        main.case(description)
        main.log.report(description)
        main.step("Create TestONTopology object")
        ctrls = []
        count = 1
        temp = ()
        temp = temp + (getattr(main, ('ONOS' + str(count))),)
        temp = temp + ("ONOS" + str(count),)
        temp = temp + (main.params['CTRL']['ip' + str(count)],)
        temp = temp + (eval(main.params['CTRL']['port' + str(count)]),)
        ctrls.append(temp)
        MNTopo = TestONTopology(main.Mininet1, ctrls)  # can also add Intent API info for intent operations

        main.step("Collecting topology information from ONOS")
        devices = []
        devices.append(main.ONOScli1.devices())
        '''
        hosts = []
        hosts.append( main.ONOScli1.hosts() )
        '''
        ports = []
        ports.append(main.ONOScli1.ports())
        links = []
        links.append(main.ONOScli1.links())

        main.step("Comparing ONOS topology to MN")
        devices_results = main.TRUE
        ports_results = main.TRUE
        links_results = main.TRUE
        topo_result = main.FALSE
        start_time = time.time()
        elapsed = 0
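        # NOTE: the loop below retries the comparison for up to 120 seconds as
        #       a rough convergence window. Since devices/ports/links were
        #       collected once above, re-querying them inside the loop (and
        #       sleeping briefly between attempts) would probably be needed for
        #       a retry to ever observe a newer topology; left as a suggestion.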
        while topo_result == main.FALSE and elapsed < 120:
            try:
                for controller in range(1):  # TODO: parameterize the number of controllers
                    if devices[controller] and "Error" not in devices[controller]:
                        current_devices_result = main.Mininet1.compare_switches(MNTopo, json.loads(devices[controller]))
                    else:
                        current_devices_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_devices_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " Switches view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " Switches view is incorrect")

                    if ports[controller] and "Error" not in ports[controller]:
                        current_ports_result = main.Mininet1.compare_ports(MNTopo, json.loads(ports[controller]))
                    else:
                        current_ports_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_ports_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " ports view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " ports view is incorrect")

                    if links[controller] and "Error" not in links[controller]:
                        current_links_result = main.Mininet1.compare_links(MNTopo, json.loads(links[controller]))
                    else:
                        current_links_result = main.FALSE
                    utilities.assert_equals(expect=main.TRUE, actual=current_links_result,
                                            onpass="ONOS" + str(int(controller + 1)) + " links view is correct",
                                            onfail="ONOS" + str(int(controller + 1)) + " links view is incorrect")
            except:
                main.log.error("something went wrong in topo comparison")
                main.log.warn(repr(devices))
                main.log.warn(repr(ports))
                main.log.warn(repr(links))

            devices_results = devices_results and current_devices_result
            ports_results = ports_results and current_ports_result
            links_results = links_results and current_links_result
            topo_result = devices_results and ports_results and links_results
            elapsed = time.time() - start_time
        # TODO: make sure this step is non-blocking. IE add a timeout
        main.log.report("Very crass estimate for topology discovery/convergence: " + str(elapsed) + " seconds")
        utilities.assert_equals(expect=main.TRUE, actual=topo_result,
                                onpass="Topology Check Test successful",
                                onfail="Topology Check Test NOT successful")
        if topo_result == main.TRUE:
            main.log.report("ONOS topology view matches Mininet topology")


    def CASE9(self, main):
        '''
        Link s3-s28 down
        '''
        import time
        # NOTE: You should probably run a topology check after this

        link_sleep = int(main.params['timers']['LinkDiscovery'])

        description = "Turn off a link to ensure that Link Discovery is working properly"
        main.log.report(description)
        main.case(description)


        main.step("Kill Link between s3 and s28")
        Link_Down = main.Mininet1.link(END1="s3", END2="s28", OPTION="down")
        main.log.info("Waiting " + str(link_sleep) + " seconds for link down to be discovered")
        time.sleep(link_sleep)
        utilities.assert_equals(expect=main.TRUE, actual=Link_Down,
                                onpass="Link down successful",
                                onfail="Failed to bring link down")
        # TODO: do some sort of check here
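        # NOTE: a sketch of one possible check, assuming links() returns the
        #       same JSON list of link entries that CASE5/CASE8 parse; after
        #       the cut the reported count should drop by the two directed
        #       links between s3 and s28 (kept commented out):
        # import json
        # onos_links = json.loads(main.ONOScli1.links())
        # main.log.info("ONOS now reports " + str(len(onos_links)) + " links")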

    def CASE10(self, main):
        '''
        Link s3-s28 up
        '''
        import time
        # NOTE: You should probably run a topology check after this

        link_sleep = int(main.params['timers']['LinkDiscovery'])

        description = "Restore a link to ensure that Link Discovery is working properly"
        main.log.report(description)
        main.case(description)

        main.step("Bring link between s3 and s28 back up")
        Link_Up = main.Mininet1.link(END1="s3", END2="s28", OPTION="up")
        main.log.info("Waiting " + str(link_sleep) + " seconds for link up to be discovered")
        time.sleep(link_sleep)
        utilities.assert_equals(expect=main.TRUE, actual=Link_Up,
                                onpass="Link up successful",
                                onfail="Failed to bring link up")
        # TODO: do some sort of check here
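        # NOTE: the commented-out link-count sketch from CASE9 could be reused
        #       here to confirm the two directed links reappear after restore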


    def CASE11(self, main):
        '''
        Switch Down
        '''
        # NOTE: You should probably run a topology check after this
        import time

        switch_sleep = int(main.params['timers']['SwitchDiscovery'])

        description = "Killing a switch to ensure it is discovered correctly"
        main.log.report(description)
        main.case(description)

        # TODO: Make this switch parameterizable
        main.step("Kill s28 ")
        main.log.report("Deleting s28")
        # FIXME: use new dynamic topo functions
        main.Mininet1.del_switch("s28")
        main.log.info("Waiting " + str(switch_sleep) + " seconds for switch down to be discovered")
        time.sleep(switch_sleep)
        # Peek at the deleted switch
        main.log.warn(main.ONOScli1.get_device(dpid="0028"))
        # TODO: do some sort of check here
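        # NOTE: a sketch of a possible check, assuming get_device() returns a
        #       dict with an 'available' field like the device entries that
        #       CASE5 collects; the deleted switch should now be unavailable
        #       or absent (kept commented out):
        # device = main.ONOScli1.get_device(dpid="0028")
        # if device is None or device.get('available') is False:
        #     main.log.info("s28 is no longer available in ONOS")
        # else:
        #     main.log.warn("s28 still reported as available: " + repr(device))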

    def CASE12(self, main):
        '''
        Switch Up
        '''
        # NOTE: You should probably run a topology check after this
        import time
        # FIXME: use new dynamic topo functions
        switch_sleep = int(main.params['timers']['SwitchDiscovery'])

        description = "Adding a switch to ensure it is discovered correctly"
        main.log.report(description)
        main.case(description)

        main.step("Add back s28")
        main.log.report("Adding back s28")
        main.Mininet1.add_switch("s28", dpid='0000000000002800')
        # TODO: New dpid or same? Ask Thomas?
        main.Mininet1.add_link('s28', 's3')
        main.Mininet1.add_link('s28', 's6')
        main.Mininet1.add_link('s28', 'h28')
        main.Mininet1.assign_sw_controller(sw="28",
                                           ip1=ONOS1_ip, port1=ONOS1_port)
        main.log.info("Waiting " + str(switch_sleep) + " seconds for switch up to be discovered")
        time.sleep(switch_sleep)
        # Peek at the added switch
        main.log.warn(main.ONOScli1.get_device(dpid="0028"))
        # TODO: do some sort of check here
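        # NOTE: the commented-out availability sketch from CASE11 could be
        #       reused here, expecting the device to be reported as available
        #       again once discovery completes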

    def CASE13(self, main):
        '''
        Clean up
        '''
        import os
        import time
        description = "Test Cleanup"
        main.log.report(description)
        main.case(description)
        main.step("Killing tcpdumps")
        main.Mininet2.stop_tcpdump()

        main.step("Copying MN pcap and ONOS log files to test station")
        testname = main.TEST
        # NOTE: MN pcap file is being saved to ~/packet_captures
        #       scp this file as MN and TestON aren't necessarily the same vm
        # FIXME: scp
        ##### mn files
        # TODO: Load these from params
        # NOTE: must end in /
        log_folder = "/opt/onos/log/"
        log_files = ["karaf.log", "karaf.log.1"]
        # NOTE: must end in /
        dst_dir = "~/packet_captures/"
        for f in log_files:
            main.ONOSbench.secureCopy("sdn", ONOS1_ip, log_folder + f, "rocks",
                                      dst_dir + str(testname) + "-ONOS1-" + f)

        # std*.log's
        # NOTE: must end in /
        log_folder = "/opt/onos/var/"
        log_files = ["stderr.log", "stdout.log"]
        # NOTE: must end in /
        dst_dir = "~/packet_captures/"
        for f in log_files:
            main.ONOSbench.secureCopy("sdn", ONOS1_ip, log_folder + f, "rocks",
                                      dst_dir + str(testname) + "-ONOS1-" + f)


        # sleep so scp can finish
        time.sleep(10)
        main.step("Packing and rotating pcap archives")
        os.system("~/TestON/dependencies/rotate.sh " + str(testname))


        # TODO: actually check something here
        utilities.assert_equals(expect=main.TRUE, actual=main.TRUE,
                                onpass="Test cleanup successful",
                                onfail="Test cleanup NOT successful")