blob: b2ab1f8f4ae5638e100222844a24caf1e37cc399 [file] [log] [blame]
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08001#
2# Copyright (c) 2011,2012,2013 Big Switch Networks, Inc.
3#
4# Licensed under the Eclipse Public License, Version 1.0 (the
5# "License"); you may not use this file except in compliance with the
6# License. You may obtain a copy of the License at
7#
8# http://www.eclipse.org/legal/epl-v10.html
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13# implied. See the License for the specific language governing
14# permissions and limitations under the License.
15#
16
17import re
18import numbers
19import collections
20import traceback
21import types
22import json
23import time
24import sys
25import datetime
26import os
27import subprocess
28import socket
29import urllib2 # exception, dump_log()
30
31import modi
32import error
33import command
34import run_config
35import rest_to_model
36import url_cache
37
38from midw import *
39from vnsw import *
40#from html5lib.constants import DataLossWarning
41
# Flag: this CLI build targets an ONOS controller.
# NOTE(review): consumers of this flag are not visible in this chunk --
# confirm its exact semantics with callers before changing.
onos=1
#
# ACTION PROCS
# Format actions for stats per table
def remove_unicodes(actions):
    """
    Strip the u'...' unicode markers that str()-formatting leaves in an
    action string, so stats output reads cleanly.

    The character-by-character scan below deliberately removes the
    character preceding each u' occurrence as well (see TODO); its
    behavior is preserved exactly.

    @param actions a string (possibly empty/None) of formatted actions
    @return the cleaned string; always a str, '' for falsy input
    """
    if actions:
        #TODO: Check:- Why I have to remove last two character from string
        #instead of 1 character to get rid of comma from last action
        a=''
        b=''
        newActions=''
        isRemoved_u = False
        for ch in actions:
            if ch =='u':
                a= 'u'
            if ch =='\'':
                b= '\''
            if isRemoved_u:
                isRemoved_u=False
                continue
            if (a+b) == 'u\'':
                newActions = newActions[:-1]
                a= ''
                isRemoved_u = True
            else:
                newActions += ch
        return newActions
    else:
        # BUG FIX: this branch was a bare '' expression statement, so the
        # function silently returned None for falsy input; callers that
        # expect a string now always get one.
        return ''
def renameActions(actions):
    """
    Rewrite raw controller action/instruction names into the short forms
    shown by the CLI (e.g. GOTO_TABLE -> GOTO, SET_DL_SRC -> SRC_MAC).

    @param actions a string of formatted actions
    @return the same string with each known token rewritten
    """
    # ordered (old, new) pairs; applied sequentially exactly as before
    rewrites = (
        ('GOTO_TABLE', 'GOTO'),
        ('WRITE_ACTIONS', 'WRITE'),
        ('APPLY_ACTIONS', 'APPLY'),
        ('DEC_NW_TTL: True', 'DEC_NW_TTL'),
        ('POP_MPLS: True', 'POP_MPLS'),
        ('COPY_TTL_IN: True', 'COPY_TTL_IN'),
        ('COPY_TTL_OUT: True', 'COPY_TTL_OUT'),
        ('DEC_MPLS_TTL: True', 'DEC_MPLS_TTL'),
        ('SET_DL_SRC', 'SRC_MAC'),
        ('SET_DL_DST', 'DST_MAC'),
        ('SET_NW_SRC', 'SRC_IP'),
        ('SET_NW_DST', 'DST_IP'),
        ('CLEAR_ACTIONS: {CLEAR_ACTIONS: True}', 'CLEAR_ACTIONS'),
    )
    for old, new in rewrites:
        actions = actions.replace(old, new)
    return actions
89
def check_rest_result(result, message=None):
    """
    Raise error.CommandRestError when a REST response dict carries an
    'error_type' entry; non-mapping results are accepted as-is.
    """
    if not isinstance(result, collections.Mapping):
        return
    if result.get('error_type'):
        raise error.CommandRestError(result, message)
95
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -080096tunnelset_id=None
97tunnelset_dict=[]
98def tunnelset_create(data=None):
99 global tunnelset_id,tunnelset_dict
100 if sdnsh.description: # description debugging
101 print "tunnelset_create:" , data
102 if data.has_key('tunnelset-id'):
103 if (tunnelset_id != None):
104 if sdnsh.description: # description debugging
105 print "tunnelset_create: previous data is not cleaned up"
106 tunnelset_id=None
107 tunnelset_dict=[]
108 tunnelset_id=data['tunnelset-id']
109 tunnelset_dict=[]
110 if sdnsh.description: # description debugging
111 print "tunnelset_create:" , tunnelset_id
112
113def tunnelset_config_exit():
114 global tunnelset_id,tunnelset_dict
115 if sdnsh.description: # description debugging
116 print "tunnelset_config_exit entered", tunnelset_dict
117 if tunnelset_dict:
118 url_str = ""
119 entries = tunnelset_dict
120 url_str = "http://%s/rest/v1/tunnelset/" % (sdnsh.controller)
121 obj_data = {}
122 obj_data['tunnelset_id']=tunnelset_id
123 obj_data['tunnel_params']=entries
124 result = "fail"
125 try:
126 result = sdnsh.store.rest_post_request(url_str,obj_data)
127 except Exception, e:
128 errors = sdnsh.rest_error_to_dict(e)
129 print sdnsh.rest_error_dict_to_message(errors)
130 # LOOK! successful stuff should be returned in json too.
131 tunnelset_dict = []
132 tunnelset_id = None
133 curr_tunnel_id = None
134 if result != "success":
135 print "command failed"
136 else:
137 print "empty command"
138 #Clear the transit information
139
def tunnelset_remove(data=None):
    """
    Handler for 'no tunnelset <id>': issue a DELETE for the tunnelset.

    @param data dict carrying 'tunnelset-id'
    """
    if sdnsh.description: # description debugging
        print "tunnelset_remove:" , data
    tunnelset_id=data['tunnelset-id']
    # NOTE(review): this posts the delete to /rest/v1/tunnel/ while
    # tunnelset_config_exit creates via /rest/v1/tunnelset/ -- confirm the
    # controller really routes tunnelset deletes through the tunnel endpoint.
    url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
    obj_data = {}
    obj_data['tunnelset_id']=data['tunnelset-id']
    result = "fail"
    try:
        result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
    except Exception, e:
        errors = sdnsh.rest_error_to_dict(e)
        print sdnsh.rest_error_dict_to_message(errors)
    # controller replies with a "SUCCESS..." string on success; echo
    # anything else so the user sees the failure text
    if not result.startswith("SUCCESS"):
        print result
155
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800156tunnel_id=None
157tunnel_dict={}
158def tunnel_create(data=None):
159 global tunnel_id,tunnel_dict
160 if sdnsh.description: # description debugging
161 print "tunnel_create:" , data
162 if data.has_key('tunnel-id'):
163 if (tunnel_id != None):
164 if sdnsh.description: # description debugging
165 print "tunnel_create: previous data is not cleaned up"
166 tunnel_id=None
167 tunnel_dict={}
168 tunnel_id=data['tunnel-id']
169 tunnel_dict[tunnel_id]=[]
170 if data.has_key('node-label'):
171 tunnel_dict[tunnel_id].append(data['node-label'])
172 if data.has_key('adjacency-label'):
173 tunnel_dict[tunnel_id].append(data['adjacency-label'])
174 if sdnsh.description: # description debugging
175 print "tunnel_create:" , tunnel_id, tunnel_dict
176
def tunnel_config_exit():
    """
    Called when the config-tunnel submode exits.

    If we are nested inside a config-tunnelset submode (tunnelset_id is
    set), the tunnel's params are appended to tunnelset_dict and posted
    later by tunnelset_config_exit(); otherwise the tunnel is POSTed to
    the controller immediately.
    """
    global tunnel_id,tunnel_dict
    if sdnsh.description: # description debugging
        print "tunnel_config_exit entered", tunnel_dict

    # NOTE(review): unconditional lookup -- raises KeyError if the submode
    # exits without tunnel_create having set tunnel_id; confirm the mode
    # machinery guarantees tunnel_create ran first.
    entries = tunnel_dict[tunnel_id]
    obj_data = {}
    obj_data['tunnel_id']=tunnel_id
    obj_data['label_path']=entries
    if tunnelset_id:
        # nested under a tunnelset: defer the POST, just accumulate
        tunnelset_dict.append(obj_data)
        tunnel_dict = {}
        tunnel_id = None
    elif tunnel_dict:
        # standalone tunnel: POST it now
        url_str = ""
        url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
        result = "fail"
        try:
            result = sdnsh.store.rest_post_request(url_str,obj_data)
        except Exception, e:
            errors = sdnsh.rest_error_to_dict(e)
            print sdnsh.rest_error_dict_to_message(errors)
        # LOOK! successful stuff should be returned in json too.
        tunnel_dict = {}
        tunnel_id = None
        if result != "success":
            print "command failed"
    else:
        print "empty command"
    #Clear the transit information
207
208def tunnel_remove(data=None):
209 if sdnsh.description: # description debugging
210 print "tunnel_remove:" , data
211 tunnel_id=data['tunnel-id']
212 url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
213 obj_data = {}
214 obj_data['tunnel_id']=data['tunnel-id']
215 result = "fail"
216 try:
217 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
218 except Exception, e:
219 errors = sdnsh.rest_error_to_dict(e)
220 print sdnsh.rest_error_dict_to_message(errors)
221 if not result.startswith("SUCCESS"):
222 print result
223
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800224
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800225policy_obj_data = {}
226def policy_create(data=None):
227 global policy_obj_data
228 if sdnsh.description: # description debugging
229 print "policy_create:" , data
230 if data.has_key('policy-id'):
231 if policy_obj_data:
232 if sdnsh.description: # description debugging
233 print "policy_create: previous data is not cleaned up"
234 policy_obj_data = {}
235 policy_obj_data['policy_id'] = data['policy-id']
236 policy_obj_data['policy_type'] = data['policy-type']
237 if data.has_key('src_ip'):
238 for key in data:
239 policy_obj_data[key] = data[key]
240 if data.has_key('priority'):
241 policy_obj_data['priority'] = data['priority']
242 if data.has_key('tunnel-id'):
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800243 if policy_obj_data.has_key('tunnelset_id'):
244 print "ERROR: Policy can not point to both tunnelset and tunnel"
245 return
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800246 policy_obj_data['tunnel_id'] = data['tunnel-id']
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800247 if data.has_key('tunnelset-id'):
248 if policy_obj_data.has_key('tunnel_id'):
249 print "ERROR: Policy can not point to both tunnelset and tunnel"
250 return
251 policy_obj_data['tunnelset_id'] = data['tunnelset-id']
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800252
253 if sdnsh.description: # description debugging
254 print policy_obj_data
255
256def policy_config_exit():
257 global policy_obj_data
258 if sdnsh.description: # description debugging
259 print "policy_config_exit entered", policy_obj_data
260 if policy_obj_data:
261 url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
262 result = "fail"
263 try:
264 result = sdnsh.store.rest_post_request(url_str,policy_obj_data)
265 except Exception, e:
266 errors = sdnsh.rest_error_to_dict(e)
267 print sdnsh.rest_error_dict_to_message(errors)
268 if result != "success":
269 print "command failed"
270 policy_obj_data = {}
271 else:
272 print "empty command"
273 #Clear the transit information
274
275def policy_remove(data=None):
276 if sdnsh.description: # description debugging
277 print "policy_remove:" , data
278 policy_id=data['policy-id']
279 url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
280 obj_data = {}
281 obj_data['policy_id']=data['policy-id']
282 result = "fail"
283 try:
284 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
285 except Exception, e:
286 errors = sdnsh.rest_error_to_dict(e)
287 print sdnsh.rest_error_dict_to_message(errors)
288 if result != "deleted":
289 print "command failed"
290
291
292
293def write_fields(obj_type, obj_id, data):
294 """
295 Typical action to update fields of a row in the model
296
297 @param obj_type a string, the name of the db table to update
298 @param obj_id a string, the value of the primary key in for the table
299 @param data a dict, the name:value pairs of data to update in the table
300 """
301 if sdnsh.description: # description debugging
302 print "write_fields:", obj_type, obj_id, data
303
304 pk_name = mi.pk(obj_type)
305 if not pk_name:
306 raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
307 if sdnsh.description: # description debugging
308 print "write_fields:", obj_type, pk_name, obj_id, data
309 for fk in mi.obj_type_foreign_keys(obj_type):
310 if fk in data and mi.is_null_allowed(obj_type, fk):
311 if data[fk] == 'default': # XXX much too magic, seems an option here would be good
312 data[fk] = None
313
314 result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
315 check_rest_result(result)
316
317
318def verify_row_includes(obj_type, pk_value, data, verify):
319 """
320 Intended to raise an exception when a user enters 'no field value',
321 and the field isn't currently set to value, for example:
322 'address-space as1 ; no address-space as2', should complain
323 that the 'address-space' field isn't currently set to 'as2'.
324
325 @param obj_type a string, identifies the db table
326 @param pk_value a string, identifies the value for the primary key
327 @param data is a dict, collecting the name:value pairs from the description
328 @verify the string or list of field names to be verified
329 """
330 if sdnsh.description: # description debugging
331 print "validate_row_includes:", obj_type, pk_value, data, verify
332
333 if type(verify) == str:
334 verify = [verify] # if not a list, make it a list
335
336 try:
337 row = sdnsh.get_object_from_store(obj_type, pk_value)
338 except Exception, e:
339 if sdnsh.debug or sdnsh.debug_backtrace:
340 print 'Failed lookup of %s:%s:%s', (obj_type, pk_value, e)
341 traceback.print_exc()
342 raise error.ArgumentValidationError("%s: '%s' doesn't exist" %
343 (obj_type, pk_value))
344 return
345
346 if sdnsh.description: # description debugging
347 print "validate_includes: ", row
348 for field in [x for x in verify if x in data and x in row]:
349 if row[field] != data[field]:
350 raise error.ArgumentValidationError("%s: %s found '%s' current value '%s'" %
351 (obj_type, field, data[field], row[field]))
352
353
def reset_fields(obj_type, arg_data,
                 obj_id = None, fields = None, match_for_no = None):
    """
    For an obj_type, revert fields back to their default value.
    This is the typical action for 'no' commands.

    When match_for_no is set, this is a string or list of fields whose
    values must match in the table for the primary key associated with the
    reset.  This allows command descriptions to identify any fields which
    need to be checked against, when they are explicitly named in the 'no'
    command, so that 'no XXX value' will verify that 'value' matches the
    current row's value before allowing the reset to continue.

    @param obj_type a string, identifies the db table
    @param obj_id a string, identifies the value for the primary key of the row in the table,
            possibly unset, the key is looked for in the arg_data in that case.
    @param arg_data a dict, collection of name:value pairs from the description
    @param fields a list, collection of fields to update in the table
    @param match_for_no a string or list, list of fields to check for matched values in arg_data
    @raise error.CommandDescriptionError for missing obj_type/pk/field metadata
    @raise error.CommandError when the REST update fails
    """

    if obj_type == None:
        raise error.CommandDescriptionError("No object to reset (missing obj-type)")

    pk_name = mi.pk(obj_type)
    # If the fields aren't specified explicitly, then derive from the arg_data
    if fields is None:
        fields = []
        for field in arg_data.keys():
            # Only add arguments that correspond to valid fields in the object
            if mi.obj_type_has_field(obj_type, field):
                if field != pk_name: # don't reset primary keys
                    fields.append(field)

    if len(fields) == 0:
        raise error.CommandDescriptionError("No fields to reset: type: %s" % obj_type)

    # Get the primary key name
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    # Resolve the row identity: explicit obj_id, then arg_data, then the
    # model's default pk value (singleton rows)
    if obj_id == None:
        if pk_name in arg_data:
            obj_id = arg_data[pk_name]
        elif mi.field_default_value(obj_type, pk_name):
            # unusual, but not impossible for singletons
            obj_id = mi.field_default_value(obj_type, pk_name)
        else:
            raise error.CommandDescriptionError("Can't find id value name for type: %s"
                                                " field %s" % (obj_type, pk_name))

    # 'no field value' must verify 'value' is the current value first
    if match_for_no:
        verify_row_includes(obj_type, obj_id, arg_data, match_for_no)

    # Get the default values of the specified field from CLI model info
    data = {}
    for field in fields:
        if field == pk_name:
            continue
        type_info = mi.cli_model_info.get_field_info(obj_type, field)
        if type_info == None:
            raise error.CommandDescriptionError("Can't find field details for "
                                                "field %s in type %s" % (field, obj_type))
        data[field] = type_info.get('default')
        # booleans with no model default are reset to False
        if data[field] == None and type_info.get('type') == 'BooleanField':
            data[field] = False
        # why does boolean not respect the default in the model?!?
        # data[field] = type_info.get('default') if type_info.get('type') != 'BooleanField' else False

    if sdnsh.description: # description debugging
        print "reset_fields:", obj_type, pk_name, obj_id, data, match_for_no

    # Invoke the REST API to set the default values
    try:
        result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
    except Exception, e:
        errors = sdnsh.rest_error_to_dict(e, obj_type)
        raise error.CommandError('REST', sdnsh.rest_error_dict_to_message(errors))
431
432
def obj_type_fields_have_default_value(obj_type, row, data):
    """
    Return True when all the fields have a default value,
    row is the queried data from the store,
    data is the data to be updated.

    The goal is to determine whether to delete or update
    the row in the store: if applying 'data' on top of 'row' leaves
    every non-key field at its model default, the row is deletable.

    Key fields, foreign keys, compound-key components and the pseudo
    field 'Idx' are excluded from the check.
    """

    ckf = []
    if mi.is_compound_key(obj_type, mi.pk(obj_type)):
        # XXX primitive compound keys' too?
        ckf = mi.compound_key_fields(obj_type, mi.pk(obj_type))

    for field in mi.obj_type_fields(obj_type):
        if mi.is_primary_key(obj_type, field):
            continue
        if mi.is_foreign_key(obj_type, field):
            # perhaps only allow a single foreign key?
            continue
        # also any fields which are used to compound the ident.
        if field in ckf:
            continue
        # Needs a better way to identify non-model-fields
        if field == 'Idx':
            continue
        if mi.is_null_allowed(obj_type, field):
            # nullable fields count as "default" when absent or None
            # does this need to be more complex?
            if field in data and data[field] != None:
                return False
            continue # next field
        default_value = mi.field_default_value(obj_type, field)
        # a field with no model default can never be "all defaults"
        if default_value == None:
            if sdnsh.description: # description debugging
                print 'default_value: no default: %s %s' % (obj_type, field)
            return False
        # check to see if the updated value would be the default
        if field in data and data[field] != default_value:
            if sdnsh.description: # description debugging
                print 'default_value: not default %s %s %s' % \
                    (field, data[field], default_value)
            return False
        elif row.get(field, default_value) != default_value:
            # the stored value is non-default, but the update would
            # restore the default: keep scanning the remaining fields
            if field in data and data[field] == default_value:
                if sdnsh.description: # description debugging
                    print 'default_value: db not default %s %s %s' \
                        ' new value in data %s is default' % \
                        (field, row[field], default_value, data[field])
                continue
            if sdnsh.description: # description debugging
                print 'default_value: db not default %s %s %s' % \
                    (field, row[field], default_value)
            return False
    return True
489
490
def update_config(obj_type, obj_id, data, no_command):
    """
    update_config is intended to write a row when the described data
    is different from the default values of the fields of the row.

    When the data described in the call updates the field's values
    to all default values, the row associated with the obj_id is
    deleted.

    This is intended to be used for models which contain configuration
    row data, and that every field has a default value,
    so that when the config data is transitioned to the default
    state, the row is intended to be removed. For these sorts of
    command descriptions, updating a field to some default value
    may result in the row getting deleted.

    @param obj_type a string, the name of the db table
    @param obj_id a string, primary key value (may be overridden by data)
    @param data a dict, field:value pairs to apply
    @param no_command a bool, True for 'no' commands (reset to default/None)
    """

    c_data = dict(data) # make a local copy
    if sdnsh.description: # description debugging
        print "update_config: ", obj_type, obj_id, c_data, no_command

    if not mi.obj_type_exists(obj_type):
        raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)

    # collect any dict.key names which aren't fields in the object
    for unknown_field in [x for x in c_data.keys() if not mi.obj_type_has_field(obj_type, x)]:
        del c_data[unknown_field]

    # if its a no command, set the value to 'None' if it's allowed,
    # or to its default value otherwise
    if no_command:
        for field in c_data.keys():
            if mi.is_null_allowed(obj_type, field):
                c_data[field] = None
            else:
                # required to have a default value
                c_data[field] = mi.field_default_value(obj_type, field)

    # Get the primary key name
    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    # a pk value in 'data' wins over the passed-in obj_id
    pk_value = obj_id
    if pk_name in data:
        pk_value = data[pk_name]
    if pk_name in c_data:
        del c_data[pk_name]

    # Query for the row, if it doesn't exist, create the item if any item isn't default
    if sdnsh.description: # description debugging
        print "update_config: query:", obj_type, pk_value

    result = sdnsh.rest_query_objects(obj_type, { pk_name : pk_value })
    check_rest_result(result)
    if len(result) == 0:
        # result[0] -> dictionary of field:value pairs
        # check to ensure c_data isn't just default row values
        if not obj_type_fields_have_default_value(obj_type, {}, c_data):
            if sdnsh.description: # description debugging
                print "update_config: create:", obj_type, c_data
            # populate the create dictionary
            create_dict = dict(c_data)
            create_dict[pk_name] = pk_value
            result = sdnsh.rest_create_object(obj_type, create_dict)
            check_rest_result(result)
        else:
            if sdnsh.description: # description debugging
                print "update_config: no current row"
            return
    else:
        if sdnsh.description: # description debugging
            print "update_config: found row", result[0]

    # NOTE(review): after a create above, 'result' is the create response,
    # and execution falls through to the update/delete logic below --
    # confirm this fall-through is intentional.
    if len(result) > 1:
        raise error.CommandInternalError("Multiple rows for obj-type: %s: pk %s" %
                                         (obj_type, pk_value))

    # See if the complete row needs to be deleted.
    # For each of the current fields, if a field's default doesn't exist,
    # skip the row delete, or if any field has a non-default value, update
    # the requested fields instead of deleting the row.
    if obj_type_fields_have_default_value(obj_type, result[0], c_data):
        # if the table has foreign keys, check no children refer to this table.
        no_foreign_keys_active = True
        if obj_type in mi.foreign_key_xref:
            for (fk_obj_type, fk_fn) in mi.foreign_key_xref[obj_type][mi.pk(obj_type)]:
                try:
                    rows = sdnsh.get_table_from_store(fk_obj_type, fk_fn,
                                                      pk_value, "exact")
                except Exception, e:
                    rows = []
                if len(rows):
                    if sdnsh.description: # description debugging
                        print "update_config: foreign key active:", \
                            fk_obj_type, fk_fn, pk_value
                    no_foreign_keys_active = False
                    break

        if no_foreign_keys_active:
            if sdnsh.description: # description debugging
                print "update_config: delete:", obj_type, pk_value
            try:
                delete_result = sdnsh.rest_delete_objects(obj_type, { pk_name : pk_value })
                check_rest_result(delete_result)
            except Exception, e:
                errors = sdnsh.rest_error_to_dict(e)
                raise error.CommandInvocationError(sdnsh.rest_error_dict_to_message(errors))
            return
    # XXX if a row from some table is removed, and that table is using
    # foreign keys, then the table which is refered to ought to be
    # reviewed, to see if all the entries of the row which this table
    # refer's to are default, and if that parent table is a config-style
    # table, with all default values for every field, there's a good
    # argument that the row ought to be removed.

    # See if any of the c_data items in the matching row are different
    # (ie: is this update really necessary?)
    update_necessary = False
    for (name, value) in c_data.items():
        if name in result[0]:
            if value != result[0][name]:
                update_necessary = True
                if sdnsh.description: # description debugging
                    print "update_config: update necessary:", name, result[0][name], value
        else:
            update_necessary = True

    if not update_necessary:
        if sdnsh.description: # description debugging
            print "update_config: no update needed", obj_type, pk_name, pk_value
        return

    if sdnsh.description: # description debugging
        print "update_config: update:", obj_type, pk_name, pk_value, c_data
    # Invoke the REST API to set the default values
    result = sdnsh.rest_update_object(obj_type, pk_name, pk_value, c_data)
    check_rest_result(result)
628
629
630def delete_objects(obj_type, data, parent_field=None, parent_id=None):
631 """
632 Delete a row in the table.
633
634 @param obj_type a string, the name of the table to update
635 @param data a dictionary, name:value pairs to describe the delete
636 @param parent_field a string, the name of a field in the obj_type,
637 identifying a relationship between this table, and another table
638 @param parent_id a string, the value of the parent_field, to identify
639 another row in the other table identified by a field in this table
640 """
641
642 pk_name = mi.pk(obj_type)
643 if not pk_name:
644 raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
645
646 query_data = dict(data)
647 if parent_field:
648 query_data[parent_field] = parent_id
649
650 # case conversion
651 for field in data:
652 if mi.obj_type_has_field(obj_type, field):
653 case = mi.get_obj_type_field_case_sensitive(obj_type, field)
654 if case:
655 if sdnsh.description: # description debugging
656 print 'delete_objects: case convert %s:%s to %s' % \
657 (obj_type, field, case)
658 data[field] = utif.convert_case(case, data[field])
659
660 query_result = sdnsh.rest_query_objects(obj_type, query_data)
661 check_rest_result(query_result)
662 #
663 # if there were no results, try to delete by removing any
664 # items which have "None" values
665 if len(query_result) == 0:
666 for key in query_data.keys():
667 if query_data[key] == None:
668 del query_data[key]
669 query_result = sdnsh.rest_query_objects(obj_type, query_data)
670 check_rest_result(query_result)
671
672 if sdnsh.description: # description debugging
673 print "delete_objects:", obj_type, query_data
674 delete_result = sdnsh.rest_delete_objects(obj_type, query_data)
675 check_rest_result(delete_result)
676
677 for item in query_result:
678 key = item[pk_name]
679 sdnsh.cascade_delete(obj_type, key)
680
681
def set_data(data, key, value):
    """
    Action to associate a new name:value pair with 'data', the dictionary used
    to pass to REST API's.  Allows the action to describe a value for a field
    which wasn't directly named in the description.

    @param data a dict, mutated in place
    @param key the field name to set
    @param value the value to associate with 'key'
    """
    if sdnsh.description: # description debugging
        print "set_data:", data, key, value
    data[key] = value
692
693
def write_object(obj_type, data, parent_field=None, parent_id=None):
    """
    Write a new row into a specific table, or update an existing row
    when one already matches the primary key.

    @param obj_type a string, the name of the db table
    @param data a dict, name:value pairs for the row (copied, not mutated)
    @param parent_field a string, field linking this row to a parent table
    @param parent_id a string, the parent row's key value
    """
    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).

    if sdnsh.description: # description debugging
        print 'write_object: params ', obj_type, data, parent_field, parent_id
    data = dict(data) # data is overwriten in various situations below
    if parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description;'
                                                'improperly configured parent info for create-object')
        data[parent_field] = parent_id

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)

    # look for unpopulated foreign keys.
    fks = mi.obj_type_foreign_keys(obj_type)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            # a compound-key FK value containing no '|' separator is
            # treated as unpopulated too
            if not fk in data or \
               (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):
                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                #   parts of the compound key are present

                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing a error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                else:
                    # simple FK: adopt the referenced row's pk when the
                    # supplied data identifies exactly one candidate
                    qr = sdnsh.rest_query_objects(fk_obj, data)
                    if len(qr) == 1:
                        data[fk] = qr[0][mi.pk(fk_obj)]

    # query for an existing row: by pk when supplied, otherwise by all
    # non-None fields
    if pk_name in data:
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query pk_name ', obj_type, pk_name, data
        case = mi.get_obj_type_field_case_sensitive(obj_type, pk_name)
        if case:
            data[pk_name] = utif.convert_case(case, data[pk_name])
        query_result = sdnsh.rest_query_objects(obj_type, { pk_name : data[pk_name]})
    else:
        query_data = dict([[n,v] for (n,v) in data.items() if v != None])
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query ', obj_type, query_data
        query_result = sdnsh.rest_query_objects(obj_type, query_data)
    check_rest_result(query_result)

    # Consider checking to see if all the fields listed here
    # already match a queried result, if so, no write is needed

    if (len(query_result) > 0) and (pk_name in data):
        if sdnsh.description: # description debugging
            print "write_object: update object", obj_type, pk_name, data
        result = sdnsh.rest_update_object(obj_type, pk_name, data[pk_name], data)
    else:
        if sdnsh.description: # description debugging
            print "write_object: create_object", obj_type, data
        result = sdnsh.rest_create_object(obj_type, data)

    check_rest_result(result)

    # re-run cascade deletes for rows that matched the query
    for item in query_result:
        key = item[pk_name]
        sdnsh.cascade_delete(obj_type, key)
783
784
785def delete_object(obj_type, data, parent_field=None, parent_id=None):
786 global sdnsh
787
788 data = dict(data)
789 if parent_field:
790 if not parent_id:
791 raise error.CommandDescriptionError('Invalid command description;'
792 'improperly configured parent info for delete-object')
793 data[parent_field] = parent_id
794
795 # case conversion
796 for field in data:
797 if mi.obj_type_has_field(obj_type, field):
798 case = mi.get_obj_type_field_case_sensitive(obj_type, field)
799 if case:
800 if sdnsh.description: # description debugging
801 print 'delete_object: case convert %s:%s to %s' % \
802 (obj_type, field, case)
803 data[field] = utif.convert_case(case, data[field])
804
805 if sdnsh.description: # description debugging
806 print "delete_object: ", obj_type, data
807 result = sdnsh.rest_delete_objects(obj_type, data)
808 check_rest_result(result)
809
810
def push_mode_stack(mode_name, obj_type, data, parent_field = None, parent_id = None, create=True):
    """
    Push a submode on the config stack.

    @param mode_name string, submode to enter (eg: 'config-tunnel');
           'config-tunnel' is rewritten to 'config-tunnelset-tunnel' when
           the current mode is 'config-tunnelset'
    @param obj_type string, model name backing the submode, or None when the
           submode has no associated object
    @param data dict, fields identifying (and possibly creating) the object
    @param parent_field string, field in 'data' naming the parent reference
    @param parent_id value for parent_field; when absent, the current mode's
           object is used for nested config submodes
    @param create bool, when True create the object if the query finds none
    @raise CommandSemanticError when the mode nesting or object is invalid
    @raise CommandDescriptionError for malformed command descriptions
    """
    global sdnsh, modi

    # Some few minor validations: enable only in login, config only in enable,
    # and additional config modes must also have the same prefix as the
    # current mode.
    current_mode = sdnsh.current_mode()
    if (mode_name == 'config-tunnel'):
        if (current_mode == 'config-tunnelset'):
            mode_name = 'config-tunnelset-tunnel'

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, data, parent_field, parent_id

    # See if this is a nested submode, or whether some current modes
    # need to be popped.
    if (mode_name.startswith('config-') and
        (not mode_name.startswith(current_mode) or (mode_name == current_mode))):

        sdnsh.pop_mode()
        current_mode = sdnsh.current_mode()
        # pop until it it matches
        while not mode_name.startswith(current_mode):
            if len(sdnsh.mode_stack) == 0:
                raise error.CommandSemanticError('%s not valid within %s mode' %
                                                 (mode_name, current_mode))
            sdnsh.pop_mode()
            current_mode = sdnsh.current_mode()

        # if there's a parent id, it is typically the parent, and audit
        # ought to be done to verify this
        if parent_field:
            data = dict(data)
            data[parent_field] = sdnsh.get_current_mode_obj()

    elif mode_name in ['config', 'enable', 'login']:
        # see if the mode is in the stack
        if mode_name in [x['mode_name'] for x in sdnsh.mode_stack]:
            if sdnsh.description: # description debugging
                print 'push_mode: popping stack for', mode_name
            current_mode = sdnsh.current_mode()
            # pop back down to the already-present mode, then stop
            while current_mode != mode_name:
                sdnsh.pop_mode()
                current_mode = sdnsh.current_mode()
            return


    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).
    elif parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description; '
                                                'improperly configured parent info for push-mode-stack')
        data = dict(data)
        data[parent_field] = parent_id

    key = None
    if obj_type:
        # case-normalize the field values where the model marks them so
        for field in data:
            if mi.obj_type_has_field(obj_type, field):
                case = mi.get_obj_type_field_case_sensitive(obj_type, field)
                if case:
                    if sdnsh.description: # description debugging
                        print 'push_mode: case convert %s:%s to %s' % \
                              (obj_type, field, case)
                    data[field] = utif.convert_case(case, data[field])


        # Query for the object both to see if it exists and also to determine
        # the pk value we're going to push on the stack.  We need to do
        # the query in the case where the model uses compound keys and we're
        # specifying the individual fields that compose the compound key.
        result = sdnsh.rest_query_objects(obj_type, data)
        check_rest_result(result)
        if len(result) == 0 and create:
            #
            # For vns-interface, the association of 'rule' with the data dict
            # is difficult to explain via the command description.  This is
            # obviously a poor method of dealing with the issue, but until
            # a better one arises (possibly REST api create? possibly
            # model validation code?), this solution works.
            if obj_type == 'vns-interface':
                data = associate_foreign_key_for_vns_interface(data)

            # Create the object and re-query to get the id/pk value
            # FIXME: Could probably optimize here if the data already
            # contains the pk value.
            if sdnsh.description: # description debugging
                print "push_mode: create ", obj_type, data
            result = sdnsh.rest_create_object(obj_type, data)
            check_rest_result(result)
            result = sdnsh.rest_query_objects(obj_type, data)
            check_rest_result(result)
        else:
            if sdnsh.description: # description debugging
                print "push_mode: object found", obj_type, result

        # Check (again) to make sure that we have an object
        if len(result) == 0:
            raise error.CommandSemanticError('Object not found; type = %s' % obj_type)

        # Check to make sure there aren't multiple matching objects.  If there
        # are that would indicate a problem in the command description.
        if len(result) > 1:
            raise error.CommandDescriptionError('Push mode info must identify a single object;'
                                                'type = %s; data = %s' %
                                                (obj_type, str(data)))

        # Get the id/pk value from the object info
        pk_name = mi.pk(obj_type)
        if not pk_name:
            raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
        key = result[0][pk_name]
    else:
        pk_name = '<none>'

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, pk_name, key
    # select the exit callback invoked when this submode is left
    exitCallback = None
    if (mode_name == 'config-tunnelset'):
        exitCallback = tunnelset_config_exit
    if ((mode_name == 'config-tunnel') or (mode_name == 'config-tunnelset-tunnel')):
        exitCallback = tunnel_config_exit
    if (mode_name == 'config-policy'):
        exitCallback = policy_config_exit
    sdnsh.push_mode(mode_name, obj_type, key, exitCallback)
942
943
944def pop_mode_stack():
945 global sdnsh
946
947 if sdnsh.description: # description debugging
948 print "pop_mode: "
949 sdnsh.pop_mode()
950
def confirm_request(prompt):
    """
    Ask the operator to confirm an action; a no-op in batch mode.

    @param prompt string printed before reading the reply
    @raise ArgumentValidationError unless the reply is 'y' or 'yes'
           (case insensitive)
    """
    global sdnsh

    if sdnsh.batch:
        return
    reply = raw_input(prompt).lower()
    if reply in ('y', 'yes'):
        return
    raise error.ArgumentValidationError("Expected y or yes, command: ")
960
961import c_data_handlers
962
963def convert_vns_access_list(obj_type, key, data):
964 """
965 For vns-access-group's, the access list which is the first parameter
966 needs to be converted into a vns-access-list foreign key. This is
967 possible since the vns name is part of the current object id.
968 """
969 global sdnsh, modi
970
971 key_parts = key.split('|')
972 if len(key_parts) != 3:
973 raise error.ArgumentValidationError("invalid id")
974 if not 'vns-access-list' in data:
975 raise error.ArgumentValidationError("missing vns-access-list")
976 try:
977 key_parts.pop()
978 vnskey='|'.join(key_parts)
979 entry = sdnsh.rest_query_objects('vns-access-list',
980 { 'vns' : vnskey,
981 'name' : data['vns-access-list']
982 })
983 except Exception, _e:
984 entry = []
985
986 if len(entry) != 1:
987 raise error.ArgumentValidationError("unknown acl %s" % data['vns-access-list'])
988 data['vns-access-list'] = entry[0]['id']
989
def command_query_object(obj_type, data, scoped, sort):
    """
    Return model entries (db rows) via the REST API. Try to be
    very smart about using parameters and the model definition to
    figure out how to query for the entries.

    @param obj_type string, model (table) name to query
    @param data dict, field:value pairs used to filter the query; values
           of 'all' (and a pk of None) are treated as wildcards and removed
    @param scoped when true, the current submode object constrains the query
    @param sort string, comma separated field list passed as 'orderby'
    @return list of dicts, one per matching row
    """

    if sdnsh.description:
        print 'command_query_object: ', obj_type, data, scoped, sort

    # virtualrouter types carry their search fields directly; don't chase
    # foreign keys for them (see the big_search construction below)
    skipforeignsearch=False
    if (obj_type=='virtualrouter-routingrule' or obj_type=='virtualrouter-interface'):
        skipforeignsearch=True
    # big_search describes a related search which must be done to
    # satisfy this request, see the relationship of tag-mapping to tag
    # as an example.
    big_search = []

    key = mi.pk(obj_type)
    #
    if mi.is_compound_key(obj_type, key):
        if sdnsh.description: # description debugging
            print "command_query_object: %s compound %s" % (obj_type, key)
        #
        # collect compound key names, look for these in the data,
        # if any of the values are 'all', remove the item from
        # the group of data.
        #
        # XXX needs work: we ought to check to see if the
        # compound key is part of some other key.
        #
        if scoped:
            # seed the search with the fields encoded in the submode object id
            obj_d = { key : sdnsh.get_current_mode_obj() }
            mi.split_compound_into_dict(obj_type, key, obj_d, is_prefix = True)
            for (k,v) in obj_d.items():
                if k != key and not k in data:
                    data[k] = v

        new_data = {}
        dckfs = mi.deep_compound_key_fields(obj_type, key)
        if key in data:
            mi.split_compound_into_dict(obj_type, key, data, is_prefix = True)
        foreign_obj_type_search = {}

        for kf in dckfs:
            if mi.obj_type_has_field(obj_type, kf) and kf in data and data[kf] != 'all':
                new_data[kf] = data[kf]
            elif not mi.obj_type_has_field(obj_type, kf):
                # deep_compound_keys returns references via foreign keys.
                # if the field is missing in obj_type, its likely from
                # some related fk.
                for fk in mi.obj_type_foreign_keys(obj_type):
                    (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                        fk)
                    if kf == fk_name:
                        # print "FOUND MATCH ", kf, _fk_obj_type, fk_name
                        continue
                    elif not mi.is_compound_key( _fk_obj_type, fk_name):
                        continue
                    for fkcf in mi.compound_key_fields(_fk_obj_type, fk_name):
                        if fkcf in data and data[fkcf] != 'all':
                            # assume all models use COMPOUND_KEY_FIELDS
                            if _fk_obj_type not in foreign_obj_type_search:
                                foreign_obj_type_search[_fk_obj_type] = {}
                            foreign_obj_type_search[_fk_obj_type][fkcf] = data[fkcf]
                pass
            # see if foreign key fields are indirectly named
            elif mi.is_foreign_key(obj_type, kf):
                (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                    kf)
                if fk_name in data and data[fk_name] != 'all':
                    new_data[kf] = data[fk_name]
        if (not skipforeignsearch): #skip foreign key search for routingrule type
            if len(foreign_obj_type_search):
                # This means to collect the entries, a search though a
                # related obj_type (through foreign key) will need to be done
                # a single query isn't enough, unless all entries are collected
                # consider the relationship between tag-mapping and tags
                #
                # This code seems to handle single indirected foreign key
                # lookup, but if deep_compound_key_fields() found more than
                # three layers deep (the obj-type has a fk reference to a
                # table, which had a fk reference to another table, which
                # had a value to search with), this won't do the trick.
                # at that point some sort of recursive building of the
                # foreign keys would be needed to collect up the required
                # final seraches
                for (_fk_obj_type, search) in foreign_obj_type_search.items():
                    fk_entries = sdnsh.rest_query_objects(_fk_obj_type, search)
                    # need to identify the name associated foreign key in this model
                    for fk in mi.obj_type_foreign_keys(obj_type):
                        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
                        if fk_obj == _fk_obj_type:
                            obj_type_field = fk
                            break
                    else:
                        raise error.CommandSemanticError("bigsearch: can't find fk reference"
                                                         "for %s for obj-type %s" %
                                                         (fk, obj_type))
                    big_search += [{obj_type_field:
                                    x[mi.pk(_fk_obj_type)]} for x in fk_entries]
                    # big_search would return id's for the _fk_obj_type,
                    # which can be used to search this obj_type
        # look for fields which are set in new_data, which aren't in data.
        for (field, value) in data.items():
            if field not in new_data:
                if mi.is_marked_searchable(obj_type, field) and value!='all':
                    new_data[field] = value

        data = new_data
    else:
        # Only allow fields which are searchable (XXX need a prediate)
        # only save primary key's and foreigh keys.
        new_data = {}
        if key in data and mi.is_primary_key(obj_type, key):
            new_data[key] = data[key]
        for fk in mi.obj_type_foreign_keys(obj_type):
            if fk in data:
                new_data[fk] = data[fk]
            (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, fk)
            if fk_fn in data:
                new_data[fk_fn] = data[fk_fn]
        for f in mi.obj_type_fields(obj_type):
            if f in data and f not in new_data:
                new_data[f] = data[f]

        data = new_data

    if scoped:
        data[key] = sdnsh.get_current_mode_obj()

    # a pk of 'all' or None means "no pk constraint"
    if key in data and (data[key]=='all' or data[key]==None):
        del data[key]
    #
    # Now that the fields have been disassembled as much as possible, see
    # if some of the entries need to be cobbled back together.
    fks = mi.obj_type_foreign_keys(obj_type)
    if sdnsh.description: # description debugging
        print "command_query_object: %s foreign-key %s" % (obj_type, fks)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            if not fk in data or \
               (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):

                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                #   parts of the compound key are present
                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing a error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                        if sdnsh.description: # description debugging
                            print "command_query_object: %s foreign key construction " % obj_type, data
    #
    # Do something for alias displays, for obj_types which sdnsh says
    # are aliases, find the foreign reference in the alias obj_type,
    # and use that to determine the field name (fk_fn) in the parent.
    # Do lookups based on either the alias field name, or the parent's
    # fk_fn when set in data{}
    if obj_type in mi.alias_obj_types:
        field = mi.alias_obj_type_field(obj_type)
        (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, field)
        new_data = {}
        if fk_fn in data and data[fk_fn] != 'all':
            new_data[field] = data[fk_fn]
        elif field in data and data[field] != 'all':
            new_data[field] = data[field]
        data = new_data

    #
    # The sort value ought to be a command separated list of fields within the model
    #
    if sort:
        data['orderby'] = sort

    # obj_types with no backing db model are fetched via rest_to_model
    if not mi.obj_type_has_model(obj_type):
        return rest_to_model.get_model_from_url(obj_type, data)

    if sdnsh.description: # description debugging
        print "command_query_object: ", obj_type, data

    if len(big_search):
        entries = []
        if sdnsh.description: # description debugging
            print "command_query_object: big search", big_search
        for bs in big_search:
            search = dict(list(bs.items()) + list(data.items()))
            entries += sdnsh.rest_query_objects(obj_type, search)
        # XXX needs to be re-sorted
        return entries

    return sdnsh.rest_query_objects(obj_type, data)
1198
1199
def command_display_table_join_entries(obj_type, data, entries, detail):
    """
    Augment the rows of a few specific obj_types with joined data
    before display.  'tag-mapping' rows get the parent tag's 'persist'
    flag; 'controller-node' rows get ha-role/cluster/uptime info fetched
    live from each controller's REST api.

    @param obj_type string, table the entries came from
    @param data dict, the query parameters (unused here)
    @param entries list of dicts, modified in place
    @return the (possibly unchanged) detail string
    """
    if obj_type == 'tag-mapping':
        # lift persist from the parent tag
        if len(entries) == 1:
            entry = entries[0]
            tag = sdnsh.rest_query_objects('tag', { mi.pk('tag') : entry['tag']})
            entry['persist'] = tag[0]['persist']
        else:
            # key? value? for the _dict?
            tags = create_obj_type_dict('tag', mi.pk('tag'))
            for entry in entries:
                entry['persist'] = tags[entry['tag']][0]['persist']

    if obj_type == 'controller-node':
        # This is a big odd, since the current node needs to be asked
        # which controller node it is
        url = "http://%s/rest/v1/system/controller" % sdnsh.controller

        result = sdnsh.store.rest_simple_request(url)
        check_rest_result(result)
        iam = json.loads(result)

        cluster_url = ("http://%s/rest/v1/system/ha/clustername"
                       % sdnsh.controller)
        result = sdnsh.store.rest_simple_request(cluster_url)
        check_rest_result(result)
        # perhaps ought to assert on lenresult) == 1
        clustername = json.loads(result)[0]['clustername']

        for entry in entries:
            controller = None
            if entry['id'] == iam['id']:
                controller = sdnsh.controller
            else:
                # find interfaces which have a firewall rule open for
                # tcp/80. ie: ip for the interface with rest-api role
                ips = local_interfaces_firewall_open("tcp", 80, entry)

                # controller-interfaces needs to be examined to determine
                # if there's an ip address to use to discover the ha-role
                if len(ips) == 1:
                    # Not even certain if this is reachable
                    if ips[0]['discovered-ip'] != '':
                        controller = ips[0]['discovered-ip']
                    elif ips[0]['ip'] != '':
                        controller = ips[0]['ip']
                    else:
                        entry['ha-role'] = 'no-ip'
                        entry['errors'] = 'No IP Address'
                else:
                    entry['errors'] = 'No IP Address'

            if controller == None:
                entry['errors'] = 'No ip address configured'
                entry['ha-role'] = 'unknown'
                continue

            try:
                url = "http://%s/rest/v1/system/ha/role" % controller
                result = sdnsh.store.rest_simple_request(url, timeout = 2)
                check_rest_result(result)
                ha_role = json.loads(result)
                entry['ha-role'] = ha_role['role']
                if not 'clustername' in ha_role:
                    entry['errors'] = 'no clustername in ha-role rest api'
                    entry['ha-role'] = 'Untrusted: %s' % ha_role['role']
                elif ha_role['clustername'] != clustername:
                    entry['errors'] = 'Not in HA Cluster, requires decomission'
                    entry['ha-role'] = 'External Cluster: %s' % ha_role['role']
                if 'change-date-time' in ha_role:
                    entry['change-date-time'] = ha_role['change-date-time']
                if 'change-description' in ha_role:
                    entry['change-description'] = ha_role['change-description']
            except urllib2.HTTPError, e: # timeout?
                entry['errors'] = e.reason
                entry['ha-role'] = 'unknown'
                continue
            except urllib2.URLError, e: # timeout?
                entry['errors'] = '%s: %s' % (controller, e.reason)
                entry['ha-role'] = 'unknown'
                continue # dontt try the uptime, it will fail too
            except Exception, e:
                entry['errors'] = str(e)
                entry['ha-role'] = 'unknown'

            # best-effort uptime; failures leave the field unset
            url = "http://%s/rest/v1/system/uptime" % controller
            try:
                result = sdnsh.store.rest_simple_request(url)
                check_rest_result(result)
                uptime = json.loads(result)
                entry['uptime'] = uptime['systemUptimeMsec']

            except Exception, e:
                pass

    return detail
1298
1299
1300def command_display_table(obj_type, data, detail = 'default',
1301 table_format = None, title = None, scoped = None, sort = None):
1302
1303 """
1304 Display entries from a obj_type, with some filtering done via data,
1305 and the output format described by table_format, with the devel of detail in detail
1306
1307 @param obj_type string name of the object type
1308 @param data dictionary of configured data items from the description
1309 @param table_format string describing table format to use for output
1310 @param detail string describing the detail-flavor for format
1311 @param scoped string, when not null, indicates the submode level is used to filter query request
1312 @param sort string, describes sort to append to the query request
1313 """
1314
1315 if not mi.obj_type_exists(obj_type):
1316 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1317
1318 if sdnsh.description: # description debugging
1319 print "command_display_table:", obj_type, data, table_format, detail, scoped, sort
1320
1321 if 'detail' in data:
1322 detail = data['detail']
1323
1324 if not table_format:
1325 if 'format' in data:
1326 table_format = data['format']
1327 else:
1328 table_format = obj_type
1329 if 'scoped' in data:
1330 scoped=data['scoped']
1331 del data['scoped']
1332 entries = command_query_object(obj_type, data, scoped, sort)
1333 if sdnsh.description: # description debugging
1334 print "command_display_table: %d entries found, using %s" % (len(entries), data)
1335
1336 # update any of the pretty-printer tables based on the obj_type
1337 obj_type_show_alias_update(obj_type)
1338
1339 # with_key manages whether a 'detail' or table is displayed.
1340 with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'
1341
1342 # pick foreign keys which are compound keys, explode these into fields
1343 fks = [x for x in mi.obj_type_foreign_keys(obj_type) if mi.is_compound_key(obj_type,x)]
1344 for entry in entries:
1345 for fk in fks:
1346 if fk in entry: # fk may be null-able
1347 mi.split_compound_into_dict(obj_type, fk, entry, True)
1348 #
1349 detail = command_display_table_join_entries(obj_type, data, entries, detail)
1350
1351 # use display_obj_type_rows since it (currently) joins fields for obj_types.
1352 display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)
1353 if title:
1354 return title + display
1355 return display
1356
1357
1358def command_display_rest_join_entries(table_format, data, entries, detail):
1359 """
1360 @param table_format string, identifying the final table output
1361 @param data dict, used to query the rest api output
1362 @param entries list of dicts, ready to be displayed
1363 @return string replacing detail
1364
1365 """
1366
1367 if sdnsh.description: # description debugging
1368 print "command_display_rest_join_entries: ", table_format, data, detail
1369
1370 if table_format == 'controller-interface':
1371 # join firewall rules for these interfaces
1372 for intf in entries:
1373 rules = [x['rule'] for x in sdnsh.get_firewall_rules(intf['id'])]
1374 intf['firewall'] = ', '.join(rules)
1375
1376 if table_format == 'system-clock':
1377 # join the 'time' string, possibly remove 'tz' from entries[0]
1378 entries[0]['time'] = sdnsh.get_clock_string(entries[0], data.get('detail'))
1379 return 'details' # force table format
1380
1381 return detail
1382
1383
1384def command_display_rest_type_converter(table_format, rest_type, data, entries):
1385 """
1386 the expected display table_format is a list of dictionaries
1387 each dictionary has the field : value pairs. Many rest api's
1388 return a dictionary of different layers, the description
1389 provides a rest-type, which is used to describe the form
1390 of the value returned from the rest api.
1391 """
1392
1393 if sdnsh.description: # description debugging
1394 print "command_display_rest_type_converter: ", table_format, rest_type
1395
1396 if rest_type.startswith('dict-of-list-of-'):
1397 # entries look like { row_name : [value, ...], ... more-row-value-pairs }
1398 #
1399 # dict-of-list-of: a dict with key's which are given
1400 # the name of the first token, then the dict's value is
1401 # a list which can be given an associated name.
1402 # for example 'dict-of-list-of-cluster-id|[switches]'
1403 #
1404 # 'dict-of-list-of-switch' is a dict with key : value's
1405 # where the value is a list. The member's of the list
1406 # are dictionaries. the key of the outer dict is added to
1407 # each of the dicts, and this interior dict is added to
1408 # the final output list.
1409
1410 # identify the added key from the rest_type
1411 key = rest_type.replace('dict-of-list-of-','')
1412 parts = key.split('|')
1413 names = None
1414 build_list = False
1415 if len(parts) > 0:
1416 key = parts[0]
1417 names = parts[1:] # should only be one name
1418 if len(names) > 0 and names[0][0] == '[':
1419 build_list = True
1420 formatted_list = []
1421 for (row_name, rows) in entries.items():
1422 if not rows:
1423 continue
1424 # use the names as ways of describing each of the list's items
1425 if type(rows) == list and build_list:
1426 # name[0] looks like '[switches]', requesting that this
1427 # list become switches : [rows]
1428 formatted_list.append({key : row_name, names[0][1:-1] : rows})
1429 elif type(rows) == list:
1430 for row in rows:
1431 add_dict = {key : row_name}
1432 if type(row) == str or type(row) == unicode:
1433 add_dict[names[0]] = row
1434 elif type(row) == dict:
1435 # addition names make no difference
1436 add_dict.update(row)
1437 formatted_list.append(add_dict)
1438 elif type(rows) == dict:
1439 do_append = True
1440 new_row = { key : row_name }
1441 for name in [x for x in names.keys() if x in row]:
1442 item = row[name]
1443 if type(item) == str or type(item) == unicode:
1444 new_row[name] = item
1445 if type(item) == dict:
1446 new_row[name].update(item)
1447 if type(item) == list:
1448 do_append = False
1449 for i_row in item:
1450 new_row.update(i_row)
1451 formatted_list.append(new_row)
1452 new_row = { key : row_name }
1453 if do_append:
1454 formatted_list.append(new_row)
1455
1456 entries = formatted_list
1457 elif rest_type.startswith('dict-of-dict-of-'):
1458 # entries looks like { row_name : { [ { }, ... ] } }
1459 # ^
1460 # want this |
1461 # ie: dict with a value which is a dict, whose
1462 # 'dict-of-dict-of-switch|ports' The dict has key : values
1463 # where the value is a dict. That dict has the 'switch' : key
1464 # added, and it becomes the final output dict.
1465 #
1466 # if a second name is included, then the outer dict is
1467 # examined to find these values (ie: values[names]), and these
1468 # get added to the final output dict.
1469 #
1470 # identify the added key from the rest_type
1471 key = rest_type.replace('dict-of-dict-of-','')
1472 parts = key.split('|')
1473 name = None
1474 if len(parts) > 0:
1475 names = parts[1:]
1476 key = parts[0]
1477 formatted_list = []
1478 for (row_name, row) in entries.items():
1479 row[key] = row_name
1480 do_append = False
1481 if names:
1482 new_row = {}
1483 for name in names:
1484 if name in row:
1485 item = row[name]
1486 if type(item) == str or type(item) == unicode:
1487 new_row[name] = item
1488 do_append = True
1489 elif type(item) == dict:
1490 if name == row_name:
1491 do_append = True
1492 elif type(item) == list:
1493 for i_row in item:
1494 row_items = {}
1495 row_items[key] = row_name
1496 row_items.update(i_row)
1497 formatted_list.append(row_items)
1498 if do_append:
1499 formatted_list.append(row)
1500
1501 else:
1502 formatted_list.append(row)
1503
1504 entries = formatted_list
1505 elif rest_type.startswith('dict-with-'):
1506 # rest result looks like: { k : v, k : { } }
1507 # ^
1508 # want this |
1509 # dict-with: typically used for dict returns which have
1510 # nested dict's who's values are promoted to a single
1511 # list with a dict with these values.
1512 #
1513 # identify the added key from the rest_type
1514 key = rest_type.replace('dict-with-','')
1515 names = key.split('|')
1516 collect_row = {}
1517 formatted_list = []
1518 for name in names:
1519 if name in entries:
1520 item = entries[name]
1521 if type(item) == str or type(item) == unicode or \
1522 type(item) == int or type(item) == long: # XXX float?
1523 collect_row[name] = item
1524 elif type(item) == list:
1525 for i_row in item:
1526 row_items = {}
1527 formatted_list.append(i_row)
1528 elif type(item) == dict:
1529 collect_row.update(item)
1530
1531 if len(collect_row) == 0:
1532 entries = formatted_list
1533 else:
1534 entries = [collect_row] + formatted_list
1535
1536 elif rest_type == 'dict':
1537 entries = [entries]
1538 else:
1539 raise error.CommandDescriptionError("Unknown rest-type: %s" % rest_type)
1540 return entries
1541
1542
def missing_part(key_parts, entry, key_case = False):
    """
    Return the name of the missing field of one of the strings
    in the key_parts list when it doesn't appear in the 'entry' dictionary.

    Return None otherwise.

    This is used to identify rows which don't have all the
    parts needed to construct a join key, or a db-table or
    query "key" to support addition of two different tables.

    @param key_parts list of strings
    @param entry dictionary, needs to contain each string in key_parts
    @param key_case True when key_parts entries may carry a leading '~'
           marking a field joined with its lower-cased value
    """
    for part in key_parts:
        if part in entry:
            continue
        # without key_case, or without the '~' marker, the raw
        # token itself is the missing field
        if not key_case or not part.startswith('~'):
            return part
        stripped = part[1:]
        if stripped not in entry:
            return stripped

    return None
1569
1570
def case_cvt(fn, f_dict):
    """
    Fetch f_dict[fn] as a string for join comparisons.

    A leading '~' on the field name requests case-normalization: the
    value is lower-cased before being stringified.  Missing fields
    yield the empty string.  This lets joins match case-insensitively
    while the stored values keep their original case.
    """
    if fn.startswith('~'):
        # '~field' means: look up 'field', compare lower-cased
        value = f_dict.get(fn[1:], '')
        return str(value.lower())
    return str(f_dict.get(fn, ''))
1586
1587
def obj_type_field_case(data, obj_type, field):
    """
    Return data[field] as a string, case-normalized when the model
    marks obj_type.field as case sensitive.
    """
    case = mi.get_obj_type_field_case_sensitive(obj_type, field)
    if case:
        return str(utif.convert_case(case, data[field]))
    return str(data[field])
1595
1596
1597def add_fields(dest, src):
1598 """
1599 These should both be dictionaries, leave the original entries in place
1600 when the 'dest' entries are populated from 'src'. This operation is
1601 handy since the original 'dest' entries may differ from the 'src' due
1602 to case normalization. Since having consistent names is a plus, by
1603 not updating the value with the 'src' entries, 'dest' retains its original
1604 values.
1605 """
1606 for (n,v) in src.items():
1607 if n not in dest:
1608 dest[n] = v
1609 elif str(dest[n]).lower() == str(v).lower:
1610 # should have better controls for when the case matters
1611 if sdnsh.description:
1612 print 'ADD %s skipping updating %s <-> %s' % (n, dest[n], v)
1613 else:
1614 dest[n] = v
1615
1616
1617def command_query_table(obj_type, data,
1618 clear = True,
1619 key = None, append = None, scoped = None, sort = None, crack = None):
1620 """
1621 Leave the result in command's global query_result, which can
1622 be used by other c_action steps
1623
1624 'key' is one or more fields which are concatenated together to form
1625 the display-pipeline's version of a primary key. It could be the
1626 actual primary key of the table, or it could be some fields which
1627 appear in all the rows. Once the 'key' is constructed, it used to
1628 determine how results are added to the command.query_result.
1629
1630 If the existing entries are to be 'cleared', then te primary key's
1631 are simply added to the table. When the entries aren't cleared, then
1632 the computed primary key is used to join against existing items.
1633
1634 Finally, the dict field name for the primary key is a single character: '@'
1635 This name was picked since its not possible for the database to ever
1636 use that name.
1637 """
1638
1639 if not mi.obj_type_exists(obj_type):
1640 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1641
1642 if sdnsh.description: # description debugging
1643 print "command_query_table:", obj_type, data, clear, key, append, scoped, sort, crack
1644
1645 if 'scoped' in data:
1646 scoped=data['scoped']
1647 del data['scoped']
1648
1649 result = command_query_object(obj_type, data, scoped, sort)
1650 if sdnsh.description: # description debugging
1651 print "command_query_table: %d entries found, using %s" % \
1652 (len(result), data)
1653
1654 if crack:
1655 if crack == True:
1656 crack = mi.pk(obj_type)
1657 for entry in result:
1658 mi.split_compound_into_dict(obj_type, crack, entry, True)
1659
1660 if append:
1661 for entry in result:
1662 if type(append) == dict:
1663 entry.update(append)
1664 elif type(append) == list:
1665 entry.update(dict(append))
1666 else:
1667 entry[append] = True
1668
1669 # all the field from all the rows need to be present.
1670 if key:
1671 fields = key.split('|')
1672
1673 if clear:
1674 command.query_result = result
1675 if key:
1676 for r in result:
1677 missing = missing_part(fields, r)
1678 if missing:
1679 if sdnsh.description:
1680 print "command_query_table: ' \
1681 ' missing field in row %s (%s) " % (missing, obj_type)
1682 continue
1683 r['@'] = '|'.join([obj_type_field_case(r, obj_type, f) for f in fields])
1684 else:
1685 if key == None:
1686 if command.query_resuls != None:
1687 command.query_result += result
1688 else:
1689 command.query_result = result
1690 else:
1691 r_dict = {}
1692 for r in result:
1693 missing = missing_part(fields, r)
1694 if missing:
1695 if sdnsh.description:
1696 print "command_query_table: ' \
1697 ' missing field in row %s (%s) " % (missing, obj_type)
1698 continue
1699 pk = '|'.join([r[f] for f in fields])
1700 r_dict[pk] = r
1701 if hasattr(command, 'query_result') and command.query_result:
1702 for qr in command.query_result:
1703 if '@' in qr and qr['@'] in r_dict:
1704 add_fields(qr, r_dict[qr['@']])
1705 del r_dict[qr['@']]
1706 command.query_result += r_dict.values()
1707 else:
1708 for (r, value) in r_dict.items():
1709 value['@'] = '|'.join([value[f] for f in fields])
1710 command.query_result = r_dict.values()
1711
1712
1713def command_query_rest(data,
1714 url = None, path = None, clear = True,
1715 key = None, rest_type = None, scoped = None, sort = None, append = None):
1716 """
1717 Leave the result in command's global query_result, which can
1718 be used by other c_action steps (query-table, join-table, join-rest, display)
1719
1720 'key' is one or more fields which are concatenated together to form
1721 the display-pipeline's version of a primary key. It could be the
1722 actual primary key of the table, or it could be some fields which
1723 appear in all the rows. Once the 'key' is constructed, it used to
1724 determine how results are added to the command.query_result.
1725
1726 If the existing entries are to be 'cleared', then te primary key's
1727 are simply added to the table. When the entries aren't cleared, then
1728 the computed primary key is used to join against existing items.
1729
1730 Finally, the dict field name for the primary key is a single character: '@'
1731 This name was picked since its not possible for the database to ever
1732 use that name.
1733
1734 """
1735
1736 if sdnsh.description: # description debugging
1737 print "command_query_rest:", url, path, rest_type, data, scoped, sort, append
1738
1739 if url == None and path == None:
1740 raise error.CommandDescriptionError("missing url or path")
1741
1742 if path:
1743 schema = sdnsh.sdndb.schema_detail(path)
1744 if schema:
1745 result = sdnsh.sdndb.data_rest_request(path)
1746 if key:
1747 # create a key dictionary, with the key values, pointing to
1748 # a psth in the schema.
1749 pass
1750 print 'PATH', path, result
1751 else:
1752 # if url is a list, pick the first one which can be build from the data
1753 if type(url) == list:
1754 select_url = url
1755 else:
1756 select_url = [url]
1757
1758 use_url = None
1759 for u in select_url:
1760 try:
1761 use_url = (u % data)
1762 break
1763 except:
1764 pass
1765
1766 if use_url == None:
1767 if sdnsh.description: # description debugging
1768 print "command_query_rest: no url found"
1769 return
1770
1771 query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url
1772
1773 if sdnsh.description: # description debugging
1774 print "command_query_rest: query ", query_url
1775 try:
1776 result = sdnsh.store.rest_simple_request(query_url)
1777 check_rest_result(result)
1778 entries = json.loads(result)
1779 except Exception, e:
1780 if sdnsh.description or sdnsh.debug:
1781 print 'command_query_rest: ERROR url %s %s' % (url, e)
1782 entries = []
1783
1784 if entries == None or len(entries) == 0:
1785 if sdnsh.description: # description debugging
1786 print "command_query_rest: no new entries ", query_url
1787 if clear:
1788 command.query_result = None
1789 return
1790
1791 # It certainly seems possible to map from url's to the type associated,
1792 # with the result, but it also makes sense to encode that type information
1793 # into the description
1794 if rest_type:
1795 result = command_display_rest_type_converter(None,
1796 rest_type,
1797 data,
1798 entries)
1799 if sdnsh.description: # description debugging
1800 print "command_query_rest: %s #entries %d " % (url, len(entries))
1801 print result
1802 else:
1803 result = []
1804 import fmtcnv
1805 if (onos == 1) and (url == 'links'):
1806 for entry in entries:
1807 src = entry.get('src')
1808 dst = entry.get('dst')
1809 for tempEntry in entries:
1810 if cmp(src, tempEntry.get('dst')) == 0:
1811 if cmp(dst, tempEntry.get('src')) == 0:
1812 entries.remove(tempEntry)
1813 result.append({
1814 'src-switch' : fmtcnv.print_switch_and_alias(entry['src']['dpid']),
1815 'src-port' : entry['src']['portNumber'],
1816 'src-port-state' : 0,
1817 'dst-switch' : fmtcnv.print_switch_and_alias(entry['dst']['dpid']),
1818 'dst-port' : entry['dst']['portNumber'],
1819 'dst-port-state' : 0,
1820 'type' : entry['type'],
1821 })
1822 else:
1823 result = entries
1824
1825 if append:
1826 for entry in result:
1827 if type(append) == dict:
1828 entry.update(append)
1829 elif type(append) == list:
1830 entry.update(dict(append))
1831 else:
1832 entry[append] = True
1833
1834 if key:
1835 fields = key.split('|')
1836
1837 if clear:
1838 command.query_result = result
1839 if key:
1840 for r in result:
1841 r['@'] = '|'.join([r[f] for f in fields])
1842 else:
1843 if key == None:
1844 if command.query_result != None:
1845 command.query_result += result
1846 else:
1847 command.query_result = result
1848 else:
1849 r_dict = {}
1850 for r in result:
1851 missing = missing_part(fields, r, key_case = True)
1852 if missing:
1853 if sdnsh.description:
1854 print "command_query_rest: missing field %s in row %s" % (missing, r)
1855 continue
1856 pk = '|'.join([case_cvt(f, r) for f in fields])
1857 r_dict[pk] = r
1858 for qr in command.query_result:
1859 if '@' in qr and qr['@'] in r_dict:
1860 add_fields(qr, r_dict[qr['@']])
1861
1862
def command_join_rest(url, data, key, join_field,
                      add_field = None, rest_type = None, crack = None, url_key = None):

    """
    Join the rows of a rest query against the rows already collected in
    command.query_result, adding fields from matching rest rows.

    For every existing row, 'url' (a format string or list of candidate
    format strings) is completed with that row's values and queried;
    duplicate urls are only fetched once.  Each fetched row is indexed by
    the '|'-separated 'key' fields, and existing rows are matched through
    their '|'-separated 'join_field' values.

    When 'add_field' is None, all name:value pairs of the matched row are
    merged into the existing row.  When 'add_field' is of the form
    'name|field...', only the named source fields are attached (a single
    field directly, several as a nested dict) under 'name'.

    url-key allows single row results to have a name:value added to the
    entry in situations where a single dictionary is computed after the
    rest-type conversion.   this allows simple results from the url to
    have a keyword added to allow joins.
    """
    if not hasattr(command, 'query_result'):
        if sdnsh.description:   # description debugging
            print "command_join_rest: no entries found"
        return

    if command.query_result == None:
        if sdnsh.description:   # description debugging
            print "command_join_rest: query_result: None"
        return

    if sdnsh.description:   # description debugging
        print "command_join_rest: %d entries found, using %s, url %s" % \
                (len(command.query_result), data, url)
        print "command_join_rest:", data, key, join_field

    # nothing to join without a url, a join field, and a key
    if url == None:
        return
    if join_field == None:
        return
    if key == None:
        return


    # Collect all the queries, removing any duplicates
    queries = {}
    for entry in command.query_result:
        # if url is a list, pick the first one which can be build from the data
        if type(url) == list:
            select_url = url
        else:
            select_url = [url]

        use_url = None
        for u in select_url:
            try:
                use_url = (u % entry)
                break
            except:
                # this row lacks a key for this url choice; try the next
                pass

        if use_url == None:
            if sdnsh.description:   # description debugging
                print "command_join_rest: no url found", url
            continue
        query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url

        if sdnsh.description:   # description debugging
            print "command_join_rest: query ", query_url, entry
        if query_url in queries:
            continue

        try:
            result = sdnsh.store.rest_simple_request(query_url)
            check_rest_result(result)
            entries = json.loads(result)
        except Exception, e:
            # best-effort: a failed query contributes no join rows
            entries = []

        if entries == None or len(entries) == 0:
            continue

        # It certainly seems possible to map from url's to the type associated,
        # with the result, but it also makes sense to encode that type information
        # into the description
        if rest_type:
            queries[query_url] = command_display_rest_type_converter(None,
                                                                     rest_type,
                                                                     data,
                                                                     entries)
            #
            # url_key allows the addition of a key for joining for single results
            if url_key and len(queries[query_url]) == 1:
                queries[query_url][0][url_key] = entry.get(url_key)

            if sdnsh.description:   # description debugging
                print "command_join_rest: %s #entries %d #result %s" % \
                    (url, len(entries), len(queries[query_url]))
        else:
            queries[query_url] = entries

    # From the query results, generate the dictionary to join through
    # NOTE(review): the loop below rebinds the 'url' parameter; it is not
    # used as the original argument after this point.

    key_parts = key.split('|')      # all the fields needed to make a key
    key_dict = {}                   # resulting key dictionary
    for (url, value) in queries.items():
        for entry in value:
            # see if all the key parts are in the entry
            missing = missing_part(key_parts, entry)
            if missing:
                if sdnsh.description:
                    print 'command_join_rest: missing field %s in %s' % (missing, entry)
                continue
            new_key = '|'.join([str(entry[kn]) for kn in key_parts])
            if sdnsh.description:   # description debugging
                print 'command_join_rest: new-key', new_key
            key_dict[new_key] = entry

    # Using the key-dictinoary, look for matches from the original entries

    # 'name|field...' form: split the target name from the source fields
    if add_field:
        parts = add_field.split('|')
        from_fields = None
        if len(parts):
            add_field = parts[0]
            from_fields = parts[1:]

    join_parts = join_field.split('|')
    for entry in command.query_result:
        if len(join_parts):
            missing = missing_part(join_parts, entry, key_case = True)
            if missing:
                if sdnsh.description:   # description debugging
                    print "command_join_rest: missing field %s in %s" % (missing, entry)
                continue

            joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
        else:
            # join_field.split('|') is never empty for a non-empty string,
            # so this branch is only reached for a degenerate join_field
            if sdnsh.description:   # description debugging
                print "command_join_rest: joining ", entry, join_field, entry.get(join_field)
            if not join_field in entry:
                continue
            joiner = case_cvt(join_field, entry)

        if sdnsh.description:   # description debugging
            print "command_join_rest: joining ", entry, joiner, key_dict.get(joiner)

        if joiner in key_dict:
            # add all the entries from the key_dict
            if sdnsh.description:   # description debugging
                print 'command_join_rest: ADD', key_dict[joiner]
            if add_field == None:
                add_fields(entry, key_dict[joiner])
            elif from_fields:
                if len(from_fields) == 1:
                    # add a single field
                    if from_fields[0] in key_dict[joiner]:
                        entry[add_field] = key_dict[joiner][from_fields[0]]
                else:
                    # add a dictionary
                    entry[add_field] = dict([[ff, key_dict[joiner][ff]]
                                             for ff in from_fields])
            else:
                entry[add_field] = key_dict[joiner]

    if sdnsh.description:   # description debugging
        print "command_join_rest: ", command.query_result
2018
2019
2020def command_join_table(obj_type, data, key, join_field,
2021 key_value = None, add_field = None, crack = None):
2022 """
2023 Add fieds to the current command.query_result by looking up the entry in
2024 the db/store. key represents the value of the index to use from
2025 the entries read from the database. The key can be composed of
2026 multiple fields within the entry. The join_field is the name
2027 of the field within the command.query_result to use as the value to match
2028 against the key field.
2029
2030 When key_value is None, the matched entry from the join_field's is
2031 treated as a dictionary, and all the pair of name:values are added
2032 directly to the new entry.
2033
2034 When key_value is a field name, the joined entries are collected
2035 as a list, and added to the new entry a the key_value name.
2036 (see the use of tag-mapping as an example)
2037 """
2038 if not hasattr(command, 'query_result'):
2039 if sdnsh.description: # description debugging
2040 print "command_join_table: no entries found"
2041 return
2042
2043 if command.query_result == None:
2044 if sdnsh.description: # description debugging
2045 print "command_join_table: query_result: None"
2046 return
2047
2048 if sdnsh.description: # description debugging
2049 print "command_join_table: %d entries found, using %s, obj_type %s %s %s" % \
2050 (len(command.query_result), data, obj_type, key, join_field)
2051 print "command_join_table:", data, key, join_field
2052
2053 if join_field == None:
2054 return
2055 if key == None:
2056 return
2057
2058 if not mi.obj_type_exists(obj_type):
2059 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
2060
2061 # build the join_dict, which will have keys for the items to
2062 # add into the entries
2063 if not mi.obj_type_has_model(obj_type):
2064 entries = rest_to_model.get_model_from_url(obj_type, data)
2065 else:
2066 entries = sdnsh.get_table_from_store(obj_type)
2067
2068 # determine whether specific field names are added
2069 if add_field:
2070 parts = add_field.split('|')
2071 from_fields = None
2072 if len(parts):
2073 add_field = parts[0]
2074 from_fields = parts[1:]
2075
2076 # constuct the join key for each row from the db table
2077 key_parts = key.split('|') # all the fields needed to make a key
2078 key_dict = {} # resulting key dictionary
2079 for entry in entries:
2080 # see if all the key parts are in the entry
2081 missing = missing_part(key_parts, entry)
2082 if missing:
2083 if sdnsh.description: # description debugging
2084 print "command_join_table: missing field %s in %s" % (missing, entry)
2085 continue
2086
2087 new_key = '|'.join([obj_type_field_case(entry, obj_type, kn) for kn in key_parts])
2088 if sdnsh.description: # description debugging
2089 print 'command_join_table: new-key', new_key, key_value
2090 if key_value:
2091 if not new_key in key_dict:
2092 key_dict[new_key] = [entry]
2093 else:
2094 key_dict[new_key].append(entry)
2095 else:
2096 key_dict[new_key] = entry
2097
2098
2099 # let 'crack' contain the field's name, not a boolean.
2100 if crack and crack == True:
2101 crack = mi.pk(obj_type)
2102
2103 # Using the key-dictinoary, look for matches from the original entries
2104
2105 join_parts = join_field.split('|')
2106 for entry in command.query_result:
2107 if len(join_parts):
2108 missing = missing_part(join_parts, entry, key_case = True)
2109 if missing:
2110 if sdnsh.description: # description debugging
2111 print "command_join_table: missing field %s in %s" % (missing, entry)
2112 continue
2113
2114 joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
2115 else:
2116 if sdnsh.description: # description debugging
2117 print "command_join_table: joining ", entry, join_field, entry.get(join_field)
2118 if not join_field in entry:
2119 continue
2120 joiner = case_cvt(join_field, entry)
2121
2122 if joiner in key_dict:
2123 if crack:
2124 if not crack in key_dict[entry[joiner]]:
2125 if sdnsh.description: # description debugging
2126 print "command_join_table: field %s not in entry" % crack, key_dict[joiner]
2127 else:
2128 mi.split_compound_into_dict(obj_type, crack, key_dict[joiner], True)
2129
2130 # add all the entries from the key_dict
2131 if sdnsh.description: # description debugging
2132 print 'command_join_table: ADD %s as %s ' % (key_dict[joiner], add_field)
2133 if add_field == None:
2134 if key_value:
2135 entry[key_value] = key_dict[joiner]
2136 else:
2137 add_fields(entry, key_dict[joiner])
2138 elif from_fields:
2139 if len(from_fields) == 1:
2140 # add a single field
2141 if type(key_dict[joiner]) == list:
2142 entry[add_field] = [x[from_fields[0]] for x in key_dict[joiner]]
2143 else:
2144 entry[add_field] = key_dict[joiner][from_fields[0]]
2145 else:
2146 # add a dictionary with named fields
2147 if type(key_dict[joiner]) == list:
2148 for item in key_dict[joiner]:
2149 entry[add_field] = dict([[ff, item[ff]]
2150 for ff in from_fields])
2151 else:
2152 entry[add_field] = dict([[ff, key_dict[joiner][ff]]
2153 for ff in from_fields])
2154
2155 else:
2156 entry[add_field] = key_dict[joiner]
2157
2158 if sdnsh.description: # description debugging
2159 print "command_join_table: ", command.query_result
2160
2161
2162def command_display_rest(data, url = None, sort = None, rest_type = None,
2163 table_format = None, title = None, detail = None):
2164 """
2165 Perform a call to the rest api, and format the result.
2166
2167 When sort isn't None, it names a field whose's value are sorted on.
2168 """
2169 #just a hack check to implement decending sorting
2170 descending = False
2171 #raise error.ArgumentValidationError('\n\n\n %s' % (descending))
2172 if sdnsh.description: # description debugging
2173 print "command_display_rest: ", data, url, rest_type, table_format, detail
2174
2175 if not url:
2176 url = data.get('url')
2177 if not table_format:
2178 table_format = data.get('format')
2179
2180 check_single_entry = True
2181
2182 # if url is a list, pick the first one which can be build from the data
2183 select_url = url
2184 if url and type(url) == list:
2185 for u in url:
2186 try:
2187 select_url = (u % data)
2188 select_url = u # select this url from the list
2189 break
2190 except:
2191 pass
2192
2193 if not detail:
2194 detail = data.get('detail', 'default')
2195 url = "http://%s/rest/v1/" % sdnsh.controller + (select_url % data)
2196
2197 result = sdnsh.store.rest_simple_request(url)
2198 check_rest_result(result)
2199 if sdnsh.description: # description debugging
2200 print "command_display_rest: result ", result
2201 entries = json.loads(result)
2202 #rest_type = None
2203 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2204 #if 'realtimestats' in data and data['realtimestats'] == 'group':
2205
2206 entries2 = None
2207
2208
2209 if 'realtimestats' in data and data['realtimestats'] == 'group':
2210 url2 = "http://%s/rest/v1/" % sdnsh.controller + ("realtimestats/groupdesc/%(dpid)s/" % data)
2211 result2 = sdnsh.store.rest_simple_request(url2)
2212 check_rest_result(result2)
2213 if sdnsh.description: # description debugging
2214 print "command_display_rest: groupdesc result ", result2
2215 entries2 = json.loads(result2)
2216
2217 # It certainly seems possible to map from url's to the type associated,
2218 # with the result, but it also makes sense to encode that type information
2219 # into the description
2220 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2221 rest_type =False
2222 if rest_type:
2223 entries = command_display_rest_type_converter(table_format,
2224 rest_type,
2225 data,
2226 entries)
2227 if 'realtimestats' in data and data['realtimestats'] == 'group':
2228 if entries2 is not None:
2229 entries2 = command_display_rest_type_converter(table_format,
2230 rest_type,
2231 data,
2232 entries2)
2233
2234 if 'router' in data and data['router'] == 'router':
2235 combResult = []
2236 for entry in entries:
2237 attributes = entry.get('stringAttributes')
2238 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2239 combResult.append({
2240 'dpid' : entry.get('dpid'),
2241 'routerIP' : attributes['routerIp'],
2242 'name' : attributes['name'],
2243 'isEdgeRouter' : attributes['isEdgeRouter'],
2244 'routerMac' : attributes['routerMac'],
2245 'nodeSId' : attributes['nodeSid'],
2246 },)
2247 entries = combResult
2248 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2249 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'port':
2250 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2251 combResult = []
2252 portList = entries
2253 for port in portList:
2254 portData = port.get("port")
2255 name = portData.get("stringAttributes").get('name')
2256 portNo = portData.get("portNumber") & 0xFFFF # converting to unsigned16int
2257 subnetIp = port.get("subnetIp")
2258 adjacency = str(port.get('adjacency'))
2259 combResult.append({
2260 'name' :name,
2261 'portNo' : portNo,
2262 'subnetIp' : subnetIp,
2263 'adjacency' : adjacency,
2264 })
2265 entries = combResult
2266 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2267 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2268 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2269 combResult = []
2270 adjacencyPairList = entries
2271 for adjacencyPair in adjacencyPairList:
2272 adjacencySid = adjacencyPair.get("adjacencySid")
2273 ports = adjacencyPair.get("ports")
2274 combResult.append({
2275 'adjacencySid' : adjacencySid,
2276 'ports' : ports,
2277 })
2278 entries = combResult
2279 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2280
2281 if 'showtunnel' in data and (data['showtunnel'] == 'tunnel' or data['detail'] == 'details'):
2282 #eraise error.ArgumentValidationError('\n\n\n %s' % (entries))
2283 combResult = []
2284 tunnelList = entries
2285 for tunnel in tunnelList:
2286 labelStackList = (tunnel.get('labelStack'))
2287 labelStackString = str(labelStackList)
2288 labelStackString = remove_unicodes(labelStackString)
2289 #labelStackList = (tunnel.get('labelStack'))
2290 #labelStackString ='['
2291 #for labelSack in labelStackList:
2292 # for label in labelSack:
2293 # labelStackString += (label + ',')
2294 #if labelStackString == '[':
2295 # labelStackString = ''
2296 #else:
2297 # labelStackString = labelStackString[:-1]
2298 # labelStackString += ']'
2299 tunnelId = tunnel.get('tunnelId')
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002300 tunnelsetId = tunnel.get('tunnelsetId')
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002301 tunnelPath = tunnel.get('tunnelPath')
2302 dpidGroup = str(tunnel.get('dpidGroup'))
2303 dpidGroup= remove_unicodes(dpidGroup)
2304 policies = tunnel.get('policies')
2305 combResult.append({
2306 'tunnelId' : tunnelId,
2307 'labelStack' : labelStackString,
2308 'dpidGroup' : dpidGroup,
2309 'tunnelPath' : tunnelPath,
2310 'policies' : policies,
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002311 'tunnelset' : tunnelsetId,
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002312 })
2313 entries = combResult
2314
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002315 if 'showtunnelset' in data and (data['showtunnelset'] == 'tunnelset' or data['detail'] == 'details'):
2316 #eraise error.ArgumentValidationError('\n\n\n %s' % (entries))
2317 combResult = []
2318 tunnelsetList = entries
2319 for tunnelset in tunnelsetList:
2320 tunnelsetId = tunnelset.get('tunnelsetId')
2321 policies = tunnelset.get('policies')
2322 tunnelList = tunnelset.get('constituentTunnels')
2323 for tunnel in tunnelList:
2324 labelStackList = (tunnel.get('labelStack'))
2325 labelStackString = str(labelStackList)
2326 labelStackString = remove_unicodes(labelStackString)
2327 tunnelId = tunnel.get('tunnelId')
2328 tunnelPath = tunnel.get('tunnelPath')
2329 dpidGroup = str(tunnel.get('dpidGroup'))
2330 dpidGroup= remove_unicodes(dpidGroup)
2331 combResult.append({
2332 'tunnelsetId' : tunnelsetId,
2333 'policies' : policies,
2334 'tunnelId' : tunnelId,
2335 'labelStack' : labelStackString,
2336 'dpidGroup' : dpidGroup,
2337 'tunnelPath' : tunnelPath,
2338 'tunnelset' : tunnelsetId,
2339 })
2340 entries = combResult
2341
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002342 if 'showpolicy' in data and data['showpolicy'] == 'policy':
2343 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2344 combResult = []
2345 portList = entries
2346 for policy in portList:
2347 policyId = policy.get("policyId")
2348 policyType = policy.get("policyType")
2349 priority = policy.get("priority")
2350 tunnelId = policy.get('tunnelId')
2351 match = policy.get("match")
2352 dstIpAddress = match.get('dstIpAddress')['value'] if match.get('dstIpAddress') else '*'
2353 dstMacAddress = match.get('dstMacAddress')['value'] if match.get('dstMacAddress') else '*'
2354 dstTcpPortNumber = match.get('dstTcpPortNumber') if match.get('dstTcpPortNumber') else '*'
2355 etherType = ('0x'+ str(match.get('etherType'))) if match.get('etherType') else '*'
2356 ipProtocolNumber = match.get('ipProtocolNumber') if match.get('ipProtocolNumber') else '*'
2357 srcIpAddress = match.get('srcIpAddress')['value'] if match.get('srcIpAddress') else '*'
2358 srcMacAddress = match.get('srcMacAddress')['value'] if match.get('srcMacAddress') else '*'
2359 srcTcpPortNumber = match.get('srcTcpPortNumber') if match.get('srcTcpPortNumber') else '*'
2360 combResult.append({
2361 'policyId' : policyId,
2362 'policyType' : policyType,
2363 'tunnelId' : tunnelId,
2364 'priority' : priority,
2365 'dstIpAddress' : dstIpAddress,
2366 'dstMacAddress' : dstMacAddress,
2367 'dstTcpPortNumber': dstTcpPortNumber,
2368 'etherType' : etherType,
2369 'ipProtocolNumber': ipProtocolNumber,
2370 'srcIpAddress' : srcIpAddress,
2371 'srcMacAddress' : srcMacAddress,
2372 'srcTcpPortNumber': srcTcpPortNumber,
2373
2374 })
2375 entries = combResult
2376
2377 if 'realtimestats' in data and 'tabletype' in data and data['realtimestats'] == 'table':
2378 combResult = []
2379 if data['tabletype'] == 'ip':
2380 #for decending sorting
2381 descending = True
2382 for ipTableEntry in entries:
2383 match = ipTableEntry['match']
2384 networkDestination = '*'
2385 if match :
2386 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2387 #raise error.ArgumentValidationError('\n\n\n %s' % json.tool(entries))
2388 instructions = ipTableEntry['instructions']
2389 actions = str(instructions[0]) if instructions[0] else None
2390 if actions != None:
2391 actions = remove_unicodes(actions)
2392 actions = renameActions(actions)
2393 actions = actions.lower()
2394 else:
2395 actions =''
2396 combResult.append({
2397 'switch' : ipTableEntry['switch'],
2398 'byteCount' : ipTableEntry['byteCount'],
2399 'packetCount' : ipTableEntry['packetCount'],
2400 'priority' : ipTableEntry['priority'],
2401 'cookie' : ipTableEntry['cookie'],
2402 'durationSeconds' : ipTableEntry['durationSec'],
2403 'networkDestination' : networkDestination,
2404 'actions' : actions,
2405 })
2406 elif data['tabletype'] == 'mpls':
2407 for ipTableEntry in entries:
2408 match = ipTableEntry['match']
2409 mplsTc = '*'
2410 mplsLabel = '*'
2411 mplsBos = '*'
2412 if match :
2413 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2414 mplsLabel = match.get('mplsLabel') if match.get('mplsLabel') else '*'
2415 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2416 instructions = ipTableEntry['instructions']
2417 #raise error.ArgumentValidationError('\n\n\n %s' %len(actions))
2418 actions = str(instructions[0])if instructions[0] else None
2419 if actions != None:
2420 actions = remove_unicodes(actions)
2421 actions = renameActions(actions)
2422 actions = actions.lower()
2423 else:
2424 actions =''
2425 combResult.append({
2426 'switch' : ipTableEntry['switch'],
2427 'byteCount' : ipTableEntry['byteCount'],
2428 'packetCount' : ipTableEntry['packetCount'],
2429 'cookie' : ipTableEntry['cookie'],
2430 'priority' : ipTableEntry['priority'],
2431 'mplsTc' : mplsTc,
2432 'mplsLabel' : mplsLabel,
2433 'mplsBos' : mplsBos,
2434 'durationSeconds' : ipTableEntry['durationSec'],
2435 'actions' : actions
2436 })
2437 elif data['tabletype'] == 'acl':
2438 descending = True
2439 for ipTableEntry in entries:
2440 match = ipTableEntry['match']
2441 networkDestination ='*'
2442 networkProtocol = '*'
2443 networkSource = '*'
2444 mplsTc = '*'
2445 mplsLabel = '*'
2446 mplsBos = '*'
2447 transportDestination = '*'
2448 inputPort = '*'
2449 transportSource = '*'
2450 dataLayerSource = '*'
2451 dataLayerDestination = '*'
2452 dataLayerType = '*'
2453 if match :
2454 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2455 networkProtocol = match.get('networkProtocol') if match.get('networkProtocol') else '*'
2456 networkSource = match.get('networkSource') if match.get('networkSource') else '*'
2457 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2458 mplsLabel = match.get('mplsLabel')if match.get('mplsLabel') else '*'
2459 transportDestination = match.get('transportDestination') if match.get('transportDestination') else '*'
2460 transportSource = match.get('transportSource') if match.get('transportSource') else '*'
2461 inputPort = match.get('inputPort') if match.get('inputPort') else '*'
2462 dataLayerSource = match.get('dataLayerSource') if match.get('dataLayerSource') else '*'
2463 dataLayerDestination = match.get('dataLayerDestination') if match.get('dataLayerDestination') else '*'
2464 dataLayerType= match.get('dataLayerType') if match.get('dataLayerType') else '*'
2465 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2466 instructions = ipTableEntry['instructions']
2467 actions = str(instructions[0])if instructions[0] else None
2468 if actions != None:
2469 actions = remove_unicodes(actions)
2470 actions = renameActions(actions)
2471 actions = actions.lower()
2472 else:
2473 actions = ''
2474 combResult.append({
2475 'switch' : ipTableEntry['switch'],
2476 'byteCount' : ipTableEntry['byteCount'],
2477 'packetCount' : ipTableEntry['packetCount'],
2478 'cookie' : ipTableEntry['cookie'],
2479 'priority' : ipTableEntry['priority'],
2480 'inputPort' : inputPort,
2481 'durationSeconds' : ipTableEntry['durationSec'],
2482 'networkSource' : networkSource,
2483 'networkDestination' : networkDestination,
2484 'networkProtocol' : networkProtocol,
2485 'dataLayerType' : dataLayerType,
2486 'dataLayerSource' : dataLayerSource,
2487 'dataLayerDestination' : dataLayerDestination,
2488 'mplsTc' : mplsTc,
2489 'mplsLabel' : mplsLabel,
2490 'mplsBos' : mplsBos,
2491 'transportDestination' : transportDestination,
2492 'transportSource' : transportSource,
2493 'actions' : actions
2494 })
2495 entries = combResult
2496
2497 if 'realtimestats' in data and data['realtimestats'] == 'group':
2498 combResult = []
2499 for groupStatEntry in entries:
2500 groupId = groupStatEntry["groupId"]
2501 groupDescEntry = None
2502 for entry in entries2:
2503 if groupId == entry["groupId"]:
2504 groupDescEntry = entry
2505 break
2506 if groupDescEntry is '':
2507 print "command_display_rest: missing group desc for group id %s" % (groupId)
2508 continue
2509
2510 if (len(groupStatEntry['bucketStats']) > 0):
2511 for bucketId in range(len(groupStatEntry['bucketStats'])):
2512 setsrcmac = ''
2513 if 'SET_DL_SRC' in groupDescEntry['bucketsActions'][bucketId]:
2514 setsrcmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_SRC']
2515 setdstmac = ''
2516 if 'SET_DL_DST' in groupDescEntry['bucketsActions'][bucketId]:
2517 setdstmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_DST']
2518 pushmpls = ''
2519 if 'PUSH_MPLS_LABEL' in groupDescEntry['bucketsActions'][bucketId]:
2520 pushmpls = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_LABEL']
2521 popmpls = ''
2522 if 'POP_MPLS' in groupDescEntry['bucketsActions'][bucketId]:
2523 popmpls = groupDescEntry['bucketsActions'][bucketId]['POP_MPLS']
2524 outport = ''
2525 if 'OUTPUT' in groupDescEntry['bucketsActions'][bucketId]:
2526 outport = groupDescEntry['bucketsActions'][bucketId]['OUTPUT']
2527 goToGroup = ''
2528 if 'goToGroup' in groupDescEntry['bucketsActions'][bucketId]:
2529 goToGroup = groupDescEntry['bucketsActions'][bucketId]['goToGroup']
2530 setBos= ''
2531 if 'PUSH_MPLS_BOS' in groupDescEntry['bucketsActions'][bucketId]:
2532 setBos = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_BOS']
2533 COPY_TTL_IN= ''
2534 if 'COPY_TTL_IN' in groupDescEntry['bucketsActions'][bucketId]:
2535 COPY_TTL_IN = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_IN']
2536 COPY_TTL_OUT= ''
2537 if 'COPY_TTL_OUT' in groupDescEntry['bucketsActions'][bucketId]:
2538 COPY_TTL_OUT = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_OUT']
2539 DEC_MPLS_TTL= ''
2540 if 'DEC_MPLS_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2541 DEC_MPLS_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_MPLS_TTL']
2542 DEC_NW_TTL= ''
2543 if 'DEC_NW_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2544 DEC_NW_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_NW_TTL']
2545
2546 combResult.append({
2547 'groupid' : groupId,
2548 'grouptype' : groupDescEntry['groupType'],
2549 'totalpktcnt' : groupStatEntry['packetCount'],
2550 'totalbytecnt' : groupStatEntry['byteCount'],
2551 'bucketpktcnt' : groupStatEntry['bucketStats'][bucketId]['pktCount'],
2552 'bucketbytecnt' : groupStatEntry['bucketStats'][bucketId]['byteCount'],
2553 'setsrcmac' : setsrcmac,
2554 'setdstmac' : setdstmac,
2555 'pushMplsLabel' : pushmpls,
2556 'popmpls' : popmpls,
2557 'outport' : outport,
2558 'goToGroup' : goToGroup,
2559 'setBos' : setBos,
2560 'COPY_TTL_IN' : COPY_TTL_IN,
2561 'COPY_TTL_OUT' : COPY_TTL_OUT,
2562 'DEC_MPLS_TTL' : DEC_MPLS_TTL,
2563 'DEC_NW_TTL' : DEC_NW_TTL,
2564 })
2565 else:
2566 combResult.append({
2567 'groupid' : groupId,
2568 'grouptype' : groupDescEntry['groupType'],
2569 'totalpktcnt' : groupStatEntry['packetCount'],
2570 'totalbytecnt' : groupStatEntry['byteCount'],
2571 'bucketpktcnt' : '',
2572 'bucketbytecnt' : '',
2573 'setsrcmac' : '',
2574 'setdstmac' : '',
2575 'pushMplsLabel' : '',
2576 'popmpls' : '',
2577 'outport' : '',
2578 'goToGroup' : '',
2579 'setBos' : '',
2580 'COPY_TTL_IN' : '',
2581 'COPY_TTL_OUT' : '',
2582 'DEC_MPLS_TTL' : '',
2583 'DEC_NW_TTL' : '',
2584 })
2585 entries = combResult
2586 #
2587 if format:
2588 #
2589 detail = command_display_rest_join_entries(table_format, data, entries, detail)
2590 #if 'realtimestats' in data and data['realtimestats'] == 'flow':
2591 # entries = sdnsh.fix_realtime_flows(entries)
2592 # check_single_entry = False
2593
2594 if 'realtimestats' in data and data['realtimestats'] == 'features':
2595 for entry in entries:
2596 entry['stp-state'] = entry['state']
2597
2598 # update any of the pretty-printer tables based on the table_format (obj_type)
2599 obj_type_show_alias_update(table_format % data)
2600
2601 if check_single_entry and entries and len(entries) == 1 and detail == 'details':
2602 return sdnsh.pp.format_entry(entries[0],
2603 table_format % data,
2604 detail,
2605 sdnsh.debug)
2606 if sort:
2607 if descending:
2608 reverse = True
2609 else:
2610 reverse = False
2611 def sort_cmp(x,y):
2612 for f in sort:
2613 if f in x:
2614 c = cmp(x.get(f), y.get(f))
2615 if c != 0:
2616 return c
2617 return 0
2618 entries = sorted(entries, cmp=sort_cmp, reverse=reverse )
2619 if 'realtimestats' in data and data['realtimestats'] == 'group':
2620 repeatGroupId = -1
2621 length = len(entries)
2622 for i in range(0, length):
2623 entry = entries[i]
2624 groupId = entry.get('groupid')
2625 if groupId == repeatGroupId:
2626 entries[i]['groupid'] = ''
2627 else:
2628 repeatGroupId = groupId
2629
2630 display = sdnsh.pp.format_table(entries, table_format % data, detail)
2631 else:
2632 display = entries
2633
2634 if title:
2635 return title + display
2636 return display
2637
2638
2639def command_crack(field):
2640 """
2641 Part of the show pipeline, split is typically used with rest api's
2642 not associated with the model (database), since the cli has enough
2643 details of the relationships between model fields to understand
2644 which of the fields has a compound key value, and has options to
2645 crack those into component parts.
2646
2647 The operation is called 'crack' (not split), since the other
2648 options for some of the actions is called 'crack'
2649
2650 The field identifies the name of the field in the entry to
2651 split into parts, and the remaining '|' separated fields list
2652 the labels to associate in the result from each of the
2653 split components. Currently, the 'crack' character is '|',
2654 although this could be parameterized.
2655 """
2656 if sdnsh.description: # description debugging
2657 print "command_split: ", field
2658
2659 if hasattr(command, 'query_result'):
2660 entries = command.query_result
2661 if command.query_result == None:
2662 entries = []
2663 else:
2664 if sdnsh.description: # description debugging
2665 print "command_join_table: no entries found"
2666 entries = []
2667
2668 parts = field.split('|')
2669 if len(parts) == 0:
2670 if sdnsh.description: # description debugging
2671 print "command_join_table: field doesn't contain labels" \
2672 " use field|label1|label2|..."
2673 return
2674
2675 field = parts[0]
2676 label = parts[1:]
2677 many = len(label)
2678
2679 for entry in entries:
2680 if field in entry:
2681 parts = entry[field].split('|')
2682 if len(parts) and many >= len(parts) :
2683 # use enumerate to create a tuple for each item in parts,
2684 # assocaiting an index, which can be used to identify the
2685 # label to use for each of the elements; from that create
2686 # a dictionay, which is then used to update the entry
2687 entry.update(dict([[label[n],p] for (n,p) in enumerate(parts)]))
2688
2689
2690def command_display(data, table_format, detail = 'default', sort = None, title = None):
2691
2692 if sdnsh.description: # description debugging
2693 print "command_display: ", data, table_format, detail
2694
2695 if 'detail' in data:
2696 detail = data['detail']
2697
2698 if hasattr(command, 'query_result'):
2699 entries = command.query_result
2700 if command.query_result == None:
2701 entries = []
2702 else:
2703 if sdnsh.description: # description debugging
2704 print "command_join_table: no entries found"
2705 entries = []
2706
2707 if sdnsh.description: # description debugging
2708 print "command_display: #entries ", len(entries)
2709
2710 # XXX controller-node has an odd url, join-rest needs to be able to
2711 # be handed a complete url, and replace the ip address with the controller's
2712 # ip address.
2713 detail = command_display_table_join_entries(table_format, data, entries, detail)
2714
2715 # update any of the pretty-printer tables based on the table_format (obj_type)
2716 obj_type_show_alias_update(table_format)
2717
2718 # with_key manages whether a 'detail' or table is displayed.
2719 with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'
2720
2721 #
2722 if sort:
2723 def sort_cmp(x,y):
2724 for f in sort:
2725 if f in x:
2726 c = utif.trailing_integer_cmp(x.get(f),y.get(f))
2727 if c:
2728 return c
2729 return 0
2730 entries = sorted(entries, cmp=sort_cmp)
2731
2732 # use display_obj_type_rows since it (currently) joins fields for obj_types.
2733 display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)
2734
2735 if title:
2736 return title + display
2737 return display
2738
2739
2740def command_legacy_cli(obj_type, data, detail = 'default', scoped = None, sort = None):
2741 """
2742 Unfortunatly, the command descriptions don't have enough different
2743 detail to describe how to join specific distinct fields. In the future,
2744 there will be rest api's for each of the cli requests; that should cause
2745 this trampoline code to become obsolete.
2746 """
2747
2748 if sdnsh.description: # description debugging
2749 print "command_legacy_cli: ", obj_type, data, detail, scoped, sort
2750
2751 # update any of the pretty-printer tables based on the obj_type
2752 obj_type_show_alias_update(obj_type)
2753
2754 #
2755 #
2756 # Various show command 'join' data to create a table not
2757 # directly available in the REST API, someday in the future,
2758 # these joins will be directly implemented in the REST API,
2759 # but these special cases still exist:
2760 #
2761 if 'running-config' in data:
2762 result = sdnsh.error_msg("No running-config choice")
2763 words = []
2764 if 'word' in data and data['word'] != 'all':
2765 words = [data['word']]
2766
2767 if data['running-config'] == 'running-config':
2768 # 'show vns XXX running-config'
2769 if 'vnsname' in data and data['vnsname'] != 'all':
2770 return sdnsh.show_vns_running_config(data['vnsname'],data['tenant'])
2771 elif 'vns' in data and data['vns']=='all':
2772 data['running-config'] = 'vns'
2773 elif 'tenant' in data:
2774 data['running-config']='tenant'
2775 words=[data['tenant']]
2776 if data['running-config'] in run_config.registry_items_enabled():
2777 result = run_config.perform_running_config(data['running-config'], sdnsh, config, words)
2778
2779 if result:
2780 return result
2781 return ''.join(config)
2782
2783 if obj_type == 'running-config':
2784 return run_config.implement_show_running_config([])
2785
2786 if obj_type == 'vns-interface':
2787 if scoped:
2788 # should check for missing 'id' in data
2789 data['vns'] = sdnsh.get_current_mode_obj()
2790
2791 if 'vns' in data:
2792 if data['vns'] == 'all':
2793 return sdnsh.display_vns_interface(None, {}, '<no_key>')
2794 vns_name=data['vns']
2795 return sdnsh.display_vns_interface(vns_name, {'vns': vns_name },
2796 '<no_key>', detail = 'scoped')
2797
2798 if obj_type == 'vns-switch-ports':
2799 if 'vns' in data:
2800 return sdnsh.show_vns_switch_ports([data['vns']])
2801 return sdnsh.show_vns_switch_ports([])
2802
2803 if obj_type == 'switch-ports-vns':
2804 if 'dpid' in data:
2805 return sdnsh.show_switch_ports_vns([data['dpid']])
2806 return sdnsh.show_switch_ports_vns([])
2807
2808 if obj_type == 'switch-interfaces':
2809 key = mi.pk(obj_type)
2810 if scoped:
2811 data['dpid'] = sdnsh.get_current_mode_obj()
2812
2813 # in legacy_cli to join the switch-interfaces with port stats
2814 port_obj = 'port'
2815 entries = sdnsh.show_sort_obj_type(obj_type,
2816 command_query_object(port_obj, data, scoped, sort))
2817
2818 # switch-interfaces is really class Port, and the primary key
2819 # is '#|switch|number, not name.
2820
2821 entries_dict = dict([['%s|%s' % (x['switch'], x['name']), x] for x in entries])
2822 # collect switch-interface-config
2823 sic = 'switch-interface-config'
2824 if 'dpid' in data and data['dpid'] != 'all':
2825 sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
2826 else:
2827 sic_dict = create_obj_type_dict(sic, mi.pk(sic))
2828
2829 # add switch-interface-config names when missing
2830 for (sic_id, sic_value) in sic_dict.items():
2831 if not sic_id in entries_dict:
2832 # add 'state' to this item for prettyprinting column width computation
2833 for sv in sic_value:
2834 sv['state'] = ''
2835 entries += sic_value
2836
2837 # collect the stats for the interfaces
2838 stats_url = 'realtimestats/port/%(dpid)s/' % data
2839 url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
2840 try:
2841 result = sdnsh.store.rest_simple_request(url)
2842 check_rest_result(result)
2843 stats = json.loads(result)
2844
2845 except Exception, e:
2846 stats = {}
2847
2848 # join realtimestats
2849 for entry in entries:
2850 if 'state' in entry:
2851 entry['stp-state'] = entry['state']
2852 stats_list = stats.get(entry['switch'])
2853 # Note, 'number' may be missing from entry if the switch
2854 # matches for switch-interface-config but the interface name
2855 # doesn't show up.
2856 if stats_list and 'number' in entry:
2857 ifn = entry['number']
2858 # Notice that the realtime stat's use a int for the 2^16 value here
2859 # The & 0xffff converts the "-x" to a positive 2^16 value
2860 item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
2861 if len(item) == 1:
2862 entry.update(item[0])
2863 if entry['id'] in sic_dict:
2864 entry.update(sic_dict[entry['id']][0])
2865
2866 # Update the alias mappings for display
2867 obj_type_show_alias_update(obj_type)
2868
2869 return sdnsh.pp.format_table(entries, obj_type, detail)
2870
2871 if obj_type == 'tunnel-interfaces':
2872 # Use the active tunnels to identify the interfaces on the
2873 # switches which are the tunneling interfaces, with that
2874 # collect to port -> if_name mappings from 'port', then
2875 # find all the switches interfaces, convert those port numbers to
2876 # if names, to collect only tunneling interfaces. Now collect
2877 # realtimestats for the switch's ports, and associate those
2878 # stats with any filtered interfaces, finally display the result
2879 tunnel_url = "tunnel-manager/%(dpid)s" % data
2880 url = "http://%s/rest/v1/" % sdnsh.controller + tunnel_url
2881 result = sdnsh.store.rest_simple_request(url)
2882 check_rest_result(result)
2883 tunnels = json.loads(result)
2884
2885 # use the active tunnels to
2886 # collect dpid's, convert the remote ip's to interface names.
2887 tunnel_ports = {}
2888 for t in tunnels:
2889 quad = t['tunnelPorts'].split('.')
2890 if_name = "vta%03d%03d%03d%03d" % (int(quad[0]), int(quad[1]),
2891 int(quad[2]), int(quad[3]))
2892 key = "%s|%s" % (t['dpid'], if_name)
2893 if not key in tunnel_ports:
2894 tunnel_ports[key] = {t['dpid']: t['tunnelPorts']}
2895
2896 # Collect interfaces on associated switch
2897 port_obj = 'port'
2898 entries = sdnsh.show_sort_obj_type(port_obj,
2899 command_query_object(port_obj, data, scoped, sort))
2900 # Associate port names with interface names
2901 port_to_if_name = {}
2902
2903 try:
2904 ports = sdnsh.get_table_from_store("port")
2905 except Exception, e:
2906 port = []
2907
2908 for port in ports:
2909 key_string = '%s|%s' % (port['switch'], port['number'])
2910 port_to_if_name[key_string] = port['name']
2911
2912 # Filter elements, 'filtered' only contains active tunnel interfaces
2913 filtered = []
2914 for e in entries:
2915 e['ifname'] = port_to_if_name[e['id']]
2916 key = '%s|%s' % (e['switch'], e['ifname'])
2917 if sdnsh.description: # description debugging
2918 print command._line(), key
2919 if key in tunnel_ports:
2920 if sdnsh.description: # description debugging
2921 print command._line(), "Found ", e['id']
2922 filtered.append(e)
2923 entries = filtered
2924
2925 # collect switch-interface-config
2926 sic = 'switch-interface-config'
2927 if 'dpid' in data:
2928 sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
2929 else:
2930 sic_dict = create_obj_type_dict(sic, mi.pk(sic))
2931
2932 # collect the stats for the interfaces
2933 stats_url = 'realtimestats/port/%(dpid)s/' % data
2934 url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
2935 try:
2936 result = sdnsh.store.rest_simple_request(url)
2937 check_rest_result(result)
2938 stats = json.loads(result)
2939 except Exception, e:
2940 stats = {}
2941
2942 # join realtimestats
2943 for entry in entries:
2944 if 'state' in entry:
2945 entry['stp-state'] = entry['state']
2946 stats_list = stats.get(entry['switch'])
2947 if stats_list and 'number' in entry:
2948 ifn = entry['number']
2949 # Notice that the realtime stat's use a int for the 2^16 value here
2950 # The & 0xffff converts the "-x" to a positive 2^16 value
2951 item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
2952 if len(item) == 1:
2953 entry.update(item[0])
2954 if entry['id'] in sic_dict:
2955 entry.update(sic_dict[entry['id']][0])
2956
2957 obj_type_show_alias_update('switch-interfaces')
2958
2959 return sdnsh.pp.format_table(entries, 'switch-interfaces', detail)
2960
2961 if obj_type == 'host-vns-interface-vns':
2962 words = []
2963 for w in []: # list of options to display_vns_mac_address_table
2964 if w in data:
2965 words[w] = data[w]
2966
2967 return sdnsh.display_vns_mac_address_table(data['vns'], words)
2968
2969 if obj_type == 'config':
2970 if 'config' in data:
2971 if 'version' in data:
2972 return sdnsh.implement_show_config([data['config'],data['version']])
2973 return sdnsh.implement_show_config([data['config']])
2974
2975 if 'config-diff' in data:
2976 if 'version' in data:
2977 return sdnsh.implement_show_config([ data['first'],
2978 'diff',
2979 data['second'],
2980 data['version']])
2981 return sdnsh.implement_show_config([data['first'],
2982 'diff',
2983 data['second'], ])
2984 return sdnsh.implement_show_config([])
2985
2986 if obj_type == 'vns-flow':
2987 if 'detail' in data:
2988 return sdnsh.show_vns_flow_annotated([data['vns'],
2989 'flow',
2990 data['detail']])
2991 return sdnsh.show_vns_flow_annotated([data['vns'], 'flow'])
2992
2993 if obj_type == 'tech-support':
2994 return sdnsh.do_show_tech_support([])
2995
2996 if obj_type == 'config-file':
2997 if 'file' in data:
2998 return sdnsh.implement_show_config_file(['config-file', data['config']])
2999 return sdnsh.implement_show_config_file(['config-file', ])
3000
3001 if obj_type == 'logging':
3002 if 'log-name' in data:
3003 return sdnsh.implement_show_logging([data['log-name']])
3004 return sdnsh.implement_show_logging([])
3005
3006 if obj_type == 'event-history':
3007 if 'count' in data:
3008 return sdnsh.do_show_event_history([data['event'],
3009 'last',
3010 str(data['count'])])
3011 return sdnsh.do_show_event_history([data['event']])
3012
3013 if obj_type == 'flow-cache':
3014 words = []
3015 if 'counters' in data:
3016 words.append('counters')
3017 elif 'application' in data:
3018 words.append('app')
3019 words.append(data['application'])
3020 words.append('app-instance')
3021 words.append(data['instance'])
3022
3023 return sdnsh.do_show_flow_cache(words)
3024
3025 if obj_type in ['controller-stats', 'switch-stats']:
3026 #
3027 # data['id'] is the name of the controller
3028 helper_item = obj_type.replace('-stats','')
3029 if helper_item == 'controller':
3030 helper_item = 'controller-node'
3031 key = mi.pk(helper_item)
3032 words = [helper_item, data[key], 'stats']
3033 if 'stats-type' in data:
3034 words.append(data['stats-type'])
3035 for (n,v) in data.items():
3036 if not n in [key, 'stats', 'stats-type']:
3037 words.append(n)
3038 words.append(v)
3039 return sdnsh.helper_show_object_stats(words)
3040
3041 if obj_type == 'switch-tcpdump':
3042 words = ['trace', data['dpid']]
3043 for (n,v) in data.items():
3044 if not n in ['tcpdump', 'dpid']:
3045 words.append(n)
3046 return sdnsh.do_trace(words)
3047
3048 if obj_type == 'copy':
3049 words = [data['source']]
3050 if 'dest' in data:
3051 words.append(data['dest'])
3052 return sdnsh.implement_copy(words)
3053
3054 if obj_type == 'write':
3055 return sdnsh.implement_write([data['target']])
3056
3057 if obj_type == 'this':
3058 obj_type = sdnsh.get_current_mode_obj_type()
3059 show_this = mi.obj_type_show_this(obj_type)
3060 if not show_this:
3061 return sdnsh.do_show_object(['this'])
3062 result = []
3063 for show in show_this:
3064 if type(show) is list and len(show) >= 3:
3065 # [ object, format, detail ]
3066 if len(result) > 0:
3067 result.append(mi.obj_type_show_title(show[0]))
3068 sort = None
3069 if len(show) > 3:
3070 sort = show[3]
3071 result.append(command_display_table(show[0], {},
3072 table_format = show[1],
3073 detail = show[2],
3074 sort = sort,
3075 scoped = True))
3076 elif type(show) is list and len(show) == 2:
3077 # [ object, detail ]
3078 if len(result) > 0:
3079 result.append(mi.obj_type_show_title(show[0]))
3080 result.append(command_display_table(show[0], {}, detail = show[1], scoped = True))
3081 else:
3082 result.append(sdnsh.do_show_object([show]))
3083 return '\n'.join(result)
3084
3085 if obj_type == 'version':
3086 return sdnsh.do_show_version([])
3087
3088 if obj_type == 'reload':
3089 return sdnsh.implement_reload()
3090
3091 if obj_type == 'test-command':
3092 if data['test-type'] == 'packet-in':
3093 return sdnsh.implement_test_packet_in(data)
3094 if data['test-type'] == 'path':
3095 return sdnsh.implement_test_path(data)
3096
3097 print 'command_legacy_cli: obj-type unknown: ', obj_type
3098
3099
def command_legacy_cli_no(obj_type, data, detail = 'default', scoped = None, sort = None):
    """
    Trampoline for 'no <command>' forms still handled by the legacy
    cli code rather than by command descriptions.
    """
    if obj_type != 'tag-mapping':
        return None
    return sdnsh.implement_no_tag(['tag', data['tag']])
3106
3107
3108def command_version(data):
3109 """
3110 The version command will later manage changing the syntax to match
3111 the requested version.
3112 """
3113 new_version = data.get('version')
3114 if new_version == None:
3115 return
3116
3117 version = new_version # save for error message
3118 new_version = sdnsh.desc_version_to_path_elem(new_version)
3119
3120 # skip version change is this is the current version.
3121 if sdnsh.desc_version == new_version:
3122 return
3123
3124 # see if the requested version exists
3125 if not sdnsh.command_packages_exists(new_version):
3126 print 'No command description group for version %s' % version
3127 return
3128
3129 # run 'env [envriron_vars] ... cli.py'
3130 command = ['env']
3131 command.append('CLI_COMMAND_VERSION=%s' % version)
3132 command.append('CLI_STARTING_MODE=config')
3133 if os.path.exists('/opt/sdnplatform/cli/bin/cli'):
3134 # controller VM
3135 command.append('/opt/sdnplatform/cli/bin/cli --init')
3136 else:
3137 # developer setup
3138 base = os.path.dirname(__file__)
3139 command.append(os.path.join(base, 'cli.py'))
3140 command.append('--init')
3141
3142 # dump the command descriptions, and read a new set.
3143 # open a subshell with a new command version
3144 subprocess.call(command, cwd=os.environ.get("HOME"))
3145
3146 return
3147
3148
def command_clearterm():
    """
    Clear the console by asking the terminal to reinitialize itself
    (runs the 'reset' program).
    """
    subprocess.call("reset")
3154
def command_display_cli(data):
    """
    Display various cli details
    (this may need to be re-factored into some general "internal" state show)

    @param data dict, unused here; present for action-signature compatibility
    @return string, a formatted 'cli' summary entry followed by the
            command submode transition table
    """
    debug = []
    if sdnsh.debug:
        debug.append('debug')
    if sdnsh.debug_backtrace:
        debug.append('backtrace')

    modes = sdnsh.command_dict.keys() + sdnsh.command_nested_dict.keys()

    entry = {
               'version' : ', '.join(command.command_syntax_version.keys()),
               'desc'    : ', '.join(sorted(command.command_added_modules.keys())),
               'format'  : ', '.join(sorted(sdnsh.pp.format_added_modules.keys())),
               'modes'   : ', '.join(sorted(utif.unique_list_from_list(modes))),
               'debug'   : ', '.join(debug),
            }
    basic = sdnsh.pp.format_entry(entry, 'cli')

    mode_entries = command.command_submode_dictionary(modes)
    mode_table = sdnsh.pp.format_table(mode_entries, 'cli-modes')

    # (dead code removed: an unreachable bare 'return' followed this return)
    return basic + '\n\nCommand Submode Transition\n' + mode_table
3183
3184
def delete_alias_by_id(alias_obj_type, alias_value):
    """
    Common delete operation for alias, based on primary key

    Cascades: rows in other tables whose foreign keys reference this
    alias row are deleted first, then the alias row itself.

    @param alias_obj_type string, name of table where single entry is removed
    @param alias_value string, value of primary key to delete
    """
    xref = mi.foreign_key_xref.get(alias_obj_type)
    if xref:
        # look for any references to this alias_value.  Since this
        # is an alias table, only the pk ought to exist in the xref.
        # When the alias is getting removed, any references to it
        # via foreign keys must also get removed.
        if len(xref) > 1 or not mi.pk(alias_obj_type) in xref:
            print 'Internal Inconsistency'
        else:
            for (fk_obj_type, fk_field) in xref[mi.pk(alias_obj_type)]:
                rows = sdnsh.get_table_from_store(fk_obj_type,
                                                  fk_field,
                                                  alias_value,
                                                  'exact')
                # remove each referencing row before the alias row itself
                for row in rows:
                    sdnsh.rest_delete_object(fk_obj_type, row[mi.pk(fk_obj_type)])
    # finally remove the alias row
    sdnsh.rest_delete_object(alias_obj_type, alias_value)
3209
3210
def delete_alias_by_fk(alias_obj_type, foreign_key):
    """
    Common delete operation for alias, by foreign key

    @param alias_obj_type string, name of table where entries are removed
    @param foreign_key string, foreign key value; every alias row whose
           alias field matches it is deleted
    @raise error.CommandInternalError when the alias rows can't be fetched
    """
    # find all the id's based on the foreign key, then delete them all.
    # note: see similar midw alias_lookup_with_foreign_key()

    foreign_field = mi.alias_obj_type_field(alias_obj_type)
    try:
        rows = sdnsh.get_table_from_store(alias_obj_type,
                                          foreign_field,
                                          foreign_key,
                                          "exact")
    except Exception, e:
        raise error.CommandInternalError("Can't fetch %s:%s" %
                                         (foreign_field, foreign_key))
    pk = mi.pk(alias_obj_type)
    # delete through delete_alias_by_id so xref'd rows are cleaned up too
    for row in rows:
        delete_alias_by_id(alias_obj_type, row[pk])
3233
3234
3235def command_delete_alias(obj_type, data):
3236 """
3237 Action for delete-alias
3238
3239 A single row is deleted from an alias table.
3240 Current alias tables include host-alias, switch-alias, port-alias
3241
3242 @param obj_type string, name of alias table to manage
3243 @param data dict, collection of field:value's from command description
3244 """
3245 if sdnsh.description: # description debugging
3246 print "command_delete_alias: ", obj_type, data
3247
3248 parent_id = sdnsh.get_current_mode_obj()
3249
3250 key = mi.pk(obj_type)
3251 if key not in data:
3252 delete_alias_by_fk(obj_type, parent_id)
3253 else:
3254 delete_alias_by_id(obj_type, data[key])
3255
3256
3257def command_create_alias(obj_type, data, reserved = None, fail_if_exists = False):
3258 """
3259 Action for create-alias
3260
3261 Current alias tables include host-alias, switch-alias, port-alias
3262
3263 @param obj_type string, name of alias table to manage
3264 @param data dict, collection of field:value's from the command description
3265 """
3266 if sdnsh.description: # description debugging
3267 print "command_create_alias: ", obj_type, data, reserved, fail_if_exists
3268
3269 parent_obj_type = sdnsh.get_current_mode_obj_type()
3270 parent_id = sdnsh.get_current_mode_obj()
3271
3272 key = mi.pk(obj_type)
3273 if key not in data:
3274 raise error.CommandInternalError("Alias table '%s': description "
3275 "doesn't populate correct '%s' field as data" %
3276 (obj_type, key))
3277 alias_value = data[key]
3278 #
3279 # Determine if the alias name is allowed.
3280 if alias_value in sdnsh.reserved_words:
3281 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3282 % (alias_value, ','.join(sdnsh.reserved_words)))
3283 if reserved and type(reserved) != list:
3284 reserved = [reserved]
3285
3286 if reserved and alias_value in reserved:
3287 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3288 % (alias_value, ','.join(reserved)))
3289
3290 # Walk the foreign key's in the (alias) obj-type, looking
3291 # for the parent reference.
3292
3293 alias_fk = None
3294 obj_type_foreign_keys = mi.obj_type_foreign_keys(obj_type)
3295 if len(obj_type_foreign_keys) == 1:
3296 alias_fk = obj_type_foreign_keys[0]
3297 else:
3298 for alias_fn in obj_type_foreign_keys:
3299 (fk_ot, fk_fn) = mi.foreign_key_references(obj_type, alias_fn)
3300 if fk_ot == parent_obj_type:
3301 alias_fk = alias_fn
3302
3303 if not alias_fk:
3304 raise error.CommandInternalError("Alias table '%s' has no foreign key to '%s'" %
3305 (obj_type, parent_obj_type))
3306
3307 try:
3308 sdnsh.get_object_from_store(obj_type, alias_value)
3309 if sdnsh.description: # description debugging
3310 print "command_create_alias: delete ", obj_type, alias_value
3311 if fail_if_exists:
3312 raise error.ArgumentValidationError("Interface name '%s' already in use - cannot reassign" %(alias_value))
3313 delete_alias_by_id(obj_type, alias_value)
3314 except:
3315 pass
3316
3317 # Remove other existing alias for the same foreign key
3318 # (ie: only one alias per each item, this could be relaxed)
3319 # XXX improve method of managing errors here
3320 try:
3321 rows = sdnsh.get_table_from_store(obj_type,
3322 alias_fk,
3323 parent_id,
3324 "exact")
3325 except Exception, e:
3326 errors = sdnsh.rest_error_to_dict(e)
3327 print sdnsh.rest_error_dict_to_message(errors)
3328 rows = []
3329
3330 for row in rows:
3331 try:
3332 delete_alias_by_id(obj_type, row[key])
3333 if row[alias_fk] != parent_id:
3334 sdnsh.warning("Removed additional alias '%s'"
3335 ", also refers to %s '%s'" %
3336 (row[key], parent_obj_type, parent_id))
3337 except:
3338 if sdnsh.debug or sdnsh.debug_backtrace:
3339 traceback.print_exc()
3340
3341 # This set's the foreign key to allow the create to succeed
3342 c_dict = {
3343 key : alias_value,
3344 alias_fk : parent_id,
3345 }
3346
3347 if sdnsh.description: # description debugging
3348 print "command_create_alias: create ", obj_type, c_dict
3349 result = sdnsh.rest_create_object(obj_type, c_dict)
3350 check_rest_result(result)
3351 result = sdnsh.rest_query_objects(obj_type, c_dict)
3352 check_rest_result(result)
3353
3354 return None
3355
3356
def command_create_tag(obj_type, data):
    """
    obj_type needs to be one of the objects which implements
    a relationship to 'tag', for example: tag-mac-mapping

    Creates (or marks persist=True on) the 'tag' row identified by
    data['tag'] ('[namespace.]name=value'), then creates the obj_type
    mapping row linking that tag to the current submode object.

    @param obj_type string, a tag mapping table with a foreign key to 'tag'
    @param data dict, must contain 'tag' as '[namespace.]name=value'
    @raise error.CommandSemanticError for bad mapping types or tag syntax
    """

    item = sdnsh.get_current_mode_obj_type()
    fks = mi.obj_type_foreign_keys(obj_type)
    # find the fk in obj_type which references the current mode's type
    for fk in fks:
        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
        if fk_obj == item:
            break
    else:
        raise error.CommandSemanticError( "type mapping %s doesn't have "
                                          "relationship to the current object %s" %
                                          (obj_type, item))

    if sdnsh.description:   # description debugging
        print "command_create_tag: create ", obj_type, data

    tag_and_value = data['tag'].split('=')
    if len(tag_and_value) != 2:
        # deal with tag_and_value's 'va=vb=vc...'
        raise error.CommandSemanticError("tag <[tag-namespace.]name>=<value> "
                                         ": associate tag with host")

    tag_parts = tag_and_value[0].split('.')
    if len(tag_parts) == 0:
        # NOTE(review): unreachable -- str.split always returns at least
        # one element, so len(tag_parts) can never be 0
        raise error.CommandSemanticError("tag <[tag-namespace.]name>"
                                         ": must have a name")
    elif len(tag_parts) == 1:
        tag_namespace = "default"
        tag_name = tag_parts[0]
    elif len(tag_parts) >= 2:
        # the tag_name is not allowed to have '.'
        # collect all the '.'s together into the namespace
        tag_namespace = '.'.join(tag_parts[:-1])
        tag_name = tag_parts[-1]

    tag_value = tag_and_value[1]

    # first manage the tag ...
    tag_dict = {
        'namespace' : tag_namespace,
        'name'      : tag_name,
        'value'     : tag_value,
    }

    # create the tag row if missing, otherwise update it to persist
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_dict['persist'] = True
    if len(query) == 0:
        result = sdnsh.rest_create_object('tag', tag_dict)
        sdnsh.check_rest_result(result)
    elif len(query) == 1:
        update = sdnsh.rest_update_object('tag',
                                          mi.pk('tag'),
                                          query[0][mi.pk('tag')],
                                          tag_dict)
        sdnsh.check_rest_result(update)

    # re-query (without 'persist') to recover the tag row's primary key
    del tag_dict['persist']
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_id = query[0][mi.pk('tag')]

    # now create the tag-mapping
    tag_dict = {
        fk    : sdnsh.get_current_mode_obj(), # fk from early for loop
        'tag' : tag_id,
    }

    # only create the mapping when it doesn't already exist
    query = sdnsh.rest_query_objects(obj_type, tag_dict)
    sdnsh.check_rest_result(query)
    if len(query) == 0:
        result = sdnsh.rest_create_object(obj_type, tag_dict)
        sdnsh.check_rest_result(result)
3434
3435
3436def command_delete_tag(obj_type, data):
3437 """
3438 obj_type describes the tag-XXX-mapping which is getting
3439 managed, data has the tag 'string' to delete.
3440 """
3441 item = sdnsh.get_current_mode_obj_type()
3442 fks = mi.obj_type_foreign_keys(obj_type)
3443 for fk in fks:
3444 (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
3445 if fk_obj == item:
3446 break
3447 else:
3448 raise error.CommandSemanticError( "type mapping %s doesn't have "
3449 "relationship to the current object %s" %
3450 (obj_type, item))
3451
3452 if 'tag' not in data:
3453 raise error.CommandSemanticError('Tag value missing')
3454
3455 tag = data['tag']
3456 name_and_value = tag.split('=')
3457
3458 name_part = name_and_value[0].split('.')
3459 if len(name_part) == 1:
3460 namespace = 'default'
3461 name = name_part[0]
3462 elif len(name_part) >= 2:
3463 namespace = '.'.join(name_part[:-1])
3464 name = name_part[-1]
3465
3466 value = name_and_value[1]
3467 pk_value = sdnsh.unique_key_from_non_unique([namespace,
3468 name,
3469 value,
3470 sdnsh.get_current_mode_obj()])
3471 try:
3472 sdnsh.get_object_from_store(obj_type, pk_value)
3473 except Exception:
3474 raise error.CommandSemanticError('%s No such tag %s' % (obj_type, tag))
3475
3476 sdnsh.rest_delete_object(obj_type, pk_value)
3477
3478 # with that entry removed, check to see if any other
3479 # foreign keys assocaited with class Tag exist.
3480
3481 fk_value = sdnsh.unique_key_from_non_unique([namespace,
3482 name,
3483 value])
3484
3485 for tag_fields in mi.foreign_key_xref['tag']:
3486 for (fk_obj_type, fk_name) in mi.foreign_key_xref['tag'][tag_fields]:
3487 try:
3488 sdnsh.get_table_from_store(fk_obj_type, fk_name, fk_value)
3489 break
3490 except Exception, e:
3491 pass
3492 else:
3493 continue
3494 break
3495 else:
3496 try:
3497 sdnsh.rest_delete_object('tag', fk_value)
3498 except Exception, e:
3499 raise error.CommandSemanticError('base tag missing' % fk_value)
3500
3501
def command_rest_post_data(path, data=None, verb='PUT'):
    """
    Send a REST request to the local controller and validate the reply.

    @param path string, appended to 'http://<controller>/rest/v1/'
    @param data dict, request payload handed to rest_post_request
    @param verb string, HTTP method to use, eg: 'PUT' (default) or 'DELETE'
    """
    target = 'http://%s/rest/v1/%s' % (sdnsh.controller, path)
    reply = sdnsh.rest_post_request(target, data, verb)
    check_rest_result(reply)
    return None
3509
3510
3511def command_cli_variables_set(variable, value, data):
3512 global sdnsh
3513
3514 if variable == 'debug':
3515 print '***** %s cli debug *****' % \
3516 ('Enabled' if value else 'Disabled')
3517 sdnsh.debug = value
3518 elif variable == 'cli-backtrace':
3519 print '***** %s cli debug backtrace *****' % \
3520 ('Enabled' if value else 'Disabled')
3521 sdnsh.debug_backtrace = value
3522 elif variable == 'cli-batch':
3523 print '***** %s cli batch mode *****' % \
3524 ('Enabled' if value else 'Disabled')
3525 sdnsh.batch = value
3526 elif variable == 'description':
3527 print '***** %s command description mode *****' % \
3528 ('Enabled' if value else 'Disabled')
3529 sdnsh.description = value
3530 elif variable == 'rest':
3531 if 'record' in data and value:
3532 print '***** Eanbled rest record mode %s *****' % \
3533 (data['record'])
3534 url_cache.record(data['record'])
3535 return
3536 print '***** %s display rest mode *****' % \
3537 ('Enabled' if value else 'Disabled')
3538 if 'detail' in data and data['detail'] == 'details':
3539 if value == True:
3540 sdnsh.disply_rest_detail = value
3541 sdnsh.store.display_reply_mode(value)
3542 sdnsh.display_rest = value
3543 sdnsh.store.display_mode(value)
3544 if value == False:
3545 sdnsh.disply_rest_detail = value
3546 sdnsh.store.display_reply_mode(value)
3547 url_cache.record(None)
3548 elif variable == 'set':
3549 if 'length' in data:
3550 sdnsh.length = utif.try_int(data['length'])
3551
3552
def command_cli_set(variable, data):
    """Turn the named cli variable on (see command_cli_variables_set)."""
    command_cli_variables_set(variable, True, data)
3555
def command_cli_unset(variable, data):
    """Turn the named cli variable off (see command_cli_variables_set)."""
    command_cli_variables_set(variable, False, data)
3558
3559
3560def command_shell_command(script):
3561
3562 def shell(args):
3563 subprocess.call(["env", "SHELL=/bin/bash", "/bin/bash"] + list(args),
3564 cwd=os.environ.get("HOME"))
3565 print
3566
3567 print "\n***** Warning: this is a debug command - use caution! *****"
3568 if script == 'bash':
3569 print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
3570 shell(["-l", "-i"])
3571 elif script == 'python':
3572 print '***** Type "exit()" or Ctrl-D to return to the CLI *****\n'
3573 shell(["-l", "-c", "python"])
3574 elif script == 'cassandra-cli':
3575 print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
3576 shell(["-l", "-c", "/opt/sdnplatform/db/bin/cassandra-cli --host localhost"])
3577 elif script == 'netconfig':
3578 if not re.match("/dev/ttyS?[\d]+$", os.ttyname(0)):
3579 print '***** You seem to be connected via SSH or another remote protocol;'
3580 print '***** reconfiguring the network interface may disrupt the connection!'
3581 print '\n(Press Control-C now to leave the network configuration unchanged)\n'
3582 subprocess.call(["sudo",
3583 "env",
3584 "SHELL=/bin/bash",
3585 "/opt/sdnplatform/sys/bin/bscnetconfig",
3586 "eth0"],
3587 cwd=os.environ.get("HOME"))
3588 else:
3589 # XXX possibly run the script directly?
3590 print "Unknown debug choice %s" % script
3591
3592
def command_prompt_update():
    """
    Action to recompute the prompt, used when there's some possibility
    the prompt has changed after some other action (hostname update,
    for example).  The controller must be re-resolved before the prompt
    string is rebuilt, so the call order here matters.
    """
    sdnsh.set_controller_for_prompt()
    sdnsh.update_prompt()
3600
3601def command_controller_decommission(data):
3602 """
3603 Decommission the controller using the REST API
3604 """
3605 id = data.get('id')
3606 confirm_request("Decommission controller '%s'?\n(yes to continue) " % id)
3607
3608 while True:
3609 url = 'http://%s/rest/v1/system/ha/decommission' % (sdnsh.controller)
3610 result = sdnsh.rest_post_request(url, {"id": id}, 'PUT')
3611 status = json.loads(result)
3612
3613 if (status['status'] == 'OK') and status['description'].endswith('is already decommissioned') == True:
3614 print 'Decommission finished'
3615 print
3616 break
3617 else:
3618 print 'Decommission in progress'
3619
3620 time.sleep(10)
3621
def command_controller_upgrade(data = None):
    """
    Upgrade the controller using the REST API.

    Looks up the name of the uploaded upgrade image, confirms with the
    operator, extracts the image's manifest, then executes each manifest
    step in order via the execute-upgrade-step REST call, stopping at the
    first failed step.

    @param data  command data dict; may contain 'force' (ignore validation
                 errors) and 'details' (print each step's detailed output).
                 NOTE(review): despite the None default, a dict is required
                 here -- 'in None' would raise TypeError; verify callers.
    """

    force = 'force' in data
    details = 'details' in data

    if force:
        print "WARNING: Ignoring any validation errors during upgrade"
    # discover the uploaded upgrade image, if any
    url = "http://%s/rest/v1/system/upgrade/image-name" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    iname = json.loads(result)
    if (iname['file'] is None or iname['file'] == ""):
        # no image staged: explain how to upload one and bail out
        print "Error: No upgrade image present."
        print ""
        print """To perform upgrade, an upgrade image package needs to be uploaded (with scp) to the controller's \"images\" user."""
        print """Upgrade image package is a file with name of format \"upgrade-YYYY.MM.DD.XXXX.pkg\"."""
        print ""
        print "Following is an example to prepare upgrade for controller with IP address 192.168.67.141:"
        print "scp $path/upgrade-2013.02.13.0921.pkg images@192.168.67.141:"
        print ""
        return

    confirm_request("Upgrade controller from image '%s'?\n(yes to continue) "
                    % iname['file'])

    # fetch the ordered list of upgrade steps from the image package
    url = "http://%s/rest/v1/system/upgrade/extract-image-manifest" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    manifest = json.loads(result)

    print "Executing upgrade..."
    for step in manifest:
        print "%s - %s" % (step['step'], step['description'])
        url = 'http://%s/rest/v1/system/upgrade/execute-upgrade-step' % \
            (sdnsh.controller)
        result = sdnsh.rest_post_request(url, {"step": step['step'],
                                               "imageName": iname['file'],
                                               "force": force},
                                         'PUT')
        check_rest_result(result)
        status = json.loads(result)

        if (status['status'] == "OK"):
            print "  Succeeded"
            if details:
                print "\nDetailed output:"
                print status['description']
                print
        else:
            # NOTE(review): '%d' assumes step['step'] is an int, while the
            # success path formats it with '%s' -- confirm the manifest schema
            print "  Failed to execute upgrade step %d" % step['step']
            print "\nDetailed output:"
            print status['description']
            print
            return

    print """Controller node upgrade complete.
Upgrade will not take effect until system is rebooted. Use 'reload' to
reboot this controller node. To revert, select the appropriate image
from the boot menu"""
3683from the boot menu"""
3684
def command_cluster_config_rollback(data):
    """
    Roll the whole cluster back to a previously saved configuration.

    Outline: verify this node is MASTER, diff the target config against a
    fresh copy of the running-config (abort on differences), upload the
    config to the first node, then node by node push the rollback config
    and reboot, saving the local node for last.  An operator 'no' at the
    per-node prompt undoes the rollback on the nodes already processed.

    @param data  command data dict; 'dir' selects the source area
                 ('images://' or 'saved-configs://') and 'file' names
                 the saved configuration inside it.
    """
    # build the absolute path of the saved config to roll back to
    path = ''
    if data.get('dir') == 'images://':
        path += '/home/images/'
    elif data.get('dir') == 'saved-configs://':
        path += '/opt/sdnplatform/run/saved-configs/'
    path += data.get('file')

    # rollback may only be driven from the MASTER node
    url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url, use_cache = False)
    ha_role = json.loads(result)
    if ha_role['role'] != 'MASTER':
        print "Command can only be run on Master"
        return

    # snapshot the running config, then diff it against the target;
    # any difference aborts the rollback
    command_legacy_cli('copy', {'dest': 'file://running-config-copy', 'source': 'running-config'})
    print "INFO: Checking config '%s'" % path
    url = "http://%s/rest/v1/system/rollback/diffconfig" % sdnsh.controller
    result = sdnsh.rest_post_request(url, {"config-1": "/opt/sdnplatform/run/saved-configs/running-config-copy", "config-2": path}, 'PUT')
    check_rest_result(result)
    if json.loads(result)['out'].startswith('Found differences'):
        print json.loads(result)['out']
        print "Rollback aborted"
        return

    # identify this node and its discovered IP
    url = "http://%s/rest/v1/system/controller" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url, use_cache = False)
    controller_id = json.loads(result)['id']

    url = "http://%s/rest/v1/model/controller-interface?controller=%s" % (sdnsh.controller, controller_id)
    result = sdnsh.store.rest_simple_request(url)
    local_iface = json.loads(result)[0]['discovered-ip']

    # collect the interfaces of every cluster node
    url = "http://%s/rest/v1/model/controller-interface" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    ifaces = json.loads(result)

    # cutover = majority count; past this many rolled-back nodes an
    # abort must also re-open the openflow firewall on the remainder
    nodeCount = len(ifaces)
    cutover = nodeCount/2
    if nodeCount%2 == 1:
        cutover = cutover + 1

    rollbackedNodes = []

    # remove and add object for local node at the end of the list
    for index, iface in enumerate(ifaces):
        if iface['discovered-ip'] == local_iface:
            break
    del ifaces[index]
    ifaces.append(iface)

    # stage the rollback config on the first node to be processed
    config=open(path, 'r').read()
    url = 'http://%s/rest/v1/system/upload-data' % ifaces[0]['discovered-ip']
    result = sdnsh.rest_post_request(url, {"data": config, "dst" : "/tmp/rollback.conf"}, 'PUT')
    check_rest_result(result)

    while len(ifaces) > 0:
        # unless running in batch mode, confirm each node individually
        if sdnsh.batch == False:
            while True:
                confirm = raw_input("Rollback controller at '%s'. [yes/no] ?" % ifaces[0]['discovered-ip'])
                if confirm.lower() == 'n' or confirm.lower() == 'no':
                    if len(rollbackedNodes) == 0:
                        print "INFO: Rollback aborted"
                        return

                    # undo: reset and reboot every node already rolled back
                    print "INFO: Undoing Rollback on previously rollbacked nodes"
                    for node in rollbackedNodes:
                        print "INFO: Resetting database on '%s'" % node['discovered-ip']
                        url = 'http://%s/rest/v1/system/resetbsc' % (node['discovered-ip'])
                        result = sdnsh.rest_post_request(url, {}, 'PUT')
                        check_rest_result(result)
                        print "INFO: Rebooting '%s'" % node['discovered-ip']
                        url = 'http://%s/rest/v1/system/reload' % (node['discovered-ip'])
                        result = sdnsh.rest_post_request(url, {}, 'GET')
                        check_rest_result(result)

                    if len(rollbackedNodes) >= cutover:
                        # delete the REJECT rules
                        url="http://localhost/rest/v1/model/firewall-rule?port=6633"
                        result = sdnsh.rest_post_request(url, {}, 'DELETE')
                        # enable allow openflow on all controllers not rollbacked.
                        url="http://localhost/rest/v1/model/firewall-rule"
                        for iface in ifaces:
                            pk_id = '%s|Ethernet|0' % iface['controller']
                            data = {
                                'action': 'allow',
                                'interface': pk_id,
                                'src-ip': '',
                                'port': '6633',
                                'proto': 'tcp',
                                'vrrp-ip': '',
                            }
                            print "INFO: re-allow openflow on %s" % iface['discovered-ip']
                            result = sdnsh.rest_post_request(url, data, 'PUT')
                            check_rest_result(result)

                    print "Rollback aborted"
                    return
                elif confirm.lower() == 'y' or confirm.lower() == 'yes':
                    break

        # apply the staged config on the node, give it time, then reboot it
        url = 'http://%s/rest/v1/system/rollback/config' % (ifaces[0]['discovered-ip'])
        result = sdnsh.rest_post_request(url, {"path": "/tmp/rollback.conf"}, 'PUT')
        check_rest_result(result)
        time.sleep(10)

        print "INFO: Rebooting ", ifaces[0]['discovered-ip']
        url = "http://%s/rest/v1/system/reload" % ifaces[0]['discovered-ip']
        result = sdnsh.store.rest_simple_request(url)

        # the local node was moved to the end of the list; once it reboots
        # this CLI session is gone, so stop here
        if ifaces[0]['discovered-ip'] == local_iface:
            break

        # poll every 30s until the rebooted node reports a usable HA role
        print "INFO: Waiting for %s to come back up" % ifaces[0]['discovered-ip']
        url = "http://%s/rest/v1/system/ha/role" % ifaces[0]['discovered-ip']
        while True:
            time.sleep(30)
            try:
                result = sdnsh.store.rest_simple_request(url, use_cache = False)
                status = json.loads(result)
                if status['role'] == 'SLAVE' or status['role'] == 'MASTER':
                    print "INFO: Rollback complete on '%s'" % ifaces[0]['discovered-ip']
                    break
                print "INFO: Waiting for 30 seconds"
            except:
                print "INFO: Waiting for 30 seconds"


        iface = ifaces.pop(0)
        rollbackedNodes.append(iface)

    print "Rollback completed"
3817 print "Rollback completed"
3818
3819def command_wait_for_controller(delay = None, sdnplatform_check = False,
3820 within_command = False):
3821 """
3822 For various commands, it makes sense for the command to verify that
3823 the controller restart has been completed. In the situation where
3824 a single controller is configured, it also makes sense to verify the
3825 controller is now configured as MASTER.
3826
3827 This is especially true for command which are known to cause the
3828 controller to restart, for exampe the 'feature' command.
3829
3830 The procedure is also used during CLI startup (see cli.py)
3831 to verify that the controller is in MASTER mode. Its normal
3832 for the HA role to transition from SLAVE to master during
3833 system startup.
3834 """
3835
3836 # if the CLI was started with --init, skip the wait, the
3837 # controller isn't running.
3838 if sdnsh.options.init:
3839 return
3840
3841 def is_ready(sdnsh, verbose, duration):
3842 """
3843 Be loud-as-_ean when the duration is greater then 15 seconds.
3844 Display the gory details for all to know.
3845 """
3846 too_long = 90
3847 try:
3848 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3849 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3850 ha_role = json.loads(result)
3851 if duration > too_long:
3852 print 'Long delay: reason', \
3853 ', '.join(['%s: %s' % (n,v) for (n,v) in ha_role.items()
3854 if v != ''])
3855 if (ha_role['role'] == 'MASTER' or
3856 sdnsh.find_master()['master'] is not None):
3857 if verbose:
3858 print 'Current role is MASTER'
3859 return True
3860 return False
3861 except error.CommandRestError,e:
3862 print "REST error whileUnable to determine controller HA role."
3863 errors = self.rest_error_to_dict(e, obj_type)
3864 print self.rest_error_dict_to_message(errors)
3865 return True
3866 except Exception, e:
3867 if duration > too_long:
3868 print 'MASTER Transition Failure: ', e
3869 traceback.print_exc()
3870 return True
3871 return False
3872
3873 # if this isn't a typical environment (ie: running remotely)
3874 # don't bother trying to determine the role
3875 if not os.path.exists('/opt/sdnplatform/current_role'):
3876 return
3877
3878 # now vadalidate the rest api port is working
3879 ip_and_port = sdnsh.controller.split(':')
3880 if len(ip_and_port) == 2:
3881 # first ensure the REST API is answering
3882 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
3883 try:
3884 s.connect((ip_and_port[0], int(ip_and_port[1])))
3885 s.close()
3886 except Exception, e:
3887 print 'REST API not running, emergency CLI access'
3888 if sdnsh.debug: # enable debug to see messages
3889 print 'Exception:', e
3890 return
3891
3892 # issue a REST API request directed at the model.
3893 try:
3894 entry = sdnsh.get_table_from_store('feature')
3895 except Exception, e:
3896 print 'REST API/Database not responding, emergency CLI access'
3897 if sdnsh.debug: # enable debug to see messages
3898 print 'Exception:', e
3899 return
3900
3901 if sdnplatform_check:
3902 # the REST API request for ha-role will return UNAVAILABLE
3903 # when sdnplatform isn't running.
3904 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3905 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3906 ha_role = json.loads(result)
3907 if ha_role['role'] == 'UNAVAILABLE':
3908 print 'REST API/SDN platform not responding, emergency CLI access'
3909 return
3910
3911
3912 if delay == None:
3913 delay = 1
3914 delay_str = 'a sec' if delay == 1 else '%d seconds' % delay
3915
3916 duration = 0
3917 while True:
3918 try:
3919 verbose = False
3920 while not is_ready(sdnsh, verbose, duration):
3921 if within_command:
3922 print 'Waiting %s to complete command execution, ' \
3923 'Hit Ctrl-C to exit early' % delay_str
3924 verbose = False
3925 else:
3926 print 'Waiting %s while current role is SLAVE mode, ' \
3927 'Hit Ctrl-C to exit early' % delay_str
3928 verbose = True
3929 time.sleep(delay)
3930 duration += delay
3931 return
3932 except:
3933 if is_ready(sdnsh, True, duration):
3934 if duration > 15:
3935 print 'MASTER Transition: %s sec' % duration
3936 return
3937 try:
3938 resp = raw_input('Controller is not yet ready.'
3939 'Do you still want to continue to the CLI? [n]')
3940 if resp and "yes".startswith(resp.lower()):
3941 print 'Continuing with CLI despite initialization error ...'
3942 return
3943 except KeyboardInterrupt:
3944 return
3945
3946
3947def command_factory_default():
3948 print "Re-setting controller to factory defaults ..."
3949 os.system("sudo /opt/sdnplatform/sys/bin/resetbsc")
3950 return
3951
3952
def command_dump_log(data):
    """
    Generator yielding the requested controller log line by line.

    When 'log-name' is 'all', each available log on the node is emitted
    in turn, preceded by a banner line naming the node and the log.
    """
    node = data.get('controller-node')   # may be None
    for ip_port in controller_ip_and_port({'id': node}):
        requested = data['log-name']
        if requested == 'all':
            listing_url = log_url(ip_and_port=ip_port)
            for entry in command.sdnsh.rest_simple_request_to_dict(listing_url):
                yield '*' * 40 + ip_port + ' ' + entry['log'] + '\n'
                for line in command_dump_log({'log-name': entry['log']}):
                    yield line
            return

        # stream the log so the complete text is never held in memory
        stream = urllib2.urlopen(log_url(ip_and_port=ip_port, log=requested))
        for line in stream:
            yield line
        stream.close()
3973
3974
3975#
3976# Initialize action functions
3977#
3978#
3979
def init_actions(bs, modi):
    """
    Register this module's action procs with the command module.

    Captures the shared cli object and model-info object in the module
    globals 'sdnsh' and 'mi', then maps each action name used by command
    descriptions to its implementation, along with the keyword arguments
    ('$...' values are substituted by the command module at run time).

    @param bs    the cli object, saved as the module-global 'sdnsh'
    @param modi  the model-info object, saved as the module-global 'mi'
    """
    global sdnsh, mi
    sdnsh = bs
    mi = modi

    # tunnel, tunnelset and policy management
    command.add_action('create-tunnel',
                       tunnel_create,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('remove-tunnel',
                       tunnel_remove,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('create-tunnelset',
                       tunnelset_create,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('remove-tunnelset',
                       tunnelset_remove,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('create-policy',
                       policy_create,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('remove-policy',
                       policy_remove,
                       {'kwargs': {'data' : '$data',}})

    # field read/write actions against the current or an explicit object
    command.add_action('write-fields', write_fields,
                       {'kwargs': {'obj_type': '$current-mode-obj-type',
                                   'obj_id': '$current-mode-obj-id',
                                   'data': '$data'}})

    command.add_action('reset-fields', reset_fields,
                       {'kwargs': {'obj_type'     : '$current-mode-obj-type',
                                   'obj_id'       : '$current-mode-obj-id',
                                   'arg_data'     : '$data',
                                   'match_for_no' : '$match-for-no',
                                   'fields'       : '$fields'}})

    command.add_action('write-fields-explicit', write_fields,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'obj_id'   : '$obj-id',
                                   'data'     : '$data'}})

    command.add_action('reset-fields-explicit', reset_fields,
                       {'kwargs': {'obj_type'     : '$obj-type',
                                   'obj_id'       : '$obj-id',
                                   'arg_data'     : '$data',
                                   'match_for_no' : '$match-for-no',
                                   'fields'       : '$fields'}})

    command.add_action('update-config', update_config,
                       {'kwargs': {'obj_type'   : '$obj-type',
                                   'obj_id'     : '$current-mode-obj-id',
                                   'data'       : '$data',
                                   'no_command' : '$is-no-command', }})

    # object create/delete
    command.add_action('delete-objects', delete_objects,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'data': '$data',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id'}})

    command.add_action('write-object', write_object,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'data': '$data',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id'}})

    command.add_action('set-data', set_data,
                       {'kwargs': {'data': '$data',
                                   'key': '$key',
                                   'value': '$value'}})

    # submode (mode stack) handling
    command.add_action('push-mode-stack', push_mode_stack,
                       {'kwargs': {'mode_name': '$submode-name',
                                   'obj_type': '$obj-type',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id',
                                   'data': '$data',
                                   'create': '$create'}})

    command.add_action('pop-mode-stack', pop_mode_stack)

    command.add_action('confirm', confirm_request,
                       {'kwargs': {'prompt': '$prompt'}})

    command.add_action('convert-vns-access-list', convert_vns_access_list,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'key' : '$current-mode-obj-id',
                                   'data' : '$data'}})
    # display and query actions ('-append' variants accumulate results)
    command.add_action('display-table', command_display_table,
                       {'kwargs': {'obj_type'     : '$obj-type',
                                   'data'         : '$data',
                                   'table_format' : '$format',
                                   'title'        : '$title',
                                   'detail'       : '$detail',
                                   'scoped'       : '$scoped',
                                   'sort'         : '$sort',
                                   }})

    command.add_action('display-rest', command_display_rest,
                       {'kwargs': { 'data'         : '$data',
                                    'url'          : '$url',
                                    'path'         : '$path',
                                    'rest_type'    : '$rest-type',
                                    'sort'         : '$sort',
                                    'title'        : '$title',
                                    'table_format' : '$format',
                                    'detail'       : '$detail',
                                    }})

    command.add_action('query-table', command_query_table,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'key'      : '$key',
                                   'scoped'   : '$scoped',
                                   'sort'     : '$sort',
                                   'crack'    : '$crack',
                                   'append'   : '$append',
                                   'clear'    : True,
                                   }})

    command.add_action('query-table-append', command_query_table,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'key'      : '$key',
                                   'scoped'   : '$scoped',
                                   'sort'     : '$sort',
                                   'crack'    : '$crack',
                                   'append'   : '$append',
                                   'clear'    : False,
                                   }})


    command.add_action('query-rest', command_query_rest,
                       {'kwargs': {'url'       : '$url',
                                   'path'      : '$path',
                                   'rest_type' : '$rest-type',
                                   'data'      : '$data',
                                   'key'       : '$key',
                                   'scoped'    : '$scoped',
                                   'sort'      : '$sort',
                                   'append'    : '$append',
                                   'clear'     : True,
                                   }})

    command.add_action('query-rest-append', command_query_rest,
                       {'kwargs': {'url'       : '$url',
                                   'path'      : '$path',
                                   'rest_type' : '$rest-type',
                                   'data'      : '$data',
                                   'key'       : '$key',
                                   'scoped'    : '$scoped',
                                   'sort'      : '$sort',
                                   'crack'     : '$crack',
                                   'append'    : '$append',
                                   'clear'     : False,
                                   }})

    command.add_action('join-rest', command_join_rest,
                       {'kwargs': {'url'        : '$url',
                                   'key'        : '$key',
                                   'join_field' : '$join-field',
                                   'rest_type'  : '$rest-type',
                                   'add_field'  : '$add-field',
                                   'data'       : '$data',
                                   'crack'      : '$crack',
                                   'url_key'    : '$url-key',
                                   }})

    command.add_action('join-table', command_join_table,
                       {'kwargs': {'obj_type'   : '$obj-type',
                                   'data'       : '$data',
                                   'key'        : '$key',
                                   'key_value'  : '$key-value',
                                   'add_field'  : '$add-field',
                                   'join_field' : '$join-field',
                                   'crack'      : '$crack',
                                   }})

    command.add_action('crack', command_crack,
                       {'kwargs': {
                                   'field'      : '$field',
                                   }})

    command.add_action('display', command_display,
                       {'kwargs': {'data'         : '$data',
                                   'table_format' : '$format',
                                   'sort'         : '$sort',
                                   'detail'       : '$detail',
                                   'title'        : '$title',
                                   }})

    # legacy cli passthrough
    command.add_action('legacy-cli', command_legacy_cli,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'detail'   : '$detail',
                                   'sort'     : '$sort',
                                   'scoped'   : '$scoped',
                                   }})

    command.add_action('legacy-cli-no', command_legacy_cli_no,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'detail'   : '$detail',
                                   'sort'     : '$sort',
                                   'scoped'   : '$scoped',
                                   }})

    command.add_action('version', command_version,
                       {'kwargs': {'data' : '$data',
                                   }})

    command.add_action('clearterm', command_clearterm)

    command.add_action('display-cli', command_display_cli,
                       {'kwargs': {'data'   : '$data',
                                   'detail' : '$detail',
                                   }})

    # alias and tag management
    command.add_action('create-alias', command_create_alias,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                   'reserved'       : '$reserved',
                                   'fail_if_exists' : '$fail-if-exists',
                                   }})

    command.add_action('delete-alias', command_delete_alias,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                   }})

    command.add_action('create-tag', command_create_tag,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                   }})

    command.add_action('delete-tag', command_delete_tag,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                   }})

    # cli variables and system-level actions (defined above in this module)
    command.add_action('cli-set', command_cli_set,
                       {'kwargs': {'variable' : '$variable',
                                   'data'     : '$data',
                                   }})

    command.add_action('cli-unset', command_cli_unset,
                       {'kwargs': {'variable' : '$variable',
                                   'data'     : '$data',
                                   }})

    command.add_action('shell-command', command_shell_command,
                       {'kwargs': {'script' : '$command',
                                   }})

    command.add_action('rest-post-data', command_rest_post_data,
                       {'kwargs': {'path': '$path',
                                   'data': '$data',
                                   'verb': '$verb'
                                   }})

    command.add_action('prompt-update', command_prompt_update,)

    command.add_action('controller-upgrade', command_controller_upgrade,
                       {'kwargs': {'data': '$data'}})

    command.add_action('controller-config-rollback', command_cluster_config_rollback,
                       {'kwargs': {'data': '$data'}})

    command.add_action('controller-decommission', command_controller_decommission,
                       {'kwargs': {'data': '$data'}})

    command.add_action('wait-for-controller', command_wait_for_controller,
                       {'kwargs': {'within_command': True}})

    command.add_action('factory-default', command_factory_default)

    command.add_action('dump-log', command_dump_log,
                       {'kwargs' : { 'data' : '$data', }})