blob: 05a2eeb9b94034b8e9855bf8f7447628cc6fc49c [file] [log] [blame]
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08001#
2# Copyright (c) 2011,2012,2013 Big Switch Networks, Inc.
3#
4# Licensed under the Eclipse Public License, Version 1.0 (the
5# "License"); you may not use this file except in compliance with the
6# License. You may obtain a copy of the License at
7#
8# http://www.eclipse.org/legal/epl-v10.html
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13# implied. See the License for the specific language governing
14# permissions and limitations under the License.
15#
16
17import re
18import numbers
19import collections
20import traceback
21import types
22import json
23import time
24import sys
25import datetime
26import os
27import subprocess
28import socket
29import urllib2 # exception, dump_log()
30
31import modi
32import error
33import command
34import run_config
35import rest_to_model
36import url_cache
37
38from midw import *
39from vnsw import *
40#from html5lib.constants import DataLossWarning
41
# NOTE(review): module-level flag read elsewhere in the CLI; presumably
# enables ONOS-specific code paths — confirm before changing.
onos=1
43#
44# ACTION PROCS
# Format actions for per-table statistics display.
def remove_unicodes(actions):
    """Strip u'...' unicode-repr markers from a printed actions string.

    Scans for the two-character sequence u' (the repr prefix of a
    Python 2 unicode literal); when one is found, the 'u' already
    appended to the output is removed, the quote is not copied, and the
    single character following the quote is skipped.

    @param actions a string (the printed form of an actions structure),
        or a falsy value (None / empty string)
    @return the cleaned string; '' when actions is falsy
    """
    if not actions:
        # Bug fix: the original 'else' branch was a bare '' expression
        # statement, so the function implicitly returned None.  Always
        # return a string.
        return ''
    # TODO: check why two characters (not one) must be removed from the
    # string to get rid of the comma after the last action.
    a = ''
    b = ''
    newActions = ''
    isRemoved_u = False
    for ch in actions:
        if ch == 'u':
            a = 'u'
        if ch == '\'':
            b = '\''
        if isRemoved_u:
            # skip the one character that follows a matched u' pair
            isRemoved_u = False
            continue
        if (a + b) == 'u\'':
            newActions = newActions[:-1]  # drop the 'u' already copied
            a = ''
            isRemoved_u = True
        else:
            newActions += ch
    return newActions
def renameActions(actions):
    """Rewrite verbose OpenFlow action/instruction names in a printed
    actions string to their short display forms.

    @param actions a string containing raw action names
    @return the string with each known raw name replaced
    """
    # (raw, display) pairs, applied in this order
    renames = (
        ('GOTO_TABLE', 'GOTO'),
        ('WRITE_ACTIONS', 'WRITE'),
        ('APPLY_ACTIONS', 'APPLY'),
        ('DEC_NW_TTL: True', 'DEC_NW_TTL'),
        ('POP_MPLS: True', 'POP_MPLS'),
        ('COPY_TTL_IN: True', 'COPY_TTL_IN'),
        ('COPY_TTL_OUT: True', 'COPY_TTL_OUT'),
        ('DEC_MPLS_TTL: True', 'DEC_MPLS_TTL'),
        ('SET_DL_SRC', 'SRC_MAC'),
        ('SET_DL_DST', 'DST_MAC'),
        ('SET_NW_SRC', 'SRC_IP'),
        ('SET_NW_DST', 'DST_IP'),
        ('CLEAR_ACTIONS: {CLEAR_ACTIONS: True}', 'CLEAR_ACTIONS'),
    )
    for raw, short in renames:
        actions = actions.replace(raw, short)
    return actions
89
def check_rest_result(result, message=None):
    """Raise a CommandRestError when a REST reply reports an error.

    A reply is an error when it is a mapping whose 'error_type' entry is
    truthy; any other result passes silently.

    @param result the decoded REST response
    @param message optional text to attach to the raised error
    @raise error.CommandRestError when the response carries an error
    """
    if not isinstance(result, collections.Mapping):
        return
    if result.get('error_type'):
        raise error.CommandRestError(result, message)
95
# Transient state shared by the tunnelset config-submode callbacks
# (tunnelset_create / tunnel_config_exit / tunnelset_config_exit):
tunnelset_id=None              # id of the tunnelset currently being configured
tunnelset_dict=[]              # tunnel_params collected for that tunnelset
tunnelset_remove_tunnels=[]    # tunnel ids queued for removal from it
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -080099def tunnelset_create(data=None):
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800100 global tunnelset_id,tunnelset_dict,tunnelset_remove_tunnels
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800101 if sdnsh.description: # description debugging
102 print "tunnelset_create:" , data
103 if data.has_key('tunnelset-id'):
104 if (tunnelset_id != None):
105 if sdnsh.description: # description debugging
106 print "tunnelset_create: previous data is not cleaned up"
107 tunnelset_id=None
108 tunnelset_dict=[]
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800109 tunnelset_remove_dict=[]
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800110 tunnelset_id=data['tunnelset-id']
111 tunnelset_dict=[]
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800112 tunnelset_remove_dict=[]
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800113 if sdnsh.description: # description debugging
114 print "tunnelset_create:" , tunnelset_id
115
116def tunnelset_config_exit():
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800117 global tunnelset_id,tunnelset_dict,tunnelset_remove_tunnels
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800118 if sdnsh.description: # description debugging
119 print "tunnelset_config_exit entered", tunnelset_dict
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800120 if (len(tunnelset_dict) > 0) or (len(tunnelset_remove_tunnels)>0):
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800121 url_str = ""
122 entries = tunnelset_dict
123 url_str = "http://%s/rest/v1/tunnelset/" % (sdnsh.controller)
124 obj_data = {}
125 obj_data['tunnelset_id']=tunnelset_id
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800126 if (len(entries) > 0):
127 obj_data['tunnel_params']=entries
128 if (len(tunnelset_remove_tunnels) > 0):
129 obj_data['remove_tunnel_params']=tunnelset_remove_tunnels
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800130 result = "fail"
131 try:
132 result = sdnsh.store.rest_post_request(url_str,obj_data)
133 except Exception, e:
134 errors = sdnsh.rest_error_to_dict(e)
135 print sdnsh.rest_error_dict_to_message(errors)
136 # LOOK! successful stuff should be returned in json too.
137 tunnelset_dict = []
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800138 tunnelset_remove_tunnels = []
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800139 tunnelset_id = None
140 curr_tunnel_id = None
141 if result != "success":
142 print "command failed"
143 else:
144 print "empty command"
145 #Clear the transit information
146
147def tunnelset_remove(data=None):
148 if sdnsh.description: # description debugging
149 print "tunnelset_remove:" , data
150 tunnelset_id=data['tunnelset-id']
Srikanth Vavilapalli40d79f82014-12-17 14:29:24 -0800151 url_str = "http://%s/rest/v1/tunnelset/" % (sdnsh.controller)
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800152 obj_data = {}
153 obj_data['tunnelset_id']=data['tunnelset-id']
154 result = "fail"
155 try:
156 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
157 except Exception, e:
158 errors = sdnsh.rest_error_to_dict(e)
159 print sdnsh.rest_error_dict_to_message(errors)
160 if not result.startswith("SUCCESS"):
161 print result
162
# Transient state for the tunnel config submode:
tunnel_id=None    # id of the tunnel currently being configured
tunnel_dict={}    # tunnel_id -> list of node/adjacency labels entered so far
165def tunnel_create(data=None):
166 global tunnel_id,tunnel_dict
167 if sdnsh.description: # description debugging
168 print "tunnel_create:" , data
169 if data.has_key('tunnel-id'):
170 if (tunnel_id != None):
171 if sdnsh.description: # description debugging
172 print "tunnel_create: previous data is not cleaned up"
173 tunnel_id=None
174 tunnel_dict={}
175 tunnel_id=data['tunnel-id']
176 tunnel_dict[tunnel_id]=[]
177 if data.has_key('node-label'):
178 tunnel_dict[tunnel_id].append(data['node-label'])
179 if data.has_key('adjacency-label'):
180 tunnel_dict[tunnel_id].append(data['adjacency-label'])
181 if sdnsh.description: # description debugging
182 print "tunnel_create:" , tunnel_id, tunnel_dict
183
def tunnel_config_exit():
    """Leave the tunnel config submode and commit the collected labels.

    Inside a tunnelset (tunnelset_id set) the tunnel's parameters are
    only queued on tunnelset_dict, to be POSTed later by
    tunnelset_config_exit(); otherwise the tunnel is POSTed to the
    controller immediately.
    """
    global tunnel_id,tunnel_dict,tunnelset_dict
    if sdnsh.description: # description debugging
        print "tunnel_config_exit entered", tunnel_dict

    # NOTE(review): assumes tunnel_create() ran first so tunnel_id keys
    # tunnel_dict — a KeyError would follow otherwise; confirm callers.
    entries = tunnel_dict[tunnel_id]
    obj_data = {}
    obj_data['tunnel_id']=tunnel_id
    obj_data['label_path']=entries
    if tunnelset_id:
        # nested under a tunnelset: defer the REST write to the set exit
        tunnelset_dict.append(obj_data)
        tunnel_dict = {}
        tunnel_id = None
    elif tunnel_dict:
        url_str = ""
        url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
        result = "fail"   # overwritten by a successful POST
        try:
            result = sdnsh.store.rest_post_request(url_str,obj_data)
        except Exception, e:
            errors = sdnsh.rest_error_to_dict(e)
            print sdnsh.rest_error_dict_to_message(errors)
        # LOOK! successful stuff should be returned in json too.
        tunnel_dict = {}
        tunnel_id = None
        if result != "success":
            print "command failed"
    else:
        print "empty command"
    #Clear the transit information
214
def tunnel_remove(data=None):
    """Handle 'no tunnel <id>'.

    Inside a tunnelset submode the id is only queued for removal
    (flushed by tunnelset_config_exit()); otherwise the tunnel is
    deleted immediately through the REST API.

    @param data dict from the command description; expects 'tunnel-id'
    """
    global tunnelset_remove_tunnels
    if sdnsh.description: # description debugging
        print "tunnel_remove:" , data
    tunnel_id=data['tunnel-id']  # local; shadows the module-level tunnel_id
    if tunnelset_id:
        # deferred: removal is sent with the enclosing tunnelset update
        tunnelset_remove_tunnels.append(tunnel_id)
    else:
        url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
        obj_data = {}
        obj_data['tunnel_id']=data['tunnel-id']
        result = "fail"   # overwritten by a successful request
        try:
            result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
        except Exception, e:
            errors = sdnsh.rest_error_to_dict(e)
            print sdnsh.rest_error_dict_to_message(errors)
        if not result.startswith("SUCCESS"):
            print result
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800234
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800235
# Accumulates the policy fields entered in the policy config submode;
# flushed to the controller by policy_config_exit().
policy_obj_data = {}
237def policy_create(data=None):
238 global policy_obj_data
239 if sdnsh.description: # description debugging
240 print "policy_create:" , data
241 if data.has_key('policy-id'):
242 if policy_obj_data:
243 if sdnsh.description: # description debugging
244 print "policy_create: previous data is not cleaned up"
245 policy_obj_data = {}
246 policy_obj_data['policy_id'] = data['policy-id']
247 policy_obj_data['policy_type'] = data['policy-type']
248 if data.has_key('src_ip'):
249 for key in data:
250 policy_obj_data[key] = data[key]
251 if data.has_key('priority'):
252 policy_obj_data['priority'] = data['priority']
253 if data.has_key('tunnel-id'):
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800254 if policy_obj_data.has_key('tunnelset_id'):
255 print "ERROR: Policy can not point to both tunnelset and tunnel"
256 return
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800257 policy_obj_data['tunnel_id'] = data['tunnel-id']
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -0800258 if data.has_key('tunnelset-id'):
259 if policy_obj_data.has_key('tunnel_id'):
260 print "ERROR: Policy can not point to both tunnelset and tunnel"
261 return
262 policy_obj_data['tunnelset_id'] = data['tunnelset-id']
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -0800263
264 if sdnsh.description: # description debugging
265 print policy_obj_data
266
267def policy_config_exit():
268 global policy_obj_data
269 if sdnsh.description: # description debugging
270 print "policy_config_exit entered", policy_obj_data
271 if policy_obj_data:
272 url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
273 result = "fail"
274 try:
275 result = sdnsh.store.rest_post_request(url_str,policy_obj_data)
276 except Exception, e:
277 errors = sdnsh.rest_error_to_dict(e)
278 print sdnsh.rest_error_dict_to_message(errors)
279 if result != "success":
280 print "command failed"
281 policy_obj_data = {}
282 else:
283 print "empty command"
284 #Clear the transit information
285
286def policy_remove(data=None):
287 if sdnsh.description: # description debugging
288 print "policy_remove:" , data
289 policy_id=data['policy-id']
290 url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
291 obj_data = {}
292 obj_data['policy_id']=data['policy-id']
293 result = "fail"
294 try:
295 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
296 except Exception, e:
297 errors = sdnsh.rest_error_to_dict(e)
298 print sdnsh.rest_error_dict_to_message(errors)
299 if result != "deleted":
300 print "command failed"
301
302
303
def write_fields(obj_type, obj_id, data):
    """
    Typical action to update fields of a row in the model

    @param obj_type a string, the name of the db table to update
    @param obj_id a string, the value of the primary key in for the table
    @param data a dict, the name:value pairs of data to update in the table
    @raise error.CommandDescriptionError when the table has no primary key
    @raise error.CommandRestError when the REST update reports an error
    """
    if sdnsh.description: # description debugging
        print "write_fields:", obj_type, obj_id, data

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    if sdnsh.description: # description debugging
        print "write_fields:", obj_type, pk_name, obj_id, data
    # map the sentinel value 'default' to None for nullable foreign keys
    for fk in mi.obj_type_foreign_keys(obj_type):
        if fk in data and mi.is_null_allowed(obj_type, fk):
            if data[fk] == 'default': # XXX much too magic, seems an option here would be good
                data[fk] = None

    result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
    check_rest_result(result)
327
328
329def verify_row_includes(obj_type, pk_value, data, verify):
330 """
331 Intended to raise an exception when a user enters 'no field value',
332 and the field isn't currently set to value, for example:
333 'address-space as1 ; no address-space as2', should complain
334 that the 'address-space' field isn't currently set to 'as2'.
335
336 @param obj_type a string, identifies the db table
337 @param pk_value a string, identifies the value for the primary key
338 @param data is a dict, collecting the name:value pairs from the description
339 @verify the string or list of field names to be verified
340 """
341 if sdnsh.description: # description debugging
342 print "validate_row_includes:", obj_type, pk_value, data, verify
343
344 if type(verify) == str:
345 verify = [verify] # if not a list, make it a list
346
347 try:
348 row = sdnsh.get_object_from_store(obj_type, pk_value)
349 except Exception, e:
350 if sdnsh.debug or sdnsh.debug_backtrace:
351 print 'Failed lookup of %s:%s:%s', (obj_type, pk_value, e)
352 traceback.print_exc()
353 raise error.ArgumentValidationError("%s: '%s' doesn't exist" %
354 (obj_type, pk_value))
355 return
356
357 if sdnsh.description: # description debugging
358 print "validate_includes: ", row
359 for field in [x for x in verify if x in data and x in row]:
360 if row[field] != data[field]:
361 raise error.ArgumentValidationError("%s: %s found '%s' current value '%s'" %
362 (obj_type, field, data[field], row[field]))
363
364
def reset_fields(obj_type, arg_data,
                 obj_id = None, fields = None, match_for_no = None):
    """
    For an obj_type, revert fields back to their default value.
    This is the typical action for 'no' commands.

    When verify is set, this is a string or list of fields who's values
    must match in the table for the primary key associated with the reset.
    This allows command descriptions to identify any fields which need to
    be checked against, when they are explicidly named in the 'no' command,
    so that 'no XXX value' will verify that 'value' matches the current
    row's value before allowing the reset to continue

    @param obj_type a string, identifies the db table
    @param obj_id a string, identifies the value for the primary key of the row in the table,
            possibly unset, the key is looked for in the arg_data in that case.
    @param arg_data a dict, collection of name:value pairs from the description
    @param fields a list, collection of fields to update in the table
    @param match_for_no a string or list, list of fields to check for matched values in arg_data
    @raise error.CommandDescriptionError when table metadata (pk, fields,
        defaults) can't be resolved
    @raise error.CommandError when the REST update fails
    """

    if obj_type == None:
        raise error.CommandDescriptionError("No object to reset (missing obj-type)")

    pk_name = mi.pk(obj_type)
    # If the fields aren't specified explicitly, then derive from the arg_data
    if fields is None:
        fields = []
        for field in arg_data.keys():
            # Only add arguments that correspond to valid fields in the object
            if mi.obj_type_has_field(obj_type, field):
                if field != pk_name: # don't reset primary keys
                    fields.append(field)

    if len(fields) == 0:
        raise error.CommandDescriptionError("No fields to reset: type: %s" % obj_type)

    # Get the primary key name
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    if obj_id == None:
        if pk_name in arg_data:
            obj_id = arg_data[pk_name]
        elif mi.field_default_value(obj_type, pk_name):
            # unusual, but not impossible for singletons
            obj_id = mi.field_default_value(obj_type, pk_name)
        else:
            raise error.CommandDescriptionError("Can't find id value name for type: %s"
                                                " field %s" % (obj_type, pk_name))

    if match_for_no:
        # 'no XXX value': ensure the current row actually holds 'value'
        verify_row_includes(obj_type, obj_id, arg_data, match_for_no)

    # Get the default values of the specified field from CLI model info
    data = {}
    for field in fields:
        if field == pk_name:
            continue
        type_info = mi.cli_model_info.get_field_info(obj_type, field)
        if type_info == None:
            raise error.CommandDescriptionError("Can't find field details for "
                                                "field %s in type %s" % (field, obj_type))
        data[field] = type_info.get('default')
        if data[field] == None and type_info.get('type') == 'BooleanField':
            data[field] = False
        # why does boolean not respect the default in the model?!?
        # data[field] = type_info.get('default') if type_info.get('type') != 'BooleanField' else False

    if sdnsh.description: # description debugging
        print "reset_fields:", obj_type, pk_name, obj_id, data, match_for_no

    # Invoke the REST API to set the default values
    try:
        result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
    except Exception, e:
        errors = sdnsh.rest_error_to_dict(e, obj_type)
        raise error.CommandError('REST', sdnsh.rest_error_dict_to_message(errors))
442
443
def obj_type_fields_have_default_value(obj_type, row, data):
    """
    Return True when all the fields have a default value,
    row is the queried data from the store,
    data is the data to be updated.

    The goal is to determine whether to delete or update
    the row in the store.

    @param obj_type a string, identifies the db table
    @param row a dict, the row currently in the store ({} when absent)
    @param data a dict, the pending update (takes precedence over row)
    @return True when every relevant field would end up at its default
    """

    # fields participating in a compound primary key are skipped below
    ckf = []
    if mi.is_compound_key(obj_type, mi.pk(obj_type)):
        # XXX primitive compound keys' too?
        ckf = mi.compound_key_fields(obj_type, mi.pk(obj_type))

    for field in mi.obj_type_fields(obj_type):
        if mi.is_primary_key(obj_type, field):
            continue
        if mi.is_foreign_key(obj_type, field):
            # perhaps only allow a single foreign key?
            continue
        # also any fields which are used to compound the ident.
        if field in ckf:
            continue
        # Needs a better way to identify non-model-fields
        if field == 'Idx':
            continue
        if mi.is_null_allowed(obj_type, field):
            # nullable: the "default" is null, so any non-None update
            # value means the row isn't all-default
            # does this need to be more complex?
            if field in data and data[field] != None:
                return False
            continue # next field
        default_value = mi.field_default_value(obj_type, field)
        if default_value == None:
            # a non-nullable field without a default can never be "all
            # default" — the row must be kept
            if sdnsh.description: # description debugging
                print 'default_value: no default: %s %s' % (obj_type, field)
            return False
        # check to see if the updated value would be the default
        if field in data and data[field] != default_value:
            if sdnsh.description: # description debugging
                print 'default_value: not default %s %s %s' % \
                      (field, data[field], default_value)
            return False
        elif row.get(field, default_value) != default_value:
            # stored value differs from the default, unless the pending
            # update restores it to the default
            if field in data and data[field] == default_value:
                if sdnsh.description: # description debugging
                    print 'default_value: db not default %s %s %s' \
                          ' new value in data %s is default' % \
                          (field, row[field], default_value, data[field])
                continue
            if sdnsh.description: # description debugging
                print 'default_value: db not default %s %s %s' % \
                      (field, row[field], default_value)
            return False
    return True
500
501
def update_config(obj_type, obj_id, data, no_command):
    """
    update_config is intended to write a row when the described data
    is different from the default values of the fields of the row.

    When the data described in the call updates the field's values
    to all default values, the row associated with the obj_id is
    deleted.

    This is intended to be used for models which contain configuration
    row data, and that every field has a default value,
    so that when the config data is transitioned to the default
    state, the row is intended to be removed. For these sorts of
    command descriptions, updating a field to some default value
    may result in the row getting deleted.

    @param obj_type a string, identifies the db table
    @param obj_id a string, primary key value (data[pk] wins if present)
    @param data a dict, name:value pairs from the command description
    @param no_command a bool, True for 'no' commands: requested fields
        are reverted to null (when allowed) or their default value
    """

    c_data = dict(data) # make a local copy
    if sdnsh.description: # description debugging
        print "update_config: ", obj_type, obj_id, c_data, no_command

    if not mi.obj_type_exists(obj_type):
        raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)

    # collect any dict.key names which aren't fields in the object
    for unknown_field in [x for x in c_data.keys() if not mi.obj_type_has_field(obj_type, x)]:
        del c_data[unknown_field]

    # if its a no command, set the value to 'None' if it's allowed,
    # of to its default value otherwise
    if no_command:
        for field in c_data.keys():
            if mi.is_null_allowed(obj_type, field):
                c_data[field] = None
            else:
                # required to have a default value
                c_data[field] = mi.field_default_value(obj_type, field)

    # Get the primary key name
    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    pk_value = obj_id
    if pk_name in data:
        pk_value = data[pk_name]
    if pk_name in c_data:
        del c_data[pk_name]

    # Query for the row, if it doesn't exist, create the item if any item isn't default
    if sdnsh.description: # description debugging
        print "update_config: query:", obj_type, pk_value

    result = sdnsh.rest_query_objects(obj_type, { pk_name : pk_value })
    check_rest_result(result)
    if len(result) == 0:
        # result[0] -> dictionary of field:value pairs
        # check to ensure c_data isn't just default row values
        if not obj_type_fields_have_default_value(obj_type, {}, c_data):
            if sdnsh.description: # description debugging
                print "update_config: create:", obj_type, c_data
            # populate the create dictionary
            create_dict = dict(c_data)
            create_dict[pk_name] = pk_value
            # NOTE(review): 'result' now holds the create response, yet
            # execution falls through to the len()/result[0] logic below
            # — confirm the create response shape makes that safe.
            result = sdnsh.rest_create_object(obj_type, create_dict)
            check_rest_result(result)
        else:
            if sdnsh.description: # description debugging
                print "update_config: no current row"
            return
    else:
        if sdnsh.description: # description debugging
            print "update_config: found row", result[0]

    if len(result) > 1:
        raise error.CommandInternalError("Multiple rows for obj-type: %s: pk %s" %
                                         (obj_type, pk_value))

    # See if the complete row needs to be deleted.
    # For each of the current fields, if a field's default doesn't exist,
    # skip the row delete, or if any field has a non-default value, update
    # the requested fields instead of deleting the row.
    if obj_type_fields_have_default_value(obj_type, result[0], c_data):
        # if the table has foreign keys, check no children refer to this table.
        no_foreign_keys_active = True
        if obj_type in mi.foreign_key_xref:
            for (fk_obj_type, fk_fn) in mi.foreign_key_xref[obj_type][mi.pk(obj_type)]:
                try:
                    rows = sdnsh.get_table_from_store(fk_obj_type, fk_fn,
                                                      pk_value, "exact")
                except Exception, e:
                    # best-effort: treat a failed child lookup as "no rows"
                    rows = []
                if len(rows):
                    if sdnsh.description: # description debugging
                        print "update_config: foreign key active:", \
                              fk_obj_type, fk_fn, pk_value
                    no_foreign_keys_active = False
                    break

        if no_foreign_keys_active:
            if sdnsh.description: # description debugging
                print "update_config: delete:", obj_type, pk_value
            try:
                delete_result = sdnsh.rest_delete_objects(obj_type, { pk_name : pk_value })
                check_rest_result(delete_result)
            except Exception, e:
                errors = sdnsh.rest_error_to_dict(e)
                raise error.CommandInvocationError(sdnsh.rest_error_dict_to_message(errors))
            return
        # XXX if a row from some table is removed, and that table is using
        # foreign keys, then the table which is refered to ought to be
        # reviewed, to see if all the entries of the row which this table
        # refer's to are default, and if that parent table is a config-style
        # table, with all default values for every field, there's a good
        # argument that the row ought to be removed.

    # See if any of the c_data items in the matching row are different
    # (ie: is this update really necessary?)
    update_necessary = False
    for (name, value) in c_data.items():
        if name in result[0]:
            if value != result[0][name]:
                update_necessary = True
                if sdnsh.description: # description debugging
                    print "update_config: update necessary:", name, result[0][name], value
        else:
            update_necessary = True

    if not update_necessary:
        if sdnsh.description: # description debugging
            print "update_config: no update needed", obj_type, pk_name, pk_value
        return

    if sdnsh.description: # description debugging
        print "update_config: update:", obj_type, pk_name, pk_value, c_data
    # Invoke the REST API to set the default values
    result = sdnsh.rest_update_object(obj_type, pk_name, pk_value, c_data)
    check_rest_result(result)
639
640
def delete_objects(obj_type, data, parent_field=None, parent_id=None):
    """
    Delete a row in the table.

    @param obj_type a string, the name of the table to update
    @param data a dictionary, name:value pairs to describe the delete
    @param parent_field a string, the name of a field in the obj_type,
            identifying a relationship between this table, and another table
    @param parent_id a string, the value of the parent_field, to identify
            another row in the other table identified by a field in this table
    """

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)

    query_data = dict(data)
    if parent_field:
        query_data[parent_field] = parent_id

    # case conversion
    # NOTE(review): the conversion below mutates 'data' after 'query_data'
    # was copied from it, so the query still uses the unconverted values —
    # confirm whether that is intentional.
    for field in data:
        if mi.obj_type_has_field(obj_type, field):
            case = mi.get_obj_type_field_case_sensitive(obj_type, field)
            if case:
                if sdnsh.description: # description debugging
                    print 'delete_objects: case convert %s:%s to %s' % \
                          (obj_type, field, case)
                data[field] = utif.convert_case(case, data[field])

    query_result = sdnsh.rest_query_objects(obj_type, query_data)
    check_rest_result(query_result)
    #
    # if there were no results, try to delete by removing any
    # items which have "None" values
    if len(query_result) == 0:
        for key in query_data.keys():
            if query_data[key] == None:
                del query_data[key]
        query_result = sdnsh.rest_query_objects(obj_type, query_data)
        check_rest_result(query_result)

    if sdnsh.description: # description debugging
        print "delete_objects:", obj_type, query_data
    delete_result = sdnsh.rest_delete_objects(obj_type, query_data)
    check_rest_result(delete_result)

    # cascade-delete any rows in other tables that referenced the deleted keys
    for item in query_result:
        key = item[pk_name]
        sdnsh.cascade_delete(obj_type, key)
691
692
693def set_data(data, key, value):
694 """
695 Action to associate a new name:value pair with 'data', the dictionary used
696 to pass to REST API's. Allows the action to describe a value for a field
697 which wasn't directly named in the description.
698
699 """
700 if sdnsh.description: # description debugging
701 print "set_data:", data, key, value
702 data[key] = value
703
704
def write_object(obj_type, data, parent_field=None, parent_id=None):
    """
    Write a new row into a specific table.

    Attempts to fill in any unpopulated foreign keys from the supplied
    data, queries for an existing row, then updates it when one matches
    (and the primary key is known) or creates a new row otherwise.

    @param obj_type a string, the name of the table to write
    @param data a dict, name:value pairs from the description (copied)
    @param parent_field a string, field of obj_type linking to a parent row
    @param parent_id a string, the parent_field value to store
    """
    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).

    if sdnsh.description: # description debugging
        print 'write_object: params ', obj_type, data, parent_field, parent_id
    data = dict(data) # data is overwriten in various situations below
    if parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description;'
                                                'improperly configured parent info for create-object')
        data[parent_field] = parent_id

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)

    # look for unpopulated foreign keys.
    fks = mi.obj_type_foreign_keys(obj_type)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            if not fk in data or \
               (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):
                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                #   parts of the compound key are present

                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing a error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                else:
                    # simple foreign key: adopt the referenced row's pk
                    # only when the query pins down exactly one row
                    qr = sdnsh.rest_query_objects(fk_obj, data)
                    if len(qr) == 1:
                        data[fk] = qr[0][mi.pk(fk_obj)]

    if pk_name in data:
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query pk_name ', obj_type, pk_name, data
        case = mi.get_obj_type_field_case_sensitive(obj_type, pk_name)
        if case:
            data[pk_name] = utif.convert_case(case, data[pk_name])
        query_result = sdnsh.rest_query_objects(obj_type, { pk_name : data[pk_name]})
    else:
        # no pk supplied: query by every non-None field instead
        query_data = dict([[n,v] for (n,v) in data.items() if v != None])
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query ', obj_type, query_data
        query_result = sdnsh.rest_query_objects(obj_type, query_data)
    check_rest_result(query_result)

    # Consider checking to see if all the fields listed here
    # already match a queried result, if so, no write is needed

    if (len(query_result) > 0) and (pk_name in data):
        if sdnsh.description: # description debugging
            print "write_object: update object", obj_type, pk_name, data
        result = sdnsh.rest_update_object(obj_type, pk_name, data[pk_name], data)
    else:
        if sdnsh.description: # description debugging
            print "write_object: create_object", obj_type, data
        result = sdnsh.rest_create_object(obj_type, data)

    check_rest_result(result)

    # cascade-delete rows referencing the pre-existing matches
    for item in query_result:
        key = item[pk_name]
        sdnsh.cascade_delete(obj_type, key)
794
795
def delete_object(obj_type, data, parent_field=None, parent_id=None):
    """
    Delete rows of a table matching the (case-normalized) data.

    @param obj_type a string, the name of the table to delete from
    @param data a dict, name:value pairs selecting the rows (copied)
    @param parent_field a string, field of obj_type linking to a parent row
    @param parent_id a string, the parent_field value to match
    @raise error.CommandDescriptionError when parent_field is given
        without parent_id
    """
    global sdnsh

    data = dict(data)
    if parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description;'
                                                'improperly configured parent info for delete-object')
        data[parent_field] = parent_id

    # case conversion
    for field in data:
        if mi.obj_type_has_field(obj_type, field):
            case = mi.get_obj_type_field_case_sensitive(obj_type, field)
            if case:
                if sdnsh.description: # description debugging
                    print 'delete_object: case convert %s:%s to %s' % \
                          (obj_type, field, case)
                data[field] = utif.convert_case(case, data[field])

    if sdnsh.description: # description debugging
        print "delete_object: ", obj_type, data
    result = sdnsh.rest_delete_objects(obj_type, data)
    check_rest_result(result)
820
821
def push_mode_stack(mode_name, obj_type, data, parent_field = None, parent_id = None, create=True):
    """
    Push a submode on the config stack.

    @param mode_name string, submode to enter; may be rewritten below for
           the tunnelset-nested-tunnel and policy-type specializations
    @param obj_type string, object type associated with the submode
    @param data dict, fields identifying the submode's object
    @param parent_field string, field linking the object to its parent
           for nested submodes
    @param parent_id value for parent_field when not derived from the
           current mode object
    @param create True when a missing object should be created via the
           REST API before entering the submode
    """
    global sdnsh, modi

    # Some few minor validations: enable only in login, config only in enable,
    # and additional config modes must also have the same prefix as the
    # current mode.
    current_mode = sdnsh.current_mode()
    # a tunnel submode entered from within a tunnelset is a distinct mode
    if (mode_name == 'config-tunnel'):
        if (current_mode == 'config-tunnelset'):
            mode_name = 'config-tunnelset-tunnel'
    # config-policy specializes into a sub-flavor based on policy-type
    if (mode_name == 'config-policy'):
        if (data.has_key('policy-type')):
            if (data['policy-type'] == 'tunnel-flow'):
                mode_name = 'config-policy-tunnel'
            if (data['policy-type'] == 'loadbalance'):
                mode_name = 'config-policy-loadbalance'
            if (data['policy-type'] == 'avoid'):
                mode_name = 'config-policy-avoid'
            if sdnsh.description: # description debugging
                print "Changing config-policy sub mode to ", mode_name

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, data, parent_field, parent_id

    # See if this is a nested submode, or whether some current modes
    # need to be popped.
    if (mode_name.startswith('config-') and
      (not mode_name.startswith(current_mode) or (mode_name == current_mode))):

        sdnsh.pop_mode()
        current_mode = sdnsh.current_mode()
        # pop until it it matches
        while not mode_name.startswith(current_mode):
            if len(sdnsh.mode_stack) == 0:
                raise error.CommandSemanticError('%s not valid within %s mode' %
                                                 (mode_name, current_mode))
            sdnsh.pop_mode()
            current_mode = sdnsh.current_mode()

        # if there's a parent id, it is typically the parent, and audit
        # ought to be done to verify this
        if parent_field:
            data = dict(data)
            data[parent_field] = sdnsh.get_current_mode_obj()

    elif mode_name in ['config', 'enable', 'login']:
        # see if the mode is in the stack
        if mode_name in [x['mode_name'] for x in sdnsh.mode_stack]:
            if sdnsh.description: # description debugging
                print 'push_mode: popping stack for', mode_name
            current_mode = sdnsh.current_mode()
            # unwind back down to the already-present mode, nothing to push
            while current_mode != mode_name:
                sdnsh.pop_mode()
                current_mode = sdnsh.current_mode()
            return


    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).
    elif parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description; '
                                                'improperly configured parent info for push-mode-stack')
        data = dict(data)
        data[parent_field] = parent_id

    key = None
    if obj_type:
        # case-normalize any fields the model declares case-insensitive
        for field in data:
            if mi.obj_type_has_field(obj_type, field):
                case = mi.get_obj_type_field_case_sensitive(obj_type, field)
                if case:
                    if sdnsh.description: # description debugging
                        print 'push_mode: case convert %s:%s to %s' % \
                              (obj_type, field, case)
                    data[field] = utif.convert_case(case, data[field])


        # Query for the object both to see if it exists and also to determine
        # the pk value we're going to push on the stack.  We need to do
        # the query in the case where the model uses compound keys and we're
        # specifying the individual fields that compose the compound key.
        result = sdnsh.rest_query_objects(obj_type, data)
        check_rest_result(result)
        if len(result) == 0 and create:
            #
            # For vns-interface, the association of 'rule' with the data dict
            # is difficult to explain via the command description.  This is
            # obviously a poor method of dealing with the issue, but until
            # a better one arises (possibly REST api create? possibly
            # model validation code?), this solution works.
            if obj_type == 'vns-interface':
                data = associate_foreign_key_for_vns_interface(data)

            # Create the object and re-query to get the id/pk value
            # FIXME: Could probably optimize here if the data already
            # contains the pk value.
            if sdnsh.description: # description debugging
                print "push_mode: create ", obj_type, data
            result = sdnsh.rest_create_object(obj_type, data)
            check_rest_result(result)
            result = sdnsh.rest_query_objects(obj_type, data)
            check_rest_result(result)
        else:
            if sdnsh.description: # description debugging
                print "push_mode: object found", obj_type, result

        # Check (again) to make sure that we have an object
        if len(result) == 0:
            raise error.CommandSemanticError('Object not found; type = %s' % obj_type)

        # Check to make sure there aren't multiple matching objects.  If there
        # are that would indicate a problem in the command description.
        if len(result) > 1:
            raise error.CommandDescriptionError('Push mode info must identify a single object;'
                                                'type = %s; data = %s' %
                                                (obj_type, str(data)))

        # Get the id/pk value from the object info
        pk_name = mi.pk(obj_type)
        if not pk_name:
            raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
        key = result[0][pk_name]
    else:
        pk_name = '<none>'

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, pk_name, key
    # select the exit callback for submodes that commit work on exit
    exitCallback = None
    if (mode_name == 'config-tunnelset'):
        exitCallback = tunnelset_config_exit
    if ((mode_name == 'config-tunnel') or (mode_name == 'config-tunnelset-tunnel')):
        exitCallback = tunnel_config_exit
    if (mode_name.startswith('config-policy')):
        exitCallback = policy_config_exit
    sdnsh.push_mode(mode_name, obj_type, key, exitCallback)
963
964
965def pop_mode_stack():
966 global sdnsh
967
968 if sdnsh.description: # description debugging
969 print "pop_mode: "
970 sdnsh.pop_mode()
971
def confirm_request(prompt):
    """
    Ask the operator to confirm an action; 'y' or 'yes' (any case)
    accepts.  Batch mode skips the prompt entirely.  Any other reply
    aborts the command via ArgumentValidationError.
    """
    global sdnsh

    if sdnsh.batch:
        return
    reply = raw_input(prompt).lower()
    if reply in ('y', 'yes'):
        return
    raise error.ArgumentValidationError("Expected y or yes, command: ")
981
982import c_data_handlers
983
984def convert_vns_access_list(obj_type, key, data):
985 """
986 For vns-access-group's, the access list which is the first parameter
987 needs to be converted into a vns-access-list foreign key. This is
988 possible since the vns name is part of the current object id.
989 """
990 global sdnsh, modi
991
992 key_parts = key.split('|')
993 if len(key_parts) != 3:
994 raise error.ArgumentValidationError("invalid id")
995 if not 'vns-access-list' in data:
996 raise error.ArgumentValidationError("missing vns-access-list")
997 try:
998 key_parts.pop()
999 vnskey='|'.join(key_parts)
1000 entry = sdnsh.rest_query_objects('vns-access-list',
1001 { 'vns' : vnskey,
1002 'name' : data['vns-access-list']
1003 })
1004 except Exception, _e:
1005 entry = []
1006
1007 if len(entry) != 1:
1008 raise error.ArgumentValidationError("unknown acl %s" % data['vns-access-list'])
1009 data['vns-access-list'] = entry[0]['id']
1010
def command_query_object(obj_type, data, scoped, sort):
    """
    Return model entries (db rows) via the REST API.  Try to be
    very smart about using parameters and the model definition to
    figure out how to query for the entries.

    @param obj_type string, name of the object type to query
    @param data dict, field:value pairs constraining the query; values
           of 'all' are treated as unconstrained
    @param scoped when set, the current submode object's id is folded
           into the query as the primary key (or its prefix fields)
    @param sort string, comma separated field list passed as 'orderby'
    @return list of dicts, one per matching row
    """

    if sdnsh.description:
        print 'command_query_object: ', obj_type, data, scoped, sort

    # these types are searched directly, never via related foreign keys
    skipforeignsearch=False
    if (obj_type=='virtualrouter-routingrule' or obj_type=='virtualrouter-interface'):
        skipforeignsearch=True
    # big_search describes a related search which must be done to
    # satisfy this request, see the relationship of tag-mapping to tag
    # as an example.
    big_search = []

    key = mi.pk(obj_type)
    #
    if mi.is_compound_key(obj_type, key):
        if sdnsh.description: # description debugging
            print "command_query_object: %s compound %s" % (obj_type, key)
        #
        # collect compound key names, look for these in the data,
        # if any of the values are 'all', remove the item from
        # the group of data.
        #
        # XXX needs work: we ought to check to see if the
        # compound key is part of some other key.
        #
        if scoped:
            # split the current mode object's id into its component
            # fields and merge any not already given in data
            obj_d = { key : sdnsh.get_current_mode_obj() }
            mi.split_compound_into_dict(obj_type, key, obj_d, is_prefix = True)
            for (k,v) in obj_d.items():
                if k != key and not k in data:
                    data[k] = v

        new_data = {}
        dckfs = mi.deep_compound_key_fields(obj_type, key)
        if key in data:
            mi.split_compound_into_dict(obj_type, key, data, is_prefix = True)
        foreign_obj_type_search = {}

        for kf in dckfs:
            if mi.obj_type_has_field(obj_type, kf) and kf in data and data[kf] != 'all':
                new_data[kf] = data[kf]
            elif not mi.obj_type_has_field(obj_type, kf):
                # deep_compound_keys returns references via foreign keys.
                # if the field is missing in obj_type, its likely from
                # some related fk.
                for fk in mi.obj_type_foreign_keys(obj_type):
                    (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                        fk)
                    if kf == fk_name:
                        # print "FOUND MATCH ", kf, _fk_obj_type, fk_name
                        continue
                    elif not mi.is_compound_key( _fk_obj_type, fk_name):
                        continue
                    # collect constraints which must be satisfied by
                    # querying the related (foreign) obj_type first
                    for fkcf in mi.compound_key_fields(_fk_obj_type, fk_name):
                        if fkcf in data and data[fkcf] != 'all':
                            # assume all models use COMPOUND_KEY_FIELDS
                            if _fk_obj_type not in foreign_obj_type_search:
                                foreign_obj_type_search[_fk_obj_type] = {}
                            foreign_obj_type_search[_fk_obj_type][fkcf] = data[fkcf]
                pass
            # see if foreign key fields are indirectly named
            elif mi.is_foreign_key(obj_type, kf):
                (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                    kf)
                if fk_name in data and data[fk_name] != 'all':
                    new_data[kf] = data[fk_name]
        if (not skipforeignsearch): #skip foreign key search for routingrule type
            if len(foreign_obj_type_search):
                # This means to collect the entries, a search though a
                # related obj_type (through foreign key) will need to be done
                # a single query isn't enough, unless all entries are collected
                # consider the relationship between tag-mapping and tags
                #
                # This code seems to handle single indirected foreign key
                # lookup, but if deep_compound_key_fields() found more than
                # three layers deep (the obj-type has a fk reference to a
                # table, which had a fk reference to another table, which
                # had a value to search with), this won't do the trick.
                # at that point some sort of recursive building of the
                # foreign keys would be needed to collect up the required
                # final seraches
                for (_fk_obj_type, search) in foreign_obj_type_search.items():
                    fk_entries = sdnsh.rest_query_objects(_fk_obj_type, search)
                    # need to identify the name associated foreign key in this model
                    for fk in mi.obj_type_foreign_keys(obj_type):
                        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
                        if fk_obj == _fk_obj_type:
                            obj_type_field = fk
                            break
                    else:
                        raise error.CommandSemanticError("bigsearch: can't find fk reference"
                                                         "for %s for obj-type %s" %
                                                         (fk, obj_type))
                    big_search += [{obj_type_field:
                                    x[mi.pk(_fk_obj_type)]} for x in fk_entries]
                    # big_search would return id's for the _fk_obj_type,
                    # which can be used to search this obj_type
        # look for fields which are set in new_data, which aren't in data.
        for (field, value) in data.items():
            if field not in new_data:
                if mi.is_marked_searchable(obj_type, field) and value!='all':
                    new_data[field] = value

        data = new_data
    else:
        # Only allow fields which are searchable (XXX need a prediate)
        # only save primary key's and foreigh keys.
        new_data = {}
        if key in data and mi.is_primary_key(obj_type, key):
            new_data[key] = data[key]
        for fk in mi.obj_type_foreign_keys(obj_type):
            if fk in data:
                new_data[fk] = data[fk]
            # also admit the referenced (remote) field name when present
            (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, fk)
            if fk_fn in data:
                new_data[fk_fn] = data[fk_fn]
        for f in mi.obj_type_fields(obj_type):
            if f in data and f not in new_data:
                new_data[f] = data[f]

        data = new_data

    if scoped:
        data[key] = sdnsh.get_current_mode_obj()

    # 'all' (or a null key) means unconstrained: drop the pk filter
    if key in data and (data[key]=='all' or data[key]==None):
        del data[key]
    #
    # Now that the fields have been disassembled as much as possible, see
    # if some of the entries need to be cobbled back together.
    fks = mi.obj_type_foreign_keys(obj_type)
    if sdnsh.description: # description debugging
        print "command_query_object: %s foreign-key %s" % (obj_type, fks)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            if not fk in data or \
              (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):

                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                # parts of the compound key are present
                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing a error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                        if sdnsh.description: # description debugging
                            print "command_query_object: %s foreign key construction " % obj_type, data
    #
    # Do something for alias displays, for obj_types which sdnsh says
    # are aliases, find the foreign reference in the alias obj_type,
    # and use that to determine the field name (fk_fn) in the parent.
    # Do lookups based on either the alias field name, or the parent's
    # fk_fn when set in data{}
    if obj_type in mi.alias_obj_types:
        field = mi.alias_obj_type_field(obj_type)
        (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, field)
        new_data = {}
        if fk_fn in data and data[fk_fn] != 'all':
            new_data[field] = data[fk_fn]
        elif field in data and data[field] != 'all':
            new_data[field] = data[field]
        data = new_data

    #
    # The sort value ought to be a command separated list of fields within the model
    #
    if sort:
        data['orderby'] = sort

    # obj_types without a db model are fetched via their rest url instead
    if not mi.obj_type_has_model(obj_type):
        return rest_to_model.get_model_from_url(obj_type, data)

    if sdnsh.description: # description debugging
        print "command_query_object: ", obj_type, data

    if len(big_search):
        entries = []
        if sdnsh.description: # description debugging
            print "command_query_object: big search", big_search
        # one query per related-object id, results concatenated
        for bs in big_search:
            search = dict(list(bs.items()) + list(data.items()))
            entries += sdnsh.rest_query_objects(obj_type, search)
        # XXX needs to be re-sorted
        return entries

    return sdnsh.rest_query_objects(obj_type, data)
1219
1220
def command_display_table_join_entries(obj_type, data, entries, detail):
    """
    Perform obj_type specific joins, folding additional fields into the
    rows of 'entries' (in place) before they are displayed.

    @param obj_type string, name of the object type being displayed
    @param data dict, original query parameters (currently unused here)
    @param entries list of dicts, updated in place
    @param detail string, the detail-flavor; returned unchanged
    @return the detail string to use for display
    """
    if obj_type == 'tag-mapping':
        # lift persist from the parent tag
        if len(entries) == 1:
            entry = entries[0]
            tag = sdnsh.rest_query_objects('tag', { mi.pk('tag') : entry['tag']})
            entry['persist'] = tag[0]['persist']
        else:
            # many rows: query all tags once instead of one query per row
            # key? value? for the _dict?
            tags = create_obj_type_dict('tag', mi.pk('tag'))
            for entry in entries:
                entry['persist'] = tags[entry['tag']][0]['persist']

    if obj_type == 'controller-node':
        # This is a bit odd, since the current node needs to be asked
        # which controller node it is
        url = "http://%s/rest/v1/system/controller" % sdnsh.controller

        result = sdnsh.store.rest_simple_request(url)
        check_rest_result(result)
        iam = json.loads(result)

        cluster_url = ("http://%s/rest/v1/system/ha/clustername"
                       % sdnsh.controller)
        result = sdnsh.store.rest_simple_request(cluster_url)
        check_rest_result(result)
        # perhaps ought to assert on len(result) == 1
        clustername = json.loads(result)[0]['clustername']

        # for each node, find a reachable ip, then ask it for its ha role
        for entry in entries:
            controller = None
            if entry['id'] == iam['id']:
                controller = sdnsh.controller
            else:
                # find interfaces which have a firewall rule open for
                # tcp/80.  ie: ip for the interface with rest-api role
                ips = local_interfaces_firewall_open("tcp", 80, entry)

                # controller-interfaces needs to be examined to determine
                # if there's an ip address to use to discover the ha-role
                if len(ips) == 1:
                    # Not even certain if this is reachable
                    if ips[0]['discovered-ip'] != '':
                        controller = ips[0]['discovered-ip']
                    elif ips[0]['ip'] != '':
                        controller = ips[0]['ip']
                    else:
                        entry['ha-role'] = 'no-ip'
                        entry['errors'] = 'No IP Address'
                else:
                    entry['errors'] = 'No IP Address'

            if controller == None:
                entry['errors'] = 'No ip address configured'
                entry['ha-role'] = 'unknown'
                continue

            try:
                url = "http://%s/rest/v1/system/ha/role" % controller
                result = sdnsh.store.rest_simple_request(url, timeout = 2)
                check_rest_result(result)
                ha_role = json.loads(result)
                entry['ha-role'] = ha_role['role']
                # a role reply without our clustername is suspect
                if not 'clustername' in ha_role:
                    entry['errors'] = 'no clustername in ha-role rest api'
                    entry['ha-role'] = 'Untrusted: %s' % ha_role['role']
                elif ha_role['clustername'] != clustername:
                    entry['errors'] = 'Not in HA Cluster, requires decomission'
                    entry['ha-role'] = 'External Cluster: %s' % ha_role['role']
                if 'change-date-time' in ha_role:
                    entry['change-date-time'] = ha_role['change-date-time']
                if 'change-description' in ha_role:
                    entry['change-description'] = ha_role['change-description']
            except urllib2.HTTPError, e: # timeout?
                entry['errors'] = e.reason
                entry['ha-role'] = 'unknown'
                continue
            except urllib2.URLError, e: # timeout?
                entry['errors'] = '%s: %s' % (controller, e.reason)
                entry['ha-role'] = 'unknown'
                continue # don't try the uptime, it will fail too
            except Exception, e:
                entry['errors'] = str(e)
                entry['ha-role'] = 'unknown'

            url = "http://%s/rest/v1/system/uptime" % controller
            try:
                result = sdnsh.store.rest_simple_request(url)
                check_rest_result(result)
                uptime = json.loads(result)
                entry['uptime'] = uptime['systemUptimeMsec']

            except Exception, e:
                # uptime is best-effort; leave the column blank on failure
                pass

    return detail
1319
1320
1321def command_display_table(obj_type, data, detail = 'default',
1322 table_format = None, title = None, scoped = None, sort = None):
1323
1324 """
1325 Display entries from a obj_type, with some filtering done via data,
1326 and the output format described by table_format, with the devel of detail in detail
1327
1328 @param obj_type string name of the object type
1329 @param data dictionary of configured data items from the description
1330 @param table_format string describing table format to use for output
1331 @param detail string describing the detail-flavor for format
1332 @param scoped string, when not null, indicates the submode level is used to filter query request
1333 @param sort string, describes sort to append to the query request
1334 """
1335
1336 if not mi.obj_type_exists(obj_type):
1337 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1338
1339 if sdnsh.description: # description debugging
1340 print "command_display_table:", obj_type, data, table_format, detail, scoped, sort
1341
1342 if 'detail' in data:
1343 detail = data['detail']
1344
1345 if not table_format:
1346 if 'format' in data:
1347 table_format = data['format']
1348 else:
1349 table_format = obj_type
1350 if 'scoped' in data:
1351 scoped=data['scoped']
1352 del data['scoped']
1353 entries = command_query_object(obj_type, data, scoped, sort)
1354 if sdnsh.description: # description debugging
1355 print "command_display_table: %d entries found, using %s" % (len(entries), data)
1356
1357 # update any of the pretty-printer tables based on the obj_type
1358 obj_type_show_alias_update(obj_type)
1359
1360 # with_key manages whether a 'detail' or table is displayed.
1361 with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'
1362
1363 # pick foreign keys which are compound keys, explode these into fields
1364 fks = [x for x in mi.obj_type_foreign_keys(obj_type) if mi.is_compound_key(obj_type,x)]
1365 for entry in entries:
1366 for fk in fks:
1367 if fk in entry: # fk may be null-able
1368 mi.split_compound_into_dict(obj_type, fk, entry, True)
1369 #
1370 detail = command_display_table_join_entries(obj_type, data, entries, detail)
1371
1372 # use display_obj_type_rows since it (currently) joins fields for obj_types.
1373 display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)
1374 if title:
1375 return title + display
1376 return display
1377
1378
1379def command_display_rest_join_entries(table_format, data, entries, detail):
1380 """
1381 @param table_format string, identifying the final table output
1382 @param data dict, used to query the rest api output
1383 @param entries list of dicts, ready to be displayed
1384 @return string replacing detail
1385
1386 """
1387
1388 if sdnsh.description: # description debugging
1389 print "command_display_rest_join_entries: ", table_format, data, detail
1390
1391 if table_format == 'controller-interface':
1392 # join firewall rules for these interfaces
1393 for intf in entries:
1394 rules = [x['rule'] for x in sdnsh.get_firewall_rules(intf['id'])]
1395 intf['firewall'] = ', '.join(rules)
1396
1397 if table_format == 'system-clock':
1398 # join the 'time' string, possibly remove 'tz' from entries[0]
1399 entries[0]['time'] = sdnsh.get_clock_string(entries[0], data.get('detail'))
1400 return 'details' # force table format
1401
1402 return detail
1403
1404
1405def command_display_rest_type_converter(table_format, rest_type, data, entries):
1406 """
1407 the expected display table_format is a list of dictionaries
1408 each dictionary has the field : value pairs. Many rest api's
1409 return a dictionary of different layers, the description
1410 provides a rest-type, which is used to describe the form
1411 of the value returned from the rest api.
1412 """
1413
1414 if sdnsh.description: # description debugging
1415 print "command_display_rest_type_converter: ", table_format, rest_type
1416
1417 if rest_type.startswith('dict-of-list-of-'):
1418 # entries look like { row_name : [value, ...], ... more-row-value-pairs }
1419 #
1420 # dict-of-list-of: a dict with key's which are given
1421 # the name of the first token, then the dict's value is
1422 # a list which can be given an associated name.
1423 # for example 'dict-of-list-of-cluster-id|[switches]'
1424 #
1425 # 'dict-of-list-of-switch' is a dict with key : value's
1426 # where the value is a list. The member's of the list
1427 # are dictionaries. the key of the outer dict is added to
1428 # each of the dicts, and this interior dict is added to
1429 # the final output list.
1430
1431 # identify the added key from the rest_type
1432 key = rest_type.replace('dict-of-list-of-','')
1433 parts = key.split('|')
1434 names = None
1435 build_list = False
1436 if len(parts) > 0:
1437 key = parts[0]
1438 names = parts[1:] # should only be one name
1439 if len(names) > 0 and names[0][0] == '[':
1440 build_list = True
1441 formatted_list = []
1442 for (row_name, rows) in entries.items():
1443 if not rows:
1444 continue
1445 # use the names as ways of describing each of the list's items
1446 if type(rows) == list and build_list:
1447 # name[0] looks like '[switches]', requesting that this
1448 # list become switches : [rows]
1449 formatted_list.append({key : row_name, names[0][1:-1] : rows})
1450 elif type(rows) == list:
1451 for row in rows:
1452 add_dict = {key : row_name}
1453 if type(row) == str or type(row) == unicode:
1454 add_dict[names[0]] = row
1455 elif type(row) == dict:
1456 # addition names make no difference
1457 add_dict.update(row)
1458 formatted_list.append(add_dict)
1459 elif type(rows) == dict:
1460 do_append = True
1461 new_row = { key : row_name }
1462 for name in [x for x in names.keys() if x in row]:
1463 item = row[name]
1464 if type(item) == str or type(item) == unicode:
1465 new_row[name] = item
1466 if type(item) == dict:
1467 new_row[name].update(item)
1468 if type(item) == list:
1469 do_append = False
1470 for i_row in item:
1471 new_row.update(i_row)
1472 formatted_list.append(new_row)
1473 new_row = { key : row_name }
1474 if do_append:
1475 formatted_list.append(new_row)
1476
1477 entries = formatted_list
1478 elif rest_type.startswith('dict-of-dict-of-'):
1479 # entries looks like { row_name : { [ { }, ... ] } }
1480 # ^
1481 # want this |
1482 # ie: dict with a value which is a dict, whose
1483 # 'dict-of-dict-of-switch|ports' The dict has key : values
1484 # where the value is a dict. That dict has the 'switch' : key
1485 # added, and it becomes the final output dict.
1486 #
1487 # if a second name is included, then the outer dict is
1488 # examined to find these values (ie: values[names]), and these
1489 # get added to the final output dict.
1490 #
1491 # identify the added key from the rest_type
1492 key = rest_type.replace('dict-of-dict-of-','')
1493 parts = key.split('|')
1494 name = None
1495 if len(parts) > 0:
1496 names = parts[1:]
1497 key = parts[0]
1498 formatted_list = []
1499 for (row_name, row) in entries.items():
1500 row[key] = row_name
1501 do_append = False
1502 if names:
1503 new_row = {}
1504 for name in names:
1505 if name in row:
1506 item = row[name]
1507 if type(item) == str or type(item) == unicode:
1508 new_row[name] = item
1509 do_append = True
1510 elif type(item) == dict:
1511 if name == row_name:
1512 do_append = True
1513 elif type(item) == list:
1514 for i_row in item:
1515 row_items = {}
1516 row_items[key] = row_name
1517 row_items.update(i_row)
1518 formatted_list.append(row_items)
1519 if do_append:
1520 formatted_list.append(row)
1521
1522 else:
1523 formatted_list.append(row)
1524
1525 entries = formatted_list
1526 elif rest_type.startswith('dict-with-'):
1527 # rest result looks like: { k : v, k : { } }
1528 # ^
1529 # want this |
1530 # dict-with: typically used for dict returns which have
1531 # nested dict's who's values are promoted to a single
1532 # list with a dict with these values.
1533 #
1534 # identify the added key from the rest_type
1535 key = rest_type.replace('dict-with-','')
1536 names = key.split('|')
1537 collect_row = {}
1538 formatted_list = []
1539 for name in names:
1540 if name in entries:
1541 item = entries[name]
1542 if type(item) == str or type(item) == unicode or \
1543 type(item) == int or type(item) == long: # XXX float?
1544 collect_row[name] = item
1545 elif type(item) == list:
1546 for i_row in item:
1547 row_items = {}
1548 formatted_list.append(i_row)
1549 elif type(item) == dict:
1550 collect_row.update(item)
1551
1552 if len(collect_row) == 0:
1553 entries = formatted_list
1554 else:
1555 entries = [collect_row] + formatted_list
1556
1557 elif rest_type == 'dict':
1558 entries = [entries]
1559 else:
1560 raise error.CommandDescriptionError("Unknown rest-type: %s" % rest_type)
1561 return entries
1562
1563
def missing_part(key_parts, entry, key_case = False):
    """
    Return the first name from key_parts which the 'entry' dictionary
    lacks; return None when every part is present.

    This is used to identify rows which don't have all the
    parts needed to construct a join key, or a db-table or
    query "key" to support addition of two different tables.

    @param key_parts list of strings
    @param entry dictionary, expected to contain each string in key_parts
    @param key_case True when a part may carry a leading '~', marking a
           field joined case-insensitively; the '~' is stripped before
           the second lookup, and the stripped name is returned on a miss
    """
    for part in key_parts:
        if part in entry:
            continue
        if key_case == False:
            return part
        if part[0] != '~':
            return part
        if part[1:] not in entry:
            return part[1:]

    return None
1590
1591
def case_cvt(fn, f_dict):
    """
    Fetch the value for field 'fn' from f_dict as a string, lower-casing
    it when fn carries a leading '~' (the marker for fields joined
    case-insensitively); a missing field yields ''.

    For join operations, the fields in the partial result can no longer
    be associated with any obj-type, which means its impossible to determine
    whether the associated field is case sensitive.

    One approach to this problem is to case-normalize the obj-type's
    field values when they're first added to the row.  That doesn't
    help for rest-api's, which means it can only be a partial solution.
    In addition, it makes sense to preserve these values when possible,
    but still join based on case-normalization.
    """
    if fn[0] == '~':
        value = f_dict.get(fn[1:], '')
        return str(value.lower())
    return str(f_dict.get(fn, ''))
1607
1608
def obj_type_field_case(data, obj_type, field):
    """
    Return data[field] as a string, case-normalized when the model
    identifies the obj_type's field as needing case conversion.
    """
    case = mi.get_obj_type_field_case_sensitive(obj_type, field)
    if not case:
        return str(data[field])
    return str(utif.convert_case(case, data[field]))
1616
1617
def add_fields(dest, src):
    """
    Merge the entries of 'src' into 'dest' (both dictionaries), leaving
    an existing 'dest' entry in place when the two values differ only in
    case.  This operation is handy since the original 'dest' entries may
    differ from the 'src' due to case normalization.  Since having
    consistent names is a plus, by not updating the value with the 'src'
    entries, 'dest' retains its original values.
    """
    for (n, v) in src.items():
        if n not in dest:
            dest[n] = v
        elif str(dest[n]).lower() == str(v).lower():
            # case-insensitive match: keep dest's original casing.
            # (fixed: this was 'str(v).lower' -- the missing call made the
            # comparison always False, so dest was always overwritten)
            # should have better controls for when the case matters
            if sdnsh.description:
                print('ADD %s skipping updating %s <-> %s' % (n, dest[n], v))
        else:
            dest[n] = v
1636
1637
def command_query_table(obj_type, data,
                        clear = True,
                        key = None, append = None, scoped = None, sort = None, crack = None):
    """
    Query a db/store table, leaving the result in command's global
    query_result, which can be used by other c_action steps.

    'key' is one or more fields ('|'-separated) which are concatenated
    together to form the display-pipeline's version of a primary key.
    It could be the actual primary key of the table, or it could be some
    fields which appear in all the rows.  Once the 'key' is constructed,
    it is used to determine how results are added to command.query_result.

    If the existing entries are to be 'cleared', then the primary keys
    are simply added to the table.  When the entries aren't cleared, the
    computed primary key is used to join against existing items.

    Finally, the dict field name for the primary key is a single
    character: '@'.  This name was picked since it's not possible for
    the database to ever use that name.

    @param obj_type model/table name to query
    @param data     dict of query parameters
    @param clear    True: replace command.query_result;
                    False: append/join into the existing result
    @param key      '|'-separated field names forming the join key
    @param append   dict, list-of-pairs, or single field name added to
                    every result row
    @param scoped   passed through to command_query_object (may be
                    overridden by data['scoped'])
    @param sort     passed through to command_query_object
    @param crack    True (use the obj-type's pk) or a field name whose
                    compound value is split into separate fields
    """

    if not mi.obj_type_exists(obj_type):
        raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)

    if sdnsh.description:   # description debugging
        print("command_query_table: %s %s %s %s %s %s %s %s" %
              (obj_type, data, clear, key, append, scoped, sort, crack))

    if 'scoped' in data:
        scoped = data['scoped']
        del data['scoped']

    result = command_query_object(obj_type, data, scoped, sort)
    if sdnsh.description:   # description debugging
        print("command_query_table: %d entries found, using %s" %
              (len(result), data))

    # optionally split a compound-key field into its component fields
    if crack:
        if crack == True:
            crack = mi.pk(obj_type)
        for entry in result:
            mi.split_compound_into_dict(obj_type, crack, entry, True)

    # optionally add constant fields to every row
    if append:
        for entry in result:
            if type(append) == dict:
                entry.update(append)
            elif type(append) == list:
                entry.update(dict(append))
            else:
                entry[append] = True

    # all the fields from all the rows need to be present.
    if key:
        fields = key.split('|')

    if clear:
        command.query_result = result
        if key:
            for r in result:
                missing = missing_part(fields, r)
                if missing:
                    if sdnsh.description:
                        # BUG FIX: was a backslash-continued string literal
                        # that embedded indentation and stray quotes in the
                        # debug output.
                        print("command_query_table: missing field %s in row (%s)" %
                              (missing, obj_type))
                    continue
                r['@'] = '|'.join([obj_type_field_case(r, obj_type, f) for f in fields])
    else:
        if key == None:
            # BUG FIX: was 'command.query_resuls', a typo which raised
            # AttributeError on this path; guard with getattr since the
            # attribute may not exist yet.
            if getattr(command, 'query_result', None) != None:
                command.query_result += result
            else:
                command.query_result = result
        else:
            # index the new rows by their computed key ...
            r_dict = {}
            for r in result:
                missing = missing_part(fields, r)
                if missing:
                    if sdnsh.description:
                        print("command_query_table: missing field %s in row (%s)" %
                              (missing, obj_type))
                    continue
                pk = '|'.join([r[f] for f in fields])
                r_dict[pk] = r
            if hasattr(command, 'query_result') and command.query_result:
                # ... then join against existing rows via their '@' key;
                # unmatched new rows are appended afterwards
                for qr in command.query_result:
                    if '@' in qr and qr['@'] in r_dict:
                        add_fields(qr, r_dict[qr['@']])
                        del r_dict[qr['@']]
                command.query_result += r_dict.values()
            else:
                for (r, value) in r_dict.items():
                    value['@'] = '|'.join([value[f] for f in fields])
                command.query_result = r_dict.values()
1732
1733
def command_query_rest(data,
                       url = None, path = None, clear = True,
                       key = None, rest_type = None, scoped = None, sort = None, append = None):
    """
    Query a rest api and leave the result in command's global query_result,
    which can be used by other c_action steps (query-table, join-table,
    join-rest, display).

    'key' is one or more fields ('|'-separated) which are concatenated
    together to form the display-pipeline's version of a primary key.
    It could be the actual primary key of the table, or it could be some
    fields which appear in all the rows.  Once the 'key' is constructed,
    it is used to determine how results are added to command.query_result.

    If the existing entries are to be 'cleared', then the primary keys
    are simply added to the table.  When the entries aren't cleared, then
    the computed primary key is used to join against existing items.

    Finally, the dict field name for the primary key is a single character: '@'
    This name was picked since its not possible for the database to ever
    use that name.

    'url' may be a single format string or a list of candidates; the
    first one that can be fully populated from 'data' is used.
    'rest_type' selects a converter for the raw json entries.
    """

    if sdnsh.description:   # description debugging
        print "command_query_rest:", url, path, rest_type, data, scoped, sort, append

    if url == None and path == None:
        raise error.CommandDescriptionError("missing url or path")

    if path:
        # sdndb-path based query; NOTE(review): 'result' is only bound when
        # a schema is found, yet it is printed unconditionally below --
        # a missing schema would raise NameError here.  Confirm upstream
        # guarantees a schema before changing.
        schema = sdnsh.sdndb.schema_detail(path)
        if schema:
            result = sdnsh.sdndb.data_rest_request(path)
        if key:
            # create a key dictionary, with the key values, pointing to
            # a path in the schema.
            pass
        print 'PATH', path, result
    else:
        # if url is a list, pick the first one which can be build from the data
        if type(url) == list:
            select_url = url
        else:
            select_url = [url]

        # first url whose format references are all satisfied by 'data'
        use_url = None
        for u in select_url:
            try:
                use_url = (u % data)
                break
            except:
                pass

        if use_url == None:
            if sdnsh.description:   # description debugging
                print "command_query_rest: no url found"
            return

        query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url

        if sdnsh.description:   # description debugging
            print "command_query_rest: query ", query_url
        try:
            result = sdnsh.store.rest_simple_request(query_url)
            check_rest_result(result)
            entries = json.loads(result)
        except Exception, e:
            # failed queries degrade to an empty result rather than raising
            if sdnsh.description or sdnsh.debug:
                print 'command_query_rest: ERROR url %s %s' % (url, e)
            entries = []

        if entries == None or len(entries) == 0:
            if sdnsh.description:   # description debugging
                print "command_query_rest: no new entries ", query_url
            if clear:
                command.query_result = None
            return

        # It certainly seems possible to map from url's to the type associated,
        # with the result, but it also makes sense to encode that type information
        # into the description
        if rest_type:
            result = command_display_rest_type_converter(None,
                                                         rest_type,
                                                         data,
                                                         entries)
            if sdnsh.description:   # description debugging
                print "command_query_rest: %s #entries %d " % (url, len(entries))
                print result
        else:
            result = []
            import fmtcnv
            # special-case handling for the 'links' url: collapse each
            # bidirectional link pair into a single row.
            if (url == 'links'):
                if (onos == 1):
                    for entry in entries:
                        src = entry.get('src')
                        dst = entry.get('dst')
                        # drop the reverse direction of this link.
                        # NOTE(review): this mutates 'entries' while both
                        # loops iterate over it -- appears to work because
                        # only entries at/after the outer cursor are
                        # removed, but confirm before relying on order.
                        for tempEntry in entries:
                            if cmp(src, tempEntry.get('dst')) == 0:
                                if cmp(dst, tempEntry.get('src')) == 0:
                                    entries.remove(tempEntry)
                        result.append({
                            'src-switch'     : fmtcnv.print_switch_and_alias(entry['src']['dpid']),
                            'src-port'       : entry['src']['portNumber'],
                            'src-port-state' : 0,
                            'dst-switch'     : fmtcnv.print_switch_and_alias(entry['dst']['dpid']),
                            'dst-port'       : entry['dst']['portNumber'],
                            'dst-port-state' : 0,
                            'type'           : entry['type'],
                            })
                elif onos == 2:
                    # onos 2 wraps the list and uses device/port field names
                    entries = entries.get('links')
                    for entry in entries:
                        src = entry.get('src')
                        dst = entry.get('dst')
                        for tempEntry in entries:
                            if cmp(src, tempEntry.get('dst')) == 0:
                                if cmp(dst, tempEntry.get('src')) == 0:
                                    entries.remove(tempEntry)
                        result.append({
                            #'src-switch'     : fmtcnv.print_switch_and_alias(entry['src']['device']),
                            'src-switch'     : entry['src']['device'],
                            'src-port'       : entry['src']['port'],
                            'src-port-state' : 0,
                            #'dst-switch'     : fmtcnv.print_switch_and_alias(entry['dst']['device']),
                            'dst-switch'     : entry['dst']['device'],
                            'dst-port'       : entry['dst']['port'],
                            'dst-port-state' : 0,
                            'type'           : None,
                            })
            else:
                result = entries

        # optionally add constant fields to every row
        if append:
            for entry in result:
                if type(append) == dict:
                    entry.update(append)
                elif type(append) == list:
                    entry.update(dict(append))
                else:
                    entry[append] = True

        if key:
            fields = key.split('|')

        if clear:
            command.query_result = result
            if key:
                for r in result:
                    r['@'] = '|'.join([r[f] for f in fields])
        else:
            if key == None:
                # no join key: simply append to any existing result
                if command.query_result != None:
                    command.query_result += result
                else:
                    command.query_result = result
            else:
                # join the new rows into existing rows via the '@' key
                r_dict = {}
                for r in result:
                    missing = missing_part(fields, r, key_case = True)
                    if missing:
                        if sdnsh.description:
                            print "command_query_rest: missing field %s in row %s" % (missing, r)
                        continue
                    pk = '|'.join([case_cvt(f, r) for f in fields])
                    r_dict[pk] = r
                for qr in command.query_result:
                    if '@' in qr and qr['@'] in r_dict:
                        add_fields(qr, r_dict[qr['@']])
1903
1904
def command_join_rest(url, data, key, join_field,
                      add_field = None, rest_type = None, crack = None, url_key = None):

    """
    Join rows fetched from a rest api into the existing
    command.query_result rows, matching on a computed key.

    'key' names the '|'-separated fields of the fetched entries used to
    build the join key; 'join_field' names the '|'-separated fields of
    the existing query_result rows matched against it.  'add_field'
    ('name|from1|from2...') optionally restricts which joined fields are
    added, and under what name.

    url-key allows single row results to have a name:value added to the
    entry in situations where a single dictionary is computed after the
    rest-type conversion.  this allows simple results from the url to
    have a keyword added to allow joins.
    """
    if not hasattr(command, 'query_result'):
        if sdnsh.description:   # description debugging
            print "command_join_rest: no entries found"
        return

    if command.query_result == None:
        if sdnsh.description:   # description debugging
            print "command_join_rest: query_result: None"
        return

    if sdnsh.description:   # description debugging
        print "command_join_rest: %d entries found, using %s, url %s" % \
                (len(command.query_result), data, url)
        print "command_join_rest:", data, key, join_field

    if url == None:
        return
    if join_field == None:
        return
    if key == None:
        return


    # Collect all the queries, removing any duplicates
    queries = {}
    for entry in command.query_result:
        # if url is a list, pick the first one which can be build from the data
        if type(url) == list:
            select_url = url
        else:
            select_url = [url]

        # first url whose format references are all satisfied by this entry
        use_url = None
        for u in select_url:
            try:
                use_url = (u % entry)
                break
            except:
                pass

        if use_url == None:
            if sdnsh.description:   # description debugging
                print "command_join_rest: no url found", url
            continue
        query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url

        if sdnsh.description:   # description debugging
            print "command_join_rest: query ", query_url, entry
        if query_url in queries:
            continue

        try:
            result = sdnsh.store.rest_simple_request(query_url)
            check_rest_result(result)
            entries = json.loads(result)
        except Exception, e:
            # best-effort: a failed query contributes no rows
            entries = []

        if entries == None or len(entries) == 0:
            continue

        # It certainly seems possible to map from url's to the type associated,
        # with the result, but it also makes sense to encode that type information
        # into the description
        if rest_type:
            queries[query_url] = command_display_rest_type_converter(None,
                                                                     rest_type,
                                                                     data,
                                                                     entries)
            #
            # url_key allows the addition of a key for joining for single results
            if url_key and len(queries[query_url]) == 1:
                queries[query_url][0][url_key] = entry.get(url_key)

            if sdnsh.description:   # description debugging
                print "command_join_rest: %s #entries %d #result %s" % \
                        (url, len(entries), len(queries[query_url]))
        else:
            queries[query_url] = entries

    # From the query results, generate the dictionary to join through

    # NOTE(review): the loop below rebinds the 'url' parameter; safe only
    # because 'url' is not used again afterwards except for debug prints.
    key_parts = key.split('|')  # all the fields needed to make a key
    key_dict = {}               # resulting key dictionary
    for (url, value) in queries.items():
        for entry in value:
            # see if all the key parts are in the entry
            missing = missing_part(key_parts, entry)
            if missing:
                if sdnsh.description:
                    print 'command_join_rest: missing field %s in %s' % (missing, entry)
                continue
            new_key = '|'.join([str(entry[kn]) for kn in key_parts])
            if sdnsh.description:   # description debugging
                print 'command_join_rest: new-key', new_key
            key_dict[new_key] = entry

    # Using the key-dictionary, look for matches from the original entries

    # split 'add_field' into the destination name and source field names
    if add_field:
        parts = add_field.split('|')
        from_fields = None
        if len(parts):
            add_field = parts[0]
            from_fields = parts[1:]

    join_parts = join_field.split('|')
    for entry in command.query_result:
        if len(join_parts):
            missing = missing_part(join_parts, entry, key_case = True)
            if missing:
                if sdnsh.description:   # description debugging
                    print "command_join_rest: missing field %s in %s" % (missing, entry)
                continue

            joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
        else:
            # NOTE(review): split() always yields at least one part, so
            # this branch appears unreachable -- confirm before removing.
            if sdnsh.description:   # description debugging
                print "command_join_rest: joining ", entry, join_field, entry.get(join_field)
            if not join_field in entry:
                continue
            joiner = case_cvt(join_field, entry)

        if sdnsh.description:   # description debugging
            print "command_join_rest: joining ", entry, joiner, key_dict.get(joiner)

        if joiner in key_dict:
            # add all the entries from the key_dict
            if sdnsh.description:   # description debugging
                print 'command_join_rest: ADD', key_dict[joiner]
            if add_field == None:
                add_fields(entry, key_dict[joiner])
            elif from_fields:
                if len(from_fields) == 1:
                    # add a single field
                    if from_fields[0] in key_dict[joiner]:
                        entry[add_field] = key_dict[joiner][from_fields[0]]
                else:
                    # add a dictionary
                    entry[add_field] = dict([[ff, key_dict[joiner][ff]]
                                             for ff in from_fields])
            else:
                entry[add_field] = key_dict[joiner]

    if sdnsh.description:   # description debugging
        print "command_join_rest: ", command.query_result
2060
2061
def command_join_table(obj_type, data, key, join_field,
                       key_value = None, add_field = None, crack = None):
    """
    Add fields to the current command.query_result by looking up the entry in
    the db/store.  'key' represents the value of the index to use from
    the entries read from the database.  The key can be composed of
    multiple fields within the entry.  The join_field is the name
    of the field within the command.query_result to use as the value to match
    against the key field.

    When key_value is None, the matched entry from the join_field's is
    treated as a dictionary, and all the pairs of name:values are added
    directly to the new entry.

    When key_value is a field name, the joined entries are collected
    as a list, and added to the new entry at the key_value name.
    (see the use of tag-mapping as an example)

    @param obj_type  table/model whose rows are joined in
    @param data      parameters for the model fetch
    @param key       '|'-separated field names forming the join key
    @param join_field '|'-separated field names of query_result rows
    @param key_value when set, collect matches as a list under this name
    @param add_field 'name|from1|from2...': which joined fields to add,
                     and under what name
    @param crack     True (use the obj-type's pk) or a field name whose
                     compound value is split into separate fields
    """
    if not hasattr(command, 'query_result'):
        if sdnsh.description:   # description debugging
            print("command_join_table: no entries found")
        return

    if command.query_result == None:
        if sdnsh.description:   # description debugging
            print("command_join_table: query_result: None")
        return

    if sdnsh.description:   # description debugging
        print("command_join_table: %d entries found, using %s, obj_type %s %s %s" %
              (len(command.query_result), data, obj_type, key, join_field))
        print("command_join_table: %s %s %s" % (data, key, join_field))

    if join_field == None:
        return
    if key == None:
        return

    if not mi.obj_type_exists(obj_type):
        raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)

    # build the join_dict, which will have keys for the items to
    # add into the entries
    if not mi.obj_type_has_model(obj_type):
        entries = rest_to_model.get_model_from_url(obj_type, data)
    else:
        entries = sdnsh.get_table_from_store(obj_type)

    # determine whether specific field names are added
    if add_field:
        parts = add_field.split('|')
        from_fields = None
        if len(parts):
            add_field = parts[0]
            from_fields = parts[1:]

    # construct the join key for each row from the db table
    key_parts = key.split('|')  # all the fields needed to make a key
    key_dict = {}               # resulting key dictionary
    for entry in entries:
        # see if all the key parts are in the entry
        missing = missing_part(key_parts, entry)
        if missing:
            if sdnsh.description:   # description debugging
                print("command_join_table: missing field %s in %s" % (missing, entry))
            continue

        new_key = '|'.join([obj_type_field_case(entry, obj_type, kn) for kn in key_parts])
        if sdnsh.description:   # description debugging
            print('command_join_table: new-key %s %s' % (new_key, key_value))
        if key_value:
            # collect multiple matches per key as a list
            if not new_key in key_dict:
                key_dict[new_key] = [entry]
            else:
                key_dict[new_key].append(entry)
        else:
            key_dict[new_key] = entry


    # let 'crack' contain the field's name, not a boolean.
    if crack and crack == True:
        crack = mi.pk(obj_type)

    # Using the key-dictionary, look for matches from the original entries

    join_parts = join_field.split('|')
    for entry in command.query_result:
        if len(join_parts):
            missing = missing_part(join_parts, entry, key_case = True)
            if missing:
                if sdnsh.description:   # description debugging
                    print("command_join_table: missing field %s in %s" % (missing, entry))
                continue

            joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
        else:
            if sdnsh.description:   # description debugging
                print("command_join_table: joining %s %s %s" %
                      (entry, join_field, entry.get(join_field)))
            if not join_field in entry:
                continue
            joiner = case_cvt(join_field, entry)

        if joiner in key_dict:
            if crack:
                # BUG FIX: was 'key_dict[entry[joiner]]', which indexed the
                # key dictionary with a value from the row instead of the
                # computed join key, raising KeyError (or cracking the
                # wrong row); every other use in this branch is
                # 'key_dict[joiner]'.
                if not crack in key_dict[joiner]:
                    if sdnsh.description:   # description debugging
                        print("command_join_table: field %s not in entry %s" %
                              (crack, key_dict[joiner]))
                else:
                    mi.split_compound_into_dict(obj_type, crack, key_dict[joiner], True)

            # add all the entries from the key_dict
            if sdnsh.description:   # description debugging
                print('command_join_table: ADD %s as %s ' % (key_dict[joiner], add_field))
            if add_field == None:
                if key_value:
                    entry[key_value] = key_dict[joiner]
                else:
                    add_fields(entry, key_dict[joiner])
            elif from_fields:
                if len(from_fields) == 1:
                    # add a single field; a list of matches yields a list
                    if type(key_dict[joiner]) == list:
                        entry[add_field] = [x[from_fields[0]] for x in key_dict[joiner]]
                    else:
                        entry[add_field] = key_dict[joiner][from_fields[0]]
                else:
                    # add a dictionary with named fields
                    if type(key_dict[joiner]) == list:
                        for item in key_dict[joiner]:
                            entry[add_field] = dict([[ff, item[ff]]
                                                     for ff in from_fields])
                    else:
                        entry[add_field] = dict([[ff, key_dict[joiner][ff]]
                                                 for ff in from_fields])

            else:
                entry[add_field] = key_dict[joiner]

    if sdnsh.description:   # description debugging
        print("command_join_table: %s" % command.query_result)
2202
2203
2204def command_display_rest(data, url = None, sort = None, rest_type = None,
2205 table_format = None, title = None, detail = None):
2206 """
2207 Perform a call to the rest api, and format the result.
2208
2209 When sort isn't None, it names a field whose's value are sorted on.
2210 """
2211 #just a hack check to implement decending sorting
2212 descending = False
2213 #raise error.ArgumentValidationError('\n\n\n %s' % (descending))
2214 if sdnsh.description: # description debugging
2215 print "command_display_rest: ", data, url, rest_type, table_format, detail
2216
2217 if not url:
2218 url = data.get('url')
2219 if not table_format:
2220 table_format = data.get('format')
2221
2222 check_single_entry = True
2223
2224 # if url is a list, pick the first one which can be build from the data
2225 select_url = url
2226 if url and type(url) == list:
2227 for u in url:
2228 try:
2229 select_url = (u % data)
2230 select_url = u # select this url from the list
2231 break
2232 except:
2233 pass
2234
2235 if not detail:
2236 detail = data.get('detail', 'default')
2237 url = "http://%s/rest/v1/" % sdnsh.controller + (select_url % data)
2238
2239 result = sdnsh.store.rest_simple_request(url)
2240 check_rest_result(result)
2241 if sdnsh.description: # description debugging
2242 print "command_display_rest: result ", result
2243 entries = json.loads(result)
2244 #rest_type = None
2245 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2246 #if 'realtimestats' in data and data['realtimestats'] == 'group':
2247
2248 entries2 = None
2249
2250
2251 if 'realtimestats' in data and data['realtimestats'] == 'group':
2252 url2 = "http://%s/rest/v1/" % sdnsh.controller + ("realtimestats/groupdesc/%(dpid)s/" % data)
2253 result2 = sdnsh.store.rest_simple_request(url2)
2254 check_rest_result(result2)
2255 if sdnsh.description: # description debugging
2256 print "command_display_rest: groupdesc result ", result2
2257 entries2 = json.loads(result2)
2258
2259 # It certainly seems possible to map from url's to the type associated,
2260 # with the result, but it also makes sense to encode that type information
2261 # into the description
2262 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2263 rest_type =False
2264 if rest_type:
2265 entries = command_display_rest_type_converter(table_format,
2266 rest_type,
2267 data,
2268 entries)
2269 if 'realtimestats' in data and data['realtimestats'] == 'group':
2270 if entries2 is not None:
2271 entries2 = command_display_rest_type_converter(table_format,
2272 rest_type,
2273 data,
2274 entries2)
2275
2276 if 'router' in data and data['router'] == 'router':
2277 combResult = []
2278 for entry in entries:
2279 attributes = entry.get('stringAttributes')
2280 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2281 combResult.append({
2282 'dpid' : entry.get('dpid'),
2283 'routerIP' : attributes['routerIp'],
2284 'name' : attributes['name'],
2285 'isEdgeRouter' : attributes['isEdgeRouter'],
2286 'routerMac' : attributes['routerMac'],
2287 'nodeSId' : attributes['nodeSid'],
2288 },)
2289 entries = combResult
2290 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2291 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'port':
2292 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2293 combResult = []
2294 portList = entries
2295 for port in portList:
2296 portData = port.get("port")
2297 name = portData.get("stringAttributes").get('name')
2298 portNo = portData.get("portNumber") & 0xFFFF # converting to unsigned16int
2299 subnetIp = port.get("subnetIp")
2300 adjacency = str(port.get('adjacency'))
2301 combResult.append({
2302 'name' :name,
2303 'portNo' : portNo,
2304 'subnetIp' : subnetIp,
2305 'adjacency' : adjacency,
2306 })
2307 entries = combResult
2308 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2309 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2310 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2311 combResult = []
2312 adjacencyPairList = entries
2313 for adjacencyPair in adjacencyPairList:
2314 adjacencySid = adjacencyPair.get("adjacencySid")
2315 ports = adjacencyPair.get("ports")
2316 combResult.append({
2317 'adjacencySid' : adjacencySid,
2318 'ports' : ports,
2319 })
2320 entries = combResult
2321 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2322
2323 if 'showtunnel' in data and (data['showtunnel'] == 'tunnel' or data['detail'] == 'details'):
2324 #eraise error.ArgumentValidationError('\n\n\n %s' % (entries))
2325 combResult = []
2326 tunnelList = entries
2327 for tunnel in tunnelList:
2328 labelStackList = (tunnel.get('labelStack'))
2329 labelStackString = str(labelStackList)
2330 labelStackString = remove_unicodes(labelStackString)
2331 #labelStackList = (tunnel.get('labelStack'))
2332 #labelStackString ='['
2333 #for labelSack in labelStackList:
2334 # for label in labelSack:
2335 # labelStackString += (label + ',')
2336 #if labelStackString == '[':
2337 # labelStackString = ''
2338 #else:
2339 # labelStackString = labelStackString[:-1]
2340 # labelStackString += ']'
2341 tunnelId = tunnel.get('tunnelId')
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002342 tunnelsetId = tunnel.get('tunnelsetId')
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002343 tunnelPath = tunnel.get('tunnelPath')
2344 dpidGroup = str(tunnel.get('dpidGroup'))
2345 dpidGroup= remove_unicodes(dpidGroup)
2346 policies = tunnel.get('policies')
2347 combResult.append({
2348 'tunnelId' : tunnelId,
2349 'labelStack' : labelStackString,
2350 'dpidGroup' : dpidGroup,
2351 'tunnelPath' : tunnelPath,
2352 'policies' : policies,
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002353 'tunnelset' : tunnelsetId,
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002354 })
2355 entries = combResult
2356
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08002357 if 'showtunnelset' in data and (data['showtunnelset'] == 'tunnelset' or data['detail'] == 'details'):
2358 #eraise error.ArgumentValidationError('\n\n\n %s' % (entries))
2359 combResult = []
2360 tunnelsetList = entries
2361 for tunnelset in tunnelsetList:
2362 tunnelsetId = tunnelset.get('tunnelsetId')
2363 policies = tunnelset.get('policies')
2364 tunnelList = tunnelset.get('constituentTunnels')
2365 for tunnel in tunnelList:
2366 labelStackList = (tunnel.get('labelStack'))
2367 labelStackString = str(labelStackList)
2368 labelStackString = remove_unicodes(labelStackString)
2369 tunnelId = tunnel.get('tunnelId')
2370 tunnelPath = tunnel.get('tunnelPath')
2371 dpidGroup = str(tunnel.get('dpidGroup'))
2372 dpidGroup= remove_unicodes(dpidGroup)
2373 combResult.append({
2374 'tunnelsetId' : tunnelsetId,
2375 'policies' : policies,
2376 'tunnelId' : tunnelId,
2377 'labelStack' : labelStackString,
2378 'dpidGroup' : dpidGroup,
2379 'tunnelPath' : tunnelPath,
2380 'tunnelset' : tunnelsetId,
2381 })
2382 entries = combResult
2383
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08002384 if 'showpolicy' in data and data['showpolicy'] == 'policy':
2385 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2386 combResult = []
2387 portList = entries
2388 for policy in portList:
2389 policyId = policy.get("policyId")
2390 policyType = policy.get("policyType")
2391 priority = policy.get("priority")
2392 tunnelId = policy.get('tunnelId')
2393 match = policy.get("match")
2394 dstIpAddress = match.get('dstIpAddress')['value'] if match.get('dstIpAddress') else '*'
2395 dstMacAddress = match.get('dstMacAddress')['value'] if match.get('dstMacAddress') else '*'
2396 dstTcpPortNumber = match.get('dstTcpPortNumber') if match.get('dstTcpPortNumber') else '*'
2397 etherType = ('0x'+ str(match.get('etherType'))) if match.get('etherType') else '*'
2398 ipProtocolNumber = match.get('ipProtocolNumber') if match.get('ipProtocolNumber') else '*'
2399 srcIpAddress = match.get('srcIpAddress')['value'] if match.get('srcIpAddress') else '*'
2400 srcMacAddress = match.get('srcMacAddress')['value'] if match.get('srcMacAddress') else '*'
2401 srcTcpPortNumber = match.get('srcTcpPortNumber') if match.get('srcTcpPortNumber') else '*'
2402 combResult.append({
2403 'policyId' : policyId,
2404 'policyType' : policyType,
2405 'tunnelId' : tunnelId,
2406 'priority' : priority,
2407 'dstIpAddress' : dstIpAddress,
2408 'dstMacAddress' : dstMacAddress,
2409 'dstTcpPortNumber': dstTcpPortNumber,
2410 'etherType' : etherType,
2411 'ipProtocolNumber': ipProtocolNumber,
2412 'srcIpAddress' : srcIpAddress,
2413 'srcMacAddress' : srcMacAddress,
2414 'srcTcpPortNumber': srcTcpPortNumber,
2415
2416 })
2417 entries = combResult
2418
2419 if 'realtimestats' in data and 'tabletype' in data and data['realtimestats'] == 'table':
2420 combResult = []
2421 if data['tabletype'] == 'ip':
2422 #for decending sorting
2423 descending = True
2424 for ipTableEntry in entries:
2425 match = ipTableEntry['match']
2426 networkDestination = '*'
2427 if match :
2428 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2429 #raise error.ArgumentValidationError('\n\n\n %s' % json.tool(entries))
2430 instructions = ipTableEntry['instructions']
2431 actions = str(instructions[0]) if instructions[0] else None
2432 if actions != None:
2433 actions = remove_unicodes(actions)
2434 actions = renameActions(actions)
2435 actions = actions.lower()
2436 else:
2437 actions =''
2438 combResult.append({
2439 'switch' : ipTableEntry['switch'],
2440 'byteCount' : ipTableEntry['byteCount'],
2441 'packetCount' : ipTableEntry['packetCount'],
2442 'priority' : ipTableEntry['priority'],
2443 'cookie' : ipTableEntry['cookie'],
2444 'durationSeconds' : ipTableEntry['durationSec'],
2445 'networkDestination' : networkDestination,
2446 'actions' : actions,
2447 })
2448 elif data['tabletype'] == 'mpls':
2449 for ipTableEntry in entries:
2450 match = ipTableEntry['match']
2451 mplsTc = '*'
2452 mplsLabel = '*'
2453 mplsBos = '*'
2454 if match :
2455 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2456 mplsLabel = match.get('mplsLabel') if match.get('mplsLabel') else '*'
2457 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2458 instructions = ipTableEntry['instructions']
2459 #raise error.ArgumentValidationError('\n\n\n %s' %len(actions))
2460 actions = str(instructions[0])if instructions[0] else None
2461 if actions != None:
2462 actions = remove_unicodes(actions)
2463 actions = renameActions(actions)
2464 actions = actions.lower()
2465 else:
2466 actions =''
2467 combResult.append({
2468 'switch' : ipTableEntry['switch'],
2469 'byteCount' : ipTableEntry['byteCount'],
2470 'packetCount' : ipTableEntry['packetCount'],
2471 'cookie' : ipTableEntry['cookie'],
2472 'priority' : ipTableEntry['priority'],
2473 'mplsTc' : mplsTc,
2474 'mplsLabel' : mplsLabel,
2475 'mplsBos' : mplsBos,
2476 'durationSeconds' : ipTableEntry['durationSec'],
2477 'actions' : actions
2478 })
2479 elif data['tabletype'] == 'acl':
2480 descending = True
2481 for ipTableEntry in entries:
2482 match = ipTableEntry['match']
2483 networkDestination ='*'
2484 networkProtocol = '*'
2485 networkSource = '*'
2486 mplsTc = '*'
2487 mplsLabel = '*'
2488 mplsBos = '*'
2489 transportDestination = '*'
2490 inputPort = '*'
2491 transportSource = '*'
2492 dataLayerSource = '*'
2493 dataLayerDestination = '*'
2494 dataLayerType = '*'
2495 if match :
2496 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2497 networkProtocol = match.get('networkProtocol') if match.get('networkProtocol') else '*'
2498 networkSource = match.get('networkSource') if match.get('networkSource') else '*'
2499 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2500 mplsLabel = match.get('mplsLabel')if match.get('mplsLabel') else '*'
2501 transportDestination = match.get('transportDestination') if match.get('transportDestination') else '*'
2502 transportSource = match.get('transportSource') if match.get('transportSource') else '*'
2503 inputPort = match.get('inputPort') if match.get('inputPort') else '*'
2504 dataLayerSource = match.get('dataLayerSource') if match.get('dataLayerSource') else '*'
2505 dataLayerDestination = match.get('dataLayerDestination') if match.get('dataLayerDestination') else '*'
2506 dataLayerType= match.get('dataLayerType') if match.get('dataLayerType') else '*'
2507 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2508 instructions = ipTableEntry['instructions']
2509 actions = str(instructions[0])if instructions[0] else None
2510 if actions != None:
2511 actions = remove_unicodes(actions)
2512 actions = renameActions(actions)
2513 actions = actions.lower()
2514 else:
2515 actions = ''
2516 combResult.append({
2517 'switch' : ipTableEntry['switch'],
2518 'byteCount' : ipTableEntry['byteCount'],
2519 'packetCount' : ipTableEntry['packetCount'],
2520 'cookie' : ipTableEntry['cookie'],
2521 'priority' : ipTableEntry['priority'],
2522 'inputPort' : inputPort,
2523 'durationSeconds' : ipTableEntry['durationSec'],
2524 'networkSource' : networkSource,
2525 'networkDestination' : networkDestination,
2526 'networkProtocol' : networkProtocol,
2527 'dataLayerType' : dataLayerType,
2528 'dataLayerSource' : dataLayerSource,
2529 'dataLayerDestination' : dataLayerDestination,
2530 'mplsTc' : mplsTc,
2531 'mplsLabel' : mplsLabel,
2532 'mplsBos' : mplsBos,
2533 'transportDestination' : transportDestination,
2534 'transportSource' : transportSource,
2535 'actions' : actions
2536 })
2537 entries = combResult
2538
2539 if 'realtimestats' in data and data['realtimestats'] == 'group':
2540 combResult = []
2541 for groupStatEntry in entries:
2542 groupId = groupStatEntry["groupId"]
2543 groupDescEntry = None
2544 for entry in entries2:
2545 if groupId == entry["groupId"]:
2546 groupDescEntry = entry
2547 break
2548 if groupDescEntry is '':
2549 print "command_display_rest: missing group desc for group id %s" % (groupId)
2550 continue
2551
2552 if (len(groupStatEntry['bucketStats']) > 0):
2553 for bucketId in range(len(groupStatEntry['bucketStats'])):
2554 setsrcmac = ''
2555 if 'SET_DL_SRC' in groupDescEntry['bucketsActions'][bucketId]:
2556 setsrcmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_SRC']
2557 setdstmac = ''
2558 if 'SET_DL_DST' in groupDescEntry['bucketsActions'][bucketId]:
2559 setdstmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_DST']
2560 pushmpls = ''
2561 if 'PUSH_MPLS_LABEL' in groupDescEntry['bucketsActions'][bucketId]:
2562 pushmpls = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_LABEL']
2563 popmpls = ''
2564 if 'POP_MPLS' in groupDescEntry['bucketsActions'][bucketId]:
2565 popmpls = groupDescEntry['bucketsActions'][bucketId]['POP_MPLS']
2566 outport = ''
2567 if 'OUTPUT' in groupDescEntry['bucketsActions'][bucketId]:
2568 outport = groupDescEntry['bucketsActions'][bucketId]['OUTPUT']
2569 goToGroup = ''
2570 if 'goToGroup' in groupDescEntry['bucketsActions'][bucketId]:
2571 goToGroup = groupDescEntry['bucketsActions'][bucketId]['goToGroup']
2572 setBos= ''
2573 if 'PUSH_MPLS_BOS' in groupDescEntry['bucketsActions'][bucketId]:
2574 setBos = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_BOS']
2575 COPY_TTL_IN= ''
2576 if 'COPY_TTL_IN' in groupDescEntry['bucketsActions'][bucketId]:
2577 COPY_TTL_IN = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_IN']
2578 COPY_TTL_OUT= ''
2579 if 'COPY_TTL_OUT' in groupDescEntry['bucketsActions'][bucketId]:
2580 COPY_TTL_OUT = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_OUT']
2581 DEC_MPLS_TTL= ''
2582 if 'DEC_MPLS_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2583 DEC_MPLS_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_MPLS_TTL']
2584 DEC_NW_TTL= ''
2585 if 'DEC_NW_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2586 DEC_NW_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_NW_TTL']
2587
2588 combResult.append({
2589 'groupid' : groupId,
2590 'grouptype' : groupDescEntry['groupType'],
2591 'totalpktcnt' : groupStatEntry['packetCount'],
2592 'totalbytecnt' : groupStatEntry['byteCount'],
2593 'bucketpktcnt' : groupStatEntry['bucketStats'][bucketId]['pktCount'],
2594 'bucketbytecnt' : groupStatEntry['bucketStats'][bucketId]['byteCount'],
2595 'setsrcmac' : setsrcmac,
2596 'setdstmac' : setdstmac,
2597 'pushMplsLabel' : pushmpls,
2598 'popmpls' : popmpls,
2599 'outport' : outport,
2600 'goToGroup' : goToGroup,
2601 'setBos' : setBos,
2602 'COPY_TTL_IN' : COPY_TTL_IN,
2603 'COPY_TTL_OUT' : COPY_TTL_OUT,
2604 'DEC_MPLS_TTL' : DEC_MPLS_TTL,
2605 'DEC_NW_TTL' : DEC_NW_TTL,
2606 })
2607 else:
2608 combResult.append({
2609 'groupid' : groupId,
2610 'grouptype' : groupDescEntry['groupType'],
2611 'totalpktcnt' : groupStatEntry['packetCount'],
2612 'totalbytecnt' : groupStatEntry['byteCount'],
2613 'bucketpktcnt' : '',
2614 'bucketbytecnt' : '',
2615 'setsrcmac' : '',
2616 'setdstmac' : '',
2617 'pushMplsLabel' : '',
2618 'popmpls' : '',
2619 'outport' : '',
2620 'goToGroup' : '',
2621 'setBos' : '',
2622 'COPY_TTL_IN' : '',
2623 'COPY_TTL_OUT' : '',
2624 'DEC_MPLS_TTL' : '',
2625 'DEC_NW_TTL' : '',
2626 })
2627 entries = combResult
2628 #
2629 if format:
2630 #
2631 detail = command_display_rest_join_entries(table_format, data, entries, detail)
2632 #if 'realtimestats' in data and data['realtimestats'] == 'flow':
2633 # entries = sdnsh.fix_realtime_flows(entries)
2634 # check_single_entry = False
2635
2636 if 'realtimestats' in data and data['realtimestats'] == 'features':
2637 for entry in entries:
2638 entry['stp-state'] = entry['state']
2639
2640 # update any of the pretty-printer tables based on the table_format (obj_type)
2641 obj_type_show_alias_update(table_format % data)
2642
2643 if check_single_entry and entries and len(entries) == 1 and detail == 'details':
2644 return sdnsh.pp.format_entry(entries[0],
2645 table_format % data,
2646 detail,
2647 sdnsh.debug)
2648 if sort:
2649 if descending:
2650 reverse = True
2651 else:
2652 reverse = False
2653 def sort_cmp(x,y):
2654 for f in sort:
2655 if f in x:
2656 c = cmp(x.get(f), y.get(f))
2657 if c != 0:
2658 return c
2659 return 0
2660 entries = sorted(entries, cmp=sort_cmp, reverse=reverse )
2661 if 'realtimestats' in data and data['realtimestats'] == 'group':
2662 repeatGroupId = -1
2663 length = len(entries)
2664 for i in range(0, length):
2665 entry = entries[i]
2666 groupId = entry.get('groupid')
2667 if groupId == repeatGroupId:
2668 entries[i]['groupid'] = ''
2669 else:
2670 repeatGroupId = groupId
2671
2672 display = sdnsh.pp.format_table(entries, table_format % data, detail)
2673 else:
2674 display = entries
2675
2676 if title:
2677 return title + display
2678 return display
2679
2680
2681def command_crack(field):
2682 """
2683 Part of the show pipeline, split is typically used with rest api's
2684 not associated with the model (database), since the cli has enough
2685 details of the relationships between model fields to understand
2686 which of the fields has a compound key value, and has options to
2687 crack those into component parts.
2688
2689 The operation is called 'crack' (not split), since the other
2690 options for some of the actions is called 'crack'
2691
2692 The field identifies the name of the field in the entry to
2693 split into parts, and the remaining '|' separated fields list
2694 the labels to associate in the result from each of the
2695 split components. Currently, the 'crack' character is '|',
2696 although this could be parameterized.
2697 """
2698 if sdnsh.description: # description debugging
2699 print "command_split: ", field
2700
2701 if hasattr(command, 'query_result'):
2702 entries = command.query_result
2703 if command.query_result == None:
2704 entries = []
2705 else:
2706 if sdnsh.description: # description debugging
2707 print "command_join_table: no entries found"
2708 entries = []
2709
2710 parts = field.split('|')
2711 if len(parts) == 0:
2712 if sdnsh.description: # description debugging
2713 print "command_join_table: field doesn't contain labels" \
2714 " use field|label1|label2|..."
2715 return
2716
2717 field = parts[0]
2718 label = parts[1:]
2719 many = len(label)
2720
2721 for entry in entries:
2722 if field in entry:
2723 parts = entry[field].split('|')
2724 if len(parts) and many >= len(parts) :
2725 # use enumerate to create a tuple for each item in parts,
2726 # assocaiting an index, which can be used to identify the
2727 # label to use for each of the elements; from that create
2728 # a dictionay, which is then used to update the entry
2729 entry.update(dict([[label[n],p] for (n,p) in enumerate(parts)]))
2730
2731
2732def command_display(data, table_format, detail = 'default', sort = None, title = None):
2733
2734 if sdnsh.description: # description debugging
2735 print "command_display: ", data, table_format, detail
2736
2737 if 'detail' in data:
2738 detail = data['detail']
2739
2740 if hasattr(command, 'query_result'):
2741 entries = command.query_result
2742 if command.query_result == None:
2743 entries = []
2744 else:
2745 if sdnsh.description: # description debugging
2746 print "command_join_table: no entries found"
2747 entries = []
2748
2749 if sdnsh.description: # description debugging
2750 print "command_display: #entries ", len(entries)
2751
2752 # XXX controller-node has an odd url, join-rest needs to be able to
2753 # be handed a complete url, and replace the ip address with the controller's
2754 # ip address.
2755 detail = command_display_table_join_entries(table_format, data, entries, detail)
2756
2757 # update any of the pretty-printer tables based on the table_format (obj_type)
2758 obj_type_show_alias_update(table_format)
2759
2760 # with_key manages whether a 'detail' or table is displayed.
2761 with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'
2762
2763 #
2764 if sort:
2765 def sort_cmp(x,y):
2766 for f in sort:
2767 if f in x:
2768 c = utif.trailing_integer_cmp(x.get(f),y.get(f))
2769 if c:
2770 return c
2771 return 0
2772 entries = sorted(entries, cmp=sort_cmp)
2773
2774 # use display_obj_type_rows since it (currently) joins fields for obj_types.
2775 display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)
2776
2777 if title:
2778 return title + display
2779 return display
2780
2781
def command_legacy_cli(obj_type, data, detail = 'default', scoped = None, sort = None):
    """
    Unfortunatly, the command descriptions don't have enough different
    detail to describe how to join specific distinct fields. In the future,
    there will be rest api's for each of the cli requests; that should cause
    this trampoline code to become obsolete.

    Dispatches on obj_type to the matching legacy show implementation;
    each branch returns its own formatted string.

    @param obj_type string, legacy command selector (e.g. 'switch-interfaces')
    @param data dict, field:value's collected by the command description
    @param detail string, pretty-printer detail level
    @param scoped when set, pull the key for the query from the current mode
    @param sort list of fields to sort by, passed through to query helpers
    @return string of formatted output from the chosen branch
    """

    if sdnsh.description: # description debugging
        print "command_legacy_cli: ", obj_type, data, detail, scoped, sort

    # update any of the pretty-printer tables based on the obj_type
    obj_type_show_alias_update(obj_type)

    #
    #
    # Various show command 'join' data to create a table not
    # directly available in the REST API, someday in the future,
    # these joins will be directly implemented in the REST API,
    # but these special cases still exist:
    #
    if 'running-config' in data:
        result = sdnsh.error_msg("No running-config choice")
        words = []
        if 'word' in data and data['word'] != 'all':
            words = [data['word']]

        if data['running-config'] == 'running-config':
            # 'show vns XXX running-config'
            if 'vnsname' in data and data['vnsname'] != 'all':
                return sdnsh.show_vns_running_config(data['vnsname'],data['tenant'])
            elif 'vns' in data and data['vns']=='all':
                data['running-config'] = 'vns'
            elif 'tenant' in data:
                data['running-config']='tenant'
                words=[data['tenant']]
        if data['running-config'] in run_config.registry_items_enabled():
            # NOTE(review): 'config' is not defined locally here --
            # presumably a module/global list that perform_running_config
            # populates; verify before touching this branch.
            result = run_config.perform_running_config(data['running-config'], sdnsh, config, words)

        if result:
            return result
        return ''.join(config)

    if obj_type == 'running-config':
        return run_config.implement_show_running_config([])

    if obj_type == 'vns-interface':
        if scoped:
            # should check for missing 'id' in data
            data['vns'] = sdnsh.get_current_mode_obj()

        if 'vns' in data:
            if data['vns'] == 'all':
                return sdnsh.display_vns_interface(None, {}, '<no_key>')
            vns_name=data['vns']
            return sdnsh.display_vns_interface(vns_name, {'vns': vns_name },
                                               '<no_key>', detail = 'scoped')

    if obj_type == 'vns-switch-ports':
        if 'vns' in data:
            return sdnsh.show_vns_switch_ports([data['vns']])
        return sdnsh.show_vns_switch_ports([])

    if obj_type == 'switch-ports-vns':
        if 'dpid' in data:
            return sdnsh.show_switch_ports_vns([data['dpid']])
        return sdnsh.show_switch_ports_vns([])

    if obj_type == 'switch-interfaces':
        key = mi.pk(obj_type)
        if scoped:
            data['dpid'] = sdnsh.get_current_mode_obj()

        # in legacy_cli to join the switch-interfaces with port stats
        port_obj = 'port'
        entries = sdnsh.show_sort_obj_type(obj_type,
                       command_query_object(port_obj, data, scoped, sort))

        # switch-interfaces is really class Port, and the primary key
        # is '#|switch|number, not name.

        entries_dict = dict([['%s|%s' % (x['switch'], x['name']), x] for x in entries])
        # collect switch-interface-config
        sic = 'switch-interface-config'
        if 'dpid' in data and data['dpid'] != 'all':
            sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
        else:
            sic_dict = create_obj_type_dict(sic, mi.pk(sic))

        # add switch-interface-config names when missing
        for (sic_id, sic_value) in sic_dict.items():
            if not sic_id in entries_dict:
                # add 'state' to this item for prettyprinting column width computation
                for sv in sic_value:
                    sv['state'] = ''
                entries += sic_value

        # collect the stats for the interfaces
        stats_url = 'realtimestats/port/%(dpid)s/' % data
        url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
        try:
            result = sdnsh.store.rest_simple_request(url)
            check_rest_result(result)
            stats = json.loads(result)

        except Exception, e:
            # best-effort: missing stats simply leave the columns empty
            stats = {}

        # join realtimestats
        for entry in entries:
            if 'state' in entry:
                entry['stp-state'] = entry['state']
            stats_list = stats.get(entry['switch'])
            # Note, 'number' may be missing from entry if the switch
            # matches for switch-interface-config but the interface name
            # doesn't show up.
            if stats_list and 'number' in entry:
                ifn = entry['number']
                # Notice that the realtime stat's use a int for the 2^16 value here
                # The & 0xffff converts the "-x" to a positive 2^16 value
                item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
                if len(item) == 1:
                    entry.update(item[0])
            if entry['id'] in sic_dict:
                entry.update(sic_dict[entry['id']][0])

        # Update the alias mappings for display
        obj_type_show_alias_update(obj_type)

        return sdnsh.pp.format_table(entries, obj_type, detail)

    if obj_type == 'tunnel-interfaces':
        # Use the active tunnels to identify the interfaces on the
        # switches which are the tunneling interfaces, with that
        # collect to port -> if_name mappings from 'port', then
        # find all the switches interfaces, convert those port numbers to
        # if names, to collect only tunneling interfaces.  Now collect
        # realtimestats for the switch's ports, and associate those
        # stats with any filtered interfaces, finally display the result
        tunnel_url = "tunnel-manager/%(dpid)s" % data
        url = "http://%s/rest/v1/" % sdnsh.controller + tunnel_url
        result = sdnsh.store.rest_simple_request(url)
        check_rest_result(result)
        tunnels = json.loads(result)

        # use the active tunnels to
        # collect dpid's, convert the remote ip's to interface names.
        tunnel_ports = {}
        for t in tunnels:
            # tunnel ports are named from the remote ip's dotted quad:
            # a.b.c.d -> 'vta%03d%03d%03d%03d'
            quad = t['tunnelPorts'].split('.')
            if_name = "vta%03d%03d%03d%03d" % (int(quad[0]), int(quad[1]),
                                               int(quad[2]), int(quad[3]))
            key = "%s|%s" % (t['dpid'], if_name)
            if not key in tunnel_ports:
                tunnel_ports[key] = {t['dpid']: t['tunnelPorts']}

        # Collect interfaces on associated switch
        port_obj = 'port'
        entries = sdnsh.show_sort_obj_type(port_obj,
                       command_query_object(port_obj, data, scoped, sort))
        # Associate port names with interface names
        port_to_if_name = {}

        try:
            ports = sdnsh.get_table_from_store("port")
        except Exception, e:
            # NOTE(review): this assigns 'port', not 'ports' -- on a
            # store failure the 'for port in ports' below would raise
            # NameError; looks like a latent typo, verify before fixing.
            port = []

        for port in ports:
            key_string = '%s|%s' % (port['switch'], port['number'])
            port_to_if_name[key_string] = port['name']

        # Filter elements, 'filtered' only contains active tunnel interfaces
        filtered = []
        for e in entries:
            e['ifname'] = port_to_if_name[e['id']]
            key = '%s|%s' % (e['switch'], e['ifname'])
            if sdnsh.description: # description debugging
                print command._line(), key
            if key in tunnel_ports:
                if sdnsh.description: # description debugging
                    print command._line(), "Found ", e['id']
                filtered.append(e)
        entries = filtered

        # collect switch-interface-config
        sic = 'switch-interface-config'
        if 'dpid' in data:
            sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
        else:
            sic_dict = create_obj_type_dict(sic, mi.pk(sic))

        # collect the stats for the interfaces
        stats_url = 'realtimestats/port/%(dpid)s/' % data
        url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
        try:
            result = sdnsh.store.rest_simple_request(url)
            check_rest_result(result)
            stats = json.loads(result)
        except Exception, e:
            # best-effort: missing stats simply leave the columns empty
            stats = {}

        # join realtimestats
        for entry in entries:
            if 'state' in entry:
                entry['stp-state'] = entry['state']
            stats_list = stats.get(entry['switch'])
            if stats_list and 'number' in entry:
                ifn = entry['number']
                # Notice that the realtime stat's use a int for the 2^16 value here
                # The & 0xffff converts the "-x" to a positive 2^16 value
                item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
                if len(item) == 1:
                    entry.update(item[0])
            if entry['id'] in sic_dict:
                entry.update(sic_dict[entry['id']][0])

        obj_type_show_alias_update('switch-interfaces')

        return sdnsh.pp.format_table(entries, 'switch-interfaces', detail)

    if obj_type == 'host-vns-interface-vns':
        # NOTE(review): the loop below iterates an empty list, so 'words'
        # is always passed as []; 'words[w] = data[w]' would also fail on
        # a list -- this looks like unfinished option plumbing, verify.
        words = []
        for w in []: # list of options to display_vns_mac_address_table
            if w in data:
                words[w] = data[w]

        return sdnsh.display_vns_mac_address_table(data['vns'], words)

    if obj_type == 'config':
        if 'config' in data:
            if 'version' in data:
                return sdnsh.implement_show_config([data['config'],data['version']])
            return sdnsh.implement_show_config([data['config']])

        if 'config-diff' in data:
            if 'version' in data:
                return sdnsh.implement_show_config([ data['first'],
                                                    'diff',
                                                    data['second'],
                                                    data['version']])
            return sdnsh.implement_show_config([data['first'],
                                                'diff',
                                                data['second'], ])
        return sdnsh.implement_show_config([])

    if obj_type == 'vns-flow':
        if 'detail' in data:
            return sdnsh.show_vns_flow_annotated([data['vns'],
                                                  'flow',
                                                  data['detail']])
        return sdnsh.show_vns_flow_annotated([data['vns'], 'flow'])

    if obj_type == 'tech-support':
        return sdnsh.do_show_tech_support([])

    if obj_type == 'config-file':
        if 'file' in data:
            # NOTE(review): the guard tests 'file' but reads
            # data['config'] -- confirm which key the description fills.
            return sdnsh.implement_show_config_file(['config-file', data['config']])
        return sdnsh.implement_show_config_file(['config-file', ])

    if obj_type == 'logging':
        if 'log-name' in data:
            return sdnsh.implement_show_logging([data['log-name']])
        return sdnsh.implement_show_logging([])

    if obj_type == 'event-history':
        if 'count' in data:
            return sdnsh.do_show_event_history([data['event'],
                                                'last',
                                                str(data['count'])])
        return sdnsh.do_show_event_history([data['event']])

    if obj_type == 'flow-cache':
        words = []
        if 'counters' in data:
            words.append('counters')
        elif 'application' in data:
            words.append('app')
            words.append(data['application'])
            words.append('app-instance')
            words.append(data['instance'])

        return sdnsh.do_show_flow_cache(words)

    if obj_type in ['controller-stats', 'switch-stats']:
        #
        # data['id'] is the name of the controller
        helper_item = obj_type.replace('-stats','')
        if helper_item == 'controller':
            helper_item = 'controller-node'
        key = mi.pk(helper_item)
        words = [helper_item, data[key], 'stats']
        if 'stats-type' in data:
            words.append(data['stats-type'])
        # forward any remaining name/value pairs as extra arguments
        for (n,v) in data.items():
            if not n in [key, 'stats', 'stats-type']:
                words.append(n)
                words.append(v)
        return sdnsh.helper_show_object_stats(words)

    if obj_type == 'switch-tcpdump':
        words = ['trace', data['dpid']]
        for (n,v) in data.items():
            if not n in ['tcpdump', 'dpid']:
                words.append(n)
        return sdnsh.do_trace(words)

    if obj_type == 'copy':
        words = [data['source']]
        if 'dest' in data:
            words.append(data['dest'])
        return sdnsh.implement_copy(words)

    if obj_type == 'write':
        return sdnsh.implement_write([data['target']])

    if obj_type == 'this':
        # 'show this': render the object of the current submode, using
        # the obj-type's show-this description when one exists
        obj_type = sdnsh.get_current_mode_obj_type()
        show_this = mi.obj_type_show_this(obj_type)
        if not show_this:
            return sdnsh.do_show_object(['this'])
        result = []
        for show in show_this:
            if type(show) is list and len(show) >= 3:
                # [ object, format, detail ]
                if len(result) > 0:
                    result.append(mi.obj_type_show_title(show[0]))
                sort = None
                if len(show) > 3:
                    sort = show[3]
                result.append(command_display_table(show[0], {},
                                                    table_format = show[1],
                                                    detail = show[2],
                                                    sort = sort,
                                                    scoped = True))
            elif type(show) is list and len(show) == 2:
                # [ object, detail ]
                if len(result) > 0:
                    result.append(mi.obj_type_show_title(show[0]))
                result.append(command_display_table(show[0], {}, detail = show[1], scoped = True))
            else:
                result.append(sdnsh.do_show_object([show]))
        return '\n'.join(result)

    if obj_type == 'version':
        return sdnsh.do_show_version([])

    if obj_type == 'reload':
        return sdnsh.implement_reload()

    if obj_type == 'test-command':
        if data['test-type'] == 'packet-in':
            return sdnsh.implement_test_packet_in(data)
        if data['test-type'] == 'path':
            return sdnsh.implement_test_path(data)

    # fell through every branch: unknown selector, nothing returned
    print 'command_legacy_cli: obj-type unknown: ', obj_type
3140
3141
def command_legacy_cli_no(obj_type, data, detail = 'default', scoped = None, sort = None):
    """
    Implement no command for trampoline code back to the original code

    Only 'tag-mapping' currently has a legacy 'no' handler; any other
    obj_type falls through and returns None.
    """
    if obj_type != 'tag-mapping':
        return
    return sdnsh.implement_no_tag(['tag', data['tag']])
3148
3149
def command_version(data):
    """
    The version command will later manage changing the syntax to match
    the requested version.

    @param data dict, expects a 'version' key naming the requested
           command syntax version; when absent or None, do nothing

    Side effect: spawns a subshell running the cli with the requested
    command description version via 'env ... cli --init'.
    """
    new_version = data.get('version')
    if new_version is None:
        return

    version = new_version # save for error message
    new_version = sdnsh.desc_version_to_path_elem(new_version)

    # skip version change if this is the current version.
    if sdnsh.desc_version == new_version:
        return

    # see if the requested version exists
    if not sdnsh.command_packages_exists(new_version):
        print('No command description group for version %s' % version)
        return

    # run 'env [environ_vars] ... cli.py'
    # (named cmd_words so the imported 'command' module isn't shadowed)
    cmd_words = ['env']
    cmd_words.append('CLI_COMMAND_VERSION=%s' % version)
    cmd_words.append('CLI_STARTING_MODE=config')
    if os.path.exists('/opt/sdnplatform/cli/bin/cli'):
        # controller VM
        cmd_words.append('/opt/sdnplatform/cli/bin/cli --init')
    else:
        # developer setup
        base = os.path.dirname(__file__)
        cmd_words.append(os.path.join(base, 'cli.py'))
        cmd_words.append('--init')

    # dump the command descriptions, and read a new set.
    # open a subshell with a new command version
    subprocess.call(cmd_words, cwd=os.environ.get("HOME"))

    return
3189
3190
def command_clearterm():
    """
    Clear the console by printing terminal reset characters to the screen.
    """
    # 'reset' emits the reset sequence for the current terminal type
    subprocess.call("reset")
3196
def command_display_cli(data):
    """
    Display various cli details.
    (this may need to be re-factored into some general "internal" state show)

    @param data dict from the command description (currently unused)
    @return string: a formatted 'cli' summary entry followed by the
            submode-transition table
    """
    # collect enabled debug switches for the summary row
    debug = []
    if sdnsh.debug:
        debug.append('debug')
    if sdnsh.debug_backtrace:
        debug.append('backtrace')

    modes = sdnsh.command_dict.keys() + sdnsh.command_nested_dict.keys()

    entry = {
               'version' : ', '.join(command.command_syntax_version.keys()),
               'desc'    : ', '.join(sorted(command.command_added_modules.keys())),
               'format'  : ', '.join(sorted(sdnsh.pp.format_added_modules.keys())),
               'modes'   : ', '.join(sorted(utif.unique_list_from_list(modes))),
               'debug'   : ', '.join(debug),
            }
    basic = sdnsh.pp.format_entry(entry, 'cli')

    mode_entries = command.command_submode_dictionary(modes)
    mode_table = sdnsh.pp.format_table(mode_entries, 'cli-modes')

    # (an unreachable bare 'return' after this statement was removed)
    return basic + '\n\nCommand Submode Transition\n' + mode_table
3225
3226
3227def delete_alias_by_id(alias_obj_type, alias_value):
3228 """
3229 Common delete operation for alias, based on primary key
3230
3231 @param alias_obj_type string, name of table where single entry is removed
3232 @param alias_value string, value of primary key to delete
3233 """
3234 xref = mi.foreign_key_xref.get(alias_obj_type)
3235 if xref:
3236 # look for any referecnes to this alias_value. Since this
3237 # is an alias table, only the pk ought to exist in the xref.
3238 # When the alias is getting removed, any references to it
3239 # via foreign keys must also get removed.
3240 if len(xref) > 1 or not mi.pk(alias_obj_type) in xref:
3241 print 'Internal Inconsistency'
3242 else:
3243 for (fk_obj_type, fk_field) in xref[mi.pk(alias_obj_type)]:
3244 rows = sdnsh.get_table_from_store(fk_obj_type,
3245 fk_field,
3246 alias_value,
3247 'exact')
3248 for row in rows:
3249 sdnsh.rest_delete_object(fk_obj_type, row[mi.pk(fk_obj_type)])
3250 sdnsh.rest_delete_object(alias_obj_type, alias_value)
3251
3252
def delete_alias_by_fk(alias_obj_type, foreign_key):
    """
    Common delete operation for alias, by foreign key

    @param alias_obj_type string, name of table where entries are removed
    @param foreign_key string, foreign key value selecting rows to delete
    @raise error.CommandInternalError when the store query fails
    """
    # find all the id's based on the foreign key, then delete them all.
    # note: see similar midw alias_lookup_with_foreign_key()
    foreign_field = mi.alias_obj_type_field(alias_obj_type)
    try:
        matches = sdnsh.get_table_from_store(alias_obj_type,
                                             foreign_field,
                                             foreign_key,
                                             "exact")
    except Exception:
        raise error.CommandInternalError("Can't fetch %s:%s" %
                                         (foreign_field, foreign_key))

    key = mi.pk(alias_obj_type)
    for match in matches:
        delete_alias_by_id(alias_obj_type, match[key])
3275
3276
3277def command_delete_alias(obj_type, data):
3278 """
3279 Action for delete-alias
3280
3281 A single row is deleted from an alias table.
3282 Current alias tables include host-alias, switch-alias, port-alias
3283
3284 @param obj_type string, name of alias table to manage
3285 @param data dict, collection of field:value's from command description
3286 """
3287 if sdnsh.description: # description debugging
3288 print "command_delete_alias: ", obj_type, data
3289
3290 parent_id = sdnsh.get_current_mode_obj()
3291
3292 key = mi.pk(obj_type)
3293 if key not in data:
3294 delete_alias_by_fk(obj_type, parent_id)
3295 else:
3296 delete_alias_by_id(obj_type, data[key])
3297
3298
3299def command_create_alias(obj_type, data, reserved = None, fail_if_exists = False):
3300 """
3301 Action for create-alias
3302
3303 Current alias tables include host-alias, switch-alias, port-alias
3304
3305 @param obj_type string, name of alias table to manage
3306 @param data dict, collection of field:value's from the command description
3307 """
3308 if sdnsh.description: # description debugging
3309 print "command_create_alias: ", obj_type, data, reserved, fail_if_exists
3310
3311 parent_obj_type = sdnsh.get_current_mode_obj_type()
3312 parent_id = sdnsh.get_current_mode_obj()
3313
3314 key = mi.pk(obj_type)
3315 if key not in data:
3316 raise error.CommandInternalError("Alias table '%s': description "
3317 "doesn't populate correct '%s' field as data" %
3318 (obj_type, key))
3319 alias_value = data[key]
3320 #
3321 # Determine if the alias name is allowed.
3322 if alias_value in sdnsh.reserved_words:
3323 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3324 % (alias_value, ','.join(sdnsh.reserved_words)))
3325 if reserved and type(reserved) != list:
3326 reserved = [reserved]
3327
3328 if reserved and alias_value in reserved:
3329 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3330 % (alias_value, ','.join(reserved)))
3331
3332 # Walk the foreign key's in the (alias) obj-type, looking
3333 # for the parent reference.
3334
3335 alias_fk = None
3336 obj_type_foreign_keys = mi.obj_type_foreign_keys(obj_type)
3337 if len(obj_type_foreign_keys) == 1:
3338 alias_fk = obj_type_foreign_keys[0]
3339 else:
3340 for alias_fn in obj_type_foreign_keys:
3341 (fk_ot, fk_fn) = mi.foreign_key_references(obj_type, alias_fn)
3342 if fk_ot == parent_obj_type:
3343 alias_fk = alias_fn
3344
3345 if not alias_fk:
3346 raise error.CommandInternalError("Alias table '%s' has no foreign key to '%s'" %
3347 (obj_type, parent_obj_type))
3348
3349 try:
3350 sdnsh.get_object_from_store(obj_type, alias_value)
3351 if sdnsh.description: # description debugging
3352 print "command_create_alias: delete ", obj_type, alias_value
3353 if fail_if_exists:
3354 raise error.ArgumentValidationError("Interface name '%s' already in use - cannot reassign" %(alias_value))
3355 delete_alias_by_id(obj_type, alias_value)
3356 except:
3357 pass
3358
3359 # Remove other existing alias for the same foreign key
3360 # (ie: only one alias per each item, this could be relaxed)
3361 # XXX improve method of managing errors here
3362 try:
3363 rows = sdnsh.get_table_from_store(obj_type,
3364 alias_fk,
3365 parent_id,
3366 "exact")
3367 except Exception, e:
3368 errors = sdnsh.rest_error_to_dict(e)
3369 print sdnsh.rest_error_dict_to_message(errors)
3370 rows = []
3371
3372 for row in rows:
3373 try:
3374 delete_alias_by_id(obj_type, row[key])
3375 if row[alias_fk] != parent_id:
3376 sdnsh.warning("Removed additional alias '%s'"
3377 ", also refers to %s '%s'" %
3378 (row[key], parent_obj_type, parent_id))
3379 except:
3380 if sdnsh.debug or sdnsh.debug_backtrace:
3381 traceback.print_exc()
3382
3383 # This set's the foreign key to allow the create to succeed
3384 c_dict = {
3385 key : alias_value,
3386 alias_fk : parent_id,
3387 }
3388
3389 if sdnsh.description: # description debugging
3390 print "command_create_alias: create ", obj_type, c_dict
3391 result = sdnsh.rest_create_object(obj_type, c_dict)
3392 check_rest_result(result)
3393 result = sdnsh.rest_query_objects(obj_type, c_dict)
3394 check_rest_result(result)
3395
3396 return None
3397
3398
def command_create_tag(obj_type, data):
    """
    obj_type needs to be one of the objects which implements
    a relationship to 'tag', for example: tag-mac-mapping

    @param obj_type string, tag-mapping table with a fk to the current mode
    @param data dict, expects 'tag' as '[namespace.]name=value'
    @raise error.CommandSemanticError for an unrelated obj_type or a
           malformed tag string
    """

    # Verify obj_type references the current mode's object; 'fk' set by
    # this loop is reused further below when building the mapping row.
    item = sdnsh.get_current_mode_obj_type()
    fks = mi.obj_type_foreign_keys(obj_type)
    for fk in fks:
        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
        if fk_obj == item:
            break
    else:
        # for/else: no foreign key matched the current mode's obj_type
        raise error.CommandSemanticError( "type mapping %s doesn't have "
                "relationship to the current object %s" %
                (obj_type, item))

    if sdnsh.description: # description debugging
        print "command_create_tag: create ", obj_type, data

    # split '[namespace.]name=value'; exactly one '=' is required
    tag_and_value = data['tag'].split('=')
    if len(tag_and_value) != 2:
        # deal with tag_and_value's 'va=vb=vc...'
        raise error.CommandSemanticError("tag <[tag-namespace.]name>=<value> "
                                         ": associate tag with host")

    tag_parts = tag_and_value[0].split('.')
    if len(tag_parts) == 0:
        raise error.CommandSemanticError("tag <[tag-namespace.]name>"
                                         ": must have a name")
    elif len(tag_parts) == 1:
        # no namespace given, use 'default'
        tag_namespace = "default"
        tag_name = tag_parts[0]
    elif len(tag_parts) >= 2:
        # the tag_name is not allowed to have '.'
        # collect all the '.'s together into the namespace
        tag_namespace = '.'.join(tag_parts[:-1])
        tag_name = tag_parts[-1]

    tag_value = tag_and_value[1]

    # first manage the tag ...
    tag_dict = {
        'namespace' : tag_namespace,
        'name'      : tag_name,
        'value'     : tag_value,
    }

    # create the tag row if new, otherwise update it to persist=True
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_dict['persist'] = True
    if len(query) == 0:
        result = sdnsh.rest_create_object('tag', tag_dict)
        sdnsh.check_rest_result(result)
    elif len(query) == 1:
        update = sdnsh.rest_update_object('tag',
                                          mi.pk('tag'),
                                          query[0][mi.pk('tag')],
                                          tag_dict)
        sdnsh.check_rest_result(update)

    # re-query without 'persist' to recover the tag's primary key
    del tag_dict['persist']
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_id = query[0][mi.pk('tag')]

    # now create the tag-mapping
    tag_dict = {
        fk    : sdnsh.get_current_mode_obj(), # fk from early for loop
        'tag' : tag_id,
    }

    # only create the mapping when it doesn't already exist
    query = sdnsh.rest_query_objects(obj_type, tag_dict)
    sdnsh.check_rest_result(query)
    if len(query) == 0:
        result = sdnsh.rest_create_object(obj_type, tag_dict)
        sdnsh.check_rest_result(result)
3477
3478def command_delete_tag(obj_type, data):
3479 """
3480 obj_type describes the tag-XXX-mapping which is getting
3481 managed, data has the tag 'string' to delete.
3482 """
3483 item = sdnsh.get_current_mode_obj_type()
3484 fks = mi.obj_type_foreign_keys(obj_type)
3485 for fk in fks:
3486 (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
3487 if fk_obj == item:
3488 break
3489 else:
3490 raise error.CommandSemanticError( "type mapping %s doesn't have "
3491 "relationship to the current object %s" %
3492 (obj_type, item))
3493
3494 if 'tag' not in data:
3495 raise error.CommandSemanticError('Tag value missing')
3496
3497 tag = data['tag']
3498 name_and_value = tag.split('=')
3499
3500 name_part = name_and_value[0].split('.')
3501 if len(name_part) == 1:
3502 namespace = 'default'
3503 name = name_part[0]
3504 elif len(name_part) >= 2:
3505 namespace = '.'.join(name_part[:-1])
3506 name = name_part[-1]
3507
3508 value = name_and_value[1]
3509 pk_value = sdnsh.unique_key_from_non_unique([namespace,
3510 name,
3511 value,
3512 sdnsh.get_current_mode_obj()])
3513 try:
3514 sdnsh.get_object_from_store(obj_type, pk_value)
3515 except Exception:
3516 raise error.CommandSemanticError('%s No such tag %s' % (obj_type, tag))
3517
3518 sdnsh.rest_delete_object(obj_type, pk_value)
3519
3520 # with that entry removed, check to see if any other
3521 # foreign keys assocaited with class Tag exist.
3522
3523 fk_value = sdnsh.unique_key_from_non_unique([namespace,
3524 name,
3525 value])
3526
3527 for tag_fields in mi.foreign_key_xref['tag']:
3528 for (fk_obj_type, fk_name) in mi.foreign_key_xref['tag'][tag_fields]:
3529 try:
3530 sdnsh.get_table_from_store(fk_obj_type, fk_name, fk_value)
3531 break
3532 except Exception, e:
3533 pass
3534 else:
3535 continue
3536 break
3537 else:
3538 try:
3539 sdnsh.rest_delete_object('tag', fk_value)
3540 except Exception, e:
3541 raise error.CommandSemanticError('base tag missing' % fk_value)
3542
3543
def command_rest_post_data(path, data=None, verb='PUT'):
    """
    Issue a REST request with the given verb against the controller's
    /rest/v1/<path> endpoint, validating the reply.
    """
    target = 'http://%s/rest/v1/%s' % (sdnsh.controller, path)
    reply = sdnsh.rest_post_request(target, data, verb)
    check_rest_result(reply)
    return None
3551
3552
3553def command_cli_variables_set(variable, value, data):
3554 global sdnsh
3555
3556 if variable == 'debug':
3557 print '***** %s cli debug *****' % \
3558 ('Enabled' if value else 'Disabled')
3559 sdnsh.debug = value
3560 elif variable == 'cli-backtrace':
3561 print '***** %s cli debug backtrace *****' % \
3562 ('Enabled' if value else 'Disabled')
3563 sdnsh.debug_backtrace = value
3564 elif variable == 'cli-batch':
3565 print '***** %s cli batch mode *****' % \
3566 ('Enabled' if value else 'Disabled')
3567 sdnsh.batch = value
3568 elif variable == 'description':
3569 print '***** %s command description mode *****' % \
3570 ('Enabled' if value else 'Disabled')
3571 sdnsh.description = value
3572 elif variable == 'rest':
3573 if 'record' in data and value:
3574 print '***** Eanbled rest record mode %s *****' % \
3575 (data['record'])
3576 url_cache.record(data['record'])
3577 return
3578 print '***** %s display rest mode *****' % \
3579 ('Enabled' if value else 'Disabled')
3580 if 'detail' in data and data['detail'] == 'details':
3581 if value == True:
3582 sdnsh.disply_rest_detail = value
3583 sdnsh.store.display_reply_mode(value)
3584 sdnsh.display_rest = value
3585 sdnsh.store.display_mode(value)
3586 if value == False:
3587 sdnsh.disply_rest_detail = value
3588 sdnsh.store.display_reply_mode(value)
3589 url_cache.record(None)
3590 elif variable == 'set':
3591 if 'length' in data:
3592 sdnsh.length = utif.try_int(data['length'])
3593
3594
def command_cli_set(variable, data):
    """Enable the named CLI variable (see command_cli_variables_set)."""
    command_cli_variables_set(variable, True, data)
3597
def command_cli_unset(variable, data):
    """Disable the named CLI variable (see command_cli_variables_set)."""
    command_cli_variables_set(variable, False, data)
3600
3601
def command_shell_command(script):
    """
    Drop the operator into one of a fixed set of debug shells/tools:
    'bash', 'python', 'cassandra-cli' or 'netconfig'.  Any other value
    just prints an error.  Blocks until the spawned program exits.
    """

    def shell(args):
        # run /bin/bash with the given args from $HOME, forcing SHELL
        # to bash so subshells behave consistently
        subprocess.call(["env", "SHELL=/bin/bash", "/bin/bash"] + list(args),
                        cwd=os.environ.get("HOME"))
        print

    print "\n***** Warning: this is a debug command - use caution! *****"
    if script == 'bash':
        print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-i"])
    elif script == 'python':
        print '***** Type "exit()" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-c", "python"])
    elif script == 'cassandra-cli':
        print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-c", "/opt/sdnplatform/db/bin/cassandra-cli --host localhost"])
    elif script == 'netconfig':
        # warn when stdin isn't a local serial console: reconfiguring
        # the interface can kill a remote (ssh) session
        if not re.match("/dev/ttyS?[\d]+$", os.ttyname(0)):
            print '***** You seem to be connected via SSH or another remote protocol;'
            print '***** reconfiguring the network interface may disrupt the connection!'
            print '\n(Press Control-C now to leave the network configuration unchanged)\n'
        subprocess.call(["sudo",
                         "env",
                         "SHELL=/bin/bash",
                         "/opt/sdnplatform/sys/bin/bscnetconfig",
                         "eth0"],
                        cwd=os.environ.get("HOME"))
    else:
        # XXX possibly run the script directly?
        print "Unknown debug choice %s" % script
3633
3634
def command_prompt_update():
    """
    Recompute and redraw the CLI prompt.  Invoked after actions which
    may have invalidated it (a hostname update, for example).
    """
    sdnsh.set_controller_for_prompt()
    sdnsh.update_prompt()
3642
def command_controller_decommission(data):
    """
    Decommission the controller using the REST API.

    data['id'] names the controller node to remove; the operator is
    asked to confirm first.  The HA decommission endpoint is polled
    every 10 seconds until the cluster reports the node is already
    decommissioned.
    """
    id = data.get('id')
    confirm_request("Decommission controller '%s'?\n(yes to continue) " % id)

    while True:
        url = 'http://%s/rest/v1/system/ha/decommission' % (sdnsh.controller)
        result = sdnsh.rest_post_request(url, {"id": id}, 'PUT')
        status = json.loads(result)

        # done only when the call succeeds AND the node is reported
        # as already decommissioned; otherwise sleep and retry
        if (status['status'] == 'OK') and status['description'].endswith('is already decommissioned') == True:
            print 'Decommission finished'
            print
            break
        else:
            print 'Decommission in progress'

        time.sleep(10)
3663
3664def command_controller_upgrade(data = None):
3665 """
3666 Upgrade the controller using the REST API
3667 """
3668
3669 force = 'force' in data
3670 details = 'details' in data
3671
3672 if force:
3673 print "WARNING: Ignoring any validation errors during upgrade"
3674 url = "http://%s/rest/v1/system/upgrade/image-name" % sdnsh.controller
3675 result = sdnsh.store.rest_simple_request(url)
3676 check_rest_result(result)
3677 iname = json.loads(result)
3678 if (iname['file'] is None or iname['file'] == ""):
3679 print "Error: No upgrade image present."
3680 print ""
3681 print """To perform upgrade, an upgrade image package needs to be uploaded (with scp) to the controller's \"images\" user."""
3682 print """Upgrade image package is a file with name of format \"upgrade-YYYY.MM.DD.XXXX.pkg\"."""
3683 print ""
3684 print "Following is an example to prepare upgrade for controller with IP address 192.168.67.141:"
3685 print "scp $path/upgrade-2013.02.13.0921.pkg images@192.168.67.141:"
3686 print ""
3687 return
3688
3689 confirm_request("Upgrade controller from image '%s'?\n(yes to continue) "
3690 % iname['file'])
3691
3692 url = "http://%s/rest/v1/system/upgrade/extract-image-manifest" % sdnsh.controller
3693 result = sdnsh.store.rest_simple_request(url)
3694 check_rest_result(result)
3695 manifest = json.loads(result)
3696
3697 print "Executing upgrade..."
3698 for step in manifest:
3699 print "%s - %s" % (step['step'], step['description'])
3700 url = 'http://%s/rest/v1/system/upgrade/execute-upgrade-step' % \
3701 (sdnsh.controller)
3702 result = sdnsh.rest_post_request(url, {"step": step['step'],
3703 "imageName": iname['file'],
3704 "force": force},
3705 'PUT')
3706 check_rest_result(result)
3707 status = json.loads(result)
3708
3709 if (status['status'] == "OK"):
3710 print " Succeeded"
3711 if details:
3712 print "\nDetailed output:"
3713 print status['description']
3714 print
3715 else:
3716 print " Failed to execute upgrade step %d" % step['step']
3717 print "\nDetailed output:"
3718 print status['description']
3719 print
3720 return
3721
3722 print """Controller node upgrade complete.
3723Upgrade will not take effect until system is rebooted. Use 'reload' to
3724reboot this controller node. To revert, select the appropriate image
3725from the boot menu"""
3726
def command_cluster_config_rollback(data):
    """
    Roll the whole cluster back to a saved configuration file.

    data['dir'] selects the source area ('images://' or
    'saved-configs://'); data['file'] names the file within it.  Must
    be run on the MASTER.  The current running-config is copied and
    diffed against the target first; any difference aborts the
    rollback.  Nodes are then rolled back and rebooted one at a time
    (local node last), with interactive confirmation unless in batch
    mode; answering 'no' undoes the nodes already rolled back.
    """
    # resolve the on-disk path of the requested config
    path = ''
    if data.get('dir') == 'images://':
        path += '/home/images/'
    elif data.get('dir') == 'saved-configs://':
        path += '/opt/sdnplatform/run/saved-configs/'
    path += data.get('file')

    # only the master may drive a cluster-wide rollback
    url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url, use_cache = False)
    ha_role = json.loads(result)
    if ha_role['role'] != 'MASTER':
        print "Command can only be run on Master"
        return

    # snapshot the running config and verify it matches the rollback
    # target; differences mean the rollback would lose changes
    command_legacy_cli('copy', {'dest': 'file://running-config-copy', 'source': 'running-config'})
    print "INFO: Checking config '%s'" % path
    url = "http://%s/rest/v1/system/rollback/diffconfig" % sdnsh.controller
    result = sdnsh.rest_post_request(url, {"config-1": "/opt/sdnplatform/run/saved-configs/running-config-copy", "config-2": path}, 'PUT')
    check_rest_result(result)
    if json.loads(result)['out'].startswith('Found differences'):
        print json.loads(result)['out']
        print "Rollback aborted"
        return

    # identify the local node so it can be rolled back last
    url = "http://%s/rest/v1/system/controller" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url, use_cache = False)
    controller_id = json.loads(result)['id']

    url = "http://%s/rest/v1/model/controller-interface?controller=%s" % (sdnsh.controller, controller_id)
    result = sdnsh.store.rest_simple_request(url)
    local_iface = json.loads(result)[0]['discovered-ip']

    url = "http://%s/rest/v1/model/controller-interface" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    ifaces = json.loads(result)

    # cutover = majority threshold; past this many rolled-back nodes
    # an aborted rollback must also repair the firewall rules
    nodeCount = len(ifaces)
    cutover = nodeCount/2
    if nodeCount%2 == 1:
        cutover = cutover + 1

    rollbackedNodes = []

    # remove and add object for local node at the end of the list
    for index, iface in enumerate(ifaces):
        if iface['discovered-ip'] == local_iface:
            break
    del ifaces[index]
    ifaces.append(iface)

    # push the rollback config to the first node to be processed
    config=open(path, 'r').read()
    url = 'http://%s/rest/v1/system/upload-data' % ifaces[0]['discovered-ip']
    result = sdnsh.rest_post_request(url, {"data": config, "dst" : "/tmp/rollback.conf"}, 'PUT')
    check_rest_result(result)

    while len(ifaces) > 0:
        if sdnsh.batch == False:
            while True:
                confirm = raw_input("Rollback controller at '%s'. [yes/no] ?" % ifaces[0]['discovered-ip'])
                if confirm.lower() == 'n' or confirm.lower() == 'no':
                    if len(rollbackedNodes) == 0:
                        print "INFO: Rollback aborted"
                        return

                    # undo: reset + reboot every node already rolled back
                    print "INFO: Undoing Rollback on previously rollbacked nodes"
                    for node in rollbackedNodes:
                        print "INFO: Resetting database on '%s'" % node['discovered-ip']
                        url = 'http://%s/rest/v1/system/resetbsc' % (node['discovered-ip'])
                        result = sdnsh.rest_post_request(url, {}, 'PUT')
                        check_rest_result(result)
                        print "INFO: Rebooting '%s'" % node['discovered-ip']
                        url = 'http://%s/rest/v1/system/reload' % (node['discovered-ip'])
                        result = sdnsh.rest_post_request(url, {}, 'GET')
                        check_rest_result(result)

                    if len(rollbackedNodes) >= cutover:
                        # delete the REJECT rules
                        url="http://localhost/rest/v1/model/firewall-rule?port=6633"
                        result = sdnsh.rest_post_request(url, {}, 'DELETE')
                        # enable allow openflow on all controllers not rollbacked.
                        url="http://localhost/rest/v1/model/firewall-rule"
                        for iface in ifaces:
                            pk_id = '%s|Ethernet|0' % iface['controller']
                            data = {
                                'action': 'allow',
                                'interface': pk_id,
                                'src-ip': '',
                                'port': '6633',
                                'proto': 'tcp',
                                'vrrp-ip': '',
                            }
                            print "INFO: re-allow openflow on %s" % iface['discovered-ip']
                            result = sdnsh.rest_post_request(url, data, 'PUT')
                            check_rest_result(result)

                    print "Rollback aborted"
                    return
                elif confirm.lower() == 'y' or confirm.lower() == 'yes':
                    break

        # roll back and reboot the node at the head of the list
        url = 'http://%s/rest/v1/system/rollback/config' % (ifaces[0]['discovered-ip'])
        result = sdnsh.rest_post_request(url, {"path": "/tmp/rollback.conf"}, 'PUT')
        check_rest_result(result)
        time.sleep(10)

        print "INFO: Rebooting ", ifaces[0]['discovered-ip']
        url = "http://%s/rest/v1/system/reload" % ifaces[0]['discovered-ip']
        result = sdnsh.store.rest_simple_request(url)

        # the local node reboots too: no waiting possible after that
        if ifaces[0]['discovered-ip'] == local_iface:
            break

        # poll every 30s until the rebooted node reports a valid role
        print "INFO: Waiting for %s to come back up" % ifaces[0]['discovered-ip']
        url = "http://%s/rest/v1/system/ha/role" % ifaces[0]['discovered-ip']
        while True:
            time.sleep(30)
            try:
                result = sdnsh.store.rest_simple_request(url, use_cache = False)
                status = json.loads(result)
                if status['role'] == 'SLAVE' or status['role'] == 'MASTER':
                    print "INFO: Rollback complete on '%s'" % ifaces[0]['discovered-ip']
                    break
                print "INFO: Waiting for 30 seconds"
            except:
                print "INFO: Waiting for 30 seconds"


        iface = ifaces.pop(0)
        rollbackedNodes.append(iface)

    print "Rollback completed"
3860
3861def command_wait_for_controller(delay = None, sdnplatform_check = False,
3862 within_command = False):
3863 """
3864 For various commands, it makes sense for the command to verify that
3865 the controller restart has been completed. In the situation where
3866 a single controller is configured, it also makes sense to verify the
3867 controller is now configured as MASTER.
3868
3869 This is especially true for command which are known to cause the
3870 controller to restart, for exampe the 'feature' command.
3871
3872 The procedure is also used during CLI startup (see cli.py)
3873 to verify that the controller is in MASTER mode. Its normal
3874 for the HA role to transition from SLAVE to master during
3875 system startup.
3876 """
3877
3878 # if the CLI was started with --init, skip the wait, the
3879 # controller isn't running.
3880 if sdnsh.options.init:
3881 return
3882
3883 def is_ready(sdnsh, verbose, duration):
3884 """
3885 Be loud-as-_ean when the duration is greater then 15 seconds.
3886 Display the gory details for all to know.
3887 """
3888 too_long = 90
3889 try:
3890 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3891 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3892 ha_role = json.loads(result)
3893 if duration > too_long:
3894 print 'Long delay: reason', \
3895 ', '.join(['%s: %s' % (n,v) for (n,v) in ha_role.items()
3896 if v != ''])
3897 if (ha_role['role'] == 'MASTER' or
3898 sdnsh.find_master()['master'] is not None):
3899 if verbose:
3900 print 'Current role is MASTER'
3901 return True
3902 return False
3903 except error.CommandRestError,e:
3904 print "REST error whileUnable to determine controller HA role."
3905 errors = self.rest_error_to_dict(e, obj_type)
3906 print self.rest_error_dict_to_message(errors)
3907 return True
3908 except Exception, e:
3909 if duration > too_long:
3910 print 'MASTER Transition Failure: ', e
3911 traceback.print_exc()
3912 return True
3913 return False
3914
3915 # if this isn't a typical environment (ie: running remotely)
3916 # don't bother trying to determine the role
3917 if not os.path.exists('/opt/sdnplatform/current_role'):
3918 return
3919
3920 # now vadalidate the rest api port is working
3921 ip_and_port = sdnsh.controller.split(':')
3922 if len(ip_and_port) == 2:
3923 # first ensure the REST API is answering
3924 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
3925 try:
3926 s.connect((ip_and_port[0], int(ip_and_port[1])))
3927 s.close()
3928 except Exception, e:
3929 print 'REST API not running, emergency CLI access'
3930 if sdnsh.debug: # enable debug to see messages
3931 print 'Exception:', e
3932 return
3933
3934 # issue a REST API request directed at the model.
3935 try:
3936 entry = sdnsh.get_table_from_store('feature')
3937 except Exception, e:
3938 print 'REST API/Database not responding, emergency CLI access'
3939 if sdnsh.debug: # enable debug to see messages
3940 print 'Exception:', e
3941 return
3942
3943 if sdnplatform_check:
3944 # the REST API request for ha-role will return UNAVAILABLE
3945 # when sdnplatform isn't running.
3946 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3947 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3948 ha_role = json.loads(result)
3949 if ha_role['role'] == 'UNAVAILABLE':
3950 print 'REST API/SDN platform not responding, emergency CLI access'
3951 return
3952
3953
3954 if delay == None:
3955 delay = 1
3956 delay_str = 'a sec' if delay == 1 else '%d seconds' % delay
3957
3958 duration = 0
3959 while True:
3960 try:
3961 verbose = False
3962 while not is_ready(sdnsh, verbose, duration):
3963 if within_command:
3964 print 'Waiting %s to complete command execution, ' \
3965 'Hit Ctrl-C to exit early' % delay_str
3966 verbose = False
3967 else:
3968 print 'Waiting %s while current role is SLAVE mode, ' \
3969 'Hit Ctrl-C to exit early' % delay_str
3970 verbose = True
3971 time.sleep(delay)
3972 duration += delay
3973 return
3974 except:
3975 if is_ready(sdnsh, True, duration):
3976 if duration > 15:
3977 print 'MASTER Transition: %s sec' % duration
3978 return
3979 try:
3980 resp = raw_input('Controller is not yet ready.'
3981 'Do you still want to continue to the CLI? [n]')
3982 if resp and "yes".startswith(resp.lower()):
3983 print 'Continuing with CLI despite initialization error ...'
3984 return
3985 except KeyboardInterrupt:
3986 return
3987
3988
3989def command_factory_default():
3990 print "Re-setting controller to factory defaults ..."
3991 os.system("sudo /opt/sdnplatform/sys/bin/resetbsc")
3992 return
3993
3994
def command_dump_log(data):
    """
    Generator yielding the lines of a controller log file.

    data['log-name'] selects the log; the special value 'all' yields a
    banner per log then recursively yields every available log.
    data['controller-node'] optionally restricts which controller is
    queried (None -- see controller_ip_and_port for its handling).
    """
    controller = data.get('controller-node') # can be None.
    controller_dict = { 'id' : controller }
    for ip_port in controller_ip_and_port(controller_dict):
        log_name = data['log-name']
        if log_name == 'all':
            # enumerate this controller's logs, then recurse once per
            # log to stream each one
            url = log_url(ip_and_port = ip_port)
            log_names = command.sdnsh.rest_simple_request_to_dict(url)
            for log in log_names:
                yield '*' * 40 + ip_port + ' ' + log['log'] + '\n'
                for item in command_dump_log({ 'log-name' : log['log'] }):
                    yield item
            return

        # use a streaming method so the complete log is not in memory
        url = log_url(ip_and_port = ip_port, log = log_name)
        request = urllib2.urlopen(url)
        for line in request:
            yield line
        request.close()
4015
4016
4017#
4018# Initialize action functions
4019#
4020#
4021
4022def init_actions(bs, modi):
4023 global sdnsh, mi
4024 sdnsh = bs
4025 mi = modi
4026
4027 command.add_action('create-tunnel',
4028 tunnel_create,
4029 {'kwargs': {'data' : '$data',}})
4030
4031 command.add_action('remove-tunnel',
4032 tunnel_remove,
4033 {'kwargs': {'data' : '$data',}})
4034
Srikanth Vavilapallib5c3ca52014-12-15 15:59:33 -08004035 command.add_action('create-tunnelset',
4036 tunnelset_create,
4037 {'kwargs': {'data' : '$data',}})
4038
4039 command.add_action('remove-tunnelset',
4040 tunnelset_remove,
4041 {'kwargs': {'data' : '$data',}})
4042
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08004043 command.add_action('create-policy',
4044 policy_create,
4045 {'kwargs': {'data' : '$data',}})
4046
4047 command.add_action('remove-policy',
4048 policy_remove,
4049 {'kwargs': {'data' : '$data',}})
4050
4051 command.add_action('write-fields', write_fields,
4052 {'kwargs': {'obj_type': '$current-mode-obj-type',
4053 'obj_id': '$current-mode-obj-id',
4054 'data': '$data'}})
4055
4056 command.add_action('reset-fields', reset_fields,
4057 {'kwargs': {'obj_type' : '$current-mode-obj-type',
4058 'obj_id' : '$current-mode-obj-id',
4059 'arg_data' : '$data',
4060 'match_for_no' : '$match-for-no',
4061 'fields' : '$fields'}})
4062
4063 command.add_action('write-fields-explicit', write_fields,
4064 {'kwargs': {'obj_type' : '$obj-type',
4065 'obj_id' : '$obj-id',
4066 'data' : '$data'}})
4067
4068 command.add_action('reset-fields-explicit', reset_fields,
4069 {'kwargs': {'obj_type' : '$obj-type',
4070 'obj_id' : '$obj-id',
4071 'arg_data' : '$data',
4072 'match_for_no' : '$match-for-no',
4073 'fields' : '$fields'}})
4074
4075 command.add_action('update-config', update_config,
4076 {'kwargs': {'obj_type' : '$obj-type',
4077 'obj_id' : '$current-mode-obj-id',
4078 'data' : '$data',
4079 'no_command' : '$is-no-command', }})
4080
4081 command.add_action('delete-objects', delete_objects,
4082 {'kwargs': {'obj_type': '$obj-type',
4083 'data': '$data',
4084 'parent_field': '$parent-field',
4085 'parent_id': '$current-mode-obj-id'}})
4086
4087 command.add_action('write-object', write_object,
4088 {'kwargs': {'obj_type': '$obj-type',
4089 'data': '$data',
4090 'parent_field': '$parent-field',
4091 'parent_id': '$current-mode-obj-id'}})
4092
4093 command.add_action('set-data', set_data,
4094 {'kwargs': {'data': '$data',
4095 'key': '$key',
4096 'value': '$value'}})
4097
4098 command.add_action('push-mode-stack', push_mode_stack,
4099 {'kwargs': {'mode_name': '$submode-name',
4100 'obj_type': '$obj-type',
4101 'parent_field': '$parent-field',
4102 'parent_id': '$current-mode-obj-id',
4103 'data': '$data',
4104 'create': '$create'}})
4105
4106 command.add_action('pop-mode-stack', pop_mode_stack)
4107
4108 command.add_action('confirm', confirm_request,
4109 {'kwargs': {'prompt': '$prompt'}})
4110
4111 command.add_action('convert-vns-access-list', convert_vns_access_list,
4112 {'kwargs': {'obj_type': '$obj-type',
4113 'key' : '$current-mode-obj-id',
4114 'data' : '$data'}})
4115 command.add_action('display-table', command_display_table,
4116 {'kwargs': {'obj_type' : '$obj-type',
4117 'data' : '$data',
4118 'table_format' : '$format',
4119 'title' : '$title',
4120 'detail' : '$detail',
4121 'scoped' : '$scoped',
4122 'sort' : '$sort',
4123 }})
4124
4125 command.add_action('display-rest', command_display_rest,
4126 {'kwargs': { 'data' : '$data',
4127 'url' : '$url',
4128 'path' : '$path',
4129 'rest_type' : '$rest-type',
4130 'sort' : '$sort',
4131 'title' : '$title',
4132 'table_format' : '$format',
4133 'detail' : '$detail',
4134 }})
4135
4136 command.add_action('query-table', command_query_table,
4137 {'kwargs': {'obj_type' : '$obj-type',
4138 'data' : '$data',
4139 'key' : '$key',
4140 'scoped' : '$scoped',
4141 'sort' : '$sort',
4142 'crack' : '$crack',
4143 'append' : '$append',
4144 'clear' : True,
4145 }})
4146
4147 command.add_action('query-table-append', command_query_table,
4148 {'kwargs': {'obj_type' : '$obj-type',
4149 'data' : '$data',
4150 'key' : '$key',
4151 'scoped' : '$scoped',
4152 'sort' : '$sort',
4153 'crack' : '$crack',
4154 'append' : '$append',
4155 'clear' : False,
4156 }})
4157
4158
4159 command.add_action('query-rest', command_query_rest,
4160 {'kwargs': {'url' : '$url',
4161 'path' : '$path',
4162 'rest_type' : '$rest-type',
4163 'data' : '$data',
4164 'key' : '$key',
4165 'scoped' : '$scoped',
4166 'sort' : '$sort',
4167 'append' : '$append',
4168 'clear' : True,
4169 }})
4170
4171 command.add_action('query-rest-append', command_query_rest,
4172 {'kwargs': {'url' : '$url',
4173 'path' : '$path',
4174 'rest_type' : '$rest-type',
4175 'data' : '$data',
4176 'key' : '$key',
4177 'scoped' : '$scoped',
4178 'sort' : '$sort',
4179 'crack' : '$crack',
4180 'append' : '$append',
4181 'clear' : False,
4182 }})
4183
4184 command.add_action('join-rest', command_join_rest,
4185 {'kwargs': {'url' : '$url',
4186 'key' : '$key',
4187 'join_field' : '$join-field',
4188 'rest_type' : '$rest-type',
4189 'add_field' : '$add-field',
4190 'data' : '$data',
4191 'crack' : '$crack',
4192 'url_key' : '$url-key',
4193 }})
4194
4195 command.add_action('join-table', command_join_table,
4196 {'kwargs': {'obj_type' : '$obj-type',
4197 'data' : '$data',
4198 'key' : '$key',
4199 'key_value' : '$key-value',
4200 'add_field' : '$add-field',
4201 'join_field' : '$join-field',
4202 'crack' : '$crack',
4203 }})
4204
4205 command.add_action('crack', command_crack,
4206 {'kwargs': {
4207 'field' : '$field',
4208 }})
4209
4210 command.add_action('display', command_display,
4211 {'kwargs': {'data' : '$data',
4212 'table_format' : '$format',
4213 'sort' : '$sort',
4214 'detail' : '$detail',
4215 'title' : '$title',
4216 }})
4217
4218 command.add_action('legacy-cli', command_legacy_cli,
4219 {'kwargs': {'obj_type' : '$obj-type',
4220 'data' : '$data',
4221 'detail' : '$detail',
4222 'sort' : '$sort',
4223 'scoped' : '$scoped',
4224 }})
4225
4226 command.add_action('legacy-cli-no', command_legacy_cli_no,
4227 {'kwargs': {'obj_type' : '$obj-type',
4228 'data' : '$data',
4229 'detail' : '$detail',
4230 'sort' : '$sort',
4231 'scoped' : '$scoped',
4232 }})
4233
4234 command.add_action('version', command_version,
4235 {'kwargs': {'data' : '$data',
4236 }})
4237
4238 command.add_action('clearterm', command_clearterm)
4239
4240 command.add_action('display-cli', command_display_cli,
4241 {'kwargs': {'data' : '$data',
4242 'detail' : '$detail',
4243 }})
4244
4245 command.add_action('create-alias', command_create_alias,
4246 {'kwargs': {'obj_type' : '$obj-type',
4247 'data' : '$data',
4248 'reserved' : '$reserved',
4249 'fail_if_exists' : '$fail-if-exists',
4250 }})
4251
4252 command.add_action('delete-alias', command_delete_alias,
4253 {'kwargs': {'obj_type' : '$obj-type',
4254 'data' : '$data',
4255 }})
4256
4257 command.add_action('create-tag', command_create_tag,
4258 {'kwargs': {'obj_type' : '$obj-type',
4259 'data' : '$data',
4260 }})
4261
4262 command.add_action('delete-tag', command_delete_tag,
4263 {'kwargs': {'obj_type' : '$obj-type',
4264 'data' : '$data',
4265 }})
4266
4267 command.add_action('cli-set', command_cli_set,
4268 {'kwargs': {'variable' : '$variable',
4269 'data' : '$data',
4270 }})
4271
4272 command.add_action('cli-unset', command_cli_unset,
4273 {'kwargs': {'variable' : '$variable',
4274 'data' : '$data',
4275 }})
4276
4277 command.add_action('shell-command', command_shell_command,
4278 {'kwargs': {'script' : '$command',
4279 }})
4280
4281 command.add_action('rest-post-data', command_rest_post_data,
4282 {'kwargs': {'path': '$path',
4283 'data': '$data',
4284 'verb': '$verb'
4285 }})
4286
4287 command.add_action('prompt-update', command_prompt_update,)
4288
4289 command.add_action('controller-upgrade', command_controller_upgrade,
4290 {'kwargs': {'data': '$data'}})
4291
4292 command.add_action('controller-config-rollback', command_cluster_config_rollback,
4293 {'kwargs': {'data': '$data'}})
4294
4295 command.add_action('controller-decommission', command_controller_decommission,
4296 {'kwargs': {'data': '$data'}})
4297
4298 command.add_action('wait-for-controller', command_wait_for_controller,
4299 {'kwargs': {'within_command': True}})
4300
4301 command.add_action('factory-default', command_factory_default)
4302
4303 command.add_action('dump-log', command_dump_log,
4304 {'kwargs' : { 'data' : '$data', }})