blob: 227370921ad86c3afd37568b26f0faf39bebed0d [file] [log] [blame]
Srikanth Vavilapalli1725e492014-12-01 17:50:52 -08001#
2# Copyright (c) 2011,2012,2013 Big Switch Networks, Inc.
3#
4# Licensed under the Eclipse Public License, Version 1.0 (the
5# "License"); you may not use this file except in compliance with the
6# License. You may obtain a copy of the License at
7#
8# http://www.eclipse.org/legal/epl-v10.html
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13# implied. See the License for the specific language governing
14# permissions and limitations under the License.
15#
16
17import re
18import numbers
19import collections
20import traceback
21import types
22import json
23import time
24import sys
25import datetime
26import os
27import subprocess
28import socket
29import urllib2 # exception, dump_log()
30
31import modi
32import error
33import command
34import run_config
35import rest_to_model
36import url_cache
37
38from midw import *
39from vnsw import *
40#from html5lib.constants import DataLossWarning
41
# NOTE(review): set here but not read within this chunk -- presumably
# toggles ONOS-specific handling elsewhere in the CLI; confirm before
# removing.
onos=1
#
# ACTION PROCS
#Format actions for stats per table
def remove_unicodes(actions):
    """Strip the u'' unicode-repr markers from a flow-table actions string.

    Used when formatting per-table stats: the REST layer hands back
    strings containing python unicode reprs (e.g. u'OUTPUT') and this
    removes the leading "u" and the quote characters it pairs with.

    @param actions a string, possibly empty or None
    @return the cleaned string; '' when actions is empty or None
    """
    if actions:
        #TODO: Check:- Why I have to remove last two character from string
        #instead of 1 character to get rid of comma from last aciton
        a = ''
        b = ''
        newActions = ''
        isRemoved_u = False
        for ch in actions:
            if ch == 'u':
                a = 'u'
            if ch == '\'':
                b = '\''
            if isRemoved_u:
                # skip the single character that follows a removed u' pair
                isRemoved_u = False
                continue
            if (a + b) == 'u\'':
                # drop the 'u' already copied into newActions, then skip
                # the next character.
                # NOTE(review): 'b' is never reset, so any later bare 'u'
                # with a stale quote still triggers removal -- confirm
                # this is intended before changing it.
                newActions = newActions[:-1]
                a = ''
                isRemoved_u = True
            else:
                newActions += ch
        return newActions
    # bug fix: the original else-branch was a bare '' expression (a no-op)
    # and the function implicitly returned None; return '' explicitly.
    return ''
def renameActions(actions):
    """Rewrite verbose OpenFlow instruction/action names into the short
    forms the CLI shows when displaying per-table flow stats.

    @param actions a string of rendered actions
    @return the string with each long-form name replaced
    """
    # (long form, short form) pairs, applied in order.
    rewrites = (
        ('GOTO_TABLE', 'GOTO'),
        ('WRITE_ACTIONS', 'WRITE'),
        ('APPLY_ACTIONS', 'APPLY'),
        ('DEC_NW_TTL: True', 'DEC_NW_TTL'),
        ('POP_MPLS: True', 'POP_MPLS'),
        ('COPY_TTL_IN: True', 'COPY_TTL_IN'),
        ('COPY_TTL_OUT: True', 'COPY_TTL_OUT'),
        ('DEC_MPLS_TTL: True', 'DEC_MPLS_TTL'),
        ('SET_DL_SRC', 'SRC_MAC'),
        ('SET_DL_DST', 'DST_MAC'),
        ('SET_NW_SRC', 'SRC_IP'),
        ('SET_NW_DST', 'DST_IP'),
        ('CLEAR_ACTIONS: {CLEAR_ACTIONS: True}', 'CLEAR_ACTIONS'),
    )
    for long_name, short_name in rewrites:
        actions = actions.replace(long_name, short_name)

    return actions
89
def check_rest_result(result, message=None):
    """Raise a CommandRestError when a REST response carries an error.

    @param result the decoded REST response (mapping, list, or string)
    @param message optional text to prefix the raised error with
    @raises error.CommandRestError when result is a mapping containing a
            truthy 'error_type' entry
    """
    # compatibility fix: collections.Mapping moved to collections.abc in
    # python 3.3 and was removed from the collections namespace in 3.10;
    # resolve whichever attribute exists so the check works everywhere.
    mapping_type = getattr(collections, 'Mapping', None)
    if mapping_type is None:
        mapping_type = collections.abc.Mapping
    if isinstance(result, mapping_type):
        error_type = result.get('error_type')
        if error_type:
            raise error.CommandRestError(result, message)
95
# Pending tunnel config collected across the config-tunnel submode:
# tunnel_id is the id given on entry, tunnel_dict maps that id to the
# ordered list of node/adjacency labels entered in the submode; both
# are flushed to the controller by tunnel_config_exit().
tunnel_id=None
tunnel_dict={}
98def tunnel_create(data=None):
99 global tunnel_id,tunnel_dict
100 if sdnsh.description: # description debugging
101 print "tunnel_create:" , data
102 if data.has_key('tunnel-id'):
103 if (tunnel_id != None):
104 if sdnsh.description: # description debugging
105 print "tunnel_create: previous data is not cleaned up"
106 tunnel_id=None
107 tunnel_dict={}
108 tunnel_id=data['tunnel-id']
109 tunnel_dict[tunnel_id]=[]
110 if data.has_key('node-label'):
111 tunnel_dict[tunnel_id].append(data['node-label'])
112 if data.has_key('adjacency-label'):
113 tunnel_dict[tunnel_id].append(data['adjacency-label'])
114 if sdnsh.description: # description debugging
115 print "tunnel_create:" , tunnel_id, tunnel_dict
116
def tunnel_config_exit():
    """
    Submode exit callback for config-tunnel: POST the accumulated
    tunnel id and label path to the controller, then clear the
    module-level collection state.
    """
    global tunnel_id,tunnel_dict
    if sdnsh.description: # description debugging
        print "tunnel_config_exit entered", tunnel_dict
    if tunnel_dict:
        url_str = ""
        entries = tunnel_dict[tunnel_id]
        url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
        obj_data = {}
        obj_data['tunnel_id']=tunnel_id
        obj_data['label_path']=entries
        result = "fail"
        try:
            result = sdnsh.store.rest_post_request(url_str,obj_data)
        except Exception, e:
            errors = sdnsh.rest_error_to_dict(e)
            print sdnsh.rest_error_dict_to_message(errors)
        # LOOK! successful stuff should be returned in json too.
        # state is cleared whether or not the POST succeeded
        tunnel_dict = {}
        tunnel_id = None
        if result != "success":
            print "command failed"
    else:
        print "empty command"
    #Clear the transit information
142
143def tunnel_remove(data=None):
144 if sdnsh.description: # description debugging
145 print "tunnel_remove:" , data
146 tunnel_id=data['tunnel-id']
147 url_str = "http://%s/rest/v1/tunnel/" % (sdnsh.controller)
148 obj_data = {}
149 obj_data['tunnel_id']=data['tunnel-id']
150 result = "fail"
151 try:
152 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
153 except Exception, e:
154 errors = sdnsh.rest_error_to_dict(e)
155 print sdnsh.rest_error_dict_to_message(errors)
156 if not result.startswith("SUCCESS"):
157 print result
158
159policy_obj_data = {}
160def policy_create(data=None):
161 global policy_obj_data
162 if sdnsh.description: # description debugging
163 print "policy_create:" , data
164 if data.has_key('policy-id'):
165 if policy_obj_data:
166 if sdnsh.description: # description debugging
167 print "policy_create: previous data is not cleaned up"
168 policy_obj_data = {}
169 policy_obj_data['policy_id'] = data['policy-id']
170 policy_obj_data['policy_type'] = data['policy-type']
171 if data.has_key('src_ip'):
172 for key in data:
173 policy_obj_data[key] = data[key]
174 if data.has_key('priority'):
175 policy_obj_data['priority'] = data['priority']
176 if data.has_key('tunnel-id'):
177 policy_obj_data['tunnel_id'] = data['tunnel-id']
178
179 if sdnsh.description: # description debugging
180 print policy_obj_data
181
def policy_config_exit():
    """
    Submode exit callback for config-policy: POST the accumulated
    policy_obj_data to the controller, then reset it.
    """
    global policy_obj_data
    if sdnsh.description: # description debugging
        print "policy_config_exit entered", policy_obj_data
    if policy_obj_data:
        url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
        result = "fail"
        try:
            result = sdnsh.store.rest_post_request(url_str,policy_obj_data)
        except Exception, e:
            errors = sdnsh.rest_error_to_dict(e)
            print sdnsh.rest_error_dict_to_message(errors)
        if result != "success":
            print "command failed"
        # clear the collected state regardless of the POST outcome
        policy_obj_data = {}
    else:
        print "empty command"
    #Clear the transit information
200
201def policy_remove(data=None):
202 if sdnsh.description: # description debugging
203 print "policy_remove:" , data
204 policy_id=data['policy-id']
205 url_str = "http://%s/rest/v1/policy/" % (sdnsh.controller)
206 obj_data = {}
207 obj_data['policy_id']=data['policy-id']
208 result = "fail"
209 try:
210 result = sdnsh.store.rest_post_request(url_str,obj_data,'DELETE')
211 except Exception, e:
212 errors = sdnsh.rest_error_to_dict(e)
213 print sdnsh.rest_error_dict_to_message(errors)
214 if result != "deleted":
215 print "command failed"
216
217
218
219def write_fields(obj_type, obj_id, data):
220 """
221 Typical action to update fields of a row in the model
222
223 @param obj_type a string, the name of the db table to update
224 @param obj_id a string, the value of the primary key in for the table
225 @param data a dict, the name:value pairs of data to update in the table
226 """
227 if sdnsh.description: # description debugging
228 print "write_fields:", obj_type, obj_id, data
229
230 pk_name = mi.pk(obj_type)
231 if not pk_name:
232 raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
233 if sdnsh.description: # description debugging
234 print "write_fields:", obj_type, pk_name, obj_id, data
235 for fk in mi.obj_type_foreign_keys(obj_type):
236 if fk in data and mi.is_null_allowed(obj_type, fk):
237 if data[fk] == 'default': # XXX much too magic, seems an option here would be good
238 data[fk] = None
239
240 result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
241 check_rest_result(result)
242
243
244def verify_row_includes(obj_type, pk_value, data, verify):
245 """
246 Intended to raise an exception when a user enters 'no field value',
247 and the field isn't currently set to value, for example:
248 'address-space as1 ; no address-space as2', should complain
249 that the 'address-space' field isn't currently set to 'as2'.
250
251 @param obj_type a string, identifies the db table
252 @param pk_value a string, identifies the value for the primary key
253 @param data is a dict, collecting the name:value pairs from the description
254 @verify the string or list of field names to be verified
255 """
256 if sdnsh.description: # description debugging
257 print "validate_row_includes:", obj_type, pk_value, data, verify
258
259 if type(verify) == str:
260 verify = [verify] # if not a list, make it a list
261
262 try:
263 row = sdnsh.get_object_from_store(obj_type, pk_value)
264 except Exception, e:
265 if sdnsh.debug or sdnsh.debug_backtrace:
266 print 'Failed lookup of %s:%s:%s', (obj_type, pk_value, e)
267 traceback.print_exc()
268 raise error.ArgumentValidationError("%s: '%s' doesn't exist" %
269 (obj_type, pk_value))
270 return
271
272 if sdnsh.description: # description debugging
273 print "validate_includes: ", row
274 for field in [x for x in verify if x in data and x in row]:
275 if row[field] != data[field]:
276 raise error.ArgumentValidationError("%s: %s found '%s' current value '%s'" %
277 (obj_type, field, data[field], row[field]))
278
279
def reset_fields(obj_type, arg_data,
                 obj_id = None, fields = None, match_for_no = None):
    """
    For an obj_type, revert fields back to their default value.
    This is the typical action for 'no' commands.

    When verify is set, this is a string or list of fields who's values
    must match in the table for the primary key associated with the reset.
    This allows command descriptions to identify any fields which need to
    be checked against, when they are explicidly named in the 'no' command,
    so that 'no XXX value' will verify that 'value' matches the current
    row's value before allowing the reset to continue

    @param obj_type a string, identifies the db table
    @param obj_id a string, identifies the value for the primary key of the row in the table,
            possibly unset, the key is looked for in the arg_data in that case.
    @param arg_data a dict, collection of name:value pairs from the description
    @param fields a list, collection of fields to update in the table
    @param match_for_no a string or list, list of fields to check for matched values in arg_data
    @raises error.CommandDescriptionError for a missing obj_type, pk, id, or field info
    """

    if obj_type == None:
        raise error.CommandDescriptionError("No object to reset (missing obj-type)")

    pk_name = mi.pk(obj_type)
    # If the fields aren't specified explicitly, then derive from the arg_data
    if fields is None:
        fields = []
        for field in arg_data.keys():
            # Only add arguments that correspond to valid fields in the object
            if mi.obj_type_has_field(obj_type, field):
                if field != pk_name: # don't reset primary keys
                    fields.append(field)

    if len(fields) == 0:
        raise error.CommandDescriptionError("No fields to reset: type: %s" % obj_type)

    # Get the primary key name
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    # Determine the row id: explicit parameter, arg_data, or (for
    # singletons) the pk's declared default value.
    if obj_id == None:
        if pk_name in arg_data:
            obj_id = arg_data[pk_name]
        elif mi.field_default_value(obj_type, pk_name):
            # unusual, but not impossible for singletons
            obj_id = mi.field_default_value(obj_type, pk_name)
        else:
            raise error.CommandDescriptionError("Can't find id value name for type: %s"
                                                " field %s" % (obj_type, pk_name))

    if match_for_no:
        verify_row_includes(obj_type, obj_id, arg_data, match_for_no)

    # Get the default values of the specified field from CLI model info
    data = {}
    for field in fields:
        if field == pk_name:
            continue
        type_info = mi.cli_model_info.get_field_info(obj_type, field)
        if type_info == None:
            raise error.CommandDescriptionError("Can't find field details for "
                                                "field %s in type %s" % (field, obj_type))
        data[field] = type_info.get('default')
        if data[field] == None and type_info.get('type') == 'BooleanField':
            data[field] = False
        # why does boolean not respect the default in the model?!?
        # data[field] = type_info.get('default') if type_info.get('type') != 'BooleanField' else False

    if sdnsh.description: # description debugging
        print "reset_fields:", obj_type, pk_name, obj_id, data, match_for_no

    # Invoke the REST API to set the default values
    try:
        result = sdnsh.rest_update_object(obj_type, pk_name, obj_id, data)
    except Exception, e:
        errors = sdnsh.rest_error_to_dict(e, obj_type)
        raise error.CommandError('REST', sdnsh.rest_error_dict_to_message(errors))
357
358
def obj_type_fields_have_default_value(obj_type, row, data):
    """
    Return True when all the fields have a default value,
    row is the queried data from the store,
    data is the data to be updated.

    The goal is to determine whether to delete or update
    the row in the store.

    @param obj_type a string, identifies the db table
    @param row a dict, the current row from the store ({} when absent)
    @param data a dict, the proposed new field values
    @return True when every (non-key, non-foreign-key) field would be at
            its default after applying data; False otherwise
    """

    ckf = []
    if mi.is_compound_key(obj_type, mi.pk(obj_type)):
        # XXX primitive compound keys' too?
        ckf = mi.compound_key_fields(obj_type, mi.pk(obj_type))

    for field in mi.obj_type_fields(obj_type):
        # key-ish and bookkeeping fields never count against "all default"
        if mi.is_primary_key(obj_type, field):
            continue
        if mi.is_foreign_key(obj_type, field):
            # perhaps only allow a single foreign key?
            continue
        # also any fields which are used to compound the ident.
        if field in ckf:
            continue
        # Needs a better way to identify non-model-fields
        if field == 'Idx':
            continue
        if mi.is_null_allowed(obj_type, field):
            # does this need to be more complex?
            if field in data and data[field] != None:
                return False
            continue # next field
        default_value = mi.field_default_value(obj_type, field)
        if default_value == None:
            if sdnsh.description: # description debugging
                print 'default_value: no default: %s %s' % (obj_type, field)
            return False
        # check to see if the updated value would be the default
        if field in data and data[field] != default_value:
            if sdnsh.description: # description debugging
                print 'default_value: not default %s %s %s' % \
                    (field, data[field], default_value)
            return False
        elif row.get(field, default_value) != default_value:
            # stored value isn't default, but the update may restore it
            if field in data and data[field] == default_value:
                if sdnsh.description: # description debugging
                    print 'default_value: db not default %s %s %s' \
                          ' new value in data %s is default' % \
                          (field, row[field], default_value, data[field])
                continue
            if sdnsh.description: # description debugging
                print 'default_value: db not default %s %s %s' % \
                    (field, row[field], default_value)
            return False
    return True
415
416
def update_config(obj_type, obj_id, data, no_command):
    """
    update_config is intended to write a row when the described data
    is different from the default values of the fields of the row.

    When the data described in the call updates the field's values
    to all default values, the row associated with the obj_id is
    deleted.

    This is intended to be used for models which contain configuration
    row data, and that every field has a default value,
    so that when the config data is transitioned to the default
    state, the row is intended to be removed. For these sorts of
    command descriptions, updating a field to some default value
    may result in the row getting deleted.

    @param obj_type a string, the name of the db table
    @param obj_id a string, the primary key value (a pk entry in data
           takes precedence)
    @param data a dict, name:value pairs to apply
    @param no_command a boolean, True for 'no' commands: fields revert
           to None when nullable, otherwise to their default value
    """

    c_data = dict(data) # make a local copy
    if sdnsh.description: # description debugging
        print "update_config: ", obj_type, obj_id, c_data, no_command

    if not mi.obj_type_exists(obj_type):
        raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)

    # collect any dict.key names which aren't fields in the object
    for unknown_field in [x for x in c_data.keys() if not mi.obj_type_has_field(obj_type, x)]:
        del c_data[unknown_field]

    # if its a no command, set the value to 'None' if it's allowed,
    # of to its default value otherwise
    if no_command:
        for field in c_data.keys():
            if mi.is_null_allowed(obj_type, field):
                c_data[field] = None
            else:
                # required to have a default value
                c_data[field] = mi.field_default_value(obj_type, field)

    # Get the primary key name
    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
    pk_value = obj_id
    if pk_name in data:
        pk_value = data[pk_name]
    if pk_name in c_data:
        del c_data[pk_name]

    # Query for the row, if it doesn't exist, create the item if any item isn't default
    if sdnsh.description: # description debugging
        print "update_config: query:", obj_type, pk_value

    result = sdnsh.rest_query_objects(obj_type, { pk_name : pk_value })
    check_rest_result(result)
    if len(result) == 0:
        # result[0] -> dictionary of field:value pairs
        # check to ensure c_data isn't just default row values
        if not obj_type_fields_have_default_value(obj_type, {}, c_data):
            if sdnsh.description: # description debugging
                print "update_config: create:", obj_type, c_data
            # populate the create dictionary
            create_dict = dict(c_data)
            create_dict[pk_name] = pk_value
            result = sdnsh.rest_create_object(obj_type, create_dict)
            check_rest_result(result)
            # NOTE(review): after the create, 'result' below holds the
            # create response rather than a re-queried row; confirm the
            # response shape supports result[0] indexing.
        else:
            if sdnsh.description: # description debugging
                print "update_config: no current row"
            return
    else:
        if sdnsh.description: # description debugging
            print "update_config: found row", result[0]

    if len(result) > 1:
        raise error.CommandInternalError("Multiple rows for obj-type: %s: pk %s" %
                                         (obj_type, pk_value))

    # See if the complete row needs to be deleted.
    # For each of the current fields, if a field's default doesn't exist,
    # skip the row delete, or if any field has a non-default value, update
    # the requested fields instead of deleting the row.
    if obj_type_fields_have_default_value(obj_type, result[0], c_data):
        # if the table has foreign keys, check no children refer to this table.
        no_foreign_keys_active = True
        if obj_type in mi.foreign_key_xref:
            for (fk_obj_type, fk_fn) in mi.foreign_key_xref[obj_type][mi.pk(obj_type)]:
                try:
                    rows = sdnsh.get_table_from_store(fk_obj_type, fk_fn,
                                                      pk_value, "exact")
                except Exception, e:
                    # best-effort: a failed child lookup counts as no rows
                    rows = []
                if len(rows):
                    if sdnsh.description: # description debugging
                        print "update_config: foreign key active:", \
                            fk_obj_type, fk_fn, pk_value
                    no_foreign_keys_active = False
                    break

        if no_foreign_keys_active:
            if sdnsh.description: # description debugging
                print "update_config: delete:", obj_type, pk_value
            try:
                delete_result = sdnsh.rest_delete_objects(obj_type, { pk_name : pk_value })
                check_rest_result(delete_result)
            except Exception, e:
                errors = sdnsh.rest_error_to_dict(e)
                raise error.CommandInvocationError(sdnsh.rest_error_dict_to_message(errors))
            return
        # XXX if a row from some table is removed, and that table is using
        # foreign keys, then the table which is refered to ought to be
        # reviewed, to see if all the entries of the row which this table
        # refer's to are default, and if that parent table is a config-style
        # table, with all default values for every field, there's a good
        # argument that the row ought to be removed.

    # See if any of the c_data items in the matching row are different
    # (ie: is this update really necessary?)
    update_necessary = False
    for (name, value) in c_data.items():
        if name in result[0]:
            if value != result[0][name]:
                update_necessary = True
                if sdnsh.description: # description debugging
                    print "update_config: update necessary:", name, result[0][name], value
        else:
            update_necessary = True

    if not update_necessary:
        if sdnsh.description: # description debugging
            print "update_config: no update needed", obj_type, pk_name, pk_value
        return

    if sdnsh.description: # description debugging
        print "update_config: update:", obj_type, pk_name, pk_value, c_data
    # Invoke the REST API to set the default values
    result = sdnsh.rest_update_object(obj_type, pk_name, pk_value, c_data)
    check_rest_result(result)
554
555
def delete_objects(obj_type, data, parent_field=None, parent_id=None):
    """
    Delete a row in the table.

    @param obj_type a string, the name of the table to update
    @param data a dictionary, name:value pairs to describe the delete
    @param parent_field a string, the name of a field in the obj_type,
            identifying a relationship between this table, and another table
    @param parent_id a string, the value of the parent_field, to identify
            another row in the other table identified by a field in this table
    """

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)

    query_data = dict(data)
    if parent_field:
        query_data[parent_field] = parent_id

    # case conversion
    # NOTE(review): the conversion writes into 'data', but 'query_data'
    # was copied above, so converted values never reach the query --
    # confirm whether this should operate on query_data instead.
    for field in data:
        if mi.obj_type_has_field(obj_type, field):
            case = mi.get_obj_type_field_case_sensitive(obj_type, field)
            if case:
                if sdnsh.description: # description debugging
                    print 'delete_objects: case convert %s:%s to %s' % \
                        (obj_type, field, case)
                data[field] = utif.convert_case(case, data[field])

    query_result = sdnsh.rest_query_objects(obj_type, query_data)
    check_rest_result(query_result)
    #
    # if there were no results, try to delete by removing any
    # items which have "None" values
    if len(query_result) == 0:
        for key in query_data.keys():
            if query_data[key] == None:
                del query_data[key]
        query_result = sdnsh.rest_query_objects(obj_type, query_data)
        check_rest_result(query_result)

    if sdnsh.description: # description debugging
        print "delete_objects:", obj_type, query_data
    delete_result = sdnsh.rest_delete_objects(obj_type, query_data)
    check_rest_result(delete_result)

    # clean up any children of the rows just removed
    for item in query_result:
        key = item[pk_name]
        sdnsh.cascade_delete(obj_type, key)
606
607
def set_data(data, key, value):
    """
    Action to associate a new name:value pair with 'data', the dictionary used
    to pass to REST API's. Allows the action to describe a value for a field
    which wasn't directly named in the description.

    @param data a dict, mutated in place
    @param key the dictionary key to set
    @param value the value to associate with key
    """
    if sdnsh.description: # description debugging
        print "set_data:", data, key, value
    data[key] = value
618
619
def write_object(obj_type, data, parent_field=None, parent_id=None):
    """
    Write a new row into a specific table.

    @param obj_type a string, the name of the table
    @param data a dict, name:value pairs for the new row
    @param parent_field a string, optional field naming a parent relation
    @param parent_id a string, value for parent_field; required when
           parent_field is given
    """
    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).

    if sdnsh.description: # description debugging
        print 'write_object: params ', obj_type, data, parent_field, parent_id
    data = dict(data) # data is overwriten in various situations below
    if parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description;'
                                                'improperly configured parent info for create-object')
        data[parent_field] = parent_id

    pk_name = mi.pk(obj_type)
    if not pk_name:
        raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)

    # look for unpopulated foreign keys.
    fks = mi.obj_type_foreign_keys(obj_type)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            if not fk in data or \
               (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):
                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                #   parts of the compound key are present

                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing a error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                else:
                    # single-field key: adopt the pk of a unique match
                    qr = sdnsh.rest_query_objects(fk_obj, data)
                    if len(qr) == 1:
                        data[fk] = qr[0][mi.pk(fk_obj)]

    if pk_name in data:
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query pk_name ', obj_type, pk_name, data
        case = mi.get_obj_type_field_case_sensitive(obj_type, pk_name)
        if case:
            data[pk_name] = utif.convert_case(case, data[pk_name])
        query_result = sdnsh.rest_query_objects(obj_type, { pk_name : data[pk_name]})
    else:
        # no pk given: match on every non-None field
        query_data = dict([[n,v] for (n,v) in data.items() if v != None])
        if sdnsh.description: # description debugging
            print command._line(), 'write_object: query ', obj_type, query_data
        query_result = sdnsh.rest_query_objects(obj_type, query_data)
    check_rest_result(query_result)

    # Consider checking to see if all the fields listed here
    # already match a queried result, if so, no write is needed

    if (len(query_result) > 0) and (pk_name in data):
        if sdnsh.description: # description debugging
            print "write_object: update object", obj_type, pk_name, data
        result = sdnsh.rest_update_object(obj_type, pk_name, data[pk_name], data)
    else:
        if sdnsh.description: # description debugging
            print "write_object: create_object", obj_type, data
        result = sdnsh.rest_create_object(obj_type, data)

    check_rest_result(result)

    # NOTE(review): cascading deletes against the rows that matched the
    # query, after an update to them, looks surprising -- confirm the
    # intent (presumably cleanup of dependent children).
    for item in query_result:
        key = item[pk_name]
        sdnsh.cascade_delete(obj_type, key)
709
710
711def delete_object(obj_type, data, parent_field=None, parent_id=None):
712 global sdnsh
713
714 data = dict(data)
715 if parent_field:
716 if not parent_id:
717 raise error.CommandDescriptionError('Invalid command description;'
718 'improperly configured parent info for delete-object')
719 data[parent_field] = parent_id
720
721 # case conversion
722 for field in data:
723 if mi.obj_type_has_field(obj_type, field):
724 case = mi.get_obj_type_field_case_sensitive(obj_type, field)
725 if case:
726 if sdnsh.description: # description debugging
727 print 'delete_object: case convert %s:%s to %s' % \
728 (obj_type, field, case)
729 data[field] = utif.convert_case(case, data[field])
730
731 if sdnsh.description: # description debugging
732 print "delete_object: ", obj_type, data
733 result = sdnsh.rest_delete_objects(obj_type, data)
734 check_rest_result(result)
735
736
def push_mode_stack(mode_name, obj_type, data, parent_field = None, parent_id = None, create=True):
    """
    Push a submode on the config stack.

    @param mode_name a string, the submode to enter (e.g. 'config-tunnel')
    @param obj_type a string, the associated db table, or None
    @param data a dict, fields identifying the submode's object
    @param parent_field a string, optional field naming a parent relation
    @param parent_id a string, value for parent_field
    @param create a boolean, when True a missing object is created
    """
    global sdnsh, modi

    # Some few minor validations: enable only in login, config only in enable,
    # and additional config modes must also have the same prefix as the
    # current mode.
    current_mode = sdnsh.current_mode()

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, data, parent_field, parent_id

    # See if this is a nested submode, or whether some current modes
    # need to be popped.
    if (mode_name.startswith('config-') and
      (not mode_name.startswith(current_mode) or (mode_name == current_mode))):

        sdnsh.pop_mode()
        current_mode = sdnsh.current_mode()
        # pop until it it matches
        while not mode_name.startswith(current_mode):
            if len(sdnsh.mode_stack) == 0:
                raise error.CommandSemanticError('%s not valid within %s mode' %
                                                 (mode_name, current_mode))
            sdnsh.pop_mode()
            current_mode = sdnsh.current_mode()

        # if there's a parent id, it is typically the parent, and audit
        # ought to be done to verify this
        if parent_field:
            data = dict(data)
            data[parent_field] = sdnsh.get_current_mode_obj()

    elif mode_name in ['config', 'enable', 'login']:
        # see if the mode is in the stack
        if mode_name in [x['mode_name'] for x in sdnsh.mode_stack]:
            if sdnsh.description: # description debugging
                print 'push_mode: popping stack for', mode_name
            current_mode = sdnsh.current_mode()
            while current_mode != mode_name:
                sdnsh.pop_mode()
                current_mode = sdnsh.current_mode()
            return


    # If we're pushing a config submode with an object, then we need to extend the
    # argument data that was entered explicitly in the command with the information
    # about the parent object (by default obtained by looking at the obj info on
    # the mode stack -- see default arguments for this action when it is added).
    elif parent_field:
        if not parent_id:
            raise error.CommandDescriptionError('Invalid command description; '
                                                'improperly configured parent info for push-mode-stack')
        data = dict(data)
        data[parent_field] = parent_id

    key = None
    if obj_type:
        # apply any model-declared case conversion to the identifying fields
        for field in data:
            if mi.obj_type_has_field(obj_type, field):
                case = mi.get_obj_type_field_case_sensitive(obj_type, field)
                if case:
                    if sdnsh.description: # description debugging
                        print 'push_mode: case convert %s:%s to %s' % \
                            (obj_type, field, case)
                    data[field] = utif.convert_case(case, data[field])


        # Query for the object both to see if it exists and also to determine
        # the pk value we're going to push on the stack. We need to do
        # the query in the case where the model uses compound keys and we're
        # specifying the individual fields that compose the compound key.
        result = sdnsh.rest_query_objects(obj_type, data)
        check_rest_result(result)
        if len(result) == 0 and create:
            #
            # For vns-interface, the association of 'rule' with the data dict
            # is difficult to explain via the command description. This is
            # obviously a poor method of dealing with the issue, but until
            # a better one arises (possibly REST api create? possibly
            # model validation code?), this solution works.
            if obj_type == 'vns-interface':
                data = associate_foreign_key_for_vns_interface(data)

            # Create the object and re-query to get the id/pk value
            # FIXME: Could probably optimize here if the data already
            # contains the pk value.
            if sdnsh.description: # description debugging
                print "push_mode: create ", obj_type, data
            result = sdnsh.rest_create_object(obj_type, data)
            check_rest_result(result)
            result = sdnsh.rest_query_objects(obj_type, data)
            check_rest_result(result)
        else:
            if sdnsh.description: # description debugging
                print "push_mode: object found", obj_type, result

        # Check (again) to make sure that we have an object
        if len(result) == 0:
            raise error.CommandSemanticError('Object not found; type = %s' % obj_type)

        # Check to make sure there aren't multiple matching objects. If there
        # are that would indicate a problem in the command description.
        if len(result) > 1:
            raise error.CommandDescriptionError('Push mode info must identify a single object;'
                                                'type = %s; data = %s' %
                                                (obj_type, str(data)))

        # Get the id/pk value from the object info
        pk_name = mi.pk(obj_type)
        if not pk_name:
            raise error.CommandDescriptionError("Can't find primary key name for type: %s" % obj_type)
        key = result[0][pk_name]
    else:
        pk_name = '<none>'

    if sdnsh.description: # description debugging
        print "push_mode: ", mode_name, obj_type, pk_name, key
    # submodes with exit hooks: tunnel/policy config is flushed on exit
    exitCallback = None
    if (mode_name == 'config-tunnel'):
        exitCallback = tunnel_config_exit
    if (mode_name == 'config-policy'):
        exitCallback = policy_config_exit
    sdnsh.push_mode(mode_name, obj_type, key, exitCallback)
863
864
865def pop_mode_stack():
866 global sdnsh
867
868 if sdnsh.description: # description debugging
869 print "pop_mode: "
870 sdnsh.pop_mode()
871
def confirm_request(prompt):
    """
    Ask the operator to confirm an action.

    Returns quietly in batch mode, or when the reply is 'y'/'yes'
    (case-insensitive); otherwise raises ArgumentValidationError.
    """
    global sdnsh

    if sdnsh.batch:
        return
    reply = raw_input(prompt).lower()
    if reply in ('y', 'yes'):
        return
    raise error.ArgumentValidationError("Expected y or yes, command: ")
881
882import c_data_handlers
883
884def convert_vns_access_list(obj_type, key, data):
885 """
886 For vns-access-group's, the access list which is the first parameter
887 needs to be converted into a vns-access-list foreign key. This is
888 possible since the vns name is part of the current object id.
889 """
890 global sdnsh, modi
891
892 key_parts = key.split('|')
893 if len(key_parts) != 3:
894 raise error.ArgumentValidationError("invalid id")
895 if not 'vns-access-list' in data:
896 raise error.ArgumentValidationError("missing vns-access-list")
897 try:
898 key_parts.pop()
899 vnskey='|'.join(key_parts)
900 entry = sdnsh.rest_query_objects('vns-access-list',
901 { 'vns' : vnskey,
902 'name' : data['vns-access-list']
903 })
904 except Exception, _e:
905 entry = []
906
907 if len(entry) != 1:
908 raise error.ArgumentValidationError("unknown acl %s" % data['vns-access-list'])
909 data['vns-access-list'] = entry[0]['id']
910
def command_query_object(obj_type, data, scoped, sort):
    """
    Return model entries (db rows) via the REST API.  Try to be
    very smart about using parameters and the model definition to
    figure out how to query for the entries.

    @param obj_type string, name of the model (db table) to query
    @param data dict of field:value pairs used to filter the query;
           a value of 'all' acts as a wildcard
    @param scoped when set, the current submode object id is folded
           into the query to restrict results to the current submode
    @param sort string, comma separated fields appended to the query
           as an 'orderby' request
    @return list of dicts, one per matching row
    """

    if sdnsh.description:
        print 'command_query_object: ', obj_type, data, scoped, sort

    skipforeignsearch=False
    if (obj_type=='virtualrouter-routingrule' or obj_type=='virtualrouter-interface'):
        skipforeignsearch=True
    # big_search describes a related search which must be done to
    # satisfy this request, see the relationship of tag-mapping to tag
    # as an example.
    big_search = []

    key = mi.pk(obj_type)
    #
    if mi.is_compound_key(obj_type, key):
        if sdnsh.description: # description debugging
            print "command_query_object: %s compound %s" % (obj_type, key)
        #
        # collect compound key names, look for these in the data,
        # if any of the values are 'all', remove the item from
        # the group of data.
        #
        # XXX needs work: we ought to check to see if the
        # compound key is part of some other key.
        #
        if scoped:
            # fold the current submode object's key fields into the query
            obj_d = { key : sdnsh.get_current_mode_obj() }
            mi.split_compound_into_dict(obj_type, key, obj_d, is_prefix = True)
            for (k,v) in obj_d.items():
                if k != key and not k in data:
                    data[k] = v

        new_data = {}
        dckfs = mi.deep_compound_key_fields(obj_type, key)
        if key in data:
            mi.split_compound_into_dict(obj_type, key, data, is_prefix = True)
        foreign_obj_type_search = {}

        for kf in dckfs:
            if mi.obj_type_has_field(obj_type, kf) and kf in data and data[kf] != 'all':
                new_data[kf] = data[kf]
            elif not mi.obj_type_has_field(obj_type, kf):
                # deep_compound_keys returns references via foreign keys.
                # if the field is missing in obj_type, its likely from
                # some related fk.
                for fk in mi.obj_type_foreign_keys(obj_type):
                    (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                        fk)
                    if kf == fk_name:
                        # print "FOUND MATCH ", kf, _fk_obj_type, fk_name
                        continue
                    elif not mi.is_compound_key( _fk_obj_type, fk_name):
                        continue
                    for fkcf in mi.compound_key_fields(_fk_obj_type, fk_name):
                        if fkcf in data and data[fkcf] != 'all':
                            # assume all models use COMPOUND_KEY_FIELDS
                            if _fk_obj_type not in foreign_obj_type_search:
                                foreign_obj_type_search[_fk_obj_type] = {}
                            foreign_obj_type_search[_fk_obj_type][fkcf] = data[fkcf]
                pass
            # see if foreign key fields are indirectly named
            elif mi.is_foreign_key(obj_type, kf):
                (_fk_obj_type, fk_name) = mi.foreign_key_references(obj_type,
                                                                    kf)
                if fk_name in data and data[fk_name] != 'all':
                    new_data[kf] = data[fk_name]
        if (not skipforeignsearch): #skip foreign key search for routingrule type
            if len(foreign_obj_type_search):
                # This means to collect the entries, a search though a
                # related obj_type (through foreign key) will need to be done
                # a single query isn't enough, unless all entries are collected
                # consider the relationship between tag-mapping and tags
                #
                # This code seems to handle single indirected foreign key
                # lookup, but if deep_compound_key_fields() found more than
                # three layers deep (the obj-type has a fk reference to a
                # table, which had a fk reference to another table, which
                # had a value to search with), this won't do the trick.
                # at that point some sort of recursive building of the
                # foreign keys would be needed to collect up the required
                # final searches
                for (_fk_obj_type, search) in foreign_obj_type_search.items():
                    fk_entries = sdnsh.rest_query_objects(_fk_obj_type, search)
                    # need to identify the name associated foreign key in this model
                    for fk in mi.obj_type_foreign_keys(obj_type):
                        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
                        if fk_obj == _fk_obj_type:
                            obj_type_field = fk
                            break
                    else:
                        raise error.CommandSemanticError("bigsearch: can't find fk reference"
                                                         "for %s for obj-type %s" %
                                                         (fk, obj_type))
                    big_search += [{obj_type_field:
                                    x[mi.pk(_fk_obj_type)]} for x in fk_entries]
                    # big_search would return id's for the _fk_obj_type,
                    # which can be used to search this obj_type
        # look for fields which are set in new_data, which aren't in data.
        for (field, value) in data.items():
            if field not in new_data:
                if mi.is_marked_searchable(obj_type, field) and value!='all':
                    new_data[field] = value

        data = new_data
    else:
        # Only allow fields which are searchable (XXX need a predicate)
        # only save primary key's and foreign keys.
        new_data = {}
        if key in data and mi.is_primary_key(obj_type, key):
            new_data[key] = data[key]
        for fk in mi.obj_type_foreign_keys(obj_type):
            if fk in data:
                new_data[fk] = data[fk]
            (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, fk)
            if fk_fn in data:
                new_data[fk_fn] = data[fk_fn]
        for f in mi.obj_type_fields(obj_type):
            if f in data and f not in new_data:
                new_data[f] = data[f]

        data = new_data

    if scoped:
        data[key] = sdnsh.get_current_mode_obj()

    # 'all' (or a missing value) on the primary key means "don't filter"
    if key in data and (data[key]=='all' or data[key]==None):
        del data[key]
    #
    # Now that the fields have been disassembled as much as possible, see
    # if some of the entries need to be cobbled back together.
    fks = mi.obj_type_foreign_keys(obj_type)
    if sdnsh.description: # description debugging
        print "command_query_object: %s foreign-key %s" % (obj_type, fks)
    if fks:
        for fk in fks:
            (fk_obj, fk_nm) = mi.foreign_key_references(obj_type, fk)

            if not fk in data or \
               (mi.is_compound_key(fk_obj, fk_nm) and data[fk].find('|') == -1):

                # use various techniques to populate the foreign key
                # - if the foreign key is for class which has a compound key, see if all the
                #   parts of the compound key are present
                if mi.is_compound_key(fk_obj, fk_nm):
                    kfs = mi.deep_compound_key_fields(fk_obj, fk_nm)
                    missing = [x for x in kfs if not x in data]
                    if len(missing) == 0:
                        # remove the entries, build the compound key for the foreign key reference
                        new_value = mi.compound_key_separator(fk_obj, fk_nm).\
                                    join([data[x] for x in kfs])
                        # verify the foreign key exists, if not complain and return,
                        # preventing an error during the create request
                        query_result = sdnsh.rest_query_objects( fk_obj, { fk_nm : new_value })
                        check_rest_result(query_result)
                        if len(query_result) == 0:
                            joinable_name = ["%s: %s" % (x, data[x]) for x in kfs]
                            raise error.CommandSemanticError("Reference to non-existant object: %s " %
                                                             ', '.join(joinable_name))
                        for rfn in kfs: # remove field name
                            del data[rfn]
                        data[fk] = new_value
                if sdnsh.description: # description debugging
                    print "command_query_object: %s foreign key construction " % obj_type, data
    #
    # Do something for alias displays, for obj_types which sdnsh says
    # are aliases, find the foreign reference in the alias obj_type,
    # and use that to determine the field name (fk_fn) in the parent.
    # Do lookups based on either the alias field name, or the parent's
    # fk_fn when set in data{}
    if obj_type in mi.alias_obj_types:
        field = mi.alias_obj_type_field(obj_type)
        (_fk_obj, fk_fn) = mi.foreign_key_references(obj_type, field)
        new_data = {}
        if fk_fn in data and data[fk_fn] != 'all':
            new_data[field] = data[fk_fn]
        elif field in data and data[field] != 'all':
            new_data[field] = data[field]
        data = new_data

    #
    # The sort value ought to be a comma separated list of fields within the model
    #
    if sort:
        data['orderby'] = sort

    if not mi.obj_type_has_model(obj_type):
        return rest_to_model.get_model_from_url(obj_type, data)

    if sdnsh.description: # description debugging
        print "command_query_object: ", obj_type, data

    if len(big_search):
        entries = []
        if sdnsh.description: # description debugging
            print "command_query_object: big search", big_search
        for bs in big_search:
            search = dict(list(bs.items()) + list(data.items()))
            entries += sdnsh.rest_query_objects(obj_type, search)
        # XXX needs to be re-sorted
        return entries

    return sdnsh.rest_query_objects(obj_type, data)
1119
1120
def command_display_table_join_entries(obj_type, data, entries, detail):
    """
    Fold additional data into 'entries' for a few specific obj_types
    before they're displayed as a table.

    @param obj_type string, name of the object type being displayed
    @param data dict of configured data items from the command description
    @param entries list of dicts (db rows) to be displayed; updated in place
    @param detail string, the detail-flavor for the output format
    @return the (possibly updated) detail string
    """
    if obj_type == 'tag-mapping':
        # lift persist from the parent tag
        if len(entries) == 1:
            entry = entries[0]
            tag = sdnsh.rest_query_objects('tag', { mi.pk('tag') : entry['tag']})
            entry['persist'] = tag[0]['persist']
        else:
            # key? value? for the _dict?
            tags = create_obj_type_dict('tag', mi.pk('tag'))
            for entry in entries:
                entry['persist'] = tags[entry['tag']][0]['persist']

    if obj_type == 'controller-node':
        # This is a bit odd, since the current node needs to be asked
        # which controller node it is
        url = "http://%s/rest/v1/system/controller" % sdnsh.controller

        result = sdnsh.store.rest_simple_request(url)
        check_rest_result(result)
        iam = json.loads(result)

        cluster_url = ("http://%s/rest/v1/system/ha/clustername"
                       % sdnsh.controller)
        result = sdnsh.store.rest_simple_request(cluster_url)
        check_rest_result(result)
        # perhaps ought to assert on len(result) == 1
        clustername = json.loads(result)[0]['clustername']

        for entry in entries:
            controller = None
            if entry['id'] == iam['id']:
                controller = sdnsh.controller
            else:
                # find interfaces which have a firewall rule open for
                # tcp/80.  ie: ip for the interface with rest-api role
                ips = local_interfaces_firewall_open("tcp", 80, entry)

                # controller-interfaces needs to be examined to determine
                # if there's an ip address to use to discover the ha-role
                if len(ips) == 1:
                    # Not even certain if this is reachable
                    if ips[0]['discovered-ip'] != '':
                        controller = ips[0]['discovered-ip']
                    elif ips[0]['ip'] != '':
                        controller = ips[0]['ip']
                    else:
                        entry['ha-role'] = 'no-ip'
                        entry['errors'] = 'No IP Address'
                else:
                    entry['errors'] = 'No IP Address'

            if controller == None:
                entry['errors'] = 'No ip address configured'
                entry['ha-role'] = 'unknown'
                continue

            try:
                # ask the peer controller for its ha role directly
                url = "http://%s/rest/v1/system/ha/role" % controller
                result = sdnsh.store.rest_simple_request(url, timeout = 2)
                check_rest_result(result)
                ha_role = json.loads(result)
                entry['ha-role'] = ha_role['role']
                if not 'clustername' in ha_role:
                    entry['errors'] = 'no clustername in ha-role rest api'
                    entry['ha-role'] = 'Untrusted: %s' % ha_role['role']
                elif ha_role['clustername'] != clustername:
                    entry['errors'] = 'Not in HA Cluster, requires decomission'
                    entry['ha-role'] = 'External Cluster: %s' % ha_role['role']
                if 'change-date-time' in ha_role:
                    entry['change-date-time'] = ha_role['change-date-time']
                if 'change-description' in ha_role:
                    entry['change-description'] = ha_role['change-description']
            except urllib2.HTTPError, e: # timeout?
                entry['errors'] = e.reason
                entry['ha-role'] = 'unknown'
                continue
            except urllib2.URLError, e: # timeout?
                entry['errors'] = '%s: %s' % (controller, e.reason)
                entry['ha-role'] = 'unknown'
                continue # don't try the uptime, it will fail too
            except Exception, e:
                entry['errors'] = str(e)
                entry['ha-role'] = 'unknown'

            url = "http://%s/rest/v1/system/uptime" % controller
            try:
                result = sdnsh.store.rest_simple_request(url)
                check_rest_result(result)
                uptime = json.loads(result)
                entry['uptime'] = uptime['systemUptimeMsec']

            except Exception, e:
                # uptime is best-effort only; leave the field unset on failure
                pass

    return detail
1219
1220
1221def command_display_table(obj_type, data, detail = 'default',
1222 table_format = None, title = None, scoped = None, sort = None):
1223
1224 """
1225 Display entries from a obj_type, with some filtering done via data,
1226 and the output format described by table_format, with the devel of detail in detail
1227
1228 @param obj_type string name of the object type
1229 @param data dictionary of configured data items from the description
1230 @param table_format string describing table format to use for output
1231 @param detail string describing the detail-flavor for format
1232 @param scoped string, when not null, indicates the submode level is used to filter query request
1233 @param sort string, describes sort to append to the query request
1234 """
1235
1236 if not mi.obj_type_exists(obj_type):
1237 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1238
1239 if sdnsh.description: # description debugging
1240 print "command_display_table:", obj_type, data, table_format, detail, scoped, sort
1241
1242 if 'detail' in data:
1243 detail = data['detail']
1244
1245 if not table_format:
1246 if 'format' in data:
1247 table_format = data['format']
1248 else:
1249 table_format = obj_type
1250 if 'scoped' in data:
1251 scoped=data['scoped']
1252 del data['scoped']
1253 entries = command_query_object(obj_type, data, scoped, sort)
1254 if sdnsh.description: # description debugging
1255 print "command_display_table: %d entries found, using %s" % (len(entries), data)
1256
1257 # update any of the pretty-printer tables based on the obj_type
1258 obj_type_show_alias_update(obj_type)
1259
1260 # with_key manages whether a 'detail' or table is displayed.
1261 with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'
1262
1263 # pick foreign keys which are compound keys, explode these into fields
1264 fks = [x for x in mi.obj_type_foreign_keys(obj_type) if mi.is_compound_key(obj_type,x)]
1265 for entry in entries:
1266 for fk in fks:
1267 if fk in entry: # fk may be null-able
1268 mi.split_compound_into_dict(obj_type, fk, entry, True)
1269 #
1270 detail = command_display_table_join_entries(obj_type, data, entries, detail)
1271
1272 # use display_obj_type_rows since it (currently) joins fields for obj_types.
1273 display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)
1274 if title:
1275 return title + display
1276 return display
1277
1278
1279def command_display_rest_join_entries(table_format, data, entries, detail):
1280 """
1281 @param table_format string, identifying the final table output
1282 @param data dict, used to query the rest api output
1283 @param entries list of dicts, ready to be displayed
1284 @return string replacing detail
1285
1286 """
1287
1288 if sdnsh.description: # description debugging
1289 print "command_display_rest_join_entries: ", table_format, data, detail
1290
1291 if table_format == 'controller-interface':
1292 # join firewall rules for these interfaces
1293 for intf in entries:
1294 rules = [x['rule'] for x in sdnsh.get_firewall_rules(intf['id'])]
1295 intf['firewall'] = ', '.join(rules)
1296
1297 if table_format == 'system-clock':
1298 # join the 'time' string, possibly remove 'tz' from entries[0]
1299 entries[0]['time'] = sdnsh.get_clock_string(entries[0], data.get('detail'))
1300 return 'details' # force table format
1301
1302 return detail
1303
1304
1305def command_display_rest_type_converter(table_format, rest_type, data, entries):
1306 """
1307 the expected display table_format is a list of dictionaries
1308 each dictionary has the field : value pairs. Many rest api's
1309 return a dictionary of different layers, the description
1310 provides a rest-type, which is used to describe the form
1311 of the value returned from the rest api.
1312 """
1313
1314 if sdnsh.description: # description debugging
1315 print "command_display_rest_type_converter: ", table_format, rest_type
1316
1317 if rest_type.startswith('dict-of-list-of-'):
1318 # entries look like { row_name : [value, ...], ... more-row-value-pairs }
1319 #
1320 # dict-of-list-of: a dict with key's which are given
1321 # the name of the first token, then the dict's value is
1322 # a list which can be given an associated name.
1323 # for example 'dict-of-list-of-cluster-id|[switches]'
1324 #
1325 # 'dict-of-list-of-switch' is a dict with key : value's
1326 # where the value is a list. The member's of the list
1327 # are dictionaries. the key of the outer dict is added to
1328 # each of the dicts, and this interior dict is added to
1329 # the final output list.
1330
1331 # identify the added key from the rest_type
1332 key = rest_type.replace('dict-of-list-of-','')
1333 parts = key.split('|')
1334 names = None
1335 build_list = False
1336 if len(parts) > 0:
1337 key = parts[0]
1338 names = parts[1:] # should only be one name
1339 if len(names) > 0 and names[0][0] == '[':
1340 build_list = True
1341 formatted_list = []
1342 for (row_name, rows) in entries.items():
1343 if not rows:
1344 continue
1345 # use the names as ways of describing each of the list's items
1346 if type(rows) == list and build_list:
1347 # name[0] looks like '[switches]', requesting that this
1348 # list become switches : [rows]
1349 formatted_list.append({key : row_name, names[0][1:-1] : rows})
1350 elif type(rows) == list:
1351 for row in rows:
1352 add_dict = {key : row_name}
1353 if type(row) == str or type(row) == unicode:
1354 add_dict[names[0]] = row
1355 elif type(row) == dict:
1356 # addition names make no difference
1357 add_dict.update(row)
1358 formatted_list.append(add_dict)
1359 elif type(rows) == dict:
1360 do_append = True
1361 new_row = { key : row_name }
1362 for name in [x for x in names.keys() if x in row]:
1363 item = row[name]
1364 if type(item) == str or type(item) == unicode:
1365 new_row[name] = item
1366 if type(item) == dict:
1367 new_row[name].update(item)
1368 if type(item) == list:
1369 do_append = False
1370 for i_row in item:
1371 new_row.update(i_row)
1372 formatted_list.append(new_row)
1373 new_row = { key : row_name }
1374 if do_append:
1375 formatted_list.append(new_row)
1376
1377 entries = formatted_list
1378 elif rest_type.startswith('dict-of-dict-of-'):
1379 # entries looks like { row_name : { [ { }, ... ] } }
1380 # ^
1381 # want this |
1382 # ie: dict with a value which is a dict, whose
1383 # 'dict-of-dict-of-switch|ports' The dict has key : values
1384 # where the value is a dict. That dict has the 'switch' : key
1385 # added, and it becomes the final output dict.
1386 #
1387 # if a second name is included, then the outer dict is
1388 # examined to find these values (ie: values[names]), and these
1389 # get added to the final output dict.
1390 #
1391 # identify the added key from the rest_type
1392 key = rest_type.replace('dict-of-dict-of-','')
1393 parts = key.split('|')
1394 name = None
1395 if len(parts) > 0:
1396 names = parts[1:]
1397 key = parts[0]
1398 formatted_list = []
1399 for (row_name, row) in entries.items():
1400 row[key] = row_name
1401 do_append = False
1402 if names:
1403 new_row = {}
1404 for name in names:
1405 if name in row:
1406 item = row[name]
1407 if type(item) == str or type(item) == unicode:
1408 new_row[name] = item
1409 do_append = True
1410 elif type(item) == dict:
1411 if name == row_name:
1412 do_append = True
1413 elif type(item) == list:
1414 for i_row in item:
1415 row_items = {}
1416 row_items[key] = row_name
1417 row_items.update(i_row)
1418 formatted_list.append(row_items)
1419 if do_append:
1420 formatted_list.append(row)
1421
1422 else:
1423 formatted_list.append(row)
1424
1425 entries = formatted_list
1426 elif rest_type.startswith('dict-with-'):
1427 # rest result looks like: { k : v, k : { } }
1428 # ^
1429 # want this |
1430 # dict-with: typically used for dict returns which have
1431 # nested dict's who's values are promoted to a single
1432 # list with a dict with these values.
1433 #
1434 # identify the added key from the rest_type
1435 key = rest_type.replace('dict-with-','')
1436 names = key.split('|')
1437 collect_row = {}
1438 formatted_list = []
1439 for name in names:
1440 if name in entries:
1441 item = entries[name]
1442 if type(item) == str or type(item) == unicode or \
1443 type(item) == int or type(item) == long: # XXX float?
1444 collect_row[name] = item
1445 elif type(item) == list:
1446 for i_row in item:
1447 row_items = {}
1448 formatted_list.append(i_row)
1449 elif type(item) == dict:
1450 collect_row.update(item)
1451
1452 if len(collect_row) == 0:
1453 entries = formatted_list
1454 else:
1455 entries = [collect_row] + formatted_list
1456
1457 elif rest_type == 'dict':
1458 entries = [entries]
1459 else:
1460 raise error.CommandDescriptionError("Unknown rest-type: %s" % rest_type)
1461 return entries
1462
1463
def missing_part(key_parts, entry, key_case = False):
    """
    Find the first name from 'key_parts' that the 'entry' dict lacks.

    Used to identify rows which don't have all the parts needed to
    construct a join key, or a db-table or query "key" to support
    addition of two different tables.

    @param key_parts list of strings
    @param entry dictionary expected to contain every key_parts string
    @param key_case True when a part may carry a leading '~' to denote
           the field is lower-cased for joining; the '~' is stripped
           before the membership test
    @return the missing field name, or None when nothing is missing
    """
    for part in key_parts:
        if part in entry:
            continue
        if key_case == False:
            return part
        if part[0] != '~':
            return part
        if part[1:] not in entry:
            return part[1:]

    return None
1490
1491
def case_cvt(fn, f_dict):
    """
    Fetch field 'fn' from 'f_dict' as a string, for join comparisons.

    For join operations, the fields in the partial result can no longer
    be associated with any obj-type, so it can't be known whether the
    field is case sensitive.  A leading '~' on the field name requests
    case-normalization: the '~' is stripped and the looked-up value is
    lower-cased.  A missing field yields the empty string.
    """
    if fn[0] == '~':
        value = f_dict.get(fn[1:], '')
        return str(value.lower())
    return str(f_dict.get(fn, ''))
1507
1508
def obj_type_field_case(data, obj_type, field):
    """
    Return data[field] as a string, case-normalized when the model
    identifies the obj_type's field as case-normalized.
    """
    case = mi.get_obj_type_field_case_sensitive(obj_type, field)
    if case:
        return str(utif.convert_case(case, data[field]))
    return str(data[field])
1516
1517
def add_fields(dest, src):
    """
    Populate dict 'dest' with the name/value pairs of dict 'src',
    leaving existing 'dest' entries in place when the two values match
    apart from case.  This is handy since the original 'dest' entries
    may differ from 'src' only due to case normalization; by not
    updating those, 'dest' retains its original values.  Genuinely
    different values are overwritten with the 'src' value.
    """
    for (n,v) in src.items():
        if n not in dest:
            dest[n] = v
        elif str(dest[n]).lower() == str(v).lower():
            # bug fix: original compared against the unbound 'lower'
            # method (missing call parens), so this keep-original branch
            # never matched and src always clobbered dest.
            # should have better controls for when the case matters
            if sdnsh.description:
                print('ADD %s skipping updating %s <-> %s' % (n, dest[n], v))
        else:
            dest[n] = v
1536
1537
1538def command_query_table(obj_type, data,
1539 clear = True,
1540 key = None, append = None, scoped = None, sort = None, crack = None):
1541 """
1542 Leave the result in command's global query_result, which can
1543 be used by other c_action steps
1544
1545 'key' is one or more fields which are concatenated together to form
1546 the display-pipeline's version of a primary key. It could be the
1547 actual primary key of the table, or it could be some fields which
1548 appear in all the rows. Once the 'key' is constructed, it used to
1549 determine how results are added to the command.query_result.
1550
1551 If the existing entries are to be 'cleared', then te primary key's
1552 are simply added to the table. When the entries aren't cleared, then
1553 the computed primary key is used to join against existing items.
1554
1555 Finally, the dict field name for the primary key is a single character: '@'
1556 This name was picked since its not possible for the database to ever
1557 use that name.
1558 """
1559
1560 if not mi.obj_type_exists(obj_type):
1561 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1562
1563 if sdnsh.description: # description debugging
1564 print "command_query_table:", obj_type, data, clear, key, append, scoped, sort, crack
1565
1566 if 'scoped' in data:
1567 scoped=data['scoped']
1568 del data['scoped']
1569
1570 result = command_query_object(obj_type, data, scoped, sort)
1571 if sdnsh.description: # description debugging
1572 print "command_query_table: %d entries found, using %s" % \
1573 (len(result), data)
1574
1575 if crack:
1576 if crack == True:
1577 crack = mi.pk(obj_type)
1578 for entry in result:
1579 mi.split_compound_into_dict(obj_type, crack, entry, True)
1580
1581 if append:
1582 for entry in result:
1583 if type(append) == dict:
1584 entry.update(append)
1585 elif type(append) == list:
1586 entry.update(dict(append))
1587 else:
1588 entry[append] = True
1589
1590 # all the field from all the rows need to be present.
1591 if key:
1592 fields = key.split('|')
1593
1594 if clear:
1595 command.query_result = result
1596 if key:
1597 for r in result:
1598 missing = missing_part(fields, r)
1599 if missing:
1600 if sdnsh.description:
1601 print "command_query_table: ' \
1602 ' missing field in row %s (%s) " % (missing, obj_type)
1603 continue
1604 r['@'] = '|'.join([obj_type_field_case(r, obj_type, f) for f in fields])
1605 else:
1606 if key == None:
1607 if command.query_resuls != None:
1608 command.query_result += result
1609 else:
1610 command.query_result = result
1611 else:
1612 r_dict = {}
1613 for r in result:
1614 missing = missing_part(fields, r)
1615 if missing:
1616 if sdnsh.description:
1617 print "command_query_table: ' \
1618 ' missing field in row %s (%s) " % (missing, obj_type)
1619 continue
1620 pk = '|'.join([r[f] for f in fields])
1621 r_dict[pk] = r
1622 if hasattr(command, 'query_result') and command.query_result:
1623 for qr in command.query_result:
1624 if '@' in qr and qr['@'] in r_dict:
1625 add_fields(qr, r_dict[qr['@']])
1626 del r_dict[qr['@']]
1627 command.query_result += r_dict.values()
1628 else:
1629 for (r, value) in r_dict.items():
1630 value['@'] = '|'.join([value[f] for f in fields])
1631 command.query_result = r_dict.values()
1632
1633
def command_query_rest(data,
                       url = None, path = None, clear = True,
                       key = None, rest_type = None, scoped = None, sort = None, append = None):
    """
    Query a rest api and leave the result in command's global
    query_result, which can be used by other c_action steps
    (query-table, join-table, join-rest, display).

    'key' is one or more fields ('|' separated) which are concatenated
    together to form the display-pipeline's version of a primary key.
    It could be the actual primary key of the table, or it could be
    some fields which appear in all the rows.  Once the 'key' is
    constructed, it's used to determine how results are added to
    command.query_result.

    If the existing entries are to be 'cleared', then the primary key's
    are simply added to the table.  When the entries aren't cleared,
    the computed primary key is used to join against existing items.

    Finally, the dict field name for the primary key is a single
    character: '@'.  This name was picked since it's not possible for
    the database to ever use that name.

    @param data dict used to %-format the url and to parameterize conversion
    @param url string or list of strings; the first url that formats
           successfully against 'data' is used (relative to /rest/v1/)
    @param path alternative sdndb schema path to query instead of a url
    @param clear True replaces query_result; False joins/appends to it
    @param key '|' separated field names forming the join key
    @param rest_type shape descriptor passed to
           command_display_rest_type_converter to flatten the reply
    @param scoped unused here; printed for debugging only
    @param sort unused here; printed for debugging only
    @param append dict/list/name folded into every result row
    """

    if sdnsh.description: # description debugging
        print "command_query_rest:", url, path, rest_type, data, scoped, sort, append

    if url == None and path == None:
        raise error.CommandDescriptionError("missing url or path")

    if path:
        schema = sdnsh.sdndb.schema_detail(path)
        if schema:
            # NOTE(review): 'result' is only bound when schema is
            # truthy; the print below would raise NameError otherwise
            result = sdnsh.sdndb.data_rest_request(path)
        if key:
            # create a key dictionary, with the key values, pointing to
            # a path in the schema.
            pass
        # NOTE(review): unconditional print, not guarded by
        # sdnsh.description -- looks like leftover debugging
        print 'PATH', path, result
    else:
        # if url is a list, pick the first one which can be built from the data
        if type(url) == list:
            select_url = url
        else:
            select_url = [url]

        use_url = None
        for u in select_url:
            try:
                use_url = (u % data)
                break
            except:
                # best-effort: a url whose %-keys aren't all in 'data'
                # simply isn't selected
                pass

        if use_url == None:
            if sdnsh.description: # description debugging
                print "command_query_rest: no url found"
            return

        query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url

        if sdnsh.description: # description debugging
            print "command_query_rest: query ", query_url
        try:
            result = sdnsh.store.rest_simple_request(query_url)
            check_rest_result(result)
            entries = json.loads(result)
        except Exception, e:
            if sdnsh.description or sdnsh.debug:
                print 'command_query_rest: ERROR url %s %s' % (url, e)
            entries = []

        if entries == None or len(entries) == 0:
            if sdnsh.description: # description debugging
                print "command_query_rest: no new entries ", query_url
            if clear:
                command.query_result = None
            return

        # It certainly seems possible to map from url's to the type associated,
        # with the result, but it also makes sense to encode that type information
        # into the description
        if rest_type:
            result = command_display_rest_type_converter(None,
                                                         rest_type,
                                                         data,
                                                         entries)
            if sdnsh.description: # description debugging
                print "command_query_rest: %s #entries %d " % (url, len(entries))
                print result
        else:
            result = []
            import fmtcnv
            if (onos == 1) and (url == 'links'):
                # drop the reverse direction of each bidirectional link
                # NOTE(review): entries.remove() while iterating
                # 'entries' (in both loops) can skip elements; confirm
                # the dedup behavior is acceptable here
                for entry in entries:
                    src = entry.get('src')
                    dst = entry.get('dst')
                    for tempEntry in entries:
                        if cmp(src, tempEntry.get('dst')) == 0:
                            if cmp(dst, tempEntry.get('src')) == 0:
                                entries.remove(tempEntry)
                    result.append({
                        'src-switch' : fmtcnv.print_switch_and_alias(entry['src']['dpid']),
                        'src-port' : entry['src']['portNumber'],
                        'src-port-state' : 0,
                        'dst-switch' : fmtcnv.print_switch_and_alias(entry['dst']['dpid']),
                        'dst-port' : entry['dst']['portNumber'],
                        'dst-port-state' : 0,
                        'type' : entry['type'],
                        })
            else:
                result = entries

    if append:
        for entry in result:
            if type(append) == dict:
                entry.update(append)
            elif type(append) == list:
                entry.update(dict(append))
            else:
                entry[append] = True

    if key:
        fields = key.split('|')

    if clear:
        command.query_result = result
        if key:
            for r in result:
                r['@'] = '|'.join([r[f] for f in fields])
    else:
        if key == None:
            if command.query_result != None:
                command.query_result += result
            else:
                command.query_result = result
        else:
            # join new rows against existing query_result via the '@' key
            r_dict = {}
            for r in result:
                missing = missing_part(fields, r, key_case = True)
                if missing:
                    if sdnsh.description:
                        print "command_query_rest: missing field %s in row %s" % (missing, r)
                    continue
                pk = '|'.join([case_cvt(f, r) for f in fields])
                r_dict[pk] = r
            for qr in command.query_result:
                if '@' in qr and qr['@'] in r_dict:
                    add_fields(qr, r_dict[qr['@']])
1782
1783
1784def command_join_rest(url, data, key, join_field,
1785 add_field = None, rest_type = None, crack = None, url_key = None):
1786
1787 """
1788 url-key allows single row results to have a name:value added to the
1789 entry in situations where a single dictionary is computed after the
1790 rest-type conversion. this allows simple results from the url to
1791 have a keyword added to allow joins.
1792 """
1793 if not hasattr(command, 'query_result'):
1794 if sdnsh.description: # description debugging
1795 print "command_join_rest: no entries found"
1796 return
1797
1798 if command.query_result == None:
1799 if sdnsh.description: # description debugging
1800 print "command_join_rest: query_result: None"
1801 return
1802
1803 if sdnsh.description: # description debugging
1804 print "command_join_rest: %d entries found, using %s, url %s" % \
1805 (len(command.query_result), data, url)
1806 print "command_join_rest:", data, key, join_field
1807
1808 if url == None:
1809 return
1810 if join_field == None:
1811 return
1812 if key == None:
1813 return
1814
1815
1816 # Collect all the queries, removing any duplicates
1817 queries = {}
1818 for entry in command.query_result:
1819 # if url is a list, pick the first one which can be build from the data
1820 if type(url) == list:
1821 select_url = url
1822 else:
1823 select_url = [url]
1824
1825 use_url = None
1826 for u in select_url:
1827 try:
1828 use_url = (u % entry)
1829 break
1830 except:
1831 pass
1832
1833 if use_url == None:
1834 if sdnsh.description: # description debugging
1835 print "command_join_rest: no url found", url
1836 continue
1837 query_url = "http://%s/rest/v1/" % sdnsh.controller + use_url
1838
1839 if sdnsh.description: # description debugging
1840 print "command_join_rest: query ", query_url, entry
1841 if query_url in queries:
1842 continue
1843
1844 try:
1845 result = sdnsh.store.rest_simple_request(query_url)
1846 check_rest_result(result)
1847 entries = json.loads(result)
1848 except Exception, e:
1849 entries = []
1850
1851 if entries == None or len(entries) == 0:
1852 continue
1853
1854 # It certainly seems possible to map from url's to the type associated,
1855 # with the result, but it also makes sense to encode that type information
1856 # into the description
1857 if rest_type:
1858 queries[query_url] = command_display_rest_type_converter(None,
1859 rest_type,
1860 data,
1861 entries)
1862 #
1863 # url_key allows the addition of a key for joining for single results
1864 if url_key and len(queries[query_url]) == 1:
1865 queries[query_url][0][url_key] = entry.get(url_key)
1866
1867 if sdnsh.description: # description debugging
1868 print "command_join_rest: %s #entries %d #result %s" % \
1869 (url, len(entries), len(queries[query_url]))
1870 else:
1871 queries[query_url] = entries
1872
1873 # From the query results, generate the dictionary to join through
1874
1875 key_parts = key.split('|') # all the fields needed to make a key
1876 key_dict = {} # resulting key dictionary
1877 for (url, value) in queries.items():
1878 for entry in value:
1879 # see if all the key parts are in the entry
1880 missing = missing_part(key_parts, entry)
1881 if missing:
1882 if sdnsh.description:
1883 print 'command_join_rest: missing field %s in %s' % (missing, entry)
1884 continue
1885 new_key = '|'.join([str(entry[kn]) for kn in key_parts])
1886 if sdnsh.description: # description debugging
1887 print 'command_join_rest: new-key', new_key
1888 key_dict[new_key] = entry
1889
1890 # Using the key-dictinoary, look for matches from the original entries
1891
1892 if add_field:
1893 parts = add_field.split('|')
1894 from_fields = None
1895 if len(parts):
1896 add_field = parts[0]
1897 from_fields = parts[1:]
1898
1899 join_parts = join_field.split('|')
1900 for entry in command.query_result:
1901 if len(join_parts):
1902 missing = missing_part(join_parts, entry, key_case = True)
1903 if missing:
1904 if sdnsh.description: # description debugging
1905 print "command_join_rest: missing field %s in %s" % (missing, entry)
1906 continue
1907
1908 joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
1909 else:
1910 if sdnsh.description: # description debugging
1911 print "command_join_rest: joining ", entry, join_field, entry.get(join_field)
1912 if not join_field in entry:
1913 continue
1914 joiner = case_cvt(join_field, entry)
1915
1916 if sdnsh.description: # description debugging
1917 print "command_join_rest: joining ", entry, joiner, key_dict.get(joiner)
1918
1919 if joiner in key_dict:
1920 # add all the entries from the key_dict
1921 if sdnsh.description: # description debugging
1922 print 'command_join_rest: ADD', key_dict[joiner]
1923 if add_field == None:
1924 add_fields(entry, key_dict[joiner])
1925 elif from_fields:
1926 if len(from_fields) == 1:
1927 # add a single field
1928 if from_fields[0] in key_dict[joiner]:
1929 entry[add_field] = key_dict[joiner][from_fields[0]]
1930 else:
1931 # add a dictionary
1932 entry[add_field] = dict([[ff, key_dict[joiner][ff]]
1933 for ff in from_fields])
1934 else:
1935 entry[add_field] = key_dict[joiner]
1936
1937 if sdnsh.description: # description debugging
1938 print "command_join_rest: ", command.query_result
1939
1940
1941def command_join_table(obj_type, data, key, join_field,
1942 key_value = None, add_field = None, crack = None):
1943 """
1944 Add fieds to the current command.query_result by looking up the entry in
1945 the db/store. key represents the value of the index to use from
1946 the entries read from the database. The key can be composed of
1947 multiple fields within the entry. The join_field is the name
1948 of the field within the command.query_result to use as the value to match
1949 against the key field.
1950
1951 When key_value is None, the matched entry from the join_field's is
1952 treated as a dictionary, and all the pair of name:values are added
1953 directly to the new entry.
1954
1955 When key_value is a field name, the joined entries are collected
1956 as a list, and added to the new entry a the key_value name.
1957 (see the use of tag-mapping as an example)
1958 """
1959 if not hasattr(command, 'query_result'):
1960 if sdnsh.description: # description debugging
1961 print "command_join_table: no entries found"
1962 return
1963
1964 if command.query_result == None:
1965 if sdnsh.description: # description debugging
1966 print "command_join_table: query_result: None"
1967 return
1968
1969 if sdnsh.description: # description debugging
1970 print "command_join_table: %d entries found, using %s, obj_type %s %s %s" % \
1971 (len(command.query_result), data, obj_type, key, join_field)
1972 print "command_join_table:", data, key, join_field
1973
1974 if join_field == None:
1975 return
1976 if key == None:
1977 return
1978
1979 if not mi.obj_type_exists(obj_type):
1980 raise error.CommandDescriptionError("Unknown obj-type: %s" % obj_type)
1981
1982 # build the join_dict, which will have keys for the items to
1983 # add into the entries
1984 if not mi.obj_type_has_model(obj_type):
1985 entries = rest_to_model.get_model_from_url(obj_type, data)
1986 else:
1987 entries = sdnsh.get_table_from_store(obj_type)
1988
1989 # determine whether specific field names are added
1990 if add_field:
1991 parts = add_field.split('|')
1992 from_fields = None
1993 if len(parts):
1994 add_field = parts[0]
1995 from_fields = parts[1:]
1996
1997 # constuct the join key for each row from the db table
1998 key_parts = key.split('|') # all the fields needed to make a key
1999 key_dict = {} # resulting key dictionary
2000 for entry in entries:
2001 # see if all the key parts are in the entry
2002 missing = missing_part(key_parts, entry)
2003 if missing:
2004 if sdnsh.description: # description debugging
2005 print "command_join_table: missing field %s in %s" % (missing, entry)
2006 continue
2007
2008 new_key = '|'.join([obj_type_field_case(entry, obj_type, kn) for kn in key_parts])
2009 if sdnsh.description: # description debugging
2010 print 'command_join_table: new-key', new_key, key_value
2011 if key_value:
2012 if not new_key in key_dict:
2013 key_dict[new_key] = [entry]
2014 else:
2015 key_dict[new_key].append(entry)
2016 else:
2017 key_dict[new_key] = entry
2018
2019
2020 # let 'crack' contain the field's name, not a boolean.
2021 if crack and crack == True:
2022 crack = mi.pk(obj_type)
2023
2024 # Using the key-dictinoary, look for matches from the original entries
2025
2026 join_parts = join_field.split('|')
2027 for entry in command.query_result:
2028 if len(join_parts):
2029 missing = missing_part(join_parts, entry, key_case = True)
2030 if missing:
2031 if sdnsh.description: # description debugging
2032 print "command_join_table: missing field %s in %s" % (missing, entry)
2033 continue
2034
2035 joiner = '|'.join([case_cvt(kn, entry) for kn in join_parts])
2036 else:
2037 if sdnsh.description: # description debugging
2038 print "command_join_table: joining ", entry, join_field, entry.get(join_field)
2039 if not join_field in entry:
2040 continue
2041 joiner = case_cvt(join_field, entry)
2042
2043 if joiner in key_dict:
2044 if crack:
2045 if not crack in key_dict[entry[joiner]]:
2046 if sdnsh.description: # description debugging
2047 print "command_join_table: field %s not in entry" % crack, key_dict[joiner]
2048 else:
2049 mi.split_compound_into_dict(obj_type, crack, key_dict[joiner], True)
2050
2051 # add all the entries from the key_dict
2052 if sdnsh.description: # description debugging
2053 print 'command_join_table: ADD %s as %s ' % (key_dict[joiner], add_field)
2054 if add_field == None:
2055 if key_value:
2056 entry[key_value] = key_dict[joiner]
2057 else:
2058 add_fields(entry, key_dict[joiner])
2059 elif from_fields:
2060 if len(from_fields) == 1:
2061 # add a single field
2062 if type(key_dict[joiner]) == list:
2063 entry[add_field] = [x[from_fields[0]] for x in key_dict[joiner]]
2064 else:
2065 entry[add_field] = key_dict[joiner][from_fields[0]]
2066 else:
2067 # add a dictionary with named fields
2068 if type(key_dict[joiner]) == list:
2069 for item in key_dict[joiner]:
2070 entry[add_field] = dict([[ff, item[ff]]
2071 for ff in from_fields])
2072 else:
2073 entry[add_field] = dict([[ff, key_dict[joiner][ff]]
2074 for ff in from_fields])
2075
2076 else:
2077 entry[add_field] = key_dict[joiner]
2078
2079 if sdnsh.description: # description debugging
2080 print "command_join_table: ", command.query_result
2081
2082
2083def command_display_rest(data, url = None, sort = None, rest_type = None,
2084 table_format = None, title = None, detail = None):
2085 """
2086 Perform a call to the rest api, and format the result.
2087
2088 When sort isn't None, it names a field whose's value are sorted on.
2089 """
2090 #just a hack check to implement decending sorting
2091 descending = False
2092 #raise error.ArgumentValidationError('\n\n\n %s' % (descending))
2093 if sdnsh.description: # description debugging
2094 print "command_display_rest: ", data, url, rest_type, table_format, detail
2095
2096 if not url:
2097 url = data.get('url')
2098 if not table_format:
2099 table_format = data.get('format')
2100
2101 check_single_entry = True
2102
2103 # if url is a list, pick the first one which can be build from the data
2104 select_url = url
2105 if url and type(url) == list:
2106 for u in url:
2107 try:
2108 select_url = (u % data)
2109 select_url = u # select this url from the list
2110 break
2111 except:
2112 pass
2113
2114 if not detail:
2115 detail = data.get('detail', 'default')
2116 url = "http://%s/rest/v1/" % sdnsh.controller + (select_url % data)
2117
2118 result = sdnsh.store.rest_simple_request(url)
2119 check_rest_result(result)
2120 if sdnsh.description: # description debugging
2121 print "command_display_rest: result ", result
2122 entries = json.loads(result)
2123 #rest_type = None
2124 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2125 #if 'realtimestats' in data and data['realtimestats'] == 'group':
2126
2127 entries2 = None
2128
2129
2130 if 'realtimestats' in data and data['realtimestats'] == 'group':
2131 url2 = "http://%s/rest/v1/" % sdnsh.controller + ("realtimestats/groupdesc/%(dpid)s/" % data)
2132 result2 = sdnsh.store.rest_simple_request(url2)
2133 check_rest_result(result2)
2134 if sdnsh.description: # description debugging
2135 print "command_display_rest: groupdesc result ", result2
2136 entries2 = json.loads(result2)
2137
2138 # It certainly seems possible to map from url's to the type associated,
2139 # with the result, but it also makes sense to encode that type information
2140 # into the description
2141 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2142 rest_type =False
2143 if rest_type:
2144 entries = command_display_rest_type_converter(table_format,
2145 rest_type,
2146 data,
2147 entries)
2148 if 'realtimestats' in data and data['realtimestats'] == 'group':
2149 if entries2 is not None:
2150 entries2 = command_display_rest_type_converter(table_format,
2151 rest_type,
2152 data,
2153 entries2)
2154
2155 if 'router' in data and data['router'] == 'router':
2156 combResult = []
2157 for entry in entries:
2158 attributes = entry.get('stringAttributes')
2159 #raise error.ArgumentValidationError('\n\n\n %s' % (attributes))
2160 combResult.append({
2161 'dpid' : entry.get('dpid'),
2162 'routerIP' : attributes['routerIp'],
2163 'name' : attributes['name'],
2164 'isEdgeRouter' : attributes['isEdgeRouter'],
2165 'routerMac' : attributes['routerMac'],
2166 'nodeSId' : attributes['nodeSid'],
2167 },)
2168 entries = combResult
2169 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2170 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'port':
2171 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2172 combResult = []
2173 portList = entries
2174 for port in portList:
2175 portData = port.get("port")
2176 name = portData.get("stringAttributes").get('name')
2177 portNo = portData.get("portNumber") & 0xFFFF # converting to unsigned16int
2178 subnetIp = port.get("subnetIp")
2179 adjacency = str(port.get('adjacency'))
2180 combResult.append({
2181 'name' :name,
2182 'portNo' : portNo,
2183 'subnetIp' : subnetIp,
2184 'adjacency' : adjacency,
2185 })
2186 entries = combResult
2187 if 'routerrealtimestats' in data and data['routerrealtimestats'] == 'adjacency':
2188 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2189 #raise error.ArgumentValidationError('\n\n\n %s' % (entries))
2190 combResult = []
2191 adjacencyPairList = entries
2192 for adjacencyPair in adjacencyPairList:
2193 adjacencySid = adjacencyPair.get("adjacencySid")
2194 ports = adjacencyPair.get("ports")
2195 combResult.append({
2196 'adjacencySid' : adjacencySid,
2197 'ports' : ports,
2198 })
2199 entries = combResult
2200 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2201
2202 if 'showtunnel' in data and (data['showtunnel'] == 'tunnel' or data['detail'] == 'details'):
2203 #eraise error.ArgumentValidationError('\n\n\n %s' % (entries))
2204 combResult = []
2205 tunnelList = entries
2206 for tunnel in tunnelList:
2207 labelStackList = (tunnel.get('labelStack'))
2208 labelStackString = str(labelStackList)
2209 labelStackString = remove_unicodes(labelStackString)
2210 #labelStackList = (tunnel.get('labelStack'))
2211 #labelStackString ='['
2212 #for labelSack in labelStackList:
2213 # for label in labelSack:
2214 # labelStackString += (label + ',')
2215 #if labelStackString == '[':
2216 # labelStackString = ''
2217 #else:
2218 # labelStackString = labelStackString[:-1]
2219 # labelStackString += ']'
2220 tunnelId = tunnel.get('tunnelId')
2221 tunnelPath = tunnel.get('tunnelPath')
2222 dpidGroup = str(tunnel.get('dpidGroup'))
2223 dpidGroup= remove_unicodes(dpidGroup)
2224 policies = tunnel.get('policies')
2225 combResult.append({
2226 'tunnelId' : tunnelId,
2227 'labelStack' : labelStackString,
2228 'dpidGroup' : dpidGroup,
2229 'tunnelPath' : tunnelPath,
2230 'policies' : policies,
2231 })
2232 entries = combResult
2233
2234 if 'showpolicy' in data and data['showpolicy'] == 'policy':
2235 #raise error.ArgumentValidationError('\n\n\n %s' % (data))
2236 combResult = []
2237 portList = entries
2238 for policy in portList:
2239 policyId = policy.get("policyId")
2240 policyType = policy.get("policyType")
2241 priority = policy.get("priority")
2242 tunnelId = policy.get('tunnelId')
2243 match = policy.get("match")
2244 dstIpAddress = match.get('dstIpAddress')['value'] if match.get('dstIpAddress') else '*'
2245 dstMacAddress = match.get('dstMacAddress')['value'] if match.get('dstMacAddress') else '*'
2246 dstTcpPortNumber = match.get('dstTcpPortNumber') if match.get('dstTcpPortNumber') else '*'
2247 etherType = ('0x'+ str(match.get('etherType'))) if match.get('etherType') else '*'
2248 ipProtocolNumber = match.get('ipProtocolNumber') if match.get('ipProtocolNumber') else '*'
2249 srcIpAddress = match.get('srcIpAddress')['value'] if match.get('srcIpAddress') else '*'
2250 srcMacAddress = match.get('srcMacAddress')['value'] if match.get('srcMacAddress') else '*'
2251 srcTcpPortNumber = match.get('srcTcpPortNumber') if match.get('srcTcpPortNumber') else '*'
2252 combResult.append({
2253 'policyId' : policyId,
2254 'policyType' : policyType,
2255 'tunnelId' : tunnelId,
2256 'priority' : priority,
2257 'dstIpAddress' : dstIpAddress,
2258 'dstMacAddress' : dstMacAddress,
2259 'dstTcpPortNumber': dstTcpPortNumber,
2260 'etherType' : etherType,
2261 'ipProtocolNumber': ipProtocolNumber,
2262 'srcIpAddress' : srcIpAddress,
2263 'srcMacAddress' : srcMacAddress,
2264 'srcTcpPortNumber': srcTcpPortNumber,
2265
2266 })
2267 entries = combResult
2268
2269 if 'realtimestats' in data and 'tabletype' in data and data['realtimestats'] == 'table':
2270 combResult = []
2271 if data['tabletype'] == 'ip':
2272 #for decending sorting
2273 descending = True
2274 for ipTableEntry in entries:
2275 match = ipTableEntry['match']
2276 networkDestination = '*'
2277 if match :
2278 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2279 #raise error.ArgumentValidationError('\n\n\n %s' % json.tool(entries))
2280 instructions = ipTableEntry['instructions']
2281 actions = str(instructions[0]) if instructions[0] else None
2282 if actions != None:
2283 actions = remove_unicodes(actions)
2284 actions = renameActions(actions)
2285 actions = actions.lower()
2286 else:
2287 actions =''
2288 combResult.append({
2289 'switch' : ipTableEntry['switch'],
2290 'byteCount' : ipTableEntry['byteCount'],
2291 'packetCount' : ipTableEntry['packetCount'],
2292 'priority' : ipTableEntry['priority'],
2293 'cookie' : ipTableEntry['cookie'],
2294 'durationSeconds' : ipTableEntry['durationSec'],
2295 'networkDestination' : networkDestination,
2296 'actions' : actions,
2297 })
2298 elif data['tabletype'] == 'mpls':
2299 for ipTableEntry in entries:
2300 match = ipTableEntry['match']
2301 mplsTc = '*'
2302 mplsLabel = '*'
2303 mplsBos = '*'
2304 if match :
2305 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2306 mplsLabel = match.get('mplsLabel') if match.get('mplsLabel') else '*'
2307 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2308 instructions = ipTableEntry['instructions']
2309 #raise error.ArgumentValidationError('\n\n\n %s' %len(actions))
2310 actions = str(instructions[0])if instructions[0] else None
2311 if actions != None:
2312 actions = remove_unicodes(actions)
2313 actions = renameActions(actions)
2314 actions = actions.lower()
2315 else:
2316 actions =''
2317 combResult.append({
2318 'switch' : ipTableEntry['switch'],
2319 'byteCount' : ipTableEntry['byteCount'],
2320 'packetCount' : ipTableEntry['packetCount'],
2321 'cookie' : ipTableEntry['cookie'],
2322 'priority' : ipTableEntry['priority'],
2323 'mplsTc' : mplsTc,
2324 'mplsLabel' : mplsLabel,
2325 'mplsBos' : mplsBos,
2326 'durationSeconds' : ipTableEntry['durationSec'],
2327 'actions' : actions
2328 })
2329 elif data['tabletype'] == 'acl':
2330 descending = True
2331 for ipTableEntry in entries:
2332 match = ipTableEntry['match']
2333 networkDestination ='*'
2334 networkProtocol = '*'
2335 networkSource = '*'
2336 mplsTc = '*'
2337 mplsLabel = '*'
2338 mplsBos = '*'
2339 transportDestination = '*'
2340 inputPort = '*'
2341 transportSource = '*'
2342 dataLayerSource = '*'
2343 dataLayerDestination = '*'
2344 dataLayerType = '*'
2345 if match :
2346 networkDestination = match.get('networkDestination') if match.get('networkDestination') else '*'
2347 networkProtocol = match.get('networkProtocol') if match.get('networkProtocol') else '*'
2348 networkSource = match.get('networkSource') if match.get('networkSource') else '*'
2349 mplsTc = match.get('mplsTc') if match.get('mplsTc') else '*'
2350 mplsLabel = match.get('mplsLabel')if match.get('mplsLabel') else '*'
2351 transportDestination = match.get('transportDestination') if match.get('transportDestination') else '*'
2352 transportSource = match.get('transportSource') if match.get('transportSource') else '*'
2353 inputPort = match.get('inputPort') if match.get('inputPort') else '*'
2354 dataLayerSource = match.get('dataLayerSource') if match.get('dataLayerSource') else '*'
2355 dataLayerDestination = match.get('dataLayerDestination') if match.get('dataLayerDestination') else '*'
2356 dataLayerType= match.get('dataLayerType') if match.get('dataLayerType') else '*'
2357 mplsBos = match.get('mplsBos') if match.get('mplsBos') else '*'
2358 instructions = ipTableEntry['instructions']
2359 actions = str(instructions[0])if instructions[0] else None
2360 if actions != None:
2361 actions = remove_unicodes(actions)
2362 actions = renameActions(actions)
2363 actions = actions.lower()
2364 else:
2365 actions = ''
2366 combResult.append({
2367 'switch' : ipTableEntry['switch'],
2368 'byteCount' : ipTableEntry['byteCount'],
2369 'packetCount' : ipTableEntry['packetCount'],
2370 'cookie' : ipTableEntry['cookie'],
2371 'priority' : ipTableEntry['priority'],
2372 'inputPort' : inputPort,
2373 'durationSeconds' : ipTableEntry['durationSec'],
2374 'networkSource' : networkSource,
2375 'networkDestination' : networkDestination,
2376 'networkProtocol' : networkProtocol,
2377 'dataLayerType' : dataLayerType,
2378 'dataLayerSource' : dataLayerSource,
2379 'dataLayerDestination' : dataLayerDestination,
2380 'mplsTc' : mplsTc,
2381 'mplsLabel' : mplsLabel,
2382 'mplsBos' : mplsBos,
2383 'transportDestination' : transportDestination,
2384 'transportSource' : transportSource,
2385 'actions' : actions
2386 })
2387 entries = combResult
2388
2389 if 'realtimestats' in data and data['realtimestats'] == 'group':
2390 combResult = []
2391 for groupStatEntry in entries:
2392 groupId = groupStatEntry["groupId"]
2393 groupDescEntry = None
2394 for entry in entries2:
2395 if groupId == entry["groupId"]:
2396 groupDescEntry = entry
2397 break
2398 if groupDescEntry is '':
2399 print "command_display_rest: missing group desc for group id %s" % (groupId)
2400 continue
2401
2402 if (len(groupStatEntry['bucketStats']) > 0):
2403 for bucketId in range(len(groupStatEntry['bucketStats'])):
2404 setsrcmac = ''
2405 if 'SET_DL_SRC' in groupDescEntry['bucketsActions'][bucketId]:
2406 setsrcmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_SRC']
2407 setdstmac = ''
2408 if 'SET_DL_DST' in groupDescEntry['bucketsActions'][bucketId]:
2409 setdstmac = groupDescEntry['bucketsActions'][bucketId]['SET_DL_DST']
2410 pushmpls = ''
2411 if 'PUSH_MPLS_LABEL' in groupDescEntry['bucketsActions'][bucketId]:
2412 pushmpls = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_LABEL']
2413 popmpls = ''
2414 if 'POP_MPLS' in groupDescEntry['bucketsActions'][bucketId]:
2415 popmpls = groupDescEntry['bucketsActions'][bucketId]['POP_MPLS']
2416 outport = ''
2417 if 'OUTPUT' in groupDescEntry['bucketsActions'][bucketId]:
2418 outport = groupDescEntry['bucketsActions'][bucketId]['OUTPUT']
2419 goToGroup = ''
2420 if 'goToGroup' in groupDescEntry['bucketsActions'][bucketId]:
2421 goToGroup = groupDescEntry['bucketsActions'][bucketId]['goToGroup']
2422 setBos= ''
2423 if 'PUSH_MPLS_BOS' in groupDescEntry['bucketsActions'][bucketId]:
2424 setBos = groupDescEntry['bucketsActions'][bucketId]['PUSH_MPLS_BOS']
2425 COPY_TTL_IN= ''
2426 if 'COPY_TTL_IN' in groupDescEntry['bucketsActions'][bucketId]:
2427 COPY_TTL_IN = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_IN']
2428 COPY_TTL_OUT= ''
2429 if 'COPY_TTL_OUT' in groupDescEntry['bucketsActions'][bucketId]:
2430 COPY_TTL_OUT = groupDescEntry['bucketsActions'][bucketId]['COPY_TTL_OUT']
2431 DEC_MPLS_TTL= ''
2432 if 'DEC_MPLS_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2433 DEC_MPLS_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_MPLS_TTL']
2434 DEC_NW_TTL= ''
2435 if 'DEC_NW_TTL' in groupDescEntry['bucketsActions'][bucketId]:
2436 DEC_NW_TTL = groupDescEntry['bucketsActions'][bucketId]['DEC_NW_TTL']
2437
2438 combResult.append({
2439 'groupid' : groupId,
2440 'grouptype' : groupDescEntry['groupType'],
2441 'totalpktcnt' : groupStatEntry['packetCount'],
2442 'totalbytecnt' : groupStatEntry['byteCount'],
2443 'bucketpktcnt' : groupStatEntry['bucketStats'][bucketId]['pktCount'],
2444 'bucketbytecnt' : groupStatEntry['bucketStats'][bucketId]['byteCount'],
2445 'setsrcmac' : setsrcmac,
2446 'setdstmac' : setdstmac,
2447 'pushMplsLabel' : pushmpls,
2448 'popmpls' : popmpls,
2449 'outport' : outport,
2450 'goToGroup' : goToGroup,
2451 'setBos' : setBos,
2452 'COPY_TTL_IN' : COPY_TTL_IN,
2453 'COPY_TTL_OUT' : COPY_TTL_OUT,
2454 'DEC_MPLS_TTL' : DEC_MPLS_TTL,
2455 'DEC_NW_TTL' : DEC_NW_TTL,
2456 })
2457 else:
2458 combResult.append({
2459 'groupid' : groupId,
2460 'grouptype' : groupDescEntry['groupType'],
2461 'totalpktcnt' : groupStatEntry['packetCount'],
2462 'totalbytecnt' : groupStatEntry['byteCount'],
2463 'bucketpktcnt' : '',
2464 'bucketbytecnt' : '',
2465 'setsrcmac' : '',
2466 'setdstmac' : '',
2467 'pushMplsLabel' : '',
2468 'popmpls' : '',
2469 'outport' : '',
2470 'goToGroup' : '',
2471 'setBos' : '',
2472 'COPY_TTL_IN' : '',
2473 'COPY_TTL_OUT' : '',
2474 'DEC_MPLS_TTL' : '',
2475 'DEC_NW_TTL' : '',
2476 })
2477 entries = combResult
2478 #
2479 if format:
2480 #
2481 detail = command_display_rest_join_entries(table_format, data, entries, detail)
2482 #if 'realtimestats' in data and data['realtimestats'] == 'flow':
2483 # entries = sdnsh.fix_realtime_flows(entries)
2484 # check_single_entry = False
2485
2486 if 'realtimestats' in data and data['realtimestats'] == 'features':
2487 for entry in entries:
2488 entry['stp-state'] = entry['state']
2489
2490 # update any of the pretty-printer tables based on the table_format (obj_type)
2491 obj_type_show_alias_update(table_format % data)
2492
2493 if check_single_entry and entries and len(entries) == 1 and detail == 'details':
2494 return sdnsh.pp.format_entry(entries[0],
2495 table_format % data,
2496 detail,
2497 sdnsh.debug)
2498 if sort:
2499 if descending:
2500 reverse = True
2501 else:
2502 reverse = False
2503 def sort_cmp(x,y):
2504 for f in sort:
2505 if f in x:
2506 c = cmp(x.get(f), y.get(f))
2507 if c != 0:
2508 return c
2509 return 0
2510 entries = sorted(entries, cmp=sort_cmp, reverse=reverse )
2511 if 'realtimestats' in data and data['realtimestats'] == 'group':
2512 repeatGroupId = -1
2513 length = len(entries)
2514 for i in range(0, length):
2515 entry = entries[i]
2516 groupId = entry.get('groupid')
2517 if groupId == repeatGroupId:
2518 entries[i]['groupid'] = ''
2519 else:
2520 repeatGroupId = groupId
2521
2522 display = sdnsh.pp.format_table(entries, table_format % data, detail)
2523 else:
2524 display = entries
2525
2526 if title:
2527 return title + display
2528 return display
2529
2530
2531def command_crack(field):
2532 """
2533 Part of the show pipeline, split is typically used with rest api's
2534 not associated with the model (database), since the cli has enough
2535 details of the relationships between model fields to understand
2536 which of the fields has a compound key value, and has options to
2537 crack those into component parts.
2538
2539 The operation is called 'crack' (not split), since the other
2540 options for some of the actions is called 'crack'
2541
2542 The field identifies the name of the field in the entry to
2543 split into parts, and the remaining '|' separated fields list
2544 the labels to associate in the result from each of the
2545 split components. Currently, the 'crack' character is '|',
2546 although this could be parameterized.
2547 """
2548 if sdnsh.description: # description debugging
2549 print "command_split: ", field
2550
2551 if hasattr(command, 'query_result'):
2552 entries = command.query_result
2553 if command.query_result == None:
2554 entries = []
2555 else:
2556 if sdnsh.description: # description debugging
2557 print "command_join_table: no entries found"
2558 entries = []
2559
2560 parts = field.split('|')
2561 if len(parts) == 0:
2562 if sdnsh.description: # description debugging
2563 print "command_join_table: field doesn't contain labels" \
2564 " use field|label1|label2|..."
2565 return
2566
2567 field = parts[0]
2568 label = parts[1:]
2569 many = len(label)
2570
2571 for entry in entries:
2572 if field in entry:
2573 parts = entry[field].split('|')
2574 if len(parts) and many >= len(parts) :
2575 # use enumerate to create a tuple for each item in parts,
2576 # assocaiting an index, which can be used to identify the
2577 # label to use for each of the elements; from that create
2578 # a dictionay, which is then used to update the entry
2579 entry.update(dict([[label[n],p] for (n,p) in enumerate(parts)]))
2580
2581
def command_display(data, table_format, detail = 'default', sort = None, title = None):
    """
    Generic 'show' action: format rows collected by an earlier query
    action in the same command description, and return the result as a
    displayable string.

    The rows are picked up from command.query_result (set by a previous
    query action); when no query ran, an empty table is displayed.

    @param data dict, field:value's from the command description; may
           carry a 'detail' override
    @param table_format string, obj-type name selecting the pretty-printer format
    @param detail string, display detail level, eg: 'default' or 'details'
    @param sort list of field names to sort rows by, or None
    @param title string prepended to the formatted table, or None
    @return formatted table as a string
    """

    if sdnsh.description: # description debugging
        print "command_display: ", data, table_format, detail

    # a 'detail' keyword in the command data overrides the parameter
    if 'detail' in data:
        detail = data['detail']

    # pick up the rows left behind by the preceding query action, if any
    if hasattr(command, 'query_result'):
        entries = command.query_result
        if command.query_result == None:
            entries = []
    else:
        if sdnsh.description: # description debugging
            print "command_join_table: no entries found"
        entries = []

    if sdnsh.description: # description debugging
        print "command_display: #entries ", len(entries)

    # XXX controller-node has an odd url, join-rest needs to be able to
    # be handed a complete url, and replace the ip address with the controller's
    # ip address.
    detail = command_display_table_join_entries(table_format, data, entries, detail)

    # update any of the pretty-printer tables based on the table_format (obj_type)
    obj_type_show_alias_update(table_format)

    # with_key manages whether a 'detail' or table is displayed.
    with_key = '<with_key>' if detail == 'details' and len(entries) > 0 else '<no_key>'

    # optional sort: compare requested fields in order, using a
    # trailing-integer-aware comparison (so eg: 'eth10' sorts after 'eth2')
    if sort:
        def sort_cmp(x,y):
            for f in sort:
                if f in x:
                    c = utif.trailing_integer_cmp(x.get(f),y.get(f))
                    if c:
                        return c
            return 0
        entries = sorted(entries, cmp=sort_cmp)

    # use display_obj_type_rows since it (currently) joins fields for obj_types.
    display = sdnsh.display_obj_type_rows(table_format, entries, with_key, detail)

    if title:
        return title + display
    return display
2630
2631
2632def command_legacy_cli(obj_type, data, detail = 'default', scoped = None, sort = None):
2633 """
2634 Unfortunatly, the command descriptions don't have enough different
2635 detail to describe how to join specific distinct fields. In the future,
2636 there will be rest api's for each of the cli requests; that should cause
2637 this trampoline code to become obsolete.
2638 """
2639
2640 if sdnsh.description: # description debugging
2641 print "command_legacy_cli: ", obj_type, data, detail, scoped, sort
2642
2643 # update any of the pretty-printer tables based on the obj_type
2644 obj_type_show_alias_update(obj_type)
2645
2646 #
2647 #
2648 # Various show command 'join' data to create a table not
2649 # directly available in the REST API, someday in the future,
2650 # these joins will be directly implemented in the REST API,
2651 # but these special cases still exist:
2652 #
2653 if 'running-config' in data:
2654 result = sdnsh.error_msg("No running-config choice")
2655 words = []
2656 if 'word' in data and data['word'] != 'all':
2657 words = [data['word']]
2658
2659 if data['running-config'] == 'running-config':
2660 # 'show vns XXX running-config'
2661 if 'vnsname' in data and data['vnsname'] != 'all':
2662 return sdnsh.show_vns_running_config(data['vnsname'],data['tenant'])
2663 elif 'vns' in data and data['vns']=='all':
2664 data['running-config'] = 'vns'
2665 elif 'tenant' in data:
2666 data['running-config']='tenant'
2667 words=[data['tenant']]
2668 if data['running-config'] in run_config.registry_items_enabled():
2669 result = run_config.perform_running_config(data['running-config'], sdnsh, config, words)
2670
2671 if result:
2672 return result
2673 return ''.join(config)
2674
2675 if obj_type == 'running-config':
2676 return run_config.implement_show_running_config([])
2677
2678 if obj_type == 'vns-interface':
2679 if scoped:
2680 # should check for missing 'id' in data
2681 data['vns'] = sdnsh.get_current_mode_obj()
2682
2683 if 'vns' in data:
2684 if data['vns'] == 'all':
2685 return sdnsh.display_vns_interface(None, {}, '<no_key>')
2686 vns_name=data['vns']
2687 return sdnsh.display_vns_interface(vns_name, {'vns': vns_name },
2688 '<no_key>', detail = 'scoped')
2689
2690 if obj_type == 'vns-switch-ports':
2691 if 'vns' in data:
2692 return sdnsh.show_vns_switch_ports([data['vns']])
2693 return sdnsh.show_vns_switch_ports([])
2694
2695 if obj_type == 'switch-ports-vns':
2696 if 'dpid' in data:
2697 return sdnsh.show_switch_ports_vns([data['dpid']])
2698 return sdnsh.show_switch_ports_vns([])
2699
2700 if obj_type == 'switch-interfaces':
2701 key = mi.pk(obj_type)
2702 if scoped:
2703 data['dpid'] = sdnsh.get_current_mode_obj()
2704
2705 # in legacy_cli to join the switch-interfaces with port stats
2706 port_obj = 'port'
2707 entries = sdnsh.show_sort_obj_type(obj_type,
2708 command_query_object(port_obj, data, scoped, sort))
2709
2710 # switch-interfaces is really class Port, and the primary key
2711 # is '#|switch|number, not name.
2712
2713 entries_dict = dict([['%s|%s' % (x['switch'], x['name']), x] for x in entries])
2714 # collect switch-interface-config
2715 sic = 'switch-interface-config'
2716 if 'dpid' in data and data['dpid'] != 'all':
2717 sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
2718 else:
2719 sic_dict = create_obj_type_dict(sic, mi.pk(sic))
2720
2721 # add switch-interface-config names when missing
2722 for (sic_id, sic_value) in sic_dict.items():
2723 if not sic_id in entries_dict:
2724 # add 'state' to this item for prettyprinting column width computation
2725 for sv in sic_value:
2726 sv['state'] = ''
2727 entries += sic_value
2728
2729 # collect the stats for the interfaces
2730 stats_url = 'realtimestats/port/%(dpid)s/' % data
2731 url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
2732 try:
2733 result = sdnsh.store.rest_simple_request(url)
2734 check_rest_result(result)
2735 stats = json.loads(result)
2736
2737 except Exception, e:
2738 stats = {}
2739
2740 # join realtimestats
2741 for entry in entries:
2742 if 'state' in entry:
2743 entry['stp-state'] = entry['state']
2744 stats_list = stats.get(entry['switch'])
2745 # Note, 'number' may be missing from entry if the switch
2746 # matches for switch-interface-config but the interface name
2747 # doesn't show up.
2748 if stats_list and 'number' in entry:
2749 ifn = entry['number']
2750 # Notice that the realtime stat's use a int for the 2^16 value here
2751 # The & 0xffff converts the "-x" to a positive 2^16 value
2752 item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
2753 if len(item) == 1:
2754 entry.update(item[0])
2755 if entry['id'] in sic_dict:
2756 entry.update(sic_dict[entry['id']][0])
2757
2758 # Update the alias mappings for display
2759 obj_type_show_alias_update(obj_type)
2760
2761 return sdnsh.pp.format_table(entries, obj_type, detail)
2762
2763 if obj_type == 'tunnel-interfaces':
2764 # Use the active tunnels to identify the interfaces on the
2765 # switches which are the tunneling interfaces, with that
2766 # collect to port -> if_name mappings from 'port', then
2767 # find all the switches interfaces, convert those port numbers to
2768 # if names, to collect only tunneling interfaces. Now collect
2769 # realtimestats for the switch's ports, and associate those
2770 # stats with any filtered interfaces, finally display the result
2771 tunnel_url = "tunnel-manager/%(dpid)s" % data
2772 url = "http://%s/rest/v1/" % sdnsh.controller + tunnel_url
2773 result = sdnsh.store.rest_simple_request(url)
2774 check_rest_result(result)
2775 tunnels = json.loads(result)
2776
2777 # use the active tunnels to
2778 # collect dpid's, convert the remote ip's to interface names.
2779 tunnel_ports = {}
2780 for t in tunnels:
2781 quad = t['tunnelPorts'].split('.')
2782 if_name = "vta%03d%03d%03d%03d" % (int(quad[0]), int(quad[1]),
2783 int(quad[2]), int(quad[3]))
2784 key = "%s|%s" % (t['dpid'], if_name)
2785 if not key in tunnel_ports:
2786 tunnel_ports[key] = {t['dpid']: t['tunnelPorts']}
2787
2788 # Collect interfaces on associated switch
2789 port_obj = 'port'
2790 entries = sdnsh.show_sort_obj_type(port_obj,
2791 command_query_object(port_obj, data, scoped, sort))
2792 # Associate port names with interface names
2793 port_to_if_name = {}
2794
2795 try:
2796 ports = sdnsh.get_table_from_store("port")
2797 except Exception, e:
2798 port = []
2799
2800 for port in ports:
2801 key_string = '%s|%s' % (port['switch'], port['number'])
2802 port_to_if_name[key_string] = port['name']
2803
2804 # Filter elements, 'filtered' only contains active tunnel interfaces
2805 filtered = []
2806 for e in entries:
2807 e['ifname'] = port_to_if_name[e['id']]
2808 key = '%s|%s' % (e['switch'], e['ifname'])
2809 if sdnsh.description: # description debugging
2810 print command._line(), key
2811 if key in tunnel_ports:
2812 if sdnsh.description: # description debugging
2813 print command._line(), "Found ", e['id']
2814 filtered.append(e)
2815 entries = filtered
2816
2817 # collect switch-interface-config
2818 sic = 'switch-interface-config'
2819 if 'dpid' in data:
2820 sic_dict = create_obj_type_dict(sic, mi.pk(sic), mi.pk(sic), data['dpid'])
2821 else:
2822 sic_dict = create_obj_type_dict(sic, mi.pk(sic))
2823
2824 # collect the stats for the interfaces
2825 stats_url = 'realtimestats/port/%(dpid)s/' % data
2826 url = "http://%s/rest/v1/" % sdnsh.controller + stats_url
2827 try:
2828 result = sdnsh.store.rest_simple_request(url)
2829 check_rest_result(result)
2830 stats = json.loads(result)
2831 except Exception, e:
2832 stats = {}
2833
2834 # join realtimestats
2835 for entry in entries:
2836 if 'state' in entry:
2837 entry['stp-state'] = entry['state']
2838 stats_list = stats.get(entry['switch'])
2839 if stats_list and 'number' in entry:
2840 ifn = entry['number']
2841 # Notice that the realtime stat's use a int for the 2^16 value here
2842 # The & 0xffff converts the "-x" to a positive 2^16 value
2843 item = [x for x in stats_list if (x['portNumber'] & 0xffff) == ifn]
2844 if len(item) == 1:
2845 entry.update(item[0])
2846 if entry['id'] in sic_dict:
2847 entry.update(sic_dict[entry['id']][0])
2848
2849 obj_type_show_alias_update('switch-interfaces')
2850
2851 return sdnsh.pp.format_table(entries, 'switch-interfaces', detail)
2852
2853 if obj_type == 'host-vns-interface-vns':
2854 words = []
2855 for w in []: # list of options to display_vns_mac_address_table
2856 if w in data:
2857 words[w] = data[w]
2858
2859 return sdnsh.display_vns_mac_address_table(data['vns'], words)
2860
2861 if obj_type == 'config':
2862 if 'config' in data:
2863 if 'version' in data:
2864 return sdnsh.implement_show_config([data['config'],data['version']])
2865 return sdnsh.implement_show_config([data['config']])
2866
2867 if 'config-diff' in data:
2868 if 'version' in data:
2869 return sdnsh.implement_show_config([ data['first'],
2870 'diff',
2871 data['second'],
2872 data['version']])
2873 return sdnsh.implement_show_config([data['first'],
2874 'diff',
2875 data['second'], ])
2876 return sdnsh.implement_show_config([])
2877
2878 if obj_type == 'vns-flow':
2879 if 'detail' in data:
2880 return sdnsh.show_vns_flow_annotated([data['vns'],
2881 'flow',
2882 data['detail']])
2883 return sdnsh.show_vns_flow_annotated([data['vns'], 'flow'])
2884
2885 if obj_type == 'tech-support':
2886 return sdnsh.do_show_tech_support([])
2887
2888 if obj_type == 'config-file':
2889 if 'file' in data:
2890 return sdnsh.implement_show_config_file(['config-file', data['config']])
2891 return sdnsh.implement_show_config_file(['config-file', ])
2892
2893 if obj_type == 'logging':
2894 if 'log-name' in data:
2895 return sdnsh.implement_show_logging([data['log-name']])
2896 return sdnsh.implement_show_logging([])
2897
2898 if obj_type == 'event-history':
2899 if 'count' in data:
2900 return sdnsh.do_show_event_history([data['event'],
2901 'last',
2902 str(data['count'])])
2903 return sdnsh.do_show_event_history([data['event']])
2904
2905 if obj_type == 'flow-cache':
2906 words = []
2907 if 'counters' in data:
2908 words.append('counters')
2909 elif 'application' in data:
2910 words.append('app')
2911 words.append(data['application'])
2912 words.append('app-instance')
2913 words.append(data['instance'])
2914
2915 return sdnsh.do_show_flow_cache(words)
2916
2917 if obj_type in ['controller-stats', 'switch-stats']:
2918 #
2919 # data['id'] is the name of the controller
2920 helper_item = obj_type.replace('-stats','')
2921 if helper_item == 'controller':
2922 helper_item = 'controller-node'
2923 key = mi.pk(helper_item)
2924 words = [helper_item, data[key], 'stats']
2925 if 'stats-type' in data:
2926 words.append(data['stats-type'])
2927 for (n,v) in data.items():
2928 if not n in [key, 'stats', 'stats-type']:
2929 words.append(n)
2930 words.append(v)
2931 return sdnsh.helper_show_object_stats(words)
2932
2933 if obj_type == 'switch-tcpdump':
2934 words = ['trace', data['dpid']]
2935 for (n,v) in data.items():
2936 if not n in ['tcpdump', 'dpid']:
2937 words.append(n)
2938 return sdnsh.do_trace(words)
2939
2940 if obj_type == 'copy':
2941 words = [data['source']]
2942 if 'dest' in data:
2943 words.append(data['dest'])
2944 return sdnsh.implement_copy(words)
2945
2946 if obj_type == 'write':
2947 return sdnsh.implement_write([data['target']])
2948
2949 if obj_type == 'this':
2950 obj_type = sdnsh.get_current_mode_obj_type()
2951 show_this = mi.obj_type_show_this(obj_type)
2952 if not show_this:
2953 return sdnsh.do_show_object(['this'])
2954 result = []
2955 for show in show_this:
2956 if type(show) is list and len(show) >= 3:
2957 # [ object, format, detail ]
2958 if len(result) > 0:
2959 result.append(mi.obj_type_show_title(show[0]))
2960 sort = None
2961 if len(show) > 3:
2962 sort = show[3]
2963 result.append(command_display_table(show[0], {},
2964 table_format = show[1],
2965 detail = show[2],
2966 sort = sort,
2967 scoped = True))
2968 elif type(show) is list and len(show) == 2:
2969 # [ object, detail ]
2970 if len(result) > 0:
2971 result.append(mi.obj_type_show_title(show[0]))
2972 result.append(command_display_table(show[0], {}, detail = show[1], scoped = True))
2973 else:
2974 result.append(sdnsh.do_show_object([show]))
2975 return '\n'.join(result)
2976
2977 if obj_type == 'version':
2978 return sdnsh.do_show_version([])
2979
2980 if obj_type == 'reload':
2981 return sdnsh.implement_reload()
2982
2983 if obj_type == 'test-command':
2984 if data['test-type'] == 'packet-in':
2985 return sdnsh.implement_test_packet_in(data)
2986 if data['test-type'] == 'path':
2987 return sdnsh.implement_test_path(data)
2988
2989 print 'command_legacy_cli: obj-type unknown: ', obj_type
2990
2991
def command_legacy_cli_no(obj_type, data, detail = 'default', scoped = None, sort = None):
    """
    Trampoline for 'no' commands back into the legacy implementation.

    Only the 'tag-mapping' obj_type is handled; any other obj_type
    falls through and returns None.
    """
    if obj_type != 'tag-mapping':
        return None
    return sdnsh.implement_no_tag(['tag', data['tag']])
2998
2999
3000def command_version(data):
3001 """
3002 The version command will later manage changing the syntax to match
3003 the requested version.
3004 """
3005 new_version = data.get('version')
3006 if new_version == None:
3007 return
3008
3009 version = new_version # save for error message
3010 new_version = sdnsh.desc_version_to_path_elem(new_version)
3011
3012 # skip version change is this is the current version.
3013 if sdnsh.desc_version == new_version:
3014 return
3015
3016 # see if the requested version exists
3017 if not sdnsh.command_packages_exists(new_version):
3018 print 'No command description group for version %s' % version
3019 return
3020
3021 # run 'env [envriron_vars] ... cli.py'
3022 command = ['env']
3023 command.append('CLI_COMMAND_VERSION=%s' % version)
3024 command.append('CLI_STARTING_MODE=config')
3025 if os.path.exists('/opt/sdnplatform/cli/bin/cli'):
3026 # controller VM
3027 command.append('/opt/sdnplatform/cli/bin/cli --init')
3028 else:
3029 # developer setup
3030 base = os.path.dirname(__file__)
3031 command.append(os.path.join(base, 'cli.py'))
3032 command.append('--init')
3033
3034 # dump the command descriptions, and read a new set.
3035 # open a subshell with a new command version
3036 subprocess.call(command, cwd=os.environ.get("HOME"))
3037
3038 return
3039
3040
def command_clearterm():
    """
    Print reset characters to the screen to clear the console
    """
    # 'reset' reinitializes the terminal, clearing screen and scrollback
    subprocess.call("reset")
3046
def command_display_cli(data):
    """
    Display various cli details
    (this may need to be re-factored into some general "internal" state show

    @param data dict, command description fields (currently unused)
    @return string: a formatted entry of cli state, followed by a table
            of the command submode transitions
    """
    # collect which debug settings are currently enabled
    debug = []
    if sdnsh.debug:
        debug.append('debug')
    if sdnsh.debug_backtrace:
        debug.append('backtrace')

    # all known submodes, from both the top-level and nested dictionaries
    modes = sdnsh.command_dict.keys() + sdnsh.command_nested_dict.keys()

    entry = {
             'version' : ', '.join(command.command_syntax_version.keys()),
             'desc'    : ', '.join(sorted(command.command_added_modules.keys())),
             'format'  : ', '.join(sorted(sdnsh.pp.format_added_modules.keys())),
             'modes'   : ', '.join(sorted(utif.unique_list_from_list(modes))),
             'debug'   : ', '.join(debug),
            }
    basic = sdnsh.pp.format_entry(entry, 'cli')

    mode_entries = command.command_submode_dictionary(modes)
    mode_table = sdnsh.pp.format_table(mode_entries, 'cli-modes')

    # (an unreachable trailing 'return' after this statement was removed)
    return basic + '\n\nCommand Submode Transition\n' + mode_table
3075
3076
3077def delete_alias_by_id(alias_obj_type, alias_value):
3078 """
3079 Common delete operation for alias, based on primary key
3080
3081 @param alias_obj_type string, name of table where single entry is removed
3082 @param alias_value string, value of primary key to delete
3083 """
3084 xref = mi.foreign_key_xref.get(alias_obj_type)
3085 if xref:
3086 # look for any referecnes to this alias_value. Since this
3087 # is an alias table, only the pk ought to exist in the xref.
3088 # When the alias is getting removed, any references to it
3089 # via foreign keys must also get removed.
3090 if len(xref) > 1 or not mi.pk(alias_obj_type) in xref:
3091 print 'Internal Inconsistency'
3092 else:
3093 for (fk_obj_type, fk_field) in xref[mi.pk(alias_obj_type)]:
3094 rows = sdnsh.get_table_from_store(fk_obj_type,
3095 fk_field,
3096 alias_value,
3097 'exact')
3098 for row in rows:
3099 sdnsh.rest_delete_object(fk_obj_type, row[mi.pk(fk_obj_type)])
3100 sdnsh.rest_delete_object(alias_obj_type, alias_value)
3101
3102
3103def delete_alias_by_fk(alias_obj_type, foreign_key):
3104 """
3105 Common delete operation for alias, by foreign key
3106
3107 @param alias_obj_type string, name of table where single entry is removed
3108 @param alias_value string, value of primary key to delete
3109 """
3110 # find all the id's based on the foreign key, then delete them all.
3111 # note: see similar midw alias_lookup_with_foreign_key()
3112
3113 foreign_field = mi.alias_obj_type_field(alias_obj_type)
3114 try:
3115 rows = sdnsh.get_table_from_store(alias_obj_type,
3116 foreign_field,
3117 foreign_key,
3118 "exact")
3119 except Exception, e:
3120 raise error.CommandInternalError("Can't fetch %s:%s" %
3121 (foreign_field, foreign_key))
3122 pk = mi.pk(alias_obj_type)
3123 for row in rows:
3124 delete_alias_by_id(alias_obj_type, row[pk])
3125
3126
3127def command_delete_alias(obj_type, data):
3128 """
3129 Action for delete-alias
3130
3131 A single row is deleted from an alias table.
3132 Current alias tables include host-alias, switch-alias, port-alias
3133
3134 @param obj_type string, name of alias table to manage
3135 @param data dict, collection of field:value's from command description
3136 """
3137 if sdnsh.description: # description debugging
3138 print "command_delete_alias: ", obj_type, data
3139
3140 parent_id = sdnsh.get_current_mode_obj()
3141
3142 key = mi.pk(obj_type)
3143 if key not in data:
3144 delete_alias_by_fk(obj_type, parent_id)
3145 else:
3146 delete_alias_by_id(obj_type, data[key])
3147
3148
3149def command_create_alias(obj_type, data, reserved = None, fail_if_exists = False):
3150 """
3151 Action for create-alias
3152
3153 Current alias tables include host-alias, switch-alias, port-alias
3154
3155 @param obj_type string, name of alias table to manage
3156 @param data dict, collection of field:value's from the command description
3157 """
3158 if sdnsh.description: # description debugging
3159 print "command_create_alias: ", obj_type, data, reserved, fail_if_exists
3160
3161 parent_obj_type = sdnsh.get_current_mode_obj_type()
3162 parent_id = sdnsh.get_current_mode_obj()
3163
3164 key = mi.pk(obj_type)
3165 if key not in data:
3166 raise error.CommandInternalError("Alias table '%s': description "
3167 "doesn't populate correct '%s' field as data" %
3168 (obj_type, key))
3169 alias_value = data[key]
3170 #
3171 # Determine if the alias name is allowed.
3172 if alias_value in sdnsh.reserved_words:
3173 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3174 % (alias_value, ','.join(sdnsh.reserved_words)))
3175 if reserved and type(reserved) != list:
3176 reserved = [reserved]
3177
3178 if reserved and alias_value in reserved:
3179 raise error.ArgumentValidationError('reserved name "%s" in "%s"'
3180 % (alias_value, ','.join(reserved)))
3181
3182 # Walk the foreign key's in the (alias) obj-type, looking
3183 # for the parent reference.
3184
3185 alias_fk = None
3186 obj_type_foreign_keys = mi.obj_type_foreign_keys(obj_type)
3187 if len(obj_type_foreign_keys) == 1:
3188 alias_fk = obj_type_foreign_keys[0]
3189 else:
3190 for alias_fn in obj_type_foreign_keys:
3191 (fk_ot, fk_fn) = mi.foreign_key_references(obj_type, alias_fn)
3192 if fk_ot == parent_obj_type:
3193 alias_fk = alias_fn
3194
3195 if not alias_fk:
3196 raise error.CommandInternalError("Alias table '%s' has no foreign key to '%s'" %
3197 (obj_type, parent_obj_type))
3198
3199 try:
3200 sdnsh.get_object_from_store(obj_type, alias_value)
3201 if sdnsh.description: # description debugging
3202 print "command_create_alias: delete ", obj_type, alias_value
3203 if fail_if_exists:
3204 raise error.ArgumentValidationError("Interface name '%s' already in use - cannot reassign" %(alias_value))
3205 delete_alias_by_id(obj_type, alias_value)
3206 except:
3207 pass
3208
3209 # Remove other existing alias for the same foreign key
3210 # (ie: only one alias per each item, this could be relaxed)
3211 # XXX improve method of managing errors here
3212 try:
3213 rows = sdnsh.get_table_from_store(obj_type,
3214 alias_fk,
3215 parent_id,
3216 "exact")
3217 except Exception, e:
3218 errors = sdnsh.rest_error_to_dict(e)
3219 print sdnsh.rest_error_dict_to_message(errors)
3220 rows = []
3221
3222 for row in rows:
3223 try:
3224 delete_alias_by_id(obj_type, row[key])
3225 if row[alias_fk] != parent_id:
3226 sdnsh.warning("Removed additional alias '%s'"
3227 ", also refers to %s '%s'" %
3228 (row[key], parent_obj_type, parent_id))
3229 except:
3230 if sdnsh.debug or sdnsh.debug_backtrace:
3231 traceback.print_exc()
3232
3233 # This set's the foreign key to allow the create to succeed
3234 c_dict = {
3235 key : alias_value,
3236 alias_fk : parent_id,
3237 }
3238
3239 if sdnsh.description: # description debugging
3240 print "command_create_alias: create ", obj_type, c_dict
3241 result = sdnsh.rest_create_object(obj_type, c_dict)
3242 check_rest_result(result)
3243 result = sdnsh.rest_query_objects(obj_type, c_dict)
3244 check_rest_result(result)
3245
3246 return None
3247
3248
def command_create_tag(obj_type, data):
    """
    obj_type needs to be one of the objects which implements
    a relationship to 'tag', for example: tag-mac-mapping

    data['tag'] carries a '<[tag-namespace.]name>=<value>' string.
    The tag row is created (or updated to be persistent) first, then a
    mapping row associating the tag with the current submode object is
    created if it doesn't already exist.

    @param obj_type string, tag-XXX-mapping table name
    @param data dict, command description fields; must include 'tag'
    @raise error.CommandSemanticError when obj_type has no relationship
           to the current submode object, or the tag syntax is malformed
    """

    item = sdnsh.get_current_mode_obj_type()
    fks = mi.obj_type_foreign_keys(obj_type)
    # find the foreign key of obj_type referencing the current submode's
    # obj_type; note 'fk' is reused near the end to build the mapping row
    for fk in fks:
        (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
        if fk_obj == item:
            break
    else:
        raise error.CommandSemanticError( "type mapping %s doesn't have "
                                          "relationship to the current object %s" %
                                          (obj_type, item))

    if sdnsh.description: # description debugging
        print "command_create_tag: create ", obj_type, data

    tag_and_value = data['tag'].split('=')
    if len(tag_and_value) != 2:
        # deal with tag_and_value's 'va=vb=vc...'
        raise error.CommandSemanticError("tag <[tag-namespace.]name>=<value> "
                                         ": associate tag with host")

    tag_parts = tag_and_value[0].split('.')
    if len(tag_parts) == 0:
        # note: str.split never returns an empty list, so this branch
        # is effectively unreachable
        raise error.CommandSemanticError("tag <[tag-namespace.]name>"
                                         ": must have a name")
    elif len(tag_parts) == 1:
        # no namespace given: use the default
        tag_namespace = "default"
        tag_name = tag_parts[0]
    elif len(tag_parts) >= 2:
        # the tag_name is not allowed to have '.'
        # collect all the '.'s together into the namespace
        tag_namespace = '.'.join(tag_parts[:-1])
        tag_name = tag_parts[-1]

    tag_value = tag_and_value[1]

    # first manage the tag ...
    tag_dict = {
        'namespace' : tag_namespace,
        'name'      : tag_name,
        'value'     : tag_value,
    }

    # create the tag row if it doesn't exist, otherwise mark the
    # existing one persistent
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_dict['persist'] = True
    if len(query) == 0:
        result = sdnsh.rest_create_object('tag', tag_dict)
        sdnsh.check_rest_result(result)
    elif len(query) == 1:
        update = sdnsh.rest_update_object('tag',
                                          mi.pk('tag'),
                                          query[0][mi.pk('tag')],
                                          tag_dict)
        sdnsh.check_rest_result(update)

    # re-query (without 'persist') to pick up the tag's primary key
    del tag_dict['persist']
    query = sdnsh.rest_query_objects('tag', tag_dict)
    sdnsh.check_rest_result(query)
    tag_id = query[0][mi.pk('tag')]

    # now create the tag-mapping
    tag_dict = {
        fk    : sdnsh.get_current_mode_obj(), # fk from early for loop
        'tag' : tag_id,
    }

    # only create the mapping when it doesn't already exist
    query = sdnsh.rest_query_objects(obj_type, tag_dict)
    sdnsh.check_rest_result(query)
    if len(query) == 0:
        result = sdnsh.rest_create_object(obj_type, tag_dict)
        sdnsh.check_rest_result(result)
3326
3327
3328def command_delete_tag(obj_type, data):
3329 """
3330 obj_type describes the tag-XXX-mapping which is getting
3331 managed, data has the tag 'string' to delete.
3332 """
3333 item = sdnsh.get_current_mode_obj_type()
3334 fks = mi.obj_type_foreign_keys(obj_type)
3335 for fk in fks:
3336 (fk_obj, fk_name) = mi.foreign_key_references(obj_type, fk)
3337 if fk_obj == item:
3338 break
3339 else:
3340 raise error.CommandSemanticError( "type mapping %s doesn't have "
3341 "relationship to the current object %s" %
3342 (obj_type, item))
3343
3344 if 'tag' not in data:
3345 raise error.CommandSemanticError('Tag value missing')
3346
3347 tag = data['tag']
3348 name_and_value = tag.split('=')
3349
3350 name_part = name_and_value[0].split('.')
3351 if len(name_part) == 1:
3352 namespace = 'default'
3353 name = name_part[0]
3354 elif len(name_part) >= 2:
3355 namespace = '.'.join(name_part[:-1])
3356 name = name_part[-1]
3357
3358 value = name_and_value[1]
3359 pk_value = sdnsh.unique_key_from_non_unique([namespace,
3360 name,
3361 value,
3362 sdnsh.get_current_mode_obj()])
3363 try:
3364 sdnsh.get_object_from_store(obj_type, pk_value)
3365 except Exception:
3366 raise error.CommandSemanticError('%s No such tag %s' % (obj_type, tag))
3367
3368 sdnsh.rest_delete_object(obj_type, pk_value)
3369
3370 # with that entry removed, check to see if any other
3371 # foreign keys assocaited with class Tag exist.
3372
3373 fk_value = sdnsh.unique_key_from_non_unique([namespace,
3374 name,
3375 value])
3376
3377 for tag_fields in mi.foreign_key_xref['tag']:
3378 for (fk_obj_type, fk_name) in mi.foreign_key_xref['tag'][tag_fields]:
3379 try:
3380 sdnsh.get_table_from_store(fk_obj_type, fk_name, fk_value)
3381 break
3382 except Exception, e:
3383 pass
3384 else:
3385 continue
3386 break
3387 else:
3388 try:
3389 sdnsh.rest_delete_object('tag', fk_value)
3390 except Exception, e:
3391 raise error.CommandSemanticError('base tag missing' % fk_value)
3392
3393
def command_rest_post_data(path, data=None, verb='PUT'):
    """
    Send a REST request with a payload to the controller and verify the
    reply.

    @param path string, appended to 'http://<controller>/rest/v1/'
    @param data dict, payload for the request body
    @param verb string, HTTP method, eg: 'PUT'
    """
    target = 'http://%s/rest/v1/%s' % (sdnsh.controller, path)
    reply = sdnsh.rest_post_request(target, data, verb)
    check_rest_result(reply)
    return None
3401
3402
3403def command_cli_variables_set(variable, value, data):
3404 global sdnsh
3405
3406 if variable == 'debug':
3407 print '***** %s cli debug *****' % \
3408 ('Enabled' if value else 'Disabled')
3409 sdnsh.debug = value
3410 elif variable == 'cli-backtrace':
3411 print '***** %s cli debug backtrace *****' % \
3412 ('Enabled' if value else 'Disabled')
3413 sdnsh.debug_backtrace = value
3414 elif variable == 'cli-batch':
3415 print '***** %s cli batch mode *****' % \
3416 ('Enabled' if value else 'Disabled')
3417 sdnsh.batch = value
3418 elif variable == 'description':
3419 print '***** %s command description mode *****' % \
3420 ('Enabled' if value else 'Disabled')
3421 sdnsh.description = value
3422 elif variable == 'rest':
3423 if 'record' in data and value:
3424 print '***** Eanbled rest record mode %s *****' % \
3425 (data['record'])
3426 url_cache.record(data['record'])
3427 return
3428 print '***** %s display rest mode *****' % \
3429 ('Enabled' if value else 'Disabled')
3430 if 'detail' in data and data['detail'] == 'details':
3431 if value == True:
3432 sdnsh.disply_rest_detail = value
3433 sdnsh.store.display_reply_mode(value)
3434 sdnsh.display_rest = value
3435 sdnsh.store.display_mode(value)
3436 if value == False:
3437 sdnsh.disply_rest_detail = value
3438 sdnsh.store.display_reply_mode(value)
3439 url_cache.record(None)
3440 elif variable == 'set':
3441 if 'length' in data:
3442 sdnsh.length = utif.try_int(data['length'])
3443
3444
def command_cli_set(variable, data):
    """Enable a cli variable; see command_cli_variables_set()."""
    command_cli_variables_set(variable, True, data)
3447
def command_cli_unset(variable, data):
    """Disable a cli variable; see command_cli_variables_set()."""
    command_cli_variables_set(variable, False, data)
3450
3451
def command_shell_command(script):
    """
    Debug action: drop into one of a small set of interactive shells.

    @param script string, which shell to run: 'bash', 'python',
           'cassandra-cli' or 'netconfig'
    """

    def shell(args):
        # run an interactive bash (with the given arguments) from $HOME
        subprocess.call(["env", "SHELL=/bin/bash", "/bin/bash"] + list(args),
                        cwd=os.environ.get("HOME"))
        print

    print "\n***** Warning: this is a debug command - use caution! *****"
    if script == 'bash':
        print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-i"])
    elif script == 'python':
        print '***** Type "exit()" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-c", "python"])
    elif script == 'cassandra-cli':
        print '***** Type "exit" or Ctrl-D to return to the CLI *****\n'
        shell(["-l", "-c", "/opt/sdnplatform/db/bin/cassandra-cli --host localhost"])
    elif script == 'netconfig':
        # warn when stdin doesn't look like a local serial console,
        # since reconfiguring the interface can cut a remote session off
        if not re.match("/dev/ttyS?[\d]+$", os.ttyname(0)):
            print '***** You seem to be connected via SSH or another remote protocol;'
            print '***** reconfiguring the network interface may disrupt the connection!'
        print '\n(Press Control-C now to leave the network configuration unchanged)\n'
        subprocess.call(["sudo",
                         "env",
                         "SHELL=/bin/bash",
                         "/opt/sdnplatform/sys/bin/bscnetconfig",
                         "eth0"],
                        cwd=os.environ.get("HOME"))
    else:
        # XXX possibly run the script directly?
        print "Unknown debug choice %s" % script
3483
3484
def command_prompt_update():
    """
    Action to recompute the prompt, used when there's some possibility
    the prompt has changes after some other action (hostname update, for example)
    """
    # refresh the controller name first, then rebuild the prompt string
    sdnsh.set_controller_for_prompt()
    sdnsh.update_prompt()
3492
3493def command_controller_decommission(data):
3494 """
3495 Decommission the controller using the REST API
3496 """
3497 id = data.get('id')
3498 confirm_request("Decommission controller '%s'?\n(yes to continue) " % id)
3499
3500 while True:
3501 url = 'http://%s/rest/v1/system/ha/decommission' % (sdnsh.controller)
3502 result = sdnsh.rest_post_request(url, {"id": id}, 'PUT')
3503 status = json.loads(result)
3504
3505 if (status['status'] == 'OK') and status['description'].endswith('is already decommissioned') == True:
3506 print 'Decommission finished'
3507 print
3508 break
3509 else:
3510 print 'Decommission in progress'
3511
3512 time.sleep(10)
3513
def command_controller_upgrade(data = None):
    """
    Upgrade the controller using the REST API

    Fetches the uploaded upgrade image name, confirms with the user,
    extracts the image's upgrade manifest, then executes each manifest
    step in order, stopping at the first failure.

    @param data dict, command description fields; 'force' ignores
           validation errors, 'details' prints each step's output
    """

    force = 'force' in data
    details = 'details' in data

    if force:
        print "WARNING: Ignoring any validation errors during upgrade"
    # locate the previously-uploaded upgrade image
    url = "http://%s/rest/v1/system/upgrade/image-name" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    iname = json.loads(result)
    if (iname['file'] is None or iname['file'] == ""):
        # no image uploaded yet: explain how to provide one and bail out
        print "Error: No upgrade image present."
        print ""
        print """To perform upgrade, an upgrade image package needs to be uploaded (with scp) to the controller's \"images\" user."""
        print """Upgrade image package is a file with name of format \"upgrade-YYYY.MM.DD.XXXX.pkg\"."""
        print ""
        print "Following is an example to prepare upgrade for controller with IP address 192.168.67.141:"
        print "scp $path/upgrade-2013.02.13.0921.pkg images@192.168.67.141:"
        print ""
        return

    confirm_request("Upgrade controller from image '%s'?\n(yes to continue) "
                    % iname['file'])

    # read the ordered list of upgrade steps out of the image
    url = "http://%s/rest/v1/system/upgrade/extract-image-manifest" % sdnsh.controller
    result = sdnsh.store.rest_simple_request(url)
    check_rest_result(result)
    manifest = json.loads(result)

    print "Executing upgrade..."
    for step in manifest:
        print "%s - %s" % (step['step'], step['description'])
        url = 'http://%s/rest/v1/system/upgrade/execute-upgrade-step' % \
            (sdnsh.controller)
        result = sdnsh.rest_post_request(url, {"step": step['step'],
                                               "imageName": iname['file'],
                                               "force": force},
                                               'PUT')
        check_rest_result(result)
        status = json.loads(result)

        if (status['status'] == "OK"):
            print " Succeeded"
            if details:
                print "\nDetailed output:"
                print status['description']
                print
        else:
            # first failing step aborts the whole upgrade
            print " Failed to execute upgrade step %d" % step['step']
            print "\nDetailed output:"
            print status['description']
            print
            return

    print """Controller node upgrade complete.
Upgrade will not take effect until system is rebooted. Use 'reload' to
reboot this controller node. To revert, select the appropriate image
from the boot menu"""
3576
3577def command_cluster_config_rollback(data):
3578 path = ''
3579 if data.get('dir') == 'images://':
3580 path += '/home/images/'
3581 elif data.get('dir') == 'saved-configs://':
3582 path += '/opt/sdnplatform/run/saved-configs/'
3583 path += data.get('file')
3584
3585 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3586 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3587 ha_role = json.loads(result)
3588 if ha_role['role'] != 'MASTER':
3589 print "Command can only be run on Master"
3590 return
3591
3592 command_legacy_cli('copy', {'dest': 'file://running-config-copy', 'source': 'running-config'})
3593 print "INFO: Checking config '%s'" % path
3594 url = "http://%s/rest/v1/system/rollback/diffconfig" % sdnsh.controller
3595 result = sdnsh.rest_post_request(url, {"config-1": "/opt/sdnplatform/run/saved-configs/running-config-copy", "config-2": path}, 'PUT')
3596 check_rest_result(result)
3597 if json.loads(result)['out'].startswith('Found differences'):
3598 print json.loads(result)['out']
3599 print "Rollback aborted"
3600 return
3601
3602 url = "http://%s/rest/v1/system/controller" % sdnsh.controller
3603 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3604 controller_id = json.loads(result)['id']
3605
3606 url = "http://%s/rest/v1/model/controller-interface?controller=%s" % (sdnsh.controller, controller_id)
3607 result = sdnsh.store.rest_simple_request(url)
3608 local_iface = json.loads(result)[0]['discovered-ip']
3609
3610 url = "http://%s/rest/v1/model/controller-interface" % sdnsh.controller
3611 result = sdnsh.store.rest_simple_request(url)
3612 check_rest_result(result)
3613 ifaces = json.loads(result)
3614
3615 nodeCount = len(ifaces)
3616 cutover = nodeCount/2
3617 if nodeCount%2 == 1:
3618 cutover = cutover + 1
3619
3620 rollbackedNodes = []
3621
3622 # remove and add object for local node at the end of the list
3623 for index, iface in enumerate(ifaces):
3624 if iface['discovered-ip'] == local_iface:
3625 break
3626 del ifaces[index]
3627 ifaces.append(iface)
3628
3629 config=open(path, 'r').read()
3630 url = 'http://%s/rest/v1/system/upload-data' % ifaces[0]['discovered-ip']
3631 result = sdnsh.rest_post_request(url, {"data": config, "dst" : "/tmp/rollback.conf"}, 'PUT')
3632 check_rest_result(result)
3633
3634 while len(ifaces) > 0:
3635 if sdnsh.batch == False:
3636 while True:
3637 confirm = raw_input("Rollback controller at '%s'. [yes/no] ?" % ifaces[0]['discovered-ip'])
3638 if confirm.lower() == 'n' or confirm.lower() == 'no':
3639 if len(rollbackedNodes) == 0:
3640 print "INFO: Rollback aborted"
3641 return
3642
3643 print "INFO: Undoing Rollback on previously rollbacked nodes"
3644 for node in rollbackedNodes:
3645 print "INFO: Resetting database on '%s'" % node['discovered-ip']
3646 url = 'http://%s/rest/v1/system/resetbsc' % (node['discovered-ip'])
3647 result = sdnsh.rest_post_request(url, {}, 'PUT')
3648 check_rest_result(result)
3649 print "INFO: Rebooting '%s'" % node['discovered-ip']
3650 url = 'http://%s/rest/v1/system/reload' % (node['discovered-ip'])
3651 result = sdnsh.rest_post_request(url, {}, 'GET')
3652 check_rest_result(result)
3653
3654 if len(rollbackedNodes) >= cutover:
3655 # delete the REJECT rules
3656 url="http://localhost/rest/v1/model/firewall-rule?port=6633"
3657 result = sdnsh.rest_post_request(url, {}, 'DELETE')
3658 # enable allow openflow on all controllers not rollbacked.
3659 url="http://localhost/rest/v1/model/firewall-rule"
3660 for iface in ifaces:
3661 pk_id = '%s|Ethernet|0' % iface['controller']
3662 data = {
3663 'action': 'allow',
3664 'interface': pk_id,
3665 'src-ip': '',
3666 'port': '6633',
3667 'proto': 'tcp',
3668 'vrrp-ip': '',
3669 }
3670 print "INFO: re-allow openflow on %s" % iface['discovered-ip']
3671 result = sdnsh.rest_post_request(url, data, 'PUT')
3672 check_rest_result(result)
3673
3674 print "Rollback aborted"
3675 return
3676 elif confirm.lower() == 'y' or confirm.lower() == 'yes':
3677 break
3678
3679 url = 'http://%s/rest/v1/system/rollback/config' % (ifaces[0]['discovered-ip'])
3680 result = sdnsh.rest_post_request(url, {"path": "/tmp/rollback.conf"}, 'PUT')
3681 check_rest_result(result)
3682 time.sleep(10)
3683
3684 print "INFO: Rebooting ", ifaces[0]['discovered-ip']
3685 url = "http://%s/rest/v1/system/reload" % ifaces[0]['discovered-ip']
3686 result = sdnsh.store.rest_simple_request(url)
3687
3688 if ifaces[0]['discovered-ip'] == local_iface:
3689 break
3690
3691 print "INFO: Waiting for %s to come back up" % ifaces[0]['discovered-ip']
3692 url = "http://%s/rest/v1/system/ha/role" % ifaces[0]['discovered-ip']
3693 while True:
3694 time.sleep(30)
3695 try:
3696 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3697 status = json.loads(result)
3698 if status['role'] == 'SLAVE' or status['role'] == 'MASTER':
3699 print "INFO: Rollback complete on '%s'" % ifaces[0]['discovered-ip']
3700 break
3701 print "INFO: Waiting for 30 seconds"
3702 except:
3703 print "INFO: Waiting for 30 seconds"
3704
3705
3706 iface = ifaces.pop(0)
3707 rollbackedNodes.append(iface)
3708
3709 print "Rollback completed"
3710
3711def command_wait_for_controller(delay = None, sdnplatform_check = False,
3712 within_command = False):
3713 """
3714 For various commands, it makes sense for the command to verify that
3715 the controller restart has been completed. In the situation where
3716 a single controller is configured, it also makes sense to verify the
3717 controller is now configured as MASTER.
3718
3719 This is especially true for command which are known to cause the
3720 controller to restart, for exampe the 'feature' command.
3721
3722 The procedure is also used during CLI startup (see cli.py)
3723 to verify that the controller is in MASTER mode. Its normal
3724 for the HA role to transition from SLAVE to master during
3725 system startup.
3726 """
3727
3728 # if the CLI was started with --init, skip the wait, the
3729 # controller isn't running.
3730 if sdnsh.options.init:
3731 return
3732
3733 def is_ready(sdnsh, verbose, duration):
3734 """
3735 Be loud-as-_ean when the duration is greater then 15 seconds.
3736 Display the gory details for all to know.
3737 """
3738 too_long = 90
3739 try:
3740 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3741 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3742 ha_role = json.loads(result)
3743 if duration > too_long:
3744 print 'Long delay: reason', \
3745 ', '.join(['%s: %s' % (n,v) for (n,v) in ha_role.items()
3746 if v != ''])
3747 if (ha_role['role'] == 'MASTER' or
3748 sdnsh.find_master()['master'] is not None):
3749 if verbose:
3750 print 'Current role is MASTER'
3751 return True
3752 return False
3753 except error.CommandRestError,e:
3754 print "REST error whileUnable to determine controller HA role."
3755 errors = self.rest_error_to_dict(e, obj_type)
3756 print self.rest_error_dict_to_message(errors)
3757 return True
3758 except Exception, e:
3759 if duration > too_long:
3760 print 'MASTER Transition Failure: ', e
3761 traceback.print_exc()
3762 return True
3763 return False
3764
3765 # if this isn't a typical environment (ie: running remotely)
3766 # don't bother trying to determine the role
3767 if not os.path.exists('/opt/sdnplatform/current_role'):
3768 return
3769
3770 # now vadalidate the rest api port is working
3771 ip_and_port = sdnsh.controller.split(':')
3772 if len(ip_and_port) == 2:
3773 # first ensure the REST API is answering
3774 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
3775 try:
3776 s.connect((ip_and_port[0], int(ip_and_port[1])))
3777 s.close()
3778 except Exception, e:
3779 print 'REST API not running, emergency CLI access'
3780 if sdnsh.debug: # enable debug to see messages
3781 print 'Exception:', e
3782 return
3783
3784 # issue a REST API request directed at the model.
3785 try:
3786 entry = sdnsh.get_table_from_store('feature')
3787 except Exception, e:
3788 print 'REST API/Database not responding, emergency CLI access'
3789 if sdnsh.debug: # enable debug to see messages
3790 print 'Exception:', e
3791 return
3792
3793 if sdnplatform_check:
3794 # the REST API request for ha-role will return UNAVAILABLE
3795 # when sdnplatform isn't running.
3796 url = "http://%s/rest/v1/system/ha/role" % sdnsh.controller
3797 result = sdnsh.store.rest_simple_request(url, use_cache = False)
3798 ha_role = json.loads(result)
3799 if ha_role['role'] == 'UNAVAILABLE':
3800 print 'REST API/SDN platform not responding, emergency CLI access'
3801 return
3802
3803
3804 if delay == None:
3805 delay = 1
3806 delay_str = 'a sec' if delay == 1 else '%d seconds' % delay
3807
3808 duration = 0
3809 while True:
3810 try:
3811 verbose = False
3812 while not is_ready(sdnsh, verbose, duration):
3813 if within_command:
3814 print 'Waiting %s to complete command execution, ' \
3815 'Hit Ctrl-C to exit early' % delay_str
3816 verbose = False
3817 else:
3818 print 'Waiting %s while current role is SLAVE mode, ' \
3819 'Hit Ctrl-C to exit early' % delay_str
3820 verbose = True
3821 time.sleep(delay)
3822 duration += delay
3823 return
3824 except:
3825 if is_ready(sdnsh, True, duration):
3826 if duration > 15:
3827 print 'MASTER Transition: %s sec' % duration
3828 return
3829 try:
3830 resp = raw_input('Controller is not yet ready.'
3831 'Do you still want to continue to the CLI? [n]')
3832 if resp and "yes".startswith(resp.lower()):
3833 print 'Continuing with CLI despite initialization error ...'
3834 return
3835 except KeyboardInterrupt:
3836 return
3837
3838
3839def command_factory_default():
3840 print "Re-setting controller to factory defaults ..."
3841 os.system("sudo /opt/sdnplatform/sys/bin/resetbsc")
3842 return
3843
3844
def command_dump_log(data):
    """
    Generator yielding the contents of a controller log.

    data: dict with 'log-name' (a specific log, or 'all' to expand to
    every available log) and optionally 'controller-node'.
    """
    node = data.get('controller-node') # can be None.
    for ip_port in controller_ip_and_port({ 'id' : node }):
        name = data['log-name']
        if name == 'all':
            # enumerate the logs on this node, then recurse per log
            listing_url = log_url(ip_and_port = ip_port)
            entries = command.sdnsh.rest_simple_request_to_dict(listing_url)
            for entry in entries:
                yield '*' * 40 + ip_port + ' ' + entry['log'] + '\n'
                for item in command_dump_log({ 'log-name' : entry['log'] }):
                    yield item
            return

        # use a streaming method so the complete log is not in memory
        stream = urllib2.urlopen(log_url(ip_and_port = ip_port, log = name))
        for line in stream:
            yield line
        stream.close()
3864 request.close()
3865
3866
3867#
3868# Initialize action functions
3869#
3870#
3871
def init_actions(bs, modi):
    """
    Register every command 'action' proc with the command module.

    bs:   the shell object, saved in the module-global 'sdnsh'
    modi: the model-info object, saved in the module-global 'mi'

    Each add_action() call binds an action name (referenced by command
    descriptions) to a proc in this file; '$name' values inside the
    kwargs dicts are substituted by the command module at invocation
    time.
    """
    global sdnsh, mi
    sdnsh = bs
    mi = modi

    # tunnel and policy management
    command.add_action('create-tunnel',
                       tunnel_create,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('remove-tunnel',
                       tunnel_remove,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('create-policy',
                       policy_create,
                       {'kwargs': {'data' : '$data',}})

    command.add_action('remove-policy',
                       policy_remove,
                       {'kwargs': {'data' : '$data',}})

    # store field updates: the plain variants operate on the current
    # submode object, the -explicit variants name the object directly
    command.add_action('write-fields', write_fields,
                       {'kwargs': {'obj_type': '$current-mode-obj-type',
                                   'obj_id': '$current-mode-obj-id',
                                   'data': '$data'}})

    command.add_action('reset-fields', reset_fields,
                       {'kwargs': {'obj_type'     : '$current-mode-obj-type',
                                   'obj_id'       : '$current-mode-obj-id',
                                   'arg_data'     : '$data',
                                   'match_for_no' : '$match-for-no',
                                   'fields'       : '$fields'}})

    command.add_action('write-fields-explicit', write_fields,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'obj_id'   : '$obj-id',
                                   'data'     : '$data'}})

    command.add_action('reset-fields-explicit', reset_fields,
                       {'kwargs': {'obj_type'     : '$obj-type',
                                   'obj_id'       : '$obj-id',
                                   'arg_data'     : '$data',
                                   'match_for_no' : '$match-for-no',
                                   'fields'       : '$fields'}})

    command.add_action('update-config', update_config,
                       {'kwargs': {'obj_type'   : '$obj-type',
                                   'obj_id'     : '$current-mode-obj-id',
                                   'data'       : '$data',
                                   'no_command' : '$is-no-command', }})

    # object lifecycle within the current submode
    command.add_action('delete-objects', delete_objects,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'data': '$data',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id'}})

    command.add_action('write-object', write_object,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'data': '$data',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id'}})

    command.add_action('set-data', set_data,
                       {'kwargs': {'data': '$data',
                                   'key': '$key',
                                   'value': '$value'}})

    # submode stack management
    command.add_action('push-mode-stack', push_mode_stack,
                       {'kwargs': {'mode_name': '$submode-name',
                                   'obj_type': '$obj-type',
                                   'parent_field': '$parent-field',
                                   'parent_id': '$current-mode-obj-id',
                                   'data': '$data',
                                   'create': '$create'}})

    command.add_action('pop-mode-stack', pop_mode_stack)

    command.add_action('confirm', confirm_request,
                       {'kwargs': {'prompt': '$prompt'}})

    command.add_action('convert-vns-access-list', convert_vns_access_list,
                       {'kwargs': {'obj_type': '$obj-type',
                                   'key' : '$current-mode-obj-id',
                                   'data' : '$data'}})
    # display/query/join actions: the -append variants accumulate onto
    # prior results ('clear' False) rather than replacing them
    command.add_action('display-table', command_display_table,
                       {'kwargs': {'obj_type'     : '$obj-type',
                                   'data'         : '$data',
                                   'table_format' : '$format',
                                   'title'        : '$title',
                                   'detail'       : '$detail',
                                   'scoped'       : '$scoped',
                                   'sort'         : '$sort',
                                  }})

    command.add_action('display-rest', command_display_rest,
                       {'kwargs': { 'data'         : '$data',
                                    'url'          : '$url',
                                    'path'         : '$path',
                                    'rest_type'    : '$rest-type',
                                    'sort'         : '$sort',
                                    'title'        : '$title',
                                    'table_format' : '$format',
                                    'detail'       : '$detail',
                                  }})

    command.add_action('query-table', command_query_table,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'key'      : '$key',
                                   'scoped'   : '$scoped',
                                   'sort'     : '$sort',
                                   'crack'    : '$crack',
                                   'append'   : '$append',
                                   'clear'    : True,
                                  }})

    command.add_action('query-table-append', command_query_table,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'key'      : '$key',
                                   'scoped'   : '$scoped',
                                   'sort'     : '$sort',
                                   'crack'    : '$crack',
                                   'append'   : '$append',
                                   'clear'    : False,
                                  }})


    command.add_action('query-rest', command_query_rest,
                       {'kwargs': {'url'       : '$url',
                                   'path'      : '$path',
                                   'rest_type' : '$rest-type',
                                   'data'      : '$data',
                                   'key'       : '$key',
                                   'scoped'    : '$scoped',
                                   'sort'      : '$sort',
                                   'append'    : '$append',
                                   'clear'     : True,
                                  }})

    command.add_action('query-rest-append', command_query_rest,
                       {'kwargs': {'url'       : '$url',
                                   'path'      : '$path',
                                   'rest_type' : '$rest-type',
                                   'data'      : '$data',
                                   'key'       : '$key',
                                   'scoped'    : '$scoped',
                                   'sort'      : '$sort',
                                   'crack'     : '$crack',
                                   'append'    : '$append',
                                   'clear'     : False,
                                  }})

    command.add_action('join-rest', command_join_rest,
                       {'kwargs': {'url'        : '$url',
                                   'key'        : '$key',
                                   'join_field' : '$join-field',
                                   'rest_type'  : '$rest-type',
                                   'add_field'  : '$add-field',
                                   'data'       : '$data',
                                   'crack'      : '$crack',
                                   'url_key'    : '$url-key',
                                  }})

    command.add_action('join-table', command_join_table,
                       {'kwargs': {'obj_type'   : '$obj-type',
                                   'data'       : '$data',
                                   'key'        : '$key',
                                   'key_value'  : '$key-value',
                                   'add_field'  : '$add-field',
                                   'join_field' : '$join-field',
                                   'crack'      : '$crack',
                                  }})

    command.add_action('crack', command_crack,
                       {'kwargs': {
                                   'field'      : '$field',
                                  }})

    command.add_action('display', command_display,
                       {'kwargs': {'data'         : '$data',
                                   'table_format' : '$format',
                                   'sort'         : '$sort',
                                   'detail'       : '$detail',
                                   'title'        : '$title',
                                  }})

    # legacy (pre-'command' framework) CLI dispatch
    command.add_action('legacy-cli', command_legacy_cli,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'detail'   : '$detail',
                                   'sort'     : '$sort',
                                   'scoped'   : '$scoped',
                                  }})

    command.add_action('legacy-cli-no', command_legacy_cli_no,
                       {'kwargs': {'obj_type' : '$obj-type',
                                   'data'     : '$data',
                                   'detail'   : '$detail',
                                   'sort'     : '$sort',
                                   'scoped'   : '$scoped',
                                  }})

    command.add_action('version', command_version,
                       {'kwargs': {'data' : '$data',
                                  }})

    command.add_action('clearterm', command_clearterm)

    command.add_action('display-cli', command_display_cli,
                       {'kwargs': {'data'   : '$data',
                                   'detail' : '$detail',
                                  }})

    # alias and tag management
    command.add_action('create-alias', command_create_alias,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                   'reserved'       : '$reserved',
                                   'fail_if_exists' : '$fail-if-exists',
                                  }})

    command.add_action('delete-alias', command_delete_alias,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                  }})

    command.add_action('create-tag', command_create_tag,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                  }})

    command.add_action('delete-tag', command_delete_tag,
                       {'kwargs': {'obj_type'       : '$obj-type',
                                   'data'           : '$data',
                                  }})

    # session variables and shell escape
    command.add_action('cli-set', command_cli_set,
                       {'kwargs': {'variable' : '$variable',
                                   'data'     : '$data',
                                  }})

    command.add_action('cli-unset', command_cli_unset,
                       {'kwargs': {'variable' : '$variable',
                                   'data'     : '$data',
                                  }})

    command.add_action('shell-command', command_shell_command,
                       {'kwargs': {'script' : '$command',
                                  }})

    command.add_action('rest-post-data', command_rest_post_data,
                       {'kwargs': {'path': '$path',
                                   'data': '$data',
                                   'verb': '$verb'
                                  }})

    command.add_action('prompt-update', command_prompt_update,)

    # controller lifecycle actions (defined earlier in this file)
    command.add_action('controller-upgrade', command_controller_upgrade,
                       {'kwargs': {'data': '$data'}})

    command.add_action('controller-config-rollback', command_cluster_config_rollback,
                       {'kwargs': {'data': '$data'}})

    command.add_action('controller-decommission', command_controller_decommission,
                       {'kwargs': {'data': '$data'}})

    command.add_action('wait-for-controller', command_wait_for_controller,
                       {'kwargs': {'within_command': True}})

    command.add_action('factory-default', command_factory_default)

    command.add_action('dump-log', command_dump_log,
                       {'kwargs' : { 'data' : '$data', }})