#!/usr/bin/env python
"""sonic-cfggen

A tool to read SONiC config data from one or more of the following sources:
minigraph file, config DB, json file(s), yaml file(s), command line input,
and write the data into the config DB, print it as json, or render a jinja2
config template.

Examples:
    Render a template with minigraph data:
        sonic-cfggen -m -t /usr/share/template/bgpd.conf.j2
    Dump config DB content into a json file:
        sonic-cfggen -d --print-data > db_dump.json
    Load the content of a json file into the config DB:
        sonic-cfggen -j db_dump.json --write-to-db

See the usage string for a detailed description of each argument.
"""

import sys
import os.path
import argparse
import yaml
import jinja2
import netaddr
import json

from functools import partial

from minigraph import minigraph_encoder
from minigraph import parse_xml
from minigraph import parse_device_desc_xml
from sonic_platform import get_machine_info
from sonic_platform import get_platform_info
from sonic_platform import get_system_mac
from swsssdk import ConfigDBConnector
from collections import OrderedDict
from natsort import natsorted


def is_ipv4(value):
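    """Return True if value is an IPv4 address or prefix (accepts strings or netaddr.IPNetwork)."""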
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except:
            return False
    return addr.version == 4


def is_ipv6(value):
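    """Return True if value is an IPv6 address or prefix (accepts strings or netaddr.IPNetwork)."""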
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except:
            return False
    return addr.version == 6


def prefix_attr(attr, value):
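    """Return the requested attribute of an IP prefix (e.g. 'ip', 'netmask') as a
    string, or None if value is empty or cannot be parsed as a prefix.
    """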
    if not value:
        return None
    else:
        try:
            prefix = netaddr.IPNetwork(str(value))
        except:
            return None
    return str(getattr(prefix, attr))


def unique_name(l):
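    """Filter a list of dicts, keeping only the first entry for each 'name' value."""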
    name_list = []
    new_list = []
    for item in l:
        if item['name'] not in name_list:
            name_list.append(item['name'])
            new_list.append(item)
    return new_list


class FormatConverter:
    """Convert between the config DB schema and the legacy minigraph-based
    schema, kept for backward compatibility.

    We will move to the DB schema and remove this class once the config
    templates have been updated.

    TODO(taoyl): The current version of the config DB only supports BGP admin
    states. All other configuration is still loaded from the minigraph. The
    plan is to remove the minigraph and move everything into the config DB in
    a later commit.
    """

    @staticmethod
    def db_to_output(db_data):
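        """Return config DB data as-is; the output schema already matches the DB schema."""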
        return db_data

    @staticmethod
    def output_to_db(output_data):
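        """Keep only config DB tables (keys starting with an upper-case letter)
        and drop everything else before writing to the DB.
        """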
        db_data = {}
        for table_name in output_data:
            if table_name[0].isupper():
                db_data[table_name] = output_data[table_name]
        return db_data

    @staticmethod
    def to_serialized(data):
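        """Natural-sort each table and flatten composite keys into their config DB
        string form (e.g. ('Ethernet0', '10.0.0.0/31') becomes 'Ethernet0|10.0.0.0/31').
        """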
        for table in data:
            if type(data[table]) is dict:
                data[table] = OrderedDict(natsorted(data[table].items()))
                for key in data[table].keys():
                    new_key = ConfigDBConnector.serialize_key(key)
                    if new_key != key:
                        data[table][new_key] = data[table].pop(key)
        return data

    @staticmethod
    def to_deserialized(data):
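        """Expand serialized config DB keys (e.g. 'Ethernet0|10.0.0.0/31') back
        into the tuple form used internally.
        """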
        for table in data:
            if type(data[table]) is dict:
                for key in data[table].keys():
                    new_key = ConfigDBConnector.deserialize_key(key)
                    if new_key != key:
                        data[table][new_key] = data[table].pop(key)
        return data


def deep_update(dst, src):
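    """Recursively merge src into dst and return dst; nested dicts are merged
    key by key rather than replaced wholesale.
    """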
    for key, value in src.iteritems():
        if isinstance(value, dict):
            node = dst.setdefault(key, {})
            deep_update(node, value)
        else:
            dst[key] = value
    return dst


def main():
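    """Collect config data from the selected sources (minigraph, device
    description xml, yaml/json files, the -a json string, config DB, platform
    info), merge them in that order, then render a template, print a variable,
    write to the config DB, or print the merged data as requested.
    """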
    parser = argparse.ArgumentParser(description="Render configuration file from minigraph data and jinja2 template.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-m", "--minigraph", help="minigraph xml file", nargs='?', const='/etc/sonic/minigraph.xml')
    group.add_argument("-M", "--device-description", help="device description xml file")
    parser.add_argument("-p", "--port-config", help="port config file, used with -m")
    parser.add_argument("-y", "--yaml", help="yaml file that contains additional variables", action='append', default=[])
    parser.add_argument("-j", "--json", help="json file that contains additional variables", action='append', default=[])
    parser.add_argument("-a", "--additional-data", help="additional data, in json string")
    parser.add_argument("-d", "--from-db", help="read config from configdb", action='store_true')
    parser.add_argument("-H", "--platform-info", help="read platform and hardware info", action='store_true')
    parser.add_argument("-s", "--redis-unix-sock-file", help="unix sock file for redis connection")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-t", "--template", help="render the data with the template file")
    group.add_argument("-v", "--var", help="print the value of a variable, supports jinja2 expressions")
    group.add_argument("--var-json", help="print the value of a variable, in json format")
    group.add_argument("--write-to-db", help="write config into configdb", action='store_true')
    group.add_argument("--print-data", help="print all data", action='store_true')
    args = parser.parse_args()

    platform = get_platform_info(get_machine_info())

    db_kwargs = {}
    if args.redis_unix_sock_file is not None:
        db_kwargs['unix_socket_path'] = args.redis_unix_sock_file

    data = {}

    if args.minigraph is not None:
        minigraph = args.minigraph
        if platform:
            if args.port_config is not None:
                deep_update(data, parse_xml(minigraph, platform, args.port_config))
            else:
                deep_update(data, parse_xml(minigraph, platform))
        else:
            if args.port_config is not None:
                deep_update(data, parse_xml(minigraph, port_config_file=args.port_config))
            else:
                deep_update(data, parse_xml(minigraph))

    if args.device_description is not None:
        deep_update(data, parse_device_desc_xml(args.device_description))

    for yaml_file in args.yaml:
        with open(yaml_file, 'r') as stream:
            additional_data = yaml.load(stream)
            deep_update(data, FormatConverter.to_deserialized(additional_data))

    for json_file in args.json:
        with open(json_file, 'r') as stream:
            deep_update(data, FormatConverter.to_deserialized(json.load(stream)))

    if args.additional_data is not None:
        deep_update(data, json.loads(args.additional_data))

    if args.from_db:
        configdb = ConfigDBConnector(**db_kwargs)
        configdb.connect()
        deep_update(data, FormatConverter.db_to_output(configdb.get_config()))

    if args.platform_info:
        hardware_data = {'DEVICE_METADATA': {'localhost': {
            'platform': platform,
            'mac': get_system_mac()
            }}}
        deep_update(data, hardware_data)

    if args.template is not None:
        template_file = os.path.abspath(args.template)
        paths = ['/', '/usr/share/sonic/templates', os.path.dirname(template_file)]
        loader = jinja2.FileSystemLoader(paths)

        env = jinja2.Environment(loader=loader, trim_blocks=True)
        # Helper filters available inside the template: address-family checks,
        # de-duplication by name, and prefix attribute accessors.
        env.filters['ipv4'] = is_ipv4
        env.filters['ipv6'] = is_ipv6
        env.filters['unique_name'] = unique_name
        for attr in ['ip', 'network', 'prefixlen', 'netmask']:
            env.filters[attr] = partial(prefix_attr, attr)
        template = env.get_template(template_file)
        print template.render(data)

    if args.var is not None:
        template = jinja2.Template('{{' + args.var + '}}')
        print template.render(data)

    if args.var_json is not None:
        print json.dumps(FormatConverter.to_serialized(data[args.var_json]), indent=4, cls=minigraph_encoder)

    if args.write_to_db:
        configdb = ConfigDBConnector(**db_kwargs)
        configdb.connect(False)
        configdb.mod_config(FormatConverter.output_to_db(data))

    if args.print_data:
        print json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder)


if __name__ == "__main__":
    main()