2017-01-06 20:19:42 -06:00
|
|
|
#!/usr/bin/env python
|
2017-08-01 21:02:00 -05:00
|
|
|
"""sonic-cfggen
|
|
|
|
|
|
|
|
A tool to read SONiC config data from one or more of the following sources:
|
|
|
|
minigraph file, config DB, json file(s), yaml files(s), command line input,
|
|
|
|
and write the data into DB, print as json, or render a jinja2 config template.
|
|
|
|
|
|
|
|
Examples:
|
2019-06-13 21:04:45 -05:00
|
|
|
Render template with minigraph:
|
2017-08-01 21:02:00 -05:00
|
|
|
sonic-cfggen -m -t /usr/share/template/bgpd.conf.j2
|
2019-06-13 21:04:45 -05:00
|
|
|
Dump config DB content into json file:
|
2017-08-01 21:02:00 -05:00
|
|
|
sonic-cfggen -d --print-data > db_dump.json
|
|
|
|
Load content of json file into config DB:
|
|
|
|
sonic-cfggen -j db_dump.json --write-to-db
|
|
|
|
See usage string for detail description for arguments.
|
|
|
|
"""
|
2017-01-06 20:19:42 -06:00
|
|
|
|
2018-04-24 18:01:17 -05:00
|
|
|
from __future__ import print_function
|
2019-10-31 11:17:29 -05:00
|
|
|
|
|
|
|
# monkey patch re.compile to do lazy regular expression compilation.
|
|
|
|
# This is done to improve import time of jinja2, yaml, natsort modules, because they
|
|
|
|
# do many regexp compilation at import time, so it will speed up sonic-cfggen invocations
|
|
|
|
# that do not require template generation or yaml loading. sonic-cfggen is used in so many places
|
|
|
|
# during system boot up that importing jinja2, yaml, natsort every time
|
|
|
|
# without lazy regular expression compilation affect boot up time.
|
|
|
|
# FIXME: remove this once sonic-cfggen and templates dependencies are replaced with a faster approach
|
|
|
|
import lazy_re
|
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
import sys
|
|
|
|
import os.path
|
|
|
|
import argparse
|
|
|
|
import yaml
|
|
|
|
import jinja2
|
|
|
|
import netaddr
|
2017-01-19 22:56:26 -06:00
|
|
|
import json
|
2017-09-12 16:13:27 -05:00
|
|
|
from functools import partial
|
2017-02-28 12:52:56 -06:00
|
|
|
from minigraph import minigraph_encoder
|
2017-04-11 15:04:21 -05:00
|
|
|
from minigraph import parse_xml
|
2017-07-06 17:28:23 -05:00
|
|
|
from minigraph import parse_device_desc_xml
|
2020-05-04 18:15:15 -05:00
|
|
|
from minigraph import parse_asic_sub_role
|
2018-04-24 18:01:17 -05:00
|
|
|
from portconfig import get_port_config
|
2019-05-15 18:22:56 -05:00
|
|
|
from sonic_device_util import get_machine_info
|
|
|
|
from sonic_device_util import get_platform_info
|
|
|
|
from sonic_device_util import get_system_mac
|
2020-05-04 18:15:15 -05:00
|
|
|
from sonic_device_util import get_npu_id_from_name
|
2018-09-16 00:15:02 -05:00
|
|
|
from config_samples import generate_sample_config
|
|
|
|
from config_samples import get_available_config
|
2020-05-08 23:24:05 -05:00
|
|
|
from swsssdk import SonicV2Connector, ConfigDBConnector, SonicDBConfig
|
2019-10-31 11:17:29 -05:00
|
|
|
from redis_bcc import RedisBytecodeCache
|
[configdb]: Sorting all json config-elements residing in config_db.json (#1454)
Just a simple change to make sonic's user life a little bit easier. Displaying the multiple elements of config_db.json in an orderly fashion allows a more intuitive user-experience.
For this change i'm simply intercepting the config-state that is about to be dumped to config_db.json, and i'm placing it in an ordered-dictionary, so that information is pushed to file in natural/alphanumerical ordering.
Example:
admin@lnos-x1-a-csw01:~$ sudo sonic-cfggen -m /etc/sonic/minigraph.xml --print-data
{
...
"PORT": {
"Ethernet0": {
"alias": "Eth1/1",
"lanes": "65"
},
"Ethernet1": {
"alias": "Eth1/2",
"lanes": "66"
},
"Ethernet2": {
"alias": "Eth1/3",
"lanes": "67"
},
"Ethernet3": {
"alias": "Eth1/4",
"lanes": "68"
},
"Ethernet4": {
"alias": "Eth2/1",
"lanes": "69"
},
...
...
"INTERFACE": {
"Ethernet0|10.0.0.0/31": {},
"Ethernet1|10.0.0.2/31": {},
"Ethernet2|10.0.0.4/31": {},
"Ethernet3|10.0.0.6/31": {},
"Ethernet4|10.0.0.8/31": {},
2018-03-10 01:51:33 -06:00
|
|
|
from collections import OrderedDict
|
|
|
|
from natsort import natsorted
|
2017-01-06 20:19:42 -06:00
|
|
|
|
2018-10-17 16:10:34 -05:00
|
|
|
def sort_by_port_index(value):
    """Jinja2 filter: sort a list of port names (e.g. "EthernetN") in place.

    The sort key is the integer that follows the 8-character "Ethernet"
    prefix, so "Ethernet2" orders before "Ethernet12".  Empty/None input
    or a non-list value is left untouched.  Returns None (in-place sort).
    """
    if not value:
        return
    if not isinstance(value, list):
        return
    value.sort(key=lambda port: int(port[8:]))
|
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
def is_ipv4(value):
    """Jinja2 filter: return True if *value* is an IPv4 address or prefix.

    Accepts a netaddr.IPNetwork instance directly, or any value whose str()
    form netaddr can parse.  Empty/None or unparseable input returns False.
    """
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer silently swallowed.
        except Exception:
            return False
    return addr.version == 4
|
|
|
|
|
|
|
|
def is_ipv6(value):
    """Jinja2 filter: return True if *value* is an IPv6 address or prefix.

    Accepts a netaddr.IPNetwork instance directly, or any value whose str()
    form netaddr can parse.  Empty/None or unparseable input returns False.
    """
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer silently swallowed.
        except Exception:
            return False
    return addr.version == 6
|
|
|
|
|
2017-09-12 16:13:27 -05:00
|
|
|
def prefix_attr(attr, value):
    """Return str(getattr(netaddr.IPNetwork(value), attr)), or None.

    Bound via functools.partial to provide the 'ip', 'network',
    'prefixlen', 'netmask' and 'broadcast' template filters.  Empty/None
    or unparseable *value* yields None.
    """
    if not value:
        return None
    try:
        prefix = netaddr.IPNetwork(str(value))
    # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
    # are no longer silently swallowed.  (The redundant `else:` wrapper
    # after the early return is also gone.)
    except Exception:
        return None
    return str(getattr(prefix, attr))
|
|
|
|
|
2017-03-16 13:22:40 -05:00
|
|
|
def unique_name(l):
    """Jinja2 filter: de-duplicate a list of dicts by their 'name' key.

    Keeps the first occurrence of each name and preserves input order.
    """
    # A set gives O(1) membership tests; the original list scan made the
    # filter O(n**2) on large inputs.
    seen_names = set()
    new_list = []
    for item in l:
        if item['name'] not in seen_names:
            seen_names.add(item['name'])
            new_list.append(item)
    return new_list
|
|
|
|
|
2019-06-10 16:02:55 -05:00
|
|
|
def pfx_filter(value):
    """INTERFACE Table can have keys in one of the two formats:
    string or tuple - This filter skips the string keys and only
    take into account the tuple.
    For eg - VLAN_INTERFACE|Vlan1000 vs VLAN_INTERFACE|Vlan1000|192.168.0.1/21
    """
    if not value:
        return OrderedDict()
    # Keep only the tuple keys (interface + prefix entries); plain string
    # keys (interface-only entries) are dropped.
    return OrderedDict(
        (key, val) for key, val in value.items() if isinstance(key, tuple)
    )
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2019-10-24 09:35:14 -05:00
|
|
|
def ip_network(value):
    """Jinja2 filter: extract the network address from a network prefix.

    Returns the netaddr network address on success; on parse failure it
    returns the string "Invalid ip address <value>" (kept for backward
    compatibility with existing templates).
    """
    try:
        r_v = netaddr.IPNetwork(value)
    # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
    # are no longer silently swallowed.
    except Exception:
        return "Invalid ip address %s" % value
    return r_v.network
|
|
|
|
|
2017-08-01 21:02:00 -05:00
|
|
|
class FormatConverter:
    """Convert config DB based schema to legacy minigraph based schema for backward capability.
    We will move to DB schema and remove this class when the config templates are modified.

    TODO(taoyl): Current version of config db only supports BGP admin states.
    All other configuration are still loaded from minigraph. Plan to remove
    minigraph and move everything into config db in a later commit.
    """
    @staticmethod
    def db_to_output(db_data):
        """Pass config DB data through unchanged (legacy translation no longer needed)."""
        return db_data

    @staticmethod
    def output_to_db(output_data):
        """Keep only tables whose name starts with an upper-case letter.

        Lower-case top-level keys are auxiliary template variables and are
        not written to the config DB.
        """
        db_data = {}
        for table_name in output_data:
            if table_name[0].isupper():
                db_data[table_name] = output_data[table_name]
        return db_data

    @staticmethod
    def to_serialized(data, lookup_key = None):
        """Recursively convert tuple keys to the config DB's flat "a|b" string form.

        Dict contents are natural-sorted.  If *lookup_key* is given, only the
        first entry whose key equals (string key) or contains (tuple key) the
        lookup key is returned, serialized.
        """
        # Deliberately `type(...) is dict`: already-converted OrderedDict
        # values must not be re-sorted on recursion.
        if type(data) is dict:
            data = OrderedDict(natsorted(data.items()))

            # Fixed: `lookup_key != None` -> `is not None` (identity test for None).
            if lookup_key is not None:
                newData = {}
                # list() snapshot: we pop from `data` while scanning its keys,
                # which would raise on Python 3 without the copy.
                for key in list(data.keys()):
                    if ((type(key) is unicode and lookup_key == key) or (type(key) is tuple and lookup_key in key)):
                        newData[ConfigDBConnector.serialize_key(key)] = data.pop(key)
                        break
                return newData

            # list() snapshot for the same mutation-while-iterating reason.
            for key in list(data.keys()):
                new_key = ConfigDBConnector.serialize_key(key)
                if new_key != key:
                    data[new_key] = data.pop(key)
                data[new_key] = FormatConverter.to_serialized(data[new_key])
        return data

    @staticmethod
    def to_deserialized(data):
        """Inverse of to_serialized(): split flat "a|b" keys back into tuples
        for every table-level dict."""
        for table in data:
            if type(data[table]) is dict:
                # list() snapshot: keys are popped/re-inserted during the scan.
                for key in list(data[table].keys()):
                    new_key = ConfigDBConnector.deserialize_key(key)
                    if new_key != key:
                        data[table][new_key] = data[table].pop(key)
        return data
|
|
|
|
|
|
|
|
|
2017-08-08 18:23:58 -05:00
|
|
|
def deep_update(dst, src):
    """Recursively merge mapping *src* into mapping *dst* and return *dst*.

    Nested dicts are merged key by key; any other value in *src* overwrites
    the corresponding entry in *dst*.
    """
    # `.items()` instead of Py2-only `.iteritems()`: identical semantics on
    # Python 2 and also works on Python 3.
    for key, value in src.items():
        if isinstance(value, dict):
            node = dst.setdefault(key, {})
            deep_update(node, value)
        else:
            dst[key] = value
    return dst
|
|
|
|
|
[sonic-cfggen]: Sorting the information generated by sonic-cfggen for all j2 templates. (#1616)
With this patch all the content of the files generated by "sonic-cfggen -d -t" instructions will now be sorted, which is very useful for operational and troubleshooting purposes. See below a few examples of this behavior. Also, a few sonic-cfggen UT's have been modified to accomodate the new sorted configurations.
admin@lnos-x1-a-asw02:~$ sonic-cfggen -d -t /usr/share/sonic/templates/interfaces.j2 | more
…
auto lo
iface lo inet loopback
iface lo inet static
address 10.10.1.2
netmask 255.255.255.255
auto eth0
iface eth0 inet dhcp
allow-hotplug Ethernet112
iface Ethernet112 inet static
mtu 9100
address 10.1.2.2
netmask 255.255.255.0
allow-hotplug Ethernet112
iface Ethernet112 inet6 static
mtu 9100
address fc00:1:2::2
netmask 64
allow-hotplug Ethernet116
iface Ethernet116 inet static
mtu 9100
address 10.2.2.2
netmask 255.255.255.0
allow-hotplug Ethernet116
iface Ethernet116 inet6 static
mtu 9100
address fc00:2:2::2
netmask 64
allow-hotplug Ethernet120
iface Ethernet120 inet static
mtu 9100
address 10.3.2.2
netmask 255.255.255.0
root@lnos-x1-a-csw01:/# sonic-cfggen -d -y /etc/sonic/deployment_id_asn_map.yml -t /usr/share/sonic/templates/bgpd.conf.j2
…
router bgp 65100
…
network 10.10.2.1/32
neighbor 10.0.0.1 remote-as 65200
neighbor 10.0.0.1 description ARISTA01T2
address-family ipv4
neighbor 10.0.0.1 activate
maximum-paths 64
exit-address-family
neighbor 10.0.0.11 remote-as 65200
neighbor 10.0.0.11 description ARISTA06T2
address-family ipv4
neighbor 10.0.0.11 activate
maximum-paths 64
exit-address-family
neighbor 10.0.0.13 remote-as 65200
neighbor 10.0.0.13 description ARISTA07T2
address-family ipv4
neighbor 10.0.0.13 activate
maximum-paths 64
exit-address-family
neighbor 10.0.0.15 remote-as 65200
neighbor 10.0.0.15 description ARISTA08T2
address-family ipv4
neighbor 10.0.0.15 activate
maximum-paths 64
exit-address-family
root@lnos-x1-a-asw02:/# sonic-cfggen -d -t /usr/share/sonic/templates/lldpd.conf.j2
…
configure ports Ethernet4 lldp portidsubtype local Eth2/1 description ixia-card2-port8:Eth1/2/8
configure ports Ethernet112 lldp portidsubtype local Eth29/1 description lnos-x1-a-csw01:Eth29/1
configure ports Ethernet116 lldp portidsubtype local Eth30/1 description lnos-x1-a-csw02:Eth30/1
configure ports Ethernet120 lldp portidsubtype local Eth31/1 description lnos-x1-a-csw03:Eth31/1
configure ports Ethernet124 lldp portidsubtype local Eth32/1 description lnos-x1-a-csw04:Eth32/1
root@lnos-x1-a-asw02:/# sonic-cfggen -d -t /usr/share/sonic/templates/ports.json.j2 | more
[
{
"PORT_TABLE:Ethernet0": {
"speed": "10000",
"description": ""
},
"OP": "SET"
},
{
"PORT_TABLE:Ethernet1": {
"speed": "10000",
"description": ""
},
"OP": "SET"
},
{
"PORT_TABLE:Ethernet2": {
"speed": "10000",
"description": ""
},
"OP": "SET"
},
]
2018-04-19 03:43:00 -05:00
|
|
|
def sort_data(data):
    """Natural-sort every table-level dict in *data* and return *data*.

    Each plain-dict table value is replaced with a natsorted OrderedDict so
    rendered templates list entries in natural/alphanumeric order.
    """
    for table_name, table in data.items():
        # `type(...) is dict` on purpose: OrderedDict values are already sorted.
        if type(table) is dict:
            data[table_name] = OrderedDict(natsorted(table.items()))
    return data
|
|
|
|
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
def main():
    """Entry point: parse CLI args, gather config data from the selected
    sources (minigraph, config DB, json/yaml files, command line), then
    emit it as requested (render a jinja2 template, print a variable or
    json, or write into the config DB)."""
    parser=argparse.ArgumentParser(description="Render configuration file from minigraph data and jinja2 template.")
    # -m/-M/-k are mutually exclusive input sources.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-m", "--minigraph", help="minigraph xml file", nargs='?', const='/etc/sonic/minigraph.xml')
    group.add_argument("-M", "--device-description", help="device description xml file")
    group.add_argument("-k", "--hwsku", help="HwSKU")
    parser.add_argument("-n", "--namespace", help="namespace name", nargs='?', const=None, default=None)
    parser.add_argument("-p", "--port-config", help="port config file, used with -m or -k", nargs='?', const=None)
    parser.add_argument("-y", "--yaml", help="yaml file that contains additional variables", action='append', default=[])
    parser.add_argument("-j", "--json", help="json file that contains additional variables", action='append', default=[])
    parser.add_argument("-a", "--additional-data", help="addition data, in json string")
    parser.add_argument("-d", "--from-db", help="read config from configdb", action='store_true')
    parser.add_argument("-H", "--platform-info", help="read platform and hardware info", action='store_true')
    parser.add_argument("-s", "--redis-unix-sock-file", help="unix sock file for redis connection")
    # Output actions are mutually exclusive with each other (but not with
    # the input-source group above).
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-t", "--template", help="render the data with the template file")
    parser.add_argument("-T", "--template_dir", help="search base for the template files", action='store')
    group.add_argument("-v", "--var", help="print the value of a variable, support jinja2 expression")
    group.add_argument("--var-json", help="print the value of a variable, in json format")
    group.add_argument("-w", "--write-to-db", help="write config into configdb", action='store_true')
    group.add_argument("--print-data", help="print all data", action='store_true')
    group.add_argument("--preset", help="generate sample configuration from a preset template", choices=get_available_config())
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-K", "--key", help="Lookup for a specific key")
    args = parser.parse_args()

    platform = get_platform_info(get_machine_info())

    # Optional unix-socket path forwarded to every ConfigDBConnector below.
    db_kwargs = {}
    if args.redis_unix_sock_file != None:
        db_kwargs['unix_socket_path'] = args.redis_unix_sock_file

    # `data` accumulates config from every source; later sources deep-merge
    # over earlier ones.
    data = {}
    hwsku = args.hwsku
    asic_name = args.namespace
    asic_id = None
    if asic_name is not None:
        asic_id = get_npu_id_from_name(asic_name)

    # Load the database config for the namespace from global database json
    if args.namespace is not None:
        SonicDBConfig.load_sonic_global_db_config(namespace=args.namespace)

    # -k/--hwsku: synthesize DEVICE_METADATA and PORT tables from the
    # platform's port config file.
    if hwsku is not None:
        hardware_data = {'DEVICE_METADATA': {'localhost': {
            'hwsku': hwsku
            }}}
        deep_update(data, hardware_data)
        (ports, _, _) = get_port_config(hwsku, platform, args.port_config, asic_id)
        if not ports:
            print('Failed to get port config', file=sys.stderr)
            sys.exit(1)
        deep_update(data, {'PORT': ports})

    # -j: merge each json file (keys deserialized from "a|b" back to tuples).
    for json_file in args.json:
        with open(json_file, 'r') as stream:
            deep_update(data, FormatConverter.to_deserialized(json.load(stream)))

    # -m: parse the minigraph; port config is only honored when the platform
    # is known.
    if args.minigraph != None:
        minigraph = args.minigraph
        if platform:
            if args.port_config != None:
                deep_update(data, parse_xml(minigraph, platform, args.port_config, asic_name=asic_name))
            else:
                deep_update(data, parse_xml(minigraph, platform, asic_name=asic_name))
        else:
            deep_update(data, parse_xml(minigraph, port_config_file=args.port_config, asic_name=asic_name))

    # -M: merge a device description xml.
    if args.device_description != None:
        deep_update(data, parse_device_desc_xml(args.device_description))

    # -y: merge each yaml file.  PyYAML >= 5.1 deprecated plain load();
    # full_load() keeps the old behavior without the warning.
    for yaml_file in args.yaml:
        with open(yaml_file, 'r') as stream:
            if yaml.__version__ >= "5.1":
                additional_data = yaml.full_load(stream)
            else:
                additional_data = yaml.load(stream)
            deep_update(data, FormatConverter.to_deserialized(additional_data))

    # -a: merge an inline json string.
    if args.additional_data != None:
        deep_update(data, json.loads(args.additional_data))

    # -d: merge the live config DB content (namespace-aware connection).
    if args.from_db:
        if args.namespace is None:
            configdb = ConfigDBConnector(**db_kwargs)
        else:
            configdb = ConfigDBConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)

        configdb.connect()
        deep_update(data, FormatConverter.db_to_output(configdb.get_config()))

    # the minigraph file must be provided to get the mac address for backend asics
    if args.platform_info:
        asic_role = None
        if asic_name is not None:
            if args.minigraph is not None:
                asic_role = parse_asic_sub_role(args.minigraph, asic_name)

            if asic_role is not None and asic_role.lower() == "backend":
                mac = get_system_mac(namespace=asic_name)
            else:
                mac = get_system_mac()
        else:
            mac = get_system_mac()

        hardware_data = {'DEVICE_METADATA': {'localhost': {
            'platform': platform,
            'mac': mac,
            }}}
        # The ID needs to be passed to the SAI to identify the asic.
        if asic_name is not None:
            hardware_data['DEVICE_METADATA']['localhost'].update(asic_id=asic_id)
        deep_update(data, hardware_data)

    # -t: render the template against the merged data, with all custom
    # filters registered and template bytecode cached in redis.
    if args.template is not None:
        template_file = os.path.abspath(args.template)
        paths = ['/', '/usr/share/sonic/templates', os.path.dirname(template_file)]
        if args.template_dir is not None:
            template_dir = os.path.abspath(args.template_dir)
            paths.append(template_dir)
        loader = jinja2.FileSystemLoader(paths)

        redis_bcc = RedisBytecodeCache(SonicV2Connector(host='127.0.0.1'))
        env = jinja2.Environment(loader=loader, trim_blocks=True, bytecode_cache=redis_bcc)
        env.filters['sort_by_port_index'] = sort_by_port_index
        env.filters['ipv4'] = is_ipv4
        env.filters['ipv6'] = is_ipv6
        env.filters['unique_name'] = unique_name
        env.filters['pfx_filter'] = pfx_filter
        env.filters['ip_network'] = ip_network
        # One prefix-attribute filter per netaddr.IPNetwork property.
        for attr in ['ip', 'network', 'prefixlen', 'netmask', 'broadcast']:
            env.filters[attr] = partial(prefix_attr, attr)
        template = env.get_template(template_file)
        # Tables are natural-sorted so rendered output is deterministic.
        print(template.render(sort_data(data)))

    # -v: evaluate a jinja2 expression against the data and print it.
    if args.var != None:
        template = jinja2.Template('{{' + args.var + '}}')
        print(template.render(data))

    # --var-json: print one table as json; -K narrows to a single key.
    if args.var_json != None and args.var_json in data:
        if args.key != None:
            print(json.dumps(FormatConverter.to_serialized(data[args.var_json], args.key), indent=4, cls=minigraph_encoder))
        else:
            print(json.dumps(FormatConverter.to_serialized(data[args.var_json]), indent=4, cls=minigraph_encoder))

    # -w: write the merged data into the config DB (upper-case tables only).
    if args.write_to_db:
        if args.namespace is None:
            configdb = ConfigDBConnector(**db_kwargs)
        else:
            configdb = ConfigDBConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)

        # connect(False) skips waiting for the db init flag.
        # NOTE(review): presumably intentional for early-boot use — confirm.
        configdb.connect(False)
        configdb.mod_config(FormatConverter.output_to_db(data))

    # --print-data: dump everything as serialized json.
    if args.print_data:
        print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))

    # --preset: generate a sample configuration from a preset template.
    if args.preset != None:
        data = generate_sample_config(data, args.preset)
        print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))
|
|
|
|
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
    main()
|