2017-01-06 20:19:42 -06:00
|
|
|
#!/usr/bin/env python
|
2017-08-01 21:02:00 -05:00
|
|
|
"""sonic-cfggen
|
|
|
|
|
|
|
|
A tool to read SONiC config data from one or more of the following sources:
|
|
|
|
minigraph file, config DB, json file(s), yaml files(s), command line input,
|
|
|
|
and write the data into DB, print as json, or render a jinja2 config template.
|
|
|
|
|
|
|
|
Examples:
|
2019-06-13 21:04:45 -05:00
|
|
|
Render template with minigraph:
|
2017-08-01 21:02:00 -05:00
|
|
|
sonic-cfggen -m -t /usr/share/template/bgpd.conf.j2
|
2019-06-13 21:04:45 -05:00
|
|
|
Dump config DB content into json file:
|
2017-08-01 21:02:00 -05:00
|
|
|
sonic-cfggen -d --print-data > db_dump.json
|
|
|
|
Load content of json file into config DB:
|
|
|
|
sonic-cfggen -j db_dump.json --write-to-db
|
|
|
|
See usage string for detail description for arguments.
|
|
|
|
"""
|
2017-01-06 20:19:42 -06:00
|
|
|
|
2018-04-24 18:01:17 -05:00
|
|
|
from __future__ import print_function
|
2019-10-31 11:17:29 -05:00
|
|
|
|
|
|
|
# monkey patch re.compile to do lazy regular expression compilation.
|
|
|
|
# This is done to improve import time of jinja2, yaml, natsort modules, because they
|
|
|
|
# do many regexp compilation at import time, so it will speed up sonic-cfggen invocations
|
|
|
|
# that do not require template generation or yaml loading. sonic-cfggen is used in so many places
|
|
|
|
# during system boot up that importing jinja2, yaml, natsort every time
|
|
|
|
# without lazy regular expression compilation affect boot up time.
|
|
|
|
# FIXME: remove this once sonic-cfggen and templates dependencies are replaced with a faster approach
|
|
|
|
import lazy_re
|
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
import argparse
|
2020-09-28 22:07:39 -05:00
|
|
|
import contextlib
|
2017-01-06 20:19:42 -06:00
|
|
|
import jinja2
|
2017-01-19 22:56:26 -06:00
|
|
|
import json
|
2020-09-28 22:07:39 -05:00
|
|
|
import netaddr
|
|
|
|
import os.path
|
|
|
|
import sys
|
|
|
|
import yaml
|
|
|
|
|
|
|
|
from collections import OrderedDict
|
|
|
|
from config_samples import generate_sample_config, get_available_config
|
2017-09-12 16:13:27 -05:00
|
|
|
from functools import partial
|
2020-09-28 22:07:39 -05:00
|
|
|
from minigraph import minigraph_encoder, parse_xml, parse_device_desc_xml, parse_asic_sub_role
|
2020-09-04 12:19:12 -05:00
|
|
|
from portconfig import get_port_config, get_breakout_mode
|
2020-09-28 22:07:39 -05:00
|
|
|
from redis_bcc import RedisBytecodeCache
|
2020-08-14 09:36:00 -05:00
|
|
|
from sonic_py_common.multi_asic import get_asic_id_from_name, is_multi_asic
|
2020-09-04 12:19:12 -05:00
|
|
|
from sonic_py_common import device_info
|
2020-08-26 17:05:54 -05:00
|
|
|
from swsssdk import SonicV2Connector, ConfigDBConnector, SonicDBConfig, ConfigDBPipeConnector
|
2017-01-06 20:19:42 -06:00
|
|
|
|
2020-08-19 11:29:40 -05:00
|
|
|
#TODO: Remove STR_TYPE once SONiC moves to Python 3.x
# Python 2/3 compatibility shim. "unicode" exists only on Python 2; the
# name is never evaluated on Python 3 because that branch is not taken.
PY3x = sys.version_info >= (3, 0)
if PY3x:
    STR_TYPE = str
else:
    STR_TYPE = unicode
|
|
|
|
|
2018-10-17 16:10:34 -05:00
|
|
|
def sort_by_port_index(value):
    """Jinja2 filter: sort a list of "EthernetN" port names in place.

    Ordering key is the numeric suffix of the port name.  Empty or
    non-list input is left untouched.  Sorts in place and returns None.
    """
    if not value:
        return
    if not isinstance(value, list):
        return

    def _port_key(name):
        # In multi-ASIC platforms backend ethernet ports are identified as
        # 'Ethernet-BPxy'. Add 1024 to sort backend ports to the end.
        if "BP" in name:
            return int(name[11:]) + 1024
        return int(name[8:])

    value.sort(key=_port_key)
|
2018-10-17 16:10:34 -05:00
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
def is_ipv4(value):
    """Jinja2 filter: return True if *value* is an IPv4 address/prefix.

    Accepts a netaddr.IPNetwork instance, or anything whose str() form
    netaddr can parse (e.g. "10.0.0.1" or "10.0.0.0/24").  Falsy input
    and unparsable strings return False.
    """
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except Exception:
            # Narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return False
    return addr.version == 4
|
|
|
|
|
|
|
|
def is_ipv6(value):
    """Jinja2 filter: return True if *value* is an IPv6 address/prefix.

    Accepts a netaddr.IPNetwork instance, or anything whose str() form
    netaddr can parse (e.g. "fc00::1" or "fc00::/64").  Falsy input and
    unparsable strings return False.
    """
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except Exception:
            # Narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return False
    return addr.version == 6
|
|
|
|
|
2017-09-12 16:13:27 -05:00
|
|
|
def prefix_attr(attr, value):
    """Jinja2 filter helper: return str(getattr(IPNetwork(value), attr)).

    Used (via functools.partial) to expose the 'ip', 'network',
    'prefixlen', 'netmask' and 'broadcast' filters.  Returns None for
    falsy or unparsable input.
    """
    if not value:
        return None
    try:
        prefix = netaddr.IPNetwork(str(value))
    except Exception:
        # Narrowed from a bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return None
    return str(getattr(prefix, attr))
|
|
|
|
|
2017-03-16 13:22:40 -05:00
|
|
|
def unique_name(l):
    """Jinja2 filter: keep only the first item for each distinct item['name'].

    Order of first occurrences is preserved.  Uses a set for O(1)
    membership tests instead of the original O(n) list scan.
    """
    seen_names = set()
    new_list = []
    for item in l:
        if item['name'] not in seen_names:
            seen_names.add(item['name'])
            new_list.append(item)
    return new_list
|
|
|
|
|
2019-06-10 16:02:55 -05:00
|
|
|
def pfx_filter(value):
    """INTERFACE Table can have keys in one of the two formats:
    string or tuple - This filter skips the string keys and only
    take into account the tuple.
    For eg - VLAN_INTERFACE|Vlan1000 vs VLAN_INTERFACE|Vlan1000|192.168.0.1/21
    """
    table = OrderedDict()

    # Empty/None input yields an empty table rather than raising.
    if not value:
        return table

    for key,val in value.items():
        # String keys (interface-only entries, no IP) are dropped.
        if not isinstance(key, tuple):
            continue
        intf, ip_address = key
        if '/' not in ip_address:
            # Bare address without a prefix length: normalize to a host
            # prefix (/32 for IPv4, /128 for IPv6).
            if is_ipv4(ip_address):
                new_ip_address = "%s/32" % ip_address
            elif is_ipv6(ip_address):
                new_ip_address = "%s/128" % ip_address
            else:
                raise ValueError("'%s' is invalid ip address" % ip_address)
            table[(intf, new_ip_address)] = val
        else:
            table[key] = val
    return table
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2019-10-24 09:35:14 -05:00
|
|
|
def ip_network(value):
    """Jinja2 filter: extract the network part of a network prefix.

    Returns the netaddr network object for a parsable prefix; for an
    unparsable value, returns an error string (templates render it
    rather than crashing, matching the original behavior).
    """
    try:
        r_v = netaddr.IPNetwork(value)
    except Exception:
        # Narrowed from a bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return "Invalid ip address %s" % value
    return r_v.network
|
|
|
|
|
2017-08-01 21:02:00 -05:00
|
|
|
class FormatConverter:
    """Convert config DB based schema to legacy minigraph based schema for backward capability.
    We will move to DB schema and remove this class when the config templates are modified.

    TODO(taoyl): Current version of config db only supports BGP admin states.
    All other configuration are still loaded from minigraph. Plan to remove
    minigraph and move everything into config db in a later commit.
    """
    @staticmethod
    def db_to_output(db_data):
        # Config DB data is already in the output schema; pass through.
        return db_data

    @staticmethod
    def output_to_db(output_data):
        """Keep only the tables whose names start with an uppercase letter
        (the config DB convention); everything else is dropped."""
        db_data = {}
        for table_name in output_data:
            if table_name[0].isupper():
                db_data[table_name] = output_data[table_name]
        return db_data

    @staticmethod
    def to_serialized(data, lookup_key = None):
        """Recursively turn tuple keys into their serialized ("a|b") form.

        With *lookup_key*, return a new dict containing only the first
        matching entry (matching a plain-string key exactly, or any
        component of a tuple key).
        """
        if type(data) is dict:
            if lookup_key is not None:
                newData = {}
                # Iterate over a snapshot: data is mutated (pop) inside the
                # loop, which would raise RuntimeError on a Python 3 dict
                # view.
                for key in list(data.keys()):
                    if ((type(key) is STR_TYPE and lookup_key == key) or (type(key) is tuple and lookup_key in key)):
                        newData[ConfigDBConnector.serialize_key(key)] = data.pop(key)
                        break
                return newData

            # Snapshot the keys: entries are popped/re-inserted while
            # iterating, which Python 3 dict views do not allow.
            for key in list(data.keys()):
                new_key = ConfigDBConnector.serialize_key(key)
                if new_key != key:
                    data[new_key] = data.pop(key)
                data[new_key] = FormatConverter.to_serialized(data[new_key])
        return data

    @staticmethod
    def to_deserialized(data):
        """Inverse of to_serialized: split "a|b" keys back into tuples."""
        for table in data:
            if type(data[table]) is dict:
                # Snapshot the keys before mutating the dict (Python 3
                # forbids pop during view iteration).
                for key in list(data[table].keys()):
                    new_key = ConfigDBConnector.deserialize_key(key)
                    if new_key != key:
                        data[table][new_key] = data[table].pop(key)
        return data
|
|
|
|
|
|
|
|
|
2017-08-08 18:23:58 -05:00
|
|
|
def deep_update(dst, src):
|
2020-08-19 11:29:40 -05:00
|
|
|
for key, value in src.items():
|
2017-08-08 18:23:58 -05:00
|
|
|
if isinstance(value, dict):
|
|
|
|
node = dst.setdefault(key, {})
|
|
|
|
deep_update(node, value)
|
|
|
|
else:
|
|
|
|
dst[key] = value
|
|
|
|
return dst
|
|
|
|
|
def sort_data(data):
    """Natural-sort the entries of every plain-dict table in *data*.

    Each value whose type is exactly dict is replaced by an OrderedDict
    holding its items in natural sort order; other values (including
    OrderedDicts) are left untouched.  Returns *data*.
    """
    for table_name in data:
        table = data[table_name]
        # Exact-type check (not isinstance) is deliberate: already-sorted
        # OrderedDict tables are skipped.
        if type(table) is dict:
            data[table_name] = OrderedDict(natsorted(table.items()))
    return data
|
|
|
|
|
2020-08-12 17:13:06 -05:00
|
|
|
@contextlib.contextmanager
def smart_open(filename=None, mode=None):
    """
    Provide contextual file descriptor of filename if it is not a file descriptor

    If *filename* is a path string, the file is opened here and closed on
    exit; if it is an already-open file-like object (e.g. sys.stdout), it
    is yielded as-is and left open.
    """
    # The original used isinstance(filename, file), but the "file" builtin
    # only exists on Python 2 and raises NameError on Python 3.  Duck-type
    # instead: anything exposing read/write is an open file object.
    is_file_obj = hasattr(filename, 'read') or hasattr(filename, 'write')
    smart_file = filename if is_file_obj else open(filename, mode)
    try:
        yield smart_file
    finally:
        # Only close handles that were opened here.
        if not is_file_obj:
            smart_file.close()
|
|
|
|
|
|
|
|
def _process_json(args, data):
    """
    Process JSON file and update switch configuration data

    Each file named in args.json is parsed, its serialized keys are
    converted back to tuple form, and the result is merged into *data*.
    """
    for json_path in args.json:
        with open(json_path, 'r') as fp:
            loaded = json.load(fp)
        deep_update(data, FormatConverter.to_deserialized(loaded))
|
|
|
|
|
|
|
|
def _get_jinja2_env(paths):
    """
    Retreive Jinj2 env used to render configuration templates

    Builds an environment over *paths* with a Redis-backed bytecode
    cache and registers the sonic-cfggen custom filters/globals.
    """
    redis_bcc = RedisBytecodeCache(SonicV2Connector(host='127.0.0.1'))
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(paths),
        trim_blocks=True,
        bytecode_cache=redis_bcc)

    custom_filters = {
        'sort_by_port_index': sort_by_port_index,
        'ipv4': is_ipv4,
        'ipv6': is_ipv6,
        'unique_name': unique_name,
        'pfx_filter': pfx_filter,
        'ip_network': ip_network,
    }
    for filter_name, filter_func in custom_filters.items():
        env.filters[filter_name] = filter_func

    # One filter per IPNetwork attribute, e.g. "{{ prefix | netmask }}".
    for attr in ['ip', 'network', 'prefixlen', 'netmask', 'broadcast']:
        env.filters[attr] = partial(prefix_attr, attr)

    # Pass the is_multi_asic function as global
    env.globals['multi_asic'] = is_multi_asic

    return env
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
def main():
    """CLI entry point: gather config data from the selected sources
    (minigraph, config DB, json/yaml files, command line), then perform
    the requested action (render templates, print data/variables, write
    to config DB, or generate a preset sample config)."""
    parser=argparse.ArgumentParser(description="Render configuration file from minigraph data and jinja2 template.")
    # Data sources are mutually exclusive: minigraph XOR device
    # description XOR HwSKU.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-m", "--minigraph", help="minigraph xml file", nargs='?', const='/etc/sonic/minigraph.xml')
    group.add_argument("-M", "--device-description", help="device description xml file")
    group.add_argument("-k", "--hwsku", help="HwSKU")
    parser.add_argument("-n", "--namespace", help="namespace name", nargs='?', const=None, default=None)
    parser.add_argument("-p", "--port-config", help="port config file, used with -m or -k", nargs='?', const=None)
    parser.add_argument("-S", "--hwsku-config", help="hwsku config file, used with -p and -m or -k", nargs='?', const=None)
    parser.add_argument("-y", "--yaml", help="yaml file that contains additional variables", action='append', default=[])
    parser.add_argument("-j", "--json", help="json file that contains additional variables", action='append', default=[])
    parser.add_argument("-a", "--additional-data", help="addition data, in json string")
    parser.add_argument("-d", "--from-db", help="read config from configdb", action='store_true')
    parser.add_argument("-H", "--platform-info", help="read platform and hardware info", action='store_true')
    parser.add_argument("-s", "--redis-unix-sock-file", help="unix sock file for redis connection")
    # Output selection group: template render XOR var XOR var-json XOR preset.
    group = parser.add_mutually_exclusive_group()
    # "-t file" renders to stdout; "-t file,dest" renders to dest (the
    # lambda splits the pair, defaulting the destination to sys.stdout).
    group.add_argument("-t", "--template", help="render the data with the template file", action="append", default=[],
                       type=lambda opt_value: tuple(opt_value.split(',')) if ',' in opt_value else (opt_value, sys.stdout))
    parser.add_argument("-T", "--template_dir", help="search base for the template files", action='store')
    group.add_argument("-v", "--var", help="print the value of a variable, support jinja2 expression")
    group.add_argument("--var-json", help="print the value of a variable, in json format")
    group.add_argument("--preset", help="generate sample configuration from a preset template", choices=get_available_config())
    # Sink group: print XOR write-to-db XOR key lookup.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--print-data", help="print all data", action='store_true')
    group.add_argument("-w", "--write-to-db", help="write config into configdb", action='store_true')
    group.add_argument("-K", "--key", help="Lookup for a specific key")
    args = parser.parse_args()

    platform = device_info.get_platform()

    db_kwargs = {}
    if args.redis_unix_sock_file is not None:
        db_kwargs['unix_socket_path'] = args.redis_unix_sock_file

    # Accumulated configuration; every source below is merged into it.
    data = {}
    hwsku = args.hwsku
    asic_name = args.namespace
    asic_id = None
    if asic_name is not None:
        asic_id = get_asic_id_from_name(asic_name)

    # Load the database config for the namespace from global database json
    if args.namespace is not None:
        SonicDBConfig.load_sonic_global_db_config(namespace=args.namespace)

    if hwsku is not None:
        hardware_data = {'DEVICE_METADATA': {'localhost': {
            'hwsku': hwsku
            }}}
        deep_update(data, hardware_data)
        # Fall back to the platform's default port config when -p not given.
        if args.port_config is None:
            args.port_config = device_info.get_path_to_port_config_file(hwsku)
        (ports, _, _) = get_port_config(hwsku, platform, args.port_config, asic_id)
        if not ports:
            print('Failed to get port config', file=sys.stderr)
            sys.exit(1)
        deep_update(data, {'PORT': ports})

        brkout_table = get_breakout_mode(hwsku, platform, args.port_config)
        if brkout_table is not None:
            deep_update(data, {'BREAKOUT_CFG': brkout_table})

    _process_json(args, data)

    if args.minigraph is not None:
        minigraph = args.minigraph
        if platform:
            if args.port_config is not None:
                deep_update(data, parse_xml(minigraph, platform, args.port_config, asic_name=asic_name, hwsku_config_file=args.hwsku_config))
            else:
                deep_update(data, parse_xml(minigraph, platform, asic_name=asic_name))
        else:
            deep_update(data, parse_xml(minigraph, port_config_file=args.port_config, asic_name=asic_name, hwsku_config_file=args.hwsku_config))

    if args.device_description is not None:
        deep_update(data, parse_device_desc_xml(args.device_description))

    for yaml_file in args.yaml:
        with open(yaml_file, 'r') as stream:
            # PyYAML >= 5.1 deprecated the implicit loader of yaml.load.
            if yaml.__version__ >= "5.1":
                additional_data = yaml.full_load(stream)
            else:
                additional_data = yaml.load(stream)
            deep_update(data, FormatConverter.to_deserialized(additional_data))

    if args.additional_data is not None:
        deep_update(data, json.loads(args.additional_data))

    if args.from_db:
        if args.namespace is None:
            configdb = ConfigDBPipeConnector(use_unix_socket_path=True, **db_kwargs)
        else:
            configdb = ConfigDBPipeConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)

        configdb.connect()
        deep_update(data, FormatConverter.db_to_output(configdb.get_config()))

    # the minigraph file must be provided to get the mac address for backend asics
    if args.platform_info:
        asic_role = None
        if asic_name is not None:
            if args.minigraph is not None:
                asic_role = parse_asic_sub_role(args.minigraph, asic_name)

            if asic_role is not None and asic_role.lower() == "backend":
                mac = device_info.get_system_mac(namespace=asic_name)
            else:
                mac = device_info.get_system_mac()
        else:
            mac = device_info.get_system_mac()

        hardware_data = {'DEVICE_METADATA': {'localhost': {
            'platform': platform,
            'mac': mac,
            }}}
        # The ID needs to be passed to the SAI to identify the asic.
        if asic_name is not None:
            hardware_data['DEVICE_METADATA']['localhost'].update(asic_id=asic_id)
        deep_update(data, hardware_data)

    # Template search path: root, standard templates, -T dir, and each
    # template file's own directory.
    paths = ['/', '/usr/share/sonic/templates']
    if args.template_dir:
        paths.append(os.path.abspath(args.template_dir))

    if args.template:
        for template_file, _ in args.template:
            paths.append(os.path.dirname(os.path.abspath(template_file)))
        env = _get_jinja2_env(paths)
        for template_file, dest_file in args.template:
            template = env.get_template(os.path.basename(template_file))
            template_data = template.render(data)
            # Special destination "config-db" feeds the rendered JSON
            # back into the in-memory data instead of writing a file.
            if dest_file == "config-db":
                deep_update(data, FormatConverter.to_deserialized(json.loads(template_data)))
            else:
                with smart_open(dest_file, 'w') as df:
                    print(template_data, file=df)

    if args.var is not None:
        # Evaluate an arbitrary jinja2 expression against the data.
        template = jinja2.Template('{{' + args.var + '}}')
        print(template.render(data))

    if args.var_json is not None and args.var_json in data:
        if args.key is not None:
            print(json.dumps(FormatConverter.to_serialized(data[args.var_json], args.key), indent=4, cls=minigraph_encoder))
        else:
            print(json.dumps(FormatConverter.to_serialized(data[args.var_json]), indent=4, cls=minigraph_encoder))

    if args.write_to_db:
        if args.namespace is None:
            configdb = ConfigDBPipeConnector(use_unix_socket_path=True, **db_kwargs)
        else:
            configdb = ConfigDBPipeConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)

        configdb.connect(False)
        configdb.mod_config(FormatConverter.output_to_db(data))

    if args.print_data:
        print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))

    if args.preset is not None:
        data = generate_sample_config(data, args.preset)
        print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))
|
2017-08-01 21:02:00 -05:00
|
|
|
|
2017-01-06 20:19:42 -06:00
|
|
|
# Standard script entry guard: run main() only when executed directly.
if __name__ == "__main__":
    main()
|