sonic-buildimage/src/sonic-config-engine/sonic-cfggen
judyjoseph acf465b43b
Multi DB with namespace support, Introducing the database_global.json… (#4477)
* Multi DB with namespace support, introducing the database_global.json file
to support accessing DBs in other namespaces for services running in the
Linux host

* Updates based on comments

* Adding the j2 templates for database_config and database_global files.

* Updating to retrieve the redis DIRs to be mounted from the database_global.json file.

* Additional check to see if asic.conf file exists before sourcing it.

* Updates based on PR comments discussion.

* Review comments update

* Updates to the argument "-n" for namespace, used in both the context of minigraph parsing and multi-DB access.

* Update with the attribute "persistence_for_warm_boot" that was added to database_config.json file earlier.

* Removing the database_config.json file to avoid confusion in the future.
We use the database_config.json.j2 file to generate database_config.json files dynamically.

* Update the comments for sudo usage in docker_image_ctrl.j2

* Update the PING/PONG tests with new logic using sonic-db-cli. With this we wait until the
PONG response is received once the redis server is up.

* Similar changes in swss and syncd scripts for the PING tests with sonic-db-cli

* Added a missing comma in the database_config.json.j2 file; do a pip install of j2cli in docker-base-buster.
2020-05-08 21:24:05 -07:00

#!/usr/bin/env python
"""sonic-cfggen
A tool to read SONiC config data from one or more of the following sources:
minigraph file, config DB, json file(s), yaml file(s), command line input,
and write the data into DB, print as json, or render a jinja2 config template.
Examples:
Render template with minigraph:
sonic-cfggen -m -t /usr/share/template/bgpd.conf.j2
Dump config DB content into json file:
sonic-cfggen -d --print-data > db_dump.json
Load content of json file into config DB:
sonic-cfggen -j db_dump.json --write-to-db
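    Dump config DB content of a specific namespace into a json file (multi-ASIC
    platforms; the namespace name "asic0" below is illustrative):
    sonic-cfggen -d -n asic0 --print-data > db_dump.json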
See the usage string below for a detailed description of the arguments.
"""
from __future__ import print_function
# Monkey-patch re.compile to do lazy regular expression compilation.
# This is done to improve the import time of the jinja2, yaml and natsort modules, because they
# do many regexp compilations at import time, so it speeds up sonic-cfggen invocations
# that do not require template generation or yaml loading. sonic-cfggen is used in so many places
# during system boot-up that importing jinja2, yaml and natsort every time
# without lazy regular expression compilation affects boot-up time.
# FIXME: remove this once sonic-cfggen and templates dependencies are replaced with a faster approach
import lazy_re
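# A minimal sketch of the idea behind lazy_re (the actual implementation lives
# in the lazy_re module; the names below are illustrative only):
#
#     _real_compile = re.compile
#     def _lazy_compile(pattern, flags=0):
#         # return a proxy that only calls _real_compile(pattern, flags) the
#         # first time a method such as match()/search() is invoked
#         ...
#     re.compile = _lazy_compile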
import sys
import os.path
import argparse
import yaml
import jinja2
import netaddr
import json
from functools import partial
from minigraph import minigraph_encoder
from minigraph import parse_xml
from minigraph import parse_device_desc_xml
from minigraph import parse_asic_sub_role
from portconfig import get_port_config
from sonic_device_util import get_machine_info
from sonic_device_util import get_platform_info
from sonic_device_util import get_system_mac
from sonic_device_util import get_npu_id_from_name
from config_samples import generate_sample_config
from config_samples import get_available_config
from swsssdk import SonicV2Connector, ConfigDBConnector, SonicDBConfig
from redis_bcc import RedisBytecodeCache
from collections import OrderedDict
from natsort import natsorted
def sort_by_port_index(value):
    if not value:
        return
    if isinstance(value, list):
        # Strip the fixed-length "Ethernet" prefix (8 characters) and sort numerically
        value.sort(key=lambda k: int(k[8:]))
def is_ipv4(value):
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except (netaddr.AddrFormatError, ValueError):
            return False
    return addr.version == 4
def is_ipv6(value):
    if not value:
        return False
    if isinstance(value, netaddr.IPNetwork):
        addr = value
    else:
        try:
            addr = netaddr.IPNetwork(str(value))
        except (netaddr.AddrFormatError, ValueError):
            return False
    return addr.version == 6
def prefix_attr(attr, value):
    if not value:
        return None
    else:
        try:
            prefix = netaddr.IPNetwork(str(value))
        except (netaddr.AddrFormatError, ValueError):
            return None
        return str(getattr(prefix, attr))
def unique_name(l):
name_list = []
new_list = []
for item in l:
if item['name'] not in name_list:
name_list.append(item['name'])
new_list.append(item)
return new_list
def pfx_filter(value):
    """INTERFACE tables can have keys in one of two formats:
    string or tuple - this filter skips the string keys and only
    takes the tuple keys into account.
    E.g. VLAN_INTERFACE|Vlan1000 vs VLAN_INTERFACE|Vlan1000|192.168.0.1/21
    """
table = OrderedDict()
if not value:
return table
for key,val in value.items():
if not isinstance(key, tuple):
continue
table[key] = val
return table
def ip_network(value):
    """ Extract the network for a network prefix """
    try:
        r_v = netaddr.IPNetwork(value)
    except (netaddr.AddrFormatError, ValueError):
        return "Invalid ip address %s" % value
    return r_v.network
class FormatConverter:
"""Convert config DB based schema to legacy minigraph based schema for backward capability.
We will move to DB schema and remove this class when the config templates are modified.
TODO(taoyl): Current version of config db only supports BGP admin states.
All other configuration are still loaded from minigraph. Plan to remove
minigraph and move everything into config db in a later commit.
"""
@staticmethod
def db_to_output(db_data):
return db_data
@staticmethod
def output_to_db(output_data):
db_data = {}
for table_name in output_data:
if table_name[0].isupper():
db_data[table_name] = output_data[table_name]
return db_data
@staticmethod
def to_serialized(data, lookup_key = None):
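        # ConfigDBConnector.serialize_key() joins tuple keys with the table
        # separator, e.g. ("Vlan1000", "192.168.0.1/21") -> "Vlan1000|192.168.0.1/21"
        # (the example keys are illustrative).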
if type(data) is dict:
data = OrderedDict(natsorted(data.items()))
if lookup_key != None:
newData = {}
for key in data.keys():
if ((type(key) is unicode and lookup_key == key) or (type(key) is tuple and lookup_key in key)):
newData[ConfigDBConnector.serialize_key(key)] = data.pop(key)
break
return newData
for key in data.keys():
new_key = ConfigDBConnector.serialize_key(key)
if new_key != key:
data[new_key] = data.pop(key)
data[new_key] = FormatConverter.to_serialized(data[new_key])
return data
@staticmethod
def to_deserialized(data):
for table in data:
if type(data[table]) is dict:
for key in data[table].keys():
new_key = ConfigDBConnector.deserialize_key(key)
if new_key != key:
data[table][new_key] = data[table].pop(key)
return data
def deep_update(dst, src):
for key, value in src.iteritems():
if isinstance(value, dict):
node = dst.setdefault(key, {})
deep_update(node, value)
else:
dst[key] = value
return dst
def sort_data(data):
for table in data:
if type(data[table]) is dict:
data[table] = OrderedDict(natsorted(data[table].items()))
return data
def main():
parser=argparse.ArgumentParser(description="Render configuration file from minigraph data and jinja2 template.")
group = parser.add_mutually_exclusive_group()
group.add_argument("-m", "--minigraph", help="minigraph xml file", nargs='?', const='/etc/sonic/minigraph.xml')
group.add_argument("-M", "--device-description", help="device description xml file")
group.add_argument("-k", "--hwsku", help="HwSKU")
parser.add_argument("-n", "--namespace", help="namespace name", nargs='?', const=None, default=None)
parser.add_argument("-p", "--port-config", help="port config file, used with -m or -k", nargs='?', const=None)
parser.add_argument("-y", "--yaml", help="yaml file that contains additional variables", action='append', default=[])
parser.add_argument("-j", "--json", help="json file that contains additional variables", action='append', default=[])
parser.add_argument("-a", "--additional-data", help="addition data, in json string")
parser.add_argument("-d", "--from-db", help="read config from configdb", action='store_true')
parser.add_argument("-H", "--platform-info", help="read platform and hardware info", action='store_true')
parser.add_argument("-s", "--redis-unix-sock-file", help="unix sock file for redis connection")
group = parser.add_mutually_exclusive_group()
group.add_argument("-t", "--template", help="render the data with the template file")
parser.add_argument("-T", "--template_dir", help="search base for the template files", action='store')
group.add_argument("-v", "--var", help="print the value of a variable, support jinja2 expression")
group.add_argument("--var-json", help="print the value of a variable, in json format")
group.add_argument("-w", "--write-to-db", help="write config into configdb", action='store_true')
group.add_argument("--print-data", help="print all data", action='store_true')
group.add_argument("--preset", help="generate sample configuration from a preset template", choices=get_available_config())
group = parser.add_mutually_exclusive_group()
group.add_argument("-K", "--key", help="Lookup for a specific key")
args = parser.parse_args()
platform = get_platform_info(get_machine_info())
db_kwargs = {}
if args.redis_unix_sock_file != None:
db_kwargs['unix_socket_path'] = args.redis_unix_sock_file
data = {}
hwsku = args.hwsku
asic_name = args.namespace
asic_id = None
if asic_name is not None:
asic_id = get_npu_id_from_name(asic_name)
# Load the database config for the namespace from global database json
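    # database_global.json lists the per-namespace database_config.json files to
    # include. A sketch of its expected shape (paths and namespace names are
    # illustrative):
    #   {
    #       "INCLUDES": [
    #           {"include": "../../redis/sonic-db/database_config.json"},
    #           {"namespace": "asic0", "include": "../../redis0/sonic-db/database_config.json"}
    #       ],
    #       "VERSION": "1.0"
    #   }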
if args.namespace is not None:
SonicDBConfig.load_sonic_global_db_config(namespace=args.namespace)
if hwsku is not None:
hardware_data = {'DEVICE_METADATA': {'localhost': {
'hwsku': hwsku
}}}
deep_update(data, hardware_data)
(ports, _, _) = get_port_config(hwsku, platform, args.port_config, asic_id)
if not ports:
print('Failed to get port config', file=sys.stderr)
sys.exit(1)
deep_update(data, {'PORT': ports})
for json_file in args.json:
with open(json_file, 'r') as stream:
deep_update(data, FormatConverter.to_deserialized(json.load(stream)))
if args.minigraph != None:
minigraph = args.minigraph
if platform:
if args.port_config != None:
deep_update(data, parse_xml(minigraph, platform, args.port_config, asic_name=asic_name))
else:
deep_update(data, parse_xml(minigraph, platform, asic_name=asic_name))
else:
deep_update(data, parse_xml(minigraph, port_config_file=args.port_config, asic_name=asic_name))
if args.device_description != None:
deep_update(data, parse_device_desc_xml(args.device_description))
for yaml_file in args.yaml:
with open(yaml_file, 'r') as stream:
if yaml.__version__ >= "5.1":
additional_data = yaml.full_load(stream)
else:
additional_data = yaml.load(stream)
deep_update(data, FormatConverter.to_deserialized(additional_data))
if args.additional_data != None:
deep_update(data, json.loads(args.additional_data))
if args.from_db:
if args.namespace is None:
configdb = ConfigDBConnector(**db_kwargs)
else:
configdb = ConfigDBConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)
configdb.connect()
deep_update(data, FormatConverter.db_to_output(configdb.get_config()))
# the minigraph file must be provided to get the mac address for backend asics
if args.platform_info:
asic_role = None
if asic_name is not None:
if args.minigraph is not None:
asic_role = parse_asic_sub_role(args.minigraph, asic_name)
if asic_role is not None and asic_role.lower() == "backend":
mac = get_system_mac(namespace=asic_name)
else:
mac = get_system_mac()
else:
mac = get_system_mac()
hardware_data = {'DEVICE_METADATA': {'localhost': {
'platform': platform,
'mac': mac,
}}}
# The ID needs to be passed to the SAI to identify the asic.
if asic_name is not None:
hardware_data['DEVICE_METADATA']['localhost'].update(asic_id=asic_id)
deep_update(data, hardware_data)
if args.template is not None:
template_file = os.path.abspath(args.template)
paths = ['/', '/usr/share/sonic/templates', os.path.dirname(template_file)]
if args.template_dir is not None:
template_dir = os.path.abspath(args.template_dir)
paths.append(template_dir)
loader = jinja2.FileSystemLoader(paths)
redis_bcc = RedisBytecodeCache(SonicV2Connector(host='127.0.0.1'))
env = jinja2.Environment(loader=loader, trim_blocks=True, bytecode_cache=redis_bcc)
env.filters['sort_by_port_index'] = sort_by_port_index
env.filters['ipv4'] = is_ipv4
env.filters['ipv6'] = is_ipv6
env.filters['unique_name'] = unique_name
env.filters['pfx_filter'] = pfx_filter
env.filters['ip_network'] = ip_network
for attr in ['ip', 'network', 'prefixlen', 'netmask', 'broadcast']:
env.filters[attr] = partial(prefix_attr, attr)
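        # In a template these filters are used as, e.g.:
        #   {{ "10.0.0.1/31" | network }}          -> 10.0.0.0
        #   {% if prefix | ipv6 %} ... {% endif %}
        # (the values shown are illustrative)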
template = env.get_template(template_file)
print(template.render(sort_data(data)))
if args.var != None:
template = jinja2.Template('{{' + args.var + '}}')
print(template.render(data))
if args.var_json != None and args.var_json in data:
if args.key != None:
print(json.dumps(FormatConverter.to_serialized(data[args.var_json], args.key), indent=4, cls=minigraph_encoder))
else:
print(json.dumps(FormatConverter.to_serialized(data[args.var_json]), indent=4, cls=minigraph_encoder))
if args.write_to_db:
if args.namespace is None:
configdb = ConfigDBConnector(**db_kwargs)
else:
configdb = ConfigDBConnector(use_unix_socket_path=True, namespace=args.namespace, **db_kwargs)
configdb.connect(False)
configdb.mod_config(FormatConverter.output_to_db(data))
if args.print_data:
print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))
if args.preset != None:
data = generate_sample_config(data, args.preset)
print(json.dumps(FormatConverter.to_serialized(data), indent=4, cls=minigraph_encoder))
if __name__ == "__main__":
main()