[yang] Adding a tool for generating documentation based on yang model (#11291)
#### Why I did it
Added a tool to generate documentation based on the YANG model.
#### How I did it
Parse the YANG model and create documentation based on the description field.
#### How to verify it
Added a unit test to verify it.
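For a quick sense of the approach, here is a minimal sketch of the idea (not the tool itself): it assumes the default model directory and the `sonic_yang` calls that the script added below relies on (`SonicYang`, `loadYangModel`, `yJson`).

```python
# Minimal sketch of the approach, assuming the default model path used by the
# new script and the sonic_yang APIs it calls.
import sonic_yang

YANG_MODELS_DIR = "/usr/local/yang-models"

yang = sonic_yang.SonicYang(YANG_MODELS_DIR)
yang.loadYangModel()

# Each entry in yJson is a YANG module rendered as nested dicts; table-level
# containers carry a 'description' node whose 'text' is what gets documented.
for module in yang.yJson:
    top = module['module'].get('container')
    if top is None:
        continue
    tables = top.get('container')
    if isinstance(tables, dict):
        tables = [tables]
    for table in tables or []:
        desc = table.get('description', {}).get('text')
        if desc:
            print(table['@name'] + ": " + desc)
```

The actual tool wraps this walk in `SonicCfgDescriber` and renders per-field details with `tabulate`, e.g. `sonic-cfg-help -t PORTCHANNEL -f members`.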
commit ebe4a84eee (parent 4fb6cf03e6)
@@ -24,10 +24,14 @@ setup(
    description="Package contains Python Library for YANG for sonic.",
    license="GNU General Public License v3",
    long_description=readme + '\n\n',
    scripts = [
        'sonic-cfg-help',
    ],
    install_requires = [
        'xmltodict==0.12.0',
        'ijson==2.6.1',
        'jsondiff>=1.2.0',
        'tabulate==0.8.2'
    ],
    tests_require = [
        'pytest>3',
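The `scripts` entry makes setuptools install `sonic-cfg-help` onto the PATH, and the table rendering in it relies on `tabulate`, which is pinned above. A small illustration of the rendering call the script makes (the header and `grid` format match the script; the row content is made up for the example):

```python
from tabulate import tabulate

# Same column header and default format as sonic-cfg-help; the single row here
# is illustrative only.
header = ['Field', 'Description', 'Mandatory', 'Default', 'Reference']
rows = [['state', 'Knob to make techsupport invocation event-driven', '', '', '']]
print(tabulate(rows, header, tablefmt='grid'))
```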
205 src/sonic-yang-mgmt/sonic-cfg-help (Executable file)
@@ -0,0 +1,205 @@
#!/usr/bin/env python3

import argparse
from tabulate import tabulate
import re
import textwrap

import sonic_yang

YANG_MODELS_DIR = "/usr/local/yang-models"


class SonicCfgDescriber:

    def __init__(self, table_name, field, print_format,
                 yang_models_dir=YANG_MODELS_DIR):
        self.yang_models_dir = yang_models_dir
        self.yang_parser = sonic_yang.SonicYang(self.yang_models_dir)
        self.yang_parser.loadYangModel()
        self.table_descr = {}
        self.table_name = table_name
        self.field = field
        self.print_format = print_format

    def print_documentation(self):
        for j in self.yang_parser.yJson:
            toplevel = j['module'].get('container')
            if toplevel is not None:
                container = toplevel.get('container')
                if isinstance(container, list):
                    for c in container:
                        if c.get('@name') == self.table_name or \
                                not self.table_name:
                            self.print_table(c, self.field)
                elif isinstance(container, dict):
                    if container.get('@name') == self.table_name or \
                            not self.table_name:
                        self.print_table(container, self.field)
    def print_table(self, table, field):
        if table is None:
            return
        print("\n" + table.get('@name'))
        if table.get('description', {}).get('text') is not None:
            print("Description: " + table.get('description').get('text'))
        print()

        if table.get('list') is not None:
            if (isinstance(table['list'], list)):
                for schema in table['list']:
                    self.print_field_desc(schema, field)
                    print()
            else:
                self.print_field_desc(table['list'], field)
                print()
        elif table.get('container') is not None:
            self.print_field_desc(table.get('container'), field)
            print()

    def get_referenced_table_field(self, ref):
        if 'LIST' in ref.split('/')[-2]:
            table = ref.split('/')[-3].split(':')[-1]
        else:
            table = ref.split('/')[-2].split(':')[-1]
        field = ref.split('/')[-1].split(':')[-1]
        return(table + ":" + field)

    def parse_when_condition(self, table):
        condition = table['@condition']
        desc = ""
        if "boolean" in condition:
            values = re.findall("\'(.*?)\'", condition, re.DOTALL)
            field = re.search("boolean\((.*?)\[", condition)
            desc = "when " + field.group(1) + " in " + ",".join(values)
        elif condition.startswith("(/"):
            field = re.search("/(.*)\:(.*) \=", condition)
            ref_table = condition.split("/")[2].split(':')[-1]
            values = re.findall("\'(.*?)\'", condition, re.DOTALL)
            desc = "when " + ref_table + ":" + field.group(2) + \
                " in " + ",".join(values)

        return desc
    def parse_choice(self, table, field):
        out = []
        for keys in table['case']:
            desc = "Mutually exclusive in group " + table['@name']
            if 'when' in keys:
                desc += "\n" + self.parse_when_condition(keys['when'])
            out += self.validate_and_parse_leaf(keys, field, desc)
        return out

    def parse_leaf(self, key, field, desc=""):
        mandatory = ''
        default = ''
        out = []
        reference = ''
        name = key.get('@name')
        if field and name != field:
            return []
        if isinstance(key, dict):
            if key.get('description', {}).get('text') is not None:
                desc += "\n".join(textwrap.wrap(re.sub(r"\s+", " ",
                                  key['description']['text']), width=50))
            if key.get('mandatory') is not None:
                mandatory = key.get('mandatory').get('@value')
            if key.get('default') is not None:
                default = key.get('default').get('@value')
            if key.get('type') is not None:
                if key['type'].get('@name') == 'leafref':
                    reference = self.get_referenced_table_field(
                        key['type']['path'].get('@value'))
                elif key['type'].get('@name') == 'union':
                    for types in key['type']['type']:
                        if 'path' in types:
                            val = self.get_referenced_table_field(
                                types['path'].get('@value'))
                            if not reference:
                                reference = val
                            else:
                                reference += "\n" + val
        out.append([name, desc, mandatory, default, reference])
        return out

    def validate_and_parse_leaf(self, table, field, desc=""):
        out = []
        if 'leaf' in table:
            if isinstance(table['leaf'], list):
                for key in table['leaf']:
                    ret = self.parse_leaf(key, field, desc)
                    out = out + ret
            elif isinstance(table['leaf'], dict):
                ret = self.parse_leaf(table['leaf'], field, desc)
                out = out + ret

        if 'leaf-list' in table:
            if desc:
                desc = desc + "\n"
            desc = desc + "The field contains list of unique members"
            if isinstance(table['leaf-list'], list):
                for key in table['leaf-list']:
                    ret = self.parse_leaf(key, field, desc)
                    out = out + ret
            elif isinstance(table['leaf-list'], dict):
                ret = self.parse_leaf(table['leaf-list'], field, desc)
                out = out + ret
        return out
    def print_field_desc(self, table, field):
        if table is None:
            return

        header = ['Field', 'Description', 'Mandatory', 'Default', 'Reference']
        out = []
        if 'key' in table:
            print("key - " + ":".join(table['key']['@value'].split()))

        out += self.validate_and_parse_leaf(table, field)

        if 'choice' in table:
            if isinstance(table['choice'], list):
                for key in table['choice']:
                    out += self.parse_choice(key, field)
            elif isinstance(table['choice'], dict):
                out += self.parse_choice(table['choice'], field)

        if 'list' in table:
            out += self.validate_and_parse_leaf(table['list'], field,
                                                "This field is for storing " +
                                                "mapping between two fields")

        print(tabulate(out, header, tablefmt=self.print_format))


def main():
    parser = argparse.ArgumentParser(description="Description of table name")
    parser.add_argument("-t", "--table", help="Table name", default='')
    parser.add_argument("-f", "--field", help="Field", default='')
    parser.add_argument("-p", "--print_format", help="Print format",
                        default='grid')
    parser.add_argument('-a', "--all", action="store_true", default=False,
                        help="Print all tables")
    args = parser.parse_args()
    if not (args.table or args.all):
        print("Error: Table or all option is required")
        parser.print_help()
        return -1

    if args.table and args.all:
        print("Cannot have table and all option together")
        parser.print_help()
        return -1

    if args.field and not args.table:
        print("Error: Filter by field requires table to be specified")
        parser.print_help()
        return -1

    yang_cfg = SonicCfgDescriber(args.table, args.field, args.print_format)
    yang_cfg.print_documentation()


if __name__ == "__main__":
    main()
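For orientation when reading `print_field_desc()` and `parse_leaf()` above: the `yJson` entries are xmltodict-style nested dicts. The keys below mirror what the code reads (`@name`, `description`/`text`, `key`/`@value`, `leaf-list`, `type`/`path`); the concrete values are illustrative rather than copied from a model.

```python
# Hypothetical shape of one table container as the describer sees it; values
# are illustrative, keys mirror what the script accesses.
example_table = {
    '@name': 'PORTCHANNEL',
    'description': {'text': 'PORTCHANNEL part of config_db.json'},
    'list': {
        '@name': 'PORTCHANNEL_LIST',
        'key': {'@value': 'name'},
        'leaf-list': {
            '@name': 'members',
            'type': {
                '@name': 'leafref',
                'path': {'@value': '/port:sonic-port/port:PORT/port:PORT_LIST/port:name'},
            },
        },
    },
}
```

A leafref path of this form is what `get_referenced_table_field()` reduces to the `PORT:name` reference asserted in the PORTCHANNEL expectation in the tests below.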
170 src/sonic-yang-mgmt/tests/test_cfghelp.py (Normal file)
@@ -0,0 +1,170 @@
import json
import subprocess
import os

from unittest import TestCase

output1="""\
Error: Table or all option is required
usage: sonic-cfg-help [-h] [-t TABLE] [-f FIELD] [-p PRINT_FORMAT] [-a]

Description of table name

optional arguments:
  -h, --help            show this help message and exit
  -t TABLE, --table TABLE
                        Table name
  -f FIELD, --field FIELD
                        Field
  -p PRINT_FORMAT, --print_format PRINT_FORMAT
                        Print format
  -a, --all             Print all tables
"""

techsupport_table_output="""\

AUTO_TECHSUPPORT
Description: AUTO_TECHSUPPORT part of config_db.json

+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========================+====================================================+=============+===========+=============+
| state | Knob to make techsupport invocation event-driven | | | |
| | based on core-dump generation | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| rate_limit_interval | Minimum time in seconds between two successive | | | |
| | techsupport invocations. Configure 0 to explicitly | | | |
| | disable | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| max_techsupport_limit | Max Limit in percentage for the cummulative size | | | |
| | of ts dumps. No cleanup is performed if the value | | | |
| | isn't configured or is 0.0 | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| max_core_limit | Max Limit in percentage for the cummulative size | | | |
| | of core dumps. No cleanup is performed if the | | | |
| | value isn't congiured or is 0.0 | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| available_mem_threshold | Memory threshold; 0 to disable techsupport | | 10.0 | |
| | invocation on memory usage threshold crossing | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| min_available_mem | Minimum Free memory (in MB) that should be | | 200 | |
| | available for the techsupport execution to start | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| since | Only collect the logs & core-dumps generated since | | | |
| | the time provided. A default value of '2 days ago' | | | |
| | is used if this value is not set explicitly or a | | | |
| | non-valid string is provided | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+

"""

techsupport_table_field_output="""\

AUTO_TECHSUPPORT
Description: AUTO_TECHSUPPORT part of config_db.json

+---------+--------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+==================================================+=============+===========+=============+
| state | Knob to make techsupport invocation event-driven | | | |
| | based on core-dump generation | | | |
+---------+--------------------------------------------------+-------------+-----------+-------------+

"""

portchannel_table_field_output="""\

PORTCHANNEL
Description: PORTCHANNEL part of config_db.json

key - name
+---------+-------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+===========================================+=============+===========+=============+
| members | The field contains list of unique members | | | PORT:name |
+---------+-------------------------------------------+-------------+-----------+-------------+

"""

dscp_to_tc_table_field_output="""\

DSCP_TO_TC_MAP
Description: DSCP_TO_TC_MAP part of config_db.json

key - name
+---------+------------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+======================================================+=============+===========+=============+
| name | | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+
| dscp | This field is for storing mapping between two fields | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+
| tc | This field is for storing mapping between two fields | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+

"""

acl_rule_table_field_output="""\

ACL_RULE
Description: ACL_RULE part of config_db.json

key - ACL_TABLE_NAME:RULE_NAME
+-----------+-----------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+===========+=========================================+=============+===========+=============+
| ICMP_TYPE | Mutually exclusive in group icmp | | | |
| | when IP_TYPE in ANY,IP,IPV4,IPv4ANY,ARP | | | |
+-----------+-----------------------------------------+-------------+-----------+-------------+

"""

class TestCfgHelp(TestCase):

    def setUp(self):
        self.test_dir = os.path.dirname(os.path.realpath(__file__))
        self.script_file = os.path.join(self.test_dir, '..', 'sonic-cfg-help')

    def run_script(self, argument):
        print('\n Running sonic-cfg-help ' + argument)
        output = subprocess.check_output(self.script_file + ' ' + argument, shell=True)

        output = output.decode()

        linecount = output.strip().count('\n')
        if linecount <= 0:
            print(' Output: ' + output.strip())
        else:
            print(' Output: ({0} lines, {1} bytes)'.format(linecount + 1, len(output)))
        return output

    def test_dummy_run(self):
        argument = ''
        output = self.run_script(argument)
        self.assertEqual(output, output1)

    def test_single_table(self):
        argument = '-t AUTO_TECHSUPPORT'
        output = self.run_script(argument)
        self.assertEqual(output, techsupport_table_output)

    def test_single_field(self):
        argument = '-t AUTO_TECHSUPPORT -f state'
        output = self.run_script(argument)
        self.assertEqual(output, techsupport_table_field_output)

    def test_leaf_list(self):
        argument = '-t PORTCHANNEL -f members'
        output = self.run_script(argument)
        self.assertEqual(output, portchannel_table_field_output)

    def test_leaf_list_map(self):
        argument = '-t DSCP_TO_TC_MAP'
        output = self.run_script(argument)
        self.maxDiff = None
        self.assertEqual(output, dscp_to_tc_table_field_output)

    def test_when_condition(self):
        argument = '-t ACL_RULE -f ICMP_TYPE'
        output = self.run_script(argument)
        self.assertEqual(output, acl_rule_table_field_output)