
Merge branch 'master' into route-flow-counter
Junchao-Mellanox committed Mar 2, 2022
2 parents 2817f19 + d440df7 commit c826391
Showing 27 changed files with 3,715 additions and 134 deletions.
1 change: 1 addition & 0 deletions .azure-pipelines/build-docker-sonic-vs-template.yml
@@ -29,6 +29,7 @@ jobs:
runVersion: 'latestFromBranch'
runBranch: 'refs/heads/master'
allowPartiallySucceededBuilds: true
allowFailedBuilds: true
displayName: "Download docker-sonic-vs artifact"

- script: |
13 changes: 9 additions & 4 deletions .azure-pipelines/test-docker-sonic-vs-template.yml
@@ -1,7 +1,7 @@
parameters:
- name: timeout
type: number
default: 240
default: 360

- name: log_artifact_name
type: string
@@ -11,10 +11,13 @@ jobs:
displayName: vstest
timeoutInMinutes: ${{ parameters.timeout }}

pool:
vmImage: 'ubuntu-20.04'
pool: sonic-common

steps:
- script: |
ls -A1 | xargs -I{} sudo rm -rf {}
displayName: "Clean workspace"
- checkout: self
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker-sonic-vs
@@ -38,14 +41,16 @@ jobs:
artifact: sonic-swss-pytests
runVersion: 'latestFromBranch'
runBranch: 'refs/heads/master'
allowPartiallySucceededBuilds: true
displayName: "Download sonic swss pytests"

- checkout: self
displayName: "Checkout sonic-utilities"

- script: |
set -x
sudo .azure-pipelines/build_and_install_module.sh
set -e
sudo ./sonic-utilities/.azure-pipelines/build_and_install_module.sh
sudo apt-get install -y libhiredis0.14
sudo dpkg -i --force-confask,confnew ../libswsscommon_1.0.0_amd64.deb || apt-get install -f
14 changes: 11 additions & 3 deletions azure-pipelines.yml
@@ -20,7 +20,7 @@ stages:
- job:
displayName: "Python3"
variables:
DIFF_COVER_CHECK_THRESHOLD: 0
DIFF_COVER_CHECK_THRESHOLD: 50
DIFF_COVER_ENABLE: 'true'
pool:
vmImage: ubuntu-20.04
@@ -29,14 +29,22 @@
image: sonicdev-microsoft.azurecr.io:443/sonic-slave-buster:latest

steps:
- script: |
sourceBranch=$(Build.SourceBranchName)
if [[ "$(Build.Reason)" == "PullRequest" ]];then
sourceBranch=$(System.PullRequest.TargetBranch)
fi
echo "Download artifact branch: $sourceBranch"
echo "##vso[task.setvariable variable=sourceBranch]$sourceBranch"
displayName: "Get correct artifact downloading branch"
- task: DownloadPipelineArtifact@2
inputs:
source: specific
project: build
pipeline: 1
pipeline: 142
artifact: sonic-buildimage.vs
runVersion: 'latestFromBranch'
runBranch: 'refs/heads/master'
runBranch: 'refs/heads/$(sourceBranch)'
displayName: "Download artifacts from latest sonic-buildimage build"

- script: |
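For context (not part of the diff): the step added above picks which branch's sonic-buildimage artifacts to download. A minimal Python sketch of the same decision, using illustrative names only:

def pick_artifact_branch(build_reason, source_branch_name, pr_target_branch):
    # Mirrors the inline pipeline script: pull-request builds download artifacts
    # built from the PR's target branch; other builds use the triggering branch.
    if build_reason == "PullRequest":
        return pr_target_branch
    return source_branch_name

assert pick_artifact_branch("PullRequest", "route-flow-counter", "master") == "master"
assert pick_artifact_branch("IndividualCI", "master", "") == "master"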
8 changes: 4 additions & 4 deletions config/main.py
@@ -2499,13 +2499,13 @@ def add_snmp_agent_address(ctx, agentip, port, vrf):
ipaddresses = netifaces.ifaddresses(intf)
if ip_family[ip.version] in ipaddresses:
for ipaddr in ipaddresses[ip_family[ip.version]]:
if agentip == ipaddr['addr']:
if agentip.lower() == ipaddr['addr'].lower():
found = 1
break;
break
if found == 1:
break;
break
else:
click.echo("IP addfress is not available")
click.echo("IP address is not available")
return

key = agentip+'|'
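A small illustration (not from the commit) of why the comparison is now case-insensitive: netifaces may report an IPv6 address in a different letter case than the user-supplied agent IP, so an exact string comparison can miss the match.

agentip = "FC00::1"
ipaddr = {"addr": "fc00::1"}

print(agentip == ipaddr["addr"])                  # False: exact comparison misses the match
print(agentip.lower() == ipaddr["addr"].lower())  # True: the new check matches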
76 changes: 76 additions & 0 deletions config/muxcable.py
@@ -240,6 +240,14 @@ def lookup_statedb_and_update_configdb(db, per_npu_statedb, config_db, port, sta
else:
port_status_dict[port_name] = 'OK'

def update_configdb_pck_loss_data(config_db, port, val):
configdb_state = get_value_for_key_in_config_tbl(config_db, port, "state", "MUX_CABLE")
ipv4_value = get_value_for_key_in_config_tbl(config_db, port, "server_ipv4", "MUX_CABLE")
ipv6_value = get_value_for_key_in_config_tbl(config_db, port, "server_ipv6", "MUX_CABLE")

config_db.set_entry("MUX_CABLE", port, {"state": configdb_state,
"server_ipv4": ipv4_value, "server_ipv6": ipv6_value,
"pck_loss_data_reset": val})

# 'muxcable' command ("config muxcable mode <port|all> active|auto")
@muxcable.command()
@@ -333,6 +341,74 @@ def mode(db, state, port, json_output):
sys.exit(CONFIG_SUCCESSFUL)


#'muxcable' command ("config muxcable packetloss reset <port|all>")
@muxcable.command()
@click.argument('action', metavar='<action_name>', required=True, type=click.Choice(["reset"]))
@click.argument('port', metavar='<port_name>', required=True, default=None)
@clicommon.pass_db
def packetloss(db, action, port):
"""config muxcable packetloss reset"""

port = platform_sfputil_helper.get_interface_name(port, db)

port_table_keys = {}
mux_cable_table_keys = {}
pck_loss_table_keys = {}
per_npu_configdb = {}
per_npu_statedb = {}

# Getting all front asic namespace and correspding config and state DB connector

namespaces = multi_asic.get_front_end_namespaces()
for namespace in namespaces:
asic_id = multi_asic.get_asic_index_from_namespace(namespace)
# replace these with correct macros
per_npu_configdb[asic_id] = ConfigDBConnector(use_unix_socket_path=True, namespace=namespace)
per_npu_configdb[asic_id].connect()
per_npu_statedb[asic_id] = swsscommon.SonicV2Connector(use_unix_socket_path=True, namespace=namespace)
per_npu_statedb[asic_id].connect(per_npu_statedb[asic_id].STATE_DB)

port_table_keys[asic_id] = per_npu_statedb[asic_id].keys(
per_npu_statedb[asic_id].STATE_DB, 'LINK_PROBE_STATS|*')
mux_cable_table_keys[asic_id] = per_npu_configdb[asic_id].get_table("MUX_CABLE").keys() # keys here are port names
if port is not None and port != "all":

asic_index = None
if platform_sfputil is not None:
asic_index = platform_sfputil.get_asic_id_for_logical_port(port)
if asic_index is None:
# TODO this import is only for unit test purposes, and should be removed once sonic_platform_base
# is fully mocked
import sonic_platform_base.sonic_sfp.sfputilhelper
asic_index = sonic_platform_base.sonic_sfp.sfputilhelper.SfpUtilHelper().get_asic_id_for_logical_port(port)
if asic_index is None:
click.echo("Got invalid asic index for port {}, cant retreive mux status".format(port))
sys.exit(CONFIG_FAIL)

if per_npu_statedb[asic_index] is not None:
pck_loss_table_keys = port_table_keys[asic_index]
logical_key = "LINK_PROBE_STATS|{}".format(port)
if logical_key in pck_loss_table_keys:
update_configdb_pck_loss_data(per_npu_configdb[asic_index], port, "reset")
sys.exit(CONFIG_SUCCESSFUL)
else:
click.echo("this is not a valid port present on pck_loss_stats".format(port))
sys.exit(CONFIG_FAIL)
else:
click.echo("there is not a valid asic table for this asic_index".format(asic_index))
sys.exit(CONFIG_FAIL)

elif port == "all" and port is not None:

for namespace in namespaces:
asic_id = multi_asic.get_asic_index_from_namespace(namespace)
for key in port_table_keys[asic_id]:
logical_port = key.split("|")[1]
if logical_port in mux_cable_table_keys[asic_id]:
update_configdb_pck_loss_data(per_npu_configdb[asic_id], logical_port, "reset")

sys.exit(CONFIG_SUCCESSFUL)

@muxcable.group(cls=clicommon.AbbreviationGroup)
def prbs():
"""Enable/disable PRBS mode on a port"""
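A minimal sketch (not part of the commit) of the CONFIG_DB write performed by the new "config muxcable packetloss reset <port>" command: update_configdb_pck_loss_data() rewrites the port's MUX_CABLE entry with its existing fields plus a reset marker. Field values below are illustrative.

entry_after_reset = {
    "state": "auto",                   # preserved from the existing CONFIG_DB entry
    "server_ipv4": "192.0.2.2/32",     # preserved
    "server_ipv6": "2001:db8::2/128",  # preserved
    "pck_loss_data_reset": "reset",    # set by the packetloss reset command
}
print(entry_after_reset)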
4 changes: 4 additions & 0 deletions config/vlan.py
@@ -24,6 +24,10 @@ def add_vlan(db, vid):
ctx.fail("Invalid VLAN ID {} (1-4094)".format(vid))

vlan = 'Vlan{}'.format(vid)

if vid == 1:
ctx.fail("{} is default VLAN".format(vlan))

if clicommon.check_if_vlanid_exist(db.cfgdb, vlan):
ctx.fail("{} already exists".format(vlan))

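For illustration only (not from the commit), the effect of the new guard in add_vlan(): "config vlan add 1" is now rejected because VLAN 1 is the default VLAN.

vid = 1
vlan = 'Vlan{}'.format(vid)
if vid == 1:
    # add_vlan() now aborts here instead of creating the VLAN
    print("{} is default VLAN".format(vlan))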
6 changes: 3 additions & 3 deletions counterpoll/main.py
@@ -411,13 +411,13 @@ def show():
if rif_info:
data.append(["RIF_STAT", rif_info.get("POLL_INTERVAL", DEFLT_1_SEC), rif_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if queue_wm_info:
data.append(["QUEUE_WATERMARK_STAT", queue_wm_info.get("POLL_INTERVAL", DEFLT_10_SEC), queue_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
data.append(["QUEUE_WATERMARK_STAT", queue_wm_info.get("POLL_INTERVAL", DEFLT_60_SEC), queue_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if pg_wm_info:
data.append(["PG_WATERMARK_STAT", pg_wm_info.get("POLL_INTERVAL", DEFLT_10_SEC), pg_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
data.append(["PG_WATERMARK_STAT", pg_wm_info.get("POLL_INTERVAL", DEFLT_60_SEC), pg_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if pg_drop_info:
data.append(['PG_DROP_STAT', pg_drop_info.get("POLL_INTERVAL", DEFLT_10_SEC), pg_drop_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if buffer_pool_wm_info:
data.append(["BUFFER_POOL_WATERMARK_STAT", buffer_pool_wm_info.get("POLL_INTERVAL", DEFLT_10_SEC), buffer_pool_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
data.append(["BUFFER_POOL_WATERMARK_STAT", buffer_pool_wm_info.get("POLL_INTERVAL", DEFLT_60_SEC), buffer_pool_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if acl_info:
data.append([ACL, pg_drop_info.get("POLL_INTERVAL", DEFLT_10_SEC), acl_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if tunnel_info:
72 changes: 68 additions & 4 deletions generic_config_updater/gu_common.py
@@ -273,7 +273,13 @@ def create_path(self, tokens):
return JsonPointer.from_parts(tokens).path

def has_path(self, doc, path):
return JsonPointer(path).get(doc, default=None) is not None
return self.get_from_path(doc, path) is not None

def get_from_path(self, doc, path):
return JsonPointer(path).get(doc, default=None)

def is_config_different(self, path, current, target):
return self.get_from_path(current, path) != self.get_from_path(target, path)

def get_xpath_tokens(self, xpath):
"""
@@ -360,6 +366,9 @@ def create_xpath(self, tokens):

return f"{PathAddressing.XPATH_SEPARATOR}{PathAddressing.XPATH_SEPARATOR.join(str(t) for t in tokens)}"

def _create_sonic_yang_with_loaded_models(self):
return self.config_wrapper.create_sonic_yang_with_loaded_models()

def find_ref_paths(self, path, config):
"""
Finds the paths referencing any line under the given 'path' within the given 'config'.
@@ -401,7 +410,7 @@ def find_ref_paths(self, path, config):
return self._find_leafref_paths(path, config)

def _find_leafref_paths(self, path, config):
sy = self.config_wrapper.create_sonic_yang_with_loaded_models()
sy = self._create_sonic_yang_with_loaded_models()

tmp_config = copy.deepcopy(config)

@@ -547,8 +556,16 @@ def _get_xpath_tokens_from_leaf(self, model, token_index, path_tokens, config):
# /module-name:container/leaf-list[.='val']
# Source: Check examples in https://netopeer.liberouter.org/doc/libyang/master/html/howto_x_path.html
return [f"{token}[.='{value}']"]

# checking 'uses' statement
if not isinstance(config[token], list): # leaf-list under uses is not supported yet in sonic_yang
table = path_tokens[0]
uses_leaf_model = self._get_uses_leaf_model(model, table, token)
if uses_leaf_model:
return [token]

raise ValueError("Token not found")
raise ValueError(f"Path token not found.\n model: {model}\n token_index: {token_index}\n " + \
f"path_tokens: {path_tokens}\n config: {config}")

def _extractKey(self, tableKey, keys):
keyList = keys.split()
@@ -712,7 +729,15 @@ def _get_path_tokens_from_leaf(self, model, token_index, xpath_tokens, config):
list_idx = list_config.index(leaf_list_value)
return [leaf_list_name, list_idx]

raise Exception("no leaf")
# checking 'uses' statement
if not isinstance(config[leaf_list_name], list): # leaf-list under uses is not supported yet in sonic_yang
table = xpath_tokens[1]
uses_leaf_model = self._get_uses_leaf_model(model, table, token)
if uses_leaf_model:
return [token]

raise ValueError(f"Xpath token not found.\n model: {model}\n token_index: {token_index}\n " + \
f"xpath_tokens: {xpath_tokens}\n config: {config}")

def _extract_key_dict(self, list_token):
# Example: VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']
Expand Down Expand Up @@ -746,6 +771,45 @@ def _get_model(self, model, name):

return None

def _get_uses_leaf_model(self, model, table, token):
"""
Getting leaf model in uses model matching the given token.
"""
uses_s = model.get('uses')
if not uses_s:
return None

# a model can be a single dict or a list of dictionaries, unify to a list of dictionaries
if not isinstance(uses_s, list):
uses_s = [uses_s]

sy = self._create_sonic_yang_with_loaded_models()
# find yang module for current table
table_module = sy.confDbYangMap[table]['yangModule']
# uses Example: "@name": "bgpcmn:sonic-bgp-cmn"
for uses in uses_s:
if not isinstance(uses, dict):
raise GenericConfigUpdaterError(f"'uses' is expected to be a dictionary found '{type(uses)}'.\n" \
f" uses: {uses}\n model: {model}\n table: {table}\n token: {token}")

# Assume ':' means reference to another module
if ':' in uses['@name']:
name_parts = uses['@name'].split(':')
prefix = name_parts[0].strip()
uses_module_name = sy._findYangModuleFromPrefix(prefix, table_module)
grouping = name_parts[-1].strip()
else:
uses_module_name = table_module['@name']
grouping = uses['@name']

leafs = sy.preProcessedYang['grouping'][uses_module_name][grouping]

leaf_model = self._get_model(leafs, token)
if leaf_model:
return leaf_model

return None

class TitledLogger(logger.Logger):
def __init__(self, syslog_identifier, title, verbose, print_all_to_console):
super().__init__(syslog_identifier)
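A minimal, self-contained sketch (not part of the commit) of what the new get_from_path()/is_config_different() helpers do: resolve a JSON-pointer path against the current and target config documents and compare the results. The table contents below are illustrative.

from jsonpointer import JsonPointer

def get_from_path(doc, path):
    # Same resolution the diff uses: a missing path yields None
    return JsonPointer(path).get(doc, default=None)

def is_config_different(path, current, target):
    return get_from_path(current, path) != get_from_path(target, path)

current = {"VLAN": {"Vlan1000": {"vlanid": "1000", "dhcp_servers": ["192.0.2.1"]}}}
target = {"VLAN": {"Vlan1000": {"vlanid": "1000", "dhcp_servers": ["192.0.2.9"]}}}

print(is_config_different("/VLAN/Vlan1000/vlanid", current, target))        # False
print(is_config_different("/VLAN/Vlan1000/dhcp_servers", current, target))  # True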
(Diffs for the remaining changed files are not shown.)
