Commit

Merge branch 'master' into bugfix_9499_and_stdin

cchoate54 committed Mar 31, 2023
2 parents dcec187 + 10f31ea commit e71be0d
Showing 11 changed files with 284 additions and 327 deletions.
28 changes: 12 additions & 16 deletions config/main.py
@@ -1163,14 +1163,18 @@ def validate_gre_type(ctx, _, value):
     except ValueError:
         raise click.UsageError("{} is not a valid GRE type".format(value))

-def validate_config_file(file):
+def validate_config_file(file, remove_tmp_file=False):
     """
-    A validator to check config files for syntax errors
+    A validator to check config files for syntax errors. If an exception is raised,
+    use remove_tmp_file to determine if a temporary file was used to store
+    /dev/stdin contents and should be deleted.
     """
     try:
         # Load golden config json
         read_json_file(file)
     except Exception as e:
+        if remove_tmp_file:
+            os.remove(file)
         click.secho("Bad format: json file '{}' broken.\n{}".format(file, str(e)),
                     fg='magenta')
         sys.exit(1)
@@ -1591,21 +1595,17 @@ def reload(db, filename, yes, load_sysinfo, no_service_restart, force, file_form
     if not sys.stdin.isatty():
         # Pathway to store /dev/stdin contents in a temporary file
         TMP_FILE = os.path.join('/', "tmp", f"tmp_config_stdin_{str(uuid.uuid4())}.json")

         if os.path.exists(TMP_FILE):
             click.secho("Unable to validate '{}' contents".format(file),
                         fg='magenta')
             sys.exit(1)

         with open(file, 'r') as input_file, open(TMP_FILE, 'w') as tmp:
             for line in input_file:
                 tmp.write(line)
-        try:
-            # Load golden config json
-            read_json_file(file)
-        except Exception as e:
-            os.remove(TMP_FILE)
-            click.secho("Bad format: json file '{}' broken.\n{}".format(file, str(e)),
-                        fg='magenta')
-            sys.exit(1)
-
+        validate_config_file(TMP_FILE, remove_tmp_file=True)
         cfg_file_dict[inst] = [TMP_FILE, namespace, True]
     else:
         validate_config_file(file)
@@ -1790,12 +1790,6 @@ def load_minigraph(db, no_service_restart, traffic_shift_away, override_config,
     if os.path.isfile('/etc/sonic/acl.json'):
         clicommon.run_command("acl-loader update full /etc/sonic/acl.json", display_cmd=True)

-    # get the device type
-    device_type = _get_device_type()
-
-    # Load backend acl
-    load_backend_acl(db.cfgdb, device_type)
-
     # Load port_config.json
     try:
         load_port_config(db.cfgdb, '/etc/sonic/port_config.json')
@@ -1805,6 +1799,8 @@ def load_minigraph(db, no_service_restart, traffic_shift_away, override_config,
     # generate QoS and Buffer configs
     clicommon.run_command("config qos reload --no-dynamic-buffer --no-delay", display_cmd=True)

+    # get the device type
+    device_type = _get_device_type()
     if device_type != 'MgmtToRRouter' and device_type != 'MgmtTsToR' and device_type != 'BmcMgmtToRRouter' and device_type != 'EPMS':
         clicommon.run_command("pfcwd start_default", display_cmd=True)

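Taken together, the two config/main.py hunks above let config reload accept piped /dev/stdin input: the stream is copied to a throwaway temp file (stdin can only be read once), and the shared validator now cleans that file up on failure. A minimal standalone sketch of the same pattern, with illustrative names rather than the actual sonic-utilities module:

import json
import os
import sys
import uuid

def validate_config_file(path, remove_tmp_file=False):
    """Exit with an error if 'path' is not valid JSON; optionally delete it first."""
    try:
        with open(path) as f:
            json.load(f)
    except (OSError, ValueError) as e:
        if remove_tmp_file:
            os.remove(path)  # throwaway copy of stdin; don't litter /tmp on failure
        sys.exit("Bad format: json file '{}' broken.\n{}".format(path, e))

if not sys.stdin.isatty():
    # stdin is consumed on first read, so persist it under a unique name first
    tmp_file = os.path.join('/', 'tmp', 'tmp_config_stdin_{}.json'.format(uuid.uuid4()))
    with open('/dev/stdin') as src, open(tmp_file, 'w') as dst:
        dst.write(src.read())
    validate_config_file(tmp_file, remove_tmp_file=True)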
14 changes: 7 additions & 7 deletions scripts/dropstat
@@ -11,7 +11,7 @@
 # - Refactor calls to COUNTERS_DB to reduce redundancy
 # - Cache DB queries to reduce # of expensive queries

-import json
+import _pickle as pickle
 import argparse
 import os
 import socket
@@ -117,10 +117,10 @@ class DropStat(object):
         """

         try:
-            json.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
-                      open(self.port_drop_stats_file, 'w+'))
-            json.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
-                      open(self.switch_drop_stats_file, 'w+'))
+            pickle.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
+                        open(self.port_drop_stats_file, 'wb+'))
+            pickle.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
+                        open(self.switch_drop_stats_file, 'wb+'))
         except IOError as e:
             print(e)
             sys.exit(e.errno)
@@ -135,7 +135,7 @@ class DropStat(object):

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.port_drop_stats_file):
-            port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))
+            port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))

         counters = self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP, group, counter_type)
         headers = std_port_description_header + self.gather_headers(counters, DEBUG_COUNTER_PORT_STAT_MAP)
@@ -162,7 +162,7 @@

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.switch_drop_stats_file):
-            switch_drop_ckpt = json.load(open(self.switch_drop_stats_file, 'r'))
+            switch_drop_ckpt = pickle.load(open(self.switch_drop_stats_file, 'rb'))

         counters = self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP, group, counter_type)
         headers = std_switch_description_header + self.gather_headers(counters, DEBUG_COUNTER_SWITCH_STAT_MAP)
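The dropstat change swaps json for the C-accelerated _pickle module for its checkpoint files, and every open() mode gains a 'b' because pickle emits a binary stream rather than text. A quick round-trip sketch with illustrative data (not the script's real counter tables):

import _pickle as pickle  # C-accelerated pickle, as imported above

counters = {'Ethernet0': {'RX_DRP': 12, 'TX_DRP': 0}}

# pickle writes bytes, so checkpoints must be opened in binary mode
# ('wb+'/'rb'); dumping through a text-mode handle raises TypeError.
with open('/tmp/drop_ckpt_demo', 'wb+') as f:
    pickle.dump(counters, f)

with open('/tmp/drop_ckpt_demo', 'rb') as f:
    assert pickle.load(f) == counters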
10 changes: 5 additions & 5 deletions scripts/flow_counters_stat
@@ -2,7 +2,7 @@

 import argparse
 import os
-import json
+import _pickle as pickle
 import sys

 from natsort import natsorted
@@ -185,8 +185,8 @@ class FlowCounterStats(object):
             if os.path.exists(self.data_file):
                 os.remove(self.data_file)

-            with open(self.data_file, 'w') as f:
-                json.dump(data, f)
+            with open(self.data_file, 'wb') as f:
+                pickle.dump(data, f)
         except IOError as e:
             print('Failed to save statistic - {}'.format(repr(e)))

@@ -200,8 +200,8 @@ class FlowCounterStats(object):
             return None

         try:
-            with open(self.data_file, 'r') as f:
-                data = json.load(f)
+            with open(self.data_file, 'rb') as f:
+                data = pickle.load(f)
         except IOError as e:
             print('Failed to load statistic - {}'.format(repr(e)))
             return None
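flow_counters_stat gets the same json-to-pickle substitution: the cache is saved by removing any stale file and dumping a fresh snapshot, and loaded with a None fallback on I/O errors. A condensed sketch of that save/load pairing under the new binary format (path and data shape are illustrative):

import _pickle as pickle
import os

DATA_FILE = '/tmp/flow_counters_demo.data'  # illustrative cache path

def save(data):
    try:
        if os.path.exists(DATA_FILE):
            os.remove(DATA_FILE)  # drop the stale snapshot first, as above
        with open(DATA_FILE, 'wb') as f:
            pickle.dump(data, f)
    except IOError as e:
        print('Failed to save statistic - {}'.format(repr(e)))

def load():
    if not os.path.exists(DATA_FILE):
        return None
    try:
        with open(DATA_FILE, 'rb') as f:
            return pickle.load(f)
    except IOError as e:
        print('Failed to load statistic - {}'.format(repr(e)))
        return None

save({'Rule1': (100, 2048)})
print(load())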
64 changes: 32 additions & 32 deletions scripts/intfstat
@@ -6,7 +6,7 @@
 #
 #####################################################################

-import json
+import _pickle as pickle
 import argparse
 import datetime
 import sys
@@ -28,7 +28,7 @@ from collections import namedtuple, OrderedDict
 from natsort import natsorted
 from tabulate import tabulate
 from utilities_common.netstat import ns_diff, table_as_json, STATUS_NA, format_brate, format_prate
-from utilities_common.cli import json_serial, UserCache
+from utilities_common.cli import UserCache
 from swsscommon.swsscommon import SonicV2Connector

 nstat_fields = (
@@ -96,7 +96,7 @@ class Intfstat(object):
             counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, counter_name)
             if counter_data:
                 fields[pos] = str(counter_data)
-        cntr = NStats._make(fields)._asdict()
+        cntr = NStats._make(fields)
         return cntr

     def get_rates(table_id):
@@ -153,14 +153,14 @@ class Intfstat(object):
             rates = ratestat_dict.get(key, RateStats._make([STATUS_NA] * len(rates_key_list)))

             table.append((key,
-                          data['rx_p_ok'],
+                          data.rx_p_ok,
                           format_brate(rates.rx_bps),
                           format_prate(rates.rx_pps),
-                          data['rx_p_err'],
-                          data['tx_p_ok'],
+                          data.rx_p_err,
+                          data.tx_p_ok,
                           format_brate(rates.tx_bps),
                           format_prate(rates.tx_pps),
-                          data['tx_p_err']))
+                          data.tx_p_err))

         if use_json:
             print(table_as_json(table, header))
@@ -186,24 +186,24 @@

         if old_cntr is not None:
             table.append((key,
-                          ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
+                          ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
                           format_brate(rates.rx_bps),
                           format_prate(rates.rx_pps),
-                          ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                          ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
+                          ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                          ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
                           format_brate(rates.tx_bps),
                           format_prate(rates.tx_pps),
-                          ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err'])))
+                          ns_diff(cntr.tx_p_err, old_cntr.tx_p_err)))
         else:
             table.append((key,
-                          cntr['rx_p_ok'],
+                          cntr.rx_p_ok,
                           format_brate(rates.rx_bps),
                           format_prate(rates.rx_pps),
-                          cntr['rx_p_err'],
-                          cntr['tx_p_ok'],
+                          cntr.rx_p_err,
+                          cntr.tx_p_ok,
                           format_brate(rates.tx_bps),
                           format_prate(rates.tx_pps),
-                          cntr['tx_p_err']))
+                          cntr.tx_p_err))

         if use_json:
             print(table_as_json(table, header))
@@ -229,17 +229,17 @@

         if cnstat_old_dict and cnstat_old_dict.get(rif):
             old_cntr = cnstat_old_dict.get(rif)
-            body = body % (ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
-                           ns_diff(cntr['rx_b_ok'], old_cntr['rx_b_ok']),
-                           ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                           ns_diff(cntr['rx_b_err'], old_cntr['rx_b_err']),
-                           ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
-                           ns_diff(cntr['tx_b_ok'], old_cntr['tx_b_ok']),
-                           ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err']),
-                           ns_diff(cntr['tx_b_err'], old_cntr['tx_b_err']))
+            body = body % (ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
+                           ns_diff(cntr.rx_b_ok, old_cntr.rx_b_ok),
+                           ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                           ns_diff(cntr.rx_b_err, old_cntr.rx_b_err),
+                           ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
+                           ns_diff(cntr.tx_b_ok, old_cntr.tx_b_ok),
+                           ns_diff(cntr.tx_p_err, old_cntr.tx_p_err),
+                           ns_diff(cntr.tx_b_err, old_cntr.tx_b_err))
         else:
-            body = body % (cntr['rx_p_ok'], cntr['rx_b_ok'], cntr['rx_p_err'], cntr['rx_b_err'],
-                           cntr['tx_p_ok'], cntr['tx_b_ok'], cntr['tx_p_err'], cntr['tx_b_err'])
+            body = body % (cntr.rx_p_ok, cntr.rx_b_ok, cntr.rx_p_err, cntr.rx_b_err,
+                           cntr.tx_p_ok, cntr.tx_b_ok, cntr.tx_p_err, cntr.tx_b_err)

         print(header)
         print(body)
@@ -305,20 +305,20 @@ def main():
             if tag_name is not None:
                 if os.path.isfile(cnstat_fqn_general_file):
                     try:
-                        general_data = json.load(open(cnstat_fqn_general_file, 'r'))
+                        general_data = pickle.load(open(cnstat_fqn_general_file, 'rb'))
                         for key, val in cnstat_dict.items():
                             general_data[key] = val
-                        json.dump(general_data, open(cnstat_fqn_general_file, 'w'))
+                        pickle.dump(general_data, open(cnstat_fqn_general_file, 'wb'))
                     except IOError as e:
                         sys.exit(e.errno)
                 # Add the information also to tag specific file
                 if os.path.isfile(cnstat_fqn_file):
-                    data = json.load(open(cnstat_fqn_file, 'r'))
+                    data = pickle.load(open(cnstat_fqn_file, 'rb'))
                     for key, val in cnstat_dict.items():
                         data[key] = val
-                    json.dump(data, open(cnstat_fqn_file, 'w'))
+                    pickle.dump(data, open(cnstat_fqn_file, 'wb'))
                 else:
-                    json.dump(cnstat_dict, open(cnstat_fqn_file, 'w'), default=json_serial)
+                    pickle.dump(cnstat_dict, open(cnstat_fqn_file, 'wb'))
             except IOError as e:
                 sys.exit(e.errno)
         else:
@@ -330,9 +330,9 @@
             try:
                 cnstat_cached_dict = {}
                 if os.path.isfile(cnstat_fqn_file):
-                    cnstat_cached_dict = json.load(open(cnstat_fqn_file, 'r'))
+                    cnstat_cached_dict = pickle.load(open(cnstat_fqn_file, 'rb'))
                 else:
-                    cnstat_cached_dict = json.load(open(cnstat_fqn_general_file, 'r'))
+                    cnstat_cached_dict = pickle.load(open(cnstat_fqn_general_file, 'rb'))

                 print("Last cached time was " + str(cnstat_cached_dict.get('time')))
                 if interface_name:
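The intfstat hunks show what motivates the serializer swap: NStats now stays a namedtuple instead of being flattened with _asdict(), so counters are read as attributes (cntr.rx_p_ok) and the cache has to round-trip namedtuples. pickle preserves them natively, while json degrades a tuple to a plain list and needed the now-dropped json_serial default for non-JSON-serializable values. A small illustration, with the field set abbreviated from the script's nstat_fields:

import json
import _pickle as pickle
from collections import namedtuple

# Abbreviated stand-in for the script's NStats namedtuple
NStats = namedtuple('NStats', ('rx_p_ok', 'rx_p_err', 'tx_p_ok', 'tx_p_err'))

cntr = NStats._make(['100', '0', '42', '1'])

print(cntr.rx_p_ok)               # attribute access, as in the updated tables
print(cntr._asdict()['rx_p_ok'])  # the dict-style access this diff removes

print(json.dumps(cntr))           # json flattens it to ["100", "0", "42", "1"]

# pickle keeps the namedtuple type intact across a dump/load cycle
blob = pickle.dumps({'Ethernet0': cntr})
assert pickle.loads(blob)['Ethernet0'].tx_p_ok == '42'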
(Diffs for the remaining 7 changed files are not shown.)
