Commit 1d01028: Merge branch 'master' into bugfix_9499_and_stdin

cchoate54 committed Mar 30, 2023
2 parents: dcec187 + 10f31ea
Showing 11 changed files with 275 additions and 321 deletions.
config/main.py: 3 additions & 10 deletions

@@ -1591,10 +1591,7 @@ def reload(db, filename, yes, load_sysinfo, no_service_restart, force, file_form
     if not sys.stdin.isatty():
         # Pathway to store /dev/stdin contents in a temporary file
         TMP_FILE = os.path.join('/', "tmp", f"tmp_config_stdin_{str(uuid.uuid4())}.json")
-        if os.path.exists(TMP_FILE):
-            click.secho("Unable to validate '{}' contents".format(file),
-                        fg='magenta')
-            sys.exit(1)
+
         with open(file, 'r') as input_file, open(TMP_FILE, 'w') as tmp:
             for line in input_file:
                 tmp.write(line)
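Note: the dropped os.path.exists() guard was effectively dead code, since a freshly generated uuid4 file name will not collide in practice. A minimal standalone sketch of the stdin-buffering pattern used above (the function and file names here are illustrative, not the exact code in config/main.py):

    import os
    import sys
    import uuid

    def buffer_stdin_to_tmp():
        # A fresh uuid4 in the file name makes a pre-existing path effectively
        # impossible, which is why the existence check could be dropped.
        tmp_file = os.path.join('/', 'tmp', f"tmp_config_stdin_{uuid.uuid4()}.json")
        with open(tmp_file, 'w') as tmp:
            for line in sys.stdin:
                tmp.write(line)
        return tmp_file

    if __name__ == '__main__':
        if not sys.stdin.isatty():
            print(buffer_stdin_to_tmp())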
@@ -1790,12 +1787,6 @@ def load_minigraph(db, no_service_restart, traffic_shift_away, override_config,
     if os.path.isfile('/etc/sonic/acl.json'):
         clicommon.run_command("acl-loader update full /etc/sonic/acl.json", display_cmd=True)

-    # get the device type
-    device_type = _get_device_type()
-
-    # Load backend acl
-    load_backend_acl(db.cfgdb, device_type)
-
     # Load port_config.json
     try:
         load_port_config(db.cfgdb, '/etc/sonic/port_config.json')
@@ -1805,6 +1796,8 @@ def load_minigraph(db, no_service_restart, traffic_shift_away, override_config,
     # generate QoS and Buffer configs
     clicommon.run_command("config qos reload --no-dynamic-buffer --no-delay", display_cmd=True)

+    # get the device type
+    device_type = _get_device_type()
     if device_type != 'MgmtToRRouter' and device_type != 'MgmtTsToR' and device_type != 'BmcMgmtToRRouter' and device_type != 'EPMS':
         clicommon.run_command("pfcwd start_default", display_cmd=True)

scripts/dropstat: 7 additions & 7 deletions

@@ -11,7 +11,7 @@
 # - Refactor calls to COUNTERS_DB to reduce redundancy
 # - Cache DB queries to reduce # of expensive queries

-import json
+import _pickle as pickle
 import argparse
 import os
 import socket
@@ -117,10 +117,10 @@ class DropStat(object):
         """

         try:
-            json.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
-                      open(self.port_drop_stats_file, 'w+'))
-            json.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
-                      open(self.switch_drop_stats_file, 'w+'))
+            pickle.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
+                        open(self.port_drop_stats_file, 'wb+'))
+            pickle.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
+                        open(self.switch_drop_stats_file, 'wb+'))
         except IOError as e:
             print(e)
             sys.exit(e.errno)
@@ -135,7 +135,7 @@ class DropStat(object):

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.port_drop_stats_file):
-            port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))
+            port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))

         counters = self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP, group, counter_type)
         headers = std_port_description_header + self.gather_headers(counters, DEBUG_COUNTER_PORT_STAT_MAP)
@@ -162,7 +162,7 @@ class DropStat(object):

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.switch_drop_stats_file):
-            switch_drop_ckpt = json.load(open(self.switch_drop_stats_file, 'r'))
+            switch_drop_ckpt = pickle.load(open(self.switch_drop_stats_file, 'rb'))

         counters = self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP, group, counter_type)
         headers = std_switch_description_header + self.gather_headers(counters, DEBUG_COUNTER_SWITCH_STAT_MAP)
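Note: the json-to-pickle switch also changes the open() modes, because pickle reads and writes bytes rather than text; 'w+' becomes 'wb+' and 'r' becomes 'rb'. A minimal roundtrip sketch of the checkpoint pattern (the data and path are illustrative):

    import _pickle as pickle
    import os
    import tempfile

    counts = {'Ethernet0': {'RX_DRP': 12}, 'Ethernet4': {'RX_DRP': 0}}

    path = os.path.join(tempfile.gettempdir(), 'drop-stats-demo')
    with open(path, 'wb+') as f:      # binary mode is required for pickle
        pickle.dump(counts, f)
    with open(path, 'rb') as f:
        checkpoint = pickle.load(f)

    assert checkpoint == counts
    os.remove(path)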
scripts/flow_counters_stat: 5 additions & 5 deletions

@@ -2,7 +2,7 @@

 import argparse
 import os
-import json
+import _pickle as pickle
 import sys

 from natsort import natsorted
@@ -185,8 +185,8 @@ class FlowCounterStats(object):
             if os.path.exists(self.data_file):
                 os.remove(self.data_file)

-            with open(self.data_file, 'w') as f:
-                json.dump(data, f)
+            with open(self.data_file, 'wb') as f:
+                pickle.dump(data, f)
         except IOError as e:
             print('Failed to save statistic - {}'.format(repr(e)))

@@ -200,8 +200,8 @@ class FlowCounterStats(object):
             return None

         try:
-            with open(self.data_file, 'r') as f:
-                data = json.load(f)
+            with open(self.data_file, 'rb') as f:
+                data = pickle.load(f)
         except IOError as e:
             print('Failed to load statistic - {}'.format(repr(e)))
             return None
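Note: one motivation for pickle here (and for dropping the json_serial default used elsewhere in this commit) is that json only handles JSON-native types, so values such as datetime needed a custom default= hook and still came back as strings, whereas pickle round-trips them unchanged. A small standard-library-only illustration:

    import _pickle as pickle
    import datetime
    import io
    import json

    snapshot = {'time': datetime.datetime.now(), 'rx_packets': 1024}

    try:
        json.dumps(snapshot)          # fails: datetime is not JSON serializable
    except TypeError as e:
        print('json needs a default= hook:', e)

    buf = io.BytesIO()
    pickle.dump(snapshot, buf)
    buf.seek(0)
    restored = pickle.load(buf)
    assert restored['time'] == snapshot['time']   # datetime preserved, not a string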
scripts/intfstat: 32 additions & 32 deletions

@@ -6,7 +6,7 @@
 #
 #####################################################################

-import json
+import _pickle as pickle
 import argparse
 import datetime
 import sys
@@ -28,7 +28,7 @@ from collections import namedtuple, OrderedDict
 from natsort import natsorted
 from tabulate import tabulate
 from utilities_common.netstat import ns_diff, table_as_json, STATUS_NA, format_brate, format_prate
-from utilities_common.cli import json_serial, UserCache
+from utilities_common.cli import UserCache
 from swsscommon.swsscommon import SonicV2Connector

 nstat_fields = (
@@ -96,7 +96,7 @@ class Intfstat(object):
             counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, counter_name)
             if counter_data:
                 fields[pos] = str(counter_data)
-        cntr = NStats._make(fields)._asdict()
+        cntr = NStats._make(fields)
         return cntr

     def get_rates(table_id):
@@ -153,14 +153,14 @@ class Intfstat(object):
             rates = ratestat_dict.get(key, RateStats._make([STATUS_NA] * len(rates_key_list)))

             table.append((key,
-                          data['rx_p_ok'],
+                          data.rx_p_ok,
                           format_brate(rates.rx_bps),
                           format_prate(rates.rx_pps),
-                          data['rx_p_err'],
-                          data['tx_p_ok'],
+                          data.rx_p_err,
+                          data.tx_p_ok,
                           format_brate(rates.tx_bps),
                           format_prate(rates.tx_pps),
-                          data['tx_p_err']))
+                          data.tx_p_err))

         if use_json:
             print(table_as_json(table, header))
@@ -186,24 +186,24 @@ class Intfstat(object):

             if old_cntr is not None:
                 table.append((key,
-                              ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
+                              ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
                               format_brate(rates.rx_bps),
                               format_prate(rates.rx_pps),
-                              ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                              ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
+                              ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                              ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
                               format_brate(rates.tx_bps),
                               format_prate(rates.tx_pps),
-                              ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err'])))
+                              ns_diff(cntr.tx_p_err, old_cntr.tx_p_err)))
             else:
                 table.append((key,
-                              cntr['rx_p_ok'],
+                              cntr.rx_p_ok,
                               format_brate(rates.rx_bps),
                               format_prate(rates.rx_pps),
-                              cntr['rx_p_err'],
-                              cntr['tx_p_ok'],
+                              cntr.rx_p_err,
+                              cntr.tx_p_ok,
                               format_brate(rates.tx_bps),
                               format_prate(rates.tx_pps),
-                              cntr['tx_p_err']))
+                              cntr.tx_p_err))

             if use_json:
                 print(table_as_json(table, header))
@@ -229,17 +229,17 @@ class Intfstat(object):

         if cnstat_old_dict and cnstat_old_dict.get(rif):
             old_cntr = cnstat_old_dict.get(rif)
-            body = body % (ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
-                           ns_diff(cntr['rx_b_ok'], old_cntr['rx_b_ok']),
-                           ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                           ns_diff(cntr['rx_b_err'], old_cntr['rx_b_err']),
-                           ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
-                           ns_diff(cntr['tx_b_ok'], old_cntr['tx_b_ok']),
-                           ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err']),
-                           ns_diff(cntr['tx_b_err'], old_cntr['tx_b_err']))
+            body = body % (ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
+                           ns_diff(cntr.rx_b_ok, old_cntr.rx_b_ok),
+                           ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                           ns_diff(cntr.rx_b_err, old_cntr.rx_b_err),
+                           ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
+                           ns_diff(cntr.tx_b_ok, old_cntr.tx_b_ok),
+                           ns_diff(cntr.tx_p_err, old_cntr.tx_p_err),
+                           ns_diff(cntr.tx_b_err, old_cntr.tx_b_err))
         else:
-            body = body % (cntr['rx_p_ok'], cntr['rx_b_ok'], cntr['rx_p_err'], cntr['rx_b_err'],
-                           cntr['tx_p_ok'], cntr['tx_b_ok'], cntr['tx_p_err'], cntr['tx_b_err'])
+            body = body % (cntr.rx_p_ok, cntr.rx_b_ok, cntr.rx_p_err, cntr.rx_b_err,
+                           cntr.tx_p_ok, cntr.tx_b_ok, cntr.tx_p_err, cntr.tx_b_err)

         print(header)
         print(body)
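Note: the access-pattern changes in this file all follow from keeping counters as NStats namedtuples instead of converting them with ._asdict(); pickle restores the namedtuple type as-is, so fields read as attributes rather than dict keys. A self-contained sketch of that change (the field list is abbreviated):

    from collections import namedtuple

    NStats = namedtuple('NStats', 'rx_p_ok, rx_p_err, tx_p_ok, tx_p_err')

    fields = ['100', '0', '200', '1']
    cntr = NStats._make(fields)     # was: NStats._make(fields)._asdict()

    print(cntr.rx_p_ok)             # attribute access; was cntr['rx_p_ok']
    print(cntr.tx_p_err)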
@@ -305,20 +305,20 @@ def main():
             if tag_name is not None:
                 if os.path.isfile(cnstat_fqn_general_file):
                     try:
-                        general_data = json.load(open(cnstat_fqn_general_file, 'r'))
+                        general_data = pickle.load(open(cnstat_fqn_general_file, 'rb'))
                         for key, val in cnstat_dict.items():
                             general_data[key] = val
-                        json.dump(general_data, open(cnstat_fqn_general_file, 'w'))
+                        pickle.dump(general_data, open(cnstat_fqn_general_file, 'wb'))
                     except IOError as e:
                         sys.exit(e.errno)
                 # Add the information also to tag specific file
                 if os.path.isfile(cnstat_fqn_file):
-                    data = json.load(open(cnstat_fqn_file, 'r'))
+                    data = pickle.load(open(cnstat_fqn_file, 'rb'))
                     for key, val in cnstat_dict.items():
                         data[key] = val
-                    json.dump(data, open(cnstat_fqn_file, 'w'))
+                    pickle.dump(data, open(cnstat_fqn_file, 'wb'))
                 else:
-                    json.dump(cnstat_dict, open(cnstat_fqn_file, 'w'), default=json_serial)
+                    pickle.dump(cnstat_dict, open(cnstat_fqn_file, 'wb'))
         except IOError as e:
             sys.exit(e.errno)
     else:
@@ -330,9 +330,9 @@ def main():
         try:
             cnstat_cached_dict = {}
             if os.path.isfile(cnstat_fqn_file):
-                cnstat_cached_dict = json.load(open(cnstat_fqn_file, 'r'))
+                cnstat_cached_dict = pickle.load(open(cnstat_fqn_file, 'rb'))
             else:
-                cnstat_cached_dict = json.load(open(cnstat_fqn_general_file, 'r'))
+                cnstat_cached_dict = pickle.load(open(cnstat_fqn_general_file, 'rb'))

             print("Last cached time was " + str(cnstat_cached_dict.get('time')))
             if interface_name:
scripts/pfcstat: 31 additions & 31 deletions

@@ -6,7 +6,7 @@
 #
 #####################################################################

-import json
+import _pickle as pickle
 import argparse
 import datetime
 import os.path
@@ -37,7 +37,7 @@ except KeyError:
 from utilities_common.netstat import ns_diff, STATUS_NA, format_number_with_comma
 from utilities_common import multi_asic as multi_asic_util
 from utilities_common import constants
-from utilities_common.cli import json_serial, UserCache
+from utilities_common.cli import UserCache


 PStats = namedtuple("PStats", "pfc0, pfc1, pfc2, pfc3, pfc4, pfc5, pfc6, pfc7")
@@ -101,7 +101,7 @@ class Pfcstat(object):
                 fields[pos] = STATUS_NA
             else:
                 fields[pos] = str(int(counter_data))
-        cntr = PStats._make(fields)._asdict()
+        cntr = PStats._make(fields)
         return cntr

     # Get the info from database
@@ -144,14 +144,14 @@ class Pfcstat(object):
             if key == 'time':
                 continue
             table.append((key,
-                          format_number_with_comma(data['pfc0']),
-                          format_number_with_comma(data['pfc1']),
-                          format_number_with_comma(data['pfc2']),
-                          format_number_with_comma(data['pfc3']),
-                          format_number_with_comma(data['pfc4']),
-                          format_number_with_comma(data['pfc5']),
-                          format_number_with_comma(data['pfc6']),
-                          format_number_with_comma(data['pfc7'])))
+                          format_number_with_comma(data.pfc0),
+                          format_number_with_comma(data.pfc1),
+                          format_number_with_comma(data.pfc2),
+                          format_number_with_comma(data.pfc3),
+                          format_number_with_comma(data.pfc4),
+                          format_number_with_comma(data.pfc5),
+                          format_number_with_comma(data.pfc6),
+                          format_number_with_comma(data.pfc7)))

         if rx:
             print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
@@ -173,24 +173,24 @@ class Pfcstat(object):

             if old_cntr is not None:
                 table.append((key,
-                              ns_diff(cntr['pfc0'], old_cntr['pfc0']),
-                              ns_diff(cntr['pfc1'], old_cntr['pfc1']),
-                              ns_diff(cntr['pfc2'], old_cntr['pfc2']),
-                              ns_diff(cntr['pfc3'], old_cntr['pfc3']),
-                              ns_diff(cntr['pfc4'], old_cntr['pfc4']),
-                              ns_diff(cntr['pfc5'], old_cntr['pfc5']),
-                              ns_diff(cntr['pfc6'], old_cntr['pfc6']),
-                              ns_diff(cntr['pfc7'], old_cntr['pfc7'])))
+                              ns_diff(cntr.pfc0, old_cntr.pfc0),
+                              ns_diff(cntr.pfc1, old_cntr.pfc1),
+                              ns_diff(cntr.pfc2, old_cntr.pfc2),
+                              ns_diff(cntr.pfc3, old_cntr.pfc3),
+                              ns_diff(cntr.pfc4, old_cntr.pfc4),
+                              ns_diff(cntr.pfc5, old_cntr.pfc5),
+                              ns_diff(cntr.pfc6, old_cntr.pfc6),
+                              ns_diff(cntr.pfc7, old_cntr.pfc7)))
             else:
                 table.append((key,
-                              format_number_with_comma(cntr['pfc0']),
-                              format_number_with_comma(cntr['pfc1']),
-                              format_number_with_comma(cntr['pfc2']),
-                              format_number_with_comma(cntr['pfc3']),
-                              format_number_with_comma(cntr['pfc4']),
-                              format_number_with_comma(cntr['pfc5']),
-                              format_number_with_comma(cntr['pfc6']),
-                              format_number_with_comma(cntr['pfc7'])))
+                              format_number_with_comma(cntr.pfc0),
+                              format_number_with_comma(cntr.pfc1),
+                              format_number_with_comma(cntr.pfc2),
+                              format_number_with_comma(cntr.pfc3),
+                              format_number_with_comma(cntr.pfc4),
+                              format_number_with_comma(cntr.pfc5),
+                              format_number_with_comma(cntr.pfc6),
+                              format_number_with_comma(cntr.pfc7)))

             if rx:
                 print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
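Note: the diff-print path subtracts the cached PStats snapshot from the current one queue by queue. A compact sketch of that per-queue delta, using a simplified stand-in for utilities_common.netstat.ns_diff (the real helper also handles N/A markers):

    from collections import namedtuple

    PStats = namedtuple('PStats', 'pfc0, pfc1, pfc2, pfc3, pfc4, pfc5, pfc6, pfc7')

    def ns_diff_simplified(new, old):
        # Stand-in for utilities_common.netstat.ns_diff.
        return '{:,}'.format(int(new) - int(old))

    cntr = PStats._make(['10', '0', '5', '0', '0', '0', '0', '2'])
    old_cntr = PStats._make(['4', '0', '5', '0', '0', '0', '0', '0'])

    row = tuple(ns_diff_simplified(getattr(cntr, f), getattr(old_cntr, f))
                for f in PStats._fields)
    print(row)   # ('6', '0', '0', '0', '0', '0', '0', '2')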
@@ -256,8 +256,8 @@ Examples:

     if save_fresh_stats:
         try:
-            json.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'w'), default=json_serial)
-            json.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'w'), default=json_serial)
+            pickle.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'wb'))
+            pickle.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'wb'))
         except IOError as e:
             print(e.errno, e)
             sys.exit(e.errno)
@@ -271,7 +271,7 @@ Examples:
     """
     if os.path.isfile(cnstat_fqn_file_rx):
         try:
-            cnstat_cached_dict = json.load(open(cnstat_fqn_file_rx, 'r'))
+            cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_rx, 'rb'))
             print("Last cached time was " + str(cnstat_cached_dict.get('time')))
             pfcstat.cnstat_diff_print(cnstat_dict_rx, cnstat_cached_dict, True)
         except IOError as e:
@@ -286,7 +286,7 @@ Examples:
     """
     if os.path.isfile(cnstat_fqn_file_tx):
         try:
-            cnstat_cached_dict = json.load(open(cnstat_fqn_file_tx, 'r'))
+            cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_tx, 'rb'))
             print("Last cached time was " + str(cnstat_cached_dict.get('time')))
             pfcstat.cnstat_diff_print(cnstat_dict_tx, cnstat_cached_dict, False)
         except IOError as e:
[Diff for the remaining 6 changed files not shown.]