Replace pickle by json (#2636)
Signed-off-by: maipbui <maibui@microsoft.com>
#### What I did
`pickle` can lead to code execution vulnerabilities. Recommend serializing the relevant data as JSON instead.
#### How I did it
Replace `pickle` by `json`
#### How to verify it
Pass UT
Manual test
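
#### Why `pickle` is unsafe here
Unpickling executes whatever callable a crafted payload names, so anyone who can write one of these cache files could run code as the tool's user; JSON parsing never executes code. A minimal illustration (not part of this patch; the class and command below are made up):

```python
import json
import os
import pickle


class Evil:
    # pickle serializes this object via __reduce__; pickle.loads() will
    # then call the returned callable -- here os.system -- with its args
    def __reduce__(self):
        return (os.system, ("echo pwned",))


payload = pickle.dumps(Evil())
# pickle.loads(payload)  # uncommenting this line runs `echo pwned`

# json only round-trips plain data (dicts, lists, strings, numbers,
# bools, None); a tampered cache can yield wrong values, never code
stats = {"rx_p_ok": "42", "tx_p_ok": "17"}
assert json.loads(json.dumps(stats)) == stats
```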
maipbui authored Feb 24, 2023
1 parent 33e85d3 commit 54e2635
Showing 8 changed files with 235 additions and 235 deletions.
14 changes: 7 additions & 7 deletions scripts/dropstat
@@ -11,7 +11,7 @@
# - Refactor calls to COUNTERS_DB to reduce redundancy
# - Cache DB queries to reduce # of expensive queries

-import _pickle as pickle
+import json
import argparse
import os
import socket
@@ -117,10 +117,10 @@ class DropStat(object):
"""

try:
-pickle.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
-open(self.port_drop_stats_file, 'wb+'))
-pickle.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
-open(self.switch_drop_stats_file, 'wb+'))
+json.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
+open(self.port_drop_stats_file, 'w+'))
+json.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
+open(self.switch_drop_stats_file, 'w+'))
except IOError as e:
print(e)
sys.exit(e.errno)
@@ -135,7 +135,7 @@ class DropStat(object):

# Grab the latest clear checkpoint, if it exists
if os.path.isfile(self.port_drop_stats_file):
-port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))
+port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))

counters = self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP, group, counter_type)
headers = std_port_description_header + self.gather_headers(counters, DEBUG_COUNTER_PORT_STAT_MAP)
@@ -162,7 +162,7 @@ class DropStat(object):

# Grab the latest clear checkpoint, if it exists
if os.path.isfile(self.switch_drop_stats_file):
-switch_drop_ckpt = pickle.load(open(self.switch_drop_stats_file, 'rb'))
+switch_drop_ckpt = json.load(open(self.switch_drop_stats_file, 'r'))

counters = self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP, group, counter_type)
headers = std_switch_description_header + self.gather_headers(counters, DEBUG_COUNTER_SWITCH_STAT_MAP)
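Note the file-mode changes riding along with the swap above: `json.dump`/`json.load` operate on text streams, so the checkpoint files now open as `'w+'`/`'r'` instead of pickle's binary `'wb+'`/`'rb'`. A sketch of the round-trip under that assumption (path and data are made up):

```python
import json

counters = {"Ethernet0": {"RX_ERR": "0"}}

with open("/tmp/drop-ckpt.json", "w+") as f:  # text mode for json
    json.dump(counters, f)

with open("/tmp/drop-ckpt.json") as f:
    assert json.load(f) == counters
```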
10 changes: 5 additions & 5 deletions scripts/flow_counters_stat
@@ -2,7 +2,7 @@

import argparse
import os
-import _pickle as pickle
+import json
import sys

from natsort import natsorted
@@ -185,8 +185,8 @@ class FlowCounterStats(object):
if os.path.exists(self.data_file):
os.remove(self.data_file)

-with open(self.data_file, 'wb') as f:
-pickle.dump(data, f)
+with open(self.data_file, 'w') as f:
+json.dump(data, f)
except IOError as e:
print('Failed to save statistic - {}'.format(repr(e)))

@@ -200,8 +200,8 @@ class FlowCounterStats(object):
return None

try:
-with open(self.data_file, 'rb') as f:
-data = pickle.load(f)
+with open(self.data_file, 'r') as f:
+data = json.load(f)
except IOError as e:
print('Failed to load statistic - {}'.format(repr(e)))
return None
64 changes: 32 additions & 32 deletions scripts/intfstat
@@ -6,7 +6,7 @@
#
#####################################################################

-import _pickle as pickle
+import json
import argparse
import datetime
import sys
@@ -28,7 +28,7 @@ from collections import namedtuple, OrderedDict
from natsort import natsorted
from tabulate import tabulate
from utilities_common.netstat import ns_diff, table_as_json, STATUS_NA, format_brate, format_prate
-from utilities_common.cli import UserCache
+from utilities_common.cli import json_serial, UserCache
from swsscommon.swsscommon import SonicV2Connector

nstat_fields = (
@@ -96,7 +96,7 @@ class Intfstat(object):
counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, counter_name)
if counter_data:
fields[pos] = str(counter_data)
-cntr = NStats._make(fields)
+cntr = NStats._make(fields)._asdict()
return cntr

def get_rates(table_id):
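The `._asdict()` call above drives most of the remaining hunks in this file: `json` encodes a namedtuple as a bare list (it is a tuple subclass), so field names would be lost on reload. Converting to a dict up front keeps the names, and every attribute access (`cntr.rx_p_ok`) becomes a key lookup (`cntr['rx_p_ok']`). A standalone sketch:

```python
import json
from collections import namedtuple

NStats = namedtuple("NStats", "rx_p_ok, tx_p_ok")
cntr = NStats._make(["100", "200"])

# Dumping the namedtuple directly drops the field names ...
assert json.dumps(cntr) == '["100", "200"]'

# ... so convert to a dict first; key access survives the round-trip
d = cntr._asdict()
assert json.loads(json.dumps(d)) == {"rx_p_ok": "100", "tx_p_ok": "200"}
```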
@@ -153,14 +153,14 @@ class Intfstat(object):
rates = ratestat_dict.get(key, RateStats._make([STATUS_NA] * len(rates_key_list)))

table.append((key,
-data.rx_p_ok,
+data['rx_p_ok'],
format_brate(rates.rx_bps),
format_prate(rates.rx_pps),
-data.rx_p_err,
-data.tx_p_ok,
+data['rx_p_err'],
+data['tx_p_ok'],
format_brate(rates.tx_bps),
format_prate(rates.tx_pps),
-data.tx_p_err))
+data['tx_p_err']))

if use_json:
print(table_as_json(table, header))
@@ -186,24 +186,24 @@ class Intfstat(object):

if old_cntr is not None:
table.append((key,
-ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
+ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
format_brate(rates.rx_bps),
format_prate(rates.rx_pps),
-ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
-ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
+ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
+ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
format_brate(rates.tx_bps),
format_prate(rates.tx_pps),
-ns_diff(cntr.tx_p_err, old_cntr.tx_p_err)))
+ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err'])))
else:
table.append((key,
-cntr.rx_p_ok,
+cntr['rx_p_ok'],
format_brate(rates.rx_bps),
format_prate(rates.rx_pps),
-cntr.rx_p_err,
-cntr.tx_p_ok,
+cntr['rx_p_err'],
+cntr['tx_p_ok'],
format_brate(rates.tx_bps),
format_prate(rates.tx_pps),
-cntr.tx_p_err))
+cntr['tx_p_err']))

if use_json:
print(table_as_json(table, header))
@@ -229,17 +229,17 @@ class Intfstat(object):

if cnstat_old_dict and cnstat_old_dict.get(rif):
old_cntr = cnstat_old_dict.get(rif)
-body = body % (ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
-ns_diff(cntr.rx_b_ok, old_cntr.rx_b_ok),
-ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
-ns_diff(cntr.rx_b_err, old_cntr.rx_b_err),
-ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
-ns_diff(cntr.tx_b_ok, old_cntr.tx_b_ok),
-ns_diff(cntr.tx_p_err, old_cntr.tx_p_err),
-ns_diff(cntr.tx_b_err, old_cntr.tx_b_err))
+body = body % (ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
+ns_diff(cntr['rx_b_ok'], old_cntr['rx_b_ok']),
+ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
+ns_diff(cntr['rx_b_err'], old_cntr['rx_b_err']),
+ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
+ns_diff(cntr['tx_b_ok'], old_cntr['tx_b_ok']),
+ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err']),
+ns_diff(cntr['tx_b_err'], old_cntr['tx_b_err']))
else:
-body = body % (cntr.rx_p_ok, cntr.rx_b_ok, cntr.rx_p_err,cntr.rx_b_err,
-cntr.tx_p_ok, cntr.tx_b_ok, cntr.tx_p_err, cntr.tx_b_err)
+body = body % (cntr['rx_p_ok'], cntr['rx_b_ok'], cntr['rx_p_err'],cntr['rx_b_err'],
+cntr['tx_p_ok'], cntr['tx_b_ok'], cntr['tx_p_err'], cntr['tx_b_err'])

print(header)
print(body)
@@ -305,20 +305,20 @@ def main():
if tag_name is not None:
if os.path.isfile(cnstat_fqn_general_file):
try:
-general_data = pickle.load(open(cnstat_fqn_general_file, 'rb'))
+general_data = json.load(open(cnstat_fqn_general_file, 'r'))
for key, val in cnstat_dict.items():
general_data[key] = val
-pickle.dump(general_data, open(cnstat_fqn_general_file, 'wb'))
+json.dump(general_data, open(cnstat_fqn_general_file, 'w'))
except IOError as e:
sys.exit(e.errno)
# Add the information also to tag specific file
if os.path.isfile(cnstat_fqn_file):
-data = pickle.load(open(cnstat_fqn_file, 'rb'))
+data = json.load(open(cnstat_fqn_file, 'r'))
for key, val in cnstat_dict.items():
data[key] = val
-pickle.dump(data, open(cnstat_fqn_file, 'wb'))
+json.dump(data, open(cnstat_fqn_file, 'w'))
else:
-pickle.dump(cnstat_dict, open(cnstat_fqn_file, 'wb'))
+json.dump(cnstat_dict, open(cnstat_fqn_file, 'w'), default=json_serial)
except IOError as e:
sys.exit(e.errno)
else:
@@ -330,9 +330,9 @@
try:
cnstat_cached_dict = {}
if os.path.isfile(cnstat_fqn_file):
-cnstat_cached_dict = pickle.load(open(cnstat_fqn_file, 'rb'))
+cnstat_cached_dict = json.load(open(cnstat_fqn_file, 'r'))
else:
-cnstat_cached_dict = pickle.load(open(cnstat_fqn_general_file, 'rb'))
+cnstat_cached_dict = json.load(open(cnstat_fqn_general_file, 'r'))

print("Last cached time was " + str(cnstat_cached_dict.get('time')))
if interface_name:
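The new `json_serial` import earns its keep in the dump calls above: each stats dict carries a `'time'` key (see the "Last cached time" print), presumably a `datetime`, which the stock JSON encoder rejects. The real helper lives in `utilities_common/cli.py`; the sketch below shows the usual shape of such a `default=` hook, not necessarily the exact implementation:

```python
import json
from datetime import date, datetime


def json_serial(obj):
    """Fallback for objects the default JSON encoder can't handle."""
    if isinstance(obj, (datetime, date)):
        return obj.isoformat()  # e.g. '2023-02-24T12:00:00'
    raise TypeError("Type %s not serializable" % type(obj))


snapshot = {"time": datetime.now(), "rx_p_ok": "42"}
print(json.dumps(snapshot, default=json_serial))
```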
62 changes: 31 additions & 31 deletions scripts/pfcstat
@@ -6,7 +6,7 @@
#
#####################################################################

-import _pickle as pickle
+import json
import argparse
import datetime
import os.path
@@ -37,7 +37,7 @@ except KeyError:
from utilities_common.netstat import ns_diff, STATUS_NA, format_number_with_comma
from utilities_common import multi_asic as multi_asic_util
from utilities_common import constants
-from utilities_common.cli import UserCache
+from utilities_common.cli import json_serial, UserCache


PStats = namedtuple("PStats", "pfc0, pfc1, pfc2, pfc3, pfc4, pfc5, pfc6, pfc7")
@@ -101,7 +101,7 @@ class Pfcstat(object):
fields[pos] = STATUS_NA
else:
fields[pos] = str(int(counter_data))
-cntr = PStats._make(fields)
+cntr = PStats._make(fields)._asdict()
return cntr

# Get the info from database
@@ -144,14 +144,14 @@ class Pfcstat(object):
if key == 'time':
continue
table.append((key,
-format_number_with_comma(data.pfc0),
-format_number_with_comma(data.pfc1),
-format_number_with_comma(data.pfc2),
-format_number_with_comma(data.pfc3),
-format_number_with_comma(data.pfc4),
-format_number_with_comma(data.pfc5),
-format_number_with_comma(data.pfc6),
-format_number_with_comma(data.pfc7)))
+format_number_with_comma(data['pfc0']),
+format_number_with_comma(data['pfc1']),
+format_number_with_comma(data['pfc2']),
+format_number_with_comma(data['pfc3']),
+format_number_with_comma(data['pfc4']),
+format_number_with_comma(data['pfc5']),
+format_number_with_comma(data['pfc6']),
+format_number_with_comma(data['pfc7'])))

if rx:
print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
@@ -173,24 +173,24 @@ class Pfcstat(object):

if old_cntr is not None:
table.append((key,
-ns_diff(cntr.pfc0, old_cntr.pfc0),
-ns_diff(cntr.pfc1, old_cntr.pfc1),
-ns_diff(cntr.pfc2, old_cntr.pfc2),
-ns_diff(cntr.pfc3, old_cntr.pfc3),
-ns_diff(cntr.pfc4, old_cntr.pfc4),
-ns_diff(cntr.pfc5, old_cntr.pfc5),
-ns_diff(cntr.pfc6, old_cntr.pfc6),
-ns_diff(cntr.pfc7, old_cntr.pfc7)))
+ns_diff(cntr['pfc0'], old_cntr['pfc0']),
+ns_diff(cntr['pfc1'], old_cntr['pfc1']),
+ns_diff(cntr['pfc2'], old_cntr['pfc2']),
+ns_diff(cntr['pfc3'], old_cntr['pfc3']),
+ns_diff(cntr['pfc4'], old_cntr['pfc4']),
+ns_diff(cntr['pfc5'], old_cntr['pfc5']),
+ns_diff(cntr['pfc6'], old_cntr['pfc6']),
+ns_diff(cntr['pfc7'], old_cntr['pfc7'])))
else:
table.append((key,
-format_number_with_comma(cntr.pfc0),
-format_number_with_comma(cntr.pfc1),
-format_number_with_comma(cntr.pfc2),
-format_number_with_comma(cntr.pfc3),
-format_number_with_comma(cntr.pfc4),
-format_number_with_comma(cntr.pfc5),
-format_number_with_comma(cntr.pfc6),
-format_number_with_comma(cntr.pfc7)))
+format_number_with_comma(cntr['pfc0']),
+format_number_with_comma(cntr['pfc1']),
+format_number_with_comma(cntr['pfc2']),
+format_number_with_comma(cntr['pfc3']),
+format_number_with_comma(cntr['pfc4']),
+format_number_with_comma(cntr['pfc5']),
+format_number_with_comma(cntr['pfc6']),
+format_number_with_comma(cntr['pfc7'])))

if rx:
print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
@@ -256,8 +256,8 @@ Examples:

if save_fresh_stats:
try:
-pickle.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'wb'))
-pickle.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'wb'))
+json.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'w'), default=json_serial)
+json.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'w'), default=json_serial)
except IOError as e:
print(e.errno, e)
sys.exit(e.errno)
@@ -271,7 +271,7 @@
"""
if os.path.isfile(cnstat_fqn_file_rx):
try:
-cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_rx, 'rb'))
+cnstat_cached_dict = json.load(open(cnstat_fqn_file_rx, 'r'))
print("Last cached time was " + str(cnstat_cached_dict.get('time')))
pfcstat.cnstat_diff_print(cnstat_dict_rx, cnstat_cached_dict, True)
except IOError as e:
@@ -286,7 +286,7 @@
"""
if os.path.isfile(cnstat_fqn_file_tx):
try:
-cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_tx, 'rb'))
+cnstat_cached_dict = json.load(open(cnstat_fqn_file_tx, 'r'))
print("Last cached time was " + str(cnstat_cached_dict.get('time')))
pfcstat.cnstat_diff_print(cnstat_dict_tx, cnstat_cached_dict, False)
except IOError as e:
8 changes: 4 additions & 4 deletions scripts/pg-drop
@@ -5,7 +5,7 @@
# pg-drop is a tool for show/clear ingress pg dropped packet stats.
#
#####################################################################
-import _pickle as pickle
+import json
import argparse
import os
import sys
@@ -144,7 +144,7 @@ class PgDropStat(object):
port_drop_ckpt = {}
# Grab the latest clear checkpoint, if it exists
if os.path.isfile(self.port_drop_stats_file):
-port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))
+port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))

# Header list contains the port name followed by the PGs. Fields is used to populate the pg values
fields = ["0"]* (len(self.header_list) - 1)
@@ -216,10 +216,10 @@ class PgDropStat(object):

counter_pg_drop_array = [ "SAI_INGRESS_PRIORITY_GROUP_STAT_DROPPED_PACKETS"]
try:
-pickle.dump(self.get_counts_table(
+json.dump(self.get_counts_table(
counter_pg_drop_array,
COUNTERS_PG_NAME_MAP),
-open(self.port_drop_stats_file, 'wb+'))
+open(self.port_drop_stats_file, 'w+'))
except IOError as e:
print(e)
sys.exit(e.errno)
[Diffs for the remaining 3 of 8 changed files not shown.]