import os
import sys
import json
import re
import click
from tabulate import tabulate
from sonic_py_common import multi_asic
from utilities_common.constants import DEFAULT_NAMESPACE
from dump.match_infra import RedisSource, JsonSource, ConnectionPool
from dump import plugins


# Autocompletion Helper
def get_available_modules(ctx, args, incomplete):
    return [k for k in plugins.dump_modules.keys() if incomplete in k]


# Display Modules Callback
def show_modules(ctx, param, value):
    if not value or ctx.resilient_parsing:
        return
    header = ["Module", "Identifier"]
    display = []
    for mod in plugins.dump_modules:
        display.append((mod, plugins.dump_modules[mod].ARG_NAME))
    click.echo(tabulate(display, header))
    ctx.exit()


@click.group()
def dump():
    pass


@dump.command()
@click.pass_context
@click.argument('module', required=True, type=str, autocompletion=get_available_modules)
@click.argument('identifier', required=True, type=str)
@click.option('--show', '-s', is_flag=True, default=False, expose_value=False,
              callback=show_modules, help='Display Modules Available', is_eager=True)
@click.option('--db', '-d', multiple=True,
              help='Only dump from these Databases or the CONFIG_FILE')
@click.option('--table', '-t', is_flag=True, default=False,
              help='Print in tabular format', show_default=True)
@click.option('--key-map', '-k', is_flag=True, default=False, show_default=True,
              help="Only fetch the keys matched, don't extract field-value dumps")
@click.option('--verbose', '-v', is_flag=True, default=False, show_default=True,
              help="Prints any intermediate output to stdout useful for dev & troubleshooting")
@click.option('--namespace', '-n', default=DEFAULT_NAMESPACE, type=str,
              show_default=True, help='Dump the redis-state for this namespace.')
def state(ctx, module, identifier, db, table, key_map, verbose, namespace):
    """
    Dump the current state of the identifier for the specified module from Redis DB or CONFIG_FILE
    """
    if not multi_asic.is_multi_asic() and namespace != DEFAULT_NAMESPACE:
        click.echo("Namespace option is not valid for a single-ASIC device")
        ctx.exit()

    if multi_asic.is_multi_asic() and (namespace != DEFAULT_NAMESPACE and namespace not in multi_asic.get_namespace_list()):
        click.echo("Namespace option is not valid. Choose one of {}".format(multi_asic.get_namespace_list()))
        ctx.exit()

    if module not in plugins.dump_modules:
        click.echo("No Matching Plugin has been Implemented")
        ctx.exit()

    if verbose:
        os.environ["VERBOSE"] = "1"
    else:
        os.environ["VERBOSE"] = "0"

    ctx.module = module
    obj = plugins.dump_modules[module]()

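    # "all" expands to every identifier the module can discover in this namespace;
    # otherwise the argument is treated as a comma-separated list of identifiers.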
    if identifier == "all":
        ids = obj.get_all_args(namespace)
    else:
        ids = identifier.split(",")

    params = {}
    collected_info = {}
    params['namespace'] = namespace
    for arg in ids:
        params[plugins.dump_modules[module].ARG_NAME] = arg
        collected_info[arg] = obj.execute(params)

    if len(db) > 0:
        collected_info = filter_out_dbs(db, collected_info)

    vidtorid = extract_rid(collected_info, namespace)

    if not key_map:
        collected_info = populate_fv(collected_info, module, namespace)

    for id in vidtorid.keys():
        collected_info[id]["ASIC_DB"]["vidtorid"] = vidtorid[id]

    print_dump(collected_info, table, module, identifier, key_map)

    return


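# Build a VID -> RID map for each collected identifier by looking up the
# virtual OIDs found in its ASIC_DB keys against the VIDTORID table.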
def extract_rid(info, ns):
    r = RedisSource(ConnectionPool())
    r.connect("ASIC_DB", ns)
    vidtorid = {}
    vid_cache = {}  # Cache Entries to reduce number of Redis Calls
    for arg in info.keys():
        mp = get_v_r_map(r, info[arg], vid_cache)
        if mp:
            vidtorid[arg] = mp
    return vidtorid


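# For every ASIC_STATE key collected for a single identifier, pull out the VID
# and fetch the corresponding RID from the ASIC_DB VIDTORID hash.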
def get_v_r_map(r, single_dict, vid_cache):
    v_r_map = {}
    asic_obj_ptrn = r"ASIC_STATE:.*:oid:0x\w{1,14}"

    if "ASIC_DB" in single_dict and "keys" in single_dict["ASIC_DB"]:
        for redis_key in single_dict["ASIC_DB"]["keys"]:
            if re.match(asic_obj_ptrn, redis_key):
                matches = re.findall(r"oid:0x\w{1,14}", redis_key)
                if matches:
                    vid = matches[0]
                    if vid in vid_cache:
                        rid = vid_cache[vid]
                    else:
                        rid = r.hget("ASIC_DB", "VIDTORID", vid)
                        vid_cache[vid] = rid
                    v_r_map[vid] = rid if rid else "Real ID Not Found"
    return v_r_map


# Filter dbs which are not required
def filter_out_dbs(db_list, collected_info):
    args_ = list(collected_info.keys())
    for arg in args_:
        dbs = list(collected_info[arg].keys())
        for db in dbs:
            if db not in db_list:
                del collected_info[arg][db]
    return collected_info


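# Replace the bare keys collected for each identifier with {key: field-value dict}
# entries, reading the values from the relevant redis DB or the CONFIG_FILE source.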
def populate_fv(info, module, namespace):
    all_dbs = set()
    for id in info.keys():
        for db_name in info[id].keys():
            all_dbs.add(db_name)

    db_cfg_file = JsonSource()
    db_conn = ConnectionPool().initialize_connector(namespace)
    for db_name in all_dbs:
        if db_name == "CONFIG_FILE":
            db_cfg_file.connect(plugins.dump_modules[module].CONFIG_FILE, namespace)
        else:
            db_conn.connect(db_name)

    final_info = {}
    for id in info.keys():
        final_info[id] = {}
        for db_name in info[id].keys():
            final_info[id][db_name] = {}
            final_info[id][db_name]["keys"] = []
            final_info[id][db_name]["tables_not_found"] = info[id][db_name]["tables_not_found"]
            for key in info[id][db_name]["keys"]:
                if db_name == "CONFIG_FILE":
                    fv = db_cfg_file.get(db_name, key)
                else:
                    fv = db_conn.get_all(db_name, key)
                final_info[id][db_name]["keys"].append({key: fv})

    return final_info


def get_dict_str(key_obj):
    table = []
    for pair in key_obj.items():
        table.append(list(pair))
    return tabulate(table, headers=["field", "value"], tablefmt="psql")


# print dump
def print_dump(collected_info, table, module, identifier, key_map):
    if not table:
        click.echo(json.dumps(collected_info, indent=4))
        return

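    # Tabular output: one row per (identifier, DB) pair, with the dump for that DB
    # rendered as nested grid tables in the "DUMP" column.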
    top_header = [plugins.dump_modules[module].ARG_NAME, "DB_NAME", "DUMP"]
    final_collection = []
    for ids in collected_info.keys():
        for db in collected_info[ids].keys():
            total_info = ""

            if collected_info[ids][db]["tables_not_found"]:
                tabulate_fmt = []
                for tab in collected_info[ids][db]["tables_not_found"]:
                    tabulate_fmt.append([tab])
                total_info += tabulate(tabulate_fmt, ["Tables Not Found"], tablefmt="grid")
                total_info += "\n"

            if not key_map:
                values = []
                hdrs = ["Keys", "field-value pairs"]
                for key_obj in collected_info[ids][db]["keys"]:
                    if isinstance(key_obj, dict) and key_obj:
                        key = list(key_obj.keys())[0]
                        values.append([key, get_dict_str(key_obj[key])])
                total_info += str(tabulate(values, hdrs, tablefmt="grid"))
            else:
                temp = []
                for key_ in collected_info[ids][db]["keys"]:
                    temp.append([key_])
                total_info += str(tabulate(temp, headers=["Keys Collected"], tablefmt="grid"))

            total_info += "\n"
            if "vidtorid" in collected_info[ids][db]:
                temp = []
                for pair in collected_info[ids][db]["vidtorid"].items():
                    temp.append(list(pair))
                total_info += str(tabulate(temp, headers=["vid", "rid"], tablefmt="grid"))
            final_collection.append([ids, db, total_info])

    click.echo(tabulate(final_collection, top_header, tablefmt="grid"))
    return


if __name__ == '__main__':
    dump()