Initial overhaul of Analyze mode #210

Merged · 7 commits · Dec 11, 2023
14 changes: 10 additions & 4 deletions src/argparser.py
@@ -26,7 +26,13 @@
import shutil
import os

def omniarg_parser(parser, omniperf_home, omniperf_version):
def print_avail_arch(avail_arch: list):
ret_str = "\t\tList all available metrics for analysis on specified arch:"
for arch in avail_arch:
ret_str += "\n\t\t {}".format(arch)
return ret_str

def omniarg_parser(parser, omniperf_home, supported_archs, omniperf_version):
# -----------------------------------------
# Parse arguments (dependent on mode)
# -----------------------------------------
@@ -389,8 +395,8 @@ def omniarg_parser(parser, omniperf_home, omniperf_version):
analyze_group.add_argument(
"--list-metrics",
metavar="",
choices=["gfx906", "gfx908", "gfx90a"],
help="\t\tList all available metrics for analysis on specified arch:\n\t\t gfx906\n\t\t gfx908\n\t\t gfx90a",
choices=supported_archs.keys(),#["gfx906", "gfx908", "gfx90a"],
help=print_avail_arch(supported_archs.keys()),
)
analyze_group.add_argument(
"-k",
@@ -483,7 +489,7 @@ def omniarg_parser(parser, omniperf_home, omniperf_version):
dest="config_dir",
metavar="",
help="\t\tSpecify the directory of customized configs.",
default=omniperf_home.joinpath("omniperf_analyze/configs"),
default=omniperf_home.joinpath("omniperf_soc/analysis_configs/"),
)
analyze_advanced_group.add_argument(
"--save-dfs",
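The --list-metrics change above replaces the hard-coded gfx906/gfx908/gfx90a lists with a supported_archs mapping, so both the argparse choices and the help text are generated from one registry. A minimal, self-contained sketch of that pattern (the supported_archs values below are placeholders, not Omniperf's real SoC table):

import argparse

def print_avail_arch(avail_arch: list):
    # Mirror of the helper added above: build the indented help text
    # from whatever architectures the registry advertises.
    ret_str = "\t\tList all available metrics for analysis on specified arch:"
    for arch in avail_arch:
        ret_str += "\n\t\t   {}".format(arch)
    return ret_str

# Hypothetical registry; in Omniperf the keys/values come from the SoC classes.
supported_archs = {"gfx906": "mi50", "gfx908": "mi100", "gfx90a": "mi200"}

parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
    "--list-metrics",
    metavar="",
    choices=supported_archs.keys(),   # membership test works on dict_keys
    help=print_avail_arch(supported_archs.keys()),
)

args = parser.parse_args(["--list-metrics", "gfx90a"])
print(args.list_metrics)  # -> gfx90a; an unknown arch triggers an argparse error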
156 changes: 150 additions & 6 deletions src/omniperf_analyze/analysis_base.py
@@ -23,22 +23,166 @@
##############################################################################el

from abc import ABC, abstractmethod
import os
import logging
import sys
import copy
from collections import OrderedDict
from pathlib import Path
from utils.utils import demarcate, error
from utils import schema, file_io, parser
import pandas as pd
from tabulate import tabulate

class OmniAnalyze_Base():
def __init__(self,args,options):
def __init__(self,args,supported_archs):
self.__args = args
self.__options = options
self._runs = OrderedDict() #NB: I made this public so children can modify/add to obj properties
self._arch_configs = {} #NB: I made this public so children can modify/add to obj properties
self.__supported_archs = supported_archs
self._output = None #NB: I made this public so children can modify/add to obj properties

def get_args(self):
return self.__args

@demarcate
def generate_configs(self, arch, config_dir, list_kernels, filter_metrics):
single_panel_config = file_io.is_single_panel_config(Path(config_dir), self.__supported_archs)

ac = schema.ArchConfig()
if list_kernels:
ac.panel_configs = file_io.top_stats_build_in_config
else:
arch_panel_config = (
config_dir if single_panel_config else config_dir.joinpath(arch)
)
ac.panel_configs = file_io.load_panel_configs(arch_panel_config)

# TODO: filter_metrics should/might be one per arch
# print(ac)

parser.build_dfs(ac, filter_metrics)
self._arch_configs[arch] = ac
return self._arch_configs

@demarcate
def list_metrics(self):
args = self.__args
if args.list_metrics in file_io.supported_arch.keys():
arch = args.list_metrics
if arch not in self._arch_configs.keys():
self.generate_configs(arch, args.config_dir, args.list_kernels, args.filter_metrics)
print(
tabulate(
pd.DataFrame.from_dict(
self._arch_configs[args.list_metrics].metric_list,
orient="index",
columns=["Metric"],
),
headers="keys",
tablefmt="fancy_grid"
),
file=self._output
)
sys.exit(0)
else:
error("Unsupported arch")

@demarcate
def load_options(self, normalization_filter):
if not normalization_filter:
for k, v in self._arch_configs.items():
parser.build_metric_value_string(v.dfs, v.dfs_type, self.__args.normal_unit)
else:
for k, v in self._arch_configs.items():
parser.build_metric_value_string(v.dfs, v.dfs_type, normalization_filter)

args = self.__args
# Error checking for multiple runs and multiple gpu_kernel filters
if args.gpu_kernel and (len(args.path) != len(args.gpu_kernel)):
if len(args.gpu_kernel) == 1:
for i in range(len(args.path) - 1):
args.gpu_kernel.extend(args.gpu_kernel)
else:
error("Error: the number of --filter-kernels doesn't match the number of --dir.")

@demarcate
def initalize_runs(self, avail_socs, normalization_filter=None):
if self.__args.list_metrics:
self.list_metrics()

# load required configs
for d in self.__args.path:
sys_info = file_io.load_sys_info(Path(d[0], "sysinfo.csv"))
arch = sys_info.iloc[0]["gpu_soc"]
args = self.__args
self.generate_configs(arch, args.config_dir, args.list_kernels, args.filter_metrics)

self.load_options(normalization_filter)

for d in self.__args.path:
w = schema.Workload()
w.sys_info = file_io.load_sys_info(Path(d[0], "sysinfo.csv"))
w.avail_ips = w.sys_info["ip_blocks"].item().split("|")
arch = w.sys_info.iloc[0]["gpu_soc"]
w.dfs = copy.deepcopy(self._arch_configs[arch].dfs)
w.dfs_type = self._arch_configs[arch].dfs_type
w.soc_spec = avail_socs[arch].get_soc_param()
self._runs[d[0]] = w

return self._runs


@demarcate
def sanitize(self):
"""Perform sanitization of inputs
"""
if not self.__args.list_metrics and not self.__args.path:
error("The following arguments are required: -p/--path")
# verify not accessing parent directories
if ".." in str(self.__args.path):
error("Access denied. Cannot access parent directories in path (i.e. ../)")
# ensure absolute path
for dir in self.__args.path:
full_path = os.path.abspath(dir[0])
dir[0] = full_path
if not os.path.isdir(dir[0]):
error("Invalid directory {}\nPlease try again.".format(dir[0]))

#----------------------------------------------------
# Required methods to be implemented by child classes
#----------------------------------------------------
@abstractmethod
def pre_processing(self):
def pre_processing(self, omni_socs: set):
"""Perform initialization prior to analysis.
"""
pass
logging.debug("[analysis] prepping to do some analysis")
logging.info("[analysis] deriving Omniperf metrics...")
# initalize output file
self._output = open(self.__args.output_file, "w+") if self.__args.output_file else sys.stdout

# initalize runs
self._runs = self.initalize_runs(omni_socs)

# set filters
if self.__args.gpu_kernel:
for d, gk in zip(self.__args.path, self.__args.gpu_kernel):
self._runs[d[0]].filter_kernel_ids = gk
if self.__args.gpu_id:
if len(self.__args.gpu_id) == 1 and len(self.__args.path) != 1:
for i in range(len(self.__args.path) - 1):
self.__args.gpu_id.extend(self.__args.gpu_id)
for d, gi in zip(self.__args.path, self.__args.gpu_id):
self._runs[d[0]].filter_gpu_ids = gi
if self.__args.gpu_dispatch_id:
if len(self.__args.gpu_dispatch_id) == 1 and len(self.__args.path) != 1:
for i in range(len(self.__args.path) - 1):
self.__args.gpu_dispatch_id.extend(self.__args.gpu_dispatch_id)
for d, gd in zip(self.__args.path, self.__args.gpu_dispatch_id):
self._runs[d[0]].filter_dispatch_ids = gd

@abstractmethod
def run_analysis(self):
"""Run analysis.
"""
pass

logging.debug("[analysis] generating analysis")
49 changes: 44 additions & 5 deletions src/omniperf_analyze/analysis_cli.py
@@ -22,21 +22,60 @@
# SOFTWARE.
##############################################################################el

import logging
from omniperf_analyze.analysis_base import OmniAnalyze_Base
from utils.utils import demarcate
from utils.utils import demarcate, error
from utils import file_io, parser, tty
from utils.csv_processor import kernel_name_shortener

class cli_analysis(OmniAnalyze_Base):

#-----------------------
# Required child methods
#-----------------------
@demarcate
def pre_processing(self):
def pre_processing(self, omni_soc):
"""Perform any pre-processing steps prior to analysis.
"""
logging.debug("[analysis] prepping to do some analysis")
super().pre_processing(omni_soc)
if self.get_args().random_port:
error("--gui flag is required to enable --random-port")
for d in self.get_args().path:
# demangle and overwrite original 'KernelName'
kernel_name_shortener(d[0], self.get_args().kernel_verbose)

file_io.create_df_kernel_top_stats(
d[0],
self._runs[d[0]].filter_gpu_ids,
self._runs[d[0]].filter_dispatch_ids,
self.get_args().time_unit,
self.get_args().max_kernel_num
)
# create 'mega dataframe'
self._runs[d[0]].raw_pmc = file_io.create_df_pmc(
d[0], self.get_args().verbose
)
is_gui = False
# create the loaded table
parser.load_table_data(
self._runs[d[0]], d[0], is_gui, self.get_args().g, self.get_args().verbose
)


@demarcate
def run_analysis(self):
"""Run CLI analysis.
"""
logging.debug("[analysis] check out this wicked cli ascii art")
if self.get_args().list_kernels:
tty.show_kernels(
self.get_args(),
self._runs,
self._arch_configs[self._runs[self.get_args().path[0][0]].sys_info.iloc[0]["gpu_soc"]],
self._output
)
else:
tty.show_all(
self.get_args(),
self._runs,
self._arch_configs[self._runs[self.get_args().path[0][0]].sys_info.iloc[0]["gpu_soc"]],
self._output
)
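cli_analysis.pre_processing now begins with super().pre_processing(omni_soc), so the shared run loading and filter setup live once in OmniAnalyze_Base and each mode only adds its own steps on top. A minimal sketch of that template-method arrangement (class and method names below are illustrative, not the real Omniperf classes):

from abc import ABC, abstractmethod

class AnalyzeBase(ABC):
    @abstractmethod
    def pre_processing(self):
        # Shared setup every mode needs; subclasses invoke it via super().
        print("base: load runs, resolve arch configs, apply filters")

class CliAnalysis(AnalyzeBase):
    def pre_processing(self):
        super().pre_processing()            # reuse the common initialization
        print("cli: shorten kernel names, build top-stats tables")

CliAnalysis().pre_processing()
# base: load runs, resolve arch configs, apply filters
# cli: shorten kernel names, build top-stats tables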
2 changes: 2 additions & 0 deletions src/omniperf_analyze/analysis_webui.py
@@ -28,7 +28,9 @@

class webui_analysis(OmniAnalyze_Base):

#-----------------------
# Required child methods
#-----------------------
@demarcate
def pre_processing(self):
"""Perform any pre-processing steps prior to analysis.
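With both cli_analysis and webui_analysis deriving from the reworked base class, a caller only needs to pick the concrete analyzer and drive the shared lifecycle. A hypothetical driver sketch (not Omniperf's actual entry point; the argument names are assumptions):

def run_analyze_mode(args, supported_archs, omni_socs):
    if getattr(args, "gui", False):
        from omniperf_analyze.analysis_webui import webui_analysis as analyzer_cls
    else:
        from omniperf_analyze.analysis_cli import cli_analysis as analyzer_cls

    analyzer = analyzer_cls(args, supported_archs)  # base __init__ now takes supported_archs
    analyzer.sanitize()                 # validate --path inputs
    analyzer.pre_processing(omni_socs)  # shared setup plus mode-specific prep
    analyzer.run_analysis()             # render CLI tables or serve the web UI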