Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions app/src/components/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ export default {
sockets: {
config(data) {
this.data = JSON.parse(data);
this.runCounts = this.data.dataset_names.length;
this.runCounts = this.data["properties"]["runs"].length;
// set the data for runtime.
for (let dataset of this.data.dataset_names) {
for (let dataset of this.data["properties"]["runs"]) {
this.runtime.push({
dataset: dataset,
min_inclusive_runtime: this.data.minIncTime[dataset],
Expand Down
4 changes: 2 additions & 2 deletions app/src/components/callflowEnsemble.js
Original file line number Diff line number Diff line change
Expand Up @@ -280,8 +280,8 @@ export default {
setupStore(data) {
data = JSON.parse(data);
console.log("Config file: ", data);
this.$store.numOfRuns = data["datasets"].length;
this.$store.selectedDatasets = data["names"];
this.$store.numOfRuns = data["properties"]["runs"].length;
this.$store.selectedDatasets = data["properties"]["runs"];
this.selectedCaseStudy = data["runName"];
this.datasets = this.$store.selectedDatasets;

Expand Down
5 changes: 3 additions & 2 deletions app/src/components/callflowSingle.js
Original file line number Diff line number Diff line change
Expand Up @@ -259,8 +259,8 @@ export default {
setupStore(data) {
data = JSON.parse(data);
console.log("Config file: ", data);
this.$store.numOfRuns = data["datasets"].length;
this.$store.selectedDatasets = data["names"];
this.$store.numOfRuns = data["properties"]["runs"].length;
this.$store.selectedDatasets = data["properties"]["runs"];
this.datasets = this.$store.selectedDatasets;

// Enable diff mode only if the number of datasets >= 2
Expand Down Expand Up @@ -436,6 +436,7 @@ export default {
// Create a map for each dataset mapping the respective mean times.
let map = {};
for (let module_name of module_list) {
console.log(module_name, this.$store.modules[this.selectedTargetDataset][module_name])
map[module_name] = this.$store.modules[this.selectedTargetDataset][module_name][this.$store.selectedMetric]["mean_time"];
}

Expand Down
124 changes: 61 additions & 63 deletions callflow/callflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,15 @@


class CallFlow:
def __init__(self, config, ensemble=False):
def __init__(self, config: dict, ensemble=False):
"""
Entry interface to access CallFlow's functionalities.
"""

# Assert if config is provided.
assert isinstance(config, callflow.operations.ConfigFileReader)

# Convert config json to props. Never touch self.config ever.
self.props = json.loads(json.dumps(config, default=lambda o: o.__dict__))
assert isinstance(config, dict)

self.config = config
self.ensemble = ensemble

# --------------------------------------------------------------------------
Expand All @@ -37,19 +35,19 @@ def _create_dot_callflow_folder(self):
"""
Create a .callflow directory and empty files.
"""
LOGGER.debug(f"Saved .callflow directory is: {self.props['save_path']}")
LOGGER.debug(f"Saved .callflow directory is: {self.config['save_path']}")

if not os.path.exists(self.props["save_path"]):
os.makedirs(self.props["save_path"])
os.makedirs(os.path.join(self.props["save_path"], "ensemble"))
if not os.path.exists(self.config["save_path"]):
os.makedirs(self.config["save_path"])
os.makedirs(os.path.join(self.config["save_path"], "ensemble"))

dataset_folders = []
for dataset in self.props["datasets"]:
dataset_folders.append(dataset["name"])
dataset_folders = [k for k in self.config["properties"]["paths"].keys()]
# for dataset in self.config["properties"][""]:
# dataset_folders.append(self.config["properties"]["name"])
dataset_folders.append("ensemble")

for dataset in dataset_folders:
dataset_dir = os.path.join(self.props["save_path"], dataset)
dataset_dir = os.path.join(self.config["save_path"], dataset)
LOGGER.debug(dataset_dir)
if not os.path.exists(dataset_dir):
# if self.debug:
Expand All @@ -72,20 +70,20 @@ def process(self):
"""
Process the datasets based on the format (i.e., either single or ensemble)
"""
ndatasets = len(self.props["dataset_names"])
ndatasets = len(self.config["properties"]["runs"])
assert self.ensemble == (ndatasets > 1)

self._create_dot_callflow_folder()
if self.ensemble:
self._process_ensemble(self.props["dataset_names"])
self._process_ensemble(self.config["properties"]["runs"])
else:
self._process_single(self.props["dataset_names"][0])
self._process_single(self.config["properties"]["runs"][0])

def load(self):
"""
Load the processed datasets by the format.
"""
ndatasets = len(self.props["dataset_names"])
ndatasets = len(self.config["properties"]["runs"])
if self.ensemble:
self.supergraphs = self._read_ensemble()
# assertion here is 1 less than self.supergraph.keys, because
Expand All @@ -95,18 +93,18 @@ def load(self):
self.supergraphs = self._read_single()
assert len(self.supergraphs.keys()) == 1

# Adds basic information to props.
# Props is later return to client app on "init" request.
self.add_basic_info_to_props()
# Adds basic information to config.
# Config is later returned to the client app on "init" request.
self.add_basic_info_to_config()

def _process_single(self, dataset):
"""
Single dataset processing.
"""
supergraph = SuperGraph(props=self.props, tag=dataset, mode="process")
LOGGER.info("#########################################")
LOGGER.info(f"Run: {dataset}")
LOGGER.info("#########################################")
LOGGER.debug("#########################################")
LOGGER.debug(f"Single Mode: {dataset}")
LOGGER.debug("#########################################")
supergraph = SuperGraph(config=self.config, tag=dataset, mode="process")

# Process each graphframe.
supergraph.process_gf()
Expand All @@ -132,12 +130,12 @@ def _process_ensemble(self, datasets):
single_supergraphs = {}
for idx, dataset_name in enumerate(datasets):
# Create an instance of dataset.
LOGGER.debug("#########################################")
LOGGER.debug(f"Ensemble Mode: {dataset_name}")
LOGGER.debug("#########################################")
single_supergraphs[dataset_name] = SuperGraph(
props=self.props, tag=dataset_name, mode="process"
config=self.config, tag=dataset_name, mode="process"
)
LOGGER.info("#########################################")
LOGGER.info(f"Run: {dataset_name}")
LOGGER.info("#########################################")

# Process each graphframe.
single_supergraphs[dataset_name].process_gf()
Expand All @@ -156,7 +154,7 @@ def _process_ensemble(self, datasets):

# Create a supergraph class for ensemble case.
ensemble_supergraph = EnsembleGraph(
self.props, "ensemble", mode="process", supergraphs=single_supergraphs
self.config, "ensemble", mode="process", supergraphs=single_supergraphs
)

# Write the graphframe to file.
Expand All @@ -178,7 +176,7 @@ def _process_ensemble(self, datasets):
ensemble_supergraph.ensemble_auxiliary(
# MPIBinCount=self.currentMPIBinCount,
# RunBinCount=self.currentRunBinCount,
datasets=self.props["dataset_names"],
datasets=self.config["properties"]["runs"],
MPIBinCount=20,
RunBinCount=20,
process=True,
Expand All @@ -191,9 +189,9 @@ def _read_single(self):
"""
supergraphs = {}
# Only consider the first dataset from the listing.
dataset_name = self.props["dataset_names"][0]
dataset_name = self.config["properties"]["runs"][0]
supergraphs[dataset_name] = SuperGraph(
props=self.props, tag=dataset_name, mode="render"
config=self.config, tag=dataset_name, mode="render"
)

return supergraphs
Expand All @@ -204,59 +202,59 @@ def _read_ensemble(self):
"""
supergraphs = {}

for idx, dataset_name in enumerate(self.props["dataset_names"]):
for idx, dataset_name in enumerate(self.config["properties"]["runs"]):
supergraphs[dataset_name] = SuperGraph(
self.props, dataset_name, mode="render"
config=self.config, tag=dataset_name, mode="render"
)
# supergraphs[dataset_name].read_gf(read_parameter=self.props["read_parameter"])
# supergraphs[dataset_name].read_gf(read_parameter=self.config["read_parameter"])

supergraphs["ensemble"] = EnsembleGraph(
props=self.props, tag="ensemble", mode="render"
config=self.config, tag="ensemble", mode="render"
)
# supergraphs["ensemble"].read_gf(read_parameter=self.props["read_parameter"])
# supergraphs["ensemble"].read_gf(read_parameter=self.config["read_parameter"])
# supergraphs["ensemble"].read_auxiliary_data()
return supergraphs

# --------------------------------------------------------------------------
# Reading and rendering methods.
# All the functions below are Public methods that are accessed by the server.

def add_basic_info_to_props(self):
def add_basic_info_to_config(self):
"""
Adds basic information (like max, min inclusive and exclusive runtime) to self.props.
Adds basic information (like max, min inclusive and exclusive runtime) to self.config.
"""
self.props["maxIncTime"] = {}
self.props["maxExcTime"] = {}
self.props["minIncTime"] = {}
self.props["minExcTime"] = {}
self.props["numOfRanks"] = {}
self.config["maxIncTime"] = {}
self.config["maxExcTime"] = {}
self.config["minIncTime"] = {}
self.config["minExcTime"] = {}
self.config["numOfRanks"] = {}
maxIncTime = 0
maxExcTime = 0
minIncTime = 0
minExcTime = 0
for idx, tag in enumerate(self.supergraphs):
self.props["maxIncTime"][tag] = (
self.config["maxIncTime"][tag] = (
self.supergraphs[tag].gf.df["time (inc)"].max()
)
self.props["maxExcTime"][tag] = self.supergraphs[tag].gf.df["time"].max()
self.props["minIncTime"][tag] = (
self.config["maxExcTime"][tag] = self.supergraphs[tag].gf.df["time"].max()
self.config["minIncTime"][tag] = (
self.supergraphs[tag].gf.df["time (inc)"].min()
)
self.props["minExcTime"][tag] = self.supergraphs[tag].gf.df["time"].min()
# self.props["numOfRanks"][dataset] = len(
self.config["minExcTime"][tag] = self.supergraphs[tag].gf.df["time"].min()
# self.config["numOfRanks"][dataset] = len(
# self.datasets[dataset].gf.df["rank"].unique()
# )
maxExcTime = max(self.props["maxExcTime"][tag], maxExcTime)
maxIncTime = max(self.props["maxIncTime"][tag], maxIncTime)
minExcTime = min(self.props["minExcTime"][tag], minExcTime)
minIncTime = min(self.props["minIncTime"][tag], minIncTime)
# maxNumOfRanks = max(self.props["numOfRanks"][dataset], maxNumOfRanks)

self.props["maxIncTime"]["ensemble"] = maxIncTime
self.props["maxExcTime"]["ensemble"] = maxExcTime
self.props["minIncTime"]["ensemble"] = minIncTime
self.props["minExcTime"]["ensemble"] = minExcTime
# self.props["numOfRanks"]["ensemble"] = maxNumOfRanks
maxExcTime = max(self.config["maxExcTime"][tag], maxExcTime)
maxIncTime = max(self.config["maxIncTime"][tag], maxIncTime)
minExcTime = min(self.config["minExcTime"][tag], minExcTime)
minIncTime = min(self.config["minIncTime"][tag], minIncTime)
# maxNumOfRanks = max(self.config["numOfRanks"][dataset], maxNumOfRanks)

self.config["maxIncTime"]["ensemble"] = maxIncTime
self.config["maxExcTime"]["ensemble"] = maxExcTime
self.config["minIncTime"]["ensemble"] = minIncTime
self.config["minExcTime"]["ensemble"] = minExcTime
# self.config["numOfRanks"]["ensemble"] = maxNumOfRanks

def request_single(self, operation):
"""
Expand All @@ -278,7 +276,7 @@ def request_single(self, operation):
operation_name = operation["name"]

if operation_name == "init":
return self.props
return self.config

elif operation_name == "auxiliary":
return self.supergraphs[operation["dataset"]].auxiliary_data
Expand Down Expand Up @@ -312,10 +310,10 @@ def request_ensemble(self, operation):
Handles all the socket requests connected to Single CallFlow.
"""
operation_name = operation["name"]
datasets = self.props["dataset_names"]
datasets = self.config["properties"]["runs"]

if operation_name == "init":
return self.props
return self.config

elif operation_name == "ensemble_cct":
result = NodeLinkLayout(
Expand Down
4 changes: 2 additions & 2 deletions callflow/datastructures/ensemblegraph.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,14 @@ class EnsembleGraph(SuperGraph):
"""

# --------------------------------------------------------------------------
def __init__(self, props={}, tag="", mode="process", supergraphs={}):
def __init__(self, config={}, tag="", mode="process", supergraphs={}):
"""
Arguments:
supergraphs (dict): dictionary of supergraphs keyed by their tag.
"""
self.supergraphs = supergraphs

super().__init__(props, tag, mode)
super().__init__(config, tag, mode)

# --------------------------------------------------------------------------
def create_gf(self):
Expand Down
23 changes: 13 additions & 10 deletions callflow/datastructures/graphframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,11 +116,14 @@ def from_config(config, name):
LOGGER.info(f"Creating graphframes: {name}")
LOGGER.info(f"Data path: {config['data_path']}")

data_path = os.path.join(config["data_path"], config["paths"][name])
if config["format"][name] == "hpctoolkit":
data_path = os.path.join(
config["data_path"], config["properties"]["paths"][name]
)
profile_format = config["properties"]["format"][name]
if profile_format == "hpctoolkit":
gf = ht.GraphFrame.from_hpctoolkit(data_path)

elif config["format"][name] == "caliper":
elif profile_format == "caliper":
grouping_attribute = "function"
default_metric = "sum(sum#time.duration), inclusive_sum(sum#time.duration)"
query = "select function,%s group by %s format json-split" % (
Expand All @@ -129,16 +132,16 @@ def from_config(config, name):
)
gf = ht.GraphFrame.from_caliper(data_path, query=query)

elif config["format"][name] == "caliper_json":
gf = ht.GraphFrame.from_caliper(data_path, query="")
elif profile_format == "caliper-json":
gf = ht.GraphFrame.from_caliper_json(data_path)

elif config["format"][name] == "gprof":
elif profile_format == "gprof":
gf = ht.GraphFrame.from_gprof_dot(data_path)

elif config["format"][name] == "literal":
elif profile_format == "literal":
gf = ht.GraphFrame.from_literal(config["data_path"])

elif config["format"][name] == "lists":
elif profile_format == "lists":
gf = ht.GraphFrame.from_lists(config["data_path"])

return GraphFrame.from_hatchet(gf)
Expand All @@ -155,8 +158,8 @@ def hatchet_graph_to_nxg(ht_graph):

def _get_node_name(nd):
nm = callflow.utils.sanitize_name(nd["name"])
if nd["line"] != "NA":
nm += ":" + str(nd["line"])
if nd.get("line") != "NA" and nd.get("line") != None:
nm += ":" + str(nd.get("line"))
return nm

# `node_dict_from_frame` converts the hatchet's frame to a dictionary
Expand Down
Loading