diff --git a/entrypoint.sh b/entrypoint.sh
index 86fa6bd..a22114b 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -104,29 +104,14 @@ tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX)
 # Generate graph
 python3 /generate_build_graph.py -vt "$vt_build_time" -te "$tests_and_examples_build" -r "$GITHUB_RUN_NUMBER"
-# perf_test_files=$(find "$VT_BUILD_FOLDER/tests/" -name "*_mem.csv" | sed 's!.*/!!' | sed -e 's/_mem.csv$//')
 
 cd perf_tests
-
 python3 /generate_perf_graph.py
-
-# for file in $perf_test_files
-# do
-# # Each test generates both time/mem files
-# time_file="${file}_time.csv"
-# memory_file="${file}_mem.csv"
-
-# echo "Test files $VT_BUILD_FOLDER/tests/$time_file $VT_BUILD_FOLDER/tests/$memory_file for test: $file"
-
-# python3 /generate_perf_graph.py -time "$VT_BUILD_FOLDER/tests/$time_file"\
-# -mem "$VT_BUILD_FOLDER/tests/$memory_file" -r "$GITHUB_RUN_NUMBER" -wiki "$tmp_dir"
-# done
-
 cd -
 
 cp "$GITHUB_WORKSPACE/build_result.txt" "$INPUT_BUILD_STATS_OUTPUT"
 eval cp "$GITHUB_WORKSPACE/flame_heaptrack*" "./perf_tests/"
 
-python3 /generate_wiki_pages.py -t "$perf_test_files"
+python3 /generate_wiki_pages.py
 
 git add .
 git commit -m "$INPUT_COMMIT_MESSAGE"
diff --git a/generate_perf_graph.py b/generate_perf_graph.py
index bbb550f..a40fa88 100644
--- a/generate_perf_graph.py
+++ b/generate_perf_graph.py
@@ -8,18 +8,30 @@ GRAPH_HEIGHT = 10
 NUM_LAST_BUILDS = int(os.getenv("INPUT_NUM_LAST_BUILD", "30")) - 1
 VT_BUILD_FOLDER = os.getenv("VT_BUILD_FOLDER", "/build/vt")
+RUN_NUM = os.getenv("RUN_NUMBER")
+DATE = date.today().strftime("%d %B %Y")
+COMMIT_ID = os.getenv("GITHUB_SHA", "")
 
-def generate_bar_graph_for_single_value(test_file_name, title):
+
+def generate_bar_graph_for_single_value(test_file_name, title, history_title):
     time_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/{test_file_name}.csv")
 
-    _, ax = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
+    _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
     x_pos = range(len(time_df))
-    ax.bar(x=x_pos, height=time_df['mean'], yerr=time_df['stdev'], align='center', alpha=0.7, ecolor='black', capsize=10)
+    ax_1.bar(
+        x=x_pos,
+        height=time_df["mean"],
+        yerr=time_df["stdev"],
+        align="center",
+        alpha=0.7,
+        ecolor="black",
+        capsize=10,
+    )
 
     plt.title(title)
 
-    plt.xticks(x_pos, time_df['name'])
+    plt.xticks(x_pos, time_df["name"])
     plt.xlabel("")
 
     plt.ylabel("Time (ms)")
@@ -27,12 +39,51 @@ def generate_bar_graph_for_single_value(test_file_name, title):
 
     plt.savefig(f"{test_file_name}.png")
 
+    time_df["commit"] = COMMIT_ID
+    time_df["run_num"] = RUN_NUM
+
+    file_path = f"{test_file_name}_history.csv"
+    if os.path.exists(file_path):
+        past_results = pd.read_csv(file_path)
+    else:
+        past_results = pd.DataFrame(columns=time_df.columns)
+        past_results.to_csv(file_path, index=False)
+
+    updated_results = pd.concat([past_results, time_df], ignore_index=True)
+    updated_results.to_csv(file_path, index=False)
+
+    last_n_results = updated_results.tail(NUM_LAST_BUILDS)
+
+    _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
+    x_pos = range(len(last_n_results))
+
+    ax_1.bar(
+        x=x_pos,
+        height=last_n_results["mean"],
+        yerr=last_n_results["stdev"],
+        align="center",
+        alpha=0.7,
+        ecolor="black",
+        capsize=10,
+    )
+
+    plt.title(history_title)
+
+    plt.xticks(x_pos, last_n_results["run_num"])
+    plt.xlabel("Run numbers")
+
+    plt.ylabel("Time (ms)")
+    plt.tight_layout()
+
+    plt.savefig(f"{test_file_name}_history.png")
+
+
 def ping_pong():
-    df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_ping_pong_time.csv")
+    time_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_ping_pong_time.csv")
 
     # Split data by nodes
-    num_nodes = df['node'].nunique()
-    time_data = [df[df["node"] == node] for node in range(num_nodes)]
+    num_nodes = time_df["node"].nunique()
+    time_data = [time_df[time_df["node"] == node] for node in range(num_nodes)]
 
     # Create the plot
     _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
@@ -47,7 +98,7 @@ def ping_pong():
     ]
 
     for node in range(1, num_nodes):
-        bar_positions.append([x + bar_width for x in bar_positions[node-1]])
+        bar_positions.append([x + bar_width for x in bar_positions[node - 1]])
 
     for node in range(num_nodes):
         ax_1.bar(
@@ -72,7 +123,7 @@ def ping_pong():
 
     # Set y-axis label and scale
     ax_1.set_ylabel("Time (ms)")
-    ax_1.set_yscale('log')
+    ax_1.set_yscale("log")
 
     # Customize y-ticks
     y_ticks = [0.03, 1, 5, 40]
@@ -83,149 +134,161 @@ def ping_pong():
     plt.xticks(rotation=85)
     plt.tight_layout()
 
-    plt.savefig("ping_pong_time.png")
+    plt.savefig("test_ping_pong_time.png")
 
     memory_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_ping_pong_mem.csv")
     generate_memory_graph("ping_pong", memory_df)
 
+
 def ping_pong_am():
-    generate_bar_graph_for_single_value("test_ping_pong_am_time", "Time for sending message (ping-pong) 1000 times")
+    generate_bar_graph_for_single_value(
+        "test_ping_pong_am_time",
+        "Time for sending message (ping-pong) 1000 times",
+        "Past runs of ping_pong_am",
+    )
+
 
 def make_runnable_micro():
-    generate_bar_graph_for_single_value("test_make_runnable_micro_time", "Time for calling makeRunnable 1000 times")
+    generate_bar_graph_for_single_value(
+        "test_make_runnable_micro_time",
+        "Time for calling makeRunnable 1000 times",
+        "Past runs of make_runnable_micro",
+    )
+
 
 def objgroup_local_send():
-    generate_bar_graph_for_single_value("test_objgroup_local_send_time", "Time for ObjectGroup Local Send (1000 Iterations)")
+    generate_bar_graph_for_single_value(
+        "test_objgroup_local_send_time",
+        "Time for ObjectGroup Local Send (1000 Iterations)",
+        "Past runs of objgroup_local_send",
+    )
+
 
 def collection_local_send():
-    time_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_collection_local_send_time.csv")
-    time_prealloc_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_collection_local_send_preallocate_time.csv")
+    # Read data
+    time_df = pd.read_csv(
+        f"{VT_BUILD_FOLDER}/tests/test_collection_local_send_time.csv"
+    )
+    time_prealloc_df = pd.read_csv(
+        f"{VT_BUILD_FOLDER}/tests/test_collection_local_send_preallocate_time.csv"
+    )
 
     time_df["name"] = "allocate"
     time_prealloc_df["name"] = "preallocate"
 
     combined_df = pd.concat([time_df, time_prealloc_df], axis=0)
 
-    _, ax = plt.subplots()
+    # Plot current data
+    _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
     x_pos = range(len(combined_df))
-    ax.bar(x=x_pos, height=combined_df['mean'], yerr=combined_df['stdev'], align='center', alpha=0.7, ecolor='black', capsize=10)
-
+    ax_1.bar(
+        x=x_pos,
+        height=combined_df["mean"],
+        yerr=combined_df["stdev"],
+        align="center",
+        alpha=0.7,
+        ecolor="black",
+        capsize=10,
+    )
 
     plt.title("Time for Collection Local Send (1000 Iterations)")
-
-    plt.xticks(x_pos, combined_df['name'])
-    plt.xlabel("")
-
+    plt.xticks(x_pos, combined_df["name"])
+    plt.xlabel("Type")
     plt.ylabel("Time (ms)")
 
     plt.tight_layout()
+    plt.savefig("test_collection_local_send_time.png")
 
-    plt.savefig("./test_collection_local_send_time.png")
-
-def reduce():
-    time_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_reduce_time.csv")
-    memory_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_reduce_mem.csv")
-
-    # Extract iteration number from 'name'
-    time_df['iteration'] = time_df['name'].apply(lambda x: int(x.split()[0]))
-
-    _, ax = plt.subplots()
-    for node in time_df['node'].unique():
-        node_data = time_df[time_df['node'] == node]
-        _, caps, bars = ax.errorbar(node_data['iteration'], node_data['mean'], yerr=node_data['stdev'], fmt='-', label=f'Node {node}')
-
-        # loop through bars and caps and set the alpha value
-        [bar.set_alpha(0.3) for bar in bars]
-        [cap.set_alpha(0.3) for cap in caps]
-
-    ax.set_xlabel('Iteration')
-    ax.set_ylabel('Time (ms)')
-    ax.set_title('Reduce times over 100 iterations')
-    ax.legend()
-    plt.tight_layout()
-    plt.savefig("test_reduce_time.png")
-
-    generate_memory_graph("reduce", memory_df)
-
-def prepare_data():
-    """Parse the input data, read CSV file and append the new results"""
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "-time", "--time_test", help="Time-based results", required=True
-    )
-    parser.add_argument("-mem", "--memory_test", help="Memory usage", required=True)
-    parser.add_argument(
-        "-r", "--run_num", help="Run number", required=False, type=int, default=0
-    )
-    parser.add_argument(
-        "-wiki",
-        "--wiki_dir",
-        help="vt.wiki directory",
-        required=False,
-        type=str,
-        default=".",
-    )
+    ### Create historic graph
 
-    time_test_file = parser.parse_args().time_test
-    memory_test_file = parser.parse_args().memory_test
-    path_to_wiki = parser.parse_args().wiki_dir
+    combined_df["commit"] = COMMIT_ID
+    combined_df["run_num"] = RUN_NUM
 
-    time_df = pd.read_csv(time_test_file)
-    memory_df = pd.read_csv(memory_test_file)
+    file_path = "test_collection_local_send_time_history.csv"
+    if os.path.exists(file_path):
+        past_results = pd.read_csv(file_path)
+    else:
+        past_results = pd.DataFrame(columns=combined_df.columns)
 
-    num_nodes = time_df["node"].max() + 1
+    # Append new results and save
+    updated_results = pd.concat([past_results, combined_df], ignore_index=True)
+    updated_results.to_csv(file_path, index=False)
 
-    memory_data = []
-    time_data = []
-    for node in range(num_nodes):
-        node_memory_data = memory_df.loc[memory_df["node"] == node]
-        node_time_data = time_df.tail(-num_nodes).loc[time_df["node"] == node]
+    # Get last N results
+    last_n_results = updated_results.tail(NUM_LAST_BUILDS)
 
-        if not node_memory_data.empty:
-            memory_data.append(node_memory_data)
+    # Split data by type
+    time_data = {
+        "allocate": last_n_results[last_n_results["name"] == "allocate"],
+        "preallocate": last_n_results[last_n_results["name"] == "preallocate"],
+    }
 
-        if not node_time_data.empty:
-            time_data.append(node_time_data)
+    # Create the plot for historical data
+    _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
+    ax_1.set_title("Past Runs of Collection Local Send")
 
-    print(f"Memory: {memory_data}")
-    print(f"Time: {time_data}")
+    num_iter = list(range(len(time_data["allocate"])))
+    bar_width = 0.4
 
-    new_run_num = parser.parse_args().run_num
-    new_date = date.today().strftime("%d %B %Y")
-    commit_id = os.getenv("GITHUB_SHA", "")
+    bar_positions = {
+        "allocate": [i - bar_width / 2 for i in num_iter],
+        "preallocate": [i + bar_width / 2 for i in num_iter],
+    }
 
-    test_name = time_df["name"].loc[1]
-    out_dir = f"{path_to_wiki}/perf_tests"
-    file_name = f"{out_dir}/{test_name}_times.csv"
+    for name in ["allocate", "preallocate"]:
+        ax_1.bar(
+            bar_positions[name],
+            time_data[name]["mean"],
+            yerr=time_data[name]["stdev"],
+            label=f"{name}",
+            width=bar_width,
+            align="center",
+            alpha=0.9,
+            ecolor="black",
+            capsize=5.0,
+        )
 
-    current = time_df.head(num_nodes).copy()
+    ax_1.grid(True, which="both", ls="--", linewidth=0.5)
+    ax_1.set_xlabel("Run Number")
+    ax_1.set_xticks(num_iter)
+    ax_1.set_xticklabels(time_data["allocate"]["run_num"].astype(str))
+    ax_1.set_ylabel("Time (ms)")
+    ax_1.legend()
+    plt.tight_layout()
+    plt.savefig("test_collection_local_send_time_history.png")
 
-    if os.path.isfile(file_name):
-        total_df = pd.read_csv(file_name)
-        total_df = total_df.tail(NUM_LAST_BUILDS * num_nodes)
-
-        if new_run_num == 0:
-            new_run_num = total_df["run_num"].iloc[-1] + 1
+def reduce():
+    time_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_reduce_time.csv")
+    memory_df = pd.read_csv(f"{VT_BUILD_FOLDER}/tests/test_reduce_mem.csv")
 
-        for node in range(num_nodes):
-            current.loc[current["node"] == node, "run_num"] = new_run_num
-            current.loc[current["node"] == node, "date"] = new_date
-            current.loc[current["node"] == node, "commit"] = commit_id
-        current = pd.concat([total_df, current])
-    else:
-        for node in range(num_nodes):
-            current.loc[current["node"] == node, "run_num"] = new_run_num
-            current.loc[current["node"] == node, "date"] = new_date
-            current.loc[current["node"] == node, "commit"] = commit_id
+    # Extract iteration number from 'name'
+    time_df["iteration"] = time_df["name"].apply(lambda x: int(x.split()[0]))
 
-    if not os.path.exists(out_dir):
-        os.mkdir(out_dir)
+    _, ax_1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
+    for node in time_df["node"].unique():
+        node_data = time_df[time_df["node"] == node]
+        _, caps, bars = ax_1.errorbar(
+            node_data["iteration"],
+            node_data["mean"],
+            yerr=node_data["stdev"],
+            fmt="-",
+            label=f"Node {node}",
+        )
 
-    current.to_csv(file_name, index=False, float_format="%.3f")
-    generate_historic_graph(test_name, num_nodes, current)
+        # loop through bars and caps and set the alpha value
+        for my_bar in bars:
+            my_bar.set_alpha(0.3)
+        for my_cap in caps:
+            my_cap.set_alpha(0.3)
 
-    return test_name, time_data, memory_data
+    ax_1.set_xlabel("Iteration")
+    ax_1.set_ylabel("Time (ms)")
+    ax_1.set_title("Reduce times over 100 iterations")
+    ax_1.legend()
+    plt.tight_layout()
+    plt.savefig("test_reduce_time.png")
+    generate_memory_graph("reduce", memory_df)
 
 def set_graph_properties():
     small_size = 15
@@ -239,6 +302,7 @@ def set_graph_properties():
    plt.rc("legend", fontsize=small_size)
    plt.rc("figure", titlesize=big_size)
 
+
 def generate_memory_graph(test_name, memory_data):
     _, ax1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
 
@@ -246,17 +310,16 @@ def generate_memory_graph(test_name, memory_data):
     plt.xlabel("Iteration")
     plt.ylabel("Size (MiB)")
 
-    num_nodes = memory_data['node'].max() + 1
+    num_nodes = memory_data["node"].max() + 1
 
     for node in range(num_nodes):
-        node_data = memory_data[memory_data['node'] == node]
-        num_iter = list(range(len(node_data)))  # Ensure num_iter matches the length of node_data
+        node_data = memory_data[memory_data["node"] == node]
+        num_iter = list(
+            range(len(node_data))
+        )  # Ensure num_iter matches the length of node_data
 
         ax1.plot(
-            num_iter,
-            node_data['mem'] / 1024 / 1024,
-            label=f"Node {node}",
-            linewidth=4
+            num_iter, node_data["mem"] / 1024 / 1024, label=f"Node {node}", linewidth=4
        )
 
     ax1.xaxis.get_major_locator().set_params(integer=True)
@@ -264,56 +327,7 @@ def generate_memory_graph(test_name, memory_data):
     ax1.grid(True)
 
     plt.tight_layout()
-    plt.savefig(f"{test_name}_memory.png")
-
-
-def generate_historic_graph(test_name, num_nodes, dataframe):
-    _, ax1 = plt.subplots(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT), nrows=1, ncols=1)
-
-    ax1.set_title(f"{test_name} run history")
-    plt.xlabel("Run number")
-
-    run_nums = pd.unique(dataframe["run_num"]).tolist()
-    times = []
-    errors = []
-    for node in range(num_nodes):
-        times.append(dataframe["mean"].loc[dataframe["node"] == node].tolist())
-        errors.append(dataframe["stdev"].loc[dataframe["node"] == node].tolist())
-
-    print(f"generate_historic_graph::times: {times}")
-    print(f"generate_historic_graph::errors: {errors}")
-
-    bar_width = 1.0 / (2 * num_nodes)
-
-    bar_positions = [
-        [i - bar_width * (num_nodes / 2) + bar_width / 2 for i in run_nums]
-    ]
-
-    for node in range(num_nodes - 1):
-        bar_positions.append([x + bar_width for x in bar_positions[node]])
-
-    for node in range(num_nodes):
-        ax1.bar(
-            bar_positions[node],
-            times[node],
-            yerr=errors[node],
-            label=f"node {node}",
-            width=bar_width,
-            align="center",
-            alpha=0.9,
-            ecolor="black",
-            capsize=5.0,
-        )
-
-    ax1.xaxis.get_major_locator().set_params(integer=True)
-    ax1.legend()
-    ax1.grid(True)
-    ax1.set_ylabel("Time (ms)")
-
-    plt.tight_layout()
-
-    plt.savefig(f"{test_name}_past_runs.png")
-
+    plt.savefig(f"test_{test_name}_mem.png")
 
 if __name__ == "__main__":
     set_graph_properties()
@@ -324,4 +338,3 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
     make_runnable_micro()
     ping_pong_am()
     ping_pong()
-
diff --git a/generate_wiki_pages.py b/generate_wiki_pages.py
index ce46a71..733e780 100644
--- a/generate_wiki_pages.py
+++ b/generate_wiki_pages.py
@@ -1,5 +1,4 @@
 import os
-import argparse
 
 import matplotlib.pyplot as plt
 import pandas as pd
@@ -368,31 +367,26 @@ def create_md_build_page(last_builds, exp_temp_inst, exp_temp_sets, exp_headers)
 def create_md_perf_page(last_builds):
     perf_test_url = f"https://github.com/{REPO_NAME}/wiki/perf_tests/"
     content_with_all_tests = (
-        '# Test Results\n'
-        '## test_reduce\n'
+        "# Test Results\n"
+        "## test_reduce\n"
         f"{create_image_hyperlink(f'{perf_test_url}test_reduce_time.png')}\n"
         f"{create_image_hyperlink(f'{perf_test_url}test_reduce_mem.png')}\n"
-        '## collection_local_send\n'
-        f"{create_image_hyperlink(f'{perf_test_url}collection_local_send_time.png')}\n"
-        '## objgroup_local_send\n'
-        f"{create_image_hyperlink(f'{perf_test_url}objgroup_local_send_time.png')}\n"
-        '## make_runnable_micro\n'
-        f"{create_image_hyperlink(f'{perf_test_url}make_runnable_micro_time.png')}\n"
+        "## collection_local_send\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_collection_local_send_time.png')}\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_collection_local_send_time_history.png')}\n"
+        "## objgroup_local_send\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_objgroup_local_send_time.png')}\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_objgroup_local_send_time_history.png')}\n"
+        "## make_runnable_micro\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_make_runnable_micro_time.png')}\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_make_runnable_micro_time_history.png')}\n"
+        "## ping_pong\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_ping_pong_time.png')}\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_ping_pong_mem.png')}\n"
+        "## ping_pong_am\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_ping_pong_am_time.png')}\n"
+        f"{create_image_hyperlink(f'{perf_test_url}test_ping_pong_am_time_history.png')}\n"
     )
-    # content_with_all_tests = "# Test Results\n"
-
-    # list_of_test_names = test_names_string.split()
-    # for test_name in list_of_test_names:
-    #     past_runs_name = f"{test_name}_past_runs.png"
-    #     content_with_all_tests += (
-    #         f"## {test_name}\n"
-    #         f"{create_image_hyperlink(f'{perf_test_url}{past_runs_name}')}\n"
-    #     )
-
-    #     for file in os.listdir(f"{OUTPUT_DIR}/../perf_tests/"):
-    #         if file.startswith(test_name) and (file != past_runs_name):
-    #             link = create_image_hyperlink(f"{perf_test_url}{file}")
-    #             content_with_all_tests += f"{link}\n"
 
     file_content = (
         f"# Performance Tests\n"
diff --git a/script/local.sh b/script/local.sh
index eb23c00..83b9889 100755
--- a/script/local.sh
+++ b/script/local.sh
@@ -18,16 +18,16 @@ export INPUT_GRAPH_WIDTH=20
 export INPUT_GRAPH_HEIGHT=20
 export INPUT_BADGE_LOGO="logo"
 export INPUT_BADGE_FILENAME="badge_file"
-export CXX=clang++-13
-export CC=clang-13
+export CXX=clang++-15
+export CC=clang-15
 
 WORKSPACE=$1
-RUN_NUMBER=$2
 BUILD_STATS_DIR=$3
 
 cd "$WORKSPACE"
 
-VT_BUILD_FOLDER="$WORKSPACE/build/vt"
+export RUN_NUMBER=$2
+export VT_BUILD_FOLDER="$WORKSPACE/build/vt"
 
 ########################
 ## CLONE DEPENDENCIES ##
@@ -53,12 +58,17 @@ export VT_TESTS_ARGUMENTS="--vt_perf_gen_file"
 
 # Build VT lib
 [ ! -d 'vt' ] && git clone https://github.com/$GITHUB_REPOSITORY.git
+cd vt
+GITHUB_SHA=$(git rev-parse HEAD)
+export GITHUB_SHA=$GITHUB_SHA
+cd -
 
 eval "$BUILD_STATS_DIR/build_vt.sh" "$WORKSPACE/vt" "$WORKSPACE/build" "-ftime-trace" vt
-vt_build_time=$(grep -oP 'real\s+\K\d+m\d+\.\d+s' "$VT_BUILD_FOLDER/build_time.txt")
+vt_build_time=$(grep -oP 'real\s+\K\d+m\d+\,\d+s' "$VT_BUILD_FOLDER/build_time.txt")
+
 # Build tests and examples
 eval "$BUILD_STATS_DIR/build_vt.sh" "$WORKSPACE/vt" "$WORKSPACE/build" "-ftime-trace" all
-tests_and_examples_build=$(grep -oP 'real\s+\K\d+m\d+\.\d+s' "$VT_BUILD_FOLDER/build_time.txt")
+tests_and_examples_build=$(grep -oP 'real\s+\K\d+m\d+\,\d+s' "$VT_BUILD_FOLDER/build_time.txt")
 
 cp "$BUILD_STATS_DIR/ClangBuildAnalyzer.ini" .
 $ClangBuildTool --all "$VT_BUILD_FOLDER" vt-build
@@ -78,7 +83,7 @@ cd -
 
 # Running 'mpirun -n x heaptrack' will generate x number of separate files, one for each node/rank
 mpirun -n 2 heaptrack "$WORKSPACE/build/vt/examples/collection/jacobi2d_vt" 10 10 200
-jacobi_output_list=$(ls -- *heaptrack.jacobi2d_vt.*.gz)
+jacobi_output_list=$(ls -- *heaptrack.jacobi2d_vt.*.zst)
 
 node_num=0
 for file in ${jacobi_output_list}
@@ -105,26 +110,15 @@ done
 cd "$WIKI_DIR" || exit 1
 
 # Generate graph
-# python3 "$BUILD_STATS_DIR/generate_build_graph.py" -vt "$vt_build_time" -te "$tests_and_examples_build" -r "$RUN_NUMBER"
-perf_test_files=$(find "$VT_BUILD_FOLDER/tests/" -name "*_mem.csv" | sed 's!.*/!!' | sed -e 's/_mem.csv$//')
+python3 "$BUILD_STATS_DIR/generate_build_graph.py" -vt "$vt_build_time" -te "$tests_and_examples_build" -r "$RUN_NUMBER"
 
 cd perf_tests
-
-# for file in $perf_test_files
-# do
-# # Each test generates both time/mem files
-# time_file="${file}_time.csv"
-# memory_file="${file}_mem.csv"
-# echo "Test files $VT_BUILD_FOLDER/tests/$time_file $VT_BUILD_FOLDER/tests/$memory_file for test: $file"
-# python3 "$BUILD_STATS_DIR/generate_perf_graph.py" -time "$VT_BUILD_FOLDER/tests/$time_file"\
-# -mem "$VT_BUILD_FOLDER/tests/$memory_file" -r "$RUN_NUMBER" -wiki "$WIKI_DIR"
-# done
 
 python3 "$BUILD_STATS_DIR/generate_perf_graph.py"
 cd -
 
 cp "$WORKSPACE/build_result.txt" "$INPUT_BUILD_STATS_OUTPUT"
 eval cp "$WORKSPACE/flame_heaptrack*" "./perf_tests/"
-# python3 "$BUILD_STATS_DIR/generate_wiki_pages.py" -t "$perf_test_files"
+python3 "$BUILD_STATS_DIR/generate_wiki_pages.py"
 
 exit 0
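
Note (reviewer sketch, not part of the patch): the history tracking that generate_perf_graph.py gains here boils down to one pattern — append the current run's mean/stdev rows, tagged with run_num and commit, to a per-test "<name>_history.csv", then bar-plot the last NUM_LAST_BUILDS rows of that file against the run numbers. Below is a minimal, self-contained sketch of that pattern under those assumptions; the helper name append_and_plot_history and the sample data are illustrative only and do not appear in the patch.

    import os

    import matplotlib.pyplot as plt
    import pandas as pd

    NUM_LAST_BUILDS = int(os.getenv("INPUT_NUM_LAST_BUILD", "30")) - 1


    def append_and_plot_history(test_file_name, current_df, history_title):
        file_path = f"{test_file_name}_history.csv"

        # Load the accumulated history if it exists, otherwise start fresh
        if os.path.exists(file_path):
            past_results = pd.read_csv(file_path)
        else:
            past_results = pd.DataFrame(columns=current_df.columns)

        # Append the current run's rows and persist the full history
        updated = pd.concat([past_results, current_df], ignore_index=True)
        updated.to_csv(file_path, index=False)

        # Plot only the most recent builds, labelled by run number
        last_n = updated.tail(NUM_LAST_BUILDS)
        _, ax_1 = plt.subplots()
        x_pos = range(len(last_n))
        ax_1.bar(
            x=x_pos,
            height=last_n["mean"],
            yerr=last_n["stdev"],
            align="center",
            alpha=0.7,
            ecolor="black",
            capsize=10,
        )
        plt.title(history_title)
        plt.xticks(x_pos, last_n["run_num"])
        plt.xlabel("Run numbers")
        plt.ylabel("Time (ms)")
        plt.tight_layout()
        plt.savefig(f"{test_file_name}_history.png")


    if __name__ == "__main__":
        # Illustrative single-run data with the columns the patch relies on
        sample = pd.DataFrame(
            {"name": ["ping_pong_am"], "mean": [12.3], "stdev": [0.4],
             "run_num": [101], "commit": ["abc1234"]}
        )
        append_and_plot_history(
            "test_ping_pong_am_time", sample, "Past runs of ping_pong_am"
        )

The single-value tests (ping_pong_am, make_runnable_micro, objgroup_local_send) get this shape through generate_bar_graph_for_single_value, while collection_local_send keeps two rows per run (allocate/preallocate), so its history file grows by two rows per build and is plotted as grouped bars.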