23: Fix runtime and deprecation issues #25

Merged: 4 commits, Nov 28, 2023
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,4 +1,4 @@
-FROM lifflander1/vt:amd64-ubuntu-22.04-clang-11-cpp
+FROM lifflander1/vt:amd64-ubuntu-22.04-clang-14-cpp

RUN apt-get update -y -q && \
apt-get install -y -q --no-install-recommends \
29 changes: 0 additions & 29 deletions build_vt.sh
100644 → 100755 (mode change: the script is now executable)
@@ -5,36 +5,15 @@ set -ex
source_dir=${1}
build_dir=${2}
extra_flags=${3}
build_tests=${4}

# Dependency versions, when fetched via git.
-detector_rev=master
checkpoint_rev=develop

mkdir -p "${build_dir}"
pushd "${build_dir}"

-export DETECTOR_BUILD=${build_dir}/detector
export CHECKPOINT_BUILD=${build_dir}/checkpoint

-if test -d "${build_dir}/detector"
-then
-    { echo "Detector already in lib... not downloading, building, and installing"; } 2>/dev/null
-else
-    git clone -b "${detector_rev}" --depth 1 https://github.com/DARMA-tasking/detector.git
-    export DETECTOR=$PWD/detector
-
-    mkdir -p "$DETECTOR_BUILD"
-    cd "$DETECTOR_BUILD"
-    mkdir build
-    cd build
-    cmake -G "${CMAKE_GENERATOR:-Ninja}" \
-        -DCMAKE_INSTALL_PREFIX="$DETECTOR_BUILD/install" \
-        "$DETECTOR"
-    cmake --build . --target install
-fi


if test -d "${build_dir}/checkpoint"
then
{ echo "Checkpoint already in lib... not downloading, building, and installing"; } 2>/dev/null
@@ -89,7 +68,6 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \
-DCMAKE_CXX_COMPILER="${CXX:-c++}" \
-DCMAKE_C_COMPILER="${CC:-cc}" \
-DCMAKE_EXE_LINKER_FLAGS="${CMAKE_EXE_LINKER_FLAGS:-}" \
-    -Ddetector_DIR="$DETECTOR_BUILD/install" \
-Dcheckpoint_DIR="$CHECKPOINT_BUILD/install" \
-DCMAKE_PREFIX_PATH="${CMAKE_PREFIX_PATH:-}" \
-DCMAKE_INSTALL_PREFIX="$VT_BUILD/install" \
@@ -98,10 +76,3 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \
"$VT"

{ time cmake --build . --target "${4}" ; } 2> >(tee build_time.txt)


-if test "$use_ccache"
-then
-    { echo -e "===\n=== ccache statistics after build\n==="; } 2>/dev/null
-    ccache -s
-fi
27 changes: 15 additions & 12 deletions generate_build_graph.py
@@ -53,19 +53,22 @@ def prepare_data():

data_frame = pd.read_csv(previous_builds_filename)
last_builds = data_frame.tail(int(os.getenv("INPUT_NUM_LAST_BUILD")) - 1)
-    updated = last_builds.append(
-        pd.DataFrame(
-            [
-                [
-                    vt_total_time_seconds,
-                    tests_total_time_seconds,
-                    new_run_num,
-                    new_date,
-                    commit_id,
-                ]
-            ],
-            columns=["vt", "tests", "run_num", "date", "commit"],
-        )
-    )
+    updated = pd.concat(
+        [
+            last_builds,
+            pd.DataFrame(
+                [
+                    [
+                        vt_total_time_seconds,
+                        tests_total_time_seconds,
+                        new_run_num,
+                        new_date,
+                        commit_id,
+                    ]
+                ],
+                columns=["vt", "tests", "run_num", "date", "commit"],
+            ),
+        ]
+    )

# Data to be plotted
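The change above tracks a pandas API removal: `DataFrame.append` was deprecated in pandas 1.4 and removed in 2.0, so appending a row now goes through `pd.concat`. A minimal sketch of the pattern, with made-up column values:

```python
import pandas as pd

last_builds = pd.DataFrame([[100.0, 50.0]], columns=["vt", "tests"])
new_row = pd.DataFrame([[95.0, 48.0]], columns=["vt", "tests"])

# DataFrame.append was removed in pandas 2.0; pd.concat stacks the
# frames row-wise and is the supported replacement.
updated = pd.concat([last_builds, new_row])
print(updated)
```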
54 changes: 35 additions & 19 deletions generate_perf_graph.py
@@ -38,15 +38,22 @@ def prepare_data():
time_df = pd.read_csv(time_test_file)
memory_df = pd.read_csv(memory_test_file)

-    num_nodes = memory_df["node"].max() + 1
+    num_nodes = time_df["node"].max() + 1

-    memory_data = list()
-    time_data = list()
+    memory_data = []
+    time_data = []
for node in range(num_nodes):
-        memory_data.append(memory_df.loc[memory_df["node"] == node])
-        time_data.append(time_df.tail(-num_nodes).loc[time_df["node"] == node])
+        node_memory_data = memory_df.loc[memory_df["node"] == node]
+        node_time_data = time_df.tail(-num_nodes).loc[time_df["node"] == node]
+
+        if not node_memory_data.empty:
+            memory_data.append(node_memory_data)
+
+        if not node_time_data.empty:
+            time_data.append(node_time_data)

print(f"Memory: {memory_data}")
print(f"Time: {time_data}")

new_run_num = parser.parse_args().run_num
new_date = date.today().strftime("%d %B %Y")
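One detail worth noting in the loop above is `time_df.tail(-num_nodes)`: with a negative argument, `tail` returns every row except the first `num_nodes`, which here appears to skip the leading block of current-run rows before filtering by node. A small illustration with invented data:

```python
import pandas as pd

time_df = pd.DataFrame({"node": [0, 1, 0, 1], "mean": [9.0, 9.5, 1.2, 1.3]})

# tail(-2) drops the first two rows (the leading block), then the mask
# keeps only the rows recorded for node 0.
rest = time_df.tail(-2)
node_rows = rest.loc[rest["node"] == 0]
print(node_rows)
```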
@@ -56,23 +63,25 @@ def prepare_data():
out_dir = f"{path_to_wiki}/perf_tests"
file_name = f"{out_dir}/{test_name}_times.csv"

+    current = time_df.head(num_nodes).copy()

if os.path.isfile(file_name):
total_df = pd.read_csv(file_name)
total_df = total_df.tail(NUM_LAST_BUILDS * num_nodes)
-        current = time_df.head(num_nodes)

if new_run_num == 0:
new_run_num = total_df["run_num"].iloc[-1] + 1

current["run_num"] = [new_run_num for node in range(num_nodes)]
current["date"] = [new_date for node in range(num_nodes)]
current["commit"] = [commit_id for node in range(num_nodes)]
current = total_df.append(current)
for node in range(num_nodes):
current.loc[current["node"] == node, "run_num"] = new_run_num
current.loc[current["node"] == node, "date"] = new_date
current.loc[current["node"] == node, "commit"] = commit_id
current = pd.concat([total_df, current])
else:
-        current = time_df.head(num_nodes)
-        current["run_num"] = [new_run_num for node in range(num_nodes)]
-        current["date"] = [new_date for node in range(num_nodes)]
-        current["commit"] = [commit_id for node in range(num_nodes)]
+        for node in range(num_nodes):
+            current.loc[current["node"] == node, "run_num"] = new_run_num
+            current.loc[current["node"] == node, "date"] = new_date
+            current.loc[current["node"] == node, "commit"] = commit_id

if not os.path.exists(out_dir):
os.mkdir(out_dir)
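Two pandas idioms in this hunk are easy to miss: `head(num_nodes).copy()` makes `current` an independent frame so later assignments do not trigger `SettingWithCopyWarning` on a slice of `time_df`, and `.loc` with a boolean row mask updates a column in place for just the matching rows. A minimal sketch, with hypothetical values:

```python
import pandas as pd

time_df = pd.DataFrame({"node": [0, 1], "mean": [1.2, 1.3]})

# .copy() detaches the slice from time_df; writing to a plain head()
# slice can raise SettingWithCopyWarning and may not stick.
current = time_df.head(2).copy()

# Row-masked .loc assignment: set a column only where the mask holds.
for node in range(2):
    current.loc[current["node"] == node, "run_num"] = 7
print(current)
```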
@@ -191,12 +200,15 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
plt.xlabel("Run number")

run_nums = pd.unique(dataframe["run_num"]).tolist()
-    times = list()
-    errors = list()
+    times = []
+    errors = []
for node in range(num_nodes):
times.append(dataframe["mean"].loc[dataframe["node"] == node].tolist())
errors.append(dataframe["stdev"].loc[dataframe["node"] == node].tolist())

print(f"generate_historic_graph::times: {times}")
print(f"generate_historic_graph::errors: {errors}")

bar_width = 1.0 / (2 * num_nodes)

bar_positions = [
@@ -210,7 +222,7 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
ax1.bar(
bar_positions[node],
times[node],
-        yerr=errors,
+        yerr=errors[node],
label=f"node {node}",
width=bar_width,
align="center",
@@ -232,5 +244,9 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
if __name__ == "__main__":
set_graph_properties()
test_name_in, time_data_in, memory_data_in = prepare_data()
-    generate_memory_graph(test_name_in, memory_data_in)
-    generate_time_graph(test_name_in, time_data_in)
+
+    if len(memory_data_in) > 0:
+        generate_memory_graph(test_name_in, memory_data_in)
+
+    if len(time_data_in) > 0:
+        generate_time_graph(test_name_in, time_data_in)
31 changes: 16 additions & 15 deletions generate_wiki_pages.py
@@ -31,7 +31,7 @@ def get_name_times_avg(lines):
avg_ms_threshold = 20

total_times = []
-    name_times_avg = dict()
+    name_times_avg = {}

index = 0

@@ -80,7 +80,7 @@ def get_headers(lines):
"""

header_times = []
-    name_included_avg = dict()
+    name_included_avg = {}

index = 0

@@ -124,15 +124,15 @@ def generate_name_times_avg_table(templates_text):
def prepare_data():
# Expensive template instantiations
templates_total_times = []
-    templates = dict()
+    templates = {}

# Expensive template sets
template_sets_times = []
-    template_sets = dict()
+    template_sets = {}

# Expensive headers
headers_times = []
-    headers = dict()
+    headers = {}

with open(CLANG_BUILD_REPORT) as file:
lines = file.read().splitlines()
@@ -146,7 +146,7 @@ def prepare_data():
lines[idx + 1 :]
)

if line.startswith("*** Expensive headers:"):
if line.startswith("**** Expensive headers:"):
headers_times, headers = get_headers(lines[idx + 1 :])

return (
Expand Down Expand Up @@ -177,7 +177,7 @@ def generate_graph(name, templates_total_times):
templates_total_times = [t // 1000 for t in templates_total_times]

# Add x, y gridlines
-    ax_1.grid(b=True, color="grey", linestyle="-.", linewidth=0.5, alpha=0.8)
+    ax_1.grid(visible=True, color="grey", linestyle="-.", linewidth=0.5, alpha=0.8)

# Remove x, y Ticks
ax_1.xaxis.set_ticks_position("none")
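`grid(b=...)` is one of the deprecations the PR title refers to: the boolean first argument of `Axes.grid` was renamed from `b` to `visible` in Matplotlib 3.5, and passing `b=` warns on 3.5 and fails on newer releases. The keyword swap is the whole fix:

```python
import matplotlib.pyplot as plt

fig, ax_1 = plt.subplots()
# `visible` replaces the deprecated `b` keyword from Matplotlib 3.5 on;
# the remaining styling keywords are unchanged.
ax_1.grid(visible=True, color="grey", linestyle="-.", linewidth=0.5, alpha=0.8)
plt.savefig("grid.png")
```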
@@ -304,12 +304,13 @@ def create_image_hyperlink(image_link):
def get_runner_info():
return (
"**NOTE. The following builds were run on GitHub Action runners"
"that use [2-core CPU and 7 GB RAM]"
" that use [2-core CPU and 7 GB RAM]"
"(https://docs.github.com/en/actions/using-github-hosted-runners/"
"about-github-hosted-runners/"
"about-github-hosted-runners#supported-runners-and-hardware-resources)** <br><br> \n"
"Configuration:\n"
"- Compiler: **Clang-10**\n"
"- Linux: **Ubuntu 20.04**\n"
"- Compiler: **Clang-14**\n"
"- Linux: **Ubuntu 22.04**\n"
"- Build Type: **Release**\n"
"- Unity Build: **OFF**\n"
"- Production Mode: **OFF**\n"
@@ -370,19 +371,19 @@ def create_md_perf_page(last_builds):
"-t",
"--tests_names",
help="Perf tests names",
nargs="+",
default=[],
default="",
required=True,
)

-    test_names = parser.parse_args().tests_names
+    test_names_string = parser.parse_args().tests_names

perf_test_url = f"https://github.com/{REPO_NAME}/wiki/perf_tests/"
content_with_all_tests = "# Test Results\n"

-    for test_name in test_names:
+    list_of_test_names = test_names_string.split()
+    for test_name in list_of_test_names:
past_runs_name = f"{test_name}_past_runs.png"
-        content_with_all_tests = (
+        content_with_all_tests += (
f"## {test_name}\n"
f"{create_image_hyperlink(f'{perf_test_url}{past_runs_name}')}\n"
)