From 9c01f760691d0cbced7c73248dba47e050cb7ec2 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 20 Jun 2024 12:45:41 +0200 Subject: [PATCH 01/23] #2291: Add script for generating sample LBDatafile with lb_iter example --- scripts/check_lb_data_files.sh | 8 ++++ scripts/generate_and_validate_lb_data_file.py | 42 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 scripts/generate_and_validate_lb_data_file.py diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index af17686c44..3f04f385bf 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -18,6 +18,10 @@ function run_schema_validator() { fi } +# Use vt to generate LB Datafile +python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ + -g -s "${path_to_vt_src_dir}" -b "${path_to_vt_build_dir}" -i "LBData_from_lb_iter.%p.json" + find . -iname "*.json" | grep -v "compile_commands" | while read f do run_schema_validator "$f" @@ -27,3 +31,7 @@ find . 
-iname "*.json.br" | while read f do run_schema_validator "$f" done + +# # Use vt to generate LB Datafile +# python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ +# -v -s "${path_to_vt_src_dir}" -b "${path_to_vt_build_dir}" -i "${path_to_vt_build_dir}/LBData_from_lb_iter.json" diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py new file mode 100644 index 0000000000..7b0fa9b99a --- /dev/null +++ b/scripts/generate_and_validate_lb_data_file.py @@ -0,0 +1,42 @@ +import subprocess +import argparse + +def generate(vt_build, out_path): + """ + Runs vt lb_iter example to generate LBDatafile + """ + exe_path = vt_build + "/examples/collection/lb_iter" + out_dir = "--vt_lb_data_dir=" + vt_build + out_file = "--vt_lb_data_file=" + out_path + + args = (exe_path, "8", "1.0", "2", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) + runner = subprocess.Popen(args, stdout=subprocess.PIPE) + runner.wait() + +def validate(file_to_validate, reference_file): + """ + Compares file to validate wih reference + """ + + +def main(): + parser = argparse.ArgumentParser() + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("--generate", "-g", dest='generate', required=False, action='store_true') + group.add_argument("--validate", "-v", dest='validate', required=False, action='store_true') + + parser.add_argument("--vt-source-dir", "-s", dest='vt_source_dir', required=True) + parser.add_argument("--vt-build-dir", "-b", dest='vt_build_dir', required=True) + + parser.add_argument("--inout-file", "-i", dest='inout_file', required=True) + parser.add_argument("--reference-file", "-r", dest='reference_file', required=False) + args = parser.parse_args() + + if args.generate: + generate(args.vt_build_dir, args.inout_file) + if args.validate: + validate(args.inout_file, args.reference_file) + + +if __name__ == 
'__main__': + main() From 05ed83fc159aa56ec25d0dca2663441052395370 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 20 Jun 2024 12:46:01 +0200 Subject: [PATCH 02/23] #2291: Add sample LBDatafile --- examples/LBDatafile_example.json | 597 +++++++++++++++++++++++++++++++ 1 file changed, 597 insertions(+) create mode 100644 examples/LBDatafile_example.json diff --git a/examples/LBDatafile_example.json b/examples/LBDatafile_example.json new file mode 100644 index 0000000000..ab35bf164b --- /dev/null +++ b/examples/LBDatafile_example.json @@ -0,0 +1,597 @@ +{ + "metadata": { + "phases": { + "identical_to_previous": { + "list": [], + "range": [] + }, + "skipped": { + "list": [], + "range": [] + } + }, + "rank": 0, + "shared_node": { + "id": 0, + "num_nodes": 1, + "rank": 0, + "size": 1 + }, + "type": "LBDatafile" + }, + "phases": [ + { + "id": 0, + "tasks": [ + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 262147, + "index": [ + 0 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03478400000000004 + }, + { + "id": 1, + "time": 0.03648499999999999 + }, + { + "id": 2, + "time": 0.03616600000000003 + } + ], + "time": 0.10743500000000006 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1048579, + "index": [ + 3 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03548399999999999 + }, + { + "id": 1, + "time": 0.035468 + }, + { + "id": 2, + "time": 0.03576800000000002 + } + ], + "time": 0.10672000000000001 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1310723, + "index": [ + 4 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.035595 + }, + { + "id": 1, + "time": 0.03531400000000001 + }, + { + "id": 2, + "time": 0.035714000000000024 + } + ], + "time": 0.10662300000000004 + }, + { + 
"entity": { + "collection_id": 7, + "home": 0, + "id": 1572867, + "index": [ + 5 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.036309999999999995 + }, + { + "id": 1, + "time": 0.03493199999999996 + }, + { + "id": 2, + "time": 0.03623399999999988 + } + ], + "time": 0.10747599999999984 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 2097155, + "index": [ + 7 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03642 + }, + { + "id": 1, + "time": 0.034806000000000004 + }, + { + "id": 2, + "time": 0.036859000000000086 + } + ], + "time": 0.1080850000000001 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 786435, + "index": [ + 2 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03512700000000002 + }, + { + "id": 1, + "time": 0.03557100000000002 + }, + { + "id": 2, + "time": 0.035739999999999994 + } + ], + "time": 0.10643800000000003 + }, + { + "entity": { + "home": 0, + "id": 3145740, + "migratable": false, + "objgroup_id": 786435, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "time": 0.0 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1835011, + "index": [ + 6 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03513600000000001 + }, + { + "id": 1, + "time": 0.03484799999999999 + }, + { + "id": 2, + "time": 0.03629199999999999 + } + ], + "time": 0.10627599999999998 + }, + { + "entity": { + "home": 0, + "id": 4194316, + "migratable": false, + "objgroup_id": 1048579, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "time": 0.0 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 524291, + "index": [ + 1 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + 
"resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03499199999999997 + }, + { + "id": 1, + "time": 0.03605199999999997 + }, + { + "id": 2, + "time": 0.035818000000000016 + } + ], + "time": 0.10686199999999996 + }, + { + "entity": { + "home": 0, + "id": 1, + "migratable": false, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 3.999999999999989e-06 + } + ], + "time": 3.999999999999989e-06 + }, + { + "entity": { + "home": 0, + "id": 0, + "migratable": false, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "time": 0 + } + ] + }, + { + "id": 1, + "tasks": [ + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 262147, + "index": [ + 0 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03597600000000001 + }, + { + "id": 1, + "time": 0.035268999999999995 + }, + { + "id": 2, + "time": 0.03499399999999997 + } + ], + "time": 0.10624699999999998 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1048579, + "index": [ + 3 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.0353699999999999 + }, + { + "id": 1, + "time": 0.036229999999999984 + }, + { + "id": 2, + "time": 0.036222999999999894 + } + ], + "time": 0.10783199999999982 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1310723, + "index": [ + 4 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.035548000000000024 + }, + { + "id": 1, + "time": 0.036605999999999916 + }, + { + "id": 2, + "time": 0.036622000000000154 + } + ], + "time": 0.10878600000000005 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1572867, + "index": [ + 5 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 
0.036402000000000045 + }, + { + "id": 1, + "time": 0.03631400000000018 + }, + { + "id": 2, + "time": 0.03611299999999984 + } + ], + "time": 0.10884700000000003 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 2097155, + "index": [ + 7 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03591599999999995 + }, + { + "id": 1, + "time": 0.03705500000000006 + }, + { + "id": 2, + "time": 0.04765599999999992 + } + ], + "time": 0.12065899999999985 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 786435, + "index": [ + 2 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03472300000000006 + }, + { + "id": 1, + "time": 0.03625999999999996 + }, + { + "id": 2, + "time": 0.03612100000000007 + } + ], + "time": 0.10711300000000012 + }, + { + "entity": { + "home": 0, + "id": 3145740, + "migratable": false, + "objgroup_id": 786435, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 4.599999999999049e-05 + } + ], + "time": 4.599999999999049e-05 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 1835011, + "index": [ + 6 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03603400000000001 + }, + { + "id": 1, + "time": 0.03668999999999989 + }, + { + "id": 2, + "time": 0.03646700000000003 + } + ], + "time": 0.10920600000000003 + }, + { + "entity": { + "home": 0, + "id": 4194316, + "migratable": false, + "objgroup_id": 1048579, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "time": 0.0 + }, + { + "entity": { + "collection_id": 7, + "home": 0, + "id": 524291, + "index": [ + 1 + ], + "migratable": true, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 0.03766899999999995 + }, + { + "id": 1, + "time": 
0.03615600000000008 + }, + { + "id": 2, + "time": 0.03602900000000009 + } + ], + "time": 0.10986200000000013 + }, + { + "entity": { + "home": 0, + "id": 1, + "migratable": false, + "type": "object" + }, + "node": 0, + "resource": "cpu", + "subphases": [ + { + "id": 0, + "time": 2.2999999999884224e-05 + } + ], + "time": 2.2999999999884224e-05 + } + ] + } + ] +} \ No newline at end of file From da1a4bacd622c5b02c2dea94252ef127d260b701 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 20 Jun 2024 15:31:22 +0200 Subject: [PATCH 03/23] #2291: Add script for comparing the generate LBDatafile with the reference --- ci/docker/ubuntu-gnu-cpp.dockerfile | 2 +- scripts/check_lb_data_files.sh | 16 ++++--- scripts/generate_and_validate_lb_data_file.py | 42 +++++++++++++------ 3 files changed, 42 insertions(+), 18 deletions(-) diff --git a/ci/docker/ubuntu-gnu-cpp.dockerfile b/ci/docker/ubuntu-gnu-cpp.dockerfile index e5bf682cf7..40e06e9d7c 100644 --- a/ci/docker/ubuntu-gnu-cpp.dockerfile +++ b/ci/docker/ubuntu-gnu-cpp.dockerfile @@ -87,7 +87,7 @@ RUN apt-get update -y -q && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -RUN pip3 install schema +RUN pip3 install schema deepdiff FROM base as build COPY . /vt diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 3f04f385bf..3914a5b35b 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -19,8 +19,11 @@ function run_schema_validator() { } # Use vt to generate LB Datafile -python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ - -g -s "${path_to_vt_src_dir}" -b "${path_to_vt_build_dir}" -i "LBData_from_lb_iter.%p.json" +if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -g \ + -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.%p.json" +then + exit 2; +fi find . 
-iname "*.json" | grep -v "compile_commands" | while read f do @@ -32,6 +35,9 @@ do run_schema_validator "$f" done -# # Use vt to generate LB Datafile -# python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ -# -v -s "${path_to_vt_src_dir}" -b "${path_to_vt_build_dir}" -i "${path_to_vt_build_dir}/LBData_from_lb_iter.json" +# Use vt to generate LB Datafile +if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -v \ + -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.0.json" -r "${path_to_vt_src_dir}/examples/LBDatafile_example.json" +then + exit 3; +fi diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 7b0fa9b99a..086603daed 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -1,23 +1,43 @@ import subprocess import argparse +import json +import sys +from deepdiff import DeepDiff -def generate(vt_build, out_path): +def generate(vt_build, out_file_name): """ Runs vt lb_iter example to generate LBDatafile """ exe_path = vt_build + "/examples/collection/lb_iter" out_dir = "--vt_lb_data_dir=" + vt_build - out_file = "--vt_lb_data_file=" + out_path - + out_file = "--vt_lb_data_file=" + out_file_name + args = (exe_path, "8", "1.0", "2", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) runner = subprocess.Popen(args, stdout=subprocess.PIPE) - runner.wait() + exit_code = runner.wait() + if exit_code != 0: + sys.exit(1) -def validate(file_to_validate, reference_file): +def validate(vt_build, file_to_validate, reference_file): """ Compares file to validate wih reference """ - + print("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'.") + + with open(vt_build + "/" + file_to_validate) as val_file, open(reference_file) as ref_file: + to_validate = json.load(val_file) + reference = 
json.load(ref_file) + diff = DeepDiff(to_validate, reference, report_repetition=True, math_epsilon=0.001) + is_valid = not len(diff.affected_paths) + + if not is_valid: + sys.stderr.write("Detected differences:\n") + json.dump(str(diff), sys.stderr, indent=4) + sys.stderr.write("\n") + sys.stderr.flush() + sys.exit(1) + else: + print("Comparison OK.") def main(): parser = argparse.ArgumentParser() @@ -25,18 +45,16 @@ def main(): group.add_argument("--generate", "-g", dest='generate', required=False, action='store_true') group.add_argument("--validate", "-v", dest='validate', required=False, action='store_true') - parser.add_argument("--vt-source-dir", "-s", dest='vt_source_dir', required=True) parser.add_argument("--vt-build-dir", "-b", dest='vt_build_dir', required=True) - - parser.add_argument("--inout-file", "-i", dest='inout_file', required=True) + parser.add_argument("--file-name", "-f", dest='file_name', required=True) parser.add_argument("--reference-file", "-r", dest='reference_file', required=False) args = parser.parse_args() if args.generate: - generate(args.vt_build_dir, args.inout_file) + generate(args.vt_build_dir, args.file_name) if args.validate: - validate(args.inout_file, args.reference_file) - + validate(args.vt_build_dir, args.file_name, args.reference_file) + if __name__ == '__main__': main() From 32c0d5439735c812b5571d2405e0acd9a3aefd5a Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 24 Jun 2024 16:24:37 +0200 Subject: [PATCH 04/23] #2291: Increase math_epsilon for comparing time field --- scripts/generate_and_validate_lb_data_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 086603daed..8eb63ec0a5 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -27,7 +27,7 @@ def validate(vt_build, file_to_validate, reference_file): with open(vt_build + "/" 
+ file_to_validate) as val_file, open(reference_file) as ref_file: to_validate = json.load(val_file) reference = json.load(ref_file) - diff = DeepDiff(to_validate, reference, report_repetition=True, math_epsilon=0.001) + diff = DeepDiff(to_validate, reference, report_repetition=True, math_epsilon=0.1) is_valid = not len(diff.affected_paths) if not is_valid: From 1e42ecdc70147edf145db32a2447b5c02570ba3c Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 24 Jun 2024 16:38:31 +0200 Subject: [PATCH 05/23] #2291: Generate smaller example LBDatafile --- examples/LBDatafile_example.json | 448 +----------------- scripts/generate_and_validate_lb_data_file.py | 2 +- 2 files changed, 25 insertions(+), 425 deletions(-) diff --git a/examples/LBDatafile_example.json b/examples/LBDatafile_example.json index ab35bf164b..c4a3b38595 100644 --- a/examples/LBDatafile_example.json +++ b/examples/LBDatafile_example.json @@ -39,142 +39,26 @@ "subphases": [ { "id": 0, - "time": 0.03478400000000004 + "time": 0.037242 }, { "id": 1, - "time": 0.03648499999999999 + "time": 0.03718499999999997 }, { "id": 2, - "time": 0.03616600000000003 + "time": 0.03715999999999997 } ], - "time": 0.10743500000000006 + "time": 0.11158699999999994 }, { "entity": { "collection_id": 7, "home": 0, - "id": 1048579, - "index": [ - 3 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03548399999999999 - }, - { - "id": 1, - "time": 0.035468 - }, - { - "id": 2, - "time": 0.03576800000000002 - } - ], - "time": 0.10672000000000001 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 1310723, - "index": [ - 4 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.035595 - }, - { - "id": 1, - "time": 0.03531400000000001 - }, - { - "id": 2, - "time": 0.035714000000000024 - } - ], - "time": 0.10662300000000004 - }, - { - "entity": { - 
"collection_id": 7, - "home": 0, - "id": 1572867, - "index": [ - 5 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.036309999999999995 - }, - { - "id": 1, - "time": 0.03493199999999996 - }, - { - "id": 2, - "time": 0.03623399999999988 - } - ], - "time": 0.10747599999999984 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 2097155, - "index": [ - 7 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03642 - }, - { - "id": 1, - "time": 0.034806000000000004 - }, - { - "id": 2, - "time": 0.036859000000000086 - } - ], - "time": 0.1080850000000001 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 786435, + "id": 524291, "index": [ - 2 + 1 ], "migratable": true, "type": "object" @@ -184,18 +68,18 @@ "subphases": [ { "id": 0, - "time": 0.03512700000000002 + "time": 0.037272 }, { "id": 1, - "time": 0.03557100000000002 + "time": 0.03724400000000003 }, { "id": 2, - "time": 0.035739999999999994 + "time": 0.037127999999999994 } ], - "time": 0.10643800000000003 + "time": 0.11164400000000002 }, { "entity": { @@ -209,138 +93,6 @@ "resource": "cpu", "time": 0.0 }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 1835011, - "index": [ - 6 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03513600000000001 - }, - { - "id": 1, - "time": 0.03484799999999999 - }, - { - "id": 2, - "time": 0.03629199999999999 - } - ], - "time": 0.10627599999999998 - }, - { - "entity": { - "home": 0, - "id": 4194316, - "migratable": false, - "objgroup_id": 1048579, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "time": 0.0 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 524291, - "index": [ - 1 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - 
"id": 0, - "time": 0.03499199999999997 - }, - { - "id": 1, - "time": 0.03605199999999997 - }, - { - "id": 2, - "time": 0.035818000000000016 - } - ], - "time": 0.10686199999999996 - }, - { - "entity": { - "home": 0, - "id": 1, - "migratable": false, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 3.999999999999989e-06 - } - ], - "time": 3.999999999999989e-06 - }, - { - "entity": { - "home": 0, - "id": 0, - "migratable": false, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "time": 0 - } - ] - }, - { - "id": 1, - "tasks": [ - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 262147, - "index": [ - 0 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03597600000000001 - }, - { - "id": 1, - "time": 0.035268999999999995 - }, - { - "id": 2, - "time": 0.03499399999999997 - } - ], - "time": 0.10624699999999998 - }, { "entity": { "collection_id": 7, @@ -357,86 +109,24 @@ "subphases": [ { "id": 0, - "time": 0.0353699999999999 - }, - { - "id": 1, - "time": 0.036229999999999984 - }, - { - "id": 2, - "time": 0.036222999999999894 - } - ], - "time": 0.10783199999999982 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 1310723, - "index": [ - 4 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.035548000000000024 + "time": 0.037580999999999996 }, { "id": 1, - "time": 0.036605999999999916 + "time": 0.037215 }, { "id": 2, - "time": 0.036622000000000154 + "time": 0.037173999999999985 } ], - "time": 0.10878600000000005 + "time": 0.11196999999999999 }, { "entity": { - "collection_id": 7, "home": 0, - "id": 1572867, - "index": [ - 5 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.036402000000000045 - }, - { - "id": 1, - "time": 0.03631400000000018 - }, - { - "id": 
2, - "time": 0.03611299999999984 - } - ], - "time": 0.10884700000000003 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 2097155, - "index": [ - 7 - ], - "migratable": true, + "id": 1, + "migratable": false, "type": "object" }, "node": 0, @@ -444,18 +134,10 @@ "subphases": [ { "id": 0, - "time": 0.03591599999999995 - }, - { - "id": 1, - "time": 0.03705500000000006 - }, - { - "id": 2, - "time": 0.04765599999999992 + "time": 2.1999999999999884e-05 } ], - "time": 0.12065899999999985 + "time": 2.1999999999999884e-05 }, { "entity": { @@ -473,65 +155,18 @@ "subphases": [ { "id": 0, - "time": 0.03472300000000006 + "time": 0.03727900000000001 }, { "id": 1, - "time": 0.03625999999999996 + "time": 0.037498000000000004 }, { "id": 2, - "time": 0.03612100000000007 + "time": 0.03705200000000003 } ], - "time": 0.10711300000000012 - }, - { - "entity": { - "home": 0, - "id": 3145740, - "migratable": false, - "objgroup_id": 786435, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 4.599999999999049e-05 - } - ], - "time": 4.599999999999049e-05 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 1835011, - "index": [ - 6 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03603400000000001 - }, - { - "id": 1, - "time": 0.03668999999999989 - }, - { - "id": 2, - "time": 0.03646700000000003 - } - ], - "time": 0.10920600000000003 + "time": 0.11182900000000004 }, { "entity": { @@ -545,51 +180,16 @@ "resource": "cpu", "time": 0.0 }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 524291, - "index": [ - 1 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03766899999999995 - }, - { - "id": 1, - "time": 0.03615600000000008 - }, - { - "id": 2, - "time": 0.03602900000000009 - } - ], - "time": 0.10986200000000013 - }, { "entity": { "home": 0, - "id": 
1, + "id": 0, "migratable": false, "type": "object" }, "node": 0, "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 2.2999999999884224e-05 - } - ], - "time": 2.2999999999884224e-05 + "time": 0 } ] } diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 8eb63ec0a5..8a7cc37dd6 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -12,7 +12,7 @@ def generate(vt_build, out_file_name): out_dir = "--vt_lb_data_dir=" + vt_build out_file = "--vt_lb_data_file=" + out_file_name - args = (exe_path, "8", "1.0", "2", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) + args = (exe_path, "4", "1.0", "1", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) runner = subprocess.Popen(args, stdout=subprocess.PIPE) exit_code = runner.wait() if exit_code != 0: From 7d265f8b80a12481fa26744b184acceb30e2477f Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 24 Jun 2024 17:21:02 +0200 Subject: [PATCH 06/23] #2291: Run schema validation also on LBDatafile example --- scripts/check_lb_data_files.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 3914a5b35b..1daf2a6db5 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -30,6 +30,11 @@ do run_schema_validator "$f" done +find "${path_to_vt_src_dir}/examples" -iname "*.json" | while read f +do + run_schema_validator "$f" +done + find . 
-iname "*.json.br" | while read f do run_schema_validator "$f" From 83493653411b105f01643f04ccd07694b3b789b5 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 24 Jun 2024 17:21:32 +0200 Subject: [PATCH 07/23] #2291: Use subprocess.call instead of Popen to improve security --- scripts/generate_and_validate_lb_data_file.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 8a7cc37dd6..12739bc359 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -13,10 +13,9 @@ def generate(vt_build, out_file_name): out_file = "--vt_lb_data_file=" + out_file_name args = (exe_path, "4", "1.0", "1", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) - runner = subprocess.Popen(args, stdout=subprocess.PIPE) - exit_code = runner.wait() - if exit_code != 0: - sys.exit(1) + return_code = subprocess.call(args) + if return_code != 0: + sys.exit(return_code) def validate(vt_build, file_to_validate, reference_file): """ From f1b362abddb0f29c1004182f1ec7826d208c2153 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 24 Jun 2024 17:26:46 +0200 Subject: [PATCH 08/23] #2291: Make testing logs more readable --- scripts/check_lb_data_files.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 1daf2a6db5..47095bfc41 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -8,6 +8,7 @@ cd "$path_to_vt_build_dir" || exit 1 function run_schema_validator() { file=$1 + echo "" echo "Running schema validator on: $file" if python3 "${path_to_vt_src_dir}/scripts/JSON_data_files_validator.py" --file_path="$file" then From ba9fb14ecf89c78be67e64cd1ec45c9f387780b4 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Tue, 25 Jun 
2024 15:31:38 +0200 Subject: [PATCH 09/23] #2291: Use shortest possible LBDatafile example --- examples/LBDatafile_example.json | 107 ++---------------- scripts/check_lb_data_files.sh | 2 +- scripts/generate_and_validate_lb_data_file.py | 10 +- 3 files changed, 16 insertions(+), 103 deletions(-) diff --git a/examples/LBDatafile_example.json b/examples/LBDatafile_example.json index c4a3b38595..50b4e47433 100644 --- a/examples/LBDatafile_example.json +++ b/examples/LBDatafile_example.json @@ -39,47 +39,18 @@ "subphases": [ { "id": 0, - "time": 0.037242 + "time": 0.037584 }, { "id": 1, - "time": 0.03718499999999997 + "time": 0.037717999999999995 }, { "id": 2, - "time": 0.03715999999999997 + "time": 0.038012000000000004 } ], - "time": 0.11158699999999994 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 524291, - "index": [ - 1 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.037272 - }, - { - "id": 1, - "time": 0.03724400000000003 - }, - { - "id": 2, - "time": 0.037127999999999994 - } - ], - "time": 0.11164400000000002 + "time": 0.113314 }, { "entity": { @@ -95,32 +66,15 @@ }, { "entity": { - "collection_id": 7, "home": 0, - "id": 1048579, - "index": [ - 3 - ], - "migratable": true, + "id": 4194316, + "migratable": false, + "objgroup_id": 1048579, "type": "object" }, "node": 0, "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.037580999999999996 - }, - { - "id": 1, - "time": 0.037215 - }, - { - "id": 2, - "time": 0.037173999999999985 - } - ], - "time": 0.11196999999999999 + "time": 0.0 }, { "entity": { @@ -134,51 +88,10 @@ "subphases": [ { "id": 0, - "time": 2.1999999999999884e-05 - } - ], - "time": 2.1999999999999884e-05 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 786435, - "index": [ - 2 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 0.03727900000000001 - }, 
- { - "id": 1, - "time": 0.037498000000000004 - }, - { - "id": 2, - "time": 0.03705200000000003 + "time": 2.2000000000000318e-05 } ], - "time": 0.11182900000000004 - }, - { - "entity": { - "home": 0, - "id": 4194316, - "migratable": false, - "objgroup_id": 1048579, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "time": 0.0 + "time": 2.2000000000000318e-05 }, { "entity": { diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 47095bfc41..2256403dec 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -42,7 +42,7 @@ do done # Use vt to generate LB Datafile -if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -v \ +if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -c \ -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.0.json" -r "${path_to_vt_src_dir}/examples/LBDatafile_example.json" then exit 3; diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 12739bc359..4fd7b173c5 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -12,12 +12,12 @@ def generate(vt_build, out_file_name): out_dir = "--vt_lb_data_dir=" + vt_build out_file = "--vt_lb_data_file=" + out_file_name - args = (exe_path, "4", "1.0", "1", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) + args = (exe_path, "1", "1.0", "1", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) return_code = subprocess.call(args) if return_code != 0: sys.exit(return_code) -def validate(vt_build, file_to_validate, reference_file): +def compare(vt_build, file_to_validate, reference_file): """ Compares file to validate wih reference """ @@ -42,7 +42,7 @@ def main(): parser = argparse.ArgumentParser() group = 
parser.add_mutually_exclusive_group(required=True) group.add_argument("--generate", "-g", dest='generate', required=False, action='store_true') - group.add_argument("--validate", "-v", dest='validate', required=False, action='store_true') + group.add_argument("--compare", "-c", dest='compare', required=False, action='store_true') parser.add_argument("--vt-build-dir", "-b", dest='vt_build_dir', required=True) parser.add_argument("--file-name", "-f", dest='file_name', required=True) @@ -51,8 +51,8 @@ def main(): if args.generate: generate(args.vt_build_dir, args.file_name) - if args.validate: - validate(args.vt_build_dir, args.file_name, args.reference_file) + if args.compare: + compare(args.vt_build_dir, args.file_name, args.reference_file) if __name__ == '__main__': From 7d54e972c9bd2a7e89fe51f3990d49f7df7fda39 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Tue, 25 Jun 2024 17:23:52 +0200 Subject: [PATCH 10/23] #2291: Extract LBDatafile schema to separate python file --- .gitignore | 1 + scripts/JSON_data_files_validator.py | 89 +--------------------------- scripts/LBDatafile_schema.py | 84 ++++++++++++++++++++++++++ 3 files changed, 88 insertions(+), 86 deletions(-) create mode 100644 scripts/LBDatafile_schema.py diff --git a/.gitignore b/.gitignore index ee9ae770e9..e9ede979b0 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ compile_commands.json *.lo *.o *.obj +*.pyc # Precompiled Headers *.gch diff --git a/scripts/JSON_data_files_validator.py b/scripts/JSON_data_files_validator.py index 1f6301592f..9bed3eb368 100644 --- a/scripts/JSON_data_files_validator.py +++ b/scripts/JSON_data_files_validator.py @@ -15,7 +15,8 @@ import brotli from schema import And, Optional, Schema - +# Import VT related schemas +import LBDatafile_schema def exc_handler(exception_type, exception, traceback): """ Exception handler for hiding traceback. 
""" @@ -38,91 +39,7 @@ def get_error_message(iterable_collection: Iterable) -> str: def _get_valid_schema(self) -> Schema: """ Returns representation of a valid schema """ - allowed_types_data = ("LBDatafile") - valid_schema_data = Schema( - { - Optional('type'): And(str, lambda a: a in allowed_types_data, - error=f"{self.get_error_message(allowed_types_data)} must be chosen"), - Optional('metadata'): { - Optional('type'): And(str, lambda a: a in allowed_types_data, - error=f"{self.get_error_message(allowed_types_data)} must be chosen"), - Optional('rank'): int, - Optional('shared_node'): { - 'id': int, - 'size': int, - 'rank': int, - 'num_nodes': int, - }, - Optional('phases'): { - Optional('count'): int, - 'skipped': { - 'list': [int], - 'range': [[int]], - }, - 'identical_to_previous': { - 'list': [int], - 'range': [[int]], - }, - }, - Optional('attributes'): dict - }, - 'phases': [ - { - 'id': int, - 'tasks': [ - { - 'entity': { - Optional('collection_id'): int, - 'home': int, - 'id': int, - Optional('index'): [int], - 'type': str, - 'migratable': bool, - Optional('objgroup_id'): int - }, - 'node': int, - 'resource': str, - Optional('subphases'): [ - { - 'id': int, - 'time': float, - } - ], - 'time': float, - Optional('user_defined'): dict, - Optional('attributes'): dict - }, - ], - Optional('communications'): [ - { - 'type': str, - 'to': { - 'type': str, - 'id': int, - Optional('home'): int, - Optional('collection_id'): int, - Optional('migratable'): bool, - Optional('index'): [int], - Optional('objgroup_id'): int, - }, - 'messages': int, - 'from': { - 'type': str, - 'id': int, - Optional('home'): int, - Optional('collection_id'): int, - Optional('migratable'): bool, - Optional('index'): [int], - Optional('objgroup_id'): int, - }, - 'bytes': float - } - ], - Optional('user_defined'): dict - }, - ] - } - ) + valid_schema_data = LBDatafile_schema.LBDatafile_schema allowed_types_stats = ("LBStatsfile") valid_schema_stats = Schema( { diff --git 
a/scripts/LBDatafile_schema.py b/scripts/LBDatafile_schema.py new file mode 100644 index 0000000000..fadb471b32 --- /dev/null +++ b/scripts/LBDatafile_schema.py @@ -0,0 +1,84 @@ +from schema import And, Optional, Schema + +LBDatafile_schema = Schema( + { + Optional('type'): And(str, "LBDatafile", error="'LBDatafile' must be chosen."), + Optional('metadata'): { + Optional('type'): And(str, "LBDatafile", error="'LBDatafile' must be chosen."), + Optional('rank'): int, + Optional('shared_node'): { + 'id': int, + 'size': int, + 'rank': int, + 'num_nodes': int, + }, + Optional('phases'): { + Optional('count'): int, + 'skipped': { + 'list': [int], + 'range': [[int]], + }, + 'identical_to_previous': { + 'list': [int], + 'range': [[int]], + }, + }, + Optional('attributes'): dict + }, + 'phases': [ + { + 'id': int, + 'tasks': [ + { + 'entity': { + Optional('collection_id'): int, + 'home': int, + 'id': int, + Optional('index'): [int], + 'type': str, + 'migratable': bool, + Optional('objgroup_id'): int + }, + 'node': int, + 'resource': str, + Optional('subphases'): [ + { + 'id': int, + 'time': float, + } + ], + 'time': float, + Optional('user_defined'): dict, + Optional('attributes'): dict + }, + ], + Optional('communications'): [ + { + 'type': str, + 'to': { + 'type': str, + 'id': int, + Optional('home'): int, + Optional('collection_id'): int, + Optional('migratable'): bool, + Optional('index'): [int], + Optional('objgroup_id'): int, + }, + 'messages': int, + 'from': { + 'type': str, + 'id': int, + Optional('home'): int, + Optional('collection_id'): int, + Optional('migratable'): bool, + Optional('index'): [int], + Optional('objgroup_id'): int, + }, + 'bytes': float + } + ], + Optional('user_defined'): dict + }, + ] + } +) \ No newline at end of file From 704ab38f9cfc251cc5d8efb17d106472ea1115a4 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Tue, 25 Jun 2024 17:24:28 +0200 Subject: [PATCH 11/23] #2291: Include scripts directory in doxygen --- 
cmake/load_doxygen.cmake | 1 + docs/Doxyfile.in | 1 + 2 files changed, 2 insertions(+) diff --git a/cmake/load_doxygen.cmake b/cmake/load_doxygen.cmake index 616dcdf7cd..fc66775090 100644 --- a/cmake/load_doxygen.cmake +++ b/cmake/load_doxygen.cmake @@ -19,6 +19,7 @@ if (${vt_doxygen_enabled}) set(DOXYGEN_CHECKPOINT_SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/lib/checkpoint/src") set(DOXYGEN_DOCS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/docs/") set(DOXYGEN_EXAMPLE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/examples/") + set(DOXYGEN_SCRIPTS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/scripts/") set(DOXYGEN_TUTORIAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/tutorial/") set(DOXYGEN_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/docs/") set(DOXYGEN_MAIN_PAGE "${CMAKE_CURRENT_SOURCE_DIR}/src/vt.md") diff --git a/docs/Doxyfile.in b/docs/Doxyfile.in index 7ad64443af..7f93254cd1 100644 --- a/docs/Doxyfile.in +++ b/docs/Doxyfile.in @@ -936,6 +936,7 @@ EXCLUDE_SYMBOLS = EXAMPLE_PATH = "@DOXYGEN_EXAMPLE_DIR@" \ "@DOXYGEN_TUTORIAL_DIR@" \ + "@DOXYGEN_SCRIPTS_DIR@" \ "@DOXYGEN_CHECKPOINT_EXAMPLE_DIR@" # If the value of the EXAMPLE_PATH tag contains directories, you can use the From 1133ded13b31229582b527ead7917251e44c1814 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Tue, 25 Jun 2024 17:26:54 +0200 Subject: [PATCH 12/23] #2291: Replace static code snippets with real files for LBDatafile example and validation schema --- docs/md/node-lb-data.md | 207 +--------------------------------------- 1 file changed, 4 insertions(+), 203 deletions(-) diff --git a/docs/md/node-lb-data.md b/docs/md/node-lb-data.md index 3a4268bff5..0a8aac784a 100644 --- a/docs/md/node-lb-data.md +++ b/docs/md/node-lb-data.md @@ -42,122 +42,7 @@ contains information about the task that performed this work. If that `entity` is a virtual collection object, it will specify the unique `id` for the object, and optionally the `index`, `home`, and `collection_id` for that object. 
-\code{.json} -{ - "phases": [ - { - "id": 0, - "tasks": [ - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 3407875, - "index": [ - 12 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 1.1263000033068238e-05 - }, - { - "id": 1, - "time": 1.1333999964335817e-05 - } - ], - "time": 3.379300005690311e-05 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 3145731, - "index": [ - 11 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 1.1653000001388136e-05 - }, - { - "id": 1, - "time": 1.1435000033088727e-05 - } - ], - "time": 3.452300006756559e-05 - } - ] - }, - { - "id": 1, - "tasks": [ - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 3407875, - "index": [ - 12 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 3.207300005669822e-05 - }, - { - "id": 1, - "time": 1.1347999816280208e-05 - } - ], - "time": 5.658399982166884e-05 - }, - { - "entity": { - "collection_id": 7, - "home": 0, - "id": 3145731, - "index": [ - 11 - ], - "migratable": true, - "type": "object" - }, - "node": 0, - "resource": "cpu", - "subphases": [ - { - "id": 0, - "time": 1.3647000059791026e-05 - }, - { - "id": 1, - "time": 1.1320000112391426e-05 - } - ], - "time": 3.787500008911593e-05 - } - ] - } - ] -} -\endcode +\include examples/LBDatafile_example.json Each phase in the file may also have a `communications` array that specify any communication between tasks that occurred during the phase. Each communication @@ -228,98 +113,14 @@ The type of communication lines up with the enum For all the broadcast-like edges, the communication logging will occur on the receive of the broadcast side (one entry per broadcast recipient). 
-\section JSON_data_files_validator.py +\section lb-data-file-validator LB Data File Validator All input JSON files will be validated using the `JSON_data_files_validator.py` found in the `scripts` directory, which ensures that a given JSON adheres to the following schema: -\code{.py} -Schema( - { - Optional('type'): And(str, lambda a: a in allowed_types_data, - error=f"{self.get_error_message(allowed_types_data)} must be chosen"), - Optional('metadata'): { - Optional('type'): And(str, lambda a: a in allowed_types_data, - error=f"{self.get_error_message(allowed_types_data)} must be chosen"), - Optional('rank'): int, - Optional('shared_node'): { - 'id': int, - 'size': int, - 'rank': int, - 'num_nodes': int, - }, - Optional('phases'): { - Optional('count'): int, - 'skipped': { - 'list': [int], - 'range': [[int]], - }, - 'identical_to_previous': { - 'list': [int], - 'range': [[int]], - }, - }, - Optional('attributes'): dict - }, - 'phases': [ - { - 'id': int, - 'tasks': [ - { - 'entity': { - Optional('collection_id'): int, - 'home': int, - 'id': int, - Optional('index'): [int], - 'type': str, - 'migratable': bool, - Optional('objgroup_id'): int - }, - 'node': int, - 'resource': str, - Optional('subphases'): [ - { - 'id': int, - 'time': float, - } - ], - 'time': float, - Optional('user_defined'): dict, - Optional('attributes'): dict - }, - ], - Optional('communications'): [ - { - 'type': str, - 'to': { - 'type': str, - 'id': int, - Optional('home'): int, - Optional('collection_id'): int, - Optional('migratable'): bool, - Optional('index'): [int], - Optional('objgroup_id'): int, - }, - 'messages': int, - 'from': { - 'type': str, - 'id': int, - Optional('home'): int, - Optional('collection_id'): int, - Optional('migratable'): bool, - Optional('index'): [int], - Optional('objgroup_id'): int, - }, - 'bytes': float - } - ], - Optional('user_defined'): dict - }, - ] - } -) -\endcode +\include scripts/LBDatafile_schema.py \section lb-spec-file LB Specification File + In order 
to customize when LB output is enabled and disabled, a LB specification file can be passed to \vt via a command-line flag: `--vt_lb_spec --vt_lb_spec_file=filename.spec`. From 99b4849161a223832d5020a66c88fa0a7aaf3a22 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Wed, 26 Jun 2024 13:29:24 +0200 Subject: [PATCH 13/23] #2291: Fix possibility for false positive CI result when checking LBDatafiles --- scripts/check_lb_data_files.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 2256403dec..0e5581c41d 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -xo pipefail +set -exo pipefail path_to_vt_build_dir=${1} path_to_vt_src_dir=${2} From 7f80722aa6ac46e9d7d6459f767e1387e3eab556 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Wed, 26 Jun 2024 15:40:18 +0200 Subject: [PATCH 14/23] #2291: Update LBDatafile example --- docs/md/node-lb-data.md | 2 +- .../lb_data_file_example.json} | 43 +++++++++++++------ scripts/check_lb_data_files.sh | 2 +- 3 files changed, 33 insertions(+), 14 deletions(-) rename examples/{LBDatafile_example.json => lb_data/lb_data_file_example.json} (71%) diff --git a/docs/md/node-lb-data.md b/docs/md/node-lb-data.md index 0a8aac784a..c706fcd014 100644 --- a/docs/md/node-lb-data.md +++ b/docs/md/node-lb-data.md @@ -42,7 +42,7 @@ contains information about the task that performed this work. If that `entity` is a virtual collection object, it will specify the unique `id` for the object, and optionally the `index`, `home`, and `collection_id` for that object. -\include examples/LBDatafile_example.json +\include examples/lb_data/lb_data_file_example.json Each phase in the file may also have a `communications` array that specify any communication between tasks that occurred during the phase. 
Each communication diff --git a/examples/LBDatafile_example.json b/examples/lb_data/lb_data_file_example.json similarity index 71% rename from examples/LBDatafile_example.json rename to examples/lb_data/lb_data_file_example.json index 50b4e47433..f2350bd369 100644 --- a/examples/LBDatafile_example.json +++ b/examples/lb_data/lb_data_file_example.json @@ -21,6 +21,33 @@ }, "phases": [ { + "communications": [ + { + "bytes": 160.0, + "from": { + "collection_id": 7, + "home": 0, + "id": 262147, + "index": [ + 0 + ], + "migratable": true, + "type": "object" + }, + "messages": 1, + "to": { + "collection_id": 7, + "home": 0, + "id": 262147, + "index": [ + 0 + ], + "migratable": true, + "type": "object" + }, + "type": "SendRecv" + } + ], "id": 0, "tasks": [ { @@ -39,18 +66,10 @@ "subphases": [ { "id": 0, - "time": 0.037584 - }, - { - "id": 1, - "time": 0.037717999999999995 - }, - { - "id": 2, - "time": 0.038012000000000004 + "time": 0.0009759999999999994 } ], - "time": 0.113314 + "time": 0.0009759999999999994 }, { "entity": { @@ -88,10 +107,10 @@ "subphases": [ { "id": 0, - "time": 2.2000000000000318e-05 + "time": 2.400000000000015e-05 } ], - "time": 2.2000000000000318e-05 + "time": 2.400000000000015e-05 }, { "entity": { diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 0e5581c41d..6cdd866049 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -43,7 +43,7 @@ done # Use vt to generate LB Datafile if ! 
python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -c \ - -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.0.json" -r "${path_to_vt_src_dir}/examples/LBDatafile_example.json" + -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.0.json" -r "${path_to_vt_src_dir}/examples/lb_data/lb_data_file_example.json" then exit 3; fi From 12a7ae99df242a740e9e38c69a98295d238e36da Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Wed, 26 Jun 2024 15:41:07 +0200 Subject: [PATCH 15/23] #2291: Add simple example to generate LBDatafile with communications field populated --- examples/CMakeLists.txt | 1 + examples/lb_data/CMakeLists.txt | 9 ++ examples/lb_data/lb_data_file_generator.cc | 98 ++++++++++++++++++++++ 3 files changed, 108 insertions(+) create mode 100644 examples/lb_data/CMakeLists.txt create mode 100644 examples/lb_data/lb_data_file_generator.cc diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index e8023a20ec..2b7aed7791 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -46,5 +46,6 @@ add_subdirectory(callback) add_subdirectory(collection) add_subdirectory(group) add_subdirectory(hello_world) +add_subdirectory(lb_data) add_subdirectory(rdma) add_subdirectory(termination) diff --git a/examples/lb_data/CMakeLists.txt b/examples/lb_data/CMakeLists.txt new file mode 100644 index 0000000000..8caa947ca4 --- /dev/null +++ b/examples/lb_data/CMakeLists.txt @@ -0,0 +1,9 @@ + +set( + LB_DATA_EXAMPLES + lb_data_file_generator +) + +foreach(EXAMPLE_NAME ${LB_DATA_EXAMPLES}) + add_example(${EXAMPLE_NAME}) +endforeach() diff --git a/examples/lb_data/lb_data_file_generator.cc b/examples/lb_data/lb_data_file_generator.cc new file mode 100644 index 0000000000..53478a92d9 --- /dev/null +++ b/examples/lb_data/lb_data_file_generator.cc @@ -0,0 +1,98 @@ +/* +//@HEADER +// ***************************************************************************** +// +// lb_data_file_generator.cc +// DARMA/vt => Virtual Transport 
+// +// Copyright 2019-2021 National Technology & Engineering Solutions of Sandia, LLC +// (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. +// Government retains certain rights in this software. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// * Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// * Neither the name of the copyright holder nor the names of its +// contributors may be used to endorse or promote products derived from this +// software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +// POSSIBILITY OF SUCH DAMAGE. +// +// Questions? 
Contact darma@sandia.gov +// +// ***************************************************************************** +//@HEADER +*/ + +#include <vt/transport.h> + +struct WorkCol : vt::Collection { + void someWork(int subphase) { + this->lb_data_.setSubPhase(subphase); + + // Do some work + double someVal = 0.1f; + double someVal2 = 1.0f; + for (int i = 0; i < 100000; i++) { + someVal *= i + someVal2; + someVal2 += someVal; + } + + // Generate data for communications field + vt::NodeType this_node = vt::theContext()->getNode(); + auto proxy = this->getCollectionProxy(); + proxy(this_node).send<&WorkCol::receiveVal>(someVal2); + } + + void receiveVal(int) { } +}; + +int main(int argc, char** argv) { + vt::initialize(argc, argv); + + int32_t num_phases = argc > 1 ? atoi(argv[1]) : 1; + int32_t num_subphases = argc > 2 ? atoi(argv[2]) : 1; + int32_t num_elms = argc > 3 ? atoi(argv[3]) : 1; + + if (vt::theContext()->getNode() == 0) { + fmt::print( + "lb_data_file_generator: num_phases={}, num_subphases={}, num_elms={}, " + "\n", + num_phases, num_subphases, num_elms); + } + + auto range = vt::Index1D(num_elms); + auto proxy = vt::makeCollection<WorkCol>("examples_lb_data_file_generator") + .bounds(range) + .bulkInsert() + .wait(); + + for (int32_t phase = 0; phase < num_phases; phase++) { + for (int32_t sub = 0; sub < num_subphases; sub++) { + vt::runInEpochCollective( + [=] { proxy.broadcastCollective<&WorkCol::someWork>(sub); }); + } + + vt::thePhase()->nextPhaseCollective(); + } + + vt::finalize(); + return 0; +} \ No newline at end of file From 8ab7e623927aa17dffba8aeab0f7c6663cc92e21 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 27 Jun 2024 16:11:25 +0200 Subject: [PATCH 16/23] #2291: Enable generation of LBDatafiles by all examples present in vt --- ci/build_cpp.sh | 1 + cmake/configure_options.cmake | 1 + cmake/test_vt.cmake | 18 ++++++++++++++++-- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/ci/build_cpp.sh b/ci/build_cpp.sh index 76e405ddb5..669914e5b4
100755 --- a/ci/build_cpp.sh +++ b/ci/build_cpp.sh @@ -151,6 +151,7 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \ -Dvt_tests_num_nodes="${VT_TESTS_NUM_NODES:-}" \ -Dvt_external_fmt="${VT_EXTERNAL_FMT:-0}" \ -Dvt_no_color_enabled="${VT_NO_COLOR_ENABLED:-0}" \ + -Dvt_test_lb_schema="${TEST_LB_SCHEMA:-0}" \ -DCMAKE_CXX_STANDARD="${CMAKE_CXX_STANDARD:-17}" \ -DBUILD_SHARED_LIBS="${BUILD_SHARED_LIBS:-0}" \ "$VT" diff --git a/cmake/configure_options.cmake b/cmake/configure_options.cmake index edf15845bf..8927d0873b 100644 --- a/cmake/configure_options.cmake +++ b/cmake/configure_options.cmake @@ -150,6 +150,7 @@ define_option(vt_werror_enabled "-Werror" "Build VT with -Werror enabled" OFF em define_option(vt_build_tests "tests" "Build VT tests" ON empty_feature) define_option(vt_build_tools "tools" "Build VT tools" ON empty_feature) define_option(vt_build_examples "examples" "Build VT examples" ON empty_feature) +define_option(vt_test_lb_schema "lb schema tests" "Enable testing of LBDatafile schema" OFF empty_feature) option(vt_external_fmt "Build VT with external fmt" OFF) if(${vt_external_fmt}) diff --git a/cmake/test_vt.cmake b/cmake/test_vt.cmake index 8412dcc412..2fb7af5ce1 100644 --- a/cmake/test_vt.cmake +++ b/cmake/test_vt.cmake @@ -17,16 +17,20 @@ function(run_executable_with_mpi) WRAPPER_EXECUTABLE ) set( - multiValueArg + multiValueArgs TARGET_ARGS WRAPPER_ARGS ) - set(allKeywords ${noValOption} ${singleValArg} ${multiValueArg}) cmake_parse_arguments( ARG "${noValOption}" "${singleValArg}" "${multiValueArgs}" ${ARGN} ) + # Stop the configuration if there are any unparsed arguments + if (ARG_UNPARSED_ARGUMENTS) + message(FATAL_ERROR "found unparsed arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + if (NOT DEFINED ARG_EXECUTE_WITH_WRAPPER) set(ARG_WRAPPER_EXECUTABLE "") set(ARG_WRAPPER_ARGS "") @@ -108,6 +112,16 @@ macro(add_test_for_example_vt test_target test_exec test_list) list(APPEND EXEC_ARGS "--vt_trace") endif() + # Append parameters required for the
examples to output LBDatafiles. + if (vt_test_lb_schema) + list(APPEND EXEC_ARGS + "--vt_lb_interval=1" + "--vt_lb_data" + "--vt_lb_data_compress=false" + "--vt_lb_data_file=${test_name}_${PROC}_LBDatafile.%p.json" + "--vt_lb_data_dir=.") + endif() + run_executable_with_mpi( TARGET_EXECUTABLE ${test_name} TARGET_ARGS ${EXEC_ARGS} From 1f3d8fbf1165d04b11a9c5e6f7bb2ec62fe67055 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 27 Jun 2024 16:47:07 +0200 Subject: [PATCH 17/23] #2291: Update python script to use output of the lb_data_file_generator example --- cmake/test_vt.cmake | 2 +- scripts/check_lb_data_files.sh | 16 ++++------ scripts/generate_and_validate_lb_data_file.py | 30 +++---------------- 3 files changed, 10 insertions(+), 38 deletions(-) diff --git a/cmake/test_vt.cmake b/cmake/test_vt.cmake index 2fb7af5ce1..7fa90c330f 100644 --- a/cmake/test_vt.cmake +++ b/cmake/test_vt.cmake @@ -118,7 +118,7 @@ macro(add_test_for_example_vt test_target test_exec test_list) "--vt_lb_interval=1" "--vt_lb_data" "--vt_lb_data_compress=false" - "--vt_lb_data_file=${test_name}_${PROC}_LBDatafile.%p.json" + "--vt_lb_data_file=${test_name}_${PROC}_LBDatafile.%p.json" "--vt_lb_data_dir=.") endif() diff --git a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 6cdd866049..7c6e79e9e9 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -19,13 +19,6 @@ function run_schema_validator() { fi } -# Use vt to generate LB Datafile -if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -g \ - -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.%p.json" -then - exit 2; -fi - find . -iname "*.json" | grep -v "compile_commands" | while read f do run_schema_validator "$f" @@ -41,9 +34,10 @@ do run_schema_validator "$f" done -# Use vt to generate LB Datafile -if ! 
python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" -c \ - -b "${path_to_vt_build_dir}" -f "LBData_from_lb_iter.0.json" -r "${path_to_vt_src_dir}/examples/lb_data/lb_data_file_example.json" +# Compare output of the lb_data_file_generator example with reference file +if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ + -f "${path_to_vt_build_dir}/examples/lb_data/lb_data_file_generator_1_LBDatafile.0.json" \ + -r "${path_to_vt_src_dir}/examples/lb_data/lb_data_file_example.json" then - exit 3; + exit 2; fi diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/generate_and_validate_lb_data_file.py index 4fd7b173c5..66ebd904c6 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/generate_and_validate_lb_data_file.py @@ -1,29 +1,15 @@ -import subprocess import argparse import json import sys from deepdiff import DeepDiff -def generate(vt_build, out_file_name): - """ - Runs vt lb_iter example to generate LBDatafile - """ - exe_path = vt_build + "/examples/collection/lb_iter" - out_dir = "--vt_lb_data_dir=" + vt_build - out_file = "--vt_lb_data_file=" + out_file_name - - args = (exe_path, "1", "1.0", "1", "--vt_lb", "--vt_lb_interval=1", "--vt_lb_name=RotateLB", "--vt_lb_data", "--vt_lb_data_compress=false", out_dir, out_file) - return_code = subprocess.call(args) - if return_code != 0: - sys.exit(return_code) - -def compare(vt_build, file_to_validate, reference_file): +def compare(file_to_validate, reference_file): """ Compares file to validate wih reference """ print("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'.") - with open(vt_build + "/" + file_to_validate) as val_file, open(reference_file) as ref_file: + with open(file_to_validate) as val_file, open(reference_file) as ref_file: to_validate = json.load(val_file) reference = json.load(ref_file) diff = DeepDiff(to_validate, reference, report_repetition=True, math_epsilon=0.1) @@ -40,19 
+26,11 @@ def compare(vt_build, file_to_validate, reference_file): def main(): parser = argparse.ArgumentParser() - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument("--generate", "-g", dest='generate', required=False, action='store_true') - group.add_argument("--compare", "-c", dest='compare', required=False, action='store_true') - - parser.add_argument("--vt-build-dir", "-b", dest='vt_build_dir', required=True) - parser.add_argument("--file-name", "-f", dest='file_name', required=True) + parser.add_argument("--file-to-check", "-f", dest='file', required=True) parser.add_argument("--reference-file", "-r", dest='reference_file', required=False) args = parser.parse_args() - if args.generate: - generate(args.vt_build_dir, args.file_name) - if args.compare: - compare(args.vt_build_dir, args.file_name, args.reference_file) + compare(args.file, args.reference_file) if __name__ == '__main__': From 60066eac7d5535555f9e1cddb0157d04ba07217d Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Sat, 29 Jun 2024 13:42:18 +0200 Subject: [PATCH 18/23] #2291: Update LBDatafile example --- examples/lb_data/lb_data_file_example.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/lb_data/lb_data_file_example.json b/examples/lb_data/lb_data_file_example.json index f2350bd369..061a3fe210 100644 --- a/examples/lb_data/lb_data_file_example.json +++ b/examples/lb_data/lb_data_file_example.json @@ -23,7 +23,7 @@ { "communications": [ { - "bytes": 160.0, + "bytes": 152.0, "from": { "collection_id": 7, "home": 0, @@ -66,10 +66,10 @@ "subphases": [ { "id": 0, - "time": 0.0009759999999999994 + "time": 0.0003119999999999999 } ], - "time": 0.0009759999999999994 + "time": 0.0003119999999999999 }, { "entity": { @@ -107,10 +107,10 @@ "subphases": [ { "id": 0, - "time": 2.400000000000015e-05 + "time": 9.000000000000002e-06 } ], - "time": 2.400000000000015e-05 + "time": 9.000000000000002e-06 }, { "entity": { @@ -121,7 
+121,7 @@ }, "node": 0, "resource": "cpu", - "time": 0 + "time": 0.0 } ] } From a0ec659569bb7c0bfe91a27e5317e69944961158 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Mon, 1 Jul 2024 15:04:05 +0200 Subject: [PATCH 19/23] #2291: Update pip3 in the docker image to be able to install newer packages --- ci/docker/ubuntu-gnu-cpp.dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ci/docker/ubuntu-gnu-cpp.dockerfile b/ci/docker/ubuntu-gnu-cpp.dockerfile index 40e06e9d7c..89cd6e498d 100644 --- a/ci/docker/ubuntu-gnu-cpp.dockerfile +++ b/ci/docker/ubuntu-gnu-cpp.dockerfile @@ -87,7 +87,8 @@ RUN apt-get update -y -q && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -RUN pip3 install schema deepdiff +RUN pip3 install --upgrade pip \ + && pip3 install schema deepdiff FROM base as build COPY . /vt From df3886cf74b6e962f689d29e345a50f6cb921e3e Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Thu, 11 Jul 2024 16:11:31 +0200 Subject: [PATCH 20/23] #2291: Update scripts for the output to be less noisy --- examples/lb_data/lb_data_file_generator.cc | 2 +- scripts/LBDatafile_schema.py | 2 +- scripts/check_lb_data_files.sh | 18 ++++-------------- ...lb_data_file.py => compare_lb_data_file.py} | 7 +++---- 4 files changed, 9 insertions(+), 20 deletions(-) rename scripts/{generate_and_validate_lb_data_file.py => compare_lb_data_file.py} (79%) diff --git a/examples/lb_data/lb_data_file_generator.cc b/examples/lb_data/lb_data_file_generator.cc index 53478a92d9..7b6ed04794 100644 --- a/examples/lb_data/lb_data_file_generator.cc +++ b/examples/lb_data/lb_data_file_generator.cc @@ -95,4 +95,4 @@ int main(int argc, char** argv) { vt::finalize(); return 0; -} \ No newline at end of file +} diff --git a/scripts/LBDatafile_schema.py b/scripts/LBDatafile_schema.py index fadb471b32..bdbbeb84d2 100644 --- a/scripts/LBDatafile_schema.py +++ b/scripts/LBDatafile_schema.py @@ -81,4 +81,4 @@ }, ] } -) \ No newline at end of file +) diff --git 
a/scripts/check_lb_data_files.sh b/scripts/check_lb_data_files.sh index 7c6e79e9e9..66301ef535 100755 --- a/scripts/check_lb_data_files.sh +++ b/scripts/check_lb_data_files.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -exo pipefail +set -eo pipefail path_to_vt_build_dir=${1} path_to_vt_src_dir=${2} @@ -8,7 +8,6 @@ cd "$path_to_vt_build_dir" || exit 1 function run_schema_validator() { file=$1 - echo "" echo "Running schema validator on: $file" if python3 "${path_to_vt_src_dir}/scripts/JSON_data_files_validator.py" --file_path="$file" then @@ -19,23 +18,14 @@ function run_schema_validator() { fi } -find . -iname "*.json" | grep -v "compile_commands" | while read f -do - run_schema_validator "$f" -done - -find "${path_to_vt_src_dir}/examples" -iname "*.json" | while read f -do - run_schema_validator "$f" -done - -find . -iname "*.json.br" | while read f +find . "${path_to_vt_src_dir}/examples" -iname "*.json" -o -iname "*.json.br" \ + | grep -v "compile_commands" | while read f do run_schema_validator "$f" done # Compare output of the lb_data_file_generator example with reference file -if ! python3 "${path_to_vt_src_dir}/scripts/generate_and_validate_lb_data_file.py" \ +if ! 
python3 "${path_to_vt_src_dir}/scripts/compare_lb_data_file.py" \ -f "${path_to_vt_build_dir}/examples/lb_data/lb_data_file_generator_1_LBDatafile.0.json" \ -r "${path_to_vt_src_dir}/examples/lb_data/lb_data_file_example.json" then diff --git a/scripts/generate_and_validate_lb_data_file.py b/scripts/compare_lb_data_file.py similarity index 79% rename from scripts/generate_and_validate_lb_data_file.py rename to scripts/compare_lb_data_file.py index 66ebd904c6..ce16b0d656 100644 --- a/scripts/generate_and_validate_lb_data_file.py +++ b/scripts/compare_lb_data_file.py @@ -7,8 +7,6 @@ def compare(file_to_validate, reference_file): """ Compares file to validate wih reference """ - print("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'.") - with open(file_to_validate) as val_file, open(reference_file) as ref_file: to_validate = json.load(val_file) reference = json.load(ref_file) @@ -16,18 +14,19 @@ def compare(file_to_validate, reference_file): is_valid = not len(diff.affected_paths) if not is_valid: + sys.stderr.write("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'... Failed!\n") sys.stderr.write("Detected differences:\n") json.dump(str(diff), sys.stderr, indent=4) sys.stderr.write("\n") sys.stderr.flush() sys.exit(1) else: - print("Comparison OK.") + print("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'... 
Status OK.") def main(): parser = argparse.ArgumentParser() parser.add_argument("--file-to-check", "-f", dest='file', required=True) - parser.add_argument("--reference-file", "-r", dest='reference_file', required=False) + parser.add_argument("--reference-file", "-r", dest='reference_file', required=True) args = parser.parse_args() compare(args.file, args.reference_file) From 7684481b4ed9756eedf4dbef2288ba82df6a6d1e Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Fri, 12 Jul 2024 13:29:34 +0200 Subject: [PATCH 21/23] #2291: Update printing in the python compare script --- scripts/compare_lb_data_file.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/compare_lb_data_file.py b/scripts/compare_lb_data_file.py index ce16b0d656..f17b491576 100644 --- a/scripts/compare_lb_data_file.py +++ b/scripts/compare_lb_data_file.py @@ -11,17 +11,17 @@ def compare(file_to_validate, reference_file): to_validate = json.load(val_file) reference = json.load(ref_file) diff = DeepDiff(to_validate, reference, report_repetition=True, math_epsilon=0.1) - is_valid = not len(diff.affected_paths) - if not is_valid: - sys.stderr.write("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'... Failed!\n") + message = f"Comparing '{file_to_validate}' with reference file '{reference_file}'..." + if diff: + sys.stderr.write(f"{message} Failed!\n") sys.stderr.write("Detected differences:\n") json.dump(str(diff), sys.stderr, indent=4) sys.stderr.write("\n") sys.stderr.flush() sys.exit(1) else: - print("Comparing '" + file_to_validate + "' with reference file '" + reference_file + "'... 
Status OK.") + print(f"{message} Status OK.") def main(): parser = argparse.ArgumentParser() From 7eefe77855fdea024b483ea4450b96f56d3f0694 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Fri, 12 Jul 2024 15:04:02 +0200 Subject: [PATCH 22/23] #2291: Fix typo in test_vt cmake file --- cmake/test_vt.cmake | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/cmake/test_vt.cmake b/cmake/test_vt.cmake index 7fa90c330f..3f28f82307 100644 --- a/cmake/test_vt.cmake +++ b/cmake/test_vt.cmake @@ -66,12 +66,17 @@ function(build_mpi_proc_test_list) set(noValOption) set(singleValArg MAX_PROC VARIABLE_OUT ) - set(multiValueArg) - set(allKeywords ${noValOption} ${singleValArg} ${multiValueArg}) + set(multiValueArgs) + set(allKeywords ${noValOption} ${singleValArg} ${multiValueArgs}) cmake_parse_arguments( ARG "${noValOption}" "${singleValArg}" "${multiValueArgs}" ${ARGN} ) + # Stop the configurtion if there are any unparsed arguments + if (ARG_UNPARSED_ARGUMENTS) + message(FATAL_ERROR "found unparsed arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + if (NOT DEFINED ARG_MAX_PROC) # Default to 8 processors set(ARG_MAX_PROC "8") From f52d6686c25d261cae9bf317dc6ba42d2641c573 Mon Sep 17 00:00:00 2001 From: Arkadiusz Szczepkowicz Date: Fri, 12 Jul 2024 15:09:45 +0200 Subject: [PATCH 23/23] #2291: Update name of the variable used to enable the tests of LBdatafiles --- ci/azure/azure-clang-10-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-11-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-12-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-13-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-14-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-9-ubuntu-mpich.yml | 2 +- ci/azure/azure-clang-alpine-mpich.yml | 2 +- ci/azure/azure-gcc-10-ubuntu-openmpi.yml | 2 +- ci/azure/azure-gcc-11-ubuntu-mpich.yml | 2 +- ci/azure/azure-gcc-12-ubuntu-mpich.yml | 2 +- ci/azure/azure-gcc-8-ubuntu-mpich.yml | 2 +- ci/azure/azure-gcc-9-ubuntu-mpich.yml | 2 +- 
ci/azure/azure-intel-oneapi-icpc-ubuntu-mpich.yml | 2 +- ci/azure/azure-intel-oneapi-icpx-ubuntu-mpich.yml | 2 +- ci/azure/azure-nvidia-11-2-ubuntu-mpich.yml | 2 +- ci/azure/azure-nvidia-12-ubuntu-mpich.yml | 2 +- ci/build_cpp.sh | 2 +- ci/test_cpp.sh | 2 +- cmake/configure_options.cmake | 2 +- cmake/test_vt.cmake | 2 +- docker-compose.yml | 2 +- scripts/workflow-azure-template.yml | 2 +- scripts/workflows-azure.ini | 4 ++-- 23 files changed, 24 insertions(+), 24 deletions(-) diff --git a/ci/azure/azure-clang-10-ubuntu-mpich.yml b/ci/azure/azure-clang-10-ubuntu-mpich.yml index 0f6b2cb65b..a27dfe336e 100644 --- a/ci/azure/azure-clang-10-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-10-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-10-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-11-ubuntu-mpich.yml b/ci/azure/azure-clang-11-ubuntu-mpich.yml index 281a162626..472f2621a3 100644 --- a/ci/azure/azure-clang-11-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-11-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-11-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-12-ubuntu-mpich.yml b/ci/azure/azure-clang-12-ubuntu-mpich.yml index 8139910e3e..ed04fe94e2 100644 --- a/ci/azure/azure-clang-12-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-12-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-12-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-13-ubuntu-mpich.yml b/ci/azure/azure-clang-13-ubuntu-mpich.yml index 73619bf76c..0cf61444a7 
100644 --- a/ci/azure/azure-clang-13-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-13-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-13-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-14-ubuntu-mpich.yml b/ci/azure/azure-clang-14-ubuntu-mpich.yml index 84015c9de0..49241f8d2b 100644 --- a/ci/azure/azure-clang-14-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-14-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 1 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-14-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-9-ubuntu-mpich.yml b/ci/azure/azure-clang-9-ubuntu-mpich.yml index f610c3880c..94558b9e23 100644 --- a/ci/azure/azure-clang-9-ubuntu-mpich.yml +++ b/ci/azure/azure-clang-9-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-clang-9-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-clang-alpine-mpich.yml b/ci/azure/azure-clang-alpine-mpich.yml index 3c41ba56e2..b9ae33d1e3 100644 --- a/ci/azure/azure-clang-alpine-mpich.yml +++ b/ci/azure/azure-clang-alpine-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: alpine-clang-13-cache volume_name: alpine-cpp diff --git a/ci/azure/azure-gcc-10-ubuntu-openmpi.yml b/ci/azure/azure-gcc-10-ubuntu-openmpi.yml index 62cea11228..7497831d01 100644 --- a/ci/azure/azure-gcc-10-ubuntu-openmpi.yml +++ b/ci/azure/azure-gcc-10-ubuntu-openmpi.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 
VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-gcc-10-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-gcc-11-ubuntu-mpich.yml b/ci/azure/azure-gcc-11-ubuntu-mpich.yml index 7805b1f6c0..4c27fe6faa 100644 --- a/ci/azure/azure-gcc-11-ubuntu-mpich.yml +++ b/ci/azure/azure-gcc-11-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-gcc-11-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-gcc-12-ubuntu-mpich.yml b/ci/azure/azure-gcc-12-ubuntu-mpich.yml index 35be998973..b8b73ec0dc 100644 --- a/ci/azure/azure-gcc-12-ubuntu-mpich.yml +++ b/ci/azure/azure-gcc-12-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 1 VT_KOKKOS_ENABLED: 1 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-gcc-12-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-gcc-8-ubuntu-mpich.yml b/ci/azure/azure-gcc-8-ubuntu-mpich.yml index b286ab0bc5..50c5b26ab4 100644 --- a/ci/azure/azure-gcc-8-ubuntu-mpich.yml +++ b/ci/azure/azure-gcc-8-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-gcc-8-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-gcc-9-ubuntu-mpich.yml b/ci/azure/azure-gcc-9-ubuntu-mpich.yml index 358d8f9047..d9d4c84f7a 100644 --- a/ci/azure/azure-gcc-9-ubuntu-mpich.yml +++ b/ci/azure/azure-gcc-9-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 1 + VT_CI_TEST_LB_SCHEMA: 1 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-gcc-9-cache volume_name: ubuntu-cpp diff --git 
a/ci/azure/azure-intel-oneapi-icpc-ubuntu-mpich.yml b/ci/azure/azure-intel-oneapi-icpc-ubuntu-mpich.yml index d60306c4ae..f2918c71b7 100644 --- a/ci/azure/azure-intel-oneapi-icpc-ubuntu-mpich.yml +++ b/ci/azure/azure-intel-oneapi-icpc-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-intel-oneapi-icpc-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-intel-oneapi-icpx-ubuntu-mpich.yml b/ci/azure/azure-intel-oneapi-icpx-ubuntu-mpich.yml index ceac87ff31..108a2de15f 100644 --- a/ci/azure/azure-intel-oneapi-icpx-ubuntu-mpich.yml +++ b/ci/azure/azure-intel-oneapi-icpx-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 1 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-intel-oneapi-icpx-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-nvidia-11-2-ubuntu-mpich.yml b/ci/azure/azure-nvidia-11-2-ubuntu-mpich.yml index e30c8760f1..9f6a642c61 100644 --- a/ci/azure/azure-nvidia-11-2-ubuntu-mpich.yml +++ b/ci/azure/azure-nvidia-11-2-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 0 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-nvidia-11.2-cache volume_name: ubuntu-cpp diff --git a/ci/azure/azure-nvidia-12-ubuntu-mpich.yml b/ci/azure/azure-nvidia-12-ubuntu-mpich.yml index 5a19ca429d..3ae10b3251 100644 --- a/ci/azure/azure-nvidia-12-ubuntu-mpich.yml +++ b/ci/azure/azure-nvidia-12-ubuntu-mpich.yml @@ -57,7 +57,7 @@ variables: VT_DEBUG_VERBOSE: 1 VT_KOKKOS_ENABLED: 0 CMAKE_CXX_STANDARD: 17 - TEST_LB_SCHEMA: 0 + VT_CI_TEST_LB_SCHEMA: 0 CACHE: "$(Agent.TempDirectory)/cache/" cache_name: ubuntu-nvidia-12-cache volume_name: ubuntu-cpp diff --git a/ci/build_cpp.sh b/ci/build_cpp.sh 
index 669914e5b4..899cd80bec 100755 --- a/ci/build_cpp.sh +++ b/ci/build_cpp.sh @@ -147,11 +147,11 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \ -DCMAKE_PREFIX_PATH="${CMAKE_PREFIX_PATH:-}" \ -DCMAKE_INSTALL_PREFIX="$VT_BUILD/install" \ -Dvt_ci_build="${VT_CI_BUILD:-0}" \ + -Dvt_ci_generate_lb_files="${VT_CI_TEST_LB_SCHEMA:-0}" \ -Dvt_debug_verbose="${VT_DEBUG_VERBOSE:-0}" \ -Dvt_tests_num_nodes="${VT_TESTS_NUM_NODES:-}" \ -Dvt_external_fmt="${VT_EXTERNAL_FMT:-0}" \ -Dvt_no_color_enabled="${VT_NO_COLOR_ENABLED:-0}" \ - -Dvt_test_lb_schema="${TEST_LB_SCHEMA:-0}" \ -DCMAKE_CXX_STANDARD="${CMAKE_CXX_STANDARD:-17}" \ -DBUILD_SHARED_LIBS="${BUILD_SHARED_LIBS:-0}" \ "$VT" diff --git a/ci/test_cpp.sh b/ci/test_cpp.sh index 461505b71c..9b32b48ff1 100755 --- a/ci/test_cpp.sh +++ b/ci/test_cpp.sh @@ -23,7 +23,7 @@ then popd fi -if test "${TEST_LB_SCHEMA:-0}" -eq 1 +if test "${VT_CI_TEST_LB_SCHEMA:-0}" -eq 1 then echo "Validating schema of json files..." "${VT}/scripts/check_lb_data_files.sh" "${VT_BUILD}" "${VT}" diff --git a/cmake/configure_options.cmake b/cmake/configure_options.cmake index 8927d0873b..031671c495 100644 --- a/cmake/configure_options.cmake +++ b/cmake/configure_options.cmake @@ -150,7 +150,7 @@ define_option(vt_werror_enabled "-Werror" "Build VT with -Werror enabled" OFF em define_option(vt_build_tests "tests" "Build VT tests" ON empty_feature) define_option(vt_build_tools "tools" "Build VT tools" ON empty_feature) define_option(vt_build_examples "examples" "Build VT examples" ON empty_feature) -define_option(vt_test_lb_schema "lb schema tests" "Enable testing of LBDatafile schema" OFF empty_feature) +define_option(vt_ci_generate_lb_files "generate lb files" "Enable generation of LBDatafiles when running examples" OFF empty_feature) option(vt_external_fmt "Build VT with external fmt" OFF) if(${vt_external_fmt}) diff --git a/cmake/test_vt.cmake b/cmake/test_vt.cmake index 3f28f82307..37a9100a4d 100644 --- a/cmake/test_vt.cmake +++ b/cmake/test_vt.cmake @@
-118,7 +118,7 @@ macro(add_test_for_example_vt test_target test_exec test_list) endif() # Append parameters required for the examples to output LBDatafiles. - if (vt_test_lb_schema) + if (vt_ci_generate_lb_files) list(APPEND EXEC_ARGS "--vt_lb_interval=1" "--vt_lb_data" diff --git a/docker-compose.yml b/docker-compose.yml index 77fee64a9f..d81e1ac6f0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -115,7 +115,7 @@ x-vtopts: &vtopts BUILD_SHARED_LIBS: ${VT_BUILD_SHARED_LIBS:-0} VT_INCLUSION_TYPE: ${VT_INCLUSION:-TPL} CODECOV_TOKEN: ${CODECOV_TOKEN:-} - TEST_LB_SCHEMA: ${TEST_LB_SCHEMA:-0} + VT_CI_TEST_LB_SCHEMA: ${VT_CI_TEST_LB_SCHEMA:-0} CMAKE_CXX_STANDARD: ${CMAKE_CXX_STANDARD:-17} services: diff --git a/scripts/workflow-azure-template.yml b/scripts/workflow-azure-template.yml index 6cd0c9f447..c4da44fe9f 100644 --- a/scripts/workflow-azure-template.yml +++ b/scripts/workflow-azure-template.yml @@ -45,7 +45,7 @@ variables: VT_DEBUG_VERBOSE: [% vt_debug_verbose %] VT_KOKKOS_ENABLED: [% vt_kokkos_enabled %] CMAKE_CXX_STANDARD: [% cmake_cxx_standard %] - TEST_LB_SCHEMA: [% test_lb_schema %] + VT_CI_TEST_LB_SCHEMA: [% vt_ci_test_lb_schema %] CACHE: "$(Agent.TempDirectory)/cache/" cache_name: [% cache_name %] volume_name: [% volume_name %] diff --git a/scripts/workflows-azure.ini b/scripts/workflows-azure.ini index e7ba1a75fb..a8da0b759d 100644 --- a/scripts/workflows-azure.ini +++ b/scripts/workflows-azure.ini @@ -18,7 +18,7 @@ vt_zoltan = 0 vt_ci_build = 1 vt_tests_num_nodes = 2 vt_external_fmt = 0 -test_lb_schema = 0 +vt_ci_test_lb_schema = 0 ulimit_core = 0 vt_code_coverage = 0 build_type = release @@ -114,7 +114,7 @@ test_configuration = "gcc-9, ubuntu, mpich, zoltan, json schema test" compiler_type = gnu compiler = gcc-9 output_name = ci/azure/azure-gcc-9-ubuntu-mpich.yml -test_lb_schema = 1 +vt_ci_test_lb_schema = 1 vt_zoltan = 1 [PR-tests-gcc-10]