From 73d9d832ed7090b97e4fb149bce3de50dfad4ca5 Mon Sep 17 00:00:00 2001
From: Jean Luca Bez
Date: Tue, 23 Jan 2024 10:10:35 -0800
Subject: [PATCH] Synchronize stable with develop branch (#172)

* fix data type
* add client-side statistics
* fix format
* clang formatter
* update CMake
* free allocated memory properly
* clang format
* clang-format-10
* change file name
* address review comments
* update llsm importer
* update server checkpoint intervals
* update .gitignore
* add job scripts
* add one debugging msg
* update container creation to collective mode for debugging purposes
* update output for uint64_t
* add scripts
* update scripts
* delete debugging message
* make CMake publish the scripts directory
* update tag names
* update query starting position
* update job scripts
* add progressive timing for kvtag_add_get_scale
* fix iteration count in final report
* update job scripts and benchmark program
* update message format
* comment out the object/container close procedure in the benchmark to save node hours
* change the max number of objects to 1M
* change the max length of attribute values
* llsm tiff import test
* update code
* update cmake and llsm_importer
* close if in cmake
* cmake fix tiff
* cmake policy to suppress warning
* add pdc include dir
* update array-generating method
* update CMakeLists
* fix return type
* add timing
* fix output
* llsm tiff importer, 1st version: read a CSV and import TIFF files to PDC, adding the metadata available in the CSV files and the TIFF loader
* fix variable name
* fix cmake
* debugging for the non-MPI program
* clang format; without PDC everything works perfectly, but the program fails at the PDC init stage, where PDCprop_create(PDC_CONT_CREATE, pdc) is called (see the sketch below)
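For orientation, the failing call in the last entry sits at the top of the standard PDC client start-up sequence; a minimal sketch of that sequence, assuming the usual PDC client API (the container name "c1" is illustrative, not from the log):

    #include "pdc.h"

    int main(void)
    {
        /* MPI_Init() would precede this in the MPI build */
        pdcid_t pdc       = PDCinit("pdc");                       /* connect to PDC servers  */
        pdcid_t cont_prop = PDCprop_create(PDC_CONT_CREATE, pdc); /* the call that failed    */
        pdcid_t cont      = PDCcont_create("c1", cont_prop);      /* create/open a container */

        PDCcont_close(cont);
        PDCprop_close(cont_prop);
        PDCclose(pdc);
        return 0;
    }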
* enable MPI
* enlarge the bcast size
* resolve the bcast count (see the MPI sketch after this list)
* llsm data path in script
* update csv reader
* update pdc
* enlarge max write
* update pdc_import.c
* update pdc_export.c
* update tools/cmake
* clang format
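The bcast entries above concern shipping the parsed CSV from rank 0 to every rank; a common pattern, sketched here with illustrative names (csv_buf and csv_len are not from the importer source), is to broadcast the byte count first so every rank posts a matching MPI_Bcast:

    #include <mpi.h>
    #include <stdlib.h>

    /* Broadcast a rank-0 buffer to all ranks. Broadcasting the length first
     * keeps the counts matched on every rank; a mismatched count is the
     * classic cause of "bcast count" bugs like the one fixed above. */
    void bcast_buffer(char **csv_buf, long *csv_len, int rank, MPI_Comm comm)
    {
        MPI_Bcast(csv_len, 1, MPI_LONG, 0, comm);
        if (rank != 0)
            *csv_buf = malloc((size_t)*csv_len);
        MPI_Bcast(*csv_buf, (int)*csv_len, MPI_CHAR, 0, comm);
    }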
* Merge tools/llsm_importer (#81)
* remove unnecessary install block from CMakeLists.txt
* update output
* Revert "update output" (reverts commit fe1f8b44995bc0dabd3b957e1032c2da26f56fdd)
* build kvtag_add_get_scale
* comment out free
* update code
* 1. kvtag_scale_add_get added; 2. uint64_t support for obj/tag/query counts; 3. moved the work-assignment block down, right before object creation; 4. everything is tested and working
* do-while loop added; tested with 1M objects, and it works
* the 1M-object test works; the 10M-object test fails, as the original also fails
* add new executable to the test set
* enlarge PDC_SERVER_ID_INTERVAL
* update console args
* add p search test
* add a console arg for changing the number of attributes per object
* free allocated memory
* fix query count issue
* fix attr length definition
* code refactored
---------
Co-authored-by: Houjun Tang

* added a tutorial for llsm_importer
* Adding tutorial for llsm_importer tool. (#84)
---------
Co-authored-by: Houjun Tang

* make sure the line feed is included for string attributes (see the sketch below)
* update timing for overall completion time
* Update .gitlab-ci.yml: remove the Cori CI ahead of Cori's decommissioning at the end of the month
* Remove unnecessary fflush call
  Signed-off-by: Chen Wang
* update formatting
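The line-feed fix above is about how string tag values are sized when written; the idea, sketched with PDCobj_put_tag (the size accounting shown is an assumption about the fix, not lifted from it):

    #include <string.h>
    #include "pdc.h"

    /* Store a C string as a tag value. Sizing it as strlen(s) + 1 keeps the
     * trailing byte (terminator, or an appended '\n') with the value instead
     * of truncating it. The object id and tag name are illustrative. */
    void put_string_tag(pdcid_t obj, const char *name, const char *value)
    {
        PDCobj_put_tag(obj, (char *)name, (void *)value, PDC_STRING, strlen(value) + 1);
    }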
* LLSM_importer Tutorial and Timing for job completion time. (#86)
---------
Co-authored-by: Houjun Tang

* Fix Issue #85: server segfault when another client application with a different number of ranks connects to it
* Committing clang-format changes
* update metrics
* Update .gitlab-ci.yml
* update VPIC output timing precision (#88)
* update timing to make it consistent
* llsm_importer (#1): run the formatter on llsm_importer
* Tiff Parallel Reader sync to latest version (#89)
---------
Co-authored-by: Houjun Tang

* Update .gitlab-ci.yml
* add type for kvtag structure (#2)
* update the metadata type system
* update the serde framework to cope with the new data type system
* replace unnecessary data types
* add a type for pdc_kvtag_t; all occurrences fixed (see the struct sketch after this list)
* update the new commons CMake for publishing commons
* commons compilation passed
* compiled
* Feature/metadata type (#3)
* remove unnecessary header files from installation
* resolve conflict
* add important files
* clang formatting
* update cmake
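These metadata-type commits attach a value type to the key-value tag record so its bytes can be interpreted without guessing; roughly (field order and exact field types here are illustrative, pdc_var_type_t is PDC's type enum):

    /* A key-value tag that carries its value's type. */
    typedef struct pdc_kvtag_t {
        char *         name;  /* tag key                       */
        uint32_t       size;  /* value size in bytes           */
        pdc_var_type_t type;  /* e.g. PDC_INT, PDC_STRING, ... */
        void *         value; /* serialized value              */
    } pdc_kvtag_t;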
* Data type for new kvtag (including refactoring and serde framework) (#90)
---------
Co-authored-by: Houjun Tang

* LLSM Importer update: new job script + new data type update on kvtags (#92)
* update metrics
* forcibly enable OpenMP (see the sketch after this section)
* add the C flags from the mex compiler
* Update .gitlab-ci.yml
* updated code
* clang format
* update
* print numWorkers
* update scripts
* formatting
* update llsm_tools.c
* remove unnecessary hash table init
---------
Co-authored-by: Houjun Tang
Co-authored-by: Jean Luca Bez
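The OpenMP entries above refer to the importer's worker threads ("numWorkers"); an illustrative sketch of the pattern, not the actual llsm_tools.c loop (build with -fopenmp, which the "forcibly enable OpenMP" commit ensures):

    #include <omp.h>
    #include <stdio.h>

    /* Process input files with an OpenMP worker pool; the loop body stands
     * in for the TIFF read-and-import work. */
    void process_files(int nfiles)
    {
        printf("numWorkers: %d\n", omp_get_max_threads());
    #pragma omp parallel for schedule(dynamic)
        for (int i = 0; i < nfiles; i++) {
            /* read and import file i */
        }
    }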
* Update .gitlab-ci.yml
* fix warnings; comment out 'find_path' and 'find_library' for Mercury in src/commons/CMakeLists.txt (#93)
* update script
* fix some warnings
* update
* fix warning
* fix warnings
* size the server address and file-path buffers at 1024, the TMP_DIR path at 1024/2, NAME_MAX for appname and objname at 1024/2, HOSTNAME at 1024/8, and NA_INFO_STRING at 1024/4 (see the sketch after this section)
* update
---------
Co-authored-by: Houjun Tang
Co-authored-by: Jean Luca Bez
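The buffer-size entry above reads naturally as one base size with the smaller buffers derived from it; a sketch of that arithmetic (the macro names are illustrative, not PDC's actual identifiers):

    /* 1024 bytes as the base; derived sizes per the commit above. */
    #define PDC_PATH_MAX     1024                /* server address, file paths */
    #define PDC_TMP_DIR_MAX  (PDC_PATH_MAX / 2)  /* TMP_DIR path     (512)     */
    #define PDC_NAME_MAX     (PDC_PATH_MAX / 2)  /* appname, objname (512)     */
    #define PDC_HOST_MAX     (PDC_PATH_MAX / 8)  /* HOSTNAME         (128)     */
    #define PDC_NA_INFO_MAX  (PDC_PATH_MAX / 4)  /* NA_INFO_STRING   (256)     */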
* Update clang-format-fix.yml
* Increase the default server cache size to 32 GB and the flush frequency to 30 s
* Committing clang-format changes
* add FindMERCURY.cmake
* update commons/CMakeLists.txt
* Fix unnecessary memory allocation (#103)
* Fix an issue with opening a deleted container, added test (#101)
* Refactor the query aggregation process
* Fix container tag delete error (#102)
* Update the tag delete function
* Refactor the metadata lookup process for tag deletion
* Committing clang-format changes
* Formatting and comment
---------
Co-authored-by: github-actions

* Fix the server cache issue when the cache becomes full and needs a flush (#113)
* Committing clang-format changes
---------
Co-authored-by: github-actions

* Fix a wrong option description. (#115)
* update option message
---------
Co-authored-by: Houjun Tang

* Install header files needed by PDCpy (#114)
* Support unlimited object dimension size (#117) (see the sketch after this list)
* Add a function description for PDC_SIZE_UNLIMITED
* Fix the obj_round_robin_io_all test code
* Committing clang-format changes
* More header-file fixes for PDCpy
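For #117, the new PDC_SIZE_UNLIMITED sentinel lets an object's dimension grow with the data; a minimal sketch, assuming the usual object-property calls (the object name is illustrative):

    #include "pdc.h"

    /* Create a 1-D object whose extent is left unlimited. */
    pdcid_t make_unlimited_obj(pdcid_t pdc, pdcid_t cont)
    {
        uint64_t dims[1]  = {PDC_SIZE_UNLIMITED};
        pdcid_t  obj_prop = PDCprop_create(PDC_OBJ_CREATE, pdc);

        PDCprop_set_obj_dims(obj_prop, 1, dims);
        return PDCobj_create(cont, "unlimited_obj", obj_prop);
    }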
* Fix cmake path (#121)
* Kvtag query (#122): add a new collective kvtag query API that returns full/aggregated results to all clients (see the sketch after this list)
* Add test code
* Committing clang-format changes
* Add an optimization for when multiple clients issue different queries
* Add test program
* Fix free issue
* Committing clang-format changes
---------
Co-authored-by: github-actions
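For #122, a hedged sketch of issuing a kvtag query from a client: PDC_Client_query_kvtag is the existing per-client call, and the new collective variant follows the same shape (the _col name in the comment is an assumption; check pdc_client_connect.h):

    #include "pdc.h"
    #include "pdc_client_connect.h"

    /* Find all object IDs carrying tag name == value. With the collective
     * variant from #122, every rank ends up with the full aggregated result
     * set instead of querying the servers independently. */
    void find_tagged_objects(void)
    {
        int         val  = 42;
        int         nres = 0;
        uint64_t *  ids  = NULL;
        pdc_kvtag_t kvtag;

        kvtag.name  = "run_id"; /* illustrative key */
        kvtag.value = &val;
        kvtag.type  = PDC_INT;
        kvtag.size  = sizeof(int);

        PDC_Client_query_kvtag(&kvtag, &nres, &ids);
        /* collective: PDC_Client_query_kvtag_col(&kvtag, &nres, &ids); */
    }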
* Rebase develop to stable (#125)
* updates in documentation
* update docs
* trigger update
* remove extension
* include docs build dependencies
* update file
* update metrics
* Update .gitlab-ci.yml
* update logo
* adding recovered documentation
* update text
* fix typo
* update CSS
* increase content width
* remove build docs
* Refer documentation to readthedocs website (#95)
* Update README.md
* Update getting_started.rst
* include updated text
* fix links
* update script * fix some warnings * fix warning * update * server address and file paths using 1024, TMP_DIR path using 1024/2, NAME_MAX for appname and objname takes 1024/2, HOSTNAME takes 1024/8, NA_INFO_STRING takes 1024/4 * update --------- Co-authored-by: Houjun Tang Co-authored-by: Jean Luca Bez * Update clang-format-fix.yml * Increase the default server cache size to 32GB and flush frequency to 30s * Committing clang-format changes * update commons/CMakeLists.txt * Fix unnecessary memory allocation (#103) * Fix an issue with opening a deleted container, added test (#101) * Refactor the query aggregation process * Fix container tag delete error (#102) * Committing clang-format changes * Update tag delete function * Refactor metadata lookup process for tag deletion * Formatting and comment * Committing clang-format changes --------- Co-authored-by: github-actions * Fix the server cache issue when cache becomes full and needs flush (#113) * Committing clang-format changes --------- Co-authored-by: github-actions * Fix a wrong option
description. (#115) * update metadata type system * update serde framework to cope with the new data type system * replace unnecessary data types * adding type for pdc_kvtag_t, all occurrences are fixed * update new commons CMake for publishing commons * commons compilation passed * compiled * remove unnecessary header files from installation * resolve conflict * add important files * clang formatting * update cmake * update option message --------- Co-authored-by: Houjun Tang * Install header files needed by PDCpy (#114) * Support unlimited object dimension size (#117) * Add function description for PDC_SIZE_UNLIMITED * Fix obj_round_robin_io_all test code * Committing clang-format changes * More header files fix for PDCpy * Fix cmake path (#121) * Kvtag query (#122) * Add a new collective kvtag query api to return full/aggregated results to all clients * Committing clang-format changes * Add test code * Add an optimization when multiple clients issue different queries * Add test program * Fix free issue * Committing clang-format changes --------- Co-authored-by: github-actions * fix conflict issue --------- Signed-off-by: Chen Wang Co-authored-by: Jean Luca Bez Co-authored-by: Wei Zhang Co-authored-by: Chen Wang Co-authored-by: github-actions Co-authored-by: Wei Zhang * remove gitmodules file (#131) * Fix tests (#129) * Fix test * Fix region_transfer_all_append test * Committing clang-format changes * Fix cont_tags with MPI --------- Co-authored-by: github-actions * DART Integration (#124) --------- Co-authored-by: github-actions Co-authored-by: Houjun Tang * update to new CI flow * migrate to new setup * update doc * update * pdc import, export, ls compiled successfully (#123) * removed requested files * formatting issues * changed install tools --------- Co-authored-by: Houjun Tang * Update nersc.yml * fix MAX_CACHE_SIZE compilation issue * Compile parallel tests only when PDC_ENABLE_MPI is ON (#144) * Remove copy of Spack recipe (#135) Remove this from our repository * Include GitHub templates (#145) * Update bug_report.md * Update feature_request.md * IDIOMS v0.9 (#151) Major changes: DART integration with single-client vs. multi-client queries and suffix-tree mode; benchmark -> mpi_query_scale_col; Docker support for local development (very useful if you don't have a proper Linux environment); a Docker image for developing/testing/debugging PDC (#150). Other changes: unifying all work_todo_g and bulk_todo_g to use atomic operations; GitHub Codespace support; fixing the duplicated FindMercury.cmake so CMake finds Mercury and MPI (#149) * Use cc on Perlmutter (#161) Dr. Tang fixed a compilation issue in NERSC CI where HDF5 could not be detected even with the cray-parallel-hdf5 module loaded on Perlmutter.
* Fix pdc ls (#154) * pdc import, export, ls compiled successfully * removed requested files * formatting issues * changed install tools * gets checkpoint files * grabbing checkpoint files from within sub-directories, minor comments * Committing clang-format changes * Committing clang-format changes * Fix a few issues with pdc_ls * Committing clang-format changes --------- Co-authored-by: nickaruwang Co-authored-by: Nick Wang <66816536+nickaruwang@users.noreply.github.com> Co-authored-by: github-actions Co-authored-by: Jean Luca Bez * Complete support for Docker and Github Codespace (#157) Include support for Docker and Github Codespace so we can run our dev environment with the support of Docker. * SQLite and RocksDB support for KVtags (#165) SQLite and RocksDB support for KVtags * Update requirements.txt (#166) * fixes for MacOS and new CI to prevent future breaks (#164) * include MacOS in CI * include dependencies * replace TCP with sockets * add libuuid * fix find UUID to correctly locate files in Ubuntu and MacOS * fix random segfault in strdup + malloc due to wrong allocation * ensure consistent use of defined variables * change transport for MacOS tests * update env * update dependencies-macos.sh * replace found to TRUE/FALSE * update documentation with timeout and MacOS specifics * fix git link to avoid authentication * change transport for tests * configure network for MacOS tests --------- Co-authored-by: github-actions * Enable cache/no-cache PDC CI tests (#171) CI with cache and no-cache versions of PDC * Resolving conflict between develop and stable (#173) * Synchronize stable with develop branch (#152) * Resolve conflicts * Synchronize stable with develop branch (#152) (#175) Synchronize stable with develop branch --------- Signed-off-by: Chen Wang Co-authored-by: Wei Zhang Co-authored-by: Houjun Tang Co-authored-by: Chen Wang Co-authored-by: github-actions Co-authored-by: Zhang Wei Co-authored-by: Nick Wang <66816536+nickaruwang@users.noreply.github.com> Co-authored-by: nickaruwang --- .devcontainer/devcontainer.Dockerfile | 95 +-- .devcontainer/devcontainer.json | 60 +- .devcontainer/post-attach.sh | 13 + .devcontainer/post-create.sh | 40 + .devcontainer/post-start.sh | 31 +- .docker/dev.Dockerfile | 29 + .docker/dev.Dockerfile.dockerignore | 2 + .docker/dev_base.Dockerfile | 135 ++++ .docker/publish_dev_base.sh | 75 ++ .docker/run_dev_base.sh | 23 + .github/workflows/dependencies-linux.sh | 3 +- .github/workflows/dependencies-macos.sh | 26 + .github/workflows/macos.yml | 33 + .github/workflows/ubuntu-cache.yml | 28 + .github/workflows/ubuntu-no-cache.yaml | 28 + .github/workflows/{linux.yml => ubuntu.yml} | 12 +- .gitlab-ci.yml | 280 +++++-- CMake/FindMERCURY.cmake | 14 +- CMake/FindUUID.cmake | 67 +- CMakeLists.txt | 17 +- docs/requirements.txt | 2 + docs/source/api.rst | 41 +- docs/source/developer-notes.rst | 59 +- docs/source/getting_started.rst | 59 +- examples/llsm/.gitignore | 1 + examples/llsm/CMakeLists.txt | 96 +++ {tools => examples/llsm}/LLSM_IMPORTER.md | 0 .../llsm/llsm_aux}/csvReader.c | 0 .../llsm/llsm_aux}/csvReader.h | 0 .../llsm/llsm_aux}/parallelReadTiff.c | 11 +- .../llsm/llsm_aux}/parallelReadTiff.h | 0 .../llsm/llsm_aux}/pdc_list.c | 0 .../llsm/llsm_aux}/pdc_list.h | 0 {tools => examples/llsm}/llsm_importer.c | 17 +- src/api/CMakeLists.txt | 2 +- src/api/pdc_client_connect.c | 12 +- src/api/pdc_obj/pdc_cont.c | 10 +- src/commons/utils/include/string_utils.h | 5 - src/server/CMakeLists.txt | 33 +- src/server/include/pdc_server.h | 12 + 
src/server/include/pdc_server_metadata.h | 10 + src/server/pdc_server.c | 205 ++++- src/server/pdc_server_metadata.c | 724 +++++++++++++++--- src/tests/CMakeLists.txt | 53 +- src/tests/cont_del.c | 2 +- src/tests/dart_attr_dist_test.c | 12 +- src/tests/kvtag_query.c | 300 +++++--- src/tests/kvtag_query_scale_col.c | 43 +- src/tools/CMakeLists.txt | 85 +- src/tools/pdc_ls.c | 125 ++- tools/.gitignore | 6 - tools/CMakeLists.txt | 136 ---- 52 files changed, 2344 insertions(+), 728 deletions(-) create mode 100755 .devcontainer/post-attach.sh mode change 100644 => 100755 .devcontainer/post-create.sh mode change 100644 => 100755 .devcontainer/post-start.sh create mode 100644 .docker/dev.Dockerfile create mode 100644 .docker/dev.Dockerfile.dockerignore create mode 100644 .docker/dev_base.Dockerfile create mode 100755 .docker/publish_dev_base.sh create mode 100755 .docker/run_dev_base.sh create mode 100755 .github/workflows/dependencies-macos.sh create mode 100644 .github/workflows/macos.yml create mode 100644 .github/workflows/ubuntu-cache.yml create mode 100644 .github/workflows/ubuntu-no-cache.yaml rename .github/workflows/{linux.yml => ubuntu.yml} (70%) create mode 100644 examples/llsm/.gitignore create mode 100644 examples/llsm/CMakeLists.txt rename {tools => examples/llsm}/LLSM_IMPORTER.md (100%) rename {tools/llsm => examples/llsm/llsm_aux}/csvReader.c (100%) rename {tools/llsm => examples/llsm/llsm_aux}/csvReader.h (100%) rename {tools/llsm => examples/llsm/llsm_aux}/parallelReadTiff.c (98%) rename {tools/llsm => examples/llsm/llsm_aux}/parallelReadTiff.h (100%) rename {tools/llsm => examples/llsm/llsm_aux}/pdc_list.c (100%) rename {tools/llsm => examples/llsm/llsm_aux}/pdc_list.h (100%) rename {tools => examples/llsm}/llsm_importer.c (97%) delete mode 100644 tools/.gitignore delete mode 100644 tools/CMakeLists.txt diff --git a/.devcontainer/devcontainer.Dockerfile b/.devcontainer/devcontainer.Dockerfile index 2eecb45b1..433bc37f8 100644 --- a/.devcontainer/devcontainer.Dockerfile +++ b/.devcontainer/devcontainer.Dockerfile @@ -1,95 +1,8 @@ # Note: Run `docker build -f .devcontainer/Dockerfile -t pdc:latest .` from the root directory of the repository to build the docker image. 
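+# The hpcio/pdc-dev-base image used below is itself built on Ubuntu Jammy and already
+# ships MPICH, libfabric, Mercury, and the other PDC build dependencies (see
+# .docker/dev_base.Dockerfile); this file only clears the stale PDC source/install
+# directories so the post-create script can rebuild PDC from the mounted workspace.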
# Use Ubuntu Jammy (latest LTS) as the base image -FROM ubuntu:jammy - - - -# Install necessary tools, MPICH, UUID library and developer files -RUN apt-get update && apt-get install -y \ - build-essential \ - git \ - mpich \ - libmpich-dev \ - uuid \ - uuid-dev \ - autoconf \ - libtool \ - cmake \ - cmake-curses-gui \ - wget \ - axel \ - curl \ - vim \ - nano \ - gdb \ - cgdb \ - curl \ - valgrind - -# Set WORK_SPACE environment variable and create necessary directories -RUN mkdir -p /workspaces -ENV WORK_SPACE=/workspaces - - -# Clone the repositories -WORKDIR $WORK_SPACE/source -RUN git clone https://github.com/ofiwg/libfabric.git && \ - git clone https://github.com/mercury-hpc/mercury.git --recursive - -COPY ./ ${WORK_SPACE}/source/pdc - -ENV LIBFABRIC_SRC_DIR=$WORK_SPACE/source/libfabric -ENV MERCURY_SRC_DIR=$WORK_SPACE/source/mercury -ENV PDC_SRC_DIR=$WORK_SPACE/source/pdc -ENV LIBFABRIC_DIR=$WORK_SPACE/install/libfabric -ENV MERCURY_DIR=$WORK_SPACE/install/mercury -ENV PDC_DIR=$WORK_SPACE/install/pdc - -RUN mkdir -p $LIBFABRIC_SRC_DIR && \ - mkdir -p $MERCURY_SRC_DIR && \ - mkdir -p $LIBFABRIC_DIR && \ - mkdir -p $MERCURY_DIR && \ - mkdir -p $PDC_DIR - - -# Save the environment variables to a file -RUN echo "export LIBFABRIC_SRC_DIR=$WORK_SPACE/source/libfabric" > $WORK_SPACE/pdc_env.sh && \ - echo "export MERCURY_SRC_DIR=$WORK_SPACE/source/mercury" >> $WORK_SPACE/pdc_env.sh && \ - echo "export PDC_SRC_DIR=$WORK_SPACE/source/pdc" >> $WORK_SPACE/pdc_env.sh && \ - echo "export LIBFABRIC_DIR=$WORK_SPACE/install/libfabric" >> $WORK_SPACE/pdc_env.sh && \ - echo "export MERCURY_DIR=$WORK_SPACE/install/mercury" >> $WORK_SPACE/pdc_env.sh && \ - echo "export PDC_DIR=$WORK_SPACE/install/pdc" >> $WORK_SPACE/pdc_env.sh - - -# Build and install libfabric -WORKDIR $LIBFABRIC_SRC_DIR -RUN git checkout v1.18.0 && \ - ./autogen.sh && \ - ./configure --prefix=$LIBFABRIC_DIR CC=mpicc CFLAG="-O2" && \ - make clean && \ - make -j && make install && \ - make check - -ENV LD_LIBRARY_PATH="$LIBFABRIC_DIR/lib:$LD_LIBRARY_PATH" -ENV PATH="$LIBFABRIC_DIR/include:$LIBFABRIC_DIR/lib:$PATH" -RUN echo 'export LD_LIBRARY_PATH=$LIBFABRIC_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh && \ - echo 'export PATH=$LIBFABRIC_DIR/include:$LIBFABRIC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh - - -# Build and install Mercury -WORKDIR $MERCURY_SRC_DIR -ENV MERCURY_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DNA_USE_OFI=ON -DNA_USE_SM=OFF -DNA_OFI_TESTING_PROTOCOL=tcp " -RUN git checkout v2.2.0 \ - mkdir -p build -WORKDIR ${MERCURY_SRC_DIR}/build -RUN cmake $MERCURY_CMAKE_FLAGS ../ && \ - make -j && make install && \ - ctest - -# Set the environment variables -ENV LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" -ENV PATH="$MERCURY_DIR/include:$MERCURY_DIR/lib:$PATH" -RUN echo 'export LD_LIBRARY_PATH=$MERCURY_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh \ - echo 'export PATH=$MERCURY_DIR/include:$MERCURY_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh +# FROM ubuntu:jammy +FROM hpcio/pdc-dev-base:latest +RUN rm -rf $PDC_SRC_DIR && \ + rm -rf $PDC_DIR diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ac2f53e57..f2cea6564 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -5,5 +5,63 @@ 3000 ], "postCreateCommand": ".devcontainer/post-create.sh", - "postStartCommand": ".devcontainer/post-start.sh" + "postStartCommand": ".devcontainer/post-start.sh", + "postAttachCommand": 
".devcontainer/post-attach.sh", + "customizations": { + "vscode": { + "extensions": [ + "1YiB.rust-bundle", + "batisteo.vscode-django", + "ChrisChinchilla.vscode-pandoc", + "DamianKoper.gdb-debug", + "donjayamanne.python-environment-manager", + "donjayamanne.python-extension-pack", + "dustypomerleau.rust-syntax", + "eamodio.gitlens", + "formulahendry.code-runner", + "GitHub.copilot", + "GitHub.copilot-chat", + "JScearcy.rust-doc-viewer", + "julialang.language-julia", + "KevinRose.vsc-python-indent", + "lextudio.iis", + "lextudio.restructuredtext", + "lextudio.restructuredtext-pack", + "ms-azuretools.vscode-docker", + "ms-python.python", + "ms-python.vscode-pylance", + "ms-toolsai.jupyter", + "ms-toolsai.jupyter-keymap", + "ms-toolsai.jupyter-renderers", + "ms-toolsai.vscode-jupyter-cell-tags", + "ms-toolsai.vscode-jupyter-slideshow", + "ms-vscode.cmake-tools", + "ms-vscode.cpptools", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools-themes", + "njpwerner.autodocstring", + "PolyMeilex.rust-targets", + "rogalmic.bash-debug", + "rust-lang.rust-analyzer", + "serayuzgur.crates", + "shakram02.bash-beautify", + "shd101wyy.markdown-preview-enhanced", + "Swellaby.rust-pack", + "tamasfe.even-better-toml", + "trond-snekvik.simple-rst", + "twxs.cmake", + "VisualStudioExptTeam.intellicode-api-usage-examples", + "VisualStudioExptTeam.vscodeintellicode", + "wholroyd.jinja", + "xaver.clang-format", + "yzane.markdown-pdf", + "yzhang.markdown-all-in-one", + "ZhangYue.rust-mod-generator" + ], + "settings": { + "C_Cpp.clang_format_path": "/home/project/software/clang-format-lint-action/clang-format/clang-format10", + "terminal.integrated.scrollback": 10000 + } + } + } } \ No newline at end of file diff --git a/.devcontainer/post-attach.sh b/.devcontainer/post-attach.sh new file mode 100755 index 000000000..dc5a0cbc3 --- /dev/null +++ b/.devcontainer/post-attach.sh @@ -0,0 +1,13 @@ +#!/bin/bash + + +WORK_SPACE_INITIALIZED_FILE=/workspaces/.workspace_initialized + +if ! [ -f $WORK_SPACE_INITIALIZED_FILE ]; then + /bin/bash /workspaces/pdc/.devcontainer/post-start.sh + watch -t -n 5 'echo "Press Ctrl+C when there is no building processes."; echo "Number of initial PDC building processes:"; ps -ef | grep make | grep -v -c grep' +else + echo "Welcome Back!" +fi + +/bin/bash \ No newline at end of file diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh old mode 100644 new mode 100755 index a9bf588e2..a50855b00 --- a/.devcontainer/post-create.sh +++ b/.devcontainer/post-create.sh @@ -1 +1,41 @@ #!/bin/bash + + +WORK_SPACE_INITIALIZED_FILE=/workspaces/.workspace_initialized + +if ! 
[ -f $WORK_SPACE_INITIALIZED_FILE ]; then + touch $WORK_SPACE_INITIALIZED_FILE + echo "First time to create workspace, start to install PDC" +else + echo "Workspace already initialized, skip the installation" + exit 0 +fi + +rm -rf $PDC_SRC_DIR +rm -rf $PDC_DIR + + +ln -s /workspaces/pdc $(dirname $PDC_SRC_DIR) + +mkdir -p /workspaces/source +ln -s $PDC_SRC_DIR /workspaces/source/pdc + +mkdir -p /workspaces/install/pdc +ln -s /workspaces/install/pdc $(dirname $PDC_SRC_DIR) + +# Build and install PDC +export PDC_CMAKE_FLAGS="-DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TOOLS=OFF -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DMPI_RUN_CMD=mpiexec " + +cd $PDC_SRC_DIR +rm -rf build && mkdir -p build + + +cd ${PDC_SRC_DIR}/build +cmake $PDC_CMAKE_FLAGS ../ 2>&1 > ./cmake_config.log || echo "ignoring cmake config error and proceed" +make -j && make install + +# Set the environment variables +export LD_LIBRARY_PATH="$PDC_DIR/lib:$LD_LIBRARY_PATH" +export PATH="$PDC_DIR/include:$PDC_DIR/lib:$PATH" +echo 'export LD_LIBRARY_PATH=$PDC_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh +echo 'export PATH=$PDC_DIR/include:$PDC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh diff --git a/.devcontainer/post-start.sh b/.devcontainer/post-start.sh old mode 100644 new mode 100755 index d4d6dbc5f..1f77c0c6e --- a/.devcontainer/post-start.sh +++ b/.devcontainer/post-start.sh @@ -1,31 +1,6 @@ #!/bin/bash +nohup /bin/bash /workspaces/pdc/.devcontainer/post-create.sh 2>&1 > /workspaces/pdc_install.out & -ln -s /workspaces/pdc /home/codespace/source/pdc -mkdir -p /workspaces/install -mkdir -p /workspaces/source -ln -s $PDC_SRC_DIR /workspaces/source/pdc -ln -s $PDC_DIR /workspaces/install/pdc - -export PDC_SRC_DIR=/workspaces/source/pdc - -# Build and install PDC -export PDC_CMAKE_FLAGS="-DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DMPI_RUN_CMD=mpiexec " - -cd $PDC_SRC_DIR -rm -rf build && mkdir -p build - - -cd ${PDC_SRC_DIR}/build -cmake $PDC_CMAKE_FLAGS ../ 2>&1 > ./cmake_config.log || echo "ignoring cmake config error and proceed" -make -j && make install - -# Set the environment variables -export LD_LIBRARY_PATH="$PDC_DIR/lib:$LD_LIBRARY_PATH" -export PATH="$PDC_DIR/include:$PDC_DIR/lib:$PATH" -echo 'export LD_LIBRARY_PATH=$PDC_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh -echo 'export PATH=$PDC_DIR/include:$PDC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh - - -cd $PDC_SRC_DIR/build -# ctest \ No newline at end of file +echo "Wait for 10 seconds for the building processes to start." +sleep 10s diff --git a/.docker/dev.Dockerfile b/.docker/dev.Dockerfile new file mode 100644 index 000000000..f5a0e0302 --- /dev/null +++ b/.docker/dev.Dockerfile @@ -0,0 +1,29 @@ +# Note: Run `docker build -f .docker/Dockerfile -t pdc:latest .` from the root directory of the repository to build the docker image. 
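+# This image builds PDC itself on top of the prebuilt pdc_dev_base image, which
+# already provides the full toolchain (see .docker/dev_base.Dockerfile).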
+ +# Use Ubuntu Jammy (latest LTS) as the base image +FROM zhangwei217245/pdc_dev_base:latest + +# Build and install PDC +ENV PDC_CMAKE_FLAGS="-DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DMPI_RUN_CMD=mpiexec " + + +WORKDIR $PDC_SRC_DIR +RUN rm -rf build && \ + mkdir -p build + +# COPY ../ ${PDC_SRC_DIR} +# RUN ls -l $PDC_SRC_DIR + +WORKDIR ${PDC_SRC_DIR}/build +RUN cmake $PDC_CMAKE_FLAGS ../ 2>&1 > ./cmake_config.log || echo "ignoring cmake config error and proceed" && \ + make -j && make install + +# Set the environment variables +ENV LD_LIBRARY_PATH="$PDC_DIR/lib:$LD_LIBRARY_PATH" +ENV PATH="$PDC_DIR/include:$PDC_DIR/lib:$PATH" +RUN echo 'export LD_LIBRARY_PATH=$PDC_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh && \ + echo 'export PATH=$PDC_DIR/include:$PDC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh + + +# WORKDIR $PDC_SRC_DIR/build +# RUN ctest \ No newline at end of file diff --git a/.docker/dev.Dockerfile.dockerignore b/.docker/dev.Dockerfile.dockerignore new file mode 100644 index 000000000..c29c58f5a --- /dev/null +++ b/.docker/dev.Dockerfile.dockerignore @@ -0,0 +1,2 @@ +# Exclude files and directories from the Docker build context +!/.git/ diff --git a/.docker/dev_base.Dockerfile b/.docker/dev_base.Dockerfile new file mode 100644 index 000000000..fde596838 --- /dev/null +++ b/.docker/dev_base.Dockerfile @@ -0,0 +1,135 @@ +# Note: Run `docker build -f .devcontainer/Dockerfile -t pdc:latest .` from the root directory of the repository to build the docker image. + +# Use Ubuntu Jammy (latest LTS) as the base image +ARG ARCH +FROM ${ARCH}ubuntu:jammy + +RUN echo "ARCH=${ARCH}" && sleep 3 + +ARG ARCH_CODE + +RUN echo "ARCH_CODE=${ARCH_CODE}" && sleep 3 +# Install necessary tools, MPICH, UUID library and developer files +RUN apt-get update && apt-get install -y \ + build-essential \ + git \ + mpich \ + libmpich-dev \ + libhdf5-dev \ + libhdf5-mpich-dev \ + libtiff5 \ + libtiff5-dev \ + uuid \ + uuid-dev \ + autoconf \ + libtool \ + cmake \ + cmake-curses-gui \ + wget \ + axel \ + curl \ + vim \ + nano \ + gdb \ + cgdb \ + curl \ + valgrind \ + python3 + +# Install Oh My Bash +RUN bash -c "$(curl -fsSL https://raw.githubusercontent.com/ohmybash/oh-my-bash/master/tools/install.sh)" && \ + sed -i 's/OSH_THEME="font"/OSH_THEME="powerline-multiline"/g' ~/.bashrc + +# Install Julia + +RUN echo "https://julialang-s3.julialang.org/bin/linux/aarch64/1.6/julia-1.6.7-linux-aarch64.tar.gz" > /julia_url_arm64v8.txt && \ + echo "https://julialang-s3.julialang.org/bin/linux/x64/1.6/julia-1.6.7-linux-x86_64.tar.gz" > /julia_url_amd64.txt + +RUN echo $(cat /julia_url_${ARCH_CODE}.txt) && sleep 3 + +RUN mkdir -p /opt/julia && wget -O - $(cat /julia_url_${ARCH_CODE}.txt) | tar -xz -C /opt/julia --strip-components=1 && \ + ln -s /opt/julia/bin/julia /usr/local/bin/julia + +RUN rm -rf /tmp/julia_url_*.txt + +ENV JULIA_HOME=/opt/julia + +# Install Rust +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +RUN echo 'source $HOME/.cargo/env' >> ~/.bashrc + + +# Set WORK_SPACE environment variable and create necessary directories +ENV WORK_SPACE=/home/project +RUN mkdir -p $WORK_SPACE + +# Install clang-format repo +RUN mkdir -p $WORK_SPACE/software +RUN cd $WORK_SPACE/software && git clone https://github.com/DoozyX/clang-format-lint-action.git +ENV CLANG_FORMAT_PATH=$WORK_SPACE/software/clang-format-lint-action/clang-format/clang-format10 + +# Clone the 
repositories
+WORKDIR $WORK_SPACE/source
+RUN git clone https://github.com/ofiwg/libfabric.git && \
+    git clone https://github.com/mercury-hpc/mercury.git --recursive
+
+ENV LIBFABRIC_SRC_DIR=$WORK_SPACE/source/libfabric
+ENV LIBFABRIC_DIR=$WORK_SPACE/install/libfabric
+ENV MERCURY_SRC_DIR=$WORK_SPACE/source/mercury
+ENV MERCURY_DIR=$WORK_SPACE/install/mercury
+
+ENV PDC_SRC_DIR=$WORK_SPACE/source/pdc
+ENV PDC_DIR=$WORK_SPACE/install/pdc
+
+RUN mkdir -p $LIBFABRIC_SRC_DIR && \
+    mkdir -p $MERCURY_SRC_DIR && \
+    mkdir -p $PDC_SRC_DIR && \
+    mkdir -p $LIBFABRIC_DIR && \
+    mkdir -p $MERCURY_DIR && \
+    mkdir -p $PDC_DIR
+
+
+# Save the environment variables to a file
+RUN echo "export LIBFABRIC_SRC_DIR=$WORK_SPACE/source/libfabric" > $WORK_SPACE/pdc_env.sh && \
+    echo "export LIBFABRIC_DIR=$WORK_SPACE/install/libfabric" >> $WORK_SPACE/pdc_env.sh && \
+    echo "export MERCURY_SRC_DIR=$WORK_SPACE/source/mercury" >> $WORK_SPACE/pdc_env.sh && \
+    echo "export MERCURY_DIR=$WORK_SPACE/install/mercury" >> $WORK_SPACE/pdc_env.sh && \
+    echo "export PDC_SRC_DIR=$WORK_SPACE/source/pdc" >> $WORK_SPACE/pdc_env.sh && \
+    echo "export PDC_DIR=$WORK_SPACE/install/pdc" >> $WORK_SPACE/pdc_env.sh
+
+
+# Build and install libfabric
+WORKDIR $LIBFABRIC_SRC_DIR
+RUN git checkout v1.18.0 && \
+    ./autogen.sh && \
+    ./configure --prefix=$LIBFABRIC_DIR CC=mpicc CFLAG="-O2" && \
+    make clean && \
+    make -j 8 && make install && \
+    make check
+
+ENV LD_LIBRARY_PATH="$LIBFABRIC_DIR/lib:$LD_LIBRARY_PATH"
+ENV PATH="$LIBFABRIC_DIR/include:$LIBFABRIC_DIR/lib:$PATH"
+RUN echo 'export LD_LIBRARY_PATH=$LIBFABRIC_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh && \
+    echo 'export PATH=$LIBFABRIC_DIR/include:$LIBFABRIC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh
+
+
+# Build and install Mercury
+WORKDIR $MERCURY_SRC_DIR
+ENV MERCURY_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DNA_USE_OFI=ON -DNA_USE_SM=OFF -DNA_OFI_TESTING_PROTOCOL=tcp "
+RUN git checkout v2.2.0 && \
+    mkdir -p build
+WORKDIR ${MERCURY_SRC_DIR}/build
+RUN cmake $MERCURY_CMAKE_FLAGS ../ && \
+    make -j 16 && make install && \
+    ctest
+
+# Set the environment variables
+ENV LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH"
+ENV PATH="$MERCURY_DIR/include:$MERCURY_DIR/lib:$PATH"
+RUN echo 'export LD_LIBRARY_PATH=$MERCURY_DIR/lib:$LD_LIBRARY_PATH' >> $WORK_SPACE/pdc_env.sh && \
+    echo 'export PATH=$MERCURY_DIR/include:$MERCURY_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh
+
+
+ENV PDC_CMAKE_FLAGS="-DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DMPI_RUN_CMD=mpiexec "
+
+ENTRYPOINT [ "/workspaces/pdc/.devcontainer/post-attach.sh" ]
\ No newline at end of file
diff --git a/.docker/publish_dev_base.sh b/.docker/publish_dev_base.sh
new file mode 100755
index 000000000..d20dd11d1
--- /dev/null
+++ b/.docker/publish_dev_base.sh
@@ -0,0 +1,75 @@
+#!/bin/bash
+
+# Require the image namespace and version arguments
+if [ "$#" -lt 2 ]; then
+    echo "Usage: ./publish_dev_base.sh <image_namespace> <version> [process_manifest]"
+    echo "For building and publishing the image, just omit the third argument. For processing the manifest, set the third argument to 1."
+    echo "For best performance, you need to run this script on the same architecture as the target image, but you may set the third argument to 1 on the machine where the fastest network is available to process the manifest."
+ exit 1 +fi + +IMG_NS=$1 +VERSION=$2 +arch=$(uname -m) +ARCH_CODE="" + +case $arch in + x86_64) + ARCH_CODE="amd64" + echo "You are running on x86_64 (AMD64) architecture." + ;; + arm64 | aarch64) + ARCH_CODE="arm64v8" + echo "You are running on ARM64 (AArch64) architecture." + ;; + i386 | i686) + ARCH_CODE="i386" + echo "You are running on x86 (32-bit) architecture." + ;; + arm*) + ARCH_CODE="arm32v7" + echo "You are running on ARM (32-bit) architecture." + ;; + ppc64le) + ARCH_CODE="ppc64le" + echo "You are running on PowerPC (64-bit little-endian) architecture." + ;; + s390x) + ARCH_CODE="s390x" + echo "You are running on IBM Z (s390x) architecture." + ;; + *) + echo "Unknown or unsupported architecture: $arch" + exit 1 + ;; +esac + +if [ -z "$3" ] || [ "$3" -eq 0 ]; then + docker build -t ${IMG_NS}/pdc-dev-base:${VERSION}-${ARCH_CODE} -f .docker/dev_base.Dockerfile --build-arg ARCH=${ARCH_CODE}/ --build-arg ARCH_CODE=${ARCH_CODE} . + docker push ${IMG_NS}/pdc-dev-base:${VERSION}-${ARCH_CODE} + exit 0 +else + echo "Processing manifest..." + # Process manifest + # arch_strings=("amd64" "arm64v8" "i386" "arm32v7" "ppc64le" "s390x") + arch_strings=("amd64" "arm64v8") + manifest_args=() + for arch in "${arch_strings[@]}"; do + echo "Processing architecture: $arch" + manifest_args+=("--amend" "${IMG_NS}/pdc-dev-base:${VERSION}-${arch}") + if [[ "$arch" == "$ARCH_CODE" ]]; then + echo "Skipping pulling current architecture: $arch" + continue + fi + docker pull ${IMG_NS}/pdc-dev-base:${VERSION}-${arch} + done + + docker manifest create ${IMG_NS}/pdc-dev-base:${VERSION} ${manifest_args[@]} + docker manifest push ${IMG_NS}/pdc-dev-base:${VERSION} + docker manifest rm ${IMG_NS}/pdc-dev-base:latest + docker manifest create ${IMG_NS}/pdc-dev-base:latest ${manifest_args[@]} + docker manifest push ${IMG_NS}/pdc-dev-base:latest + +fi + + diff --git a/.docker/run_dev_base.sh b/.docker/run_dev_base.sh new file mode 100755 index 000000000..67c22dedf --- /dev/null +++ b/.docker/run_dev_base.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +DEFAULT_WORKSPACE=/workspaces/pdc + +LAST_PDC_DEV_CONTAINER=$(docker ps -a | grep pdc_dev_base | head -1 | awk '{print $NF}') + +# if this is empty, then we need to create a new container +if [ -z "$LAST_PDC_DEV_CONTAINER" ]; then + echo "No existing pdc_dev_base container found, creating a new one" + docker image rm -f hpcio/pdc-dev-base:latest + docker create -it -v $(pwd):${DEFAULT_WORKSPACE} -w ${DEFAULT_WORKSPACE} --entrypoint /bin/bash hpcio/pdc-dev-base:latest + sleep 1 + LAST_PDC_DEV_CONTAINER=$(docker ps -a | grep pdc_dev_base | head -1 | awk '{print $NF}') + echo "Created pdc_dev_base container: $LAST_PDC_DEV_CONTAINER. To stop it, run 'docker stop $LAST_PDC_DEV_CONTAINER'" + docker start $LAST_PDC_DEV_CONTAINER + echo "Wait for 5 seconds for the container to start." + sleep 5 + docker exec -it $LAST_PDC_DEV_CONTAINER /bin/sh -c "/bin/bash ${DEFAULT_WORKSPACE}/.devcontainer/post-attach.sh" +else + echo "Found existing pdc_dev_base container $LAST_PDC_DEV_CONTAINER, start it. 
To stop it, run 'docker stop $LAST_PDC_DEV_CONTAINER'" + docker start $LAST_PDC_DEV_CONTAINER + docker exec -it $LAST_PDC_DEV_CONTAINER /bin/bash -c "/bin/bash ${DEFAULT_WORKSPACE}/.devcontainer/post-attach.sh" +fi \ No newline at end of file diff --git a/.github/workflows/dependencies-linux.sh b/.github/workflows/dependencies-linux.sh index 8bae74b5b..38955304a 100755 --- a/.github/workflows/dependencies-linux.sh +++ b/.github/workflows/dependencies-linux.sh @@ -3,10 +3,9 @@ set -eu -o pipefail sudo apt-get update -sudo apt-get install libopenmpi-dev +sudo apt-get install libopenmpi-dev libhdf5-dev # libfabric -# git clone https://github.com/ofiwg/libfabric.git wget https://github.com/ofiwg/libfabric/archive/refs/tags/v1.12.1.tar.gz tar xf v1.12.1.tar.gz cd libfabric-1.12.1 diff --git a/.github/workflows/dependencies-macos.sh b/.github/workflows/dependencies-macos.sh new file mode 100755 index 000000000..98b1d33d8 --- /dev/null +++ b/.github/workflows/dependencies-macos.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +set -eu -o pipefail + +export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=FALSE + +brew install open-mpi automake + +# libfabric +wget https://github.com/ofiwg/libfabric/archive/refs/tags/v1.15.2.tar.gz +tar xf v1.15.2.tar.gz +cd libfabric-1.15.2 +./autogen.sh +./configure --disable-usnic --disable-mrail --disable-rstream --disable-perf --disable-efa --disable-psm2 --disable-psm --disable-verbs --disable-shm --disable-static --disable-silent-rules +make -j2 && sudo make install +make check +cd .. + +# Mercury +git clone --recursive https://github.com/mercury-hpc/mercury.git +cd mercury +git checkout v2.2.0 +mkdir build && cd build +cmake ../ -DCMAKE_C_COMPILER=gcc -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DNA_USE_OFI=ON -DNA_USE_SM=OFF -DMERCURY_USE_CHECKSUMS=OFF -DNA_OFI_TESTING_PROTOCOL=sockets +make -j2 && sudo make install +ctest diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml new file mode 100644 index 000000000..4c5fdcf8b --- /dev/null +++ b/.github/workflows/macos.yml @@ -0,0 +1,33 @@ +name: MacOS + +on: + pull_request: + branches: [ stable, develop ] + + # Allows to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + PDC: + runs-on: macos-13 + timeout-minutes: 60 + + steps: + - uses: actions/checkout@v3 + + - name: Dependencies + run: .github/workflows/dependencies-macos.sh + + - name: Build PDC + run: | + mkdir build && cd build + cmake ../ -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DPDC_ENABLE_MPI=ON -DCMAKE_C_COMPILER=mpicc + make -j 2 + + - name: Test PDC + working-directory: build + run: | + sudo sh -c 'echo "`ipconfig getifaddr en0` PDC" >> /etc/hosts' + sudo scutil --set HostName PDC + export HG_TRANSPORT="sockets" + ctest -L serial diff --git a/.github/workflows/ubuntu-cache.yml b/.github/workflows/ubuntu-cache.yml new file mode 100644 index 000000000..6a10f2129 --- /dev/null +++ b/.github/workflows/ubuntu-cache.yml @@ -0,0 +1,28 @@ +name: Ubuntu (cache) + +on: + pull_request: + branches: [ stable, develop ] + + workflow_dispatch: + +jobs: + PDC: + runs-on: ubuntu-latest + timeout-minutes: 60 + + steps: + - uses: actions/checkout@v3 + + - name: Dependencies + run: .github/workflows/dependencies-linux.sh + + - name: Build PDC + run: | + mkdir build && cd build + cmake ../ -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DPDC_SERVER_CACHE=ON -DBUILD_TESTING=ON -DPDC_ENABLE_MPI=ON -DCMAKE_C_COMPILER=mpicc + make -j2 + + - name: Test PDC + working-directory: build + run: ctest -L serial diff --git 
a/.github/workflows/ubuntu-no-cache.yaml b/.github/workflows/ubuntu-no-cache.yaml new file mode 100644 index 000000000..c3df83093 --- /dev/null +++ b/.github/workflows/ubuntu-no-cache.yaml @@ -0,0 +1,28 @@ +name: Ubuntu (no-cache) + +on: + pull_request: + branches: [ stable, develop ] + + workflow_dispatch: + +jobs: + PDC: + runs-on: ubuntu-latest + timeout-minutes: 60 + + steps: + - uses: actions/checkout@v3 + + - name: Dependencies + run: .github/workflows/dependencies-linux.sh + + - name: Build PDC + run: | + mkdir build && cd build + cmake ../ -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DPDC_SERVER_CACHE=OFF -DBUILD_TESTING=ON -DPDC_ENABLE_MPI=ON -DCMAKE_C_COMPILER=mpicc + make -j2 + + - name: Test PDC + working-directory: build + run: ctest -L serial diff --git a/.github/workflows/linux.yml b/.github/workflows/ubuntu.yml similarity index 70% rename from .github/workflows/linux.yml rename to .github/workflows/ubuntu.yml index cdce9b776..c8c38ba96 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/ubuntu.yml @@ -1,8 +1,6 @@ -name: linux +name: Ubuntu on: - # push: - # branches: [ stable ] pull_request: branches: [ stable, develop ] @@ -27,14 +25,6 @@ jobs: cmake ../ -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=ON -DPDC_ENABLE_MPI=ON -DCMAKE_C_COMPILER=mpicc make -j2 - # - name: Debug test PDC - # working-directory: ./src/build/bin - # run: | - # mpirun -n 1 ./pdc_server.exe & - # sleep 1 - # mpirun -n 1 ./pdc_init - # mpirun -n 1 ./close_server - - name: Test PDC working-directory: build run: ctest -L serial diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ddef71e19..fd4c5e390 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -7,151 +7,309 @@ stages: - test - metrics -perlmutter-build: +perlmutter-no-cache-build: stage: build when: manual allow_failure: false tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" script: - module load libfabric/1.15.2.0 - module list - - mkdir -p ${PDC_BUILD_PATH}/perlmutter - - cd ${PDC_BUILD_PATH}/perlmutter - - cmake ../.. -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DPDC_SERVER_CACHE=ON -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=cc -DMPI_RUN_CMD="srun -A m2621 --qos=debug --constraint=cpu --tasks-per-node=64" -DCMAKE_INSTALL_PREFIX=${PDC_INSTALL_PATH}/perlmutter + - mkdir -p ${PDC_BUILD_PATH}/perlmutter/no-cache + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache + - cmake ../../.. 
-DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DPDC_SERVER_CACHE=OFF -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=cc -DMPI_RUN_CMD="srun -A m2621 --qos=debug --constraint=cpu --tasks-per-node=64" -DCMAKE_INSTALL_PREFIX=${PDC_INSTALL_PATH}/perlmutter/no-cache + - make -j + - make install + artifacts: + paths: + - ${PDC_BUILD_PATH}/perlmutter/no-cache + - ${PDC_INSTALL_PATH}/perlmutter/no-cache + +perlmutter-cache-build: + stage: build + when: manual + allow_failure: false + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + script: + - module load libfabric/1.15.2.0 + - module list + - mkdir -p ${PDC_BUILD_PATH}/perlmutter/cache + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - cmake ../../.. -DBUILD_MPI_TESTING=ON -DBUILD_SHARED_LIBS=ON -DPDC_SERVER_CACHE=ON -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=$PDC_DIR -DPDC_ENABLE_MPI=ON -DMERCURY_DIR=$MERCURY_DIR -DCMAKE_C_COMPILER=cc -DMPI_RUN_CMD="srun -A m2621 --qos=debug --constraint=cpu --tasks-per-node=64" -DCMAKE_INSTALL_PREFIX=${PDC_INSTALL_PATH}/perlmutter/cache - make -j - make install artifacts: paths: - - ${PDC_BUILD_PATH}/perlmutter - - ${PDC_INSTALL_PATH}/perlmutter + - ${PDC_BUILD_PATH}/perlmutter/cache + - ${PDC_INSTALL_PATH}/perlmutter/cache + +# ==================================================================== +# Perlmutter +# PDC (no-cache) +# ==================================================================== -perlmutter-parallel-pdc: +perlmutter-no-cache-parallel-pdc: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build + - perlmutter-no-cache-build tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-pdc" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-pdc" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-pdc" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-pdc" script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache - ctest -L parallel_pdc - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} -perlmutter-parallel-obj: +perlmutter-no-cache-parallel-obj: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build - - perlmutter-parallel-pdc + - perlmutter-no-cache-build + - perlmutter-no-cache-parallel-pdc tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-obj" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-obj" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-obj" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-obj" 
script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache - ctest -L parallel_obj - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} -perlmutter-parallel-cont: +perlmutter-no-cache-parallel-cont: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build - - perlmutter-parallel-pdc + - perlmutter-no-cache-build + - perlmutter-no-cache-parallel-pdc tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-cont" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-cont" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-cont" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-cont" script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache - ctest -L parallel_cont - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} -perlmutter-parallel-prop: +perlmutter-no-cache-parallel-prop: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build - - perlmutter-parallel-pdc + - perlmutter-no-cache-build + - perlmutter-no-cache-parallel-pdc tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-prop" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-prop" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-prop" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-prop" script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache - ctest -L parallel_prop - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} -perlmutter-parallel-region: +perlmutter-no-cache-parallel-region: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build - - perlmutter-parallel-pdc + - perlmutter-no-cache-build + - perlmutter-no-cache-parallel-pdc tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-region" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-region" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-region" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-region" script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache - ctest -L parallel_region_transfer - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} 
-perlmutter-parallel-region-all: +perlmutter-no-cache-parallel-region-all: stage: test rules: - if: '$METRICS == null' needs: - - perlmutter-build - - perlmutter-parallel-pdc + - perlmutter-no-cache-build + - perlmutter-no-cache-parallel-pdc tags: - perlmutter variables: - SCHEDULER_PARAMETERS: "-A m1248 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" - PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-paralell-region-all" - PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-paralell-region-all" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/no-cache/pdc-tmp-paralell-region-all" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/no-cache/pdc-data-paralell-region-all" script: - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" - - cd ${PDC_BUILD_PATH}/perlmutter + - cd ${PDC_BUILD_PATH}/perlmutter/no-cache + - ctest -L parallel_region_transfer_all + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +# ==================================================================== +# Perlmutter +# PDC (cache) +# ==================================================================== + +perlmutter-cache-parallel-pdc: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-pdc" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-pdc" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - ctest -L parallel_pdc + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +perlmutter-cache-parallel-obj: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + - perlmutter-cache-parallel-pdc + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-obj" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-obj" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - ctest -L parallel_obj + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +perlmutter-cache-parallel-cont: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + - perlmutter-cache-parallel-pdc + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-cont" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-cont" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - ctest -L parallel_cont + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +perlmutter-cache-parallel-prop: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + - perlmutter-cache-parallel-pdc + 
tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-prop" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-prop" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - ctest -L parallel_prop + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +perlmutter-cache-parallel-region: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + - perlmutter-cache-parallel-pdc + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-region" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-region" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache + - ctest -L parallel_region_transfer + - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} + +perlmutter-cache-parallel-region-all: + stage: test + rules: + - if: '$METRICS == null' + needs: + - perlmutter-cache-build + - perlmutter-cache-parallel-pdc + tags: + - perlmutter + variables: + SCHEDULER_PARAMETERS: "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 1 -t 00:30:00" + SUPERCOMPUTER: "perlmutter" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" + PDC_TMPDIR: "${PDC_BUILD_PATH}/cache/pdc-tmp-paralell-region-all" + PDC_DATA_LOC: "${PDC_BUILD_PATH}/cache/pdc-data-paralell-region-all" + script: + - export LD_LIBRARY_PATH="$MERCURY_DIR/lib:$LD_LIBRARY_PATH" + - cd ${PDC_BUILD_PATH}/perlmutter/cache - ctest -L parallel_region_transfer_all - rm -rf ${PDC_TMPDIR} ${PDC_DATA_LOC} @@ -160,21 +318,21 @@ perlmutter-metrics: rules: - if: '$METRICS == "true"' needs: - - perlmutter-build + - perlmutter-cache-build tags: - perlmutter variables: PDC_N_NODES: 64 PDC_N_CLIENTS: 127 - SCHEDULER_PARAMETERS: "-A m1248 --qos=regular --constraint=cpu --tasks-per-node=${PDC_N_CLIENTS} -N ${PDC_N_NODES} -t 00:30:00" + SCHEDULER_PARAMETERS: "-A m2621 --qos=regular --constraint=cpu --tasks-per-node=${PDC_N_CLIENTS} -N ${PDC_N_NODES} -t 00:30:00" SUPERCOMPUTER: "perlmutter" - MERCURY_DIR: "/global/cfs/cdirs/m1248/pdc-perlmutter/mercury/install" + MERCURY_DIR: "/global/cfs/cdirs/m2621/pdc-perlmutter/mercury/install" PDC_TMPDIR: "${PDC_BUILD_PATH}/pdc-tmp-metrics" PDC_DATA_LOC: "${PDC_BUILD_PATH}/pdc-data-metrics" PDC_CLIENT_LOOKUP: "NONE" - PDC_SERVER: "${PDC_BUILD_PATH}/perlmutter/bin/pdc_server.exe" - PDC_SERVER_CLOSE: "${PDC_BUILD_PATH}/perlmutter/bin/close_server" - PDC_CLIENT: "${PDC_BUILD_PATH}/perlmutter/bin/vpicio_mts" + PDC_SERVER: "${PDC_BUILD_PATH}/perlmutter/cache/bin/pdc_server.exe" + PDC_SERVER_CLOSE: "${PDC_BUILD_PATH}/perlmutter/cache/bin/close_server" + PDC_CLIENT: "${PDC_BUILD_PATH}/perlmutter/cache/bin/vpicio_mts" PDC_JOB_OUTPUT: "pdc-metrics.log" script: - hostname diff --git a/CMake/FindMERCURY.cmake b/CMake/FindMERCURY.cmake index d159efbaf..00fd5893d 100644 --- a/CMake/FindMERCURY.cmake +++ b/CMake/FindMERCURY.cmake @@ -27,14 +27,14 @@ if(MERCURY_FOUND) HINTS ${MERCURY_DIR} ) - # find_library(MERCURY_UTIL_LIBRARY - # NAMES - # mercury_util - # HINTS ${MERCURY_DIR} - # ) + 
find_library(MERCURY_UTIL_LIBRARY + NAMES + mercury_util + HINTS ${MERCURY_DIR} + ) - # set(MERCURY_LIBRARIES ${MERCURY_LIBRARY} ${MERCURY_NA_LIBRARY} ${MERCURY_UTIL_LIBRARY}) - set(MERCURY_LIBRARIES ${MERCURY_LIBRARY} ${MERCURY_NA_LIBRARY}) + set(MERCURY_LIBRARIES ${MERCURY_LIBRARY} ${MERCURY_NA_LIBRARY} ${MERCURY_UTIL_LIBRARY}) + # set(MERCURY_LIBRARIES ${MERCURY_LIBRARY} ${MERCURY_NA_LIBRARY}) set(MERCURY_INCLUDE_DIRS ${MERCURY_INCLUDE_DIR}) message(STATUS "mercury include dir = ${MERCURY_INCLUDE_DIRS}") message(STATUS "mercury lib = ${MERCURY_LIBRARIES}") diff --git a/CMake/FindUUID.cmake b/CMake/FindUUID.cmake index 22d87b38f..777e3029c 100644 --- a/CMake/FindUUID.cmake +++ b/CMake/FindUUID.cmake @@ -1,36 +1,41 @@ -# FindUUID.cmake +# On Mac OS X the uuid functions are in the System library -# Find the system's UUID library -# This will define: -# -# UUID_FOUND - System has UUID -# UUID_INCLUDE_DIRS - The UUID include directory -# UUID_LIBRARIES - The libraries needed to use UUID +if(APPLE) + set(UUID_LIBRARY_VAR System) +else() + set(UUID_LIBRARY_VAR uuid) +endif() -# - Try to find UUID -# Once done this will define -# UUID_FOUND - System has UUID -# UUID_INCLUDE_DIRS - The UUID include directories -# UUID_LIBRARIES - The libraries needed to use UUID - -find_package(PkgConfig) -pkg_check_modules(PC_UUID uuid) +find_library(UUID_LIBRARY + NAMES ${UUID_LIBRARY_VAR} + PATHS /usr/local/lib64 /usr/local/lib /usr/lib64 /usr/lib +) find_path(UUID_INCLUDE_DIR uuid/uuid.h HINTS ${PC_UUID_INCLUDEDIR} ${PC_UUID_INCLUDE_DIRS} - PATHS /usr/local/include /usr/include) - -find_library(UUID_LIBRARY NAMES uuid - HINTS ${PC_DRC_LIBDIR} ${PC_DRC_LIBRARY_DIRS} - PATHS /usr/local/lib64 /usr/local/lib /usr/lib64 /usr/lib) - -set(UUID_INCLUDE_DIRS ${UUID_INCLUDE_DIR}) -set(UUID_LIBRARIES ${UUID_LIBRARY}) - -include(FindPackageHandleStandardArgs) -# handle the QUIETLY and REQUIRED arguments and set UUID_FOUND to TRUE -# if all listed variables are TRUE -find_package_handle_standard_args(UUID DEFAULT_MSG - UUID_INCLUDE_DIR UUID_LIBRARY) - -mark_as_advanced(UUID_INCLUDE_DIR UUID_LIBRARY) \ No newline at end of file + PATHS /usr/local/include /usr/include +) + +if (UUID_LIBRARY AND UUID_INCLUDE_DIR) + set(UUID_LIBRARIES ${UUID_LIBRARY}) + set(UUID_FOUND "TRUE") +else () + set(UUID_FOUND "FALSE") +endif () + +if (UUID_FOUND) + if (NOT UUID_FIND_QUIETLY) + message(STATUS "Found UUID: ${UUID_LIBRARIES}") + endif () +else () + if (UUID_FIND_REQUIRED) + message( "library: ${UUID_LIBRARY}" ) + message( "include: ${UUID_INCLUDE_DIR}" ) + message(FATAL_ERROR "Could not find UUID library") + endif () +endif () + +mark_as_advanced( + UUID_LIBRARY + UUID_INCLUDE_DIR +) diff --git a/CMakeLists.txt b/CMakeLists.txt index 975dac351..70bc8ce2e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -467,7 +467,21 @@ if(PDC_ENABLE_FASTBIT) set(ENABLE_FASTBIT 1) endif() + +# Metadata with RocksDB +#----------------------------------------------------------------------------- +option(PDC_ENABLE_ROCKSDB "Enable RocksDB (experimental)." OFF) +if(PDC_ENABLE_ROCKSDB) + set(ENABLE_ROCKSDB 1) +endif() + +# Metadata with SQLite #----------------------------------------------------------------------------- +option(PDC_ENABLE_SQLITE3 "Enable SQLite3 (experimental)." 
OFF)
+if(PDC_ENABLE_SQLITE3)
+  set(ENABLE_SQLITE3 1)
+endif()
+
# Check availability of symbols
#-----------------------------------------------------------------------------
check_symbol_exists(malloc_usable_size "malloc.h" HAVE_MALLOC_USABLE_SIZE)
@@ -649,5 +663,4 @@ add_custom_target(format
| xargs -0 clang-format -i -style=file && echo "... done"
COMMENT "clang-format all source codes" VERBATIM
-)
-
+)
\ No newline at end of file
diff --git a/docs/requirements.txt b/docs/requirements.txt
index ffc6fc227..b9245e074 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,2 +1,4 @@
+sphinx==6.2.1
+sphinx-rtd-theme==1.2.2
sphinxemoji
breathe
diff --git a/docs/source/api.rst b/docs/source/api.rst
index ab058f10a..4dc7dc226 100644
--- a/docs/source/api.rst
+++ b/docs/source/api.rst
@@ -253,6 +253,33 @@ PDC object APIs
* Delete data from an object.
* For developers: see pdc_client_connect.c. Use PDC_obj_get_info to retrieve name. Then forward name to servers to fulfill requests.
+---------------------------
+PDC region APIs
+---------------------------
+
+
+---------------------------
+PDC property APIs
+---------------------------
+
+
+---------------------------
+PDC metadata APIs
+---------------------------
+PDC maintains object metadata (obj name, dimension, create time, etc.) in a distributed hash table. Each object's metadata can be
+accessed with its object ID. Users can also issue metadata queries to retrieve the object IDs that meet the query constraints.
+
+PDC allows users to add key-value tags to each object, where the key is a string and the value can be a binary array of any datatype and length.
+The key-value tags are stored in an in-memory linked list by default.
+
+PDC has metadata indexing and querying support when DART is enabled. See the ``DART`` section in the Developer Notes.
+
+PDC additionally supports managing the key-value tags with RocksDB and SQLite, both of which are considered experimental at the moment.
+Either backend can be enabled by turning on the ``PDC_ENABLE_ROCKSDB`` or ``PDC_ENABLE_SQLITE3`` flag in CMake, setting
+``ROCKSDB_DIR`` or ``SQLITE3_DIR``, and setting the environment variable ``PDC_USE_ROCKSDB`` or ``PDC_USE_SQLITE3`` to 1 before launching the server.
+Users can use the same PDC query APIs when RocksDB or SQLite is enabled.
+
+
* perr_t PDCobj_put_tag(pdcid_t obj_id, char *tag_name, void *tag_value, psize_t value_size)
* Input:
* obj_id: Local object ID
@@ -285,17 +312,7 @@ PDC object APIs
* For developers: see pdc_client_connect.c. Need to use PDCtag_delete to submit RPCs to the servers for metadata update.
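+
+For illustration, a minimal sketch of enabling the experimental RocksDB backend (the install path below is a placeholder, and passing ``ROCKSDB_DIR``/``SQLITE3_DIR`` as CMake cache variables is an assumption):
+
+.. code-block:: Bash
+
+   # configure PDC with the experimental RocksDB kvtag backend
+   cmake ../ -DPDC_ENABLE_ROCKSDB=ON -DROCKSDB_DIR=/path/to/rocksdb/install
+   make -j && make install
+   # tell the server to actually use RocksDB at runtime, then launch it as usual
+   export PDC_USE_ROCKSDB=1
+   # the SQLite backend works the same way with PDC_ENABLE_SQLITE3, SQLITE3_DIR,
+   # and PDC_USE_SQLITE3=1
+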
--------------------------- -PDC region APIs ---------------------------- - - ---------------------------- -PDC property APIs ---------------------------- - - ---------------------------- -PDC query APIs +PDC Data query APIs --------------------------- * pdc_query_t *PDCquery_create(pdcid_t obj_id, pdc_query_op_t op, pdc_var_type_t type, void *value) @@ -883,4 +900,4 @@ Developers notes * Object * Object property See `Object Property `_ - * Object structure (pdc_obj_pkg.h and pdc_obj.h) See `Object Structure `_ \ No newline at end of file + * Object structure (pdc_obj_pkg.h and pdc_obj.h) See `Object Structure `_ diff --git a/docs/source/developer-notes.rst b/docs/source/developer-notes.rst index f9d9cdd28..0db7c96f1 100644 --- a/docs/source/developer-notes.rst +++ b/docs/source/developer-notes.rst @@ -138,6 +138,13 @@ For No-index approach, here are the APIs you can call for different communicatio * PDC_Client_query_kvtag (point-to-point) * PDC_Client_query_kvtag_mpi (collective) +By default, PDC kvtags are stored within each object's metadata as a linked list, and any query involves traversing the list in memory. + +We additionally support managing the kvtags with RocksDB or SQLite. With this approach, each PDC server creates and accesses its own RocksDB or SQLite database file, which is stored as an in-memory file in the /tmp directory. RocksDB or SQLite is enabled by setting the environment variable ``PDC_USE_ROCKSDB=1`` or ``PDC_USE_SQLITE3=1`` before launching the server. +With the RocksDB implementation, each kvtag is stored as a RocksDB key-value pair. To differentiate the kvtags of different objects, we encode the object ID into the key string used for RocksDB, and store the tag value as the RocksDB value. As a result, a value can be retrieved directly when its object ID and key string are known; otherwise we must iterate over the entire DB to search for a kvtag. +With the SQLite3 implementation, each kvtag is inserted as a row in a SQLite3 table. Currently, the table has the following columns and SQLite3 datatypes: objid (INTEGER), name (TEXT), value_text (TEXT), value_int (INTEGER), value_float (REAL), value_double (REAL), value_blob (BLOB). We construct a SQL SELECT statement automatically on the server when receiving a query request from the PDC client. Currently this implementation focuses on string/text affix search and integer/float (single) value match search. +Both the RocksDB and the SQLite implementations are currently intended for benchmarking purposes: the database files are removed at server finalization time, and restart is not supported. +
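For illustration, the encodings described above can be sketched as follows (this mirrors the server-side code later in this patch; the helper names are hypothetical and not part of the PDC API):

.. code-block:: c

   #include <inttypes.h>
   #include <stdint.h>
   #include <stdio.h>

   /* RocksDB backend: one key-value pair per kvtag. The object ID is
    * encoded into the key string, separated from the tag name by a
    * backtick, so a lookup with a known (object ID, tag name) pair maps
    * to a single rocksdb_get(); any other query iterates the whole DB.
    * The server code spells the format "%lu`%s"; PRIu64 is the portable
    * equivalent for a uint64_t object ID. */
   static void
   make_rocksdb_key(char *buf, size_t len, uint64_t obj_id, const char *tag_name)
   {
       snprintf(buf, len, "%" PRIu64 "`%s", obj_id, tag_name);
   }

   /* SQLite3 backend: affix queries arrive with '*' wildcards and are
    * rewritten into SQL LIKE patterns ('%') before the SELECT is issued. */
   static void
   wildcard_to_like(char *pattern)
   {
       for (char *p = pattern; *p != '\0'; p++)
           if (*p == '*')
               *p = '%';
   }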
Index-facilitated Approach --------------------------------------------- @@ -398,18 +405,56 @@ Also, to make sure your code with Julia function calls doesn't get compiled when For more info on embedded Julia support, please visit: `Embedded Julia https://docs.julialang.org/en/v1/manual/embedding/`_. - - --------------------------------------------- -Github Codespace Support +Docker Support --------------------------------------------- -This is a feature current in progress. -We are trying to make PDC work with Github Codespace. +Sometimes, you may want to have a development or testing environment to work on PDC. + +We provide Docker support for PDC for this purpose. + +To build the docker image, you can run the following command in the root directory of the PDC project: + +.. code-block:: Bash + .docker/run_dev_base.sh + +This will mount your PDC project directory to the `/workspaces/pdc` directory in the docker container, and an initialization step will be performed once you attach to the container. +The experience will be very similar to a Github Codespace. + + +------------------------------------------------- +Github Codespace and VSCode Dev Container Support +------------------------------------------------- + +Now the PDC project can be built and run in a Github Codespace. For more information on how to create a Github Codespace, please refer to `Github Codespace Documentation `_ + +You can also use a VSCode Dev Container to develop PDC as long as you have VSCode and Docker installed on your local computer. For more information on VSCode dev containers, please refer to `Developing inside a Container `_ . + +When you create a codespace, you can find your PDC project in the `/workspaces/pdc` directory. +You can also find your PDC project and its dependencies in the `/home/project` directory; you will see the same directory structure there as described in our standalone installation guide. + +Since you are using the same PDC dev_base docker image, everything should be the same as in the Docker support described above. + + +------------------------------------------------ +Maintaining Docker Image +------------------------------------------------ + +We currently support only two architectures, amd64 and arm64v8. +To build the architecture-specific docker image on a machine with the corresponding CPU architecture, you can run the following command in the root directory of the PDC project: + +.. code-block:: Bash + .docker/publish_dev_base.sh + +If you run the above command on an ARM64v8 CPU (say, an Apple Silicon Mac), it will generate an image named '/pdc_dev_base:-arm64v8'. +If you run the above command on any Intel x64/AMD x64 CPU (say, a Microsoft Surface, an Intel Mac, or an Intel CPU VM from AWS/Azure/GCP/OCI), it will generate an image named '/pdc_dev_base:-amd64'. +Once the above is done, you can pick the image build machine with the fastest network and run the following: -Currently, with `.devcontainer/devcontainer.json` and `.devcontainer/Dockerfile`, you can build a docker image that contains all the dependencies for PDC. -However, when the Codespace is created, the predefined directories in the docker file will disappear. +.. code-block:: Bash + .docker/publish_dev_base.sh 1 +This will create a multi-arch image covering both the amd64 and arm64v8 architectures in your registry under your namespace. +Both architecture-specific images will be linked to a manifest in your docker registry named '/pdc_dev_base:latest'. ------------------------------------------------------------ Tracking your memory consumption with each memory allocation diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 2769c487a..295517128 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -42,6 +42,7 @@ PDC can use either MPICH or OpenMPI as the MPI library, if your system doesn't h We provide detailed instructions for installing libfabric, Mercury, and PDC below. .. attention:: + Following the instructions below will record all the environmental variables needed to run PDC in the ``$WORK_SPACE/pdc_env.sh`` file, which can be used for future PDC runs with ``source $WORK_SPACE/pdc_env.sh``.
@@ -57,9 +58,9 @@ Before installing the dependencies and downloading the code repository, we assum mkdir -p $WORK_SPACE/install cd $WORK_SPACE/source - git clone git@github.com:ofiwg/libfabric.git - git clone git@github.com:mercury-hpc/mercury.git --recursive - git clone git@github.com:hpc-io/pdc.git + git clone https://github.com/ofiwg/libfabric + git clone https://github.com/mercury-hpc/mercury --recursive + git clone https://github.com/hpc-io/pdc export LIBFABRIC_SRC_DIR=$WORK_SPACE/source/libfabric export MERCURY_SRC_DIR=$WORK_SPACE/source/mercury @@ -118,9 +119,18 @@ Install libfabric .. note:: + ``CC=mpicc`` may need to be changed to the corresponding compiler in your system, e.g. ``CC=cc`` or ``CC=gcc``. On Perlmutter@NERSC, ``--disable-efa --disable-sockets`` should be added to the ``./configure`` command when compiling on login nodes. +.. attention:: + + If you're installing PDC on MacOS, you need to make sure you enable ``sockets``: + + .. code-block:: Bash + + ./configure CFLAGS=-O2 --enable-sockets=yes --enable-tcp=yes --enable-udp=yes --enable-rxm=yes + Install Mercury --------------- @@ -149,6 +159,15 @@ Install Mercury ``CC=mpicc`` may need to be changed to the corresponding compiler in your system, e.g. ``-DCMAKE_C_COMPILER=cc`` or ``-DCMAKE_C_COMPILER=gcc``. Make sure the ctest passes. PDC may not work without passing all the tests of Mercury. +.. attention:: + + If you're installing PDC on MacOS, for the tests to work you need to specify the protocol used by Mercury: + + .. code-block:: Bash + + cmake -DCMAKE_INSTALL_PREFIX=$MERCURY_DIR -DCMAKE_C_COMPILER=mpicc -DBUILD_SHARED_LIBS=ON \ + -DBUILD_TESTING=ON -DNA_USE_OFI=ON -DNA_USE_SM=OFF -DNA_OFI_TESTING_PROTOCOL=sockets + Install PDC ----------- @@ -170,10 +189,20 @@ Install PDC echo 'export PATH=$PDC_DIR/include:$PDC_DIR/lib:$PATH' >> $WORK_SPACE/pdc_env.sh .. note:: + ``-DCMAKE_C_COMPILER=mpicc -DMPI_RUN_CMD=mpiexec`` may need to be changed to ``-DCMAKE_C_COMPILER=cc -DMPI_RUN_CMD=srun`` depending on your system environment. .. note:: - If you are trying to compile PDC on your Mac, ``LibUUID`` needs to be installed on your MacOS first. Simple use ``brew install ossp-uuid`` to install it. + + If you are trying to compile PDC on MacOS, ``LibUUID`` needs to be installed on your MacOS first. Simply use ``brew install ossp-uuid`` to install it. + If you are trying to compile PDC on Linux, you should also make sure ``LibUUID`` is installed on your system. If not, you can install it with ``sudo apt-get install uuid-dev`` on Ubuntu or ``yum install libuuid-devel`` on CentOS. + + On MacOS you also need to export the following environment variable so PDC (i.e., Mercury) uses the ``sockets`` protocol, the only one supported on MacOS: + + .. code-block:: Bash + + export HG_TRANSPORT="sockets" + Test Your PDC Installation -------------------------- @@ -183,6 +212,12 @@ PDC's ``ctest`` contains both sequential and parallel/MPI tests, and can be run ctest +You can also specify a timeout (e.g., 2 minutes) for the tests by passing the ``--timeout`` option when calling ``ctest``: + +.. code-block:: Bash + + ctest --timeout 120 + .. note:: If you are using PDC on an HPC system, e.g. Perlmutter@NERSC, ``ctest`` should be run on a compute node, you can submit an interactive job on Perlmutter: ``salloc --nodes 1 --qos interactive --time 01:00:00 --constraint cpu --account=mxxxx`` @@ -219,14 +254,18 @@ Then you can compile your PDC project with Julia support.
Now, see Developer Notes to know how you can add your own Julia functions to enhance your test cases in PDC. -Build PDC Docker Image and Run PDC Docker Container +Build PDC in a Docker Container +--------------------------------------------------- +Simply run the following command from the project root directory to build PDC in a Docker container: +.. code-block:: Bash + .docker/run_dev_base.sh + + +Build PDC in Github Codespace --------------------------------------------------- -We provide a Dockerfile to build a PDC Docker image. The Dockerfile is located at `--$PDC_ROOT/.docker/local.Dockerfile` -To build the PDC Docker image, you can run the following command from `$PDC_ROOT`: -`docker build -t pdc_dev_base:latest -f $PDC_ROOT/.docker/base.Dockerfile .` +Simply start a Github Codespace from the target branch of your PDC project, and enjoy. +For more information on how to create a Github Codespace, please refer to `Github Codespace Documentation `_ -To run the PDC Docker container, you can run the following command: -`docker run -it --rm --name pdc -v $PDC_ROOT:/home/codespace/source/pdc pdc_dev_base:latest /bin/bash` --------------------------- Running PDC --------------------------- diff --git a/examples/llsm/.gitignore b/examples/llsm/.gitignore new file mode 100644 index 000000000..c795b054e --- /dev/null +++ b/examples/llsm/.gitignore @@ -0,0 +1 @@ +build \ No newline at end of file diff --git a/examples/llsm/CMakeLists.txt b/examples/llsm/CMakeLists.txt new file mode 100644 index 000000000..95923865a --- /dev/null +++ b/examples/llsm/CMakeLists.txt @@ -0,0 +1,96 @@ +cmake_minimum_required (VERSION 2.8.12) + +# Setup cmake policies. +foreach(p + CMP0012 + CMP0013 + CMP0014 + CMP0022 # CMake 2.8.12 + CMP0025 # CMake 3.0 + CMP0053 # CMake 3.1 + CMP0054 # CMake 3.1 + CMP0074 # CMake 3.12 + CMP0075 # CMake 3.12 + CMP0083 # CMake 3.14 + CMP0093 # CMake 3.15 + ) + if(POLICY ${p}) + cmake_policy(SET ${p} NEW) + endif() +endforeach() + +project(PDC_LLSM_EXAM C) + +set(LLSM_EXT_INCLUDE_DIRS "") +set(LLSM_EXT_LIBRARIES "") + + + +set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." FORCE) + +find_package(PDC REQUIRED) +if(PDC_FOUND) + #message(STATUS "PDC include directory: ${PDC_INCLUDE_DIR}") + set(LLSM_EXT_INCLUDE_DIRS ${PDC_INCLUDE_DIR} + ${LLSM_EXT_INCLUDE_DIRS} + ) + set(LLSM_EXT_LIBRARIES pdc ${LLSM_EXT_LIBRARIES}) +endif() + +option(USE_SYSTEM_MPI "Use system-installed MPI." ON) +if(USE_SYSTEM_MPI) + find_package(MPI) + if(MPI_FOUND) + add_definitions(-DLLSM_ENABLE_MPI=1) + SET(CMAKE_C_COMPILER ${MPI_C_COMPILER}) + SET(CMAKE_CXX_COMPILER ${MPI_CXX_COMPILER}) + set(LLSM_EXT_INCLUDE_DIRS ${MPI_C_INCLUDE_PATH} + ${LLSM_EXT_INCLUDE_DIRS} + ) + set(LLSM_EXT_LIBRARIES ${MPI_C_LIBRARIES} ${LLSM_EXT_LIBRARIES}) + endif() +endif() + +option(USE_SYSTEM_OPENMP "Use system-installed OpenMP." ON) +if(USE_SYSTEM_OPENMP) + find_package(OpenMP REQUIRED) + if(OPENMP_FOUND) + add_definitions(-DENABLE_OPENMP=1) + set(ENABLE_OPENMP 1) + set(OPENMP_LIBRARIES "${OpenMP_C_LIBRARIES}") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fopenmp") + else() + message(FATAL_ERROR "OpenMP not found") + endif() +endif() + +include_directories( + ${LLSM_EXT_INCLUDE_DIRS} +)
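The ``-DLLSM_ENABLE_MPI=1`` definition set above is what the importer sources key off. A minimal sketch of that guard pattern (mirroring the ``#ifdef LLSM_ENABLE_MPI`` usage in llsm_importer.c later in this patch; illustrative only, not part of the patch):

.. code-block:: c

   #include <stdio.h>

   /* Compiled with -DLLSM_ENABLE_MPI=1 when find_package(MPI) succeeds;
    * builds as a serial program otherwise. */
   #ifdef LLSM_ENABLE_MPI
   #include "mpi.h"
   #endif

   int
   main(int argc, char *argv[])
   {
       int rank = 0, size = 1;
   #ifdef LLSM_ENABLE_MPI
       MPI_Init(&argc, &argv);
       MPI_Comm_rank(MPI_COMM_WORLD, &rank);
       MPI_Comm_size(MPI_COMM_WORLD, &size);
   #endif
       printf("rank %d of %d\n", rank, size);
       /* ... partition the TIFF file list across ranks here ... */
   #ifdef LLSM_ENABLE_MPI
       MPI_Finalize();
   #endif
       return 0;
   }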
+ + +# Find LibTIFF +option(USE_LIB_TIFF "Enable LibTiff." ON) +if(USE_LIB_TIFF) + find_package(TIFF REQUIRED) + if(TIFF_FOUND) + set(LLSM_LIB_SOURCE + llsm_aux/parallelReadTiff.c + llsm_aux/csvReader.c + llsm_aux/pdc_list.c + ) + # Add the LibTIFF include directory to the include path + include_directories(${TIFF_INCLUDE_DIRS}) + add_library(llsm_tiff ${LLSM_LIB_SOURCE}) + target_compile_options(llsm_tiff PRIVATE ${OpenMP_C_FLAGS}) + target_link_libraries(llsm_tiff PUBLIC ${OpenMP_C_LIBRARIES}) + target_link_libraries(llsm_tiff PUBLIC ${TIFF_LIBRARIES}) + target_include_directories(llsm_tiff PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/llsm) + + add_executable(llsm_importer llsm_importer.c) + target_link_libraries(llsm_importer ${PDC_EXT_LIB_DEPENDENCIES} pdc ${TIFF_LIBRARIES} llsm_tiff ${LLSM_EXT_LIBRARIES}) + target_include_directories(llsm_importer PUBLIC ${LLSM_EXT_INCLUDE_DIRS}) + else() + message(WARNING "LibTiff not found, skipping the executables that require LibTiff support.") + endif() +endif() \ No newline at end of file diff --git a/tools/LLSM_IMPORTER.md b/examples/llsm/LLSM_IMPORTER.md similarity index 100% rename from tools/LLSM_IMPORTER.md rename to examples/llsm/LLSM_IMPORTER.md diff --git a/tools/llsm/csvReader.c b/examples/llsm/llsm_aux/csvReader.c similarity index 100% rename from tools/llsm/csvReader.c rename to examples/llsm/llsm_aux/csvReader.c diff --git a/tools/llsm/csvReader.h b/examples/llsm/llsm_aux/csvReader.h similarity index 100% rename from tools/llsm/csvReader.h rename to examples/llsm/llsm_aux/csvReader.h diff --git a/tools/llsm/parallelReadTiff.c b/examples/llsm/llsm_aux/parallelReadTiff.c similarity index 98% rename from tools/llsm/parallelReadTiff.c rename to examples/llsm/llsm_aux/parallelReadTiff.c index cc4026ac9..063c86f39 100644 --- a/tools/llsm/parallelReadTiff.c +++ b/examples/llsm/llsm_aux/parallelReadTiff.c @@ -1,7 +1,8 @@ #include "parallelReadTiff.h" #include "tiffio.h" +#include "inttypes.h" -#define ENABLE_OPENMP +// #define ENABLE_OPENMP #ifdef ENABLE_OPENMP #include "omp.h" @@ -50,8 +51,8 @@ readTiffParallelBak(uint64_t x, uint64_t y, uint64_t z, const char *fileName, vo int counter = 0; while (!TIFFSetDirectory(tif, (uint64_t)dir) && counter < 3) { - printf("Thread %d: File \"%s\" Directory \"%d\" failed to open. Try %d\n", w, fileName, dir, - counter + 1); + printf("Thread %d: File \"%s\" Directory \"%" PRId64 "\" failed to open. Try %d\n", w, + fileName, dir, counter + 1); counter++; } @@ -344,8 +345,8 @@ readTiffParallel2DBak(uint64_t x, uint64_t y, uint64_t z, const char *fileName, int counter = 0; while (!TIFFSetDirectory(tif, (uint64_t)0) && counter < 3) { - printf("Thread %d: File \"%s\" Directory \"%d\" failed to open. Try %d\n", w, fileName, dir, - counter + 1); + printf("Thread %d: File \"%s\" Directory \"%" PRId64 "\" failed to open. 
Try %d\n", w, + fileName, dir, counter + 1); counter++; } diff --git a/tools/llsm/parallelReadTiff.h b/examples/llsm/llsm_aux/parallelReadTiff.h similarity index 100% rename from tools/llsm/parallelReadTiff.h rename to examples/llsm/llsm_aux/parallelReadTiff.h diff --git a/tools/llsm/pdc_list.c b/examples/llsm/llsm_aux/pdc_list.c similarity index 100% rename from tools/llsm/pdc_list.c rename to examples/llsm/llsm_aux/pdc_list.c diff --git a/tools/llsm/pdc_list.h b/examples/llsm/llsm_aux/pdc_list.h similarity index 100% rename from tools/llsm/pdc_list.h rename to examples/llsm/llsm_aux/pdc_list.h diff --git a/tools/llsm_importer.c b/examples/llsm/llsm_importer.c similarity index 97% rename from tools/llsm_importer.c rename to examples/llsm/llsm_importer.c index ea5097278..9f6c0e043 100644 --- a/tools/llsm_importer.c +++ b/examples/llsm/llsm_importer.c @@ -4,22 +4,17 @@ #include #include -#ifndef ENABLE_MPI -#define ENABLE_MPI -#endif - -#ifdef ENABLE_MPI +#ifdef LLSM_ENABLE_MPI #include "mpi.h" -// #undef ENABLE_MPI #endif #include "pdc.h" // #include "pdc_client_server_common.h" // #include "pdc_client_connect.h" -#include "llsm/parallelReadTiff.h" -#include "llsm/pdc_list.h" -#include "llsm/csvReader.h" +#include "llsm_aux/parallelReadTiff.h" +#include "llsm_aux/pdc_list.h" +#include "llsm_aux/csvReader.h" #include typedef struct llsm_importer_args_t { @@ -126,7 +121,7 @@ import_to_pdc(image_info_t *image_info, csv_cell_t *fileName_cell) duration = getDoubleTimestamp() - start; // end timing the operation and calculate duration in nanoseconds - printf("[Rank %4d] Region_Transfer %s_[%d_Bytes] Done! Time taken: %.4f seconds\n", rank, + printf("[Rank %4d] Region_Transfer %s_[%ld_Bytes] Done! Time taken: %.4f seconds\n", rank, fileName_cell->field_value, image_info->tiff_size, duration); // add metadata tags based on the csv row @@ -372,7 +367,7 @@ main(int argc, char *argv[]) #endif if (rank == 0) { - printf("[Completion Time] LLSM IMPORTER FINISHES! Time taken: %.4f seconds\n", rank, duration); + printf("[Completion Time] LLSM IMPORTER FINISHES! 
Time taken: %.4f seconds\n", duration); } // free memory for csv table csv_free_table(csv_table); diff --git a/src/api/CMakeLists.txt b/src/api/CMakeLists.txt index 7b9a356fb..c92b2c3b0 100644 --- a/src/api/CMakeLists.txt +++ b/src/api/CMakeLists.txt @@ -42,7 +42,7 @@ if(MERCURY_FOUND) set(PDC_EXT_INCLUDE_DEPENDENCIES ${MERCURY_INCLUDE_DIR} ${PDC_EXT_INCLUDE_DEPENDENCIES} ) - set(PDC_EXT_LIB_DEPENDENCIES mercury ${PDC_EXT_LIB_DEPENDENCIES}) + set(PDC_EXT_LIB_DEPENDENCIES ${MERCURY_LIBRARIES} ${PDC_EXT_LIB_DEPENDENCIES}) endif() include_directories(${PDC_EXT_INCLUDE_DEPENDENCIES}) diff --git a/src/api/pdc_client_connect.c b/src/api/pdc_client_connect.c index 44c1786e6..b89006293 100644 --- a/src/api/pdc_client_connect.c +++ b/src/api/pdc_client_connect.c @@ -9020,8 +9020,16 @@ _standard_all_gather_result(int query_sent, int *n_res, uint64_t **pdc_ids, MPI_ uint64_t *all_ids = (uint64_t *)malloc(ntotal * sizeof(uint64_t)); MPI_Allgatherv(*pdc_ids, *n_res, MPI_UINT64_T, all_ids, all_nmeta_array, disp, MPI_UINT64_T, world_comm); + if (*pdc_ids) + free(*pdc_ids); + *n_res = ntotal; *pdc_ids = all_ids; + + free(all_nmeta_array); + free(disp); + + return; } void @@ -9127,7 +9135,7 @@ PDC_Client_query_kvtag_mpi(const pdc_kvtag_t *kvtag, int *n_res, uint64_t **pdc_ if (*n_res <= 0) { *n_res = 0; - *pdc_ids = (uint64_t *)malloc(0); + *pdc_ids = NULL; } else { // print the pdc ids returned by this client, along with the client id @@ -9349,4 +9357,4 @@ PDC_Client_search_obj_ref_through_dart_mpi(dart_hash_algo_t hash_algo, char *que } #endif -/******************** Collective Object Selection Query Ends *******************************/ \ No newline at end of file +/******************** Collective Object Selection Query Ends *******************************/ diff --git a/src/api/pdc_obj/pdc_cont.c b/src/api/pdc_obj/pdc_cont.c index d0c896816..b589188e6 100644 --- a/src/api/pdc_obj/pdc_cont.c +++ b/src/api/pdc_obj/pdc_cont.c @@ -86,6 +86,7 @@ PDCcont_create(const char *cont_name, pdcid_t cont_prop_id) p->cont_pt->pdc->local_id = cont_prop->pdc->local_id; ret = PDC_Client_create_cont_id(cont_name, cont_prop_id, &(p->cont_info_pub->meta_id)); + if (ret == FAIL) PGOTO_ERROR(0, "Unable to create container on the server!"); @@ -316,8 +317,10 @@ PDC_cont_get_info(pdcid_t cont_id) FUNC_ENTER(NULL); id_info = PDC_find_id(cont_id); - info = (struct _pdc_cont_info *)(id_info->obj_ptr); + if (id_info == NULL) + PGOTO_ERROR(NULL, "cannot locate object"); + info = (struct _pdc_cont_info *)(id_info->obj_ptr); ret_value = PDC_CALLOC(1, struct _pdc_cont_info); if (ret_value) memcpy(ret_value, info, sizeof(struct _pdc_cont_info)); @@ -326,9 +329,8 @@ PDC_cont_get_info(pdcid_t cont_id) ret_value->cont_info_pub = PDC_CALLOC(1, struct pdc_cont_info); if (ret_value->cont_info_pub) - memcpy(ret_value, info, sizeof(struct pdc_cont_info)); - else - PGOTO_ERROR(NULL, "cannot allocate ret_value->cont_info_pub"); + memcpy(ret_value->cont_info_pub, info->cont_info_pub, sizeof(struct pdc_cont_info)); + if (info->cont_info_pub->name) ret_value->cont_info_pub->name = strdup(info->cont_info_pub->name); diff --git a/src/commons/utils/include/string_utils.h b/src/commons/utils/include/string_utils.h index 675944dda..865a282de 100644 --- a/src/commons/utils/include/string_utils.h +++ b/src/commons/utils/include/string_utils.h @@ -25,11 +25,6 @@ typedef enum { PATTERN_EXACT = 2, PATTERN_PREFIX = 3, PATTERN_SUFFIX = 4, PATTERN_MIDDLE = 5 } pattern_type_t; -typedef struct { - char * start; - size_t length; -} string; - /** * take the part 
starting from the start position * you need to free str after use. diff --git a/src/server/CMakeLists.txt b/src/server/CMakeLists.txt index aaedcd4ae..5653602d9 100644 --- a/src/server/CMakeLists.txt +++ b/src/server/CMakeLists.txt @@ -6,6 +6,17 @@ if(PDC_ENABLE_FASTBIT) find_library(FASTBIT_LIBRARY fastbit $ENV{HOME}/cori/fastbit-2.0.3/install) endif() +if(PDC_ENABLE_ROCKSDB) + add_definitions(-DENABLE_ROCKSDB=1) + find_path(ROCKSDB_INCLUDE_DIR include/db.h) + find_library(ROCKSDB_LIBRARY rocksdb 8.1.1< REQUIRED) +endif() + +if(PDC_ENABLE_SQLITE3) + add_definitions(-DENABLE_SQLITE3=1) + find_package(SQLite3 3.31.0 REQUIRED) +endif() + include_directories( ${PDC_COMMON_INCLUDE_DIRS} ${PDC_INCLUDES_BUILD_TIME} @@ -28,6 +39,7 @@ include_directories( ${PDC_SOURCE_DIR}/src/utils/include ${MERCURY_INCLUDE_DIR} ${FASTBIT_INCLUDE_DIR} + ${ROCKSDB_INCLUDE_DIR} ) add_definitions( -DIS_PDC_SERVER=1 ) @@ -57,9 +69,17 @@ add_library(pdc_server_lib ) if(PDC_ENABLE_FASTBIT) message(STATUS "Enabled fastbit") - target_link_libraries(pdc_server_lib mercury ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES} ${FASTBIT_LIBRARY}/libfastbit.so) + target_link_libraries(pdc_server_lib ${MERCURY_LIBRARY} ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES} ${FASTBIT_LIBRARY}/libfastbit.so) +elseif(PDC_ENABLE_ROCKSDB) + if(PDC_ENABLE_SQLITE3) + target_link_libraries(pdc_server_lib ${MERCURY_LIBRARY} ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES} ${ROCKSDB_LIBRARY} SQLite::SQLite3) + else() + target_link_libraries(pdc_server_lib ${MERCURY_LIBRARY} ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES} ${ROCKSDB_LIBRARY}) + endif() +elseif(PDC_ENABLE_SQLITE3) + target_link_libraries(pdc_server_lib ${MERCURY_LIBRARY} ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES} SQLite::SQLite3) else() - target_link_libraries(pdc_server_lib mercury ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES}) + target_link_libraries(pdc_server_lib ${MERCURY_LIBRARY} ${PDC_COMMONS_LIBRARIES} -lm -ldl ${PDC_EXT_LIB_DEPENDENCIES}) endif() add_executable(pdc_server.exe @@ -78,10 +98,9 @@ if(NOT ${PDC_INSTALL_BIN_DIR} MATCHES ${PROJECT_BINARY_DIR}/bin) install( TARGETS pdc_server.exe - DESTINATION ${PDC_INSTALL_BIN_DIR} + pdc_server_lib + LIBRARY DESTINATION ${PDC_INSTALL_LIB_DIR} + ARCHIVE DESTINATION ${PDC_INSTALL_LIB_DIR} + RUNTIME DESTINATION ${PDC_INSTALL_BIN_DIR} ) endif() - - - - diff --git a/src/server/include/pdc_server.h b/src/server/include/pdc_server.h index 09f527013..f2fca93d2 100644 --- a/src/server/include/pdc_server.h +++ b/src/server/include/pdc_server.h @@ -49,6 +49,18 @@ #include "iapi.h" #endif +#ifdef ENABLE_ROCKSDB +#include "rocksdb/c.h" +extern rocksdb_t *rocksdb_g; +extern int use_rocksdb_g; +#endif + +#ifdef ENABLE_SQLITE3 +#include "sqlite3.h" +extern sqlite3 *sqlite3_db_g; +extern int use_sqlite3_g; +#endif + #ifdef ENABLE_MULTITHREAD // Mercury multithread #include "mercury_thread.h" diff --git a/src/server/include/pdc_server_metadata.h b/src/server/include/pdc_server_metadata.h index d7832b6f9..de3c15c80 100644 --- a/src/server/include/pdc_server_metadata.h +++ b/src/server/include/pdc_server_metadata.h @@ -62,6 +62,8 @@ extern pdc_remote_server_info_t *pdc_remote_server_info_g; extern double total_mem_usage_g; extern int is_hash_table_init_g; extern int is_restart_g; +extern int use_rocksdb_g; +extern int use_sqlite3_g; /****************************/ /* Library Private Typedefs */ @@ -83,6 +85,14 @@ typedef struct pdc_cont_hash_table_entry_t { 
pdc_kvtag_list_t *kvtag_list_head; } pdc_cont_hash_table_entry_t; +#ifdef ENABLE_SQLITE3 +typedef struct pdc_sqlite3_query_t { + pdcid_t **obj_ids; + int nobj; + int nalloc; +} pdc_sqlite3_query_t; +#endif + /***************************************/ /* Library-private Function Prototypes */ /***************************************/ diff --git a/src/server/pdc_server.c b/src/server/pdc_server.c index a07e5596b..d73fd9b9f 100644 --- a/src/server/pdc_server.c +++ b/src/server/pdc_server.c @@ -34,6 +34,7 @@ #include #include #include +#include <fts.h> #include #include @@ -61,6 +62,16 @@ #include #endif +#ifdef ENABLE_ROCKSDB +#include "rocksdb/c.h" +rocksdb_t *rocksdb_g; +#endif + +#ifdef ENABLE_SQLITE3 +#include "sqlite3.h" +sqlite3 *sqlite3_db_g; +#endif + // Check how long PDC has run every OP_INTERVAL operations #define PDC_CHECKPOINT_CHK_OP_INTERVAL 2000 // Checkpoint every INTERVAL_SEC second and at least OP_INTERVAL operations @@ -131,6 +142,8 @@ int read_from_bb_size_g = 0; int gen_hist_g = 0; int gen_fastbit_idx_g = 0; int use_fastbit_idx_g = 0; +int use_rocksdb_g = 0; +int use_sqlite3_g = 0; char * gBinningOption = NULL; double server_write_time_g = 0.0; @@ -359,6 +372,71 @@ PDC_Server_write_addr_to_file(char **addr_strings, int n) FUNC_LEAVE(ret_value); } +static int +remove_directory(const char *dir) +{ + int ret = 0; + FTS * ftsp = NULL; + FTSENT *curr; + + // Cast needed (in C) because fts_open() takes a "char * const *", instead + // of a "const char * const *", which is only allowed in C++. fts_open() + // does not modify the argument. + char *files[] = {(char *)dir, NULL}; + + // FTS_NOCHDIR - Avoid changing cwd, which could cause unexpected behavior + // in multithreaded programs + // FTS_PHYSICAL - Don't follow symlinks. Prevents deletion of files outside + // of the specified directory + // FTS_XDEV - Don't cross filesystem boundaries + ftsp = fts_open(files, FTS_NOCHDIR | FTS_PHYSICAL | FTS_XDEV, NULL); + if (!ftsp) { + fprintf(stderr, "PDC_SERVER: %s: fts_open failed: %s\n", dir, strerror(errno)); + ret = -1; + goto done; + } + + while ((curr = fts_read(ftsp))) { + switch (curr->fts_info) { + case FTS_NS: + case FTS_DNR: + case FTS_ERR: + break; + + case FTS_DC: + case FTS_DOT: + case FTS_NSOK: + // Not reached unless FTS_LOGICAL, FTS_SEEDOT, or FTS_NOSTAT were + // passed to fts_open() + break; + + case FTS_D: + // Do nothing. Need depth-first search, so directories are deleted + // in FTS_DP + break; + + case FTS_DP: + case FTS_F: + case FTS_SL: + case FTS_SLNONE: + case FTS_DEFAULT: + if (remove(curr->fts_accpath) < 0) { + fprintf(stderr, "PDC_SERVER: %s: Failed to remove: %s\n", curr->fts_path, + strerror(curr->fts_errno)); + ret = -1; + } + break; + } + } + +done: + if (ftsp) { + fts_close(ftsp); + } + + return ret; +} +
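/* Usage sketch (hypothetical, not part of the patch): remove_directory()
 * deletes depth-first, so the per-rank database directories under /tmp can
 * be removed as:
 *
 *   char path[128];
 *   snprintf(path, sizeof(path), "/tmp/PDC_rocksdb_%d", pdc_server_rank_g);
 *   if (remove_directory(path) != 0)
 *       fprintf(stderr, "cleanup of %s failed\n", path);
 *
 * The config-file cleanup below uses it in exactly this way. */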
 /* * Remove server config file * @@ -380,6 +458,20 @@ PDC_Server_rm_config_file() goto done; } +#ifdef ENABLE_ROCKSDB + if (use_rocksdb_g) { + snprintf(config_fname, ADDR_MAX, "/tmp/PDC_rocksdb_%d", pdc_server_rank_g); + remove_directory(config_fname); + } +#endif + +#ifdef ENABLE_SQLITE3 + if (use_sqlite3_g) { + snprintf(config_fname, ADDR_MAX, "/tmp/PDC_sqlite3_%d", pdc_server_rank_g); + remove_directory(config_fname); + } +#endif + done: FUNC_LEAVE(ret_value); } @@ -2069,11 +2161,27 @@ PDC_Server_get_env() gen_fastbit_idx_g = 1; tmp_env_char = getenv("PDC_USE_FASTBIT_IDX"); - if (tmp_env_char != NULL) + if (tmp_env_char != NULL) { use_fastbit_idx_g = 1; + printf("==PDC_SERVER[%d]: using FastBit for data indexing and querying\n", pdc_server_rank_g); + } + + tmp_env_char = getenv("PDC_USE_ROCKSDB"); + if (tmp_env_char != NULL && strcmp(tmp_env_char, "1") == 0) { + use_rocksdb_g = 1; + if (pdc_server_rank_g == 0) + printf("==PDC_SERVER[%d]: using RocksDB for kvtag\n", pdc_server_rank_g); + } + + tmp_env_char = getenv("PDC_USE_SQLITE3"); + if (tmp_env_char != NULL && strcmp(tmp_env_char, "1") == 0) { + use_sqlite3_g = 1; + if (pdc_server_rank_g == 0) + printf("==PDC_SERVER[%d]: using SQLite3 for kvtag\n", pdc_server_rank_g); + } if (pdc_server_rank_g == 0) { - printf("\n==PDC_SERVER[%d]: using [%s] as tmp dir, %d OSTs, %d OSTs per data file, %d%% to BB\n", + printf("==PDC_SERVER[%d]: using [%s] as tmp dir, %d OSTs, %d OSTs per data file, %d%% to BB\n", pdc_server_rank_g, pdc_server_tmp_dir_g, lustre_total_ost_g, pdc_nost_per_file_g, write_to_bb_percentage_g); } @@ -2142,6 +2250,75 @@ server_run(int argc, char *argv[]) if (pdc_server_rank_g == 0) if (PDC_Server_write_addr_to_file(all_addr_strings_g, pdc_server_size_g) != SUCCEED) printf("==PDC_SERVER[%d]: Error with write config file\n", pdc_server_rank_g); + +#ifdef ENABLE_ROCKSDB + if (use_rocksdb_g) { + /* rocksdb_backup_engine_t *be; */ + rocksdb_options_t *options = rocksdb_options_create(); + rocksdb_options_increase_parallelism(options, 2); + rocksdb_options_optimize_level_style_compaction(options, 0); + rocksdb_options_set_create_if_missing(options, 1); + + rocksdb_block_based_table_options_t *table_options = rocksdb_block_based_options_create(); + rocksdb_filterpolicy_t * filter_policy = rocksdb_filterpolicy_create_bloom(10); + rocksdb_block_based_options_set_filter_policy(table_options, filter_policy); + + rocksdb_options_set_block_based_table_factory(options, table_options); + rocksdb_slicetransform_t *slicetransform = rocksdb_slicetransform_create_fixed_prefix(3); + rocksdb_options_set_prefix_extractor(options, slicetransform); + + char *err = NULL; + char rocksdb_path[ADDR_MAX]; + snprintf(rocksdb_path, ADDR_MAX, "/tmp/PDC_rocksdb_%d", pdc_server_rank_g); + + // Remove the in-memory db + remove_directory(rocksdb_path); + + // Create db + rocksdb_g = rocksdb_open(options, rocksdb_path, &err); + assert(!err); + if (pdc_server_rank_g == 0) + printf("==PDC_SERVER[%d]: RocksDB initialized\n", pdc_server_rank_g); + } + +#endif + +#ifdef ENABLE_SQLITE3 + if (use_sqlite3_g) { + char *errMessage = 0; + char sqlite3_path[ADDR_MAX]; + snprintf(sqlite3_path, ADDR_MAX, 
"/tmp/PDC_sqlite3_%d", pdc_server_rank_g); + sqlite3_open(sqlite3_path, &sqlite3_db_g); + + sqlite3_exec(sqlite3_db_g, + "CREATE TABLE objects (objid INTEGER, name TEXT, value_text TEXT, " + "value_int INTEGER, value_float REAL, value_double REAL, value_blob BLOB);", + 0, 0, &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + + // Create indexes + sqlite3_exec(sqlite3_db_g, "CREATE INDEX index_name ON objects(name);", 0, 0, &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + sqlite3_exec(sqlite3_db_g, "CREATE INDEX index_value_int ON objects(value_int);", 0, 0, &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + sqlite3_exec(sqlite3_db_g, "CREATE INDEX index_value_text ON objects(value_text);", 0, 0, + &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + sqlite3_exec(sqlite3_db_g, "CREATE INDEX index_value_float ON objects(value_float);", 0, 0, + &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + sqlite3_exec(sqlite3_db_g, "CREATE INDEX index_value_double ON objects(value_double);", 0, 0, + &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + } +#endif + #ifdef PDC_TIMING #ifdef ENABLE_MPI pdc_server_timings->PDCserver_start_total += MPI_Wtime() - start; @@ -2175,6 +2352,30 @@ server_run(int argc, char *argv[]) #endif done: +#ifdef ENABLE_ROCKSDB + if (use_rocksdb_g) { + char rocksdb_fname[ADDR_MAX]; + struct stat st; + snprintf(rocksdb_fname, ADDR_MAX, "/tmp/PDC_rocksdb_%d", pdc_server_rank_g); + stat(rocksdb_fname, &st); + printf("==PDC_SERVER[%d]: RocksDB file size %lu\n", pdc_server_rank_g, st.st_size); + + rocksdb_close(rocksdb_g); + } +#endif + +#ifdef ENABLE_SQLITE3 + if (use_sqlite3_g) { + char sqlite3_fname[ADDR_MAX]; + struct stat st; + snprintf(sqlite3_fname, ADDR_MAX, "/tmp/PDC_sqlite3_%d", pdc_server_rank_g); + stat(sqlite3_fname, &st); + printf("==PDC_SERVER[%d]: SQLite3 max memory usage: %llu, DB file size %lu\n", pdc_server_rank_g, + sqlite3_memory_highwater(0), st.st_size); + sqlite3_close(sqlite3_db_g); + } +#endif + #ifdef PDC_TIMING PDC_server_timing_report(); #endif diff --git a/src/server/pdc_server_metadata.c b/src/server/pdc_server_metadata.c index 7eb2abd66..b33087173 100644 --- a/src/server/pdc_server_metadata.c +++ b/src/server/pdc_server_metadata.c @@ -50,6 +50,7 @@ #include "pdc_server.h" #include "mercury_hash_table.h" #include "pdc_malloc.h" +#include "string_utils.h" #define BLOOM_TYPE_T counting_bloom_t #define BLOOM_NEW new_counting_bloom @@ -968,7 +969,7 @@ PDC_Server_delete_metadata_by_id(metadata_delete_by_id_in_t *in, metadata_delete continue; if (cont_entry->cont_id == target_obj_id) { - hash_table_remove(container_hash_table_g, &pair.key); + hash_table_remove(container_hash_table_g, pair.key); out->ret = 1; ret_value = SUCCEED; goto done; @@ -1622,9 +1623,191 @@ _is_matching_kvtag(pdc_kvtag_t *in, pdc_kvtag_t *kvtag) FUNC_LEAVE(ret_value); } -perr_t -PDC_Server_get_kvtag_query_result(pdc_kvtag_t *in /*FIXME: query input should be string-based*/, - uint32_t *n_meta, uint64_t **obj_ids) +#ifdef ENABLE_SQLITE3 +static int +sqlite_query_kvtag_callback(void *data, int argc, char **argv, char **colName) +{ + pdc_sqlite3_query_t *query_data = 
(pdc_sqlite3_query_t *)data; + + if (NULL != argv[0]) { + pdcid_t id = strtoull(argv[0], NULL, 10); + if (query_data->nobj >= query_data->nalloc) { + query_data->nalloc *= 2; + *query_data->obj_ids = realloc(*query_data->obj_ids, query_data->nalloc * sizeof(uint64_t)); + } + (*query_data->obj_ids)[query_data->nobj] = id; + query_data->nobj += 1; + /* printf("SQLite3 found %s = %llu\n", colName[0], id); */ + } + else { + printf("SQLite3 found nothing\n"); + return 0; + } + + return 0; +} +#endif + +static perr_t +PDC_Server_query_kvtag_rocksdb(pdc_kvtag_t *in, uint32_t *n_meta, uint64_t **obj_ids, uint64_t alloc_size) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_ROCKSDB + const char *rocksdb_key; + pdc_kvtag_t tmp; + uint64_t obj_id; + char name[TAG_LEN_MAX]; + size_t len; + uint32_t iter = 0; + + rocksdb_readoptions_t *readoptions = rocksdb_readoptions_create(); + rocksdb_iterator_t * rocksdb_iter = rocksdb_create_iterator(rocksdb_g, readoptions); + rocksdb_iter_seek_to_first(rocksdb_iter); + + // Iterate over all rocksdb kv + while (rocksdb_iter_valid(rocksdb_iter)) { + rocksdb_key = rocksdb_iter_key(rocksdb_iter, &len); + /* sprintf(rocksdb_key, "%lu`%s", obj_id, in->kvtag.name); */ + sscanf(rocksdb_key, "%lu`%s", &obj_id, name); + tmp.name = name; + tmp.value = (void *)rocksdb_iter_value(rocksdb_iter, &len); + tmp.size = len; + tmp.type = in->type; + + if (_is_matching_kvtag(in, &tmp) == TRUE) { + if (iter >= alloc_size) { + alloc_size *= 2; + *obj_ids = (void *)realloc(*obj_ids, alloc_size * sizeof(uint64_t)); + } + (*obj_ids)[iter++] = obj_id; + } + + /* printf("==PDC_SERVER[%d]: rocksdb iter [%s] [%d], len %d\n", pdc_server_rank_g, tmp.name, + * *((int*)tmp.value), tmp.size); */ + rocksdb_iter_next(rocksdb_iter); + } + + *n_meta = iter; + // Debug + /* printf("==PDC_SERVER[%d]: rocksdb found %d objids \n", pdc_server_rank_g, iter); */ + + if (rocksdb_iter) + rocksdb_iter_destroy(rocksdb_iter); +#else + printf("==PDC_SERVER[%d]: enabled rocksdb but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; +#endif + + return ret_value; +} + +static perr_t +PDC_Server_query_kvtag_sqlite(pdc_kvtag_t *in, uint32_t *n_meta, uint64_t **obj_ids, uint64_t alloc_size) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_SQLITE3 + char sql[TAG_LEN_MAX]; + char * errMessage = NULL; + char * tmp_value, *tmp_name, *current_pos; + pdc_sqlite3_query_t query_data; + + // Check if there is * in tag name + if (NULL == strstr(in->name, "*")) { + // exact name match + if (in->type == PDC_STRING) { + // valut type is string + if (NULL == strstr((char *)in->value, "*")) { + // exact name and value string match + sprintf(sql, "SELECT objid FROM objects WHERE name = \'%s\' AND value_text = \'%s\';", + in->name, (char *)in->value); + } + else { + // value has * in it + tmp_value = strdup((char *)in->value); + // replace * with % for sqlite3 + current_pos = strchr(tmp_value, '*'); + while (current_pos) { + *current_pos = '%'; + current_pos = strchr(current_pos, '*'); + } + + sprintf(sql, "SELECT objid FROM objects WHERE name = \'%s\' AND value_text LIKE \'%s\';", + in->name, tmp_value); + if (tmp_value) + free(tmp_value); + } + } + else { + // Only check name for non string value type + sprintf(sql, "SELECT objid FROM objects WHERE name = \'%s\';", in->name); + } + } + else { + tmp_name = strdup(in->name); + // replace * with % for sqlite3 + current_pos = strchr(tmp_name, '*'); + while (current_pos) { + *current_pos = '%'; + current_pos = strchr(current_pos, '*'); + } + + sprintf(sql, "SELECT 
objid FROM objects WHERE name LIKE \'%s\';", tmp_name); + + if (in->type == PDC_STRING) { + // valut type is string + if (NULL == strstr((char *)in->value, "*")) { + // exact name and value string match + sprintf(sql, "SELECT objid FROM objects WHERE name LIKE \'%s\' AND value_text = \'%s\';", + tmp_name, (char *)in->value); + } + else { + // value has * in it + tmp_value = strdup((char *)in->value); + // replace * with % for sqlite3 + current_pos = strchr(tmp_value, '*'); + while (current_pos) { + *current_pos = '%'; + current_pos = strchr(current_pos, '*'); + } + + sprintf(sql, "SELECT objid FROM objects WHERE name LIKE \'%s\' AND value_text LIKE \'%s\';", + tmp_name, tmp_value); + if (tmp_value) + free(tmp_value); + } + } + else { + // Only check name for non string value type + sprintf(sql, "SELECT objid FROM objects WHERE name LIKE \'%s\';", tmp_name); + } + + if (tmp_name) + free(tmp_name); + } + + query_data.nobj = 0; + query_data.nalloc = alloc_size; + query_data.obj_ids = obj_ids; + + // debug + /* printf("==PDC_SERVER[%d]: constructed SQL [%s]\n", pdc_server_rank_g, sql); */ + + // Construct a SQL query + sqlite3_exec(sqlite3_db_g, sql, sqlite_query_kvtag_callback, &query_data, &errMessage); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + + *n_meta = query_data.nobj; +#else + printf("==PDC_SERVER[%d]: enabled SQLite3 but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; +#endif + + return ret_value; +} + +static perr_t +PDC_Server_query_kvtag_someta(pdc_kvtag_t *in, uint32_t *n_meta, uint64_t **obj_ids, uint64_t alloc_size) { perr_t ret_value = SUCCEED; uint32_t iter = 0; @@ -1634,13 +1817,6 @@ PDC_Server_get_kvtag_query_result(pdc_kvtag_t *in /*FIXME: query input should be HashTableIterator hash_table_iter; int n_entry, is_name_match, is_value_match; HashTablePair pair; - uint32_t alloc_size = 100; - - FUNC_ENTER(NULL); - - *n_meta = 0; - - *obj_ids = (void *)calloc(alloc_size, sizeof(uint64_t)); if (metadata_hash_table_g != NULL) { @@ -1652,14 +1828,14 @@ PDC_Server_get_kvtag_query_result(pdc_kvtag_t *in /*FIXME: query input should be head = pair.value; DL_FOREACH(head->metadata, elt) { - DL_FOREACH(elt->kvtag_list_head, kvtag_list_elt) - { #ifdef PDC_DEBUG_OUTPUT - printf("==PDC_SERVER: Matching kvtag [\"%s\":\"%s\"] of object %s on condition in->key: " - "%s, in->value: %s ", - (char *)kvtag_list_elt->kvtag->name, (char *)kvtag_list_elt->kvtag->value, - elt->obj_name, in->name, in->value); + printf("==PDC_SERVER: Matching kvtag [\"%s\":\"%s\"] of object %s on condition in->key: " + "%s, in->value: %s ", + (char *)kvtag_list_elt->kvtag->name, (char *)kvtag_list_elt->kvtag->value, + elt->obj_name, in->name, in->value); #endif + DL_FOREACH(elt->kvtag_list_head, kvtag_list_elt) + { if (_is_matching_kvtag(in, kvtag_list_elt->kvtag) == TRUE) { #ifdef PDC_DEBUG_OUTPUT println("[Found]"); @@ -1676,16 +1852,56 @@ PDC_Server_get_kvtag_query_result(pdc_kvtag_t *in /*FIXME: query input should be println("[NOT FOUND]"); #endif } - - } // End for each kvtag - } // End for each metadata - } // End while + } // End for each kvtag in list + } // End for each metadata from hash table entry + } // End looping metadata hash table *n_meta = iter; +#ifdef PDC_DEBUG_OUTPUT + printf("==PDC_SERVER[%d]: found %d objids \n", pdc_server_rank_g, iter); +#endif } // if (metadata_hash_table_g != NULL) else { printf("==PDC_SERVER: metadata_hash_table_g not initialized!\n"); ret_value = FAIL; - goto done; + } + + return 
ret_value; +} + +perr_t +PDC_Server_get_kvtag_query_result(pdc_kvtag_t *in /*FIXME: query input should be string-based*/, + uint32_t *n_meta, uint64_t **obj_ids) +{ + perr_t ret_value = SUCCEED; + + uint32_t alloc_size = 128; + + FUNC_ENTER(NULL); + + *n_meta = 0; + *obj_ids = (void *)calloc(alloc_size, sizeof(uint64_t)); + + if (use_rocksdb_g == 1) { + ret_value = PDC_Server_query_kvtag_rocksdb(in, n_meta, obj_ids, alloc_size); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_query_kvtag_rocksdb!\n", pdc_server_rank_g); + goto done; + } + } + else if (use_sqlite3_g) { + ret_value = PDC_Server_query_kvtag_sqlite(in, n_meta, obj_ids, alloc_size); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_query_kvtag_sqlite!\n", pdc_server_rank_g); + goto done; + } + } // End if SQLite3 + else { + // SoMeta backend + ret_value = PDC_Server_query_kvtag_someta(in, n_meta, obj_ids, alloc_size); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_query_kvtag_someta!\n", pdc_server_rank_g); + goto done; + } } done: @@ -2558,43 +2774,95 @@ PDC_add_kvtag_to_list(pdc_kvtag_list_t **list_head, pdc_kvtag_t *tag) FUNC_LEAVE(ret_value); } -perr_t -PDC_Server_add_kvtag(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) +static perr_t +PDC_Server_add_kvtag_rocksdb(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) { - - perr_t ret_value = SUCCEED; - uint32_t hash_key; - uint64_t obj_id; -#ifdef ENABLE_MULTITHREAD - int unlocked; + perr_t ret_value = SUCCEED; +#ifdef ENABLE_ROCKSDB + rocksdb_writeoptions_t *writeoptions = rocksdb_writeoptions_create(); + char rocksdb_key[TAG_LEN_MAX] = {0}; + sprintf(rocksdb_key, "%lu`%s", in->obj_id, in->kvtag.name); + char *err = NULL; + // Debug + /* printf("Put [%s] [%d], len%lu\n", in->kvtag.name, *((int*)in->kvtag.value), in->kvtag.size); */ + rocksdb_put(rocksdb_g, writeoptions, rocksdb_key, strlen(rocksdb_key) + 1, in->kvtag.value, + in->kvtag.size, &err); + if (err != NULL) { + printf("==PDC_SERVER[%d]: error with rocksdb_put %s, [%s]!\n", pdc_server_rank_g, in->kvtag.name, + err); + ret_value = FAIL; + } + else + out->ret = 1; +#else + printf("==PDC_SERVER[%d]: enabled rocksdb but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif - pdc_hash_table_entry_head * lookup_value; - pdc_cont_hash_table_entry_t *cont_lookup_value; - FUNC_ENTER(NULL); + return ret_value; +} -#ifdef ENABLE_TIMING - struct timeval pdc_timer_start; - struct timeval pdc_timer_end; - double ht_total_sec; - gettimeofday(&pdc_timer_start, 0); -#endif +static perr_t +PDC_Server_add_kvtag_sqlite3(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_SQLITE3 + char sql[TAG_LEN_MAX] = {0}; + char *errMessage = NULL; - hash_key = in->hash_value; - obj_id = in->obj_id; + if (in->kvtag.type == PDC_STRING || in->kvtag.type == PDC_CHAR) { + sprintf(sql, "INSERT INTO objects (objid, name, value_text) VALUES (%llu, '%s', '%s');", in->obj_id, + in->kvtag.name, (char *)in->kvtag.value); + } + else if (in->kvtag.type == PDC_INT && in->kvtag.size == sizeof(int)) { + sprintf(sql, "INSERT INTO objects (objid, name, value_int) VALUES (%llu, '%s', '%d');", in->obj_id, + in->kvtag.name, *((int *)in->kvtag.value)); + } + else if (in->kvtag.type == PDC_FLOAT && in->kvtag.size == sizeof(float)) { + sprintf(sql, "INSERT INTO objects (objid, name, value_float) VALUES (%llu, '%s', '%f');", in->obj_id, + in->kvtag.name, *((float *)in->kvtag.value)); 
+ } + else if (in->kvtag.type == PDC_DOUBLE && in->kvtag.size == sizeof(double)) { + sprintf(sql, "INSERT INTO objects (objid, name, value_double) VALUES (%llu, '%s', '%lf');", + in->obj_id, in->kvtag.name, *((double *)in->kvtag.value)); + } + else { + printf("==PDC_SERVER[%d]: datatype not supported %d!\n", pdc_server_rank_g, in->kvtag.type); + ret_value = FAIL; + goto done; + } - // printf("==SERVER[%d]: PDC_add_kvtag::in.obj_id = %llu \n ", pdc_server_rank_g, obj_id); + // debug + /* printf("==PDC_SERVER[%d]: constructed SQL [%s]\n", pdc_server_rank_g, sql); */ + sqlite3_exec(sqlite3_db_g, sql, NULL, 0, &errMessage); -#ifdef ENABLE_MULTITHREAD - // Obtain lock for hash table - unlocked = 0; - hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); + if (errMessage) + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + else + out->ret = 1; +#else + printf("==PDC_SERVER[%d]: enabled SQLite3 but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif +done: + return ret_value; +} + +static perr_t +PDC_Server_add_kvtag_someta(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; + pdc_hash_table_entry_head * lookup_value; + pdc_cont_hash_table_entry_t *cont_lookup_value; + uint32_t hash_key; + + hash_key = in->hash_value; + lookup_value = hash_table_lookup(metadata_hash_table_g, &hash_key); if (lookup_value != NULL) { pdc_metadata_t *target; - target = find_metadata_by_id_from_list(lookup_value->metadata, obj_id); + target = find_metadata_by_id_from_list(lookup_value->metadata, in->obj_id); if (target != NULL) { PDC_add_kvtag_to_list(&target->kvtag_list_head, &in->kvtag); out->ret = 1; @@ -2612,12 +2880,64 @@ PDC_Server_add_kvtag(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) out->ret = 1; } else { - printf("==PDC_SERVER[%d]: add tag target %" PRIu64 " not found!\n", pdc_server_rank_g, obj_id); + printf("==PDC_SERVER[%d]: add tag target %" PRIu64 " not found!\n", pdc_server_rank_g, + in->obj_id); ret_value = FAIL; out->ret = -1; } } + return ret_value; +} + +perr_t +PDC_Server_add_kvtag(metadata_add_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_MULTITHREAD + int unlocked; +#endif + FUNC_ENTER(NULL); + +#ifdef ENABLE_TIMING + struct timeval pdc_timer_start; + struct timeval pdc_timer_end; + double ht_total_sec; + gettimeofday(&pdc_timer_start, 0); +#endif + + out->ret = -1; + // printf("==SERVER[%d]: PDC_add_kvtag::in.obj_id = %llu \n ", pdc_server_rank_g, obj_id); + +#ifdef ENABLE_MULTITHREAD + // Obtain lock for hash table + unlocked = 0; + hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); +#endif + + if (use_rocksdb_g == 1) { + ret_value = PDC_Server_add_kvtag_rocksdb(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_add_kvtag_rocksdb!\n", pdc_server_rank_g); + goto done; + } + } // End if rocksdb + else if (use_sqlite3_g == 1) { + ret_value = PDC_Server_add_kvtag_sqlite3(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_add_kvtag_sqlite3!\n", pdc_server_rank_g); + goto done; + } + } // End if sqlite3 + else { + ret_value = PDC_Server_add_kvtag_someta(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_add_kvtag_someta!\n", pdc_server_rank_g); + goto done; + } + } + +done: #ifdef ENABLE_MULTITHREAD // ^ Release hash table lock hg_thread_mutex_unlock(&pdc_metadata_hash_table_mutex_g); @@ -2674,37 +2994,126 @@ 
PDC_get_kvtag_value_from_list(pdc_kvtag_list_t **list_head, char *key, metadata_ FUNC_LEAVE(ret_value); } -perr_t -PDC_Server_get_kvtag(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) +#ifdef ENABLE_SQLITE3 +static int +sqlite_get_kvtag_callback(void *data, int argc, char **argv, char **colName) { + pdc_kvtag_t *out = (pdc_kvtag_t *)data; + + for (int i = 0; i < argc; i++) { + if (NULL != argv[i]) { + if (0 == strcmp(colName[i], "value_int")) { + int *int_tmp = (int *)malloc(sizeof(int)); + *int_tmp = atoi(argv[i]); + out->value = (void *)int_tmp; + out->size = sizeof(int); + /* printf("SQLite3 found %s = %d\n", colName[i], int_tmp); */ + break; + } + else if (0 == strcmp(colName[i], "value_real")) { + float *float_tmp = (float *)malloc(sizeof(float)); + *float_tmp = (float)atof(argv[i]); + out->value = (void *)float_tmp; + out->size = sizeof(float); + /* printf("SQLite3 found %s = %f\n", colName[i], float_tmp); */ + break; + } + else if (0 == strcmp(colName[i], "value_double")) { + double *double_tmp = (double *)malloc(sizeof(double)); + *double_tmp = atof(argv[i]); + out->value = (void *)double_tmp; + out->size = sizeof(double); + /* printf("SQLite3 found %s = %f\n", colName[i], double_tmp); */ + break; + } + else if (0 == strcmp(colName[i], "value_text")) { + out->value = strdup(argv[i]); + /* printf("SQLite3 found %s = %s\n", colName[i], argv[i]); */ + out->size = strlen(argv[i]) + 1; + break; + } + else { + out->value = NULL; + /* printf("SQLite3 found nothing\n"); */ + return 0; + } + } + } - perr_t ret_value = SUCCEED; - uint32_t hash_key; - uint64_t obj_id; -#ifdef ENABLE_MULTITHREAD - int unlocked; + return 0; +} #endif - pdc_hash_table_entry_head * lookup_value; - pdc_cont_hash_table_entry_t *cont_lookup_value; - FUNC_ENTER(NULL); +static perr_t +PDC_Server_get_kvtag_rocksdb(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) +{ + perr_t ret_value = SUCCEED; -#ifdef ENABLE_TIMING - struct timeval pdc_timer_start; - struct timeval pdc_timer_end; - double ht_total_sec; - gettimeofday(&pdc_timer_start, 0); +#ifdef ENABLE_ROCKSDB + rocksdb_readoptions_t *readoptions = rocksdb_readoptions_create(); + char rocksdb_key[TAG_LEN_MAX] = {0}; + sprintf(rocksdb_key, "%lu`%s", in->obj_id, in->key); + char * err = NULL; + size_t len; + char * value = rocksdb_get(rocksdb_g, readoptions, rocksdb_key, strlen(rocksdb_key) + 1, &len, &err); + if (value == NULL) { + printf("==PDC_SERVER[%d]: error with rocksdb_get %s, [%s]!\n", pdc_server_rank_g, in->key, err); + ret_value = FAIL; + } + out->kvtag.name = in->key; + out->kvtag.size = len; + out->kvtag.value = value; + out->ret = 1; +#else + printf("==PDC_SERVER[%d]: enabled rocksdb but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif - hash_key = in->hash_value; - obj_id = in->obj_id; + return ret_value; +} -#ifdef ENABLE_MULTITHREAD - // Obtain lock for hash table - unlocked = 0; - hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); +static perr_t +PDC_Server_get_kvtag_sqlite3(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_SQLITE3 + char sql[TAG_LEN_MAX]; + char *errMessage = NULL; + sprintf(sql, + "SELECT value_text, value_int, value_float, value_double, value_blob FROM objects WHERE " + "objid = %llu AND name = \'%s\';", + in->obj_id, in->key); + + /* printf("==PDC_SERVER[%d]: get kvtag [%s]!\n", pdc_server_rank_g, in->key); */ + sqlite3_exec(sqlite3_db_g, sql, sqlite_get_kvtag_callback, &out->kvtag, &errMessage); + if (errMessage) { 
+ printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + } + else { + // size and value is filled in sqlite_get_kvtag_callback + out->kvtag.name = in->key; + out->ret = 1; + } +#else + printf("==PDC_SERVER[%d]: enabled SQLite3 but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif + return ret_value; +} + +static perr_t +PDC_Server_get_kvtag_someta(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) +{ + perr_t ret_value = SUCCEED; + uint32_t hash_key; + uint64_t obj_id; + pdc_hash_table_entry_head * lookup_value; + pdc_cont_hash_table_entry_t *cont_lookup_value; + + hash_key = in->hash_value; + obj_id = in->obj_id; + lookup_value = hash_table_lookup(metadata_hash_table_g, &hash_key); if (lookup_value != NULL) { pdc_metadata_t *target; @@ -2731,11 +3140,58 @@ PDC_Server_get_kvtag(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) } } - if (ret_value != SUCCEED) { - printf("==PDC_SERVER[%d]: %s - error \n", pdc_server_rank_g, __func__); - goto done; + return ret_value; +} + +perr_t +PDC_Server_get_kvtag(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_MULTITHREAD + int unlocked; +#endif + + FUNC_ENTER(NULL); + +#ifdef ENABLE_TIMING + struct timeval pdc_timer_start; + struct timeval pdc_timer_end; + double ht_total_sec; + gettimeofday(&pdc_timer_start, 0); +#endif + + out->ret = -1; + +#ifdef ENABLE_MULTITHREAD + // Obtain lock for hash table + unlocked = 0; + hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); +#endif + + if (use_rocksdb_g == 1) { + ret_value = PDC_Server_get_kvtag_rocksdb(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_get_kvtag_rocksdb!\n", pdc_server_rank_g); + goto done; + } + } + else if (use_sqlite3_g == 1) { + ret_value = PDC_Server_get_kvtag_sqlite3(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_get_kvtag_sqlite3!\n", pdc_server_rank_g); + goto done; + } + } + else { + // Someta + ret_value = PDC_Server_get_kvtag_someta(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_get_kvtag_someta!\n", pdc_server_rank_g); + goto done; + } } +done: #ifdef ENABLE_MULTITHREAD // ^ Release hash table lock hg_thread_mutex_unlock(&pdc_metadata_hash_table_mutex_g); @@ -2760,7 +3216,6 @@ PDC_Server_get_kvtag(metadata_get_kvtag_in_t *in, metadata_get_kvtag_out_t *out) hg_thread_mutex_unlock(&pdc_time_mutex_g); #endif -done: #ifdef ENABLE_MULTITHREAD if (unlocked == 0) hg_thread_mutex_unlock(&pdc_metadata_hash_table_mutex_g); @@ -2795,36 +3250,68 @@ PDC_del_kvtag_value_from_list(pdc_kvtag_list_t **list_head, char *key) FUNC_LEAVE(ret_value); } -perr_t -PDC_Server_del_kvtag(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) +static perr_t +PDC_Server_del_kvtag_rocksdb(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) { - - perr_t ret_value = SUCCEED; - uint32_t hash_key; - uint64_t obj_id; -#ifdef ENABLE_MULTITHREAD - int unlocked; + perr_t ret_value = SUCCEED; +#ifdef ENABLE_ROCKSDB + char * err = NULL; + char rocksdb_key[TAG_LEN_MAX] = {0}; + rocksdb_writeoptions_t *writeoptions = rocksdb_writeoptions_create(); + + sprintf(rocksdb_key, "%lu`%s", in->obj_id, in->key); + rocksdb_delete(rocksdb_g, writeoptions, rocksdb_key, strlen(rocksdb_key) + 1, &err); + if (err != NULL) { + printf("==PDC_SERVER[%d]: error with rocksdb_delete [%s], [%s]!\n", pdc_server_rank_g, in->key, err); + ret_value = FAIL; + } + else + 
out->ret = 1; +#else + printf("==PDC_SERVER[%d]: enabled rocksdb but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif - pdc_hash_table_entry_head * lookup_value; - pdc_cont_hash_table_entry_t *cont_lookup_value; - FUNC_ENTER(NULL); + return ret_value; +} -#ifdef ENABLE_TIMING - struct timeval pdc_timer_start; - struct timeval pdc_timer_end; - double ht_total_sec; - gettimeofday(&pdc_timer_start, 0); +static perr_t +PDC_Server_del_kvtag_sqlite3(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_SQLITE3 + char sql[TAG_LEN_MAX]; + char *errMessage = NULL; + + sprintf(sql, "DELETE FROM objects WHERE objid = %llu AND name = \'%s\';", in->obj_id, in->key); + + sqlite3_exec(sqlite3_db_g, sql, NULL, 0, &errMessage); + if (errMessage) { + printf("==PDC_SERVER[%d]: error from SQLite %s!\n", pdc_server_rank_g, errMessage); + ret_value = FAIL; + } + else + out->ret = 1; +#else + printf("==PDC_SERVER[%d]: enabled SQLite3 but PDC is not compiled with it!\n", pdc_server_rank_g); + ret_value = FAIL; #endif + return ret_value; +} + +static perr_t +PDC_Server_del_kvtag_someta(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; + uint32_t hash_key; + uint64_t obj_id; + pdc_hash_table_entry_head * lookup_value; + pdc_cont_hash_table_entry_t *cont_lookup_value; + hash_key = in->hash_value; obj_id = in->obj_id; -#ifdef ENABLE_MULTITHREAD - // Obtain lock for hash table - hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); -#endif - // Look obj tags first lookup_value = hash_table_lookup(metadata_hash_table_g, &hash_key); if (lookup_value != NULL) { @@ -2839,7 +3326,6 @@ PDC_Server_del_kvtag(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) out->ret = -1; printf("==PDC_SERVER[%d]: %s - failed to find requested kvtag [%s]\n", pdc_server_rank_g, __func__, in->key); - goto done; } } else { @@ -2853,6 +3339,54 @@ PDC_Server_del_kvtag(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) out->ret = -1; printf("==PDC_SERVER[%d]: %s - failed to find requested kvtag [%s]\n", pdc_server_rank_g, __func__, in->key); + } + } + + return ret_value; +} + +perr_t +PDC_Server_del_kvtag(metadata_get_kvtag_in_t *in, metadata_add_tag_out_t *out) +{ + perr_t ret_value = SUCCEED; +#ifdef ENABLE_MULTITHREAD + int unlocked; +#endif + + FUNC_ENTER(NULL); + +#ifdef ENABLE_TIMING + struct timeval pdc_timer_start; + struct timeval pdc_timer_end; + double ht_total_sec; + gettimeofday(&pdc_timer_start, 0); +#endif + + out->ret = -1; + +#ifdef ENABLE_MULTITHREAD + // Obtain lock for hash table + hg_thread_mutex_lock(&pdc_metadata_hash_table_mutex_g); +#endif + + if (use_rocksdb_g) { + ret_value = PDC_Server_del_kvtag_rocksdb(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_del_kvtag_rocksdb!\n", pdc_server_rank_g); + goto done; + } + } + else if (use_sqlite3_g) { + ret_value = PDC_Server_del_kvtag_sqlite3(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_del_kvtag_sqlite3!\n", pdc_server_rank_g); + goto done; + } + } + else { + ret_value = PDC_Server_del_kvtag_someta(in, out); + if (ret_value != SUCCEED) { + printf("==PDC_SERVER[%d]: Error with PDC_Server_del_kvtag_someta!\n", pdc_server_rank_g); goto done; } } diff --git a/src/tests/CMakeLists.txt b/src/tests/CMakeLists.txt index a780c409c..13c2a5a31 100644 --- a/src/tests/CMakeLists.txt +++ b/src/tests/CMakeLists.txt @@ -10,6 +10,10 @@ include_directories( 
$ENV{HOME}/include ) +set(TEST_EXT_LIB "") +set(TEST_EXT_INCLUDE_DIRS "") +set(EXTRA_SRC_FILE "") + # ************************************************* # Julia Support # ************************************************* @@ -20,11 +24,23 @@ endif(PDC_ENABLE_JULIA) # ************************************************* # * Find UUID library # ************************************************* -find_package(UUID REQUIRED) +find_package(UUID) if(UUID_FOUND) include_directories(${UUID_INCLUDE_DIRS}) endif(UUID_FOUND) +# ************************************************* +# * MERCURY +# ************************************************* +find_package(MERCURY REQUIRED) +if(MERCURY_FOUND) + set(TEST_EXT_INCLUDE_DIRS ${MERCURY_INCLUDE_DIRS} + ${TEST_EXT_INCLUDE_DIRS} + ) + set(TEST_EXT_LIB ${MERCURY_LIBRARIES} ${TEST_EXT_LIB}) +endif() +include_directories(${TEST_EXT_INCLUDE_DIRS}) + set(PROGRAMS pdc_init # create_prop @@ -79,7 +95,7 @@ set(PROGRAMS # data_server_meta_test kvtag_add_get # kvtag_get -# kvtag_query + kvtag_query kvtag_query_scale # obj_transformation region_transfer_query @@ -119,16 +135,14 @@ set(PROGRAMS query_data ) -set(MPI_PROGRAMS - kvtag_query_scale_col - kvtag_query_mpi - kvtag_add_get_benchmark - kvtag_add_get_scale - ) +# TODO: Check if import_vpic.c is needed. If yes, we have to add the following: +# if (HDF5_FOUND) +# set(TEST_EXT_LIB ${HDF5_LIBRARIES} ${TEST_EXT_LIB}) +# set(TEST_EXT_INCLUDE_DIRS ${HDF5_INCLUDE_DIRS} ${TEST_EXT_INCLUDE_DIRS}) +# else () +# message(FATAL_ERROR "Could not find HDF5, please make sure that HDF5 has been compiled with shared libraries enabled.") +# endif() -set(TEST_EXT_LIB "") -set(TEST_EXT_INCLUDE_DIRS "") -set(EXTRA_SRC_FILE "") foreach(program ${PROGRAMS}) add_executable(${program} ${program}.c) @@ -137,6 +151,13 @@ foreach(program ${PROGRAMS}) endforeach(program) if(BUILD_MPI_TESTING) + set(MPI_PROGRAMS + kvtag_query_scale_col + # kvtag_query_mpi + kvtag_add_get_benchmark + kvtag_add_get_scale + ) + foreach(program ${MPI_PROGRAMS}) add_executable(${program} ${program}.c) if(CMAKE_C_COMPILER_ID MATCHES "GNU|Clang") @@ -197,12 +218,6 @@ foreach(script ${SCRIPTS}) endforeach(script) -# ******************************************* -# Add the HDF5 library for pdc-neon -# ******************************************* -FIND_LIBRARY(HDF5_LIBRARY NAMES hdf5_debug PATHS $ENV{HOME}/lib) - - # ******************************************* # Create a transform library which contains: # 1.
compression/decompression functions @@ -243,6 +258,8 @@ add_test(NAME obj_life WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTO #add_test(NAME obj_dim WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_dim ) add_test(NAME obj_buf WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_buf ) add_test(NAME obj_tags WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_tags ) +add_test(NAME kvtag_add_get WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./kvtag_add_get) +add_test(NAME kvtag_query WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./kvtag_query 100 1 10 0) add_test(NAME obj_info WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_info ) add_test(NAME obj_put_data WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_put_data ) add_test(NAME obj_get_data WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND run_test.sh ./obj_get_data ) @@ -322,6 +339,8 @@ set_tests_properties(obj_life PROPERTIES LABELS serial ) #set_tests_properties(obj_dim PROPERTIES LABELS serial ) set_tests_properties(obj_buf PROPERTIES LABELS serial ) set_tests_properties(obj_tags PROPERTIES LABELS serial ) +set_tests_properties(kvtag_add_get PROPERTIES LABELS serial ) +set_tests_properties(kvtag_query PROPERTIES LABELS serial ) set_tests_properties(obj_info PROPERTIES LABELS serial ) set_tests_properties(obj_put_data PROPERTIES LABELS serial ) set_tests_properties(obj_get_data PROPERTIES LABELS serial ) diff --git a/src/tests/cont_del.c b/src/tests/cont_del.c index 5ecdf2e6b..3abc1f837 100644 --- a/src/tests/cont_del.c +++ b/src/tests/cont_del.c @@ -80,7 +80,7 @@ main(int argc, char **argv) } printf("trying to open a deleted container, should fail\n"); - cont = PDCcont_open("VPIC_cont", pdc); + cont = PDCcont_open(cont_name, pdc); if (cont > 0) printf("Error: opened a container that was just deleted @ line %d!\n", __LINE__); diff --git a/src/tests/dart_attr_dist_test.c b/src/tests/dart_attr_dist_test.c index 04c6eab42..f333d76f2 100644 --- a/src/tests/dart_attr_dist_test.c +++ b/src/tests/dart_attr_dist_test.c @@ -26,7 +26,7 @@ #define JULIA_HELPER_NAME "JuliaHelper" // only define the following once, in an executable (not in a shared library) if you want fast // code. -JULIA_DEFINE_FAST_TLS +// JULIA_DEFINE_FAST_TLS void generate_incremental_associations(int64_t num_attr, int64_t num_obj, int64_t num_groups, int64_t **arr, @@ -88,7 +88,7 @@ main(int argc, char *argv[]) size_t total_num_attr = atoi(argv[2]); pdcid_t *obj_ids; int i, j, k, pct, q_repeat_count = 100; - double stime, total_time; + double stime, total_time = 0; int val; char pdc_context_name[40]; @@ -121,17 +121,23 @@ main(int argc, char *argv[]) // &arr_len); // broadcast the size from rank 0 to all other processes +#ifdef ENABLE_MPI MPI_Bcast(&arr_len, 1, MPI_UNSIGNED_LONG, 0, MPI_COMM_WORLD); +#endif } else { // receive the size on all other ranks +#ifdef ENABLE_MPI MPI_Bcast(&arr_len, 1, MPI_UNSIGNED_LONG, 0, MPI_COMM_WORLD); +#endif // allocate memory for the array attr_2_obj_array = (int64_t *)malloc(arr_len * sizeof(int64_t)); } // broadcast the array itself +#ifdef ENABLE_MPI MPI_Bcast(attr_2_obj_array, arr_len, MPI_LONG_LONG_INT, 0, MPI_COMM_WORLD); +#endif // print array.
for (i = 0; i < arr_len; ++i) { @@ -374,4 +380,4 @@ main(int argc, char *argv[]) #endif return 0; -} \ No newline at end of file +} diff --git a/src/tests/kvtag_query.c b/src/tests/kvtag_query.c index cf1e80dcb..3a6038d3f 100644 --- a/src/tests/kvtag_query.c +++ b/src/tests/kvtag_query.c @@ -24,7 +24,6 @@ #include #include -#include #include #include #include @@ -32,160 +31,215 @@ #include "pdc_client_connect.h" int -main() +assign_work_to_rank(int rank, int size, int nwork, int *my_count, int *my_start) { + if (rank > size || my_count == NULL || my_start == NULL) { + printf("assign_work_to_rank(): Error with input!\n"); + return -1; + } + if (nwork < size) { + if (rank < nwork) + *my_count = 1; + else + *my_count = 0; + (*my_start) = rank * (*my_count); + } + else { + (*my_count) = nwork / size; + (*my_start) = rank * (*my_count); + + // Last few ranks may have extra work + if (rank >= size - nwork % size) { + (*my_count)++; + (*my_start) += (rank - (size - nwork % size)); + } + } + + return 1; +} - int i; - pdcid_t pdc, cont_prop, cont, obj_prop1, obj_prop2, obj1, obj2; - uint64_t * obj_ids = NULL, *obj_ids1 = NULL, *obj_ids2 = NULL; - int nobj; - pdc_kvtag_t kvtag1, kvtag2, kvtag3; - char * v1 = "value1"; - int v2 = 2; - double v3 = 3.45; +void +print_usage(char *name) +{ + printf("%s n_obj n_round n_selectivity is_using_dart\n", name); + printf("Summary: This test will create n_obj objects and add tags to n_selectivity percent of them. Then it " "will perform n_round collective queries against the tags; each query from each client should get " "the whole result set.\n"); + printf("Parameters:\n"); + printf(" n_obj: number of objects\n"); + printf(" n_round: number of query rounds; it also equals the number of distinct tags, as each round " "performs one query against one tag\n"); + printf(" n_selectivity: percentage of objects to be tagged, on a 0-100 scale.
\n"); + printf(" is_using_dart: 1 for using dart, 0 for not using dart\n"); +} + +int +main(int argc, char *argv[]) +{ + pdcid_t pdc, cont_prop, cont, obj_prop; + pdcid_t * obj_ids; + int n_obj, n_add_tag, my_obj, my_obj_s, my_add_tag, my_add_tag_s; + int proc_num = 1, my_rank = 0, i, v, iter, round, selectivity, is_using_dart; + char obj_name[128]; + double stime, total_time; + pdc_kvtag_t kvtag; + uint64_t * pdc_ids; + int nres, ntotal; + +#ifdef ENABLE_MPI + MPI_Init(&argc, &argv); + MPI_Comm_size(MPI_COMM_WORLD, &proc_num); + MPI_Comm_rank(MPI_COMM_WORLD, &my_rank); +#endif + + if (argc < 5) { + if (my_rank == 0) + print_usage(argv[0]); + goto done; + } + n_obj = atoi(argv[1]); + round = atoi(argv[2]); + selectivity = atoi(argv[3]); + is_using_dart = atoi(argv[4]); + n_add_tag = n_obj * selectivity / 100; // create a pdc pdc = PDCinit("pdc"); - printf("create a new pdc\n"); // create a container property cont_prop = PDCprop_create(PDC_CONT_CREATE, pdc); - if (cont_prop > 0) - printf("Create a container property\n"); - else + if (cont_prop <= 0) printf("Fail to create container property @ line %d!\n", __LINE__); // create a container cont = PDCcont_create("c1", cont_prop); - if (cont > 0) - printf("Create a container c1\n"); - else + if (cont <= 0) printf("Fail to create container @ line %d!\n", __LINE__); // create an object property - obj_prop1 = PDCprop_create(PDC_OBJ_CREATE, pdc); - if (obj_prop1 > 0) - printf("Create an object property\n"); - else - printf("Fail to create object property @ line %d!\n", __LINE__); - - obj_prop2 = PDCprop_create(PDC_OBJ_CREATE, pdc); - if (obj_prop2 > 0) - printf("Create an object property\n"); - else + obj_prop = PDCprop_create(PDC_OBJ_CREATE, pdc); + if (obj_prop <= 0) printf("Fail to create object property @ line %d!\n", __LINE__); - // create first object - obj1 = PDCobj_create(cont, "o1", obj_prop1); - if (obj1 > 0) - printf("Create an object o1\n"); - else - printf("Fail to create object @ line %d!\n", __LINE__); - - // create second object - obj2 = PDCobj_create(cont, "o2", obj_prop2); - if (obj2 > 0) - printf("Create an object o2\n"); - else - printf("Fail to create object @ line %d!\n", __LINE__); - - kvtag1.name = "key1string"; - kvtag1.value = (void *)v1; - kvtag1.type = PDC_STRING; - kvtag1.size = strlen(v1) + 1; - - kvtag2.name = "key2int"; - kvtag2.value = (void *)&v2; - kvtag2.type = PDC_INT; - kvtag2.size = sizeof(int); - - kvtag3.name = "key3double"; - kvtag3.value = (void *)&v3; - kvtag3.type = PDC_DOUBLE; - kvtag3.size = sizeof(double); - - if (PDCobj_put_tag(obj1, kvtag1.name, kvtag1.value, kvtag1.type, kvtag1.size) < 0) - printf("fail to add a kvtag to o1\n"); - else - printf("successfully added a kvtag to o1\n"); - - if (PDCobj_put_tag(obj1, kvtag2.name, kvtag2.value, kvtag2.type, kvtag2.size) < 0) - printf("fail to add a kvtag to o1\n"); - else - printf("successfully added a kvtag to o1\n"); - - if (PDCobj_put_tag(obj2, kvtag2.name, kvtag2.value, kvtag2.type, kvtag2.size) < 0) - printf("fail to add a kvtag to o2\n"); - else - printf("successfully added a kvtag to o2\n"); - - if (PDCobj_put_tag(obj2, kvtag3.name, kvtag3.value, kvtag3.type, kvtag3.size) < 0) - printf("fail to add a kvtag to o2\n"); - else - printf("successfully added a kvtag to o2\n"); - - if (PDC_Client_query_kvtag(&kvtag1, &nobj, &obj_ids) < 0) - printf("fail to query a kvtag\n"); - else { - printf("successfully queried a tag, nres=%d\n", nobj); - for (i = 0; i < nobj; i++) - printf("%" PRIu64 ", ", obj_ids[i]); - printf("\n\n"); + // Create a number of 
objects and add at least one tag to each object + assign_work_to_rank(my_rank, proc_num, n_obj, &my_obj, &my_obj_s); + if (my_rank == 0) + printf("Each rank will create about %d objects\n", my_obj); + + obj_ids = (pdcid_t *)calloc(my_obj, sizeof(pdcid_t)); + for (i = 0; i < my_obj; i++) { + sprintf(obj_name, "obj%d", my_obj_s + i); + obj_ids[i] = PDCobj_create(cont, obj_name, obj_prop); + if (obj_ids[i] <= 0) + printf("Fail to create object @ line %d!\n", __LINE__); } - if (obj_ids != NULL) - free(obj_ids); - if (PDC_Client_query_kvtag(&kvtag3, &nobj, &obj_ids2) < 0) - printf("fail to query a kvtag\n"); - else { - printf("successfully queried a tag, nres=%d\n", nobj); - for (i = 0; i < nobj; i++) - printf("%" PRIu64 ", ", obj_ids2[i]); - printf("\n\n"); + if (my_rank == 0) + printf("Created %d objects\n", n_obj); + fflush(stdout); + + char *attr_name_per_rank = gen_random_strings(1, 6, 8, 26)[0]; + // Add tags + kvtag.name = attr_name_per_rank; + kvtag.value = (void *)&v; + kvtag.type = PDC_INT; + kvtag.size = sizeof(int); + + char key[32]; + char value[32]; + char exact_query[48]; + + dart_object_ref_type_t ref_type = REF_PRIMARY_ID; + dart_hash_algo_t hash_algo = DART_HASH; + + assign_work_to_rank(my_rank, proc_num, n_add_tag, &my_add_tag, &my_add_tag_s); + + // Add one tag per round to each of this rank's selected objects. + for (i = 0; i < my_add_tag; i++) { + for (iter = 0; iter < round; iter++) { + v = iter; + sprintf(value, "%d", v); + if (is_using_dart) { + if (PDC_Client_insert_obj_ref_into_dart(hash_algo, kvtag.name, value, ref_type, + (uint64_t)obj_ids[i]) < 0) { + printf("fail to add a kvtag to o%d\n", i + my_obj_s); + } + } + else { + /* println("Rank %d: [%s] [%d], len %d\n", my_rank, kvtag.name, v, kvtag.size); */ + if (PDCobj_put_tag(obj_ids[i], kvtag.name, kvtag.value, kvtag.type, kvtag.size) < 0) { + printf("fail to add a kvtag to o%d\n", i + my_obj_s); + } + } + } + if (my_rank == 0) + println("Rank %d: Added %d kvtags to object %d\n", my_rank, round, i); } - if (obj_ids2 != NULL) - free(obj_ids2); - if (PDC_Client_query_kvtag(&kvtag2, &nobj, &obj_ids1) < 0) - printf("fail to query a kvtag\n"); - else { - printf("successfully queried a tag, nres=%d\n", nobj); - for (i = 0; i < nobj; i++) - printf("%" PRIu64 ", ", obj_ids1[i]); - printf("\n\n"); +#ifdef ENABLE_MPI + MPI_Barrier(MPI_COMM_WORLD); +#endif + + kvtag.name = attr_name_per_rank; + kvtag.value = (void *)&v; + kvtag.type = PDC_INT; + kvtag.size = sizeof(int); + +#ifdef ENABLE_MPI + MPI_Barrier(MPI_COMM_WORLD); + stime = MPI_Wtime(); +#endif + + for (iter = 0; iter < round; iter++) { + v = iter; + if (is_using_dart) { + sprintf(value, "%d", v); + sprintf(exact_query, "%s=%s", kvtag.name, value); +#ifdef ENABLE_MPI + PDC_Client_search_obj_ref_through_dart_mpi(hash_algo, exact_query, ref_type, &nres, &pdc_ids, + MPI_COMM_WORLD); +#else + PDC_Client_search_obj_ref_through_dart(hash_algo, exact_query, ref_type, &nres, &pdc_ids); +#endif + } + else { + /* println("Rank %d: round %d, query kvtag [%s] [%d]\n", my_rank, round, kvtag.name, + * *((int*)kvtag.value)); */ +#ifdef ENABLE_MPI + if (PDC_Client_query_kvtag_mpi(&kvtag, &nres, &pdc_ids, MPI_COMM_WORLD) < 0) { +#else + if (PDC_Client_query_kvtag(&kvtag, &nres, &pdc_ids) < 0) { +#endif + printf("fail to query kvtag [%s] with rank %d\n", kvtag.name, my_rank); + break; + } + } } - if (obj_ids1 != NULL) - free(obj_ids1); - - // close first object - if (PDCobj_close(obj1) < 0) - printf("fail to close object o1\n"); - else - printf("successfully close object o1\n"); - // close second object - if
(PDCobj_close(obj2) < 0) - printf("fail to close object o2\n"); - else - printf("successfully close object o2\n"); +#ifdef ENABLE_MPI + MPI_Barrier(MPI_COMM_WORLD); + MPI_Reduce(&nres, &ntotal, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD); + total_time = MPI_Wtime() - stime; + if (my_rank == 0) + println("Total time to query %d objects with tag: %.5f", ntotal, total_time); +#else + println("Query found %d objects", nres); +#endif // close a container if (PDCcont_close(cont) < 0) printf("fail to close container c1\n"); else printf("successfully close container c1\n"); - // close a container property - if (PDCprop_close(obj_prop1) < 0) - printf("Fail to close property @ line %d\n", __LINE__); - else - printf("successfully close object property\n"); - - if (PDCprop_close(obj_prop2) < 0) + // close an object property + if (PDCprop_close(obj_prop) < 0) printf("Fail to close property @ line %d\n", __LINE__); else printf("successfully close object property\n"); + // close a container property if (PDCprop_close(cont_prop) < 0) printf("Fail to close property @ line %d\n", __LINE__); else @@ -194,6 +248,10 @@ main() // close pdc if (PDCclose(pdc) < 0) printf("fail to close PDC\n"); +done: +#ifdef ENABLE_MPI + MPI_Finalize(); +#endif return 0; } diff --git a/src/tests/kvtag_query_scale_col.c b/src/tests/kvtag_query_scale_col.c index 78d4b26b5..a2a4b8405 100644 --- a/src/tests/kvtag_query_scale_col.c +++ b/src/tests/kvtag_query_scale_col.c @@ -30,6 +30,7 @@ #include #include "pdc.h" #include "pdc_client_connect.h" +#include "string_utils.h" int assign_work_to_rank(int rank, int size, int nwork, int *my_count, int *my_start) @@ -140,6 +141,8 @@ main(int argc, char *argv[]) pdc_kvtag_t kvtag; uint64_t * pdc_ids; int nres, ntotal; + int * my_cnt_round; + int * total_cnt_round; #ifdef ENABLE_MPI MPI_Init(&argc, &argv); @@ -181,6 +184,9 @@ main(int argc, char *argv[]) dart_object_ref_type_t ref_type = REF_PRIMARY_ID; dart_hash_algo_t hash_algo = DART_HASH; + my_cnt_round = (int *)calloc(round, sizeof(int)); + total_cnt_round = (int *)calloc(round, sizeof(int)); + MPI_Barrier(MPI_COMM_WORLD); stime = MPI_Wtime(); @@ -193,8 +199,8 @@ main(int argc, char *argv[]) for (iter = 0; iter < round; iter++) { char attr_name[64]; char tag_value[64]; - snprintf(attr_name, 63, "%d%dattr_name%d%d", iter, iter, iter, iter); - snprintf(tag_value, 63, "%d%dtag_value%d%d", iter, iter, iter, iter); + snprintf(attr_name, 63, "%03d%03dattr_name%03d%03d", iter, iter, iter, iter); + snprintf(tag_value, 63, "%03d%03dtag_value%03d%03d", iter, iter, iter, iter); kvtag.name = strdup(attr_name); kvtag.value = (void *)strdup(tag_value); kvtag.type = PDC_STRING; @@ -212,8 +218,9 @@ main(int argc, char *argv[]) } free(kvtag.name); free(kvtag.value); + my_cnt_round[iter]++; } - if (my_rank == 0) { + if (my_rank == 0 && n_obj > 1000) { println("Rank %d: Added %d kvtag to the %d / %d th object, I'm applying selectivity %d to %d " "objects.\n", my_rank, round, i + 1, my_obj_after_selectivity, selectivity, my_obj); @@ -229,6 +236,9 @@ main(int argc, char *argv[]) } #ifdef ENABLE_MPI + for (i = 0; i < round; i++) + MPI_Allreduce(&my_cnt_round[i], &total_cnt_round[i], 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD); + MPI_Barrier(MPI_COMM_WORLD); #endif @@ -250,8 +260,8 @@ main(int argc, char *argv[]) #endif char attr_name[64]; char tag_value[64]; - snprintf(attr_name, 63, "%d%dattr_name%d%d", iter, iter, iter, iter); - snprintf(tag_value, 63, "%d%dtag_value%d%d", iter, iter, iter, iter); + snprintf(attr_name, 63, "%03d%03dattr_name%03d%03d", iter, iter, iter, 
iter); + snprintf(tag_value, 63, "%03d%03dtag_value%03d%03d", iter, iter, iter, iter); kvtag.name = strdup(attr_name); kvtag.value = (void *)strdup(tag_value); @@ -263,10 +273,11 @@ main(int argc, char *argv[]) input.base_tag = &kvtag; input.key_query_type = query_type; input.value_query_type = query_type; - input.affix_len = 4; + input.affix_len = 12; gen_query_key_value(&input, &output); + pdc_ids = NULL; if (is_using_dart) { char *query_string = gen_query_str(&output); ret_value = (comm_type == 0) @@ -278,7 +289,9 @@ else { kvtag.name = output.key_query; kvtag.value = output.value_query; + /* fprintf(stderr, " Rank %d: key [%s] value [%s]\n", my_rank, kvtag.name, + * kvtag.value); */ + ret_value = (comm_type == 0) ? PDC_Client_query_kvtag(&kvtag, &nres, &pdc_ids) : PDC_Client_query_kvtag_mpi(&kvtag, &nres, &pdc_ids, MPI_COMM_WORLD); } @@ -286,6 +299,13 @@ printf("fail to query kvtag [%s] with rank %d\n", kvtag.name, my_rank); break; } + + if (nres != total_cnt_round[iter]) + printf("Rank %d: query %d, comm %d, round %d - results %d do not match expected %d\n", + my_rank, query_type, comm_type, iter, nres, total_cnt_round[iter]); + round_total += nres; free(kvtag.name); free(kvtag.value); @@ -309,8 +329,8 @@ is_using_dart == 0 ? " NO " : " DART ", round, round_total, total_time * 1000.0); } #endif - } - } + } // end query type + } // end comm type if (my_rank == 0) { println("Rank %d: All queries are done.", my_rank); @@ -326,8 +346,8 @@ for (iter = 0; iter < round; iter++) { char attr_name[64]; char tag_value[64]; - snprintf(attr_name, 63, "%d%dattr_name%d%d", iter, iter, iter, iter); - snprintf(tag_value, 63, "%d%dtag_value%d%d", iter, iter, iter, iter); + snprintf(attr_name, 63, "%03d%03dattr_name%03d%03d", iter, iter, iter, iter); + snprintf(tag_value, 63, "%03d%03dtag_value%03d%03d", iter, iter, iter, iter); kvtag.name = strdup(attr_name); kvtag.value = (void *)strdup(tag_value); kvtag.type = PDC_STRING; @@ -340,6 +360,7 @@ PDCobj_del_tag(obj_ids[i], kvtag.name); } free(kvtag.name); + free(kvtag.value); } } diff --git a/src/tools/CMakeLists.txt b/src/tools/CMakeLists.txt index c7f43576f..0e9574b01 100644 --- a/src/tools/CMakeLists.txt +++ b/src/tools/CMakeLists.txt @@ -1,5 +1,75 @@ -project(${PDC_SOURCE_DIR}) -cmake_minimum_required (VERSION 3.0) +set(TOOLS_EXT_INCLUDE "") +set(TOOLS_EXT_LIB "") + +# ************************************************* +# * MERCURY +# ************************************************* +find_package(MERCURY REQUIRED) +if(MERCURY_FOUND) + set(TOOLS_EXT_INCLUDE ${MERCURY_INCLUDE_DIRS} + ${TOOLS_EXT_INCLUDE} + ) + set(TOOLS_EXT_LIB ${MERCURY_LIBRARIES} ${TOOLS_EXT_LIB}) +endif() + +#HDF5 +find_package(HDF5 MODULE) + if(NOT HDF5_FOUND) + message(STATUS "Could not find HDF5, falling back to NO_MODULE mode.") + find_package(HDF5 NO_MODULE NAMES hdf5 COMPONENTS C shared) + if(NOT HDF5_FOUND) + message(FATAL_ERROR "Could not find HDF5, please check HDF5_DIR or make sure that HDF5 has been compiled with shared libraries enabled.") + else() + set(HDF5_LIBRARIES ${HDF5_LIBRARIES} hdf5-shared) + set(HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIRS} ${HDF5_INCLUDE_DIR}) + endif() + endif() + +if (HDF5_FOUND) + set(TOOLS_EXT_INCLUDE + ${TOOLS_EXT_INCLUDE} + ${HDF5_INCLUDE_DIRS} + ) + set(TOOLS_EXT_LIB + ${TOOLS_EXT_LIB} + ${HDF5_LIBRARIES} + ) +endif() + +# option(USE_SYSTEM_HDF5 "Use
system-installed HDF5." ON) +# if(USE_SYSTEM_HDF5) +# find_package(HDF5 NO_MODULE NAMES hdf5 COMPONENTS C shared) +# if(HDF5_FOUND) +# set(HDF5_C_SHARED_LIBRARY hdf5-shared) +# # if(NOT TARGET ${HDF5_C_SHARED_LIBRARY}) +# # message(FATAL_ERROR "Could not find hdf5 shared target, please make " +# #"sure that HDF5 has ben compiled with shared libraries enabled.") +# # endif() +# set(TOOLS_EXT_INCLUDE +# ${TOOLS_EXT_INCLUDE} +# ${HDF5_INCLUDE_DIR} +# ) +# set(TOOLS_EXT_LIB +# ${TOOLS_EXT_LIB} +# ${HDF5_C_SHARED_LIBRARY} +# ) +# endif() +# else() +# # Allow for HDF5 autotools builds +# find_package(HDF5 MODULE REQUIRED) +# if(HDF5_FOUND) +# set(TOOLS_EXT_INCLUDE +# ${TOOLS_EXT_INCLUDE} +# ${HDF5_INCLUDE_DIRS} +# ) +# set(TOOLS_EXT_LIB +# ${TOOLS_EXT_LIB} +# ${HDF5_LIBRARIES} +# ) +# else() +# message(FATAL_ERROR "Could not find HDF5, please check HDF5_DIR.") +# endif() +# endif() include_directories( ${CMAKE_CURRENT_SOURCE_DIR} @@ -10,8 +80,10 @@ include_directories( ${PDC_SOURCE_DIR}/src/client_api/include $ENV{HOME}/Sandbox/c-blosc/blosc $ENV{HOME}/include + ${TOOLS_EXT_INCLUDE} ) + set(PROGRAMS pdc_import pdc_export @@ -22,12 +94,5 @@ add_library(cjson cjson/cJSON.c) foreach(program ${PROGRAMS}) add_executable(${program} ${program}.c) - target_link_libraries(${program} pdc) - target_link_libraries(${program} cjson) + target_link_libraries(${program} pdc cjson ${TOOLS_EXT_LIB}) endforeach(program) - - -# ******************************************* -# Add the HDF5 library for pdc-neon -# ******************************************* -FIND_LIBRARY(HDF5_LIBRARY NAMES hdf5_debug PATHS $ENV{HOME}/lib) diff --git a/src/tools/pdc_ls.c b/src/tools/pdc_ls.c index 47fecbcd3..c4800298c 100644 --- a/src/tools/pdc_ls.c +++ b/src/tools/pdc_ls.c @@ -114,27 +114,89 @@ int pdc_server_rank_g = 0; int pdc_server_size_g = 1; double total_mem_usage_g = 0.0; +int +isDir(const char *fileName) +{ + struct stat path; + stat(fileName, &path); + return S_ISREG(path.st_mode); +} + static void pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]); int main(int argc, char *argv[]) { if (argc == 1) { - printf("Expected directory/checkpoint file.\n"); - return 1; + printf("Usage: ./pdc_ls pdc_checkpoint_directory/file [-n obj_name] [-i obj_id] [-json json_fname] " + "[-ln (list all names)] [-ls (list all ids)] [-s (summary)]\n"); + return 0; } else { FileNameNode * head = NULL; FileNameNode * cur_node = NULL; DIR * d; struct dirent *dir; + struct dirent *direc; d = opendir(argv[1]); + char *full_path; if (d) { while ((dir = readdir(d)) != NULL) { + // if it's directory + if (!isDir(dir->d_name)) { + if (strstr(dir->d_name, ".")) { + // ignore parent and current directories + continue; + } + // appends path together + char tmp[1024]; + sprintf(tmp, "%s/%s", argv[1], dir->d_name); + DIR *d1 = opendir(tmp); + /* printf("%s\n", tmp); */ + + while ((direc = readdir(d1)) != NULL) { // go into it and go for checkpoint files again + if (strstr(direc->d_name, "metadata_checkpoint.")) { + // printf("getting checkpoints\n"); + char last = argv[1][strlen(argv[1]) - 1]; + if (last == '/') { + full_path = (char *)malloc(sizeof(char) * + (strlen(argv[1]) + strlen(direc->d_name) + 1)); + strcpy(full_path, argv[1]); + strcat(full_path, direc->d_name); + strcat(full_path, "/"); + strcat(full_path, direc->d_name); + } + else { + full_path = (char *)malloc(sizeof(char) * + (strlen(argv[1]) + strlen(direc->d_name) + 2)); + strcpy(full_path, argv[1]); + strcat(full_path, "/"); + strcat(full_path, dir->d_name); + strcat(full_path, "/"); + 
strcat(full_path, direc->d_name); + } + if (head == NULL) { + FileNameNode *new_node = (FileNameNode *)malloc(sizeof(FileNameNode)); + new_node->file_name = full_path; + new_node->next = NULL; + head = new_node; + cur_node = new_node; + } + else { + FileNameNode *new_node = (FileNameNode *)malloc(sizeof(FileNameNode)); + new_node->file_name = full_path; + new_node->next = NULL; + cur_node->next = new_node; + cur_node = new_node; + } + } + } + closedir(d1); + } if (strstr(dir->d_name, "metadata_checkpoint.")) { - char last = argv[1][strlen(argv[1]) - 1]; - char *full_path; + /* printf("%s\n", dir->d_name); */ + char last = argv[1][strlen(argv[1]) - 1]; if (last == '/') { full_path = (char *)malloc(sizeof(char) * (strlen(argv[1]) + strlen(dir->d_name) + 1)); @@ -163,14 +225,15 @@ main(int argc, char *argv[]) cur_node = new_node; } } - } + } // End while readdir closedir(d); - } + } // End if d else { + // Open one checkpoint file FILE *file = fopen(argv[1], "r"); if (file != NULL) { - FileNameNode *new_node = (FileNameNode *)malloc(sizeof(FileNameNode)); - char * full_path = (char *)malloc(sizeof(char) * (strlen(argv[1]) + 1)); + FileNameNode *new_node = (FileNameNode *)malloc(sizeof(FileNameNode)); + full_path = (char *)malloc(sizeof(char) * (strlen(argv[1]) + 1)); strcpy(full_path, argv[1]); new_node->file_name = full_path; new_node->next = NULL; @@ -179,9 +242,10 @@ main(int argc, char *argv[]) fclose(file); } } + if (head == NULL) { printf("Unable to open/locate checkpoint file(s).\n"); - return 1; + return -1; } else { pdc_ls(head, argc, argv); @@ -311,6 +375,7 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) int list_names = 0; int list_ids = 0; int summary = 0; + int print_all = 1; int arg_index = 2; while (arg_index < argc) { @@ -332,12 +397,15 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) } else if (strcmp(argv[arg_index], "-ln") == 0) { list_names = 1; + print_all = 0; } else if (strcmp(argv[arg_index], "-li") == 0) { - list_ids = 1; + list_ids = 1; + print_all = 0; } else if (strcmp(argv[arg_index], "-s") == 0) { - summary = 1; + summary = 1; + print_all = 0; } arg_index++; } @@ -365,7 +433,7 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) while (cur_file_node != NULL) { filename = cur_file_node->file_name; stat(filename, &attr); - printf("[INFO] File [%s] last modified at: %s\n", filename, ctime(&attr.st_mtime)); + printf("[INFO] File [%s] last modified at: %s", filename, ctime(&attr.st_mtime)); // Start server restart code perr_t ret_value = SUCCEED; int n_entry, count, i, j, nobj = 0, all_nobj = 0, all_n_region, n_region, n_objs, total_region = 0, @@ -387,19 +455,21 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) } if (fread(&n_cont, sizeof(int), 1, file) != 1) { - printf("Read failed for n_count\n"); + printf("Read failed for cont count\n"); } all_cont = n_cont; while (n_cont > 0) { hash_key = (uint32_t *)malloc(sizeof(uint32_t)); if (fread(hash_key, sizeof(uint32_t), 1, file) != 1) { - printf("Read failed for hash_key\n"); + printf("Read failed for cont hash_key\n"); + return; } // Reconstruct hash table cont_entry = (pdc_cont_hash_table_entry_t *)malloc(sizeof(pdc_cont_hash_table_entry_t)); if (fread(cont_entry, sizeof(pdc_cont_hash_table_entry_t), 1, file) != 1) { printf("Read failed for cont_entry\n"); + return; } n_cont--; @@ -410,12 +480,14 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) while (n_entry > 0) { if (fread(&count, sizeof(int), 1, file) != 1) { +
printf("Read failed for obj count\n"); + return; } hash_key = (uint32_t *)malloc(sizeof(uint32_t)); if (fread(hash_key, sizeof(uint32_t), 1, file) != 1) { - printf("Read failed for hash_key\n"); + printf("Read failed for obj hash_key\n"); + return; } // Reconstruct hash table @@ -481,6 +553,9 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) if (fread(&kvtag_list->kvtag->size, sizeof(uint32_t), 1, file) != 1) { printf("Read failed for kvtag_list->kvtag->size\n"); } + if (fread(&kvtag_list->kvtag->type, sizeof(int8_t), 1, file) != 1) { + printf("Read failed for kvtag_list->kvtag->size\n"); + } kvtag_list->kvtag->value = malloc(kvtag_list->kvtag->size); if (fread(kvtag_list->kvtag->value, kvtag_list->kvtag->size, 1, file) != 1) { printf("Read failed for kvtag_list->kvtag->value\n"); @@ -653,11 +728,11 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) cJSON * start_arr_json = NULL; cJSON * output = cJSON_CreateObject(); int prev_cont_id = -1; + char buf[1024]; while (cur_m_node != NULL) { cur_metadata = cur_m_node->metadata_ptr; if (prev_cont_id != cur_metadata->cont_id) { cont_id_json = cJSON_CreateArray(); - char buf[20]; sprintf(buf, "cont_id: %d", cur_metadata->cont_id); cJSON_AddItemToObject(output, buf, cont_id_json); } @@ -678,7 +753,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) } } - char buf[12]; sprintf(buf, "%d", wanted_id); reti = regcomp(®ex, buf, 0); if (reti) { @@ -687,7 +761,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) } } else { - // char buf[12]; sprintf(buf, "%d", cur_metadata->obj_id); reti = regexec(®ex, buf, 0, NULL, 0); if (!reti) { @@ -715,8 +788,7 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) add_obj = matched_name; } else if (wanted_id) { - int matched_id = 0; - char buf[12]; + int matched_id = 0; sprintf(buf, "%d", wanted_id); reti = regcomp(®ex, buf, 0); if (reti) { @@ -725,7 +797,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) } } else { - // char buf[12]; sprintf(buf, "%d", cur_metadata->obj_id); reti = regexec(®ex, buf, 0, NULL, 0); if (!reti) { @@ -739,7 +810,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) add(obj_names, cur_metadata->obj_name); } if (list_ids) { - char buf[12]; sprintf(buf, "%d", cur_metadata->obj_id); add(obj_ids, buf); } @@ -768,8 +838,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) cJSON_AddStringToObject(region_info_json, "storage_loc", cur_region->storage_location); cJSON_AddNumberToObject(region_info_json, "offset", cur_region->offset); cJSON_AddNumberToObject(region_info_json, "num_dims", cur_region->ndim); - // FIXME: statement with no effect. what did we expect to do here? 
- // dims[cur_region->ndim]; for (int i = 0; i < (cur_metadata->ndim); i++) { dims[i] = (cur_region->start)[i]; } @@ -798,13 +866,13 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) FILE *fp; if (output_file_name) { fp = fopen(output_file_name, "w"); + printf("Output to [%s]\n", output_file_name); } else { fp = stdout; } if (list_names) { - cJSON *all_names_json = - cJSON_CreateStringArray((const char *const *)obj_names->items, obj_names->length); + cJSON *all_names_json = cJSON_CreateStringArray((const char **)obj_names->items, obj_names->length); cJSON_AddItemToObject(output, "all_obj_names", all_names_json); } if (list_ids) { @@ -816,7 +884,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) cJSON_AddItemToObject(output, "all_obj_ids", all_ids_json); } if (summary) { - char buf[100]; sprintf(buf, "pdc_ls found: %d containers, %d objects, %d regions", all_cont_total, all_nobj_total, all_n_region_total); cJSON_AddStringToObject(output, "summary", buf); @@ -827,4 +894,6 @@ pdc_ls(FileNameNode *file_name_node, int argc, char *argv[]) char *json_string = cJSON_Print(output); fprintf(fp, json_string); fprintf(fp, "\n"); + if (output_file_name) + fclose(fp); } diff --git a/tools/.gitignore b/tools/.gitignore deleted file mode 100644 index 51bce2904..000000000 --- a/tools/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -CMakeFiles -CMakeCache.txt -Makefile -cmake_install.cmake -pdc_ls -pdc_tmp \ No newline at end of file diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt deleted file mode 100644 index b14402393..000000000 --- a/tools/CMakeLists.txt +++ /dev/null @@ -1,136 +0,0 @@ -cmake_minimum_required (VERSION 2.8.12) - -# Setup cmake policies. -foreach(p - CMP0012 - CMP0013 - CMP0014 - CMP0022 # CMake 2.8.12 - CMP0025 # CMake 3.0 - CMP0053 # CMake 3.1 - CMP0054 # CMake 3.1 - CMP0074 # CMake 3.12 - CMP0075 # CMake 3.12 - CMP0083 # CMake 3.14 - CMP0093 # CMake 3.15 - ) - if(POLICY ${p}) - cmake_policy(SET ${p} NEW) - endif() -endforeach() - -project(PDC_VOL C) - -include_directories( - ${PDC_EXT_INCLUDE_DEPENDENCIES} -) - -find_package(PDC REQUIRED) -if(PDC_FOUND) - #message(STATUS "PDC include directory: ${PDC_INCLUDE_DIR}") - set(PDC_EXT_INCLUDE_DEPENDENCIES ${PDC_INCLUDE_DIR} - ${PDC_EXT_INCLUDE_DEPENDENCIES} - ) - set(PDC_EXT_LIB_DEPENDENCIES pdc ${PDC_EXT_LIB_DEPENDENCIES}) -endif() - -#HDF5 -option(USE_SYSTEM_HDF5 "Use system-installed HDF5." ON) - if(USE_SYSTEM_HDF5) - find_package(HDF5 NO_MODULE NAMES hdf5 COMPONENTS C shared) - if(HDF5_FOUND) - set(HDF5_C_SHARED_LIBRARY hdf5-shared) -# if(NOT TARGET ${HDF5_C_SHARED_LIBRARY}) -# message(FATAL_ERROR "Could not find hdf5 shared target, please make " -#"sure that HDF5 has ben compiled with shared libraries enabled.") -# endif() - set(PDC_EXT_INCLUDE_DEPENDENCIES - ${PDC_EXT_INCLUDE_DEPENDENCIES} - ${HDF5_INCLUDE_DIR} - ) - set(PDC_EXT_LIB_DEPENDENCIES - ${PDC_EXT_LIB_DEPENDENCIES} - ${HDF5_C_SHARED_LIBRARY} - ) - else() - # Allow for HDF5 autotools builds - find_package(HDF5 MODULE REQUIRED) - if(HDF5_FOUND) - set(PDC_EXT_INCLUDE_DEPENDENCIES - ${PDC_EXT_INCLUDE_DEPENDENCIES} - ${HDF5_INCLUDE_DIRS} - ) - set(PDC_EXT_LIB_DEPENDENCIES - ${PDC_EXT_LIB_DEPENDENCIES} - ${HDF5_LIBRARIES} - ) - else() - message(FATAL_ERROR "Could not find HDF5, please check HDF5_DIR.") - endif() - endif() -endif() - -option(USE_SYSTEM_OPENMP "Use system-installed OpenMP." 
ON) -if(USE_SYSTEM_OPENMP) - find_package(OpenMP REQUIRED) - if(OPENMP_FOUND) - add_definitions(-DENABLE_OPENMP=1) - set(ENABLE_OPENMP 1) - set(OPENMP_LIBRARIES "${OpenMP_C_LIBRARIES}") - else() - message(FATAL_ERROR "OpenMP not found") - endif() -endif() - - -add_definitions(-DENABLE_MPI=1) -add_library(cjson cjson/cJSON.c) - - -# set(PROGRAMS -# pdc_ls -# pdc_import -# pdc_export -# ) - -# foreach(program ${PROGRAMS}) -# add_executable(${program} ${program}.c) -# target_link_libraries(${program} ${PDC_EXT_LIB_DEPENDENCIES}) -# target_link_libraries(${program} pdc) -# target_link_libraries(${program} cjson) -# target_include_directories(${program} PUBLIC ${PDC_INCLUDE_DIR}) -# endforeach(program) - - -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3 -fopenmp -DNDEBUG") - -# Find LibTIFF -option(USE_LIB_TIFF "Enable LibTiff." ON) -if(USE_LIB_TIFF) - find_package(TIFF REQUIRED) - if(TIFF_FOUND) - set(LLSM_LIB_SOURCE - llsm/parallelReadTiff.c - llsm/csvReader.c - llsm/pdc_list.c - ) - # Add the LibTIFF include directory to the include path - include_directories(${TIFF_INCLUDE_DIRS}) - add_library(llsm_tiff ${LLSM_LIB_SOURCE}) - target_compile_options(llsm_tiff PRIVATE ${OpenMP_C_FLAGS}) - target_link_libraries(llsm_tiff PUBLIC ${OpenMP_C_LIBRARIES}) - target_link_libraries(llsm_tiff PUBLIC ${TIFF_LIBRARIES}) - target_include_directories(llsm_tiff PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/llsm) - - - add_executable(llsm_importer llsm_importer.c) - target_link_libraries(llsm_importer ${PDC_EXT_LIB_DEPENDENCIES}) - target_link_libraries(llsm_importer pdc) - target_link_libraries(llsm_importer cjson) - target_link_libraries(llsm_importer ${TIFF_LIBRARIES}) - target_link_libraries(llsm_importer llsm_tiff) - target_include_directories(llsm_importer PUBLIC ${PDC_INCLUDE_DIR}) - else() - message(WARNING "LibTiff not found, ignore building the executables which requires LibTiff support.") - endif() -endif() \ No newline at end of file
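For reference, below is a minimal client-side sketch of the kvtag round-trip that the new kvtag_add_get and kvtag_query tests exercise. It is illustrative only, not part of the patch: it assumes a running PDC server and a single process, the container/object/attribute names ("c1", "obj0", "attr0") are placeholders, and every API call used here appears in the diffs above.

/* Build against libpdc, e.g.: cc kvtag_sketch.c -lpdc */
#include <stdio.h>
#include <stdlib.h>
#include "pdc.h"
#include "pdc_client_connect.h"

int
main(void)
{
    pdcid_t     pdc, cont_prop, cont, obj_prop, obj;
    pdc_kvtag_t kvtag;
    uint64_t   *pdc_ids = NULL;
    int         v = 42, nres = 0;

    // Standard setup: PDC context, container property, container, object
    pdc       = PDCinit("pdc");
    cont_prop = PDCprop_create(PDC_CONT_CREATE, pdc);
    cont      = PDCcont_create("c1", cont_prop);
    obj_prop  = PDCprop_create(PDC_OBJ_CREATE, pdc);
    obj       = PDCobj_create(cont, "obj0", obj_prop);

    /* Attach one integer tag; on the server side the request is routed to
     * RocksDB, SQLite3, or the default in-memory store, as in the
     * PDC_Server_get_kvtag / PDC_Server_del_kvtag dispatch above. */
    kvtag.name  = "attr0";
    kvtag.value = (void *)&v;
    kvtag.type  = PDC_INT;
    kvtag.size  = sizeof(int);
    if (PDCobj_put_tag(obj, kvtag.name, kvtag.value, kvtag.type, kvtag.size) < 0)
        printf("fail to add a kvtag\n");

    // Query for objects carrying this tag; nres and pdc_ids are filled in
    if (PDC_Client_query_kvtag(&kvtag, &nres, &pdc_ids) < 0)
        printf("fail to query kvtag [%s]\n", kvtag.name);
    else
        printf("query found %d object(s)\n", nres);

    // Tear everything down in reverse order of creation
    free(pdc_ids);
    PDCobj_close(obj);
    PDCcont_close(cont);
    PDCprop_close(obj_prop);
    PDCprop_close(cont_prop);
    PDCclose(pdc);
    return 0;
}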