diff --git a/.github/workflows/ci-cpu-cpp.yml b/.github/workflows/ci-cpu-cpp.yml
index 3391df8db4..ff7db9cead 100644
--- a/.github/workflows/ci-cpu-cpp.yml
+++ b/.github/workflows/ci-cpu-cpp.yml
@@ -16,17 +16,42 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-20.04, macOS-latest]
+        os: [ubuntu-20.04]
     steps:
+      # - name: Setup Python for M1
+      #   if: matrix.os == 'macos-14'
+      #   uses: actions/setup-python@v5
+      #   with:
+      #     python-version: '3.10'
+      - name: Setup Python for all other OS
+        if: matrix.os != 'macos-14'
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+          architecture: x64
+      - name: Setup Java 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: 'zulu'
+          java-version: '17'
       - name: Checkout TorchServe
-        uses: actions/checkout@v2
-      - name: Install libtorch - macOS
-        if: matrix.os == 'macOS-latest'
-        run: |
-          brew install libtorch
+        uses: actions/checkout@v3
+        with:
+          submodules: recursive
+      # - name: Install libtorch - macOS
+      #   if: matrix.os == 'macOS-latest'
+      #   run: |
+      #     brew install libtorch
       - name: Install dependencies
         run: |
-          python ts_scripts/install_dependencies.py --environment=dev --cpp
+          sudo apt update && python ts_scripts/install_dependencies.py --environment=dev --cpp
+      - name: Install TorchServe
+        run: |
+          python ts_scripts/install_from_src.py
+      - name: Print Env
+        run: |
+          python ts_scripts/print_env_info.py
       - name: Build
         run: |
-          cd cpp && ./build.sh
+          cd cpp && rm -rf _build && sudo mkdir /mnt/_build && sudo chmod 777 /mnt/_build && mkdir _build && sudo mount --bind /mnt/_build _build
+          ./build.sh
diff --git a/.gitmodules b/.gitmodules
index f24d0431c1..5da091df9a 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,9 +1,6 @@
 [submodule "third_party/google/rpc"]
	path = third_party/google/rpc
	url = https://github.com/googleapis/googleapis.git
-[submodule "cpp/third-party/llama.cpp"]
-	path = cpp/third-party/llama.cpp
-	url = https://github.com/ggerganov/llama.cpp.git
 [submodule "cpp/third-party/llama2.c"]
	path = cpp/third-party/llama2.c
	url = https://github.com/karpathy/llama2.c
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
index f466ee6a6b..6b05103bb5 100644
--- a/cpp/CMakeLists.txt
+++ b/cpp/CMakeLists.txt
@@ -18,10 +18,6 @@ if(CLANG_FORMAT_EXE)
     ${PROJECT_SOURCE_DIR}/test/*.hh
   )
 
-  add_custom_target(format
-    COMMAND
-    ${CLANG_FORMAT_EXE} -i -style=google ${ALL_CXX_SOURCE_FILES}
-  )
 endif()
 
 
@@ -31,6 +27,21 @@ find_package(fmt REQUIRED)
 find_package(gflags REQUIRED)
 find_package(Torch REQUIRED)
 
+include(FetchContent)
+
+FetchContent_Declare(
+  yaml-cpp
+  GIT_REPOSITORY https://github.com/jbeder/yaml-cpp.git
+  GIT_TAG 0.8.0 # Can be a tag (yaml-cpp-x.x.x), a commit hash, or a branch name (master)
+)
+FetchContent_GetProperties(yaml-cpp)
+
+if(NOT yaml-cpp_POPULATED)
+  message(STATUS "Fetching yaml-cpp...")
+  FetchContent_Populate(yaml-cpp)
+  add_subdirectory(${yaml-cpp_SOURCE_DIR} ${yaml-cpp_BINARY_DIR})
+endif()
+
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")
 include_directories(${TORCH_INCLUDE_DIRS})
diff --git a/cpp/README.md b/cpp/README.md
index a264932e82..cded890b1b 100644
--- a/cpp/README.md
+++ b/cpp/README.md
@@ -5,29 +5,20 @@
 * cmake version: 3.18+
 ## Installation and Running TorchServe CPP
+These installation instructions assume that TorchServe is already installed via pip, conda, or from source. If that is not the case, install it after the `Install dependencies` step using your preferred method.
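+For example, a source install (run after the `Install dependencies` step below) is:
+```
+cd serve
+python ts_scripts/install_from_src.py
+```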
+
 ### Install dependencies
 ```
 cd serve
 python ts_scripts/install_dependencies.py --cpp --environment dev [--cuda=cu121|cu118]
 ```
 ### Building the backend
+Install or update TorchServe at this point if it wasn't previously installed.
 ```
 ## Dev Build
 cd cpp
 ./build.sh [-g cu121|cu118]
-## Install TorchServe from source
-cd ..
-python ts_scripts/install_from_src.py
-```
-### Set Environment Var
-#### On Mac
-```
-export DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
-```
-#### On Ubuntu
-```
-export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
 ```
 
 ### Run TorchServe
@@ -35,6 +26,13 @@ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
 mkdir model_store
 torchserve --ncs --start --model-store model_store
 ```
+
+### Clean the build directory
+To rebuild from scratch, simply delete the cpp/_build directory:
+```
+rm -rf cpp/_build
+```
+
 ## Backend
 TorchServe cpp backend can run as a process, which is similar to [TorchServe Python backend](https://github.com/pytorch/serve/tree/master/ts). By default, TorchServe supports torch scripted model in cpp backend. Other platforms such as MxNet, ONNX can be supported through custom handlers following the TorchScript example [src/backends/handler/torch_scripted_handler.hh](https://github.com/pytorch/serve/blob/master/cpp/src/backends/handler/torch_scripted_handler.hh).
 ### Custom Handler
@@ -89,11 +87,11 @@ python -c "import ts; from pathlib import Path; print((Path(ts.__file__).parent
 3. Make sure you have the right conda/venv environment activated during building that you're also using to run TorchServe.
 
 Q: Build on Mac fails with `Library not loaded: @rpath/libomp.dylib`
-A: Install libomp with brew and link in /usr/local/lib 
+A: Install libomp with brew and link in /usr/local/lib
 ```bash
 brew install libomp
 sudo ln -s /opt/homebrew/opt/libomp/lib/libomp.dylib /usr/local/lib/libomp.dylib
 ```
 
 Q: When loading a handler which uses a model exported with torch._export.aot_compile the handler dies with "error: Error in dlopen: MODEL.SO : undefined symbol: SOME_SYMBOL".
-A: Make sure that you are using matching libtorch and Pytorch versions for inference and export, respectively.
\ No newline at end of file
+A: Make sure that you are using matching libtorch and Pytorch versions for inference and export, respectively.
diff --git a/cpp/build.sh b/cpp/build.sh
index b986163852..a76c69c5b1 100755
--- a/cpp/build.sh
+++ b/cpp/build.sh
@@ -28,14 +28,9 @@ function install_folly() {
   echo -e "${COLOR_GREEN}[ INFO ] Building Folly ${COLOR_OFF}"
   cd $FOLLY_SRC_DIR
 
-  if [ "$PLATFORM" = "Linux" ]; then
-    SUDO="sudo"
-  elif [ "$PLATFORM" = "Mac" ]; then
-    SUDO=""
-  fi
-  $SUDO ./build/fbcode_builder/getdeps.py install-system-deps --recursive
+  ./build/fbcode_builder/getdeps.py install-system-deps --recursive
 
-  $SUDO ./build/fbcode_builder/getdeps.py build \
+  ./build/fbcode_builder/getdeps.py build \
     --allow-system-packages \
     --scratch-path $FOLLY_BUILD_DIR \
     --extra-cmake-defines='{"CMAKE_CXX_FLAGS": "-fPIC -D_GLIBCXX_USE_CXX11_ABI=1"}'
@@ -47,36 +42,29 @@ function install_folly() {
   echo "$FOLLY_BUILD_DIR/installed"
 }
 
-function install_kineto() {
-  if [ "$PLATFORM" = "Linux" ]; then
-    echo -e "${COLOR_GREEN}[ INFO ] Skip install kineto on Linux ${COLOR_OFF}"
-  elif [ "$PLATFORM" = "Mac" ]; then
-    KINETO_SRC_DIR=$BASE_DIR/third-party/kineto
-
-    if [ ! -d "$KINETO_SRC_DIR/libkineto/build" ] ; then
-      cd $KINETO_SRC_DIR/libkineto
-      mkdir build && cd build
-      cmake ..
-      make install
-    fi
-  fi
-
-  cd "$BWD" || exit
-}
-
 function install_libtorch() {
+  cd "$DEPS_DIR" || exit
   TORCH_VERSION="2.2.1"
+  if [ -d "$DEPS_DIR/libtorch" ]; then
+    RAW_VERSION=`cat "$DEPS_DIR/libtorch/build-version"`
+    VERSION=`cat "$DEPS_DIR/libtorch/build-version" | cut -d "+" -f 1`
+    if [ "$USE_NIGHTLIES" = "true" ] && [[ ! "${RAW_VERSION}" =~ .*"dev".* ]]; then
+      rm -rf "$DEPS_DIR/libtorch"
+    elif [ "$USE_NIGHTLIES" == "" ] && [ "$VERSION" != "$TORCH_VERSION" ]; then
+      rm -rf "$DEPS_DIR/libtorch"
+    fi
+  fi
   if [ "$PLATFORM" = "Mac" ]; then
     if [ ! -d "$DEPS_DIR/libtorch" ]; then
       if [[ $(uname -m) == 'x86_64' ]]; then
         echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Mac x86_64 ${COLOR_OFF}"
-        wget https://download.pytorch.org/libtorch/cpu/libtorch-macos-x86_64-${TORCH_VERSION}.zip
-        unzip libtorch-macos-x86_64-${TORCH_VERSION}.zip
+        wget -q https://download.pytorch.org/libtorch/cpu/libtorch-macos-x86_64-${TORCH_VERSION}.zip
+        unzip -q libtorch-macos-x86_64-${TORCH_VERSION}.zip
         rm libtorch-macos-x86_64-${TORCH_VERSION}.zip
       else
         echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Mac arm64 ${COLOR_OFF}"
-        wget https://download.pytorch.org/libtorch/cpu/libtorch-macos-arm64-${TORCH_VERSION}.zip
-        unzip libtorch-macos-arm64-${TORCH_VERSION}.zip
+        wget -q https://download.pytorch.org/libtorch/cpu/libtorch-macos-arm64-${TORCH_VERSION}.zip
+        unzip -q libtorch-macos-arm64-${TORCH_VERSION}.zip
         rm libtorch-macos-arm64-${TORCH_VERSION}.zip
       fi
     fi
@@ -86,27 +74,17 @@ function install_libtorch() {
     echo -e "${COLOR_RED}[ ERROR ] Unknown platform: $PLATFORM ${COLOR_OFF}"
     exit 1
   else # Linux
-    if [ -d "$DEPS_DIR/libtorch" ]; then
-      RAW_VERSION=`cat "$DEPS_DIR/libtorch/build-version"`
-      VERSION=`cat "$DEPS_DIR/libtorch/build-version" | cut -d "+" -f 1`
-      if [ "$USE_NIGHTLIES" = "true" ] && [[ ! "${RAW_VERSION}" =~ .*"dev".* ]]; then
-        rm -rf "$DEPS_DIR/libtorch"
-      elif [ "$USE_NIGHTLIES" == "" ] && [ "$VERSION" != "$TORCH_VERSION" ]; then
-        rm -rf "$DEPS_DIR/libtorch"
-      fi
-    fi
     if [ ! -d "$DEPS_DIR/libtorch" ]; then
-      cd "$DEPS_DIR" || exit
       echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Linux ${COLOR_OFF}"
       if [ "$USE_NIGHTLIES" == true ]; then
        URL=https://download.pytorch.org/libtorch/nightly/${CUDA}/libtorch-cxx11-abi-shared-with-deps-latest.zip
       else
        URL=https://download.pytorch.org/libtorch/${CUDA}/libtorch-cxx11-abi-shared-with-deps-${TORCH_VERSION}%2B${CUDA}.zip
       fi
-      wget $URL
+      wget -q $URL
       ZIP_FILE=$(basename "$URL")
       ZIP_FILE="${ZIP_FILE//%2B/+}"
-      unzip $ZIP_FILE
+      unzip -q $ZIP_FILE
       rm $ZIP_FILE
     fi
     echo -e "${COLOR_GREEN}[ INFO ] libtorch is installed ${COLOR_OFF}"
@@ -115,58 +93,22 @@ function install_libtorch() {
   cd "$BWD" || exit
 }
 
-function install_yaml_cpp() {
-  YAML_CPP_SRC_DIR=$BASE_DIR/third-party/yaml-cpp
-  YAML_CPP_BUILD_DIR=$DEPS_DIR/yaml-cpp-build
-
-  if [ ! -d "$YAML_CPP_BUILD_DIR" ] ; then
-    echo -e "${COLOR_GREEN}[ INFO ] Building yaml-cpp ${COLOR_OFF}"
-
-    if [ "$PLATFORM" = "Linux" ]; then
-      SUDO="sudo"
-    elif [ "$PLATFORM" = "Mac" ]; then
-      SUDO=""
-    fi
-
-    mkdir $YAML_CPP_BUILD_DIR
-    cd $YAML_CPP_BUILD_DIR
-    cmake $YAML_CPP_SRC_DIR -DYAML_BUILD_SHARED_LIBS=ON -DYAML_CPP_BUILD_TESTS=OFF -DCMAKE_CXX_FLAGS="-fPIC"
-    $SUDO make install
-
-    echo -e "${COLOR_GREEN}[ INFO ] yaml-cpp is installed ${COLOR_OFF}"
-  fi
-
-  cd "$BWD" || exit
-}
-
-function build_llama_cpp() {
-  BWD=$(pwd)
-  LLAMA_CPP_SRC_DIR=$BASE_DIR/third-party/llama.cpp
-  cd "${LLAMA_CPP_SRC_DIR}"
-  if [ "$PLATFORM" = "Mac" ]; then
-    make LLAMA_METAL=OFF -j
-  else
-    make -j
-  fi
-  cd "$BWD" || exit
-}
-
 function prepare_test_files() {
   echo -e "${COLOR_GREEN}[ INFO ]Preparing test files ${COLOR_OFF}"
   local EX_DIR="${TR_DIR}/examples/"
   rsync -a --link-dest=../../test/resources/ ${BASE_DIR}/test/resources/ ${TR_DIR}/
   if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin" ]; then
-    wget https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
+    wget -q https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
   fi
   if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin" ]; then
-    wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
+    wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
   fi
   # PT2.2 torch.expport does not support Mac
   if [ "$PLATFORM" = "Linux" ]; then
     if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
       local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
       if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
-        wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
+        wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
       fi
       local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
       PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
@@ -221,12 +163,11 @@ function build() {
 
   # Build torchserve_cpp with cmake
   cd "$BWD" || exit
-  YAML_CPP_CMAKE_DIR=$DEPS_DIR/yaml-cpp-build
   FOLLY_CMAKE_DIR=$DEPS_DIR/folly-build/installed
   find $FOLLY_CMAKE_DIR -name "lib*.*" -exec ln -s "{}" $LIBS_DIR/ \;
   if [ "$PLATFORM" = "Linux" ]; then
     cmake \
-      -DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$YAML_CPP_CMAKE_DIR;$DEPS_DIR/libtorch" \
+      -DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$DEPS_DIR/libtorch" \
       -DCMAKE_INSTALL_PREFIX="$PREFIX" \
       "$MAYBE_BUILD_QUIC" \
       "$MAYBE_BUILD_TESTS" \
@@ -242,8 +183,10 @@ function build() {
       export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/cuda/bin/nvcc
     fi
   elif [ "$PLATFORM" = "Mac" ]; then
+    export LIBRARY_PATH=${LIBRARY_PATH}:`brew --prefix icu4c`/lib:`brew --prefix libomp`/lib
+
     cmake \
-      -DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$YAML_CPP_CMAKE_DIR;$DEPS_DIR/libtorch" \
+      -DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$DEPS_DIR/libtorch" \
       -DCMAKE_INSTALL_PREFIX="$PREFIX" \
       "$MAYBE_BUILD_QUIC" \
       "$MAYBE_BUILD_TESTS" \
@@ -252,9 +195,10 @@ function build() {
       "$MAYBE_USE_STATIC_DEPS" \
       "$MAYBE_LIB_FUZZING_ENGINE" \
       "$MAYBE_NIGHTLIES" \
+      "-DLLAMA_METAL=OFF" \
       ..
 
-    export LIBRARY_PATH=${LIBRARY_PATH}:/usr/local/opt/icu4c/lib
+
   else
     # TODO: Windows
     echo -e "${COLOR_RED}[ ERROR ] Unknown platform: $PLATFORM ${COLOR_OFF}"
@@ -282,16 +226,8 @@ function symlink_torch_libs() {
   fi
 }
 
-function symlink_yaml_cpp_lib() {
-  if [ "$PLATFORM" = "Linux" ]; then
-    ln -sf ${DEPS_DIR}/yaml-cpp-build/*.so* ${LIBS_DIR}
-  elif [ "$PLATFORM" = "Mac" ]; then
-    ln -sf ${DEPS_DIR}/yaml-cpp-build/*.dylib* ${LIBS_DIR}
-  fi
-}
-
 function install_torchserve_cpp() {
-  TARGET_DIR=$BASE_DIR/../ts/cpp/
+  TARGET_DIR=`python -c "import ts; from pathlib import Path; print(Path(ts.__file__).parent / 'cpp')"`
 
   if [ -d $TARGET_DIR ]; then
     rm -rf $TARGET_DIR
@@ -370,12 +306,8 @@ cd $BASE_DIR
 git submodule update --init --recursive
 
 install_folly
-#install_kineto
install_libtorch
-install_yaml_cpp
-build_llama_cpp
 prepare_test_files
 build
 symlink_torch_libs
-symlink_yaml_cpp_lib
 install_torchserve_cpp
diff --git a/cpp/third-party/llama.cpp b/cpp/third-party/llama.cpp
deleted file mode 160000
index cd4fddb29f..0000000000
--- a/cpp/third-party/llama.cpp
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit cd4fddb29f81d6a1f6d51a0c016bc6b486d68def
diff --git a/examples/cpp/llamacpp/CMakeLists.txt b/examples/cpp/llamacpp/CMakeLists.txt
index e071167585..084c667830 100644
--- a/examples/cpp/llamacpp/CMakeLists.txt
+++ b/examples/cpp/llamacpp/CMakeLists.txt
@@ -1,20 +1,20 @@
 set(LLAMACPP_SRC_DIR "${torchserve_cpp_SOURCE_DIR}/third-party/llama.cpp")
+set(CMAKE_POSITION_INDEPENDENT_CODE ON)
 
 add_library(llamacpp_handler SHARED src/llamacpp_handler.cc)
 
-set(MY_OBJECT_FILES
-  ${LLAMACPP_SRC_DIR}/ggml.o
-  ${LLAMACPP_SRC_DIR}/llama.o
-  ${LLAMACPP_SRC_DIR}/common.o
-  ${LLAMACPP_SRC_DIR}/ggml-quants.o
-  ${LLAMACPP_SRC_DIR}/ggml-alloc.o
-  ${LLAMACPP_SRC_DIR}/grammar-parser.o
-  ${LLAMACPP_SRC_DIR}/console.o
-  ${LLAMACPP_SRC_DIR}/build-info.o
-  ${LLAMACPP_SRC_DIR}/ggml-backend.o
-
+FetchContent_Declare(
+  llama.cpp
+  GIT_REPOSITORY https://github.com/ggerganov/llama.cpp
+  GIT_TAG b2241
 )
+FetchContent_GetProperties(llama.cpp)
+
+if(NOT llama.cpp_POPULATED)
+  message(STATUS "Fetching llama.cpp...")
+  FetchContent_Populate(llama.cpp)
+  add_subdirectory(${llama.cpp_SOURCE_DIR} ${llama.cpp_BINARY_DIR})
+endif()
 
-target_sources(llamacpp_handler PRIVATE ${MY_OBJECT_FILES})
 target_include_directories(llamacpp_handler PUBLIC ${LLAMACPP_SRC_DIR})
-target_link_libraries(llamacpp_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES})
+target_link_libraries(llamacpp_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES} common llama)
diff --git a/examples/cpp/llamacpp/src/llamacpp_handler.cc b/examples/cpp/llamacpp/src/llamacpp_handler.cc
index 6917ee44cf..ce5703a9be 100644
--- a/examples/cpp/llamacpp/src/llamacpp_handler.cc
+++ b/examples/cpp/llamacpp/src/llamacpp_handler.cc
@@ -44,7 +44,7 @@ LlamaCppHandler::LoadModel(
   params.main_gpu = 0;
   params.n_gpu_layers = 35;
 
-  llama_backend_init(params.numa);
+  llama_backend_init();
   ctx_params = llama_context_default_params();
   model_params = llama_model_default_params();
   llamamodel = llama_load_model_from_file(params.model.c_str(), model_params);
diff --git a/ts_scripts/install_dependencies.py b/ts_scripts/install_dependencies.py
index 3296844482..f32d456f46 100644
--- a/ts_scripts/install_dependencies.py
+++ b/ts_scripts/install_dependencies.py
@@ -64,7 +64,9 @@
     "xz",
     "openssl",
     "libsodium",
-    "llv",
+    "icu4c",
+    "libomp",
+    "llvm",
 )
 
 CPP_DARWIN_DEPENDENCIES_LINK = (
@@ -286,13 +288,13 @@ def install_cpp_dependencies(self):
         os.system(f"brew install -f {' '.join(CPP_DARWIN_DEPENDENCIES)}")
         os.system(f"brew link {' '.join(CPP_DARWIN_DEPENDENCIES_LINK)}")
         os.system(
-            'ln -s "$(brew --prefix llvm)/bin/clang-format" "/usr/local/bin/clang-format"'
+            f'{self.sudo_cmd} ln -s "$(brew --prefix llvm)/bin/clang-format" "/usr/local/bin/clang-format"'
         )
         os.system(
-            'ln -s "$(brew --prefix llvm)/bin/clang-tidy" "/usr/local/bin/clang-tidy"'
+            f'{self.sudo_cmd} ln -s "$(brew --prefix llvm)/bin/clang-tidy" "/usr/local/bin/clang-tidy"'
         )
         os.system(
-            'ln -s "$(brew --prefix llvm)/bin/clang-apply-replacements" "/usr/local/bin/clang-apply-replacements"'
+            f'{self.sudo_cmd} ln -s "$(brew --prefix llvm)/bin/clang-apply-replacements" "/usr/local/bin/clang-apply-replacements"'
         )
 
     def install_neuronx_driver(self):
diff --git a/ts_scripts/print_env_info.py b/ts_scripts/print_env_info.py
index afb71b6660..0e74a61661 100644
--- a/ts_scripts/print_env_info.py
+++ b/ts_scripts/print_env_info.py
@@ -43,6 +43,8 @@
 
 npm_env = {"npm_pkg_version": []}
 
+cpp_env = {"LIBRARY_PATH": ""}
+
 
 def get_nvidia_smi():
     # Note: nvidia-smi is currently available only on Windows and Linux
@@ -284,6 +286,16 @@ def get_torch_model_archiver():
     return version
 
 
+def get_library_path():
+    platform = get_platform()
+    if platform == "darwin":
+        return os.environ.get("DYLD_LIBRARY_PATH", "")
+    elif platform == "linux":
+        return os.environ.get("LD_LIBRARY_PATH", "")
+    else:
+        return ""
+
+
 def populate_torchserve_env(torch_pkg):
     for pkg in torch_pkg:
         if pkg.split("==")[0] == "torch":
@@ -338,6 +350,10 @@ def populate_npm_env():
     npm_env["npm_pkg_version"] = get_npm_packages()
 
 
+def populate_cpp_env():
+    cpp_env["LIBRARY_PATH"] = get_library_path()
+
+
 def populate_env_info():
     # torchserve packages
     _, torch_list_output = get_pip_packages("torch")
@@ -361,6 +377,9 @@ def populate_env_info():
     if get_platform() == "darwin":
         populate_npm_env()
 
+    if get_platform() in ("darwin", "linux"):
+        populate_cpp_env()
+
 
 env_info_fmt = """
 ------------------------------------------------------------------------------------------
@@ -403,11 +422,17 @@ def populate_env_info():
 {npm_pkg_version}
 """
 
+cpp_env_info_fmt = """
+Environment:
+library_path (LD_/DYLD_): {LIBRARY_PATH}
+"""
+
 
 def get_pretty_env_info(branch_name):
     global env_info_fmt
     global cuda_info_fmt
     global npm_info_fmt
+    global cpp_env_info_fmt
     populate_env_info()
     env_dict = {
         **torchserve_env,
@@ -415,6 +440,7 @@ def get_pretty_env_info(branch_name):
         **java_env,
         **os_info,
         "torchserve_branch": branch_name,
+        **cpp_env,
     }
 
     if TORCH_AVAILABLE and torch.cuda.is_available():
@@ -425,6 +451,9 @@ def get_pretty_env_info(branch_name):
         env_dict.update(npm_env)
         env_info_fmt = env_info_fmt + "\n" + npm_info_fmt
 
+    if get_platform() in ("darwin", "linux"):
+        env_info_fmt = env_info_fmt + "\n" + cpp_env_info_fmt
+
     return env_info_fmt.format(**env_dict)
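Taken together, the updated CI job above implies the following local dev flow for a CPU build; a sketch, assuming a fresh checkout with submodules initialized:

```bash
# Mirrors the steps in ci-cpu-cpp.yml (minus the CI-only bind mount of _build).
python ts_scripts/install_dependencies.py --environment=dev --cpp
python ts_scripts/install_from_src.py
python ts_scripts/print_env_info.py   # now also reports LD_/DYLD_LIBRARY_PATH
cd cpp && ./build.sh
```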