Fix cpp ci (#2958)
* Remove install_dependencies from build.sh call

* remove sudo from folly build

* Align cpp wf file with cpu wf file

* Make yaml statically linked dependency

* move cpp binaries to location based on ts.__file__

* Add FAQ to cpp readme

* Update readme to expect installed TS

* Fix libtorch install on mac

* Add cpp build faqs

* Remove llama.cpp as submodule and add through fetch_content

* Disable metal in llamacpp example

* Fix dangling pointer error in cpp worker

* Remove kineto from mac build

* Add llvm as mac cpp dependencies (req for clang-tidy)

* Enable fPIC for llama.cpp example

* Add sudo to clang-tidy link creation

* Add undefined symbol faq

* fix llv typo

* Add install from source to cpp_ci

* Correct install from source to cpp_ci

* bump up pyyaml version to avoid cython 3 issue yaml/pyyaml#702

* Move cpp ci to M1 mac

* Run workflow on self hosted runner

* Disable mac ci for cpp

* Fix workflow syntax

* Run on cpp-ci

* Remove sudo

* Add apt update for act docker

* print library_path in print_env_info.py

* print end in cpp ci workflow

* Run on github runner

* Add upterm session to workflow

* Move post mortem upterm session before build

* Remove first upterm session

* ci debugging

* Observe disk space

* move _build to /mnt on github runner

* fix permission denied

* use mount instead of ln

* Adjust llamacpp api

* Reactivate set -e

* Remove note on env variable in cpp readme

* Fix linting issue in print_env_info.py

* Cleanup ci-cpu-cpp.yml

* quieten wget

* Add build clean section in cpp/readme

* Readjust to llama.cpp api
mreso authored Mar 6, 2024
1 parent 8b51d1e commit 2e26323
Showing 10 changed files with 137 additions and 144 deletions.
41 changes: 33 additions & 8 deletions .github/workflows/ci-cpu-cpp.yml
@@ -16,17 +16,42 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macOS-latest]
os: [ubuntu-20.04]
steps:
# - name: Setup Python for M1
# if: matrix.os == 'macos-14'
# uses: actions/setup-python@v5
# with:
# python-version: '3.10'
- name: Setup Python for all other OS
if: matrix.os != 'macos-14'
uses: actions/setup-python@v5
with:
python-version: 3.9
architecture: x64
- name: Setup Java 17
uses: actions/setup-java@v3
with:
distribution: 'zulu'
java-version: '17'
- name: Checkout TorchServe
uses: actions/checkout@v2
- name: Install libtorch - macOS
if: matrix.os == 'macOS-latest'
run: |
brew install libtorch
uses: actions/checkout@v3
with:
submodules: recursive
# - name: Install libtorch - macOS
# if: matrix.os == 'macOS-latest'
# run: |
# brew install libtorch
- name: Install dependencies
run: |
python ts_scripts/install_dependencies.py --environment=dev --cpp
sudo apt update && python ts_scripts/install_dependencies.py --environment=dev --cpp
- name: Install TorchServe
run: |
python ts_scripts/install_from_src.py
- name: Print Env
run: |
python ts_scripts/print_env_info.py
- name: Build
run: |
cd cpp && ./build.sh
cd cpp && rm -rf _build && sudo mkdir /mnt/_build && sudo chmod 777 /mnt/_build && mkdir _build && sudo mount --bind /mnt/_build _build
./build.sh
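The `mount --bind` step above is a disk-space workaround: the hosted runner's root volume is small, so `cpp/_build` is backed by the larger `/mnt` scratch volume without changing any path the build scripts expect. A minimal sketch of the same trick, assuming a typical GitHub-hosted Ubuntu runner layout:

```bash
# Compare free space on the root volume and the /mnt scratch volume
df -h / /mnt

# Back cpp/_build with /mnt so large build artifacts land on the bigger disk
sudo mkdir -p /mnt/_build && sudo chmod 777 /mnt/_build
mkdir -p cpp/_build
sudo mount --bind /mnt/_build cpp/_build
```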
3 changes: 0 additions & 3 deletions .gitmodules
@@ -1,9 +1,6 @@
[submodule "third_party/google/rpc"]
path = third_party/google/rpc
url = https://github.com/googleapis/googleapis.git
[submodule "cpp/third-party/llama.cpp"]
path = cpp/third-party/llama.cpp
url = https://github.com/ggerganov/llama.cpp.git
[submodule "cpp/third-party/llama2.c"]
path = cpp/third-party/llama2.c
url = https://github.com/karpathy/llama2.c
19 changes: 15 additions & 4 deletions cpp/CMakeLists.txt
@@ -18,10 +18,6 @@ if(CLANG_FORMAT_EXE)
${PROJECT_SOURCE_DIR}/test/*.hh
)

add_custom_target(format
COMMAND
${CLANG_FORMAT_EXE} -i -style=google ${ALL_CXX_SOURCE_FILES}
)
endif()


@@ -31,6 +27,21 @@ find_package(fmt REQUIRED)
find_package(gflags REQUIRED)
find_package(Torch REQUIRED)

include(FetchContent)

FetchContent_Declare(
yaml-cpp
GIT_REPOSITORY https://github.com/jbeder/yaml-cpp.git
GIT_TAG 0.8.0 # Can be a tag (yaml-cpp-x.x.x), a commit hash, or a branch name (master)
)
FetchContent_GetProperties(yaml-cpp)

if(NOT yaml-cpp_POPULATED)
message(STATUS "Fetching yaml-cpp...")
FetchContent_Populate(yaml-cpp)
add_subdirectory(${yaml-cpp_SOURCE_DIR} ${yaml-cpp_BINARY_DIR})
endif()

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")

include_directories(${TORCH_INCLUDE_DIRS})
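With yaml-cpp now pulled in via `FetchContent` and built inside the project tree as a statically linked dependency, the separate yaml-cpp install and symlink steps in `build.sh` become unnecessary (they are removed below). A quick way to confirm where the fetched checkout lands, assuming the default `FetchContent` layout and a `cpp/_build` build directory:

```bash
# FetchContent places checkouts under <build dir>/_deps by default
ls cpp/_build/_deps/yaml-cpp-src cpp/_build/_deps/yaml-cpp-build
```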
26 changes: 12 additions & 14 deletions cpp/README.md
@@ -5,36 +5,34 @@
* cmake version: 3.18+
## Installation and Running TorchServe CPP

These installation instructions assume that TorchServe is already installed through pip/conda/source. If that is not the case, install it after the `Install dependencies` step through your preferred method.

### Install dependencies
```
cd serve
python ts_scripts/install_dependencies.py --cpp --environment dev [--cuda=cu121|cu118]
```
### Building the backend
Don't forget to install or update TorchServe at this point if it wasn't previously installed.
```
## Dev Build
cd cpp
./build.sh [-g cu121|cu118]
## Install TorchServe from source
cd ..
python ts_scripts/install_from_src.py
```
### Set Environment Var
#### On Mac
```
export DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
```
#### On Ubuntu
```
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/_build/_deps/libtorch/lib
```

### Run TorchServe
```
mkdir model_store
torchserve --ncs --start --model-store model_store
```
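A quick smoke test after starting the server, assuming the default inference and management ports (8080 and 8081):

```bash
# Inference API health check
curl http://localhost:8080/ping
# List models registered with the management API
curl http://localhost:8081/models
```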

### Clean the build directory
To clean the build directory in order to rebuild from scratch, simply delete the cpp/_build directory with
```
rm -rf cpp/_build
```

## Backend
The TorchServe C++ backend runs as a process, similar to the [TorchServe Python backend](https://github.com/pytorch/serve/tree/master/ts). By default, the C++ backend supports TorchScript models. Other platforms such as MXNet and ONNX can be supported through custom handlers, following the TorchScript example [src/backends/handler/torch_scripted_handler.hh](https://github.com/pytorch/serve/blob/master/cpp/src/backends/handler/torch_scripted_handler.hh).
### Custom Handler
@@ -89,11 +87,11 @@ python -c "import ts; from pathlib import Path; print((Path(ts.__file__).parent
3. Make sure that the conda/venv environment you activated for building is also the one you use to run TorchServe.
Q: Build on Mac fails with `Library not loaded: @rpath/libomp.dylib`
A: Install libomp with brew and link it in /usr/local/lib:
```bash
brew install libomp
sudo ln -s /opt/homebrew/opt/libomp/lib/libomp.dylib /usr/local/lib/libomp.dylib
```
Q: When loading a handler which uses a model exported with torch._export.aot_compile, the handler dies with "error: Error in dlopen: MODEL.SO : undefined symbol: SOME_SYMBOL".
A: Make sure that you are using matching libtorch and PyTorch versions for inference and export, respectively.
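One way to check this version-match condition is to compare the PyTorch version used for export against the libtorch the backend was built with; the path below assumes the default dev-build layout under `cpp/_build`:

```bash
# PyTorch version in the environment used for torch._export.aot_compile
python -c "import torch; print(torch.__version__)"
# libtorch version the C++ backend links against
cat cpp/_build/_deps/libtorch/build-version
```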
124 changes: 28 additions & 96 deletions cpp/build.sh
@@ -28,14 +28,9 @@ function install_folly() {
echo -e "${COLOR_GREEN}[ INFO ] Building Folly ${COLOR_OFF}"
cd $FOLLY_SRC_DIR

if [ "$PLATFORM" = "Linux" ]; then
SUDO="sudo"
elif [ "$PLATFORM" = "Mac" ]; then
SUDO=""
fi
$SUDO ./build/fbcode_builder/getdeps.py install-system-deps --recursive
./build/fbcode_builder/getdeps.py install-system-deps --recursive

$SUDO ./build/fbcode_builder/getdeps.py build \
./build/fbcode_builder/getdeps.py build \
--allow-system-packages \
--scratch-path $FOLLY_BUILD_DIR \
--extra-cmake-defines='{"CMAKE_CXX_FLAGS": "-fPIC -D_GLIBCXX_USE_CXX11_ABI=1"}'
@@ -47,36 +42,29 @@ function install_folly() {
echo "$FOLLY_BUILD_DIR/installed"
}

function install_kineto() {
if [ "$PLATFORM" = "Linux" ]; then
echo -e "${COLOR_GREEN}[ INFO ] Skip install kineto on Linux ${COLOR_OFF}"
elif [ "$PLATFORM" = "Mac" ]; then
KINETO_SRC_DIR=$BASE_DIR/third-party/kineto

if [ ! -d "$KINETO_SRC_DIR/libkineto/build" ] ; then
cd $KINETO_SRC_DIR/libkineto
mkdir build && cd build
cmake ..
make install
fi
fi

cd "$BWD" || exit
}

function install_libtorch() {
cd "$DEPS_DIR" || exit
TORCH_VERSION="2.2.1"
if [ -d "$DEPS_DIR/libtorch" ]; then
RAW_VERSION=`cat "$DEPS_DIR/libtorch/build-version"`
VERSION=`cat "$DEPS_DIR/libtorch/build-version" | cut -d "+" -f 1`
if [ "$USE_NIGHTLIES" = "true" ] && [[ ! "${RAW_VERSION}" =~ .*"dev".* ]]; then
rm -rf "$DEPS_DIR/libtorch"
elif [ "$USE_NIGHTLIES" == "" ] && [ "$VERSION" != "$TORCH_VERSION" ]; then
rm -rf "$DEPS_DIR/libtorch"
fi
fi
if [ "$PLATFORM" = "Mac" ]; then
if [ ! -d "$DEPS_DIR/libtorch" ]; then
if [[ $(uname -m) == 'x86_64' ]]; then
echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Mac x86_64 ${COLOR_OFF}"
wget https://download.pytorch.org/libtorch/cpu/libtorch-macos-x86_64-${TORCH_VERSION}.zip
unzip libtorch-macos-x86_64-${TORCH_VERSION}.zip
wget -q https://download.pytorch.org/libtorch/cpu/libtorch-macos-x86_64-${TORCH_VERSION}.zip
unzip -q libtorch-macos-x86_64-${TORCH_VERSION}.zip
rm libtorch-macos-x86_64-${TORCH_VERSION}.zip
else
echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Mac arm64 ${COLOR_OFF}"
wget https://download.pytorch.org/libtorch/cpu/libtorch-macos-arm64-${TORCH_VERSION}.zip
unzip libtorch-macos-arm64-${TORCH_VERSION}.zip
wget -q https://download.pytorch.org/libtorch/cpu/libtorch-macos-arm64-${TORCH_VERSION}.zip
unzip -q libtorch-macos-arm64-${TORCH_VERSION}.zip
rm libtorch-macos-arm64-${TORCH_VERSION}.zip
fi
fi
@@ -86,27 +74,17 @@ function install_libtorch() {
echo -e "${COLOR_RED}[ ERROR ] Unknown platform: $PLATFORM ${COLOR_OFF}"
exit 1
else # Linux
if [ -d "$DEPS_DIR/libtorch" ]; then
RAW_VERSION=`cat "$DEPS_DIR/libtorch/build-version"`
VERSION=`cat "$DEPS_DIR/libtorch/build-version" | cut -d "+" -f 1`
if [ "$USE_NIGHTLIES" = "true" ] && [[ ! "${RAW_VERSION}" =~ .*"dev".* ]]; then
rm -rf "$DEPS_DIR/libtorch"
elif [ "$USE_NIGHTLIES" == "" ] && [ "$VERSION" != "$TORCH_VERSION" ]; then
rm -rf "$DEPS_DIR/libtorch"
fi
fi
if [ ! -d "$DEPS_DIR/libtorch" ]; then
cd "$DEPS_DIR" || exit
echo -e "${COLOR_GREEN}[ INFO ] Install libtorch on Linux ${COLOR_OFF}"
if [ "$USE_NIGHTLIES" == true ]; then
URL=https://download.pytorch.org/libtorch/nightly/${CUDA}/libtorch-cxx11-abi-shared-with-deps-latest.zip
else
URL=https://download.pytorch.org/libtorch/${CUDA}/libtorch-cxx11-abi-shared-with-deps-${TORCH_VERSION}%2B${CUDA}.zip
fi
wget $URL
wget -q $URL
ZIP_FILE=$(basename "$URL")
ZIP_FILE="${ZIP_FILE//%2B/+}"
unzip $ZIP_FILE
unzip -q $ZIP_FILE
rm $ZIP_FILE
fi
echo -e "${COLOR_GREEN}[ INFO ] libtorch is installed ${COLOR_OFF}"
@@ -115,58 +93,22 @@ function install_libtorch() {
cd "$BWD" || exit
}

function install_yaml_cpp() {
YAML_CPP_SRC_DIR=$BASE_DIR/third-party/yaml-cpp
YAML_CPP_BUILD_DIR=$DEPS_DIR/yaml-cpp-build

if [ ! -d "$YAML_CPP_BUILD_DIR" ] ; then
echo -e "${COLOR_GREEN}[ INFO ] Building yaml-cpp ${COLOR_OFF}"

if [ "$PLATFORM" = "Linux" ]; then
SUDO="sudo"
elif [ "$PLATFORM" = "Mac" ]; then
SUDO=""
fi

mkdir $YAML_CPP_BUILD_DIR
cd $YAML_CPP_BUILD_DIR
cmake $YAML_CPP_SRC_DIR -DYAML_BUILD_SHARED_LIBS=ON -DYAML_CPP_BUILD_TESTS=OFF -DCMAKE_CXX_FLAGS="-fPIC"
$SUDO make install

echo -e "${COLOR_GREEN}[ INFO ] yaml-cpp is installed ${COLOR_OFF}"
fi

cd "$BWD" || exit
}

function build_llama_cpp() {
BWD=$(pwd)
LLAMA_CPP_SRC_DIR=$BASE_DIR/third-party/llama.cpp
cd "${LLAMA_CPP_SRC_DIR}"
if [ "$PLATFORM" = "Mac" ]; then
make LLAMA_METAL=OFF -j
else
make -j
fi
cd "$BWD" || exit
}

function prepare_test_files() {
echo -e "${COLOR_GREEN}[ INFO ]Preparing test files ${COLOR_OFF}"
local EX_DIR="${TR_DIR}/examples/"
rsync -a --link-dest=../../test/resources/ ${BASE_DIR}/test/resources/ ${TR_DIR}/
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin" ]; then
wget https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
wget -q https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
fi
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin" ]; then
wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
fi
# PT2.2 torch.export does not support Mac
if [ "$PLATFORM" = "Linux" ]; then
if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
fi
local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
@@ -221,12 +163,11 @@ function build() {

# Build torchserve_cpp with cmake
cd "$BWD" || exit
YAML_CPP_CMAKE_DIR=$DEPS_DIR/yaml-cpp-build
FOLLY_CMAKE_DIR=$DEPS_DIR/folly-build/installed
find $FOLLY_CMAKE_DIR -name "lib*.*" -exec ln -s "{}" $LIBS_DIR/ \;
if [ "$PLATFORM" = "Linux" ]; then
cmake \
-DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$YAML_CPP_CMAKE_DIR;$DEPS_DIR/libtorch" \
-DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$DEPS_DIR/libtorch" \
-DCMAKE_INSTALL_PREFIX="$PREFIX" \
"$MAYBE_BUILD_QUIC" \
"$MAYBE_BUILD_TESTS" \
@@ -242,8 +183,10 @@ function build() {
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/cuda/bin/nvcc
fi
elif [ "$PLATFORM" = "Mac" ]; then
export LIBRARY_PATH=${LIBRARY_PATH}:`brew --prefix icu4c`/lib:`brew --prefix libomp`/lib

cmake \
-DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$YAML_CPP_CMAKE_DIR;$DEPS_DIR/libtorch" \
-DCMAKE_PREFIX_PATH="$DEPS_DIR;$FOLLY_CMAKE_DIR;$DEPS_DIR/libtorch" \
-DCMAKE_INSTALL_PREFIX="$PREFIX" \
"$MAYBE_BUILD_QUIC" \
"$MAYBE_BUILD_TESTS" \
@@ -252,9 +195,10 @@
"$MAYBE_USE_STATIC_DEPS" \
"$MAYBE_LIB_FUZZING_ENGINE" \
"$MAYBE_NIGHTLIES" \
"-DLLAMA_METAL=OFF" \
..

export LIBRARY_PATH=${LIBRARY_PATH}:/usr/local/opt/icu4c/lib

else
# TODO: Windows
echo -e "${COLOR_RED}[ ERROR ] Unknown platform: $PLATFORM ${COLOR_OFF}"
@@ -282,16 +226,8 @@ function symlink_torch_libs() {
fi
}

function symlink_yaml_cpp_lib() {
if [ "$PLATFORM" = "Linux" ]; then
ln -sf ${DEPS_DIR}/yaml-cpp-build/*.so* ${LIBS_DIR}
elif [ "$PLATFORM" = "Mac" ]; then
ln -sf ${DEPS_DIR}/yaml-cpp-build/*.dylib* ${LIBS_DIR}
fi
}

function install_torchserve_cpp() {
TARGET_DIR=$BASE_DIR/../ts/cpp/
TARGET_DIR=`python -c "import ts; from pathlib import Path; print(Path(ts.__file__).parent / 'cpp')"`

if [ -d $TARGET_DIR ]; then
rm -rf $TARGET_DIR
@@ -370,12 +306,8 @@ cd $BASE_DIR
git submodule update --init --recursive

install_folly
#install_kineto
install_libtorch
install_yaml_cpp
build_llama_cpp
prepare_test_files
build
symlink_torch_libs
symlink_yaml_cpp_lib
install_torchserve_cpp
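Since `install_torchserve_cpp` now derives its target from `ts.__file__` rather than a hardcoded source path, the C++ binaries follow whichever TorchServe installation the active environment resolves. A small sketch to locate and inspect that directory after a build, assuming TorchServe is importable:

```bash
# Resolve the install target the same way install_torchserve_cpp does
TS_CPP_DIR=$(python -c "import ts; from pathlib import Path; print(Path(ts.__file__).parent / 'cpp')")
ls "$TS_CPP_DIR"
```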
1 change: 0 additions & 1 deletion cpp/third-party/llama.cpp
Submodule llama.cpp deleted from cd4fdd
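Deleting the vendored checkout pairs with the `.gitmodules` change above. For reference, a common sequence for removing a submodule like this one (a sketch, not necessarily the exact commands used here):

```bash
# Deregister the submodule, remove its working tree, and clean cached metadata
git submodule deinit -f cpp/third-party/llama.cpp
git rm -f cpp/third-party/llama.cpp
rm -rf .git/modules/cpp/third-party/llama.cpp
```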
