Fix lvms video-llama code issue (#654)
Signed-off-by: letonghan <letong.han@intel.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
letonghan and pre-commit-ci[bot] authored Sep 10, 2024
1 parent 90a3f4b commit 38abaab
Showing 2 changed files with 10 additions and 11 deletions.
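
To review the change locally, the commit hash shown above can be inspected with git; a minimal sketch, assuming the repository containing this commit is already cloned and fetched:

    # List the files touched by the commit and the size of each change
    git show --stat 38abaab
    # Show the full patch
    git show 38abaab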
2 changes: 1 addition & 1 deletion comps/lvms/video-llama/dependency/server.py
@@ -37,7 +37,7 @@
 streamer = None
 chat = None
 
-VIDEO_DIR = "/home/user/comps/lvms/video-llama/server/data"
+VIDEO_DIR = "/home/user/comps/lvms/video-llama/dependency/data"
 
 CFG_PATH = "video_llama_config/video_llama_eval_only_vl.yaml"
 MODEL_TYPE = "llama_v2"
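The corrected constant keeps VIDEO_DIR under the dependency/ directory the server itself lives in, rather than the old server/ path. A quick sanity check from the host, assuming the dependency container name used by the test script below:

    # ls fails if the directory the server is configured to write to does not exist
    docker exec test-comps-lvm-video-llama-dependency \
        ls /home/user/comps/lvms/video-llama/dependency/data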
19 changes: 9 additions & 10 deletions tests/lvms/test_lvms_video-llama.sh
@@ -11,14 +11,14 @@ ip_address=$(hostname -I | awk '{print $1}')
 function build_docker_images() {
     cd $WORKPATH
     echo $(pwd)
-    docker build --no-cache -t opea/video-llama-lvm-server:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/dependency/Dockerfile .
+    docker build --no-cache -t opea/video-llama-lvm-server:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/dependency/Dockerfile .
     if $? ; then
         echo "opea/video-llama-lvm-server built fail"
         exit 1
     else
         echo "opea/video-llama-lvm-server built successful"
     fi
-    docker build --no-cache -t opea/lvm-video-llama:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/Dockerfile .
+    docker build --no-cache -t opea/lvm-video-llama:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/Dockerfile .
     if $? ; then
         echo "opea/lvm-video-llama built fail"
         exit 1
@@ -41,23 +41,22 @@ function start_service() {
         -e https_proxy=$https_proxy \
         -e no_proxy=$no_proxy \
         -e llm_download="True" \
-        -v "/home/$USER/.cache:/home/user/.cache" \
         -v video-llama-model:/home/user/model \
-        opea/video-llama-lvm-server:latest
+        opea/video-llama-lvm-server:comps
 
     docker run -d --name="test-comps-lvm-video-llama" -p $server_port:9000 \
         --ipc=host \
         -e http_proxy=$http_proxy \
         -e https_proxy=$https_proxy \
         -e no_proxy=$no_proxy \
         -e LVM_ENDPOINT=$LVM_ENDPOINT \
-        opea/lvm-video-llama:latest
+        opea/lvm-video-llama:comps
 
     echo "Waiting for the LVM service to start"
 
     # check whether lvm dependency is fully ready
     n=0
     until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
-        docker logs test-comps-lvm-video-llama-dependency >> ${LOG_PATH}/lvm-video-llama-dependency.log
+        docker logs test-comps-lvm-video-llama-dependency &> ${LOG_PATH}/lvm-video-llama-dependency.log
         n=$((n+1))
         if grep -q "Uvicorn running on" ${LOG_PATH}/lvm-video-llama-dependency.log; then
             break
@@ -69,7 +68,7 @@
     # check whether lvm service is fully ready
     n=0
     until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
-        docker logs test-comps-lvm-video-llama >> ${LOG_PATH}/lvm-video-llama.log
+        docker logs test-comps-lvm-video-llama &> ${LOG_PATH}/lvm-video-llama.log
         n=$((n+1))
         if grep -q "Uvicorn running on" ${LOG_PATH}/lvm-video-llama.log; then
             break
@@ -88,8 +87,8 @@ function validate_microservice() {
         echo "Result correct."
     else
         echo "Result wrong."
-        docker logs test-comps-lvm-video-llama-dependency >> ${LOG_PATH}/video-llama-dependency.log
-        docker logs test-comps-lvm-video-llama >> ${LOG_PATH}/video-llama.log
+        docker logs test-comps-lvm-video-llama-dependency &> ${LOG_PATH}/lvm-video-llama-dependency.log
+        docker logs test-comps-lvm-video-llama &> ${LOG_PATH}/lvm-video-llama.log
         exit 1
     fi
 }
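
The switch from >> to &> in the log-collection lines does two things: &> captures stderr as well as stdout (docker logs forwards the container's stderr, where uvicorn typically prints its "Uvicorn running on" banner), and it overwrites the file on each write instead of appending, so the grep sees a fresh snapshot rather than an ever-growing log. A minimal illustration, with an assumed log path of /tmp/dep.log:

    # '>>' appends stdout only; lines the container wrote to stderr never reach the file
    docker logs test-comps-lvm-video-llama-dependency >> /tmp/dep.log

    # '&>' overwrites the file and captures both stdout and stderr
    docker logs test-comps-lvm-video-llama-dependency &> /tmp/dep.log
    grep -q "Uvicorn running on" /tmp/dep.log && echo "dependency ready"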
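Unrelated to this fix but visible in the surrounding script: if $? ; then asks bash to execute the exit status as a command, so the failure branch of the build checks never runs. A sketch of a conventional guard, reusing the image tag and Dockerfile from the test (an illustration, not part of the commit):

    # Test the build command directly instead of inspecting $? afterwards
    if ! docker build --no-cache -t opea/video-llama-lvm-server:comps \
        -f comps/lvms/video-llama/dependency/Dockerfile . ; then
        echo "opea/video-llama-lvm-server build failed"
        exit 1
    fi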
