@@ -25,6 +25,7 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      TELEMETRY_ENDPOINT: ${TELEMETRY_ENDPOINT}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     container_name: tei-embedding-gaudi-server
@@ -37,7 +38,7 @@ services:
       no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
-    command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
+    command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
   retriever:
     image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
@@ -55,6 +56,7 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      TELEMETRY_ENDPOINT: ${TELEMETRY_ENDPOINT}
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/tei-gaudi:1.5.0
@@ -76,7 +78,7 @@ services:
       HABANA_VISIBLE_DEVICES: all
       OMPI_MCA_btl_vader_single_copy_mechanism: none
       MAX_WARMUP_SEQUENCE_LENGTH: 512
-    command: --model-id ${RERANK_MODEL_ID} --auto-truncate
+    command: --model-id ${RERANK_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
   vllm-service:
     image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest}
     container_name: vllm-gaudi-server
@@ -97,12 +99,27 @@ services:
       test: ["CMD-SHELL", "curl -f http://$host_ip:8007/health || exit 1"]
       interval: 10s
       timeout: 10s
       retries: 100
     runtime: habana
     cap_add:
       - SYS_NICE
     ipc: host
-    command: --model $LLM_MODEL_ID --tensor-parallel-size 1 --host 0.0.0.0 --port 80 --block-size 128 --max-num-seqs 256 --max-seq_len-to-capture 2048
+    command: --model $LLM_MODEL_ID --tensor-parallel-size 1 --host 0.0.0.0 --port 80 --block-size 128 --max-num-seqs 256 --max-seq_len-to-capture 2048 --otlp-traces-endpoint=$OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
+  jaeger:
+    image: jaegertracing/all-in-one:latest
+    container_name: jaeger
+    ports:
+      - "16686:16686"
+      - "4317:4317"
+      - "4318:4318"
+      - "9411:9411"
+    ipc: host
+    environment:
+      no_proxy: ${no_proxy}
+      http_proxy: ${http_proxy}
+      https_proxy: ${https_proxy}
+      COLLECTOR_ZIPKIN_HOST_PORT: 9411
+    restart: unless-stopped
   chatqna-gaudi-backend-server:
     image: ${REGISTRY:-opea}/chatqna:${TAG:-latest}
     container_name: chatqna-gaudi-backend-server
@@ -128,6 +145,8 @@ services:
       - LLM_SERVER_PORT=${LLM_SERVER_PORT:-80}
       - LLM_MODEL=${LLM_MODEL_ID}
       - LOGFLAG=${LOGFLAG}
+      - ENABLE_OPEA_TELEMETRY=true
+      - TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
     ipc: host
     restart: always
   chatqna-gaudi-ui-server:
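
For context, the two variables referenced throughout the diff are expected to point at the jaeger service added above. The sketch below shows one plausible set of values; the hostnames and URL shapes are an assumption rather than part of the diff, only the port numbers are taken from the jaeger ports mapping.

# Sketch only (not part of the diff): plausible values for the telemetry variables.
# OPEA microservices push OTLP traces over HTTP to the collector (port 4318), while the
# TEI --otlp-endpoint and vLLM --otlp-traces-endpoint flags take an OTLP/gRPC endpoint
# (port 4317). The Jaeger UI itself is served on 16686.
TELEMETRY_ENDPOINT: http://jaeger:4318/v1/traces
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: grpc://jaeger:4317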