Skip to content

Commit 2d3a47e

Browse files
Radovan Fuchs (radofuchs)
authored and committed
setup e2e test env
1 parent f6cc203 commit 2d3a47e

File tree

3 files changed

+296
-0
lines changed

3 files changed

+296
-0
lines changed

.github/workflows/e2e_tests.yaml

Lines changed: 227 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,227 @@
# .github/workflows/e2e_test.yml
#
# End-to-end test environment: generates the lightspeed-stack and llama-stack
# config files on the runner, brings both services up with docker compose,
# waits for them to become reachable, then runs the e2e suite.
name: E2E Tests

# SECURITY NOTE(review): pull_request_target runs in the BASE repository
# context with access to secrets. The default actions/checkout below fetches
# the base ref (not the PR head), so untrusted PR code is not executed with
# secrets — do NOT add `ref: ${{ github.event.pull_request.head.sha }}` to
# the checkout step without switching this trigger to plain `pull_request`.
on: [push, pull_request_target]

jobs:
  e2e_tests:
    runs-on: ubuntu-latest
    # Job-level env: inherited by every step, including the docker compose
    # invocation (compose reads ${OPENAI_API_KEY} from the step environment).
    # Per-step env duplicates were removed — this single definition suffices.
    env:
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

    steps:
      - uses: actions/checkout@v4

      # Generate lightspeed-stack.yaml (lightspeed-stack service config).
      - uses: 1arp/create-a-file-action@0.4.5
        with:
          path: '.'
          isAbsolutePath: false
          file: 'lightspeed-stack.yaml'
          content: |
            name: foo bar baz
            service:
              host: 0.0.0.0
              port: 8080
              auth_enabled: false
              workers: 1
              color_log: true
              access_log: true
            llama_stack:
              # Uses a remote llama-stack service
              # The instance would have already been started with a llama-stack-run.yaml file
              use_as_library_client: false
              # Alternative for "as library use"
              # use_as_library_client: true
              # library_client_config_path: <path-to-llama-stack-run.yaml-file>
              url: http://llama-stack:8321
              api_key: xyzzy
            user_data_collection:
              feedback_disabled: false
              feedback_storage: "/tmp/data/feedback"
              transcripts_disabled: false
              transcripts_storage: "/tmp/data/transcripts"
              data_collector:
                enabled: false
                ingress_server_url: null
                ingress_server_auth_token: null
                ingress_content_service_name: null
                collection_interval: 7200  # 2 hours in seconds
                cleanup_after_send: true
                connection_timeout_seconds: 30
            authentication:
              module: "noop"

      # Generate run.yaml (llama-stack runtime config).
      # NOTE(review): the ${{ secrets.OPENAI_API_KEY }} expression below is
      # expanded by GitHub before the file is written, so the secret is
      # materialized on disk inside the runner workspace. Log output of the
      # file relies on GitHub's automatic secret masking.
      - uses: 1arp/create-a-file-action@0.4.5
        with:
          path: '.'
          isAbsolutePath: false
          file: 'run.yaml'
          content: |
            version: '2'
            image_name: simplest-llamastack-app
            apis:
            - agents
            - datasetio
            - eval
            - inference
            - post_training
            - safety
            - scoring
            - telemetry
            - tool_runtime
            - vector_io
            benchmarks: []
            container_image: null
            datasets: []
            external_providers_dir: null
            inference_store:
              db_path: /app-root/.llama/distributions/ollama/inference_store.db
              type: sqlite
            logging: null
            metadata_store:
              db_path: /app-root/.llama/distributions/ollama/registry.db
              namespace: null
              type: sqlite
            providers:
              agents:
              - config:
                  persistence_store:
                    db_path: /app-root/.llama/distributions/ollama/agents_store.db
                    namespace: null
                    type: sqlite
                  responses_store:
                    db_path: /app-root/.llama/distributions/ollama/responses_store.db
                    type: sqlite
                provider_id: meta-reference
                provider_type: inline::meta-reference
              datasetio:
              - config:
                  kvstore:
                    db_path: /app-root/.llama/distributions/ollama/huggingface_datasetio.db
                    namespace: null
                    type: sqlite
                provider_id: huggingface
                provider_type: remote::huggingface
              - config:
                  kvstore:
                    db_path: /app-root/.llama/distributions/ollama/localfs_datasetio.db
                    namespace: null
                    type: sqlite
                provider_id: localfs
                provider_type: inline::localfs
              eval:
              - config:
                  kvstore:
                    db_path: /app-root/.llama/distributions/ollama/meta_reference_eval.db
                    namespace: null
                    type: sqlite
                provider_id: meta-reference
                provider_type: inline::meta-reference
              inference:
              - provider_id: openai
                provider_type: remote::openai
                config:
                  api_key: ${{ secrets.OPENAI_API_KEY }}
              post_training:
              - config:
                  checkpoint_format: huggingface
                  device: cpu
                  distributed_backend: null
                provider_id: huggingface
                provider_type: inline::huggingface
              safety:
              - config:
                  excluded_categories: []
                provider_id: llama-guard
                provider_type: inline::llama-guard
              scoring:
              - config: {}
                provider_id: basic
                provider_type: inline::basic
              - config: {}
                provider_id: llm-as-judge
                provider_type: inline::llm-as-judge
              - config:
                  openai_api_key: '******'
                provider_id: braintrust
                provider_type: inline::braintrust
              telemetry:
              - config:
                  service_name: 'lightspeed-stack'
                  sinks: sqlite
                  sqlite_db_path: /app-root/.llama/distributions/ollama/trace_store.db
                provider_id: meta-reference
                provider_type: inline::meta-reference
              tool_runtime:
              - provider_id: model-context-protocol
                provider_type: remote::model-context-protocol
                config: {}
              vector_io:
              - config:
                  kvstore:
                    db_path: /app-root/.llama/distributions/ollama/faiss_store.db
                    namespace: null
                    type: sqlite
                provider_id: faiss
                provider_type: inline::faiss
            scoring_fns: []
            server:
              auth: null
              host: null
              port: 8321
              quota: null
              tls_cafile: null
              tls_certfile: null
              tls_keyfile: null
            shields: []
            vector_dbs: []

            models:
            - model_id: gpt-4o-mini
              provider_id: openai
              model_type: llm
              provider_model_id: gpt-4o-mini

      # Debug: confirm the generated config files landed in the workspace.
      # (Secret values in run.yaml are masked by GitHub in the log output.)
      - name: list files
        run: |
          ls
          cat lightspeed-stack.yaml
          cat run.yaml

      - name: Run service manually
        run: |
          # Debug: Check if environment variable is available for docker-compose
          echo "OPENAI_API_KEY is set: $([ -n "$OPENAI_API_KEY" ] && echo 'YES' || echo 'NO')"
          echo "OPENAI_API_KEY length: ${#OPENAI_API_KEY}"

          docker compose --version
          docker compose up -d

      # Poll for readiness instead of a fixed sleep: resilient to slow
      # container boots, and returns as soon as the endpoint answers.
      - name: Wait for services
        run: |
          echo "Waiting for services to be healthy..."
          for _ in $(seq 1 30); do
            if curl -fs http://localhost:8080/v1/models > /dev/null; then
              echo "Service is up."
              break
            fi
            sleep 2
          done

      - name: Quick connectivity test
        run: |
          echo "Testing basic connectivity before full test suite..."
          curl -f http://localhost:8080/v1/models || {
            echo "❌ Basic connectivity failed - showing logs before running full tests"
            docker compose logs --tail=30
            exit 1
          }

      - name: Run e2e tests
        run: |
          echo "Installing test dependencies..."
          pip install uv
          uv sync

          echo "Running comprehensive e2e test suite..."
          make test-e2e

docker-compose.yaml

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
# Two-service e2e environment: a llama-stack backend (built locally from
# test.containerfile) fronted by the lightspeed-stack image, joined on a
# dedicated bridge network so lightspeed-stack can reach http://llama-stack:8321.
services:
  llama-stack:
    container_name: llama-stack
    build:
      context: .
      dockerfile: test.containerfile
    # :Z relabels the bind mount for SELinux hosts.
    volumes:
      - ./run.yaml:/app-root/run.yaml:Z
    ports:
      - "8321:8321"  # Expose llama-stack on 8321 (adjust if needed)
    # OPENAI_API_KEY is substituted from the host environment at compose time.
    environment:
      OPENAI_API_KEY: ${OPENAI_API_KEY}
    networks:
      - lightspeednet

  lightspeed-stack:
    container_name: lightspeed-stack
    image: quay.io/lightspeed-core/lightspeed-stack:latest
    volumes:
      - ./lightspeed-stack.yaml:/app-root/lightspeed-stack.yaml:Z
    ports:
      - "8080:8080"
    environment:
      OPENAI_API_KEY: ${OPENAI_API_KEY}
    # Start order only — compose does not wait for llama-stack to be healthy.
    depends_on:
      - llama-stack
    networks:
      - lightspeednet

networks:
  lightspeednet:
    driver: bridge

test.containerfile

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
# vim: set filetype=dockerfile
#
# Test image for running llama-stack via uv on top of UBI 9 minimal.
FROM registry.access.redhat.com/ubi9/ubi-minimal

# NOTE(review): APP_ROOT is declared but never referenced below — either use
# it (e.g. WORKDIR ${APP_ROOT}) or drop it. Left in place to preserve the
# build interface.
ARG APP_ROOT=/app-root

# The uv installer drops binaries into /root/.local/bin.
ENV PATH="$PATH:/root/.local/bin"

# COPY, not ADD: identical behavior for a plain local file, without ADD's
# remote-URL and auto-extraction magic (Docker best practice).
COPY run.yaml ./

# Clean metadata afterwards to keep the layer small.
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs \
    python3.12 python3.12-devel python3.12-pip git tar && \
    microdnf clean all

# Install uv (curl is provided by ubi-minimal's curl-minimal — verify on base
# image updates).
RUN curl -LsSf https://astral.sh/uv/install.sh | sh

# Sanity check: fail the build early if uv is not on PATH.
RUN uv -h

RUN uv venv && \
    uv pip install llama-stack \
    fastapi \
    opentelemetry-sdk \
    opentelemetry-exporter-otlp \
    opentelemetry-instrumentation \
    aiosqlite \
    litellm \
    uvicorn \
    blobfile \
    datasets \
    sqlalchemy \
    faiss-cpu \
    mcp \
    autoevals \
    psutil \
    torch \
    peft \
    trl

# NOTE(review): "run.yaml" resolves against the default workdir (/), i.e. the
# copy baked in above — NOT the /app-root/run.yaml that docker-compose bind
# mounts. Confirm which copy is intended before relying on the mount.
CMD ["uv", "run", "llama", "stack", "run", "run.yaml"]

0 commit comments

Comments
 (0)