Add app tests
SYusupov committed Sep 16, 2024
1 parent 861f104 · commit 3d5c665
Showing 5 changed files with 173 additions and 31 deletions.
48 changes: 32 additions & 16 deletions .github/workflows/ci.yaml
@@ -34,17 +34,15 @@ jobs:
          flake8 .
  ## Building the docker image
  docker_build:
    name: Build and Push Docker Image
    runs-on: ubuntu-latest
    needs: lint

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Clean up Docker environment
        run: docker system prune -af --volumes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

@@ -59,18 +57,36 @@ jobs:
          docker build -t ${{ secrets.DOCKERHUB_USERNAME }}/logicgpt:${{ github.sha }} .
          docker push ${{ secrets.DOCKERHUB_USERNAME }}/logicgpt:${{ github.sha }}
  # ## Testing if the API works correctly
  # test_api:
  #   needs: docker_build # Only run this after the Docker build is successful
  #   runs-on: ubuntu-latest
  ## Testing if the API works correctly
  test_api:
    name: Test API
    needs: docker_build # Only run this after the Docker build is successful
    runs-on: ubuntu-latest

  #   steps:
  #     - name: Checkout Code
  #       uses: actions/checkout@v3
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

  #     - name: Set up Docker Buildx
  #       uses: docker/setup-buildx-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

  #     - name: Test API
  #       run: |
  #         docker compose up --build
      - name: Start the API with Docker Compose
        run: |
          cd app
          docker compose up --build -d
      - name: Wait for API to be Ready
        run: |
          until curl -s http://localhost:8000 | grep "Hello"; do
            echo "Waiting for API to be ready..."
            sleep 5
          done
      - name: Run API Tests
        run: |
          cd app
          pytest test_api.py
      - name: Tear Down Docker Compose
        run: |
          cd app
          docker compose down
3 changes: 1 addition & 2 deletions main.py → app/main.py
@@ -1,10 +1,9 @@
from model import model_inference
from .model import model_inference
from fastapi import FastAPI
from typing import Optional

app = FastAPI()


@app.get("/")
def read_root():
    return {"Hello": "World"}
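The tests added further down post to an /ask endpoint that lies outside the hunk shown above. As context only, here is a minimal sketch of what such an endpoint in app/main.py might look like, assuming a Pydantic request model; the field names instruction and input come from test_api.py, while the model name, return type, and everything else are assumptions, not the committed code.

# Hypothetical sketch only -- the /ask endpoint actually committed in app/main.py is not shown in this diff.
from fastapi import FastAPI
from fastapi.responses import PlainTextResponse
from pydantic import BaseModel

from .model import model_inference  # same import as in the diff above


class AskRequest(BaseModel):
    # Field names taken from the request bodies in app/test_api.py below.
    instruction: str
    input: str = ""


app = FastAPI()  # already defined in app/main.py; repeated here so the sketch stands alone


@app.post("/ask", response_class=PlainTextResponse)
def ask(request: AskRequest) -> str:
    # Delegate to the llama.cpp wrapper; the tests only check that the returned
    # text contains a "Reasoned Response:" section followed by some output.
    return model_inference(request.instruction, request.input)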
27 changes: 14 additions & 13 deletions model.py → app/model.py
@@ -1,6 +1,20 @@
from llama_cpp import Llama
import os

platypus_prompt = """
Below is a question or task that requires logical reasoning to solve,
along with additional context or information. Provide a detailed and
well-reasoned response that demonstrates clear logical thinking.
### Question/Task:
{}
### Input:
{}
### Reasoned Response:
{}"""

num_cores = os.cpu_count()
n_threads = max(1, num_cores // 2)
print('n_threads', n_threads)
@@ -16,19 +30,6 @@


def model_inference(instruction: str, input: str):
    platypus_prompt = """
Below is a question or task that requires logical reasoning to solve,
along with additional context or information. Provide a detailed and
well-reasoned response that demonstrates clear logical thinking.
### Question/Task:
{}
### Input:
{}
### Reasoned Response:
{}"""

    input = platypus_prompt.format(
        instruction,
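For reference, this is roughly how the module-level template above gets filled when model_inference runs. The question text is a made-up placeholder, and leaving the third slot empty (for the model's response) is an assumption, since the end of the format call is not shown in this diff.

# Illustrative sketch; platypus_prompt is the module-level template defined in app/model.py above.
prompt = platypus_prompt.format(
    "How many prime numbers are less than 20?",  # instruction (hypothetical example)
    "",                                          # optional extra input/context
    "",                                          # response slot left empty for the model to complete
)
print(prompt)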
125 changes: 125 additions & 0 deletions app/test_api.py
@@ -0,0 +1,125 @@
from fastapi.testclient import TestClient
from .main import app

client = TestClient(app)

long_instruction = """
Babe was a professional baseball player who was known both for his prowess
at the plate and his perceived "heart of gold." One day, Babe was
visiting a sick boy named Jimmy in the hospital. Babe was touched by
Jimmy's will to live despite a very poor prognosis. In a moment of
weakness, Babe told Jimmy that in consideration of Jimmy's courage, he
would do anything that Jimmy asked. Jimmy's eyes momentarily gleamed as
he asked Babe to "hit a homer for me in your next game." Babe replied,
"Sure kid." As Babe was leaving Jimmy's hospital room, Jimmy's father,
Joe, pulled Babe aside and told Babe, "It would mean a lot to Jimmy if
you would hit a home run for him in your next game. The medicinal value
of raising Jimmy's spirits would be priceless." Babe replied, "Hey man,
we all have problems. I don't work for the Make a Wish Foundation."
Undaunted, Joe repeated that it would really raise Jimmy's spirits if
Babe would hit a homer, and as incentive, Joe told Babe that he would pay
Babe $ 5,000$ if Babe did hit a home run in his next game. Babe replied,
"You've got a deal." To raise his chances of collecting the $ 5,000$ from
Joe, Babe took extra batting practice before his next game, and the
practice paid off because in his next game, Babe hit two home runs.
During a post-game interview, Babe explained, "I did it for little Jimmy,
who is in the hospital." After showering, Babe went directly to Joe's
house and asked Joe for $ 5,000$. Babe's contract with his ball club
does not forbid him from accepting money from fans for good performance.
If Joe refuses to pay and Babe brings an action against Joe for damages,
which of the following is correct under the modern trend in contract law?
A. Babe can recover the $ 5,000$ because the preexisting duty rule does
not apply where the duty is owed to a third person. B. Babe can recover
the $ 5,000$ if he can prove that the value of the home run to Jimmy is
at least $ 5,000$. C. Babe cannot recover from Joe because Babe had a
preexisting duty to use his best efforts to hit home runs. D. Babe cannot
recover from Joe because, even under the modern trend, moral
consideration is not valid."""

# Test the root endpoint
def test_read_root():
    response = client.get("/")
    assert response.status_code == 200
    assert response.json() == {"Hello": "World"}

# Test valid model inference
def test_valid_inference():
    instruction = """
How many four-digit numbers greater than 2999 can be formed such that the
product of the middle two digits exceeds 5?"""
    input_text = ""
    response = client.post(
        "/ask",
        json={"instruction": instruction, "input": input_text}
    )

    assert response.status_code == 200
    response_data = response.text
    assert "Reasoned Response" in response_data
    # Check if there is text after "Reasoned Response:"
    reasoned_response_part = response_data.split("Reasoned Response:")[-1].strip()
    assert len(reasoned_response_part) > 0  # Ensure that the model generated some output

# Test inference with input provided
def test_inference_with_long_instruction_and_input():

    input_text = "Choose A, B, C or D as your solution."
    response = client.post(
        "/ask",
        json={"instruction": long_instruction, "input": input_text}
    )

    assert response.status_code == 200
    response_data = response.text
    assert "Reasoned Response" in response_data
    # Check if there is text after "Reasoned Response:"
    reasoned_response_part = response_data.split("Reasoned Response:")[-1].strip()
    assert len(reasoned_response_part) > 0

# Test inference with missing instruction
def test_inference_missing_instruction():
    input_text = "Choose A, B, C or D as your solution."
    response = client.post(
        "/ask",
        json={"instruction": "", "input": input_text}
    )

    assert response.status_code == 422  # FastAPI will return a 422 if required fields are missing or invalid

# Test inference with a long input text
def test_inference_with_long_input():
    response = client.post(
        "/ask",
        json={"instruction": long_instruction, "input": ""}
    )

    assert response.status_code == 200
    response_data = response.text
    assert "Reasoned Response" in response_data
    # Check if there is text after "Reasoned Response:"
    reasoned_response_part = response_data.split("Reasoned Response:")[-1].strip()
    assert len(reasoned_response_part) > 0

# Test an invalid request structure
def test_invalid_request_structure():
    response = client.post(
        "/ask",
        json={"invalid_field": "test"}
    )
    assert response.status_code == 422  # FastAPI should raise a 422 error for missing required fields

# Test when model returns no tokens (edge case)
def test_empty_model_response():
    instruction = ""
    input_text = ""
    response = client.post(
        "/ask",
        json={"instruction": instruction, "input": input_text}
    )

    assert response.status_code == 200
    response_data = response.text
    assert "Reasoned Response" in response_data
    # Check if there is text after "Reasoned Response:"
    reasoned_response_part = response_data.split("Reasoned Response:")[-1].strip()
    assert len(reasoned_response_part) > 0
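
A note on running these tests outside CI: test_api.py imports the app with a relative import (from .main import app), so pytest has to treat app/ as a package. A minimal local invocation, assuming app/ contains an __init__.py (that file is not shown in this diff):

# Hypothetical local runner; assumes app/__init__.py exists so `from .main import app` resolves.
import sys

import pytest

# Equivalent to running `python -m pytest app/test_api.py -q` from the repository root.
sys.exit(pytest.main(["app/test_api.py", "-q"]))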
1 change: 1 addition & 0 deletions requirements.txt
@@ -15,6 +15,7 @@ llama_cpp_python==0.2.90
# for the application
fastapi
uvicorn==0.30.6
pytest

# for evaluation with llama_cpp and lm-evaluation-harness
lm-eval
