Skip to content

Commit

Permalink
Fixing linting error 2
Browse files Browse the repository at this point in the history
  • Loading branch information
SYusupov committed Sep 15, 2024
1 parent f848f1c commit b29ad42
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 17 deletions.
10 changes: 2 additions & 8 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,14 @@
from model import model_inference

from fastapi import FastAPI

from typing import Union, Optional
from typing import Optional

app = FastAPI()


@app.get("/")
def read_root():
    """Serve the API root.

    Handles GET requests on ``/`` and responds with a fixed
    greeting payload, useful as a liveness check.
    """
    greeting = {"Hello": "World"}
    return greeting

# @app.get("/items/{item_id}")
# # whenever smb does get request with /items/item_id where item_id is an integer, do things below
# def read_item(item_id: int, q: Union[str, None] = None):
# return {"item_id": item_id, "q": q}

@app.post('/ask')
def ask(instruction: str, input: Optional[str] = ""):
Expand Down
19 changes: 10 additions & 9 deletions model.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,16 @@
llm = Llama(
model_path="./model/unsloth.Q4_K_M.gguf",
use_mmap=True,
use_gpu=True,
use_gpu=True,
n_batch=4,
n_threads=n_threads
)


def model_inference(instruction: str, input: str):
platypus_prompt = """Below is a question or task that requires logical reasoning to solve, along with additional context or information. Provide a detailed and well-reasoned response that demonstrates clear logical thinking.
platypus_prompt = """Below is a question or task that requires logical reasoning to solve,
along with additional context or information. Provide a detailed and well-reasoned response
that demonstrates clear logical thinking.
### Question/Task:
{}
Expand All @@ -27,12 +30,10 @@ def model_inference(instruction: str, input: str):
{}"""

input = platypus_prompt.format(
# "What is a famous tall tower in Paris?", # instruction
instruction,
# "", # input
input,
"", # output - leave this blank for generation!
)

instruction,
input,
"" # output - leave this blank for generation!
)

output = llm(input, max_tokens=100, echo=True)
return output

0 comments on commit b29ad42

Please sign in to comment.