Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implement Dynamic routing and Endpoints #2

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
130 changes: 130 additions & 0 deletions configs/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
from typing import List, Literal, Optional, Iterable, Type
from pydantic import BaseModel, Field, create_model, field_validator

# Type names accepted in the YAML configs for schema properties.
# "X[]" denotes a list of X; each name is mapped to a real Python type
# when the pydantic request/response models are built.
TypeHint = Literal[
    "str",
    "int",
    "float",
    "bool",
    "str[]",
    "int[]",
    "float[]",
    "bool[]",
]


class Property(BaseModel):
    """One field of a dynamically generated pydantic model."""

    title: str  # field name on the generated model
    type: TypeHint  # declared type, e.g. "str" or "int[]"
    prompt: Optional[str] = None  # becomes the field description for output models


class OutputSchema(BaseModel):
    """Describes the response model an endpoint returns."""

    name: str  # class name of the generated pydantic model
    prompt: Optional[str] = None  # NOTE(review): not read anywhere in this module
    properties: List[Property]


class InputSchema(BaseModel):
    """Describes the request-body model an endpoint accepts."""

    name: str  # class name of the generated pydantic model
    properties: List[Property]


class PromptMessage(BaseModel):
    """One chat message template; content is a Jinja2 template string."""

    role: str  # e.g. "system" or "user"
    content: str  # Jinja2 template rendered against the request data


class Config(BaseModel):
    """One dynamically-routed endpoint: where it lives (``path``), what it
    accepts (``input_schema``), what it returns (``output_schema``), and the
    prompt/model used to produce the output.
    """

    path: str  # absolute URL path, e.g. "/v1/name"
    output_schema: OutputSchema
    input_schema: InputSchema
    prompt: List[PromptMessage]  # rendered against the validated request body
    model: str = "gpt-4-turbo"

    @field_validator("path")
    def validate_path(cls, v: str) -> str:
        """Require an absolute path.

        Raises ValueError instead of using ``assert`` (asserts are stripped
        under ``python -O``); pydantic converts it to a ValidationError
        exactly as it did the AssertionError.
        """
        if not v.startswith("/"):
            raise ValueError("Path must be absolute")
        return v

    @staticmethod
    def _resolve_type(hint: TypeHint) -> type:
        """Map a TypeHint string (e.g. "int[]") to the Python type handed to
        pydantic's ``create_model``.

        Single source of truth: this table was previously duplicated in both
        model-building methods.
        """
        types: dict = {
            "str": str,
            "int": int,
            "float": float,
            "bool": bool,
            "str[]": List[str],
            "int[]": List[int],
            "float[]": List[float],
            "bool[]": List[bool],
        }
        return types[hint]

    def create_output_model(self) -> Type[BaseModel]:
        """Build the response model; each property's prompt becomes the
        field description (surfaced to the LLM via instructor)."""
        return create_model(
            self.output_schema.name,
            **{
                prop.title: (
                    self._resolve_type(prop.type),
                    Field(
                        ...,
                        title=prop.title,
                        description=prop.prompt,
                    ),
                )
                for prop in self.output_schema.properties
            },  # type: ignore
        )  # type: ignore

    def create_input_model(self) -> Type[BaseModel]:
        """Build the request-body model from ``input_schema``."""
        return create_model(
            self.input_schema.name,
            **{
                prop.title: (
                    self._resolve_type(prop.type),
                    Field(
                        ...,
                        title=prop.title,
                    ),
                )
                for prop in self.input_schema.properties
            },  # type: ignore
        )  # type: ignore

    def messages(self, data: BaseModel) -> List[dict]:
        """Render the prompt templates with the validated request data."""
        # Local import keeps jinja2 out of module import time, as before.
        from jinja2 import Template

        return [
            {
                "role": message.role,
                "content": Template(message.content).render(**data.model_dump()),
            }
            for message in self.prompt
        ]


def load_configs() -> Iterable[Config]:
    """Yield a Config for every ``*.yaml`` file under this package directory.

    The endpoint path is the file's path relative to this directory with the
    extension removed, e.g. ``configs/v1/name.yaml`` -> ``/v1/name``.
    """
    import os
    import yaml

    cur_path = os.path.dirname(__file__)

    for root, _dirs, files in os.walk(cur_path):
        for filename in files:
            if not filename.endswith(".yaml"):
                continue
            file_path = os.path.join(root, filename)

            # splitext strips only the final extension; the original
            # ``split(".")[0]`` truncated at the FIRST dot anywhere in the
            # path (e.g. a dotted directory name would corrupt the route).
            stem = os.path.splitext(file_path)[0]
            # Slice the known prefix instead of str.replace, which could
            # also rewrite a matching substring deeper in the path; and
            # normalize os.sep so routes use "/" on every platform.
            api_path = stem[len(cur_path):].replace(os.sep, "/")

            with open(file_path, "r") as f:
                content = yaml.safe_load(f)

            yield Config.model_validate(dict(path=api_path, **content))
24 changes: 24 additions & 0 deletions configs/v1/name.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Endpoint /v1/name: extract a user's name and age from free text.
output_schema:
  name: UserDetail
  prompt: "Extracting a user"
  properties:
    - title: name
      type: str
      prompt: "What is the user's name?"
    - title: age
      type: int
      prompt: "How old is the user?"

input_schema:
  name: Request
  properties:
    - title: data
      type: str

# Chat messages; content is a Jinja2 template rendered with the input fields.
prompt:
  - role: system
    content: "You are a name extractor"
  - role: user
    content: "extract {{data}}"

model: gpt-4-turbo
24 changes: 24 additions & 0 deletions configs/v2/character.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Endpoint /v2/character: generate a character (name + friends) from a show.
output_schema:
  name: UserDetail
  prompt: "Extracting a user"
  properties:
    - title: name
      type: str
      prompt: "What is the user's name?"
    - title: friends
      type: str[]
      prompt: "some friends."

input_schema:
  name: Request
  properties:
    - title: show
      type: str

# Chat messages; content is a Jinja2 template rendered with the input fields.
prompt:
  - role: system
    content: "You are character generator"
  - role: user
    content: "Create a person from this show: {{show}}"

model: gpt-4-turbo
91 changes: 52 additions & 39 deletions run.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,45 +5,58 @@


import instructor
from configs import load_configs
import openai

app = FastAPI()
client = instructor.from_openai(openai.OpenAI(), model="gpt-4-turbo-preview")


class Property(BaseModel):
    """A free-form name/value attribute attached to a User."""

    name: str
    value: str


class User(BaseModel):
    """Structured user record extracted from free text."""

    name: str
    age: int
    properties: List[Property]


@app.post("/v1/extract_user", response_model=User)
def extract_user(text: str):
    """Extract a structured User from the given free-form text."""
    prompt = {"role": "user", "content": f"Extract user from `{text}`"}
    return client.chat.completions.create(
        messages=[prompt],
        response_model=User,
    )


@app.post("/v1/extract_user_stream")
def extract_user_stream(text: str):
    """Stream partial User extractions as server-sent events."""
    partials = client.chat.completions.create_partial(
        messages=[
            {"role": "user", "content": f"Extract user from `{text}`"},
        ],
        response_model=User,
    )

    # Lazily format each partial result as an SSE "data:" frame.
    events = (f"data: {p.model_dump_json()}\n\n" for p in partials)
    return StreamingResponse(events, media_type="text/event-stream")
client = instructor.from_openai(openai.OpenAI())


def _register_endpoints(config) -> None:
    """Register the four endpoints for one Config.

    Binding happens inside this function so each handler closes over its own
    ``config``/models.  The original registered handlers directly in the
    ``for`` loop, so every handler captured the loop variables by reference
    (late-binding closure) and ALL routes would have executed with the last
    config's prompt, model, and response model.
    """
    OutputModel = config.create_output_model()
    InputModel = config.create_input_model()
    path = config.path

    @app.post(path, response_model=OutputModel)
    def extract_data(payload: InputModel):
        """Extract a single object from the request data."""
        return client.chat.completions.create(
            model=config.model,
            messages=config.messages(payload),
            response_model=OutputModel,
        )

    @app.post(f"{path}/list")
    def extract_data_list(payload: InputModel):
        """Extract a list of objects, materialized before responding."""
        objs = client.chat.completions.create_iterable(
            model=config.model,
            messages=config.messages(payload),
            response_model=OutputModel,
        )
        return list(objs)

    @app.post(f"{path}/list/stream")
    def extract_data_list_stream(payload: InputModel):
        """Stream each extracted object as a server-sent event."""

        def stream():
            # create_iterable manages streaming itself; the explicit
            # stream=True the original passed here (but not in /list) was
            # redundant and inconsistent, so it is dropped.
            for obj in client.chat.completions.create_iterable(
                model=config.model,
                messages=config.messages(payload),
                response_model=OutputModel,
            ):
                # StreamingResponse expects str/bytes chunks; the original
                # yielded raw pydantic objects here.  Serialize as SSE,
                # matching the /stream endpoint below.
                yield f"data: {obj.model_dump_json()}\n\n"

        return StreamingResponse(stream(), media_type="text/event-stream")

    @app.post(f"{path}/stream")
    def extract_data_stream(payload: InputModel):
        """Stream partial versions of the object as server-sent events."""
        partials = client.chat.completions.create_partial(
            model=config.model,
            messages=config.messages(payload),
            response_model=OutputModel,
        )

        def stream():
            for partial in partials:
                yield f"data: {partial.model_dump_json()}\n\n"

        return StreamingResponse(stream(), media_type="text/event-stream")


for config in load_configs():
    _register_endpoints(config)