🔧💥 #15 Apply API breaking changes from Glide v0.0.4-rc.1 #16

Merged: 5 commits, May 13, 2024
Changes from 1 commit
💥 #15 Apply the latest breaking API changes
roma-glushko committed May 13, 2024

Verified: this commit was created on GitHub.com and signed with GitHub’s verified signature.
commit e0554b72f4fcbc7ae7f38abed7766a2582f86ef6
examples/lang/chat_stream_async.py (4 changes: 2 additions & 2 deletions)
@@ -40,7 +40,7 @@ async def chat_stream() -> None:
             continue

         if err := message.error:
-            print(f"💥ERR: {err.message} (code: {err.err_code})")
+            print(f"💥ERR ({err.name}): {err.message}")
             print("🧹 Restarting the stream")
             continue

@@ -50,7 +50,7 @@ async def chat_stream() -> None:

         if last_msg and last_msg.chunk and last_msg.finish_reason:
             # LLM gen context
-            provider_name = last_msg.chunk.provider_name
+            provider_name = last_msg.chunk.provider_id
             model_name = last_msg.chunk.model_name
             finish_reason = last_msg.finish_reason
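For orientation, here is a minimal sketch of a consumer using the renamed fields from this example. Only the message attributes `error`, `chunk`, and `finish_reason`, plus `err.name`, `err.message`, `chunk.provider_id`, and `chunk.model_name` come from the diff above; the `print_stream` helper and its loosely typed `messages` argument are made up for illustration.

```python
from typing import Any, AsyncIterator


async def print_stream(messages: AsyncIterator[Any]) -> None:
    """Sketch: handle chat stream messages using the post-rename field names."""
    async for message in messages:
        if err := message.error:
            # err.err_code is gone; errors now carry a short name
            print(f"💥ERR ({err.name}): {err.message}")
            continue

        if (chunk := message.chunk) and message.finish_reason:
            # provider_name was renamed to provider_id on the chunk
            print(f"{chunk.provider_id}/{chunk.model_name} finished: {message.finish_reason}")
```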
pyproject.toml (4 changes: 2 additions & 2 deletions)
@@ -26,8 +26,8 @@ requires-python = ">=3.8"
 [project.urls]
 Homepage = "https://glide.einstack.ai/"
 Documentation = "https://glide.einstack.ai/"
-Repository = "https://github.com/me/spam.git"
-Issues = "https://github.com/EinStack/glide-python"
+Repository = "https://github.com/EinStack/glide-py.git"
+Issues = "https://github.com/EinStack/glide-py/issues/"

 [tool.pdm.version]
 source = "scm"
src/glide/exceptions.py (4 changes: 2 additions & 2 deletions)
@@ -29,7 +29,7 @@ class GlideChatStreamError(GlideError):
     Occurs when chat stream ends with an error
     """

-    def __init__(self, message: str, err_code: str) -> None:
+    def __init__(self, message: str, err_name: str) -> None:
         super().__init__(message)

-        self.err_code = err_code
+        self.err_name = err_name
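Downstream code that inspected the old `err_code` attribute needs the same rename. A small sketch, assuming the package imports as `glide` with the module layout shown above; the `describe` helper is illustrative only:

```python
from glide.exceptions import GlideChatStreamError


def describe(exc: GlideChatStreamError) -> str:
    # err_code no longer exists; the error name is stored as err_name,
    # and str(exc) still returns the message passed to __init__
    return f"stream failed ({exc.err_name}): {exc}"
```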
src/glide/lang/router_async.py (4 changes: 2 additions & 2 deletions)
@@ -82,8 +82,8 @@ async def chat_stream(
         if err := message.ended_with_err:
             # fail only on fatal errors that indicate stream stop
             raise GlideChatStreamError(
-                f"Chat stream {req.id} ended with an error: {err.message} (code: {err.err_code})",
-                err.err_code,
+                f"Chat stream {req.id} ended with an error ({err.name}): {err.message}",
+                err.name,
             )

         yield message  # returns content chunk and some error messages
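On the consuming side, only fatal errors surface as this exception, which now carries the error name; recoverable error messages are still yielded. A rough sketch, where `consume` and its `stream` argument are hypothetical stand-ins for client code:

```python
from glide.exceptions import GlideChatStreamError


async def consume(stream) -> None:
    """Sketch: recoverable errors arrive as yielded messages, fatal ones raise."""
    try:
        async for message in stream:
            ...  # handle chunks and non-fatal error messages here
    except GlideChatStreamError as exc:
        # the second constructor argument is now the error name, kept as err_name
        print(f"stream stopped ({exc.err_name}): {exc}")
```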
src/glide/lang/schemas.py (20 changes: 10 additions & 10 deletions)
@@ -8,7 +8,7 @@
 from pydantic import Field

 from glide.schames import Schema
-from glide.typing import RouterId, ProviderName, ModelName
+from glide.typing import RouterId, ProviderId, ModelName

 ChatRequestId = str
 Metadata = Dict[str, Any]
@@ -45,7 +45,7 @@ class ModelMessageOverride(Schema):
 class ChatRequest(Schema):
     message: ChatMessage
     message_history: List[ChatMessage] = Field(default_factory=list)
-    override: Optional[ModelMessageOverride] = None
+    override_params: Optional[ModelMessageOverride] = None


 class TokenUsage(Schema):
@@ -57,24 +57,24 @@ class TokenUsage(Schema):
 class ModelResponse(Schema):
     response_id: Dict[str, str]
     message: ChatMessage
-    token_count: TokenUsage
+    token_usage: TokenUsage


 class ChatResponse(Schema):
     id: ChatRequestId
-    created: datetime
-    provider: ProviderName
-    router: RouterId
+    created_at: datetime
+    provider_id: ProviderId
+    router_id: RouterId
     model_id: str
-    model: ModelName
+    model_name: ModelName
     model_response: ModelResponse


 class ChatStreamRequest(Schema):
     id: ChatRequestId = Field(default_factory=lambda: str(uuid.uuid4()))
     message: ChatMessage
     message_history: List[ChatMessage] = Field(default_factory=list)
-    override: Optional[ModelMessageOverride] = None
+    override_params: Optional[ModelMessageOverride] = None
     metadata: Optional[Metadata] = None


@@ -90,7 +90,7 @@ class ChatStreamChunk(Schema):

     model_id: str

-    provider_name: ProviderName
+    provider_id: ProviderId
     model_name: ModelName

     model_response: ModelChunkResponse
@@ -99,7 +99,7 @@ class ChatStreamChunk(Schema):

 class ChatStreamError(Schema):
     id: ChatRequestId
-    err_code: str
+    name: str
     message: str
     finish_reason: Optional[FinishReason] = None
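For client code, the renames above show up when building requests (`override` becomes `override_params`) and when reading responses. A small sketch of the response side, assuming the import path shown above; the `summarize` helper is illustrative only:

```python
from glide.lang.schemas import ChatResponse


def summarize(resp: ChatResponse) -> str:
    # created/provider/router/model are now created_at/provider_id/router_id/model_name,
    # and ModelResponse.token_count is now token_usage
    usage = resp.model_response.token_usage
    return (
        f"{resp.router_id} -> {resp.provider_id}/{resp.model_name} "
        f"at {resp.created_at:%H:%M:%S}, tokens: {usage}"
    )
```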
src/glide/schames.py (2 changes: 0 additions & 2 deletions)
@@ -1,7 +1,6 @@
 # Copyright EinStack
 # SPDX-License-Identifier: APACHE-2.0
 from pydantic import BaseModel, ConfigDict
-from pydantic.alias_generators import to_camel


 class Schema(BaseModel):
@@ -10,7 +9,6 @@ class Schema(BaseModel):
     """

     model_config = ConfigDict(
-        alias_generator=to_camel,
         populate_by_name=True,
         from_attributes=True,
         protected_namespaces=(),
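Dropping the `to_camel` alias generator means `Schema` subclasses no longer get camelCase aliases; the snake_case field names are used as-is for serialization. A before/after sketch; the `Example` model is made up for illustration:

```python
from glide.schames import Schema


class Example(Schema):
    provider_id: str


example = Example(provider_id="openai")
print(example.model_dump())  # {'provider_id': 'openai'}

# Before this change, alias_generator=to_camel meant dumping by alias
# produced camelCase keys instead:
#   example.model_dump(by_alias=True)  -> {'providerId': 'openai'}
```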
src/glide/typing.py (2 changes: 1 addition & 1 deletion)
@@ -2,5 +2,5 @@
 # SPDX-License-Identifier: APACHE-2.0

 RouterId = str
-ProviderName = str
+ProviderId = str
 ModelName = str