Skip to content

Commit

Permalink
Add passthrough to OpenAI
Browse files Browse the repository at this point in the history
  • Loading branch information
olokobayusuf committed Sep 16, 2024
1 parent 75d062c commit 90cf949
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 35 deletions.
3 changes: 3 additions & 0 deletions Changelog.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,5 @@
## 0.0.2
+ Added `openai.embeddings.create` passthrough to OpenAI when `model` is not a valid Function predictor tag.

## 0.0.1
+ First pre-release.
23 changes: 0 additions & 23 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,29 +48,6 @@ embeddings = openai.embeddings.create(
> [!WARNING]
> Currently, only `openai.embeddings.create` is supported. Text generation is coming soon!
## Using the Anthropic Client Locally
To run text generation models locally using the Anthropic client, patch your `Anthropic` instance with the `locally` function and the following configuration:
```py
from anthropic import Anthropic
from fxn_llm import locally

# 💥 Create your Anthropic client
anthropic = Anthropic()

# 🔥 Make it local
anthropic = locally(anthropic, provider="anthropic")

# 🚀 Chat
message = anthropic.messages.create(
model="@meta/llama-3.1-8b-quant",
messages=[{ "role": "user", "content": "Hello, Llama" }],
max_tokens=1024,
)
```

> [!CAUTION]
> Anthropic support is not functional. It is still a work-in-progress.
___

## Useful Links
Expand Down
23 changes: 16 additions & 7 deletions fxn_llm/locally.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from fxn import Function
from numpy import float32
from numpy.typing import NDArray
from re import match
from types import MethodType
from typing import List, Literal, Optional, TypeVar

Expand Down Expand Up @@ -36,21 +37,29 @@ def locally (
if provider == "openai":
from openai.types import CreateEmbeddingResponse, Embedding
from openai.types.create_embedding_response import Usage
def embeddings_create (
embeddings_create_openai = client.embeddings.create
def embeddings_create_fxn (
self,
*,
input: str | List[str],
model: str,
dimensions: Optional[int]=None,
encoding_format: Optional[Literal["float", "base64"]]=None,
**kwargs
) -> CreateEmbeddingResponse:
encoding_format = encoding_format if encoding_format is not None else "float"
assert dimensions is None, "Explicit dimensionality is not yet supported"
assert encoding_format == "float", "Base64 encoding format is not yet supported"
# Check
if not match(r"^@[a-z0-9._-]+/[a-z0-9._-]+$", model):
return embeddings_create_openai(
input=input,
model=model,
**kwargs
)
# Check inputs
assert kwargs.get("dimensions", None) is None, "Explicit dimensionality is not supported"
assert kwargs.get("encoding_format", "float") == "float", "Base64 encoding format is not yet supported"
# Predict
input = [input] if isinstance(input, str) else input
prediction = fxn.predictions.create(tag=model, inputs={ "input": input })
embeddings: NDArray[float32] = prediction.results[0]
# Return
return CreateEmbeddingResponse(
data=[Embedding(
embedding=data.tolist(),
Expand All @@ -61,7 +70,7 @@ def embeddings_create (
object="list",
usage=Usage(prompt_tokens=0, total_tokens=0)
)
client.embeddings.create = MethodType(embeddings_create, client.embeddings)
client.embeddings.create = MethodType(embeddings_create_fxn, client.embeddings)
return client
elif provider == "anthropic":
pass
Expand Down
2 changes: 1 addition & 1 deletion fxn_llm/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
# Copyright © 2024 NatML Inc. All Rights Reserved.
#

# Library version (bumped for the OpenAI passthrough release).
# NOTE: the superseded `0.0.1` assignment was diff residue and is removed —
# two consecutive assignments would silently keep only the last one.
__version__ = "0.0.2"
20 changes: 16 additions & 4 deletions test/openai_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,23 @@
from openai import OpenAI
from openai.types import CreateEmbeddingResponse

def test_create_embeddings_locally ():
    """Verify that an embedding request for a Function predictor tag is served locally.

    The old-hunk residue lines (stale `def test_create_embeddings` header, the
    superseded `model="@nomic/..."` keyword that duplicated `model=tag`, and the
    duplicated isinstance assert) are removed; they made this block a SyntaxError.

    NOTE(review): this is an integration test — it presumably needs Function
    credentials in the environment to run the local prediction; confirm in CI.
    """
    openai = OpenAI()
    # Patch the client so predictor-tag models are handled by Function, not OpenAI.
    openai = locally(openai)
    tag = "@nomic/nomic-embed-text-v1.5-quant"
    embedding = openai.embeddings.create(
        model=tag,
        input="search_query: What is the capital of France?"
    )
    assert isinstance(embedding, CreateEmbeddingResponse), f"Embedding has invalid type: {type(embedding)}"
    assert embedding.model == tag, f"Embedding model is incorrect: {embedding.model}"
    # Local predictions report zero token usage (see Usage(prompt_tokens=0, total_tokens=0) in locally.py).
    assert embedding.usage.total_tokens == 0, f"Embedding usage is non zero: {embedding.usage.total_tokens}"

def test_create_embeddings_openai ():
    """Verify that a model which is not a Function predictor tag falls through to OpenAI.

    NOTE(review): live network test — presumably requires a valid
    ``OPENAI_API_KEY`` in the environment; confirm how CI provides it.
    """
    openai = OpenAI()
    # Patch the client; "text-embedding-3-large" does not match the
    # "@owner/name" predictor-tag pattern, so the original OpenAI
    # `embeddings.create` should be invoked.
    openai = locally(openai)
    embedding = openai.embeddings.create(
        model="text-embedding-3-large",
        input="What is the capital of France?"
    )
    # The passthrough must preserve the model reported by OpenAI.
    assert embedding.model == "text-embedding-3-large"

0 comments on commit 90cf949

Please sign in to comment.