
google.ai.generativelanguage #534

Open
FeliciaAnnKelleyTaylorGV opened this issue Nov 5, 2024 · 0 comments

Comments

@FeliciaAnnKelleyTaylorGV

`"""
Install an additional SDK for JSON schema support Google AI Python SDK

$ pip install google.ai.generativelanguage
"""

import os
import time
import google.generativeai as genai
from google.ai.generativelanguage_v1beta.types import content

genai.configure(api_key=os.environ["GEMINI_API_KEY"])

def upload_to_gemini(path, mime_type=None):
"""Uploads the given file to Gemini.

See https://ai.google.dev/gemini-api/docs/prompting_with_media
"""
file = genai.upload_file(path, mime_type=mime_type)
print(f"Uploaded file '{file.display_name}' as: {file.uri}")
return file

def wait_for_files_active(files):
"""Waits for the given files to be active.

Some files uploaded to the Gemini API need to be processed before they can be
used as prompt inputs. The status can be seen by querying the file's "state"
field.

This implementation uses a simple blocking polling loop. Production code
should probably employ a more sophisticated approach.
"""
print("Waiting for file processing...")
for name in (file.name for file in files):
file = genai.get_file(name)
while file.state.name == "PROCESSING":
print(".", end="", flush=True)
time.sleep(10)
file = genai.get_file(name)
if file.state.name != "ACTIVE":
raise Exception(f"File {file.name} failed to process")
print("...all files ready")
print()

Create the model

generation_config = {
"temperature": 2,
"top_p": 0.95,
"top_k": 40,
"max_output_tokens": 8192,
"response_schema": content.Schema(
type = content.Type.OBJECT,
properties = {
"null": content.Schema(
type = content.Type.OBJECT,
properties = {
"string": content.Schema(
type = content.Type.OBJECT,
properties = {
},
),
},
),
},
),
"response_mime_type": "application/json",
}

model = genai.GenerativeModel(
model_name="gemini-1.5-pro",
generation_config=generation_config,
)

TODO Make these files available on the local file system

You may need to update the file paths

files = [
upload_to_gemini("https://github.com/gitkraken/vscode-gitlens.wiki.git", mime_type="application/zip"),
upload_to_gemini("Unknown File", mime_type="application/octet-stream"),
]

Some files have a processing delay. Wait for them to be ready.

wait_for_files_active(files)

chat_session = model.start_chat(
history=[
{
"role": "user",
"parts": [
files[0],
],
},
{
"role": "model",
"parts": [
files[1],
],
},
]
)

response = chat_session.send_message("INSERT_INPUT_HERE")

print(response.text)`
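For context, here is a minimal sketch of how this export is typically adapted, reusing `upload_to_gemini`, `wait_for_files_active`, and `model` from the script above. `genai.upload_file` reads from the local file system (hence the TODO in the generated code), so the `wiki.zip` path and the prompt below are placeholders rather than part of the original snippet. Because `response_mime_type` is `"application/json"` and a `response_schema` is set, the reply text should be parseable as JSON:

```python
import json

# Hypothetical local archive; upload_to_gemini() above wraps genai.upload_file,
# which expects a path on the local file system.
wiki_file = upload_to_gemini("wiki.zip", mime_type="application/zip")

# Block until the File API finishes processing the upload.
wait_for_files_active([wiki_file])

# Start a chat with the uploaded file as the first user turn.
chat_session = model.start_chat(
    history=[
        {"role": "user", "parts": [wiki_file]},
    ]
)

# Placeholder prompt; with the JSON response schema configured above,
# the reply text should parse cleanly.
response = chat_session.send_message("Summarize the attached wiki as JSON.")
data = json.loads(response.text)
print(data)
```

Note that passing the remote Git URL directly to `upload_to_gemini`, as in the snippet above, is unlikely to work, since `genai.upload_file` expects a local path; the archive would need to be cloned or downloaded first.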
