Update cache.py (#435)
MarkDaoust authored Jul 3, 2024
1 parent 3b78e31 · commit 42e968d
Showing 1 changed file with 51 additions and 51 deletions.

samples/cache.py
@@ -39,6 +39,57 @@ def test_cache_create(self):
         # [END cache_create]
         cache.delete()
 
+    def test_cache_create_from_name(self):
+        # [START cache_create_from_name]
+        document = genai.upload_file(path=media / "a11.txt")
+        model_name = "gemini-1.5-flash-001"
+        cache = genai.caching.CachedContent.create(
+            model=model_name,
+            system_instruction="You are an expert analyzing transcripts.",
+            contents=[document],
+        )
+        cache_name = cache.name  # Save the name for later
+
+        # Later
+        cache = genai.caching.CachedContent.get(cache_name)
+        apollo_model = genai.GenerativeModel.from_cached_content(cache)
+        response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
+        print(response.text)
+        # [END cache_create_from_name]
+        cache.delete()
+
+    def test_cache_create_from_chat(self):
+        # [START cache_create_from_chat]
+        model_name = "gemini-1.5-flash-001"
+        system_instruction = "You are an expert analyzing transcripts."
+
+        model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
+        chat = model.start_chat()
+        document = genai.upload_file(path=media / "a11.txt")
+        response = chat.send_message(["Hi, could you summarize this transcript?", document])
+        print("\n\nmodel: ", response.text)
+        response = chat.send_message(
+            ["Okay, could you tell me more about the trans-lunar injection"]
+        )
+        print("\n\nmodel: ", response.text)
+
+        # To cache the conversation so far, pass the chat history as the list of "contents".
+        cache = genai.caching.CachedContent.create(
+            model=model_name,
+            system_instruction=system_instruction,
+            contents=chat.history,
+        )
+        model = genai.GenerativeModel.from_cached_content(cached_content=cache)
+
+        # Continue the chat where you left off.
+        chat = model.start_chat()
+        response = chat.send_message(
+            "I didn't understand that last part, could you explain it in simpler language?"
+        )
+        print("\n\nmodel: ", response.text)
+        # [END cache_create_from_chat]
+        cache.delete()
+
     def test_cache_delete(self):
         # [START cache_delete]
         document = genai.upload_file(path=media / "a11.txt")
@@ -100,57 +151,6 @@ def test_cache_update(self):
         # [END cache_update]
         cache.delete()
 
-    def test_cache_create_from_name(self):
-        # [START cache_create_from_name]
-        document = genai.upload_file(path=media / "a11.txt")
-        model_name = "gemini-1.5-flash-001"
-        cache = genai.caching.CachedContent.create(
-            model=model_name,
-            system_instruction="You are an expert analyzing transcripts.",
-            contents=[document],
-        )
-        cache_name = cache.name  # Save the name for later
-
-        # Later
-        cache = genai.caching.CachedContent.get(cache_name)
-        apollo_model = genai.GenerativeModel.from_cached_content(cache)
-        response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
-        print(response.text)
-        # [END cache_create_from_name]
-        cache.delete()
-
-    def test_cache_chat(self):
-        # [START cache_chat]
-        model_name = "gemini-1.5-flash-001"
-        system_instruction = "You are an expert analyzing transcripts."
-
-        model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
-        chat = model.start_chat()
-        document = genai.upload_file(path=media / "a11.txt")
-        response = chat.send_message(["Hi, could you summarize this transcript?", document])
-        print("\n\nmodel: ", response.text)
-        response = chat.send_message(
-            ["Okay, could you tell me more about the trans-lunar injection"]
-        )
-        print("\n\nmodel: ", response.text)
-
-        # To cache the conversation so far, pass the chat history as the list of "contents".
-        cache = genai.caching.CachedContent.create(
-            model=model_name,
-            system_instruction=system_instruction,
-            contents=chat.history,
-        )
-        model = genai.GenerativeModel.from_cached_content(cached_content=cache)
-
-        # Continue the chat where you left off.
-        chat = model.start_chat()
-        response = chat.send_message(
-            "I didn't understand that last part, could you explain it in simpler language?"
-        )
-        print("\n\nmodel: ", response.text)
-        # [END cache_chat]
-        cache.delete()
-
 
 if __name__ == "__main__":
     absltest.main()

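Note: the samples in this diff run inside a test harness (absltest, with media pointing at the repo's sample files). As a minimal standalone sketch of the same cached-content flow, assuming an API key passed directly to genai.configure, a local transcript at the hypothetical path a11.txt, and that CachedContent.create accepts an optional ttl (an assumption about its parameters, not confirmed by this diff):

import datetime

import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # assumption: key supplied directly rather than via env var

# Upload a transcript and cache it together with a system instruction.
document = genai.upload_file(path="a11.txt")  # hypothetical local copy of the transcript
cache = genai.caching.CachedContent.create(
    model="gemini-1.5-flash-001",
    system_instruction="You are an expert analyzing transcripts.",
    contents=[document],
    ttl=datetime.timedelta(minutes=30),  # assumption: optional time-to-live at creation
)

# Ask questions through a model backed by the cached content.
model = genai.GenerativeModel.from_cached_content(cached_content=cache)
response = model.generate_content("Summarize this transcript in two sentences.")
print(response.text)

# Clean up when done.
cache.delete()

Creating the cache once and rebuilding models from it via from_cached_content is what keeps the large transcript from being re-sent with every request.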