diff --git a/samples/cache.py b/samples/cache.py
index b7edc5de3..82c4c1d7d 100644
--- a/samples/cache.py
+++ b/samples/cache.py
@@ -39,6 +39,57 @@ def test_cache_create(self):
         # [END cache_create]
         cache.delete()
 
+    def test_cache_create_from_name(self):
+        # [START cache_create_from_name]
+        document = genai.upload_file(path=media / "a11.txt")
+        model_name = "gemini-1.5-flash-001"
+        cache = genai.caching.CachedContent.create(
+            model=model_name,
+            system_instruction="You are an expert analyzing transcripts.",
+            contents=[document],
+        )
+        cache_name = cache.name  # Save the name for later
+
+        # Later
+        cache = genai.caching.CachedContent.get(cache_name)
+        apollo_model = genai.GenerativeModel.from_cached_content(cache)
+        response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
+        print(response.text)
+        # [END cache_create_from_name]
+        cache.delete()
+
+    def test_cache_create_from_chat(self):
+        # [START cache_create_from_chat]
+        model_name = "gemini-1.5-flash-001"
+        system_instruction = "You are an expert analyzing transcripts."
+
+        model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
+        chat = model.start_chat()
+        document = genai.upload_file(path=media / "a11.txt")
+        response = chat.send_message(["Hi, could you summarize this transcript?", document])
+        print("\n\nmodel: ", response.text)
+        response = chat.send_message(
+            ["Okay, could you tell me more about the trans-lunar injection"]
+        )
+        print("\n\nmodel: ", response.text)
+
+        # To cache the conversation so far, pass the chat history as the list of "contents".
+        cache = genai.caching.CachedContent.create(
+            model=model_name,
+            system_instruction=system_instruction,
+            contents=chat.history,
+        )
+        model = genai.GenerativeModel.from_cached_content(cached_content=cache)
+
+        # Continue the chat where you left off.
+        chat = model.start_chat()
+        response = chat.send_message(
+            "I didn't understand that last part, could you explain it in simpler language?"
+        )
+        print("\n\nmodel: ", response.text)
+        # [END cache_create_from_chat]
+        cache.delete()
+
     def test_cache_delete(self):
         # [START cache_delete]
         document = genai.upload_file(path=media / "a11.txt")
@@ -100,57 +151,6 @@ def test_cache_update(self):
         # [END cache_update]
         cache.delete()
 
-    def test_cache_create_from_name(self):
-        # [START cache_create_from_name]
-        document = genai.upload_file(path=media / "a11.txt")
-        model_name = "gemini-1.5-flash-001"
-        cache = genai.caching.CachedContent.create(
-            model=model_name,
-            system_instruction="You are an expert analyzing transcripts.",
-            contents=[document],
-        )
-        cache_name = cache.name  # Save the name for later
-
-        # Later
-        cache = genai.caching.CachedContent.get(cache_name)
-        apollo_model = genai.GenerativeModel.from_cached_content(cache)
-        response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
-        print(response.text)
-        # [END cache_create_from_name]
-        cache.delete()
-
-    def test_cache_chat(self):
-        # [START cache_chat]
-        model_name = "gemini-1.5-flash-001"
-        system_instruction = "You are an expert analyzing transcripts."
-
-        model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
-        chat = model.start_chat()
-        document = genai.upload_file(path=media / "a11.txt")
-        response = chat.send_message(["Hi, could you summarize this transcript?", document])
-        print("\n\nmodel: ", response.text)
-        response = chat.send_message(
-            ["Okay, could you tell me more about the trans-lunar injection"]
-        )
-        print("\n\nmodel: ", response.text)
-
-        # To cache the conversation so far, pass the chat history as the list of "contents".
-        cache = genai.caching.CachedContent.create(
-            model=model_name,
-            system_instruction=system_instruction,
-            contents=chat.history,
-        )
-        model = genai.GenerativeModel.from_cached_content(cached_content=cache)
-
-        # Continue the chat where you left off.
-        chat = model.start_chat()
-        response = chat.send_message(
-            "I didn't understand that last part, could you explain it in simpler language?"
-        )
-        print("\n\nmodel: ", response.text)
-        # [END cache_chat]
-        cache.delete()
-
 
 if __name__ == "__main__":
     absltest.main()
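The moved samples create caches with the default lifetime and clean up explicitly via cache.delete(). A minimal sketch of bounding a cache's lifetime up front instead, assuming this SDK version's ttl parameter on CachedContent.create and its update method (the media path and the timedelta values below are illustrative, and an API key is expected in the environment as in the other samples):

import datetime
import pathlib

import google.generativeai as genai

media = pathlib.Path("third_party")  # assumed location of a11.txt
document = genai.upload_file(path=media / "a11.txt")

# Create the cache with an explicit time-to-live instead of the default.
cache = genai.caching.CachedContent.create(
    model="gemini-1.5-flash-001",
    system_instruction="You are an expert analyzing transcripts.",
    contents=[document],
    ttl=datetime.timedelta(hours=1),  # expire automatically after one hour
)

# Extend the lifetime later if the cache is still needed, then clean up.
cache.update(ttl=datetime.timedelta(hours=2))
print(cache)
cache.delete()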