Commit 59663c8

Add tests
Change-Id: I249188fa585bd9b7193efa48b1cfca20b8a79821
mayureshagashe2105 committed May 24, 2024
1 parent e1d8c7a commit 59663c8
Showing 2 changed files with 28 additions and 1 deletion.
27 changes: 27 additions & 0 deletions tests/test_caching.py
@@ -163,6 +163,33 @@ def test_expiration_types_for_create_cached_content(self, ttl):
        self.assertIsInstance(self.observed_requests[-1], glm.CreateCachedContentRequest)
        self.assertIsInstance(cc, caching.CachedContent)

    @parameterized.named_parameters(
        [
            dict(
                testcase_name="upper_case",
                name="Test-cached-content",
            ),
            dict(
                testcase_name="special_characters_except_dot_and_hyphen",
                name="test-cac*@/hed-conte#nt",
            ),
            dict(
                testcase_name="empty_name",
                name="",
            ),
            dict(
                testcase_name="blank_spaces",
                name="test cached content",
            ),
        ]
    )
    def test_create_cached_content_with_invalid_name_format(self, name):
        with self.assertRaises(ValueError):
            _ = caching.CachedContent.create(
                name=name,
                model="models/gemini-1.0-pro-001",
            )

    def test_get_cached_content(self):
        cc = caching.CachedContent.get(name="cachedContent/test-cached-content")
        self.assertIsInstance(self.observed_requests[-1], glm.GetCachedContentRequest)
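The parameterized cases above imply a name format for cached content: lowercase letters, digits, dots, and hyphens, with no spaces and no empty names. As an illustration only, here is a minimal client-side pre-check consistent with those cases; the helper and the exact allowed character set are inferred from the test case names, not stated in this diff, and may differ from the library's actual validation.

import re

# Hypothetical helper, not part of this commit: a pre-check mirroring the
# invalid-name cases exercised by the test above. The allowed character set
# (lowercase letters, digits, dots, and hyphens) is an assumption.
_CACHED_CONTENT_NAME_RE = re.compile(r"[a-z0-9.-]+")

def is_plausible_cached_content_name(name: str) -> bool:
    """True if `name` passes the format check implied by the test cases."""
    return bool(_CACHED_CONTENT_NAME_RE.fullmatch(name))

# Every invalid name from the parameterized cases above fails the check.
for bad_name in ["Test-cached-content", "test-cac*@/hed-conte#nt", "", "test cached content"]:
    assert not is_plausible_cached_content_name(bad_name)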
2 changes: 1 addition & 1 deletion tests/test_generative_models.py
@@ -76,7 +76,7 @@ def count_tokens(
        self.observed_kwargs.append(kwargs)
        response = self.responses["count_tokens"].pop(0)
        return response

    def get_cached_content(
        self,
        request: glm.GetCachedContentRequest,
