Commit 061c4ed

Jinash Rouniyar authored and committed
Removed baseURL support for python client
1 parent 78e055e commit 061c4ed

4 files changed: 0 additions & 31 deletions

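In practical terms, this commit removes the `base_url` keyword from the ContextualAI generative and reranker helpers, so any endpoint override must now be handled on the server side rather than in the client call. A minimal before/after sketch (not part of the diff), assuming the public `Configure` alias that exposes the `_Generative` factory touched in `config.py`:

```python
# Hypothetical migration sketch: what caller code looks like after this commit.
from weaviate.classes.config import Configure

# Before: base_url was an accepted keyword.
# Configure.Generative.contextualai(model="v2", base_url="https://api.contextual.ai")

# After: passing base_url raises a TypeError (unexpected keyword argument).
generative_config = Configure.Generative.contextualai(model="v2")
```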

test/collection/test_classes_generative.py

Lines changed: 0 additions & 2 deletions
```diff
@@ -416,7 +416,6 @@ def test_generative_parameters_images_parsing(
             ),
             (
                 GenerativeConfig.contextualai(
-                    base_url="http://localhost:8080",
                     model="v2",
                     max_tokens=100,
                     temperature=0.5,
@@ -427,7 +426,6 @@ def test_generative_parameters_images_parsing(
             generative_pb2.GenerativeProvider(
                 return_metadata=True,
                 contextualai=generative_pb2.GenerativeContextualAI(
-                    base_url="http://localhost:8080",
                     model="v2",
                     max_tokens=100,
                     temperature=0.5,
```
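The parametrized cases above pair a runtime `GenerativeConfig` object with the gRPC message it is expected to produce; after this change neither side carries a base URL. A condensed sketch of one such pair, assuming the test's `GenerativeConfig` and `generative_pb2` imports resolve to `weaviate.classes.generate` and `weaviate.proto.v1` respectively:

```python
# Condensed sketch of one parametrize case from this test, with base_url removed.
from weaviate.classes.generate import GenerativeConfig
from weaviate.proto.v1 import generative_pb2

case = (
    GenerativeConfig.contextualai(
        model="v2",
        max_tokens=100,
        temperature=0.5,
    ),
    generative_pb2.GenerativeProvider(
        return_metadata=True,
        contextualai=generative_pb2.GenerativeContextualAI(
            model="v2",
            max_tokens=100,
            temperature=0.5,
        ),
    ),
)
```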

test/collection/test_config.py

Lines changed: 0 additions & 4 deletions
```diff
@@ -1057,7 +1057,6 @@ def test_config_with_vectorizer_and_properties(
             top_p=0.9,
             system_prompt="You are a helpful assistant that provides accurate and informative responses based on the given context.",
             avoid_commentary=False,
-            base_url="https://api.contextual.ai",
         ),
         {
             "generative-contextualai": {
@@ -1067,7 +1066,6 @@ def test_config_with_vectorizer_and_properties(
                 "topPProperty": 0.9,
                 "systemPromptProperty": "You are a helpful assistant that provides accurate and informative responses based on the given context.",
                 "avoidCommentaryProperty": False,
-                "baseURL": "https://api.contextual.ai/",
             }
         },
     ),
@@ -1164,14 +1162,12 @@ def test_config_with_generative(
             model="ctxl-rerank-v2-instruct-multilingual",
             instruction="Prioritize recent documents",
             top_n=5,
-            base_url="https://api.contextual.ai",
         ),
         {
             "reranker-contextualai": {
                 "model": "ctxl-rerank-v2-instruct-multilingual",
                 "instruction": "Prioritize recent documents",
                 "topN": 5,
-                "baseURL": "https://api.contextual.ai",
             }
         },
     ),
```
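These cases assert that the collection-level config objects serialize to the module schema Weaviate expects; with `base_url` removed, no `baseURL` key appears in either payload. A small illustration using the fully visible reranker case, with the expected dict copied from the test expectation above:

```python
# Illustration: the reranker config and the schema payload asserted above,
# now without a "baseURL" entry.
from weaviate.classes.config import Configure

reranker = Configure.Reranker.contextualai(
    model="ctxl-rerank-v2-instruct-multilingual",
    instruction="Prioritize recent documents",
    top_n=5,
)
# Expected module configuration:
# {
#     "reranker-contextualai": {
#         "model": "ctxl-rerank-v2-instruct-multilingual",
#         "instruction": "Prioritize recent documents",
#         "topN": 5,
#     }
# }
```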

weaviate/collections/classes/config.py

Lines changed: 0 additions & 20 deletions
```diff
@@ -465,20 +465,13 @@ class _GenerativeContextualAIConfig(_GenerativeProvider):
     generative: Union[GenerativeSearches, _EnumLikeStr] = Field(
         default=GenerativeSearches.CONTEXTUALAI, frozen=True, exclude=True
     )
-    baseURL: Optional[AnyHttpUrl]
     model: Optional[str]
     maxTokensProperty: Optional[int]
     temperatureProperty: Optional[float]
     topPProperty: Optional[float]
     systemPromptProperty: Optional[str]
     avoidCommentaryProperty: Optional[bool]
 
-    def _to_dict(self) -> Dict[str, Any]:
-        ret_dict = super()._to_dict()
-        if self.baseURL is not None:
-            ret_dict["baseURL"] = self.baseURL.unicode_string()
-        return ret_dict
-
 
 class _GenerativeGoogleConfig(_GenerativeProvider):
     generative: Union[GenerativeSearches, _EnumLikeStr] = Field(
```
```diff
@@ -597,16 +590,9 @@ class _RerankerContextualAIConfig(_RerankerProvider):
         default=Rerankers.CONTEXTUALAI, frozen=True, exclude=True
     )
     model: Optional[Union[RerankerContextualAIModel, str]] = Field(default=None)
-    baseURL: Optional[AnyHttpUrl]
     instruction: Optional[str] = Field(default=None)
     topN: Optional[int] = Field(default=None)
 
-    def _to_dict(self) -> Dict[str, Any]:
-        ret_dict = super()._to_dict()
-        if self.baseURL is not None:
-            ret_dict["baseURL"] = self.baseURL.unicode_string()
-        return ret_dict
-
 
 class _Generative:
     """Use this factory class to create the correct object for the `generative_config` argument in the `collections.create()` method.
```
```diff
@@ -883,7 +869,6 @@ def contextualai(
         top_p: Optional[float] = None,
         system_prompt: Optional[str] = None,
         avoid_commentary: Optional[bool] = None,
-        base_url: Optional[AnyHttpUrl] = None,
     ) -> _GenerativeProvider:
         """Create a `_GenerativeContextualAIConfig` object for use when performing AI generation using the `generative-contextualai` module.
 
@@ -897,10 +882,8 @@ def contextualai(
             top_p: Nucleus sampling parameter (0 < x <= 1). Defaults to `None`, which uses the server-defined default
             system_prompt: System instructions the model follows. Defaults to `None`, which uses the server-defined default
             avoid_commentary: If `True`, reduce conversational commentary in responses. Defaults to `None`, which uses the server-defined default
-            base_url: The base URL where the API request should go. Defaults to `None`, which uses the server-defined default
         """
         return _GenerativeContextualAIConfig(
-            baseURL=base_url,
             maxTokensProperty=max_tokens,
             model=model,
             temperatureProperty=temperature,
```
```diff
@@ -1131,7 +1114,6 @@ def nvidia(
     @staticmethod
     def contextualai(
         model: Optional[str] = None,
-        base_url: Optional[AnyHttpUrl] = None,
         instruction: Optional[str] = None,
         top_n: Optional[int] = None,
     ) -> _RerankerProvider:
@@ -1142,13 +1124,11 @@ def contextualai(
 
         Args:
             model: The model to use. Defaults to `None`, which uses the server-defined default
-            base_url: The base URL to send the reranker requests to. Defaults to `None`, which uses the server-defined default.
             instruction: Custom instructions for reranking. Defaults to `None`.
             top_n: Number of top results to return. Defaults to `None`, which uses the server-defined default.
         """
         return _RerankerContextualAIConfig(
             model=model,
-            baseURL=base_url,
             instruction=instruction,
             topN=top_n
         )
```
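The reranker factory is consumed the same way, through the `reranker_config` argument of `collections.create()`. A sketch under the same assumptions as above:

```python
# Sketch: the ContextualAI reranker configured without a client-supplied base URL.
import weaviate
from weaviate.classes.config import Configure

client = weaviate.connect_to_local()
try:
    client.collections.create(
        "Articles",
        reranker_config=Configure.Reranker.contextualai(
            model="ctxl-rerank-v2-instruct-multilingual",
            instruction="Prioritize recent documents",
            top_n=5,
        ),
    )
finally:
    client.close()
```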

weaviate/collections/classes/generative.py

Lines changed: 0 additions & 5 deletions
```diff
@@ -449,7 +449,6 @@ class _GenerativeContextualAI(_GenerativeConfigRuntime):
     generative: Union[GenerativeSearches, _EnumLikeStr] = Field(
         default=GenerativeSearches.CONTEXTUALAI, frozen=True, exclude=True
     )
-    base_url: Optional[AnyHttpUrl]
     model: Optional[str]
     max_tokens: Optional[int]
     temperature: Optional[float]
@@ -462,7 +461,6 @@ def _to_grpc(self, opts: _GenerativeConfigRuntimeOptions) -> generative_pb2.Gene
         return generative_pb2.GenerativeProvider(
             return_metadata=opts.return_metadata,
             contextualai=generative_pb2.GenerativeContextualAI(
-                base_url=_parse_anyhttpurl(self.base_url),
                 model=self.model,
                 max_tokens=self.max_tokens,
                 temperature=self.temperature,
@@ -611,7 +609,6 @@ def cohere(
     @staticmethod
     def contextualai(
         *,
-        base_url: Optional[str] = None,
         model: Optional[str] = None,
         max_tokens: Optional[int] = None,
         temperature: Optional[float] = None,
@@ -622,7 +619,6 @@ def contextualai(
         """Create a `_GenerativeContextualAI` object for use with the `generative-contextualai` module.
 
         Args:
-            base_url: The base URL where the API request should go. Defaults to `None`, which uses the server-defined default
             model: The model to use. Defaults to `None`, which uses the server-defined default
             max_tokens: The maximum number of tokens to generate. Defaults to `None`, which uses the server-defined default
             temperature: The temperature to use. Defaults to `None`, which uses the server-defined default
@@ -631,7 +627,6 @@ def contextualai(
             avoid_commentary: Whether to avoid model commentary in responses
         """
         return _GenerativeContextualAI(
-            base_url=AnyUrl(base_url) if base_url is not None else None,
            model=model,
            max_tokens=max_tokens,
            temperature=temperature,
```
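Unlike the collection-level config, `_GenerativeContextualAI` is the per-request provider built by `GenerativeConfig.contextualai` and translated to gRPC by `_to_grpc`. A sketch of query-time use after this change; the `generative_provider` parameter and result attribute are assumptions about the v4 client query API, not part of this diff:

```python
# Sketch: per-request ContextualAI generative provider, now without base_url.
import weaviate
from weaviate.classes.generate import GenerativeConfig

client = weaviate.connect_to_local()
try:
    articles = client.collections.get("Articles")
    response = articles.generate.near_text(
        query="contextual retrieval",
        grouped_task="Summarize the retrieved articles.",
        generative_provider=GenerativeConfig.contextualai(
            model="v2",
            max_tokens=100,
            temperature=0.5,
        ),
    )
    print(response.generated)  # grouped generation result
finally:
    client.close()
```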

0 commit comments