Skip to content

Commit 2790e59

Browse files
committed
Move logging to a central location for all AI Handlers
1 parent 044f005 commit 2790e59

File tree

2 files changed

+5
-5
lines changed

pr_agent/algo/ai_handlers/litellm_ai_handler.py

-5
Original file line numberDiff line numberDiff line change
@@ -101,11 +101,6 @@ async def chat_completion(self, model: str, system: str, user: str, temperature:
101101
"""
102102
try:
103103
deployment_id = self.deployment_id
104-
if get_settings().config.verbosity_level >= 2:
105-
get_logger().debug(
106-
f"Generating completion with {model}"
107-
f"{(' from deployment ' + deployment_id) if deployment_id else ''}"
108-
)
109104
if self.azure:
110105
model = 'azure/' + model
111106
messages = [{"role": "system", "content": system}, {"role": "user", "content": user}]

pr_agent/algo/pr_processing.py

+5
Original file line numberDiff line numberDiff line change
@@ -226,6 +226,11 @@ async def retry_with_fallback_models(f: Callable):
226226
# try each (model, deployment_id) pair until one is successful, otherwise raise exception
227227
for i, (model, deployment_id) in enumerate(zip(all_models, all_deployments)):
228228
try:
229+
if get_settings().config.verbosity_level >= 2:
230+
get_logger().debug(
231+
f"Generating prediction with {model}"
232+
f"{(' from deployment ' + deployment_id) if deployment_id else ''}"
233+
)
229234
get_settings().set("openai.deployment_id", deployment_id)
230235
return await f(model)
231236
except Exception as e:

0 commit comments

Comments (0)