diff --git a/backend/app/modules/chat/llm_processing.py b/backend/app/modules/chat/llm_processing.py
index 2d5134fa..7b22d639 100644
--- a/backend/app/modules/chat/llm_processing.py
+++ b/backend/app/modules/chat/llm_processing.py
@@ -53,13 +53,16 @@ def ask_llm(question, docs):
     Question: {question}
     """
-
-    response = client.chat.completions.create(
+    try:
+        response = client.chat.completions.create(
         model="gemma2-9b-it",
         messages=[
             {"role": "system", "content": "Use only the context to answer."},
             {"role": "user", "content": prompt},
-        ],
-    )
-    logger.info("LLM response retrieved successfully.")
-    return response.choices[0].message.content
+            ],
+        )
+        logger.info("LLM response retrieved successfully.")
+        return response.choices[0].message.content
+    except Exception as e:
+        logger.error(f"Error during LLM interaction: {e}")
+        raise
 