#!/usr/bin/env python3
"""Debug script to test LLM connection."""
import traceback

import openai

from config import Config


def test_llm_connection():
    """Run one chat completion against the configured LLM endpoint.

    Prints diagnostic details while unwrapping each layer of the response
    (response -> choices -> message -> content) so a misbehaving endpoint
    can be inspected step by step.

    Returns:
        The message content string on success, or None if the request
        failed or the response was missing any expected attribute.
    """
    config = Config()
    client = openai.OpenAI(
        base_url=config.llm_config['base_url'],
        # Local/self-hosted endpoints typically ignore the key; a dummy
        # value keeps the client constructor happy.
        api_key=config.llm_config.get('api_key', 'not-needed'),
    )

    try:
        print(f"Testing connection to {config.llm_config['base_url']}")
        print(f"Using model: {config.llm_config['model']}")

        response = client.chat.completions.create(
            model=config.llm_config['model'],
            messages=[
                {"role": "system", "content": "You are a helpful translator."},
                {"role": "user", "content": "Translate 'Hello' to German. Respond with just the translation."},
            ],
            temperature=0.3,
            timeout=30,
        )

        print(f"Response type: {type(response)}")
        print(f"Response: {response}")

        # Guard clauses replace the original deeply nested hasattr pyramid;
        # each missing layer is reported and aborts the unwrap.
        if not (hasattr(response, 'choices') and response.choices):
            print("No choices in response")
            return None

        choice = response.choices[0]
        print(f"Choice type: {type(choice)}")
        print(f"Choice: {choice}")

        if not hasattr(choice, 'message'):
            print("No message attribute in choice")
            return None

        message = choice.message
        print(f"Message type: {type(message)}")
        print(f"Message: {message}")

        if not hasattr(message, 'content'):
            print("No content attribute in message")
            return None

        content = message.content
        print(f"Content type: {type(content)}")
        print(f"Content: {content}")
        return content

    except Exception as e:
        # Debug tool: report the failure with a full traceback rather
        # than letting the script crash.
        print(f"Error: {e}")
        traceback.print_exc()
        return None


if __name__ == "__main__":
    test_llm_connection()