# llm_providers/openrouter_provider.py
import requests
import json
import logging
import codecs

logger = logging.getLogger(__name__)

def call_openrouter_stream(chat_history, model_name, api_key, api_url):
    """
    Sends messages to OpenRouter API and returns the response as a stream.
    """
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }
    
    payload = {
        "model": model_name,
        "messages": chat_history,
        "stream": True
    }
    
    logger.info(f"Sending request to OpenRouter API with model: {model_name}")
    
    buffer = ""
    try:
        with requests.post(api_url, headers=headers, json=payload, stream=True) as response:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:
                    buffer += chunk.decode('utf-8')
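                    # Process every complete line currently sitting in the buffer.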
                    while True:
                        line_end = buffer.find('\n')
                        if line_end == -1:
                            break

                        line = buffer[:line_end].strip()
                        buffer = buffer[line_end + 1:]

                        # SSE data lines are prefixed with "data: "; anything else
                        # (e.g. keep-alive comment lines) is ignored.
                        if line.startswith('data: '):
                            data = line[6:]
                            if data == '[DONE]':
                                # End of stream: stop the generator (a bare `break`
                                # would only exit the inner line-parsing loop).
                                return

                            try:
                                data_obj = json.loads(data)
                                content = data_obj["choices"][0]["delta"].get("content")
                                if content:
                                    yield content
                            except (json.JSONDecodeError, KeyError, IndexError):
                                # Skip malformed or non-content events rather than aborting the stream.
                                pass
    except Exception as e:
        logger.error(f"Error processing OpenRouter chunk: {e}", exc_info=True)
        yield f"Error from OpenRouter: {str(e)}"
