# llm_providers/openai_provider.py
from openai import OpenAI
import logging
import os
import sys

# Configure logging
logger = logging.getLogger(__name__)

# Global client instance for LLM function calls
_global_client = None


try:
    sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'mcpserver'))
    from mcp_client import (MCPClient, get_llm_function_definitions, cleanup_llm_client)

    _global_client = MCPClient()
    logger.info("MCP Client initialized.")

except ImportError as e:
    logger.warning(f"Could not import MCP client functions: {e}. MCP will be disabled.")
    MCPClient = None  # Sentinel so get_client() can tell that MCP is unavailable
    _global_client = None
except Exception as e:
    logger.error(f"Failed to initialize MCP client: {e}. MCP will be disabled.", exc_info=True)
    _global_client = None

def get_client():
    """Get or create the global MCP client instance."""
    global _global_client
    if _global_client is None:
        if MCPClient is None:
            # The mcp_client import failed at module load time, so MCP is disabled.
            raise RuntimeError("MCP client is unavailable: mcp_client could not be imported.")
        _global_client = MCPClient()
        logger.info("MCP Client created.")
    return _global_client


def get_mcp_tools_openai():
    """Returns MCP tool definitions in the OpenAI function-calling format."""
    tools = []
    
    # Add MCP functions only if the MCP client is available
    if _global_client:
        mcp_functions = get_llm_function_definitions()
        for func_def in mcp_functions:
            logger.info(f"Adding OpenAI MCP function: {func_def['name']}")
            
            # Convert properties to the OpenAI function-calling schema
            properties = {}
            for prop_name, prop_info in func_def['parameters']['properties'].items():
                properties[prop_name] = {
                    # Default to "string" when the MCP definition omits a type
                    "type": prop_info.get('type', 'string'),
                    "description": prop_info.get('description', '')
                }
            
            openai_func = {
                "type": "function",
                "function": {
                    "name": func_def['name'],
                    "description": func_def['description'],
                    "parameters": {
                        "type": "object",
                        "properties": properties,
                        # Tolerate MCP definitions that omit the 'required' list
                        "required": func_def['parameters'].get('required', [])
                    }
                }
            }
            tools.append(openai_func)
    
    logger.info(f"Created {len(tools)} MCP tools for OpenAI")
    return tools
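
# Note: the tool definitions built above are meant to be passed to the Chat
# Completions API via the `tools` parameter. A minimal sketch, not wired into
# this module (tool_choice="auto" lets the model decide whether to call a tool):
#
#   client.chat.completions.create(
#       model=model_name,
#       messages=chat_history,
#       tools=get_mcp_tools_openai(),
#       tool_choice="auto",
#       stream=True,
#   )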

def call_openai_stream(chat_history, model_name, api_key, base_url):
    """
    Sends chat_history to the OpenAI Chat Completions API and yields the
    response text chunk by chunk as it streams in.
    """
    client = OpenAI(
        api_key=api_key,
        base_url=base_url
    )
    logger.info(f"API model (OpenAI): {model_name}")

    try:
        response = client.chat.completions.create(
            model=model_name,
            messages=chat_history,
            stream=True,
            timeout=120,
            max_tokens=8192
        )
        
        logger.info("Streaming response from OpenAI API")
        
        for chunk in response:
            if chunk and chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                yield chunk.choices[0].delta.content
    except Exception as api_error:
        logger.error(f"Error calling OpenAI API: {api_error}", exc_info=True)
        yield f"An error occurred: {str(api_error)}"

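
if __name__ == "__main__":
    # Minimal manual test sketch. The model name, base URL, and environment
    # variable below are assumptions for illustration, not project configuration.
    logging.basicConfig(level=logging.INFO)

    demo_history = [{"role": "user", "content": "Say hello in one short sentence."}]
    for text in call_openai_stream(
        chat_history=demo_history,
        model_name="gpt-4o-mini",
        api_key=os.environ.get("OPENAI_API_KEY", ""),
        base_url="https://api.openai.com/v1",
    ):
        print(text, end="", flush=True)
    print()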