"""
LoggerAgent - записывает прогресс в БД через MCP tools.
"""

import logging
from typing import List, Any
from .base_agent import BaseAgent, AgentState
from deepseek_handler import get_mcp_tools, get_mcp_tools_openai, get_mcp_tools_anthropic

logger = logging.getLogger(__name__)

class LoggerAgent(BaseAgent):
    """Agent that persists student progress to the database via MCP tools.

    The system prompt (in Russian, sent verbatim to the model) instructs the
    LLM to call the ``llm_save_paragraph_progress`` MCP function; the actual
    execution of that call happens locally in :meth:`_process_function_call`.
    """

    def __init__(self):
        # NOTE: the prompt below is a runtime string consumed by the model;
        # it is intentionally kept in Russian, byte-for-byte.
        system_prompt = (
            "Ты – Logger Agent. Твоя задача – записывать прогресс ученика в базу данных. "
            "Анализируй диалог и используй функцию llm_save_paragraph_progress для сохранения прогресса по изученным темам. "
            "Определи подходящий номер параграфа (seq_number) на основе контекста и сохрани краткое описание прогресса. "
            "Отвечай кратким подтверждением о сохранении прогресса."
        )
        super().__init__("Logger", system_prompt, mode=0)

    def _get_tools(self) -> List[Any]:
        """Compatibility method - not used."""
        return []

    def _safe_get_tools(self, getter, provider: str) -> List[Any]:
        """Call *getter* and return its tool list; log and return [] on failure.

        Shared best-effort wrapper for the per-provider tool getters below;
        the log message format matches the original per-provider handlers.
        """
        try:
            return getter()
        except Exception as e:
            logger.error(f"Logger: Error getting {provider} MCP tools: {e}")
            return []

    def _get_gemini_tools(self) -> List[Any]:
        """Get MCP tools in Gemini format.

        BUG FIX: the original called the undefined name
        ``get_mcp_tools_gemini`` (never imported), so this method always
        raised ``NameError`` internally and silently returned ``[]``.
        The imported Gemini-format getter is ``get_mcp_tools``.
        """
        return self._safe_get_tools(get_mcp_tools, "Gemini")

    def _get_openai_tools(self) -> List[Any]:
        """Get MCP tools in OpenAI format."""
        return self._safe_get_tools(get_mcp_tools_openai, "OpenAI")

    def _get_anthropic_tools(self) -> List[Any]:
        """Get MCP tools in Anthropic format."""
        return self._safe_get_tools(get_mcp_tools_anthropic, "Anthropic")

    def _process_function_call(self, function_call, state: AgentState) -> Any:
        """Execute an MCP function call requested by the model.

        Handles the Gemini (``.args``), Anthropic (``.input``) and
        pre-converted OpenAI (``.args``) call shapes.

        Returns the tool result on success, an ``"Error: ..."`` string on
        failure, or ``None`` for unrecognized function names (unchanged
        contract for callers).
        """
        # Resolve the name defensively up front: the original re-read
        # `function_call.name` inside the except block, which could itself
        # raise and mask the real error.
        func_name = getattr(function_call, "name", "<unknown>")
        try:
            # Imported lazily so the MCP client is only required when a
            # function call is actually processed.
            from mcpserver.mcp_client import llm_save_paragraph_progress

            # Normalize the argument payload across provider call formats.
            if hasattr(function_call, 'args'):
                # Gemini format
                args = dict(function_call.args)
            elif hasattr(function_call, 'input'):
                # Anthropic format
                args = function_call.input
            else:
                # OpenAI format (already converted in base class)
                args = function_call.args

            if func_name == "llm_save_paragraph_progress":
                seq_number = args.get("seq_number", "auto")
                progress = args.get("progress", "Progress logged")
                result = llm_save_paragraph_progress(seq_number, progress)
                logger.info(f"Logger: Progress saved - {seq_number}: {progress[:50]}...")
                return result

            # Unknown tool name: keep the original silent `return None`
            # contract, but make the situation visible in the logs.
            logger.warning(f"Logger: Unhandled function call: {func_name}")
        except Exception as e:
            logger.error(f"Logger: Error processing function call {func_name}: {e}")
            return f"Error: {str(e)}"

        return None