"""
SolverAgent - solves problems using the reasoner model.
"""

import logging
from typing import List, Any, Dict, Optional
from .base_agent import BaseAgent, AgentState, AgentResponse
from deepseek_handler import get_llm_response_stream
from prompts import get_prompts
from message_utils import get_last_messages

logger = logging.getLogger(__name__)


class SolverAgent(BaseAgent):
    """Agent that solves problems via specialized reasoner-model"""

    def __init__(self):
        # The chat model is still needed for generating function-call prompts, but we bypass it here.
        super().__init__("Solver", "stub", mode=1, history_limit=25)

    def _get_system_prompt(self, subject_id: int, state: Optional[AgentState] = None) -> str:
        # Use only subject-specific reasoning prompt (e.g., physics_prompts.physics_reasoner_system_prompt)
        base_prompt = get_prompts(subject_id)[1] or ""
        return base_prompt

    def _get_tools(self) -> List[Any]:
        """Solver напрямую вызывает reasoner-model, доп.tools не нужны"""
        return []

    def _process_function_call(self, function_call, state: AgentState) -> Any:
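        # Solver never issues function calls (see _get_tools), so there is nothing to process.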
        return None

    # Override process_message to call reasoner-model instead of chat-model
    def process_message(self, history: List[Dict[str, Any]], state: AgentState) -> AgentResponse:
        logger.info("Solver: Processing user's solution or request for answer.")

        # The conversation history is forwarded directly to the reasoner model,
        # which streams the final answer back to the caller.
        stream_iter = get_llm_response_stream(
            history,
            mode="reasoner-model",
            subject=state.subject_id,
            plain_text_mode=state.plain_text_mode
        )

        return AgentResponse(
            agent_name=self.name,
            stream=stream_iter,
            is_final=True
        ) 