import sys
import os
import json
import time
import logging

# Add the project root to the Python path BEFORE any project-local imports.
# (Previously `from api_key import ...` ran before this append, which only
# worked because Python happens to put the script's own directory on
# sys.path; the explicit ordering makes the intent actually hold.)
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from api_key import GEMINI_API_KEY

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Import the Gemini provider
from llm_providers.gemini_provider import call_gemini_stream as get_llm_response_stream

# --- Configuration ---
# You can change these values for testing
USER_ID = 1          # A valid user ID from your 'users' table (though not used in this fast version)
SUBJECT_ID = 4       # 4 corresponds to Chemistry
MODE = "chat-model"  # Use "chat-model" for general chat or "reasoner-model" for complex problems

def get_system_instruction(subject_id):
    """Return the system prompt to send with each LLM request.

    Placeholder implementation: every subject currently shares one generic
    instruction, so *subject_id* is accepted for interface compatibility
    but not consulted.
    """
    generic_prompt = (
        "You are a helpful AI assistant. "
        "Provide concise and accurate responses."
    )
    return generic_prompt

def main():
    """
    Run the terminal-based chat client without any database interaction.

    Loops reading user input, streams each reply from the Gemini provider,
    and prints chunks as they arrive. Exits on 'exit', Ctrl+C, or Ctrl+D.
    A provider/network error aborts only the current turn, not the session.
    """
    print("--- ClassGPT Terminal Client (Fast Mode) ---")
    print(f"User ID: {USER_ID}, Subject: Chemistry (ID: {SUBJECT_ID})")
    print("Type your message and press Enter. Type 'exit' to quit.")
    print("---------------------------------")

    # No chat session creation in the database -- this is the fast path.
    while True:
        try:
            # Get input from the user in the terminal
            user_message = input("You: ")

            # Check if the user wants to exit
            if user_message.strip().lower() == 'exit':
                print("Ending chat session. Goodbye!")
                break

            if not user_message.strip():
                continue

            # Only the current message is sent: no history, no DB writes.
            messages_for_llm = [
                {"role": "user", "content": user_message}
            ]

            # Retrieve the system instruction for the configured subject
            system_instruction = get_system_instruction(SUBJECT_ID)

            # Get the streaming response from the language model
            print("ClassGPT: ", end="", flush=True)

            first_chunk_received = False
            # perf_counter() is monotonic, so the latency measurement is
            # immune to wall-clock adjustments (unlike time.time()).
            start_time = time.perf_counter()
            # Call the function that communicates with the Gemini API
            stream = get_llm_response_stream(
                chat_history=messages_for_llm,
                model_name="gemini-2.5-flash",  # You might want to make this configurable
                api_key=GEMINI_API_KEY,
                system_instruction=system_instruction,
                subject=SUBJECT_ID,
                reasoner_mode=(MODE == "reasoner-model")
            )

            # Process the stream chunk by chunk
            for chunk in stream:
                if not first_chunk_received:
                    time_to_first_chunk = time.perf_counter() - start_time
                    # Lazy %-formatting so the string is only built if emitted
                    logging.info("Time to first chunk: %.2f seconds", time_to_first_chunk)
                    first_chunk_received = True

                # Chunks from call_gemini_stream are expected to be strings
                # directly; anything else is ignored defensively.
                if isinstance(chunk, str) and chunk:
                    # Print the content to the terminal immediately
                    print(chunk, end="", flush=True)

            print()  # Add a newline after the model's response is complete

        except (KeyboardInterrupt, EOFError):
            # Ctrl+C or Ctrl+D (end-of-input): exit gracefully instead of
            # reporting end-of-stdin as an unexpected error.
            print("\n\nExiting chat session. Goodbye!")
            break
        except Exception as e:
            # A transient provider/network failure should not kill the whole
            # session: log the traceback, tell the user, and let them retry.
            logging.exception("Error during chat turn")
            print(f"\nAn error occurred: {e}")

# Standard script entry point: run the chat loop only when executed
# directly, not when this module is imported.
if __name__ == "__main__":
    main()
