"""
Helper module to integrate all improvements into the main system
"""

# Import all the new components
from utils.optimized_conversation_memory import optimized_conversation_memory
from utils.agent_config import agent_config
from utils.guardrails import guardrails
from utils.greeting_templates import greeting_templates

class IntegrationHelper:
    """Helper class to integrate all improvements into the main system"""
    
    @staticmethod
    def update_crew_with_optimized_memory(crew_instance) -> None:
        """
        Update a RAG crew instance to use optimized conversation memory
        
        Args:
            crew_instance: The RAG crew instance to update
        """
        # Import the existing persistent conversation memory so its data can be migrated
        from utils.persistent_conversation_memory import persistent_conversation_memory
        
        # Store a reference to the old conversation memory
        old_memory = persistent_conversation_memory
        
        # Copy existing conversations to the optimized memory
        for conv_id in old_memory.conversations:
            conversation = old_memory.get_conversation(conv_id)
            if conversation:
                # Create conversation in optimized memory
                optimized_conversation_memory.create_conversation(conv_id)
                
                # Copy messages
                for message in conversation.messages:
                    optimized_conversation_memory.add_message(
                        conversation_id=conv_id,
                        content=message.content,
                        role=message.role
                    )
        
        # Replace the crew's _get_conversation_history method with one backed by the
        # optimized memory; keep a reference to the original in case it needs to be restored
        original_get_history = crew_instance._get_conversation_history
        
        def new_get_history(conversation_id=None):
            if not conversation_id:
                return ""
            return optimized_conversation_memory.get_context_string(conversation_id)
        
        # Replace the method
        crew_instance._get_conversation_history = new_get_history
        
        # Log the update
        print("Updated RAG crew to use optimized conversation memory")
    
    @staticmethod
    def apply_guardrails_to_response_generation(response_agent) -> None:
        """
        Apply guardrails to a response generation agent
        
        Args:
            response_agent: The response generation agent to update
        """
        # Store a reference to the original generate_response method
        original_generate = response_agent.generate_response
        
        # Create a new method that applies guardrails
        def generate_response_with_guardrails(query, retrieved_information, conversation_history=""):
            # Check query with guardrails
            query_passes, reason, details = guardrails.check_query(query)
            
            if not query_passes:
                # Return a safe response based on the reason
                return {
                    "response": guardrails.get_safe_response(query, ""),
                    "sources": []
                }
            
            # Generate response using the original method
            response = original_generate(query, retrieved_information, conversation_history)
            
            # Check the generated response with guardrails
            if "response" in response:
                response_text = response["response"]
                response_passes, reason, details = guardrails.check_response(response_text)
                
                if not response_passes:
                    # Replace with a safe response
                    response["response"] = guardrails.get_safe_response(query, response_text)
            
            return response
        
        # Replace the method
        response_agent.generate_response = generate_response_with_guardrails
        
        # Log the update
        print("Applied guardrails to response generation agent")
    
    @staticmethod
    def update_prompt_templates_with_configurable_name() -> None:
        """Update prompt templates to use the configurable agent name"""
        from utils.prompt_templates import PromptTemplates
        
        # Store references to the original methods
        original_general = PromptTemplates.get_general_prompt
        original_technical = PromptTemplates.get_technical_prompt
        original_pricing = PromptTemplates.get_pricing_prompt
        original_scheduling = PromptTemplates.get_scheduling_prompt
        
        # Create new methods that use the configurable agent name
        def new_general_prompt(query, retrieved_info, conversation_history):
            prompt = original_general(query, retrieved_info, conversation_history)
            # Replace hardcoded name with configurable name
            prompt = prompt.replace(
                "You are Sam, a friendly AI assistant for MangoIT Solutions", 
                f"You are {agent_config.AGENT_NAME}, a friendly AI assistant for {agent_config.COMPANY_NAME}"
            )
            return prompt
        
        def new_technical_prompt(query, retrieved_info, conversation_history, detected_technologies=None):
            prompt = original_technical(query, retrieved_info, conversation_history, detected_technologies)
            # Replace hardcoded name with configurable name
            prompt = prompt.replace(
                "You are Sam, a technical expert at MangoIT Solutions", 
                f"You are {agent_config.AGENT_NAME}, a technical expert at {agent_config.COMPANY_NAME}"
            )
            return prompt
        
        def new_pricing_prompt(query, retrieved_info, conversation_history, project_type=None):
            prompt = original_pricing(query, retrieved_info, conversation_history, project_type)
            # Replace hardcoded name with configurable name
            prompt = prompt.replace(
                "You are Sam, a solutions consultant at MangoIT Solutions", 
                f"You are {agent_config.AGENT_NAME}, a solutions consultant at {agent_config.COMPANY_NAME}"
            )
            return prompt
        
        def new_scheduling_prompt(query, retrieved_info, conversation_history, meeting_details=None):
            prompt = original_scheduling(query, retrieved_info, conversation_history, meeting_details)
            # Replace hardcoded name with configurable name
            prompt = prompt.replace(
                "You are Sam, a helpful assistant at MangoIT Solutions", 
                f"You are {agent_config.AGENT_NAME}, a helpful assistant at {agent_config.COMPANY_NAME}"
            )
            return prompt
        
        # Replace the methods, wrapping them in staticmethod so they keep the
        # original static-method semantics when accessed via instances
        PromptTemplates.get_general_prompt = staticmethod(new_general_prompt)
        PromptTemplates.get_technical_prompt = staticmethod(new_technical_prompt)
        PromptTemplates.get_pricing_prompt = staticmethod(new_pricing_prompt)
        PromptTemplates.get_scheduling_prompt = staticmethod(new_scheduling_prompt)
        
        # Log the update
        print("Updated prompt templates to use configurable agent name")
    
    @staticmethod
    def update_enhanced_response_generator() -> None:
        """Update the enhanced response generator to use shorter greetings and guardrails"""
        from utils.enhanced_response_generator import enhanced_response_generator
        
        # Store a reference to the original generate_response method
        original_generate = enhanced_response_generator.generate_response
        
        # Create a new method that uses shorter greetings and guardrails
        async def generate_response_with_improvements(query, results, conversation_history=""):
            # Check query with guardrails
            query_passes, reason, details = guardrails.check_query(query)
            
            if not query_passes:
                # Return a safe response based on the reason
                return {
                    "response": guardrails.get_safe_response(query, ""),
                    "sources": []
                }
            
            # Generate response using the original method
            response = await original_generate(query, results, conversation_history)
            
            # Check if this is a greeting and apply shorter greeting if needed
            if query.lower() in ["hello", "hi", "hey", "greetings"]:
                is_returning = bool(conversation_history)
                greeting = greeting_templates.get_greeting(is_returning)
                response["response"] = greeting
            
            # Check the generated response with guardrails
            if "response" in response:
                response_text = response["response"]
                response_passes, reason, details = guardrails.check_response(response_text)
                
                if not response_passes:
                    # Replace with a safe response
                    response["response"] = guardrails.get_safe_response(query, response_text)
            
            return response
        
        # Replace the method
        enhanced_response_generator.generate_response = generate_response_with_improvements
        
        # Log the update
        print("Updated enhanced response generator with improvements")
    
    @staticmethod
    def integrate_all_improvements(crew_instance=None, response_agent=None) -> None:
        """
        Integrate all improvements into the main system
        
        Args:
            crew_instance: Optional RAG crew instance to update
            response_agent: Optional response generation agent to update
        """
        # Update prompt templates with configurable name
        IntegrationHelper.update_prompt_templates_with_configurable_name()
        
        # Update enhanced response generator
        IntegrationHelper.update_enhanced_response_generator()
        
        # Update crew with optimized memory if provided
        if crew_instance:
            IntegrationHelper.update_crew_with_optimized_memory(crew_instance)
        
        # Apply guardrails to response generation if provided
        if response_agent:
            IntegrationHelper.apply_guardrails_to_response_generation(response_agent)
        
        # Log the integration
        print("All improvements integrated into the main system")

# Create a singleton instance
integration_helper = IntegrationHelper()
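
# Usage sketch (hedged): typical wiring at application startup. The crew and
# response-agent import paths below are illustrative assumptions, as is this
# module living at utils/integration_helper.py.
#
#     from utils.integration_helper import integration_helper
#     from crew.rag_crew import RAGCrew                 # hypothetical
#     from agents.response_agent import ResponseAgent   # hypothetical
#
#     crew = RAGCrew()
#     agent = ResponseAgent()
#     integration_helper.integrate_all_improvements(
#         crew_instance=crew,
#         response_agent=agent,
#     )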
