"""
Enhanced prompt templates with user context integration
"""

from typing import Dict, Any, List, Optional
from utils.agent_config import agent_config
from utils.user_context_extractor import user_context_extractor
from utils.prompt_templates import PromptTemplates as BasePromptTemplates

class ContextEnhancedTemplates(BasePromptTemplates):
    """Enhanced prompt templates with user context integration.

    Each public method delegates to the corresponding BasePromptTemplates
    method, then injects a personalization snippet (derived from the
    conversation history) immediately after the first line of the base
    prompt. The shared injection logic lives in _inject_personalization.
    """

    @staticmethod
    def _inject_personalization(base_prompt: str, conversation_history: str) -> str:
        """Insert user-context personalization text after the first line of a prompt.

        Args:
            base_prompt: Prompt text produced by one of the base templates
            conversation_history: Previous conversation history to mine for
                user context

        Returns:
            str: base_prompt with the personalization text appended to its
                first line, followed by the remaining lines unchanged
        """
        user_context = user_context_extractor.extract_user_context(conversation_history)
        personalization = user_context_extractor.get_personalization_text(user_context)
        # partition('\n') is equivalent to split-on-first-newline: `rest` is
        # everything after the first line ('' for a single-line prompt).
        first_line, _sep, rest = base_prompt.partition('\n')
        return first_line + personalization + '\n' + rest

    @staticmethod
    def get_technical_prompt(query: str, 
                           retrieved_info: Dict[str, Any], 
                           conversation_history: str,
                           detected_technologies: Optional[List[str]] = None) -> str:
        """
        Generate a prompt template for technical queries with user context
        
        Args:
            query: The user's query
            retrieved_info: Information retrieved from the knowledge base
            conversation_history: Previous conversation history
            detected_technologies: List of technologies mentioned in the query
            
        Returns:
            str: Formatted prompt for the LLM
        """
        base_prompt = BasePromptTemplates.get_technical_prompt(
            query, retrieved_info, conversation_history, detected_technologies
        )
        return ContextEnhancedTemplates._inject_personalization(
            base_prompt, conversation_history
        )

    @staticmethod
    def get_pricing_prompt(query: str, 
                         retrieved_info: Dict[str, Any], 
                         conversation_history: str,
                         project_type: Optional[str] = None) -> str:
        """
        Generate a prompt template for pricing queries with user context
        
        Args:
            query: The user's query
            retrieved_info: Information retrieved from the knowledge base
            conversation_history: Previous conversation history
            project_type: Type of project being discussed
            
        Returns:
            str: Formatted prompt for the LLM
        """
        base_prompt = BasePromptTemplates.get_pricing_prompt(
            query, retrieved_info, conversation_history, project_type
        )
        return ContextEnhancedTemplates._inject_personalization(
            base_prompt, conversation_history
        )

    @staticmethod
    def get_scheduling_prompt(query: str, 
                            retrieved_info: Dict[str, Any], 
                            conversation_history: str,
                            meeting_details: Optional[Dict[str, Any]] = None) -> str:
        """
        Generate a prompt template for scheduling queries with user context
        
        Args:
            query: The user's query
            retrieved_info: Information retrieved from the knowledge base
            conversation_history: Previous conversation history
            meeting_details: Details about the requested meeting
            
        Returns:
            str: Formatted prompt for the LLM
        """
        base_prompt = BasePromptTemplates.get_scheduling_prompt(
            query, retrieved_info, conversation_history, meeting_details
        )
        return ContextEnhancedTemplates._inject_personalization(
            base_prompt, conversation_history
        )

    @staticmethod
    def get_general_prompt(query: str, 
                         retrieved_info: Dict[str, Any], 
                         conversation_history: str) -> str:
        """
        Generate a prompt template for general queries with user context
        
        Args:
            query: The user's query
            retrieved_info: Information retrieved from the knowledge base
            conversation_history: Previous conversation history
            
        Returns:
            str: Formatted prompt for the LLM
        """
        base_prompt = BasePromptTemplates.get_general_prompt(
            query, retrieved_info, conversation_history
        )
        return ContextEnhancedTemplates._inject_personalization(
            base_prompt, conversation_history
        )

# Create a singleton instance for convenient importing elsewhere.
# NOTE: every template method on the class is a @staticmethod, so this
# instance carries no state; it exists only so callers can import a
# ready-made object instead of referencing the class directly.
context_enhanced_templates = ContextEnhancedTemplates()
