"""Main application file for Nexus AI Assistant.

This file handles the Gradio interface and orchestrates the chat implementations.
"""


from typing import List, Tuple
import os
import sys
from pathlib import Path
from dotenv import load_dotenv

# Add the parent directory to Python path so imports work correctly
# root_path = Path(__file__).resolve().parent
# sys.path.append(str(root_path))
# print(f"root path: {root_path}")

import gradio as gr
# Import the unified chat implementation
from agents.unified_chat import UnifiedChat
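# UnifiedChat is assumed (based on how it is used below) to expose two methods:
#   initialize()                       -- one-time setup of models/tools
#   process_message(message, history)  -- return a reply string, given the new
#                                         message and prior turns as role/content dicts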
load_dotenv()
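
# NOTE: which variables the .env file must provide depends on UnifiedChat's
# configuration; the key checked here is only an assumption (a common choice for
# OpenAI-backed agents). Rename it to match your actual setup.
if not os.getenv("OPENAI_API_KEY"):
    print("Warning: OPENAI_API_KEY is not set; some capabilities may be unavailable.")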

def create_demo():
    """Create the Gradio demo for the unified chat system."""
    
    # Construct the unified chat implementation
    chat_impl = UnifiedChat()
    
    # Initialize the chat implementation
    try:
        chat_impl.initialize()
        init_status = "โœ… All systems ready!"
    except Exception as e:
        init_status = f"โŒ Error initializing: {str(e)}"
        print(init_status)
    
    def respond(message: str, history: List[Tuple[str, str]]) -> str:
        """Process a message and return the response.
        
        Args:
            message: The user's input message
            history: List of tuples containing (user_message, assistant_response)
            
        Returns:
            str: The assistant's response
        """
        if not message:
            return "Please enter a message."
        
        # Convert Gradio's (user_message, assistant_response) history tuples
        # into the role/content dicts expected by the chat implementation
        history_dicts = []
        for user_msg, assistant_msg in history:
            history_dicts.append({"role": "user", "content": user_msg})
            history_dicts.append({"role": "assistant", "content": assistant_msg})
        
        try:
            # Process the message
            response = chat_impl.process_message(message, history_dicts)
            return response
        except Exception as e:
            return f"Error processing message: {str(e)}"
    
    
    # Create the Gradio interface using ChatInterface
    demo = gr.ChatInterface(
        fn=respond,
        title="๐Ÿค– Nexus AI - Unified Intelligent Assistant",
        description=f"""
        {init_status}
        
        I combine multiple AI capabilities:
        • 🧮 **Calculator & Math** - Complex calculations
        • 📅 **Date & Time** - Current date, time calculations
        • 🌤️ **Weather** - Real-time weather information
        • 📚 **Document Analysis** - RAG-powered document search
        • 🔬 **Deep Research** - Comprehensive multi-source analysis
        • 💬 **General Chat** - Conversational AI
        
        The system automatically routes your query to the most appropriate handler.
        """,
        examples=[
            "What is 847 * 293?",
            "What's today's date?",
            # "What's the weather in San Francisco?",
            # "Explain quantum computing in simple terms",
            # "Research the impact of AI on healthcare",
        ],
        theme=gr.themes.Soft(),
        analytics_enabled=False,
    )
    
    return demo


if __name__ == "__main__":
    # Create and launch the demo
    demo = create_demo()
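    # launch() with no arguments serves locally on Gradio's default port (7860).
    # Standard launch options can be passed instead, e.g.:
    #   demo.launch(server_name="0.0.0.0", server_port=7860, share=True)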
    demo.launch()