from langgraph.graph import StateGraph
from langchain_groq import ChatGroq
import os
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.messages import HumanMessage
from langgraph.prebuilt import ToolNode
from mytools import tool_fnb, tool_reception, tool_catchall, tool_techhelp, tool_roomservice, get_GuestDetails, tool_concierge, tool_maintenance, tool_security, State
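# `State` is the shared graph state imported from mytools. Its exact definition lives
# there; for this message-passing pattern it is assumed to look roughly like:
#
#     class State(TypedDict):
#         messages: Annotated[list, add_messages]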
from langgraph.checkpoint.memory import MemorySaver



# Initialize memory
memory = MemorySaver()
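# MemorySaver keeps checkpoints in process memory only; for persistence across
# restarts, a database-backed checkpointer (e.g. langgraph's SqliteSaver) could be
# swapped in instead.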


HotelAssistant_Prompt = """
You are Rumi, a friendly and efficient virtual assistant designed to enhance the guest experience at our hotel, Hotel International.
Your job is to assist guests with a variety of needs, from bookings and reservations to answering questions about hotel
amenities and local attractions.
Use the provided tools to search for answers to guests' questions and to gather any other information needed to assist them.
When searching, be persistent: if the first search returns no results, broaden your query before giving up.
Answer only from the knowledge base provided. Never make up an answer.
If you cannot confidently answer a query:
- Acknowledge that you cannot help directly.
- Inform the guest that you will log their request with the reception team.
- Use the tool_catchall function to log the unresolved query.

Important Guidelines:
- Always strive to provide the most helpful response possible
- If you do not find a satisfactory answer in the knowledge base:
    - Do not fabricate information
    - Do not guess or provide potentially incorrect information
    - Instead, use the catch-all tool to log the query for reception follow-up

Example Responses for Unresolved Queries:
"I apologize, but I don't have specific information about that. I'll log this request with our reception team, and they will get back to you shortly."
"I'm unable to help with this specific query at the moment. Our reception team will review your request and provide assistance."

Stick to the context provided and never invent an answer. The examples above are for your reference only; do not repeat them verbatim when answering.
Always ask for the guest's name and room number when you greet them.
Your Personality Traits:
- Friendly and Welcoming: Always greets guests warmly, preferably with hospitality-style emojis, and makes them feel valued.
- Efficient and Knowledgeable: Provides quick and accurate information.
- Patient and Attentive: Listens carefully to guest inquiries and provides thoughtful responses.
- Professional and Polite: Maintains a high standard of professionalism and politeness at all times.
- Empathetic: Shows empathy when a query cannot be resolved.
- Helpful and Resourceful: Goes the extra mile to assist guests with their needs and find solutions to their problems.

Your Interaction Style:
- **Greeting**: Start with a warm and friendly greeting.
  - Example: “Hello and welcome to Hotel International! How can I assist you today? May I have your name and room number?”
- **Assisting with Inquiries**:
  - Be clear and concise in providing information.
"""

HotelAssistant_PromptTemplate = ChatPromptTemplate.from_messages(
    [
        ('system', HotelAssistant_Prompt),
        ('placeholder', "{messages}")
    ]
)
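# The "{messages}" placeholder is filled from the graph state's `messages` key at
# invocation time, so the system prompt is prepended to the running conversation.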

# Read the Groq API key from the environment rather than hardcoding it in source.
groq_api_key = os.environ['GROQ_API_KEY']
llm = ChatGroq(groq_api_key=groq_api_key, model_name='llama-3.3-70b-versatile', temperature=0)

"""
os.environ["OPENROUTER_API_KEY"] = "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"  
openrouter_api_key = os.environ["OPENROUTER_API_KEY"]

# Initialize the OpenAI client for OpenRouter
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=openrouter_api_key,
)

#Configure OpenRouter LLM
llm = ChatOpenAI(
    model="meta-llama/llama-3.3-70b-instruct",  # Replace with the desired model
    temperature=0,
    openai_api_key=openrouter_api_key,
    openai_api_base="https://openrouter.ai/api/v1",
)
"""

tools = [
    tool_fnb, tool_reception, tool_catchall, tool_techhelp, tool_roomservice,
    get_GuestDetails, tool_concierge, tool_maintenance, tool_security,
]

# Compose the system prompt with the model; bind_tools lets the LLM request the hotel tools.
llm_with_prompt = HotelAssistant_PromptTemplate | llm.bind_tools(tools)

def hotelagent(message_state: State):
    # Agent node: run the prompted, tool-bound LLM over the current message history
    # and append its reply to the state.
    response = llm_with_prompt.invoke(message_state)
    return {
        'messages': [response]
    }

def ifTools_Calls_available(state: State):
    # Router: send the conversation to the tool node when the LLM's last message
    # requested tool calls; otherwise finish the turn.
    last_message = state['messages'][-1]
    if last_message.tool_calls:
        return 'tool_node'
    else:
        return '__end__'
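
# Note: recent versions of langgraph also ship a prebuilt router,
# langgraph.prebuilt.tools_condition, which performs the same check but routes to a
# node named "tools"; the custom function above is kept because our tool node is
# registered as 'tool_node'.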

# Wire the graph: the agent loops through the tool node whenever the LLM requests
# tool calls, and the turn ends otherwise.
graph_builder = StateGraph(State)
tool_node = ToolNode(tools)

graph_builder.add_node('hotelagent', hotelagent)
graph_builder.add_node('tool_node', tool_node)

graph_builder.add_conditional_edges("hotelagent", ifTools_Calls_available)
graph_builder.add_edge('tool_node', 'hotelagent')
graph_builder.set_entry_point("hotelagent")

# Compile the graph with checkpointing enabled
hotelgraph = graph_builder.compile(checkpointer=memory)
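# With a checkpointer attached, every invoke/stream call must pass a config that
# carries a thread_id, e.g. {"configurable": {"thread_id": "..."}}; conversation
# state is then persisted and restored per thread.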

# Code to test the graph; because checkpointing is enabled, a thread_id must be
# supplied in the config.
# config = {"configurable": {"thread_id": "test-thread"}}
# updated_message = hotelgraph.invoke(
#     {
#         'messages': [HumanMessage(content='Hello')]
#     },
#     config,
# )
# print(updated_message)
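#
# Illustrative sketch (assumes the mytools tools and a valid GROQ_API_KEY are
# available): a multi-turn exchange reusing one thread_id so MemorySaver restores
# the earlier turns automatically.
# config = {"configurable": {"thread_id": "guest-204"}}
# first = hotelgraph.invoke({'messages': [HumanMessage(content='Hi, this is Asha from room 204')]}, config)
# print(first['messages'][-1].content)
# followup = hotelgraph.invoke({'messages': [HumanMessage(content='Could you send two extra towels up?')]}, config)
# print(followup['messages'][-1].content)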

