Commit 12be6a2 · Kalyanbrata Maity committed
1 Parent(s): 994345d

added tools
Files changed:
- src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/graph_builder.py +35 -0
- src/langgraphagenticai/nodes/__pycache__/chatbot_with_tool_node.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/chatbot_with_tool_node.py +34 -0
- src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/tools/__pycache__/search_tools.cpython-312.pyc +0 -0
- src/langgraphagenticai/tools/search_tools.py +13 -0
- src/langgraphagenticai/ui/streamlit/__pycache__/display_result.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlit/__pycache__/load_ui.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlit/display_result.py +23 -2
- src/langgraphagenticai/ui/streamlit/load_ui.py +8 -0
- src/langgraphagenticai/ui/uiconfigfile.ini +1 -1
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc and b/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc differ
src/langgraphagenticai/graph/graph_builder.py
CHANGED
@@ -3,6 +3,8 @@ from langgraph.prebuilt import tools_condition, tool_node
 from langchain_core.prompts import ChatPromptTemplate
 from src.langgraphagenticai.state.state import State
 from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
+from src.langgraphagenticai.nodes.chatbot_with_tool_node import ChatbotWithToolNode
+from src.langgraphagenticai.tools.search_tools import get_tools, create_tool_node
 
 class GraphBuilder:
     def __init__(self, model):
@@ -21,10 +23,43 @@ class GraphBuilder:
         self.graph_builder.add_edge(START, "chatbot")
         self.graph_builder.add_edge("chatbot", END)
 
+    def chatbot_with_tools_build_graph(self):
+        """
+        Builds an advanced chatbot with tool integration.
+        This method creates a chatbot graph that includes both
+        a chatbot node and tool node. It defines tools, initializes
+        the chatbot with tool capabilities, and sets up conditional
+        and direct edges between nodes. The chatbot node is set as an
+        entry point.
+        """
+        ## Define the tool and tool node
+        tools = get_tools()
+        tool_node = create_tool_node(tools)
+
+        ## Define LLM
+        llm = self.llm
+
+        # Define the chatbot node
+        obj_chatbot_with_node = ChatbotWithToolNode(llm)
+        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)
+
+        # Add nodes
+        self.graph_builder.add_node("chatbot", chatbot_node)
+        self.graph_builder.add_node("tools", tool_node)
+
+        # Define conditional and direct edges
+        self.graph_builder.add_edge(START, "chatbot")
+        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
+        self.graph_builder.add_edge("tools", "chatbot")
+
+
     def setup_graph(self, usecase: str):
         """
         Sets up the graph for the selected use case.
         """
         if usecase == "Basic Chatbot":
             self.basic_chatbot_build_graph()
+
+        if usecase == "Chatbot with Tools":
+            self.chatbot_with_tools_build_graph()
         return self.graph_builder.compile()
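For reference, a minimal sketch of how the new use case can be exercised end to end. The model class, model name, and prompt below are illustrative assumptions, not part of this commit; any LangChain chat model that supports bind_tools should work, and a valid TAVILY_API_KEY must be set in the environment:

    from langchain_groq import ChatGroq
    from src.langgraphagenticai.graph.graph_builder import GraphBuilder

    # Assumed model; swap in whatever chat model the app is configured with.
    llm = ChatGroq(model="llama-3.3-70b-versatile")
    graph = GraphBuilder(llm).setup_graph("Chatbot with Tools")

    # tools_condition routes to the "tools" node whenever the last AIMessage
    # carries tool calls; the tool results then loop back into "chatbot".
    result = graph.invoke({"messages": ["What is LangGraph?"]})
    for message in result["messages"]:
        print(type(message).__name__, ":", getattr(message, "content", ""))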
src/langgraphagenticai/nodes/__pycache__/chatbot_with_tool_node.cpython-312.pyc
ADDED
Binary file (2 kB).
src/langgraphagenticai/nodes/chatbot_with_tool_node.py
ADDED
@@ -0,0 +1,34 @@
+from src.langgraphagenticai.state.state import State
+
+
+class ChatbotWithToolNode:
+    """
+    Chatbot logic enhanced with tool integration
+    """
+    def __init__(self, model):
+        self.llm = model
+
+    def process(self, state: State) -> dict:
+        """
+        Processes the input state and generates a response with tool integration.
+        """
+        user_input = state["messages"] if state["messages"] else ""
+        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])
+
+        # Simulate tool-specific logic
+        tools_response = f"Tool integration for: '{user_input}'"
+        return {"messages": [llm_response, tools_response]}
+
+    def create_chatbot(self, tools):
+        """
+        Returns a chatbot node function.
+        """
+        llm_with_tools = self.llm.bind_tools(tools)
+
+        def chatbot_node(state: State):
+            """
+            Chatbot logic for processing the input state and returning a response
+            """
+            return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+        return chatbot_node
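The important call in create_chatbot is bind_tools, which attaches the tools' schemas to the model so it can emit structured tool calls; the returned closure is what graph_builder.py registers as the "chatbot" node (process is a separate helper and is not wired into the graph). A standalone sketch with a made-up tool and an assumed model, purely for illustration:

    from langchain_core.tools import tool
    from langchain_openai import ChatOpenAI
    from src.langgraphagenticai.nodes.chatbot_with_tool_node import ChatbotWithToolNode

    # Hypothetical tool, used only to show the node in isolation.
    @tool
    def add(a: int, b: int) -> int:
        """Add two integers."""
        return a + b

    llm = ChatOpenAI(model="gpt-4o-mini")  # assumed model; any bind_tools-capable chat model works
    node = ChatbotWithToolNode(llm).create_chatbot([add])

    # The node returns a dict with a single AIMessage; if the model chooses to
    # call the tool, that message carries tool_calls rather than text content.
    out = node({"messages": [("user", "What is 2 + 3?")]})
    print(out["messages"][-1].tool_calls)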
src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (220 Bytes).
src/langgraphagenticai/tools/__pycache__/search_tools.cpython-312.pyc
ADDED
Binary file (791 Bytes).
src/langgraphagenticai/tools/search_tools.py
ADDED
@@ -0,0 +1,13 @@
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langgraph.prebuilt import ToolNode
+
+def get_tools():
+    """
+    Returns the list of tools to be used in the chatbot
+    """
+    tools = [TavilySearchResults(max_results=2)]
+    return tools
+
+def create_tool_node(tools):
+    """creates and returns a tool node for the graph"""
+    return ToolNode(tools=tools)
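TavilySearchResults authenticates via the TAVILY_API_KEY environment variable, which is why load_ui.py (below) writes the user-supplied key into os.environ. A quick way to sanity-check these helpers outside the graph, assuming a real key is available:

    import os
    from src.langgraphagenticai.tools.search_tools import get_tools, create_tool_node

    os.environ.setdefault("TAVILY_API_KEY", "tvly-placeholder")  # placeholder; a real key is required

    tools = get_tools()
    tool_node = create_tool_node(tools)  # ToolNode that executes tool calls inside the graph

    # Each tool can also be invoked directly; TavilySearchResults takes a "query" input.
    print(tools[0].invoke({"query": "LangGraph tool calling"}))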
src/langgraphagenticai/ui/streamlit/__pycache__/display_result.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/streamlit/__pycache__/display_result.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlit/__pycache__/display_result.cpython-312.pyc differ
src/langgraphagenticai/ui/streamlit/__pycache__/load_ui.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/streamlit/__pycache__/load_ui.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlit/__pycache__/load_ui.cpython-312.pyc differ
src/langgraphagenticai/ui/streamlit/display_result.py
CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from langchain_core.messages import HumanMessage, AIMessage
+from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
 import json
 
 class DisplayResultStreamlit:
@@ -12,6 +12,7 @@ class DisplayResultStreamlit:
         usecase = self.usecase
         graph = self.graph
         user_message = self.user_message
+
         if usecase == "Basic Chatbot":
             print(usecase, graph, user_message)
             for event in graph.stream({'messages': ("user", user_message)}):
@@ -21,4 +22,24 @@ class DisplayResultStreamlit:
                 with st.chat_message("user"):
                     st.write(user_message)
                 with st.chat_message("assistant"):
-                    st.write(value["messages"].content)
+                    st.write(value["messages"].content)
+
+        if usecase == "Chatbot with Tools":
+            print(usecase, graph, user_message)
+            # Prepare state and invoke the graph
+            initial_state = {"messages": [user_message]}
+            response = graph.invoke(initial_state)
+            for message in response['messages']:
+                if type(message) == HumanMessage:
+                    with st.chat_message("user"):
+                        st.write(message.content)
+                elif type(message) == ToolMessage:
+                    with st.chat_message("ai"):
+                        st.write("Tool Call Start")
+                        st.write(message.content)
+                        st.write("Tool Call End")
+                elif type(message) == AIMessage and message.content:
+                    with st.chat_message("assistant"):
+                        st.write(message.content)
+
+
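For a tool-using turn, response['messages'] typically contains the original HumanMessage, an AIMessage whose tool_calls routed execution to the tools node, one ToolMessage per call, and a final AIMessage with the answer; the loop above renders each kind in its own chat bubble. A console-only sketch of the same traversal, using isinstance (which, unlike type() comparisons, also matches subclasses):

    from langchain_core.messages import AIMessage, HumanMessage, ToolMessage

    def print_messages(messages):
        """Console equivalent of the Streamlit rendering loop above."""
        for message in messages:
            if isinstance(message, HumanMessage):
                print("user:", message.content)
            elif isinstance(message, ToolMessage):
                print("tool:", message.content)
            elif isinstance(message, AIMessage) and message.content:
                print("assistant:", message.content)

    # print_messages(graph.invoke({"messages": [user_message]})["messages"])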
src/langgraphagenticai/ui/streamlit/load_ui.py
CHANGED
@@ -59,6 +59,14 @@ class LoadStreamlitUI:
         selected_usecase = st.selectbox("Select Use Case", usecase_options)
         self.user_controls["selected_usecase"] = selected_usecase
 
+        if self.user_controls["selected_usecase"] == "Chatbot with Tools":
+            # APIKey input
+            os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY_API_KEY", type="password")
+
+            # Validate API Key
+            if not self.user_controls["TAVILY_API_KEY"]:
+                st.warning("⚠️ Please enter your TAVILY_API_KEY to proceed. Don't have?, refer: https://app.tavily.com/home")
+
         if "state" not in st.session_state:
             st.session_state.state = self.initialize_session()
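The key is mirrored into os.environ so that TavilySearchResults, which is constructed without an explicit key in search_tools.py, can authenticate without the key being threaded through the graph state. A minimal illustration of that assumption:

    import os
    from langchain_community.tools.tavily_search import TavilySearchResults

    os.environ["TAVILY_API_KEY"] = "tvly-placeholder"  # stands in for the st.text_input value
    search = TavilySearchResults(max_results=2)        # picks the key up from the environment
    print(search.name)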
src/langgraphagenticai/ui/uiconfigfile.ini
CHANGED
@@ -1,5 +1,5 @@
 [DEFAULT]
 PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
 LLM_OPTIONS = Groq, OPENAI
-USECASE_OPTIONS = Basic Chatbot,
+USECASE_OPTIONS = Basic Chatbot, Chatbot with Tools
 GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama-3.3-70b-versatile, gemma2-9b-it