Commit 6f8e334 (verified), committed by S-Dreamer
Parent: ac91a9e

Update app.py

Files changed (1):
  1. app.py +82 -53
app.py CHANGED
@@ -1,18 +1,33 @@
  import os
- from typing import Optional
+ from typing import Optional, List, Dict, Any
  import gradio as gr
  import requests
  from smolagents import CodeAgent, Tool
  from smolagents.models import HfApiModel
  from smolagents.monitoring import LogLevel
  from gradio import ChatMessage
+ import logging
  from functools import lru_cache

+ # Configure logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
  DEFAULT_MODEL = "Qwen/Qwen2.5-Coder-32B-Instruct"
  HF_API_TOKEN = os.getenv("HF_TOKEN")

+ # Tool descriptions for the UI
+ TOOL_DESCRIPTIONS = {
+     "Hub Collections": "Add tool collections from Hugging Face Hub.",
+     "Spaces": "Add tools from Hugging Face Spaces.",
+ }
+
  @lru_cache(maxsize=128)
- def search_spaces(query, limit=1):
+ def search_spaces(query: str, limit: int = 1) -> Optional[Dict[str, str]]:
+     """
+     Search for Hugging Face Spaces using the API.
+     Returns the first result or None if no results.
+     """
      try:
          url = f"https://huggingface.co/api/spaces?search={query}&limit={limit}"
          response = requests.get(url, headers={"Authorization": f"Bearer {HF_API_TOKEN}"})
@@ -21,10 +36,14 @@ def search_spaces(query, limit=1):
          if not spaces:
              return None
          return extract_space_info(spaces[0])
-     except requests.RequestException:
+     except requests.RequestException as e:
+         logger.error(f"Error searching spaces: {e}")
          return None

- def extract_space_info(space):
+ def extract_space_info(space: Dict[str, Any]) -> Dict[str, str]:
+     """
+     Extracts space information from the API response.
+     """
      space_id = space["id"]
      title = space_id.split("/")[-1]
      description = f"Tool from {space_id}"
@@ -38,27 +57,33 @@ def extract_space_info(space):
          description = space["cardData"]["description"]
      return {"id": space_id, "title": title, "description": description}

- def get_space_metadata(space_id):
+ def get_space_metadata(space_id: str) -> Optional[Dict[str, str]]:
+     """
+     Get metadata for a specific Hugging Face Space.
+     """
      try:
          url = f"https://huggingface.co/api/spaces/{space_id}"
          response = requests.get(url, headers={"Authorization": f"Bearer {HF_API_TOKEN}"})
          response.raise_for_status()
          space = response.json()
          return extract_space_info(space)
-     except requests.RequestException:
+     except requests.RequestException as e:
+         logger.error(f"Error getting space metadata: {e}")
          return None

- def create_agent(model_name, space_tools=None):
+ def create_agent(model_name: str, space_tools: Optional[List[Dict[str, str]]] = None) -> Optional[CodeAgent]:
+     """
+     Create a CodeAgent with the specified model and tools.
+     """
      if not space_tools:
          space_tools = []
      try:
          tools = [
              Tool.from_space(
-                 t["id"],
-                 name=t.get("name", t["id"]),
-                 description=t.get("description", ""),
-             )
-             for t in space_tools
+                 tool_info["id"],
+                 name=tool_info.get("name", tool_info["id"]),
+                 description=tool_info.get("description", ""),
+             ) for tool_info in space_tools
          ]
          model = HfApiModel(model_id=model_name, token=HF_API_TOKEN)
          agent = CodeAgent(
@@ -67,39 +92,46 @@ def create_agent(model_name, space_tools=None):
              additional_authorized_imports=["PIL", "requests"],
              verbosity_level=LogLevel.DEBUG,
          )
+         logger.info(f"Agent created successfully with {len(tools)} tools")
          return agent
-     except:
-         try:
-             fallback_model = HfApiModel(
-                 model_id="Qwen/Qwen2.5-Coder-7B-Instruct", token=HF_API_TOKEN
-             )
-             agent = CodeAgent(
-                 tools=tools,
-                 model=fallback_model,
-                 additional_authorized_imports=["PIL", "requests"],
-                 verbosity_level=LogLevel.DEBUG,
-             )
-             return agent
-         except:
-             return None
+     except Exception as e:
+         logger.error(f"Error creating agent: {e}")
+         return create_fallback_agent(tools)

- def on_search_spaces(query):
+ def create_fallback_agent(tools: List[Tool]) -> Optional[CodeAgent]:
+     """
+     Create a fallback CodeAgent if the primary model fails.
+     """
+     try:
+         logger.info("Trying fallback model...")
+         fallback_model = HfApiModel(model_id="Qwen/Qwen2.5-Coder-7B-Instruct", token=HF_API_TOKEN)
+         agent = CodeAgent(
+             tools=tools,
+             model=fallback_model,
+             additional_authorized_imports=["PIL", "requests"],
+             verbosity_level=LogLevel.DEBUG,
+         )
+         logger.info("Agent created successfully with fallback model")
+         return agent
+     except Exception as e:
+         logger.error(f"Error creating agent with fallback model: {e}")
+         return None
+
+ # Event handler functions
+ def on_search_spaces(query: str) -> tuple:
      if not query:
          return "Please enter a search term.", "", "", ""
      try:
          space_info = search_spaces(query)
          if space_info is None:
              return "No spaces found.", "", "", ""
-         results_md = (
-             f"### Search Results:\n- ID: `{space_info['id']}`\n"
-             f"- Title: {space_info['title']}\n"
-             f"- Description: {space_info['description']}\n"
-         )
+         results_md = f"### Search Results:\n- ID: `{space_info['id']}`\n- Title: {space_info['title']}\n- Description: {space_info['description']}\n"
          return results_md, space_info["id"], space_info["title"], space_info["description"]
      except Exception as e:
+         logger.error(f"Error in search: {e}")
          return f"Error: {str(e)}", "", "", ""

- def on_validate_space(space_id):
+ def on_validate_space(space_id: str) -> tuple:
      if not space_id:
          return "Please enter a space ID or search term.", "", ""
      try:
@@ -108,20 +140,15 @@ def on_validate_space(space_id):
              space_info = search_spaces(space_id)
              if space_info is None:
                  return f"No spaces found for '{space_id}'.", "", ""
-             result_md = (
-                 f"### Found Space via Search:\n- ID: `{space_info['id']}`\n"
-                 f"- Title: {space_info['title']}\n- Description: {space_info['description']}\n"
-             )
+             result_md = f"### Found Space via Search:\n- ID: `{space_info['id']}`\n- Title: {space_info['title']}\n- Description: {space_info['description']}\n"
              return result_md, space_info["title"], space_info["description"]
-         result_md = (
-             f"### Space Validated Successfully:\n- ID: `{space_info['id']}`\n"
-             f"- Title: {space_info['title']}\n- Description: {space_info['description']}\n"
-         )
+         result_md = f"### Space Validated Successfully:\n- ID: `{space_info['id']}`\n- Title: {space_info['title']}\n- Description: {space_info['description']}\n"
          return result_md, space_info["title"], space_info["description"]
      except Exception as e:
+         logger.error(f"Error validating space: {e}")
          return f"Error: {str(e)}", "", ""

- def on_add_tool(space_id, space_name, space_description, current_tools):
+ def on_add_tool(space_id: str, space_name: str, space_description: str, current_tools: List[Dict[str, str]]) -> tuple:
      if not space_id:
          return current_tools, "Please enter a space ID."
      for tool in current_tools:
@@ -135,31 +162,30 @@ def on_add_tool(space_id, space_name, space_description, current_tools):
      updated_tools = current_tools + [new_tool]
      tools_md = "### Added Tools:\n"
      for i, tool in enumerate(updated_tools, 1):
-         tools_md += (
-             f"{i}. **{tool['name']}** (`{tool['id']}`)\n   {tool['description']}\n\n"
-         )
+         tools_md += f"{i}. **{tool['name']}** (`{tool['id']}`)\n   {tool['description']}\n\n"
      return updated_tools, tools_md

- def on_create_agent(model, space_tools):
+ def on_create_agent(model: str, space_tools: List[Dict[str, str]]) -> tuple:
      if not space_tools:
          return None, [], "", "Please add at least one tool before creating an agent.", "No agent created yet."
      try:
          agent = create_agent(model, space_tools)
          if agent is None:
              return None, [], "", "Failed to create agent. Please try again with different tools or model.", "No agent created yet."
-         tools_str = ", ".join([f"{t['name']} ({t['id']})" for t in space_tools])
+         tools_str = ", ".join([f"{tool['name']} ({tool['id']})" for tool in space_tools])
          agent_status = update_agent_status(agent)
          return agent, [], "", f"✅ Agent created successfully with {model}!\nTools: {tools_str}", agent_status
      except Exception as e:
+         logger.error(f"Error creating agent: {e}")
          return None, [], "", f"Error creating agent: {str(e)}", "No agent created yet."

- def add_user_message(message, chat_history):
+ def add_user_message(message: str, chat_history: List[ChatMessage]) -> tuple:
      if not message:
          return "", chat_history
      chat_history = chat_history + [ChatMessage(role="user", content=message)]
      return message, chat_history

- def stream_to_gradio(agent, task: str, reset_agent_memory: bool = False, additional_args: Optional[dict] = None):
+ def stream_to_gradio(agent: CodeAgent, task: str, reset_agent_memory: bool = False, additional_args: Optional[dict] = None):
      from smolagents.gradio_ui import pull_messages_from_step, handle_agent_output_types
      from smolagents.agent_types import AgentAudio, AgentImage, AgentText
      for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
@@ -176,7 +202,7 @@ def stream_to_gradio(agent, task: str, reset_agent_memory: bool = False, additio
      else:
          yield gr.ChatMessage(role="assistant", content=f"**Final answer:** {str(final_answer)}")

- def stream_agent_response(agent, message, chat_history):
+ def stream_agent_response(agent: CodeAgent, message: str, chat_history: List[ChatMessage]):
      if not message or agent is None:
          return chat_history
      yield chat_history
@@ -189,15 +215,18 @@ def stream_agent_response(agent, message, chat_history):
          chat_history = chat_history + [ChatMessage(role="assistant", content=error_msg)]
          yield chat_history

- def on_clear(agent=None):
+ def on_clear(agent: Optional[CodeAgent] = None) -> tuple:
      return agent, [], "", "Agent cleared. Create a new one to continue.", "", gr.update(interactive=False)

- def update_agent_status(agent):
+ def update_agent_status(agent: Optional[CodeAgent]) -> str:
      if agent is None:
          return "No agent created yet. Add a Space tool to get started."
      tools = agent.tools if hasattr(agent, "tools") else []
-     return f"Agent ready with {len(tools)} tools"
+     tool_count = len(tools)
+     status = f"Agent ready with {tool_count} tools"
+     return status

+ # Create the Gradio app
  with gr.Blocks(title="AI Agent Builder") as app:
      gr.Markdown("# AI Agent Builder with smolagents")
      gr.Markdown("Build your own AI agent by selecting tools from Hugging Face Spaces.")