themissingCRAM
committed on
Commit · ee14926
1 Parent(s): 3674844
test
app.py CHANGED
@@ -1,10 +1,36 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
+from smolagents import tool, CodeAgent, HfApiModel, GradioUI  # type: ignore
+
+# testing teste
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient(
+client = InferenceClient(
+    "HuggingFaceH4/zephyr-7b-beta", token=os.getenv("my_first_agents_hf_tokens")
+)
+
+
+def sql_engine(query: str) -> str:
+    """
+    Allows you to perform SQL queries on the table. Returns a string representation of the result.
+    The table is named 'receipts'. Its description is as follows:
+    Columns:
+      - receipt_id: INTEGER
+      - customer_name: VARCHAR(16)
+      - price: FLOAT
+      - tip: FLOAT
+
+    Args:
+        query: The query to perform. This should be correct SQL.
+    """
+    output = ""
+    with engine.connect() as con:
+        rows = con.execute(text(query))
+        for row in rows:
+            output += "\n" + str(row)
+    return output
 
 
 def respond(
@@ -27,7 +53,9 @@ def respond(
 
     response = ""
 
-    for message in client.chat_completion(
+    # agent.run("Can you give me the name of the client who got the most expensive receipt?")
+
+    for message in agent.chat_completion(
         messages,
         max_tokens=max_tokens,
         stream=True,
@@ -58,9 +86,15 @@ demo = gr.ChatInterface(
         ),
     ],
 )
-
-
 if __name__ == "__main__":
+    agent = CodeAgent(
+        tools=[sql_engine],
+        model=HfApiModel(
+            model_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
+            token=os.getenv("my_first_agents_hf_tokens"),
+        ),
+    )
+
     from sqlalchemy import (
         create_engine,
         MetaData,
@@ -73,16 +107,16 @@ if __name__ == "__main__":
         inspect,
         text,
     )
-
+
     engine = create_engine("sqlite:///:memory:")
     metadata_obj = MetaData()
-
+
     def insert_rows_into_table(rows, table, engine=engine):
         for row in rows:
             stmt = insert(table).values(**row)
             with engine.begin() as connection:
                 connection.execute(stmt)
-
+
     table_name = "receipts"
     receipts = Table(
         table_name,
@@ -93,12 +127,22 @@ if __name__ == "__main__":
         Column("tip", Float),
     )
     metadata_obj.create_all(engine)
-
+
     rows = [
        {"receipt_id": 1, "customer_name": "Alan Payne", "price": 12.06, "tip": 1.20},
        {"receipt_id": 2, "customer_name": "Alex Mason", "price": 23.86, "tip": 0.24},
-        {
-
+        {
+            "receipt_id": 3,
+            "customer_name": "Woodrow Wilson",
+            "price": 53.43,
+            "tip": 5.43,
+        },
+        {
+            "receipt_id": 4,
+            "customer_name": "Margaret James",
+            "price": 21.11,
+            "tip": 1.00,
+        },
     ]
     insert_rows_into_table(rows, receipts)
-
+    GradioUI(agent).launch()
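A minimal sketch of the new sql_engine tool exercised on its own, outside Gradio and outside the agent. It rebuilds the same in-memory receipts table that the diff constructs inside the if __name__ == "__main__": block (column definitions follow the tool's docstring; only the first two rows from the diff are inserted), and it assumes engine and text are module-level names — in the committed file they are created only inside the __main__ block, so sql_engine can work only after that block has run. The final query is the kind of SQL the agent is expected to generate for the question left commented out in respond().

from sqlalchemy import (
    Column,
    Float,
    Integer,
    MetaData,
    String,
    Table,
    create_engine,
    insert,
    text,
)

# In-memory SQLite database mirroring the one built in app.py's __main__ block.
engine = create_engine("sqlite:///:memory:")
metadata_obj = MetaData()

# Columns as described in the sql_engine docstring.
receipts = Table(
    "receipts",
    metadata_obj,
    Column("receipt_id", Integer),
    Column("customer_name", String(16)),
    Column("price", Float),
    Column("tip", Float),
)
metadata_obj.create_all(engine)

# Same per-row insert pattern as insert_rows_into_table in the diff.
for row in [
    {"receipt_id": 1, "customer_name": "Alan Payne", "price": 12.06, "tip": 1.20},
    {"receipt_id": 2, "customer_name": "Alex Mason", "price": 23.86, "tip": 0.24},
]:
    with engine.begin() as connection:
        connection.execute(insert(receipts).values(**row))


def sql_engine(query: str) -> str:
    """Run a SQL query against the receipts table and return the rows as one string."""
    output = ""
    with engine.connect() as con:
        for row in con.execute(text(query)):
            output += "\n" + str(row)
    return output


# The kind of query the agent should produce for
# "Can you give me the name of the client who got the most expensive receipt?"
print(sql_engine("SELECT customer_name, price FROM receipts ORDER BY price DESC LIMIT 1"))
# prints (after a leading newline): ('Alex Mason', 23.86)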
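For the agent wiring itself, a sketch of how the CodeAgent added in this commit is normally driven. Assumptions worth flagging: the agent is invoked through CodeAgent.run() and GradioUI(agent).launch(), smolagents' documented entry points, rather than the streaming agent.chat_completion(...) call the diff leaves inside respond(); the stand-in database is created with raw SQL purely so the snippet is self-contained; HfApiModel, the @tool decorator, and the my_first_agents_hf_tokens secret are taken straight from the diff (newer smolagents releases favor InferenceClientModel over HfApiModel).

import os

from smolagents import CodeAgent, GradioUI, HfApiModel, tool
from sqlalchemy import create_engine, text

# Stand-in receipts table so the sketch runs on its own; app.py builds it with
# SQLAlchemy Core inside the __main__ block instead.
engine = create_engine("sqlite:///:memory:")
with engine.begin() as con:
    con.execute(text(
        "CREATE TABLE receipts (receipt_id INTEGER, customer_name VARCHAR(16), "
        "price FLOAT, tip FLOAT)"
    ))
    con.execute(text(
        "INSERT INTO receipts VALUES (1, 'Alan Payne', 12.06, 1.20), "
        "(2, 'Alex Mason', 23.86, 0.24)"
    ))


@tool
def sql_engine(query: str) -> str:
    """
    Allows you to perform SQL queries on the table. Returns a string representation of the result.
    The table is named 'receipts'.

    Args:
        query: The query to perform. This should be correct SQL.
    """
    output = ""
    with engine.connect() as con:
        for row in con.execute(text(query)):
            output += "\n" + str(row)
    return output


agent = CodeAgent(
    tools=[sql_engine],
    model=HfApiModel(
        model_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
        token=os.getenv("my_first_agents_hf_tokens"),
    ),
)

# One-off question, matching the line commented out inside respond():
print(agent.run("Can you give me the name of the client who got the most expensive receipt?"))

# Or hand the agent to smolagents' built-in chat UI, as the commit does at the end of __main__:
# GradioUI(agent).launch()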