Update app.py
app.py CHANGED
@@ -8,13 +8,14 @@ import pandas as pd
 import gradio as gr

 # ─── Configuration ──────────────────────────────────────────────────────────
-API_URL
-
-
+API_URL  = os.getenv("API_URL", "https://agents-course-unit4-scoring.hf.space")
+SPACE_ID = os.getenv("SPACE_ID")  # e.g. "your-username/your-space"
+MODEL_ID = os.getenv("MODEL_ID", "meta-llama/Llama-2-7b-instruct")
+HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")

-if not HF_TOKEN:
+if not HF_TOKEN or not SPACE_ID:
     raise RuntimeError(
-        "❌ Please set HUGGINGFACEHUB_API_TOKEN in your Space Secrets."
+        "❌ Please set both SPACE_ID and HUGGINGFACEHUB_API_TOKEN in your Space Secrets."
     )

 HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
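The new configuration block reads everything from Space Secrets (environment variables). Before pushing, it can be worth checking that the token actually resolves to your account via the same whoami-v2 endpoint the app relies on; the snippet below is a minimal local sketch under that assumption, not part of app.py, and its file name is purely illustrative.

# check_secrets.py — hypothetical local smoke test, not part of this commit
import os
import requests

hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
space_id = os.getenv("SPACE_ID")

if not hf_token or not space_id:
    raise SystemExit("Set HUGGINGFACEHUB_API_TOKEN and SPACE_ID first.")

resp = requests.get(
    "https://huggingface.co/api/whoami-v2",
    headers={"Authorization": f"Bearer {hf_token}"},
    timeout=10,
)
resp.raise_for_status()
print("Token resolves to user:", resp.json().get("user", {}).get("username"))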
@@ -26,29 +27,20 @@ Build your agent, score **≥30%** to earn your Certificate,
 and see where you land on the Student Leaderboard!
 """

-# ─── Utility to fetch your HF username from the token ───────────────────────
-def get_hf_username():
-    try:
-        resp = requests.get("https://huggingface.co/api/whoami-v2", headers=HEADERS, timeout=10)
-        resp.raise_for_status()
-        data = resp.json()
-        # V2 returns {"user": { "id": ..., "username": ... }, ...}
-        return data.get("user", {}).get("username") or data.get("name")
-    except Exception as e:
-        print("[DEBUG] whoami failed:", e)
-        return None
-
 # ─── Simple HF-Inference Agent ──────────────────────────────────────────────
 class GAIAAgent:
     def __init__(self, model_id: str):
-        print(f"[DEBUG] Initializing with model
+        print(f"[DEBUG] Initializing GAIAAgent with model={model_id}")
         self.model_id = model_id
         self.headers = HEADERS

     def answer(self, prompt: str) -> str:
         payload = {
             "inputs": prompt,
-            "parameters": {
+            "parameters": {
+                "max_new_tokens": 512,
+                "temperature": 0.2
+            }
         }
         url = f"https://api-inference.huggingface.co/models/{self.model_id}"
         resp = requests.post(url, headers=self.headers, json=payload, timeout=60)
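This hunk stops right after the POST to the Inference API; the response handling (old lines 55-60 / new 47-52) sits outside the diff. Assuming the usual text-generation response shape of [{"generated_text": ...}], a typical unpacking helper would look roughly like this (a sketch, not the committed code):

# Hypothetical sketch of the response handling that falls outside this hunk;
# the committed lines may differ.
import requests

def parse_inference_response(resp: requests.Response) -> str:
    resp.raise_for_status()
    data = resp.json()
    # Text-generation models on the Inference API typically return
    # [{"generated_text": "..."}]; anything else is returned verbatim for debugging.
    if isinstance(data, list) and data and isinstance(data[0], dict) and "generated_text" in data[0]:
        return data[0]["generated_text"].strip()
    return str(data)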
@@ -61,45 +53,46 @@ class GAIAAgent:
 # ─── Gradio callback ────────────────────────────────────────────────────────
 def run_and_submit_all():
     try:
-        #
-
+        # 1) Fetch username via WhoAmI
+        who = requests.get("https://huggingface.co/api/whoami-v2", headers=HEADERS, timeout=10)
+        who.raise_for_status()
+        username = who.json().get("user", {}).get("username")
         if not username:
             return "❌ Could not fetch your HF username. Check your token.", pd.DataFrame()

-        #
+        # 2) Fetch GAIA questions
         q_resp = requests.get(f"{API_URL}/questions", timeout=15)
         q_resp.raise_for_status()
         questions = q_resp.json() or []
         if not questions:
             return "❌ No questions returned; check your API_URL.", pd.DataFrame()

-        #
+        # 3) Initialize and run agent
         agent = GAIAAgent(MODEL_ID)
-
-        # 3) Answer each
         results = []
         payload = []
-        for
-            tid =
-
+        for task in questions:
+            tid = task["task_id"]
+            q = task.get("question", "")
             try:
-                ans = agent.answer(
+                ans = agent.answer(q)
             except Exception as e:
                 ans = f"ERROR: {e}"
-            results.append({"Task ID": tid, "Question":
+            results.append({"Task ID": tid, "Question": q, "Answer": ans})
             payload.append({"task_id": tid, "submitted_answer": ans})
             time.sleep(0.5)

-        # 4) Submit
+        # 4) Submit answers (including agent_code)
         submission = {
             "username": username,
+            "agent_code": f"https://huggingface.co/spaces/{SPACE_ID}/tree/main",
             "answers": payload
         }
         s_resp = requests.post(f"{API_URL}/submit", json=submission, timeout=60)
         s_resp.raise_for_status()
         data = s_resp.json()

-        # 5) Build status
+        # 5) Build and return status + results table
         status = (
             f"✅ **Submission Successful!**\n\n"
             f"**User:** {data.get('username')}\n"
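For orientation, the shapes this callback assumes can be read directly off the fields it touches: each question object carries task_id and question, and the submission wraps username, agent_code, and a list of answers. A hypothetical example with placeholder values only (the scoring API may use additional fields not shown in the diff):

# Illustrative payload shapes inferred from the loop above; values are placeholders.
example_question = {
    "task_id": "task-001",
    "question": "Example question text",
}

example_submission = {
    "username": "your-username",
    "agent_code": "https://huggingface.co/spaces/your-username/your-space/tree/main",
    "answers": [
        {"task_id": "task-001", "submitted_answer": "example answer"},
    ],
}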
@@ -112,7 +105,7 @@ def run_and_submit_all():
     except Exception as e:
         tb = traceback.format_exc()
         print("[ERROR] Unhandled exception:\n", tb)
-        return
+        return f"❌ Unexpected error:\n{e}\n\nSee logs for details.", pd.DataFrame()

 # ─── Gradio UI ──────────────────────────────────────────────────────────────
 with gr.Blocks() as demo:
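The diff ends at the opening of the Blocks context, so the UI wiring itself is not shown. A plausible sketch of how run_and_submit_all could be hooked up, with purely illustrative component names, is:

# Hypothetical Gradio wiring; the committed layout is not part of this diff,
# so the component names below are illustrative only.
# run_and_submit_all is the callback defined in app.py above.
import gradio as gr

with gr.Blocks() as demo:
    run_button = gr.Button("Run Evaluation & Submit All Answers")
    status_box = gr.Markdown()
    results_table = gr.Dataframe(label="Questions and Answers")
    # run_and_submit_all returns (status_markdown, results_dataframe)
    run_button.click(fn=run_and_submit_all, outputs=[status_box, results_table])

if __name__ == "__main__":
    demo.launch()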