framsouza committed on
Commit a81d3df · verified · 1 Parent(s): 6de08e3

Update app.py

Files changed (1)
  1. app.py +26 -33
app.py CHANGED
```diff
@@ -8,13 +8,14 @@ import pandas as pd
 import gradio as gr
 
 # ─── Configuration ──────────────────────────────────────────────────────────
-API_URL = os.getenv("API_URL", "https://agents-course-unit4-scoring.hf.space")
-MODEL_ID = os.getenv("MODEL_ID", "meta-llama/Llama-2-7b-instruct")
-HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
+API_URL = os.getenv("API_URL", "https://agents-course-unit4-scoring.hf.space")
+SPACE_ID = os.getenv("SPACE_ID")  # e.g. "your-username/your-space"
+MODEL_ID = os.getenv("MODEL_ID", "meta-llama/Llama-2-7b-instruct")
+HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
 
-if not HF_TOKEN:
+if not HF_TOKEN or not SPACE_ID:
     raise RuntimeError(
-        "❌ Please set HUGGINGFACEHUB_API_TOKEN in your Space Secrets."
+        "❌ Please set both SPACE_ID and HUGGINGFACEHUB_API_TOKEN in your Space Secrets."
     )
 
 HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
@@ -26,29 +27,20 @@ Build your agent, score **≥30%** to earn your Certificate,
 and see where you land on the Student Leaderboard!
 """
 
-# ─── Utility to fetch your HF username from the token ────────────────────────
-def get_hf_username():
-    try:
-        resp = requests.get("https://huggingface.co/api/whoami-v2", headers=HEADERS, timeout=10)
-        resp.raise_for_status()
-        data = resp.json()
-        # V2 returns {"user": { "id": ..., "username": ... }, ...}
-        return data.get("user", {}).get("username") or data.get("name")
-    except Exception as e:
-        print("[DEBUG] whoami failed:", e)
-        return None
-
 # ─── Simple HF-Inference Agent ─────────────────────────────────────────────
 class GAIAAgent:
     def __init__(self, model_id: str):
-        print(f"[DEBUG] Initializing with model {model_id}")
+        print(f"[DEBUG] Initializing GAIAAgent with model={model_id}")
         self.model_id = model_id
         self.headers = HEADERS
 
     def answer(self, prompt: str) -> str:
         payload = {
             "inputs": prompt,
-            "parameters": {"max_new_tokens": 512, "temperature": 0.2}
+            "parameters": {
+                "max_new_tokens": 512,
+                "temperature": 0.2
+            }
         }
         url = f"https://api-inference.huggingface.co/models/{self.model_id}"
         resp = requests.post(url, headers=self.headers, json=payload, timeout=60)
@@ -61,45 +53,46 @@ class GAIAAgent:
 # ─── Gradio callback ────────────────────────────────────────────────────────
 def run_and_submit_all():
     try:
-        # 0) Resolve username
-        username = get_hf_username()
+        # 1) Fetch username via WhoAmI
+        who = requests.get("https://huggingface.co/api/whoami-v2", headers=HEADERS, timeout=10)
+        who.raise_for_status()
+        username = who.json().get("user", {}).get("username")
         if not username:
             return "❌ Could not fetch your HF username. Check your token.", pd.DataFrame()
 
-        # 1) Fetch GAIA questions
+        # 2) Fetch GAIA questions
         q_resp = requests.get(f"{API_URL}/questions", timeout=15)
         q_resp.raise_for_status()
         questions = q_resp.json() or []
         if not questions:
            return "❌ No questions returned; check your API_URL.", pd.DataFrame()
 
-        # 2) Init agent
+        # 3) Initialize and run agent
         agent = GAIAAgent(MODEL_ID)
-
-        # 3) Answer each
         results = []
         payload = []
-        for item in questions:
-            tid = item.get("task_id")
-            qtxt = item.get("question", "")
+        for task in questions:
+            tid = task["task_id"]
+            q = task.get("question", "")
             try:
-                ans = agent.answer(qtxt)
+                ans = agent.answer(q)
            except Exception as e:
                ans = f"ERROR: {e}"
-            results.append({"Task ID": tid, "Question": qtxt, "Answer": ans})
+            results.append({"Task ID": tid, "Question": q, "Answer": ans})
             payload.append({"task_id": tid, "submitted_answer": ans})
             time.sleep(0.5)
 
-        # 4) Submit all answers
+        # 4) Submit answers (including agent_code)
         submission = {
             "username": username,
+            "agent_code": f"https://huggingface.co/spaces/{SPACE_ID}/tree/main",
             "answers": payload
         }
         s_resp = requests.post(f"{API_URL}/submit", json=submission, timeout=60)
         s_resp.raise_for_status()
         data = s_resp.json()
 
-        # 5) Build status text
+        # 5) Build and return status + results table
         status = (
             f"✅ **Submission Successful!**\n\n"
             f"**User:** {data.get('username')}\n"
@@ -112,7 +105,7 @@ def run_and_submit_all():
     except Exception as e:
         tb = traceback.format_exc()
         print("[ERROR] Unhandled exception:\n", tb)
-        return (f"❌ Unexpected error:\n{e}\n\nSee logs for details."), pd.DataFrame()
+        return f"❌ Unexpected error:\n{e}\n\nSee logs for details.", pd.DataFrame()
 
 # ─── Gradio UI ──────────────────────────────────────────────────────────────
 with gr.Blocks() as demo:
```
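For anyone adapting this Space, here is a minimal sketch of how the pieces touched by this commit fit together outside Gradio: it reads the same secrets (`HUGGINGFACEHUB_API_TOKEN` plus the new `SPACE_ID`), resolves the username via `whoami-v2` the way `run_and_submit_all()` now does, calls the Inference API as `GAIAAgent.answer()` does, and builds the submission body with the new `agent_code` field. The response parsing (`[{"generated_text": ...}]`) is an assumption about the text-generation output shape; `app.py`'s own parsing of `resp.json()` is outside the hunks shown above.

```python
# Local smoke test — a sketch, not part of app.py.
# Assumes the same env vars the Space uses; the Inference API response shape
# ([{"generated_text": ...}]) is an assumption, since app.py's own parsing of
# resp.json() is outside the hunks shown in this commit.
import os
import requests

API_URL  = os.getenv("API_URL", "https://agents-course-unit4-scoring.hf.space")
MODEL_ID = os.getenv("MODEL_ID", "meta-llama/Llama-2-7b-instruct")
SPACE_ID = os.getenv("SPACE_ID")                  # e.g. "your-username/your-space"
HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
HEADERS  = {"Authorization": f"Bearer {HF_TOKEN}"}

if not HF_TOKEN or not SPACE_ID:
    raise RuntimeError("Export HUGGINGFACEHUB_API_TOKEN and SPACE_ID first.")

# 1) Resolve the username the same way run_and_submit_all() now does.
who = requests.get("https://huggingface.co/api/whoami-v2", headers=HEADERS, timeout=10)
who.raise_for_status()
username = who.json().get("user", {}).get("username")
print("username:", username)

# 2) Answer a single GAIA question through the Inference API,
#    mirroring the request GAIAAgent.answer() builds.
question = requests.get(f"{API_URL}/questions", timeout=15).json()[0]
resp = requests.post(
    f"https://api-inference.huggingface.co/models/{MODEL_ID}",
    headers=HEADERS,
    json={
        "inputs": question.get("question", ""),
        "parameters": {"max_new_tokens": 512, "temperature": 0.2},
    },
    timeout=60,
)
resp.raise_for_status()
answer = resp.json()[0].get("generated_text", "")  # assumed output shape

# 3) Shape of the body POSTed to {API_URL}/submit after this commit.
#    Printed here rather than submitted, to avoid overwriting a real score.
submission = {
    "username": username,
    "agent_code": f"https://huggingface.co/spaces/{SPACE_ID}/tree/main",
    "answers": [{"task_id": question["task_id"], "submitted_answer": answer}],
}
print(submission)
```

Running this once with the secrets exported locally confirms the token and `SPACE_ID` are picked up before pushing the Space.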