UntilDot committed (verified)
Commit a84172d · 1 Parent(s): 718aa48

Upload 10 files

Files changed (3)
  1. llm/agents.py +46 -4
  2. static/script.js +24 -5
  3. templates/index.html +137 -6
llm/agents.py CHANGED
@@ -1,9 +1,52 @@
  import asyncio
+ import httpx
+ import os

+ OPENROUTER_BASE = "https://openrouter.ai/api/v1/chat/completions"
+ HEADERS = {
+     "Authorization": f"Bearer {os.getenv('OPENROUTER_API_KEY')}",
+     "Content-Type": "application/json",
+ }
+
+ ALLOWED_MODELS = [
+     "deepseek/deepseek-chat-v3-0324:free",
+     "google/gemini-2.0-flash-exp:free",
+     "meta-llama/llama-4-maverick:free",
+     "microsoft/mai-ds-r1:free",
+     "meta-llama/llama-4-scout:free",
+     "google/gemma-3-27b-it:free",
+     "qwen/qwq-32b:free",
+     "qwen/qwen2.5-vl-72b-instruct:free",
+     "qwen/qwen-2.5-72b-instruct:free",
+     "google/gemini-2.5-pro-exp-03-25:free",
+     "deepseek/deepseek-r1:free",
+ ]
+
+ async def call_openrouter(model: str, prompt: str) -> str:
+     body = {
+         "model": model,
+         "messages": [{"role": "user", "content": prompt}],
+         "temperature": 0.7,
+     }
+     async with httpx.AsyncClient(timeout=30) as client:
+         response = await client.post(OPENROUTER_BASE, headers=HEADERS, json=body)
+         response.raise_for_status()
+         return response.json()["choices"][0]["message"]["content"]
+
  async def query_llm_agent(name: str, prompt: str, settings: dict) -> str:
-     # Placeholder for real LLM API calls (OpenAI, Claude, etc.)
-     await asyncio.sleep(0.5)  # Simulate network latency
-     return f"[{name}] thinks: '{prompt[::-1]}'"  # Reverse input for mock
+     selected_model = settings.get("models", {}).get(name)
+
+     if not selected_model:
+         return f"[{name}] No model selected."
+
+     if selected_model not in ALLOWED_MODELS:
+         return f"[{name}] Model '{selected_model}' is not supported."
+
+     try:
+         response = await call_openrouter(selected_model, prompt)
+         return f"[{name}] {response}"
+     except Exception as e:
+         return f"[{name}] Error: {str(e)}"

  async def query_all_llms(prompt: str, settings: dict) -> list:
      agents = ["LLM-A", "LLM-B", "LLM-C"]
@@ -11,6 +54,5 @@ async def query_all_llms(prompt: str, settings: dict) -> list:
      results = await asyncio.gather(*tasks)
      return results

- # Wrapper for sync Flask usage
  def query_all_llms_sync(prompt: str, settings: dict) -> list:
      return asyncio.run(query_all_llms(prompt, settings))
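
The new agent code assumes an OPENROUTER_API_KEY in the environment and a Flask app that forwards the frontend's {prompt, settings} payload to query_all_llms_sync; that app is not among the files changed here. The sketch below shows one way such a /chat route could look (the module name app.py, the route, and the joined response string are assumptions inferred from static/script.js, not part of this commit).

    # Hypothetical app.py sketch (not in this diff): bridges the frontend's
    # POST /chat payload to the sync wrapper exported by llm/agents.py.
    from flask import Flask, jsonify, request

    from llm.agents import query_all_llms_sync

    app = Flask(__name__)


    @app.route("/chat", methods=["POST"])
    def chat():
        data = request.get_json(silent=True) or {}
        prompt = data.get("prompt", "")
        settings = data.get("settings", {})  # {"models": {"LLM-A": "...", ...}}
        results = query_all_llms_sync(prompt, settings)
        # script.js renders data.response, so collapse the three agent replies.
        return jsonify({"response": "\n\n".join(results)})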
static/script.js CHANGED
@@ -1,8 +1,9 @@
  // === Theme Toggle ===
  const themeToggle = document.getElementById("themeToggle");
+ const toggleConfig = document.getElementById("toggleConfig");
+ const configPanel = document.getElementById("configPanel");
  const html = document.documentElement;

- // Load from localStorage or system preference
  function setInitialTheme() {
    const savedTheme = localStorage.getItem("theme");
    if (savedTheme === "dark") {
@@ -10,7 +11,6 @@ function setInitialTheme() {
    } else if (savedTheme === "light") {
      html.classList.remove("dark");
    } else {
-     // Auto-detect
      const prefersDark = window.matchMedia(
        "(prefers-color-scheme: dark)",
      ).matches;
@@ -21,17 +21,24 @@ function setInitialTheme() {

  setInitialTheme();

- // Toggle theme
  themeToggle.addEventListener("click", () => {
    const isDark = html.classList.toggle("dark");
    localStorage.setItem("theme", isDark ? "dark" : "light");
  });

+ toggleConfig.addEventListener("click", () => {
+   configPanel.classList.toggle("hidden");
+ });
+
  // === Chat Handling ===
  const chatForm = document.getElementById("chatForm");
  const userInput = document.getElementById("userInput");
  const chatContainer = document.getElementById("chatContainer");

+ const modelA = document.getElementById("modelA");
+ const modelB = document.getElementById("modelB");
+ const modelC = document.getElementById("modelC");
+
  function appendMessage(role, text) {
    const div = document.createElement("div");
    div.className = `p-3 rounded shadow max-w-2xl ${role === "user" ? "bg-blue text-fg0 self-end" : "bg-green text-fg0 self-start"}`;
@@ -44,15 +51,24 @@ chatForm.addEventListener("submit", async (e) => {
    e.preventDefault();
    const prompt = userInput.value.trim();
    if (!prompt) return;
+
    appendMessage("user", prompt);
    userInput.value = "";

    appendMessage("bot", "Thinking...");

+   const settings = {
+     models: {
+       "LLM-A": modelA.value,
+       "LLM-B": modelB.value,
+       "LLM-C": modelC.value,
+     },
+   };
+
    const response = await fetch("/chat", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
-     body: JSON.stringify({ prompt }),
+     body: JSON.stringify({ prompt, settings }),
    });

    chatContainer.lastChild.remove(); // remove 'Thinking...'
@@ -61,6 +77,9 @@ chatForm.addEventListener("submit", async (e) => {
      const data = await response.json();
      appendMessage("bot", data.response);
    } else {
-     appendMessage("bot", "An error occurred. Please try again.");
+     appendMessage(
+       "bot",
+       "An error occurred. Please check your model selections.",
+     );
    }
  });
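
For a quick end-to-end check of the new request shape, the body that static/script.js now builds can be replayed without the browser. This is only a throwaway sketch: it assumes the Flask app from the earlier sketch is serving on localhost:5000, and it reuses the httpx dependency the backend already needs.

    # Hypothetical smoke test (not in this diff): POST the same JSON shape that
    # the submit handler in static/script.js assembles from the three selects.
    import httpx

    payload = {
        "prompt": "Say hello from each agent.",
        "settings": {
            "models": {
                "LLM-A": "deepseek/deepseek-chat-v3-0324:free",
                "LLM-B": "qwen/qwq-32b:free",
                "LLM-C": "google/gemma-3-27b-it:free",
            }
        },
    }

    response = httpx.post("http://localhost:5000/chat", json=payload, timeout=120)
    response.raise_for_status()
    print(response.json()["response"])  # script.js displays this same field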
templates/index.html CHANGED
@@ -14,14 +14,145 @@
    <div class="min-h-screen flex flex-col justify-between p-4">
      <header class="flex items-center justify-between mb-4">
        <h1 class="text-2xl font-bold">MoA Chat</h1>
-       <button
-         id="themeToggle"
-         class="bg-blue px-3 py-1 rounded text-fg0 hover:bg-purple transition"
-       >
-         Toggle Theme
-       </button>
+       <div class="space-x-2">
+         <button
+           id="toggleConfig"
+           class="bg-blue px-3 py-1 rounded text-fg0 hover:bg-purple transition"
+         >
+           Config
+         </button>
+         <button
+           id="themeToggle"
+           class="bg-blue px-3 py-1 rounded text-fg0 hover:bg-purple transition"
+         >
+           Theme
+         </button>
+       </div>
      </header>

+     <div id="configPanel" class="bg1 rounded p-4 mb-4 hidden">
+       <h2 class="text-lg font-semibold mb-2">Model Selection</h2>
+       <div class="grid grid-cols-1 md:grid-cols-3 gap-4">
+         <div>
+           <label for="modelA" class="block mb-1">LLM-A</label>
+           <select
+             id="modelA"
+             class="w-full rounded p-2 bg-statusline1"
+           >
+             <option value="deepseek/deepseek-chat-v3-0324">
+               DeepSeek Chat v3
+             </option>
+             <option value="google/gemini-2.0-flash-exp">
+               Gemini 2.0 Flash
+             </option>
+             <option value="meta-llama/llama-4-maverick">
+               LLaMA 4 Maverick
+             </option>
+             <option value="microsoft/mai-ds-r1">
+               MAI DS R1
+             </option>
+             <option value="meta-llama/llama-4-scout">
+               LLaMA 4 Scout
+             </option>
+             <option value="google/gemma-3-27b-it">
+               Gemma 3 27B
+             </option>
+             <option value="qwen/qwq-32b">Qwen QWQ 32B</option>
+             <option value="qwen/qwen2.5-vl-72b-instruct">
+               Qwen2.5 VL 72B
+             </option>
+             <option value="qwen/qwen-2.5-72b-instruct">
+               Qwen 2.5 72B
+             </option>
+             <option value="google/gemini-2.5-pro-exp-03-25">
+               Gemini 2.5 Pro
+             </option>
+             <option value="deepseek/deepseek-r1">
+               DeepSeek R1
+             </option>
+           </select>
+         </div>
+         <div>
+           <label for="modelB" class="block mb-1">LLM-B</label>
+           <select
+             id="modelB"
+             class="w-full rounded p-2 bg-statusline1"
+           >
+             <option value="deepseek/deepseek-chat-v3-0324">
+               DeepSeek Chat v3
+             </option>
+             <option value="google/gemini-2.0-flash-exp">
+               Gemini 2.0 Flash
+             </option>
+             <option value="meta-llama/llama-4-maverick">
+               LLaMA 4 Maverick
+             </option>
+             <option value="microsoft/mai-ds-r1">
+               MAI DS R1
+             </option>
+             <option value="meta-llama/llama-4-scout">
+               LLaMA 4 Scout
+             </option>
+             <option value="google/gemma-3-27b-it">
+               Gemma 3 27B
+             </option>
+             <option value="qwen/qwq-32b">Qwen QWQ 32B</option>
+             <option value="qwen/qwen2.5-vl-72b-instruct">
+               Qwen2.5 VL 72B
+             </option>
+             <option value="qwen/qwen-2.5-72b-instruct">
+               Qwen 2.5 72B
+             </option>
+             <option value="google/gemini-2.5-pro-exp-03-25">
+               Gemini 2.5 Pro
+             </option>
+             <option value="deepseek/deepseek-r1">
+               DeepSeek R1
+             </option>
+           </select>
+         </div>
+         <div>
+           <label for="modelC" class="block mb-1">LLM-C</label>
+           <select
+             id="modelC"
+             class="w-full rounded p-2 bg-statusline1"
+           >
+             <option value="deepseek/deepseek-chat-v3-0324">
+               DeepSeek Chat v3
+             </option>
+             <option value="google/gemini-2.0-flash-exp">
+               Gemini 2.0 Flash
+             </option>
+             <option value="meta-llama/llama-4-maverick">
+               LLaMA 4 Maverick
+             </option>
+             <option value="microsoft/mai-ds-r1">
+               MAI DS R1
+             </option>
+             <option value="meta-llama/llama-4-scout">
+               LLaMA 4 Scout
+             </option>
+             <option value="google/gemma-3-27b-it">
+               Gemma 3 27B
+             </option>
+             <option value="qwen/qwq-32b">Qwen QWQ 32B</option>
+             <option value="qwen/qwen2.5-vl-72b-instruct">
+               Qwen2.5 VL 72B
+             </option>
+             <option value="qwen/qwen-2.5-72b-instruct">
+               Qwen 2.5 72B
+             </option>
+             <option value="google/gemini-2.5-pro-exp-03-25">
+               Gemini 2.5 Pro
+             </option>
+             <option value="deepseek/deepseek-r1">
+               DeepSeek R1
+             </option>
+           </select>
+         </div>
+       </div>
+     </div>
+
      <main
        id="chatContainer"
        class="flex-1 overflow-y-auto bg1 p-4 rounded shadow-md space-y-4"