Alina Lozovskaya committed
Commit 4a9b060 · 1 parent: 8feb093

Add user info
yourbench_space/app.py CHANGED (+10 -7)

@@ -1,6 +1,6 @@
 import os
 import sys
-from huggingface_hub import HfApi
+from huggingface_hub import HfApi, whoami
 import requests
 import pathlib
 import subprocess
@@ -8,7 +8,6 @@ import shutil
 import io
 import yaml
 import gradio as gr
-from huggingface_hub import whoami
 from loguru import logger
 from yourbench.pipeline import run_pipeline
 
@@ -142,9 +141,13 @@ def hello(profile: gr.OAuthProfile | None) -> str:
 def update_hf_org_dropdown(oauth_token: gr.OAuthToken | None) -> str:
     if oauth_token is None:
         print("Please deploy this on Spaces and log in to list organizations.")
-        return
-
-
+        return list()
+    user_info = whoami(oauth_token.token)
+
+    org_names = [org["name"] for org in user_info["orgs"]]
+    user_name = user_info["name"]
+    all_orgs = [user_name].extend(org_names)
+    return gr.Dropdown(all_orgs, label="Organization")
 
 config_output = gr.Code(label="Generated Config", language="yaml")
 
@@ -158,10 +161,10 @@ with gr.Blocks() as app:
     with gr.Tab("Configuration"):
         model_name = gr.Textbox(label="Model Name")
 
-        hf_org_dropdown = gr.Dropdown(list(), label="Organization")
+        hf_org_dropdown = gr.Dropdown(list(), label="Organization", allow_custom_value=True)
         app.load(update_hf_org_dropdown, inputs=None, outputs=hf_org_dropdown)
 
-        provider = gr.Dropdown(["openrouter", "openai", "huggingface"], value="huggingface", label="Provider")
+        provider = gr.Dropdown(["openrouter", "openai", "huggingface"], value="huggingface", label="Provider", allow_custom_value=True)
         base_url = gr.Textbox(label="Base URL")
         api_key = gr.Textbox(label="API Key")
         max_concurrent_requests = gr.Dropdown([8, 16, 32], value=16, label="Max Concurrent Requests")
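
Note on the new update_hf_org_dropdown body: list.extend() mutates the list in place and returns None, so the committed line all_orgs = [user_name].extend(org_names) leaves all_orgs set to None and the refreshed dropdown ends up with no choices. Below is a minimal sketch of the intended list construction, not the committed code; it assumes, as the diff already does, that whoami() returns a dict with "name" and "orgs" keys, and it changes the return annotation to gr.Dropdown since the function returns a component rather than a string.

import gradio as gr
from huggingface_hub import whoami


def update_hf_org_dropdown(oauth_token: gr.OAuthToken | None) -> gr.Dropdown:
    # Without a login token there is nothing to list; return an empty dropdown.
    if oauth_token is None:
        print("Please deploy this on Spaces and log in to list organizations.")
        return gr.Dropdown([], label="Organization")

    user_info = whoami(oauth_token.token)

    # Concatenate instead of extend(): extend() returns None, not the combined list.
    org_names = [org["name"] for org in user_info["orgs"]]
    all_orgs = [user_info["name"]] + org_names
    return gr.Dropdown(choices=all_orgs, label="Organization")

The allow_custom_value=True flags added to the two Dropdown components let users type an entry that is not in the current choices list; without the flag, Gradio constrains the dropdown to its listed choices and warns when the value falls outside them, which matters here because hf_org_dropdown starts empty and is only populated after app.load() runs.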