"""
Gradio front-end for the OneKE HuggingFace Space demo.
"""
import gradio as gr
import json
import re
from models import ChatGPT, DeepSeek, LLaMA, Qwen, MiniCPM, ChatGLM, BaseEngine
from pipeline import Pipeline
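# Built-in examples cycled through by the "Quick Start with an Example" button.
# Each entry pre-fills task, mode, input source (text or file), and the optional
# constraint / in-context "truth" fields.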
examples = [
{
"task": "NER",
"mode": "quick",
"use_file": False,
"text": "Finally, every other year , ELRA organizes a major conference LREC , the International Language Resources and Evaluation Conference .",
"instruction": "",
"constraint": """["algorithm", "conference", "else", "product", "task", "field", "metrics", "organization", "researcher", "program language", "country", "location", "person", "university"]""",
"file_path": None,
"update_case": False,
"truth": "",
},
{
"task": "Base",
"mode": "quick",
"use_file": True,
"file_path": "data/input_files/Tulsi_Gabbard_News.html",
"instruction": "Extract key information from the given text.",
"constraint": "",
"text": "",
"update_case": False,
"truth": "",
},
{
"task": "RE",
"mode": "quick",
"use_file": False,
"text": "The aid group Doctors Without Borders said that since Saturday , more than 275 wounded people had been admitted and treated at Donka Hospital in the capital of Guinea , Conakry .",
"instruction": "",
"constraint": """["nationality", "country capital", "place of death", "children", "location contains", "place of birth", "place lived", "administrative division of country", "country of administrative divisions", "company", "neighborhood of", "company founders"]""",
"file_path": None,
"update_case": True,
"truth": """{"relation_list": [{"head": "Guinea", "tail": "Conakry", "relation": "country capital"}]}""",
},
{
"task": "EE",
"mode": "standard",
"use_file": False,
"text": "The file suggested to the user contains no software related to video streaming and simply carries the malicious payload that later compromises victim \u2019s account and sends out the deceptive messages to all victim \u2019s contacts .",
"instruction": "",
"constraint": """{"phishing": ["damage amount", "attack pattern", "tool", "victim", "place", "attacker", "purpose", "trusted entity", "time"], "data breach": ["damage amount", "attack pattern", "number of data", "number of victim", "tool", "compromised data", "victim", "place", "attacker", "purpose", "time"], "ransom": ["damage amount", "attack pattern", "payment method", "tool", "victim", "place", "attacker", "price", "time"], "discover vulnerability": ["vulnerable system", "vulnerability", "vulnerable system owner", "vulnerable system version", "supported platform", "common vulnerabilities and exposures", "capabilities", "time", "discoverer"], "patch vulnerability": ["vulnerable system", "vulnerability", "issues addressed", "vulnerable system version", "releaser", "supported platform", "common vulnerabilities and exposures", "patch number", "time", "patch"]}""",
"file_path": None,
"update_case": False,
"truth": "",
},
{
"task": "Triple",
"mode": "quick",
"use_file": True,
"file_path": "data/input_files/Artificial_Intelligence_Wikipedia.txt",
"instruction": "",
"constraint": """[["Person", "Place", "Event", "property"], ["Interpersonal", "Located", "Ownership", "Action"]]""",
"text": "",
"update_case": False,
"truth": "",
},
{
"task": "Base",
"mode": "quick",
"use_file": True,
"file_path": "data/input_files/Harry_Potter_Chapter1.pdf",
"instruction": "Extract main characters and the background setting from this chapter.",
"constraint": "",
"text": "",
"update_case": False,
"truth": "",
},
]
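# Index of the next example shown by the Quick Start button (advanced round-robin).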
example_start_index = 0
def create_interface():
with gr.Blocks(title="OneKE Demo", theme=gr.themes.Glass(text_size="lg")) as demo:
gr.HTML("""
<div style="text-align:center;">
<p align="center">
<a>
<img src="https://raw.githubusercontent.com/zjunlp/OneKE/refs/heads/main/figs/logo.png" width="240"/>
</a>
</p>
<h1>OneKE: A Dockerized Schema-Guided LLM Agent-based Knowledge Extraction System</h1>
<p>
🏠[<a href="https://oneke.openkg.cn/" target="_blank">Home</a>]
📹[<a href="http://oneke.openkg.cn/demo.mp4" target="_blank">Video</a>]
📄[<a href="https://arxiv.org/abs/2412.20005v2" target="_blank">Paper</a>]
💻[<a href="https://github.com/zjunlp/OneKE" target="_blank">Code</a>]
</p>
</div>
""")
example_button_gr = gr.Button("🎲 Quick Start with an Example 🎲")
with gr.Row():
with gr.Column():
model_gr = gr.Dropdown(
label="πͺ Select your Model",
choices=["deepseek-chat", "deepseek-reasoner",
"gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o",
],
value="deepseek-chat",
)
api_key_gr = gr.Textbox(
label="π Enter your API-Key",
placeholder="Please enter your API-Key from ChatGPT or DeepSeek.",
type="password",
)
base_url_gr = gr.Textbox(
label="π Enter your Base-URL",
placeholder="Please leave this field empty if using the default Base-URL.",
)
with gr.Column():
task_gr = gr.Dropdown(
label="π― Select your Task",
choices=["Base", "NER", "RE", "EE", "Triple"],
value="Base",
)
mode_gr = gr.Dropdown(
label="π§ Select your Mode",
choices=["quick", "standard", "customized"],
value="quick",
)
schema_agent_gr = gr.Dropdown(choices=["Not Required", "get_default_schema", "get_deduced_schema"], value="Not Required", label="🤖 Select your Schema-Agent", visible=False)
extraction_Agent_gr = gr.Dropdown(choices=["Not Required", "extract_information_direct", "extract_information_with_case"], value="Not Required", label="🤖 Select your Extraction-Agent", visible=False)
reflection_agent_gr = gr.Dropdown(choices=["Not Required", "reflect_with_case"], value="Not Required", label="🤖 Select your Reflection-Agent", visible=False)
use_file_gr = gr.Checkbox(label="📂 Use File", value=True)
file_path_gr = gr.File(label="📁 Upload a File", visible=True)
text_gr = gr.Textbox(label="📖 Text", lines=5, placeholder="Please enter the text to be processed.", visible=False)
instruction_gr = gr.Textbox(label="🕹️ Instruction", lines=3, placeholder="Please enter any type of information you want to extract here, for example: Help me extract all the place names.", visible=True)
constraint_gr = gr.Textbox(label="🕹️ Constraint", lines=3, placeholder="Please specify the types of entities, relations, events, or other relevant attributes in list format as per the task requirements.", visible=False)
update_case_gr = gr.Checkbox(label="💰 Update Case", value=False)
# update_schema_gr = gr.Checkbox(label="π Update Schema", value=False)
truth_gr = gr.Textbox(label="🪙 Truth", lines=2, placeholder="""Please enter the truth you want the LLM to know, for example: {"relation_list": [{"head": "Guinea", "tail": "Conakry", "relation": "country capital"}]}""", visible=False)
# selfschema_gr = gr.Textbox(label="π Schema", lines=5, placeholder="Enter your New Schema", visible=False, interactive=True)
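# Map a model name to its engine class from models.py; unrecognized names fall back to BaseEngine.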
def get_model_category(model_name_or_path):
if model_name_or_path in ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o3-mini"]:
return ChatGPT
elif model_name_or_path in ["deepseek-chat", "deepseek-reasoner"]:
return DeepSeek
elif re.search(r'(?i)llama', model_name_or_path):
return LLaMA
elif re.search(r'(?i)qwen', model_name_or_path):
return Qwen
elif re.search(r'(?i)minicpm', model_name_or_path):
return MiniCPM
elif re.search(r'(?i)chatglm', model_name_or_path):
return ChatGLM
else:
return BaseEngine
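# The three agent dropdowns are only shown (and only take effect) in "customized" mode.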
def customized_mode(mode):
if mode == "customized":
return gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
else:
return gr.update(visible=False, value="Not Required"), gr.update(visible=False, value="Not Required"), gr.update(visible=False, value="Not Required")
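# Show either the Instruction box (Base task) or the Constraint box (NER/RE/EE/Triple),
# with a task-specific placeholder.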
def update_fields(task):
if task == "Base" or task == "":
return gr.update(visible=True, label="πΉοΈ Instruction", lines=3,
placeholder="Please enter any type of information you want to extract here, for example: Help me extract all the place names."), gr.update(visible=False)
elif task == "NER":
return gr.update(visible=False), gr.update(visible=True, label="πΉοΈ Constraint", lines=3,
placeholder="Please specify the entity types to extract in list format, and all types will be extracted by default if not specified.")
elif task == "RE":
return gr.update(visible=False), gr.update(visible=True, label="πΉοΈ Constraint", lines=3,
placeholder="Please specify the relation types to extract in list format, and all types will be extracted by default if not specified.")
elif task == "EE":
return gr.update(visible=False), gr.update(visible=True, label="πΉοΈ Constraint", lines=3,
placeholder="Please specify the event types and their corresponding extraction attributes in dictionary format, and all types and attributes will be extracted by default if not specified.")
elif task == "Triple":
return gr.update(visible=False), gr.update(visible=True, label="πΉοΈ Constraint", lines=3,
placeholder="Please read the documentation and specify the types of triples in list format.")
def update_input_fields(use_file):
if use_file:
return gr.update(visible=False), gr.update(visible=True)
else:
return gr.update(visible=True), gr.update(visible=False)
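# The Truth box is only shown when "Update Case" is checked.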
def update_case(update_case):
if update_case:
return gr.update(visible=True)
else:
return gr.update(visible=False)
# def update_schema(update_schema):
# if update_schema:
# return gr.update(visible=True)
# else:
# return gr.update(visible=False)
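# Fill the form with the next built-in example (cycling through `examples`)
# and hide/reset the three agent dropdowns.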
def start_with_example():
global example_start_index
example = examples[example_start_index]
example_start_index += 1
if example_start_index >= len(examples):
example_start_index = 0
return (
gr.update(value=example["task"]),
gr.update(value=example["mode"]),
gr.update(value=example["use_file"]),
gr.update(value=example["file_path"], visible=example["use_file"]),
gr.update(value=example["text"], visible=not example["use_file"]),
gr.update(value=example["instruction"], visible=example["task"] == "Base"),
gr.update(value=example["constraint"], visible=example["task"] in ["NER", "RE", "EE", "Triple"]),
gr.update(value=example["update_case"]),
gr.update(value=example["truth"]), # gr.update(value=example["update_schema"]), gr.update(value=example["selfschema"]),
gr.update(value="Not Required", visible=False),
gr.update(value="Not Required", visible=False),
gr.update(value="Not Required", visible=False),
)
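# Build a Pipeline around the selected engine and run extraction.
# Returns the generated schema, the extraction result, and an error-box update.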
def submit(model, api_key, base_url, task, mode, instruction, constraint, text, use_file, file_path, update_case, truth, schema_agent, extraction_Agent, reflection_agent):
try:
ModelClass = get_model_category(model)
if base_url == "Default" or base_url == "":
if api_key == "":
pipeline = Pipeline(ModelClass(model_name_or_path=model))
else:
pipeline = Pipeline(ModelClass(model_name_or_path=model, api_key=api_key))
else:
if api_key == "":
pipeline = Pipeline(ModelClass(model_name_or_path=model, base_url=base_url))
else:
pipeline = Pipeline(ModelClass(model_name_or_path=model, api_key=api_key, base_url=base_url))
if task == "Base":
instruction = instruction
constraint = ""
else:
instruction = ""
constraint = constraint
if use_file:
text = ""
file_path = file_path
else:
text = text
file_path = None
if not update_case:
truth = ""
agent3 = {}
if mode == "customized":
if schema_agent not in ["", "Not Required"]:
agent3["schema_agent"] = schema_agent
if extraction_Agent not in ["", "Not Required"]:
agent3["extraction_agent"] = extraction_Agent
if reflection_agent not in ["", "Not Required"]:
agent3["reflection_agent"] = reflection_agent
# Run the extraction pipeline; only the frontend-facing schema and result are used here.
_, _, ger_frontend_schema, ger_frontend_res = pipeline.get_extract_result(
task=task,
text=text,
use_file=use_file,
file_path=file_path,
instruction=instruction,
constraint=constraint,
mode=mode,
three_agents=agent3,
isgui=True,
update_case=update_case,
truth=truth,
output_schema="",
show_trajectory=False,
)
ger_frontend_schema = str(ger_frontend_schema)
ger_frontend_res = json.dumps(ger_frontend_res, ensure_ascii=False, indent=4) if isinstance(ger_frontend_res, dict) else str(ger_frontend_res)
return ger_frontend_schema, ger_frontend_res, gr.update(value="", visible=False)
except Exception as e:
error_message = f"β οΈ Error:\n {str(e)}"
return "", "", gr.update(value=error_message, visible=True)
def clear_all():
return (
gr.update(value="Not Required", visible=False), # sechema_agent
gr.update(value="Not Required", visible=False), # extraction_Agent
gr.update(value="Not Required", visible=False), # reflection_agent
gr.update(value="Base"), # task
gr.update(value="quick"), # mode
gr.update(value="", visible=False), # instruction
gr.update(value="", visible=False), # constraint
gr.update(value=True), # use_file
gr.update(value="", visible=False), # text
gr.update(value=None, visible=True), # file_path
gr.update(value=False), # update_case
gr.update(value="", visible=False), # truth # gr.update(value=False), # update_schema gr.update(value="", visible=False), # selfschema
gr.update(value=""), # py_output_gr
gr.update(value=""), # json_output_gr
gr.update(value="", visible=False), # error_output
)
with gr.Row():
submit_button_gr = gr.Button("Submit", variant="primary", scale=8)
clear_button = gr.Button("Clear", scale=5)
gr.HTML("""
<div style="width: 100%; text-align: center; font-size: 16px; font-weight: bold; position: relative; margin: 20px 0;">
<span style="position: absolute; left: 0; top: 50%; transform: translateY(-50%); width: 45%; border-top: 1px solid #ccc;"></span>
<span style="position: relative; z-index: 1; background-color: white; padding: 0 10px;">Output:</span>
<span style="position: absolute; right: 0; top: 50%; transform: translateY(-50%); width: 45%; border-top: 1px solid #ccc;"></span>
</div>
""")
error_output_gr = gr.Textbox(label="😵‍💫 Oops, an Error Occurred", visible=False, interactive=False)
with gr.Row():
with gr.Column(scale=1):
py_output_gr = gr.Code(label="🤔 Generated Schema", language="python", lines=10, interactive=False)
with gr.Column(scale=1):
json_output_gr = gr.Code(label="😉 Final Answer", language="json", lines=10, interactive=False)
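# Wire up component events: field visibility toggles, the example button, submit, and clear.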
task_gr.change(fn=update_fields, inputs=task_gr, outputs=[instruction_gr, constraint_gr])
mode_gr.change(fn=customized_mode, inputs=mode_gr, outputs=[schema_agent_gr, extraction_Agent_gr, reflection_agent_gr])
use_file_gr.change(fn=update_input_fields, inputs=use_file_gr, outputs=[text_gr, file_path_gr])
update_case_gr.change(fn=update_case, inputs=update_case_gr, outputs=[truth_gr])
# update_schema_gr.change(fn=update_schema, inputs=update_schema_gr, outputs=[selfschema_gr])
example_button_gr.click(
fn=start_with_example,
inputs=[],
outputs=[
task_gr,
mode_gr,
use_file_gr,
file_path_gr,
text_gr,
instruction_gr,
constraint_gr,
update_case_gr,
truth_gr, # update_schema_gr, selfschema_gr,
schema_agent_gr,
extraction_Agent_gr,
reflection_agent_gr,
],
)
submit_button_gr.click(
fn=submit,
inputs=[
model_gr,
api_key_gr,
base_url_gr,
task_gr,
mode_gr,
instruction_gr,
constraint_gr,
text_gr,
use_file_gr,
file_path_gr,
update_case_gr,
truth_gr, # update_schema_gr, selfschema_gr,
schema_agent_gr,
extraction_Agent_gr,
reflection_agent_gr,
],
outputs=[py_output_gr, json_output_gr, error_output_gr],
show_progress=True,
)
clear_button.click(
fn=clear_all,
outputs=[
schema_agent_gr,
extraction_Agent_gr,
reflection_agent_gr,
task_gr,
mode_gr,
instruction_gr,
constraint_gr,
use_file_gr,
text_gr,
file_path_gr,
update_case_gr,
truth_gr, # update_schema_gr, selfschema_gr,
py_output_gr,
json_output_gr,
error_output_gr,
],
)
return demo
# Launch the front-end interface
if __name__ == "__main__":
interface = create_interface()
interface.launch()
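# Optional launch settings (e.g. for remote access or a fixed port):
# interface.launch(server_name="0.0.0.0", server_port=7860, share=True)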