""" For HuggingFace Space. """ import gradio as gr import json import random import re from models import * from pipeline import Pipeline examples = [ { "task": "NER", "mode": "quick", "use_file": False, "text": "Finally, every other year , ELRA organizes a major conference LREC , the International Language Resources and Evaluation Conference .", "instruction": "", "constraint": """["algorithm", "conference", "else", "product", "task", "field", "metrics", "organization", "researcher", "program language", "country", "location", "person", "university"]""", "file_path": None, "update_case": False, "truth": "", }, { "task": "Base", "mode": "quick", "use_file": True, "file_path": "data/input_files/Tulsi_Gabbard_News.html", "instruction": "Extract key information from the given text.", "constraint": "", "text": "", "update_case": False, "truth": "", }, { "task": "RE", "mode": "quick", "use_file": False, "text": "The aid group Doctors Without Borders said that since Saturday , more than 275 wounded people had been admitted and treated at Donka Hospital in the capital of Guinea , Conakry .", "instruction": "", "constraint": """["nationality", "country capital", "place of death", "children", "location contains", "place of birth", "place lived", "administrative division of country", "country of administrative divisions", "company", "neighborhood of", "company founders"]""", "file_path": None, "update_case": True, "truth": """{"relation_list": [{"head": "Guinea", "tail": "Conakry", "relation": "country capital"}]}""", }, { "task": "EE", "mode": "standard", "use_file": False, "text": "The file suggested to the user contains no software related to video streaming and simply carries the malicious payload that later compromises victim \u2019s account and sends out the deceptive messages to all victim \u2019s contacts .", "instruction": "", "constraint": """{"phishing": ["damage amount", "attack pattern", "tool", "victim", "place", "attacker", "purpose", "trusted entity", "time"], "data breach": ["damage amount", "attack pattern", "number of data", "number of victim", "tool", "compromised data", "victim", "place", "attacker", "purpose", "time"], "ransom": ["damage amount", "attack pattern", "payment method", "tool", "victim", "place", "attacker", "price", "time"], "discover vulnerability": ["vulnerable system", "vulnerability", "vulnerable system owner", "vulnerable system version", "supported platform", "common vulnerabilities and exposures", "capabilities", "time", "discoverer"], "patch vulnerability": ["vulnerable system", "vulnerability", "issues addressed", "vulnerable system version", "releaser", "supported platform", "common vulnerabilities and exposures", "patch number", "time", "patch"]}""", "file_path": None, "update_case": False, "truth": "", }, { "task": "Triple", "mode": "quick", "use_file": True, "file_path": "data/input_files/Artificial_Intelligence_Wikipedia.txt", "instruction": "", "constraint": """[["Person", "Place", "Event", "property"], ["Interpersonal", "Located", "Ownership", "Action"]]""", "text": "", "update_case": False, "truth": "", }, { "task": "Base", "mode": "quick", "use_file": True, "file_path": "data/input_files/Harry_Potter_Chapter1.pdf", "instruction": "Extract main characters and the background setting from this chapter.", "constraint": "", "text": "", "update_case": False, "truth": "", }, ] example_start_index = 0 def create_interface(): with gr.Blocks(title="OneKE Demo", theme=gr.themes.Glass(text_size="lg")) as demo: gr.HTML("""

        <div style="text-align: center;">
            <h1>OneKE: A Dockerized Schema-Guided LLM Agent-based Knowledge Extraction System</h1>
            <p>🌐[Home] 📹[Video] 📝[Paper] đŸ’ģ[Code]</p>
        </div>
        """)
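        # The Quick Start button cycles through the predefined `examples` list above
        # and fills the form widgets below via `start_with_example`.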
""") example_button_gr = gr.Button("🎲 Quick Start with an Example 🎲") with gr.Row(): with gr.Column(): model_gr = gr.Dropdown( label="đŸĒ„ Select your Model", choices=["deepseek-chat", "deepseek-reasoner", "gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", ], value="deepseek-chat", ) api_key_gr = gr.Textbox( label="🔑 Enter your API-Key", placeholder="Please enter your API-Key from ChatGPT or DeepSeek.", type="password", ) base_url_gr = gr.Textbox( label="🔗 Enter your Base-URL", placeholder="Please leave this field empty if using the default Base-URL.", ) with gr.Column(): task_gr = gr.Dropdown( label="đŸŽ¯ Select your Task", choices=["Base", "NER", "RE", "EE", "Triple"], value="Base", ) mode_gr = gr.Dropdown( label="🧭 Select your Mode", choices=["quick", "standard", "customized"], value="quick", ) schema_agent_gr = gr.Dropdown(choices=["Not Required", "get_default_schema", "get_deduced_schema"], value="Not Required", label="🤖 Select your Schema-Agent", visible=False) extraction_Agent_gr = gr.Dropdown(choices=["Not Required", "extract_information_direct", "extract_information_with_case"], value="Not Required", label="🤖 Select your Extraction-Agent", visible=False) reflection_agent_gr = gr.Dropdown(choices=["Not Required", "reflect_with_case"], value="Not Required", label="🤖 Select your Reflection-Agent", visible=False) use_file_gr = gr.Checkbox(label="📂 Use File", value=True) file_path_gr = gr.File(label="📖 Upload a File", visible=True) text_gr = gr.Textbox(label="📖 Text", lines=5, placeholder="Please enter the text to be processed.", visible=False) instruction_gr = gr.Textbox(label="đŸ•šī¸ Instruction", lines=3, placeholder="Please enter any type of information you want to extract here, for example: Help me extract all the place names.", visible=True) constraint_gr = gr.Textbox(label="đŸ•šī¸ Constraint", lines=3, placeholder="Please specify the types of entities, relations, events, or other relevant attributes in list format as per the task requirements.", visible=False) update_case_gr = gr.Checkbox(label="💰 Update Case", value=False) # update_schema_gr = gr.Checkbox(label="📟 Update Schema", value=False) truth_gr = gr.Textbox(label="đŸĒ™ Truth", lines=2, placeholder="""Please enter the truth you want LLM know, for example: {"relation_list": [{"head": "Guinea", "tail": "Conakry", "relation": "country capital"}]}""", visible=False) # selfschema_gr = gr.Textbox(label="📟 Schema", lines=5, placeholder="Enter your New Schema", visible=False, interactive=True) def get_model_category(model_name_or_path): if model_name_or_path in ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o3-mini"]: return ChatGPT elif model_name_or_path in ["deepseek-chat", "deepseek-reasoner"]: return DeepSeek elif re.search(r'(?i)llama', model_name_or_path): return LLaMA elif re.search(r'(?i)qwen', model_name_or_path): return Qwen elif re.search(r'(?i)minicpm', model_name_or_path): return MiniCPM elif re.search(r'(?i)chatglm', model_name_or_path): return ChatGLM else: return BaseEngine def customized_mode(mode): if mode == "customized": return gr.update(visible=True), gr.update(visible=True), gr.update(visible=True) else: return gr.update(visible=False, value="Not Required"), gr.update(visible=False, value="Not Required"), gr.update(visible=False, value="Not Required") def update_fields(task): if task == "Base" or task == "": return gr.update(visible=True, label="đŸ•šī¸ Instruction", lines=3, placeholder="Please enter any type of information you want to extract here, for example: Help me extract all the place names."), 
gr.update(visible=False) elif task == "NER": return gr.update(visible=False), gr.update(visible=True, label="đŸ•šī¸ Constraint", lines=3, placeholder="Please specify the entity types to extract in list format, and all types will be extracted by default if not specified.") elif task == "RE": return gr.update(visible=False), gr.update(visible=True, label="đŸ•šī¸ Constraint", lines=3, placeholder="Please specify the relation types to extract in list format, and all types will be extracted by default if not specified.") elif task == "EE": return gr.update(visible=False), gr.update(visible=True, label="đŸ•šī¸ Constraint", lines=3, placeholder="Please specify the event types and their corresponding extraction attributes in dictionary format, and all types and attributes will be extracted by default if not specified.") elif task == "Triple": return gr.update(visible=False), gr.update(visible=True, label="đŸ•šī¸ Constraint", lines=3, placeholder="Please read the documentation and specify the types of triples in list format.") def update_input_fields(use_file): if use_file: return gr.update(visible=False), gr.update(visible=True) else: return gr.update(visible=True), gr.update(visible=False) def update_case(update_case): if update_case: return gr.update(visible=True) else: return gr.update(visible=False) # def update_schema(update_schema): # if update_schema: # return gr.update(visible=True) # else: # return gr.update(visible=False) def start_with_example(): global example_start_index example = examples[example_start_index] example_start_index += 1 if example_start_index >= len(examples): example_start_index = 0 return ( gr.update(value=example["task"]), gr.update(value=example["mode"]), gr.update(value=example["use_file"]), gr.update(value=example["file_path"], visible=example["use_file"]), gr.update(value=example["text"], visible=not example["use_file"]), gr.update(value=example["instruction"], visible=example["task"] == "Base"), gr.update(value=example["constraint"], visible=example["task"] in ["NER", "RE", "EE", "Triple"]), gr.update(value=example["update_case"]), gr.update(value=example["truth"]), # gr.update(value=example["update_schema"]), gr.update(value=example["selfschema"]), gr.update(value="Not Required", visible=False), gr.update(value="Not Required", visible=False), gr.update(value="Not Required", visible=False), ) def submit(model, api_key, base_url, task, mode, instruction, constraint, text, use_file, file_path, update_case, truth, schema_agent, extraction_Agent, reflection_agent): try: ModelClass = get_model_category(model) if base_url == "Default" or base_url == "": if api_key == "": pipeline = Pipeline(ModelClass(model_name_or_path=model)) else: pipeline = Pipeline(ModelClass(model_name_or_path=model, api_key=api_key)) else: if api_key == "": pipeline = Pipeline(ModelClass(model_name_or_path=model, base_url=base_url)) else: pipeline = Pipeline(ModelClass(model_name_or_path=model, api_key=api_key, base_url=base_url)) if task == "Base": instruction = instruction constraint = "" else: instruction = "" constraint = constraint if use_file: text = "" file_path = file_path else: text = text file_path = None if not update_case: truth = "" agent3 = {} if mode == "customized": if schema_agent not in ["", "Not Required"]: agent3["schema_agent"] = schema_agent if extraction_Agent not in ["", "Not Required"]: agent3["extraction_agent"] = extraction_Agent if reflection_agent not in ["", "Not Required"]: agent3["reflection_agent"] = reflection_agent # use 'Pipeline' _, _, 
ger_frontend_schema, ger_frontend_res = pipeline.get_extract_result( task=task, text=text, use_file=use_file, file_path=file_path, instruction=instruction, constraint=constraint, mode=mode, three_agents=agent3, isgui=True, update_case=update_case, truth=truth, output_schema="", show_trajectory=False, ) ger_frontend_schema = str(ger_frontend_schema) ger_frontend_res = json.dumps(ger_frontend_res, ensure_ascii=False, indent=4) if isinstance(ger_frontend_res, dict) else str(ger_frontend_res) return ger_frontend_schema, ger_frontend_res, gr.update(value="", visible=False) except Exception as e: error_message = f"âš ī¸ Error:\n {str(e)}" return "", "", gr.update(value=error_message, visible=True) def clear_all(): return ( gr.update(value="Not Required", visible=False), # sechema_agent gr.update(value="Not Required", visible=False), # extraction_Agent gr.update(value="Not Required", visible=False), # reflection_agent gr.update(value="Base"), # task gr.update(value="quick"), # mode gr.update(value="", visible=False), # instruction gr.update(value="", visible=False), # constraint gr.update(value=True), # use_file gr.update(value="", visible=False), # text gr.update(value=None, visible=True), # file_path gr.update(value=False), # update_case gr.update(value="", visible=False), # truth # gr.update(value=False), # update_schema gr.update(value="", visible=False), # selfschema gr.update(value=""), # py_output_gr gr.update(value=""), # json_output_gr gr.update(value="", visible=False), # error_output ) with gr.Row(): submit_button_gr = gr.Button("Submit", variant="primary", scale=8) clear_button = gr.Button("Clear", scale=5) gr.HTML("""
Output:
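        # Output area: the generated schema is rendered as Python, the extraction
        # result as JSON; the error box stays hidden unless `submit` raises.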
""") error_output_gr = gr.Textbox(label="đŸ˜ĩ‍đŸ’Ģ Ops, an Error Occurred", visible=False, interactive=False) with gr.Row(): with gr.Column(scale=1): py_output_gr = gr.Code(label="🤔 Generated Schema", language="python", lines=10, interactive=False) with gr.Column(scale=1): json_output_gr = gr.Code(label="😉 Final Answer", language="json", lines=10, interactive=False) task_gr.change(fn=update_fields, inputs=task_gr, outputs=[instruction_gr, constraint_gr]) mode_gr.change(fn=customized_mode, inputs=mode_gr, outputs=[schema_agent_gr, extraction_Agent_gr, reflection_agent_gr]) use_file_gr.change(fn=update_input_fields, inputs=use_file_gr, outputs=[text_gr, file_path_gr]) update_case_gr.change(fn=update_case, inputs=update_case_gr, outputs=[truth_gr]) # update_schema_gr.change(fn=update_schema, inputs=update_schema_gr, outputs=[selfschema_gr]) example_button_gr.click( fn=start_with_example, inputs=[], outputs=[ task_gr, mode_gr, use_file_gr, file_path_gr, text_gr, instruction_gr, constraint_gr, update_case_gr, truth_gr, # update_schema_gr, selfschema_gr, schema_agent_gr, extraction_Agent_gr, reflection_agent_gr, ], ) submit_button_gr.click( fn=submit, inputs=[ model_gr, api_key_gr, base_url_gr, task_gr, mode_gr, instruction_gr, constraint_gr, text_gr, use_file_gr, file_path_gr, update_case_gr, truth_gr, # update_schema_gr, selfschema_gr, schema_agent_gr, extraction_Agent_gr, reflection_agent_gr, ], outputs=[py_output_gr, json_output_gr, error_output_gr], show_progress=True, ) clear_button.click( fn=clear_all, outputs=[ schema_agent_gr, extraction_Agent_gr, reflection_agent_gr, task_gr, mode_gr, instruction_gr, constraint_gr, use_file_gr, text_gr, file_path_gr, update_case_gr, truth_gr, # update_schema_gr, selfschema_gr, py_output_gr, json_output_gr, error_output_gr, ], ) return demo # Launch the front-end interface if __name__ == "__main__": interface = create_interface() interface.launch()