|
import gradio as gr |
|
from transformers import AutoTokenizer, AutoModelForCausalLM |
|
import torch |
|
import firebase_admin |
|
from firebase_admin import credentials, db |
|
import os |
|
import json |
|
|
|
|
|
# --- Firebase initialisation ---------------------------------------------
# Load the service-account key from FIREBASE_KEY_PATH (default:
# firebase-key.json next to this script) and connect to the Realtime DB.
firebase_key_path = os.environ.get("FIREBASE_KEY_PATH", "firebase-key.json")

with open(firebase_key_path, "r") as f:
    firebase_config = json.load(f)

cred = credentials.Certificate(firebase_config)
firebase_admin.initialize_app(cred, {
    # Generalized: the database URL is overridable via FIREBASE_DB_URL so the
    # app can target other projects; the default keeps the original URL.
    "databaseURL": os.environ.get(
        "FIREBASE_DB_URL",
        "https://taskmate-d6e71-default-rtdb.firebaseio.com/",
    )
})

# Root node under which every parsed task is pushed.
ref = db.reference("tasks")
|
|
|
|
|
# --- Model setup ----------------------------------------------------------
# BUG FIX: the original id ended with a stray em dash
# ("ibm-granite/granite-7b-base—"), which is not a valid Hugging Face repo
# id and makes from_pretrained fail with a repo-not-found error.
model_name = "ibm-granite/granite-7b-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
|
|
|
|
|
def generate_response(prompt, max_length=100):
    """Run the causal LM on *prompt* and return the decoded completion.

    Uses the module-level ``tokenizer`` and ``model``; generation is capped
    at ``max_length`` total tokens and a single sequence is produced.
    """
    encoded = tokenizer(prompt, return_tensors="pt")
    generated = model.generate(
        **encoded,
        max_length=max_length,
        num_return_sequences=1,
    )
    decoded = tokenizer.decode(generated[0], skip_special_tokens=True)
    return decoded.strip()
|
|
|
|
|
def parse_task(input_text, persona="default"):
    """Ask the model to pull task, time and priority out of *input_text*.

    The *persona* string is folded into the prompt so extraction is tailored
    to the selected employee type. Returns the raw model response text.
    """
    extraction_prompt = (
        f"For a {persona} employee, extract task, time, priority from: '{input_text}'"
    )
    return generate_response(extraction_prompt)
|
|
|
|
|
def generate_subtasks(task, persona="default"):
    """Ask the model for three persona-tailored subtasks of *task*.

    Returns the raw model response text; a longer generation budget (150
    tokens) is used than for parsing, since lists need more room.
    """
    subtask_prompt = f"List 3 subtasks for '{task}' suited for a {persona} employee."
    return generate_response(subtask_prompt, max_length=150)
|
|
|
|
|
def task_mate_chat(user_input, persona, chat_history):
    """Handle one chat turn: parse the task, generate subtasks, persist, reply.

    Args:
        user_input: Raw text typed by the user.
        persona: Selected persona string (feeds the LLM prompts).
        chat_history: List of (user, bot) message pairs for the Chatbot widget.

    Returns:
        ("", updated_history) — empty string clears the input textbox.
    """
    parsed = parse_task(user_input, persona)
    # Take the first comma-separated field and strip a leading "Task: " label
    # if the model produced one. Fragile by design: depends on the model
    # following the prompt format.
    task_name = parsed.split(",")[0].replace("Task: ", "").strip()

    subtasks = generate_subtasks(task_name, persona)

    task_data = {
        "input": user_input,
        "parsed": parsed,
        "subtasks": subtasks,
        "persona": persona,
        # BUG FIX: the Python firebase_admin `db` module has no ServerValue
        # attribute (that API belongs to the Node.js Admin SDK), so the
        # original `db.ServerValue.TIMESTAMP` raised AttributeError. The RTDB
        # server-side timestamp sentinel in the Python SDK is the raw
        # {".sv": "timestamp"} placeholder.
        "timestamp": {".sv": "timestamp"},
    }
    # push(value) creates a new child and sets it in one call — equivalent to
    # the original push().set(value).
    ref.push(task_data)

    response = f"Parsed: {parsed}\nSubtasks:\n{subtasks}"
    chat_history.append((user_input, response))
    return "", chat_history
|
|
|
|
|
# --- Gradio UI (module-level: builds the Blocks app and launches it) ------
with gr.Blocks(title="Task_Mate") as interface:
    gr.Markdown("# Task_Mate: Your AI Task Buddy")
    # Persona is threaded into every LLM prompt to tailor parsing/subtasks.
    persona = gr.Dropdown(["lazy", "multitasker", "perfect"], label="Who are you?", value="lazy")
    chatbot = gr.Chatbot(label="Chat with Task_Mate")
    msg = gr.Textbox(label="Talk to me", placeholder="e.g., 'What’s today?' or 'Meeting at 2 PM'")
    submit = gr.Button("Submit")

    # task_mate_chat returns ("", new_history): the empty string clears the
    # textbox and the list refreshes the chat pane.
    submit.click(
        fn=task_mate_chat,
        inputs=[msg, persona, chatbot],
        outputs=[msg, chatbot]
    )

    # NOTE(review): `outputs` is given without `fn`, so clicking an example
    # only fills the inputs — it does not run the pipeline. Some Gradio
    # versions warn or error on outputs-without-fn; confirm against the
    # installed version.
    gr.Examples(
        examples=[
            ["What’s today?", "lazy"],
            ["Meeting Sarah, slides, IT call", "multitasker"],
            ["Email boss by 3 PM", "perfect"]
        ],
        inputs=[msg, persona],
        outputs=chatbot
    )

# Blocking call: starts the local Gradio server.
interface.launch()