File size: 3,329 Bytes
b9f8cf8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import firebase_admin
from firebase_admin import credentials, db
import os
import json

# --- Firebase initialization -------------------------------------------------
# Load the service-account credentials from firebase-key.json; the path can
# be overridden with the FIREBASE_KEY_PATH environment variable.
firebase_key_path = os.environ.get("FIREBASE_KEY_PATH", "firebase-key.json")
with open(firebase_key_path, "r") as f:
    firebase_config = json.load(f)

# Initialize the Firebase Admin SDK. The Realtime Database URL can be
# overridden via FIREBASE_DB_URL so the script is not tied to one project;
# the default keeps the original behavior.
cred = credentials.Certificate(firebase_config)
firebase_admin.initialize_app(cred, {
    "databaseURL": os.environ.get(
        "FIREBASE_DB_URL",
        "https://taskmate-d6e71-default-rtdb.firebaseio.com/",
    )
})
# All tasks are stored under the top-level "tasks" node.
ref = db.reference("tasks")

# --- Model loading -----------------------------------------------------------
# Load the IBM Granite model from Hugging Face.
# BUG FIX: the original id ended with a stray em-dash ("granite-7b-base—"),
# which is not a valid Hugging Face repo id and makes from_pretrained fail
# with a repository-not-found error. Switch to a smaller Granite variant if
# 7b is too heavy for the host.
model_name = "ibm-granite/granite-7b-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Function to generate text with Granite
def generate_response(prompt, max_length=100):
    """Generate a completion for *prompt* with the Granite model.

    Args:
        prompt: Text prompt fed to the model.
        max_length: Maximum total token length (prompt + generated tokens),
            passed straight to ``model.generate``.

    Returns:
        The decoded model output with special tokens removed and surrounding
        whitespace stripped.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: disable autograd tracking to save memory and time.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_length=max_length,
            num_return_sequences=1,
            # Decoder-only models have no pad token by default; using EOS
            # suppresses the "pad_token_id not set" warning.
            pad_token_id=tokenizer.eos_token_id,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True).strip()

# Parse user input into structured task
def parse_task(input_text, persona="default"):
    """Ask the model to pull task, time, and priority out of *input_text*.

    Returns the raw model text, e.g.
    "Task: Email boss, Time: Today, Priority: High".
    """
    extraction_prompt = (
        f"For a {persona} employee, extract task, time, priority from: "
        f"'{input_text}'"
    )
    return generate_response(extraction_prompt)

# Generate persona-specific subtasks
def generate_subtasks(task, persona="default"):
    """Produce three persona-tailored subtasks for *task*.

    Returns the raw model text, e.g. "1. Draft email\n2. Send it\n3. Chill".
    """
    subtask_prompt = f"List 3 subtasks for '{task}' suited for a {persona} employee."
    return generate_response(subtask_prompt, max_length=150)

# Main chat function
def task_mate_chat(user_input, persona, chat_history):
    """Handle one chat turn: parse the input, plan subtasks, persist, reply.

    Args:
        user_input: Raw text the user typed.
        persona: Selected persona ("lazy" / "multitasker" / "perfect").
        chat_history: List of (user, bot) tuples from the Chatbot component.

    Returns:
        ("", chat_history): the empty string clears the textbox; the history
        gains the new (user_input, response) pair.
    """
    # Parse the free-form input into "Task: ..., Time: ..., Priority: ..."
    parsed = parse_task(user_input, persona)
    # Best-effort extraction of the task name; assumes the model followed
    # the "Task: X, ..." shape the prompt asks for.
    task_name = parsed.split(",")[0].replace("Task: ", "").strip()

    # Generate persona-specific subtasks for the extracted task.
    subtasks = generate_subtasks(task_name, persona)

    # Persist the exchange to Firebase.
    # BUG FIX: db.ServerValue does not exist in the firebase_admin *Python*
    # SDK (ServerValue.TIMESTAMP is the JavaScript API), so the original line
    # raised AttributeError. The Realtime Database server-timestamp
    # placeholder in Python is the literal {".sv": "timestamp"}, which the
    # server replaces with epoch milliseconds on write.
    task_data = {
        "input": user_input,
        "parsed": parsed,
        "subtasks": subtasks,
        "persona": persona,
        "timestamp": {".sv": "timestamp"},
    }
    ref.push().set(task_data)

    # Format the bot reply and append the turn to the chat history.
    response = f"Parsed: {parsed}\nSubtasks:\n{subtasks}"
    chat_history.append((user_input, response))
    return "", chat_history

# Gradio Interface
# Declarative Blocks layout: persona selector, chat window, text input, and a
# submit button wired to task_mate_chat.
with gr.Blocks(title="Task_Mate") as interface:
    gr.Markdown("# Task_Mate: Your AI Task Buddy")
    # Persona value is threaded into the prompts built by parse_task and
    # generate_subtasks.
    persona = gr.Dropdown(["lazy", "multitasker", "perfect"], label="Who are you?", value="lazy")
    chatbot = gr.Chatbot(label="Chat with Task_Mate")
    msg = gr.Textbox(label="Talk to me", placeholder="e.g., 'What’s today?' or 'Meeting at 2 PM'")
    submit = gr.Button("Submit")
    
    # Handle chat submission
    # task_mate_chat returns ("", history): the empty string clears the
    # textbox (msg) and the updated history refreshes the chatbot.
    submit.click(
        fn=task_mate_chat,
        inputs=[msg, persona, chatbot],
        outputs=[msg, chatbot]
    )
    
    # Examples for each persona
    # NOTE(review): gr.Examples is passed outputs= without fn=; depending on
    # the installed Gradio version this argument may be ignored or rejected —
    # confirm against the Gradio release actually in use.
    gr.Examples(
        examples=[
            ["What’s today?", "lazy"],
            ["Meeting Sarah, slides, IT call", "multitasker"],
            ["Email boss by 3 PM", "perfect"]
        ],
        inputs=[msg, persona],
        outputs=chatbot
    )

# Start the Gradio server (blocks until the app is stopped).
interface.launch()