memorease-api / app.py
memorease's picture
Update app.py
02cb8d5 verified
raw
history blame contribute delete
760 Bytes
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
app = FastAPI()
# Load the model and tokenizer once at import time (CPU inference).
# NOTE(review): no device placement is done here — the model stays on CPU by default.
model = AutoModelForCausalLM.from_pretrained("memorease/memorease-quizgen")
tokenizer = AutoTokenizer.from_pretrained("memorease/memorease-quizgen")
class Memory(BaseModel):
    """Request body for POST /generate: free-text description to build a quiz question from."""
    # description: the memory text inserted into the generation prompt.
    description: str
@app.post("/generate")
def generate(memory: Memory):
    """Generate a quiz question for the given memory description.

    Builds a Turkish instruction prompt ("Soru üret:" = "Generate a question:"),
    runs the causal LM, and returns only the newly generated text.

    Returns:
        dict: {"question": <generated text>} — prompt tokens are stripped,
        fixing the bug where the response echoed the prompt itself.
    """
    prompt = f"Soru üret: {memory.description}"
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=128)
    # Inference only: no_grad avoids building the autograd graph (saves memory/CPU time).
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=64)
    # Causal LMs return prompt + continuation; decode only the tokens generated
    # after the prompt so the API response is just the question.
    prompt_length = inputs["input_ids"].shape[1]
    question = tokenizer.decode(outputs[0][prompt_length:], skip_special_tokens=True)
    return {"question": question}
#