"""Helpers for tokenizing prompts and persisting user feedback on generated text."""

import json
import os

from transformers import GPT2Tokenizer

# Single source of truth for where feedback records live on disk.
FEEDBACK_DIR = os.path.join("data", "feedback")


def preprocess_input(prompt, tokenizer):
    """Tokenize *prompt* for model input.

    Args:
        prompt: Raw text to tokenize.
        tokenizer: A Hugging Face tokenizer (e.g. GPT2Tokenizer instance).

    Returns:
        A BatchEncoding of PyTorch tensors ("pt"), truncated to 128 tokens.
    """
    return tokenizer(prompt, return_tensors="pt", truncation=True, max_length=128)


def save_feedback(prompt, generated_text, user_feedback):
    """Persist one feedback record as a JSON file under ``data/feedback/``.

    Args:
        prompt: The prompt that was submitted.
        generated_text: The model's output for that prompt.
        user_feedback: The user's rating/comment on the output.

    Side effects:
        Creates ``data/feedback/`` if needed and writes a new
        ``feedback_<index>.json`` file that is guaranteed not to overwrite
        an existing record.
    """
    feedback_data = {
        "prompt": prompt,
        "generated_text": generated_text,
        "user_feedback": user_feedback,
    }
    os.makedirs(FEEDBACK_DIR, exist_ok=True)
    # BUG FIX: the original used len(os.listdir(...)) as the filename index,
    # which silently overwrites a record whenever an earlier file was deleted
    # (the count no longer equals the next free index). Instead, probe for
    # the first unused index and create the file atomically with mode "x",
    # which raises FileExistsError rather than clobbering a concurrent write.
    index = len(os.listdir(FEEDBACK_DIR))  # good starting guess, not trusted
    while True:
        path = os.path.join(FEEDBACK_DIR, f"feedback_{index}.json")
        try:
            with open(path, "x", encoding="utf-8") as f:
                json.dump(feedback_data, f, ensure_ascii=False)
            return
        except FileExistsError:
            index += 1


def load_feedback_data():
    """Load all saved feedback records.

    Returns:
        A list of feedback dicts (one per JSON file), in sorted filename
        order for determinism. Returns ``[]`` when no feedback has ever
        been saved (the original raised FileNotFoundError in that case).
    """
    if not os.path.isdir(FEEDBACK_DIR):
        return []  # nothing saved yet — don't crash before first save
    feedback_data = []
    for file_name in sorted(os.listdir(FEEDBACK_DIR)):
        with open(os.path.join(FEEDBACK_DIR, file_name), "r", encoding="utf-8") as f:
            feedback_data.append(json.load(f))
    return feedback_data