import torch
import gradio as gr

# Use a pipeline as a high-level helper
from transformers import pipeline

# Local snapshot path (kept for reference when running offline):
# model_path = "../models/models--deepset--roberta-base-squad2/snapshots/adc3b06f79f797d1c575d5479d6f5efe54a9e3b4"

# Load an extractive question-answering pipeline; the model is downloaded on first run.
question_answer = pipeline("question-answering", model="deepset/roberta-base-squad2")
# Alternative models tried:
# question_answer = pipeline("question-answering", model="deepset/roberta-large-squad2")
# question_answer = pipeline("question-answering", model="google/flan-t5-large")
# question_answer = pipeline("question-answering", model=model_path)
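# Note: the question-answering pipeline returns a dict with "score", "start",
# "end", and "answer" keys; only the "answer" string is shown in the UI below.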

def read_file_content(file_path):
    """
    Reads the content of a file given its file path and returns it as a string.
    """
    try:
        with open(file_path, "r", encoding="utf-8") as file:
            return file.read()
    except Exception as e:
        return f"Error reading file: {e}"

def get_answer(file, question):
    if file is None:
        return "Please upload a file first."
    context = read_file_content(file)  # 'file' is a path string, not a file object
    if context.startswith("Error"):
        return context  # Return the error message if file reading fails
    answer = question_answer(question=question, context=context)
    return answer["answer"]
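
# Quick local check (sketch): get_answer can be called directly with a path to a
# plain-text file; "notes.txt" is a hypothetical filename, not part of this repo.
# print(get_answer("notes.txt", "What is the main topic of the document?"))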

demo = gr.Interface(
    fn=get_answer,
    inputs=[
        gr.File(label="Upload your file"),
        gr.Textbox(label="Ask any question related to the file", lines=1),
    ],
    outputs=[gr.Textbox(label="Answer", lines=2)],
    title="@Naseem GenAI Project 2: Question Answering based on the provided file",
    description="This application answers questions based on the uploaded file.",
)

demo.launch()