MIRNA-MOUKHTAR2025 committed on
Commit 5f43f70 Β· verified Β· 1 Parent(s): 84a3a78

Upload 2 files

Files changed (1)
  1. app.py +17 -16
app.py CHANGED
@@ -1,25 +1,26 @@
-# app.py
-
 import gradio as gr
 from transformers import pipeline
 
-# Load question generation pipeline (use any other T5-based if you want)
-question_generator = pipeline("e2e-qg", model="valhalla/t5-base-e2e-qg")
+# βœ… Use the correct pipeline task
+question_generator = pipeline("text2text-generation", model="valhalla/t5-base-e2e-qg")
 
 def generate_questions(text):
-    try:
-        results = question_generator(text)
-        questions = [q['question'] for q in results]
-        return "\n".join(questions)
-    except Exception as e:
-        return f"Error: {str(e)}"
+    # Split text into chunks (optional)
+    chunks = text.split(". ")
+    questions = []
+    for chunk in chunks:
+        if len(chunk.strip()) > 0:
+            result = question_generator(chunk, max_length=64)[0]['generated_text']
+            questions.append(f"❓ {result}")
+    return "\n".join(questions)
 
-iface = gr.Interface(
+# Gradio interface
+interface = gr.Interface(
     fn=generate_questions,
-    inputs=gr.Textbox(lines=10, label="Paste your text here"),
-    outputs=gr.Textbox(lines=10, label="Generated Questions"),
-    title="T5-based Question Generator",
-    description="Paste a long paragraph, and this tool will generate multiple questions using a T5 model."
+    inputs=gr.Textbox(lines=15, placeholder="Paste your long text here..."),
+    outputs="text",
+    title="πŸ” Generate Questions from Text",
+    description="Uses a T5 model to generate questions from a multi-sentence paragraph."
 )
 
-iface.launch()
+interface.launch()
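
For a quick local sanity check of the new pipeline call outside Gradio, a minimal sketch (assuming transformers is installed and the valhalla/t5-base-e2e-qg checkpoint can be downloaded; the sample sentence and variable names are illustrative, not part of the commit):

from transformers import pipeline

# Same task and checkpoint as the updated app.py.
qg = pipeline("text2text-generation", model="valhalla/t5-base-e2e-qg")

sample = "Gradio lets you build web demos for machine learning models in Python."
result = qg(sample, max_length=64)

# The text2text-generation pipeline returns a list of dicts keyed by 'generated_text';
# this model may pack several questions into one string, separated by a <sep> token.
print(result[0]["generated_text"])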