Athspi committed on
Commit cca6563 · verified · 1 Parent(s): d375a16

Update app.py

Files changed (1)
  1. app.py +15 -15
app.py CHANGED
@@ -1,32 +1,31 @@
-import base64
 import os
-from google import genai
-from google.genai import types
+import google.generativeai as genai
+from google.generativeai import types
 import gradio as gr

 def generate_text(user_input):
     try:
-        client = genai.Client(api_key=os.environ.get("GEMINI_API_KEY"))
+        # Initialize client with API key
+        genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

         model = "gemini-2.5-pro-exp-03-25"
         contents = [
             types.Content(
                 role="user",
-                parts=[
-                    types.Part.from_text(text=user_input),
-                ],
+                parts=[types.Part.from_text(user_input)],
             ),
         ]
-        generate_content_config = types.GenerateContentConfig(
+        config = types.GenerationConfig(
             temperature=2,
             response_mime_type="text/plain",
         )

+        # Generate response
         response = []
-        for chunk in client.models.generate_content_stream(
+        for chunk in genai.generate_content_stream(
             model=model,
             contents=contents,
-            config=generate_content_config,
+            generation_config=config,
         ):
             response.append(chunk.text)

@@ -35,13 +34,14 @@ def generate_text(user_input):
     except Exception as e:
         return f"Error: {str(e)}"

+# Create Gradio interface
 iface = gr.Interface(
     fn=generate_text,
-    inputs=gr.Textbox(lines=4, placeholder="Enter your prompt here...", label="Input Text"),
-    outputs=gr.Textbox(label="Generated Text"),
-    title="Gemini 2.5 Pro Text Generator",
-    description="Generate text using the Gemini 2.5 Pro model with streaming support",
-    allow_flagging="never"
+    inputs=gr.Textbox(lines=4, placeholder="Enter your prompt...", label="Input"),
+    outputs=gr.Textbox(label="Output"),
+    title="Gemini 2.5 Pro Demo",
+    description="Generate text with Gemini 2.5 Pro (streaming enabled)",
+    allow_flagging="never",
 )

 if __name__ == "__main__":
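
Note: the updated app.py assumes google.generativeai exposes a top-level generate_content_stream helper and a types.Content/types.Part.from_text constructor pair; if those names are not present in the installed SDK version, the documented streaming pattern goes through GenerativeModel.generate_content(..., stream=True). A minimal sketch under that assumption, reusing only the model name and environment variable from the diff:

import os
import google.generativeai as genai

# Configure the SDK with the same environment variable used in app.py
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

# Model name taken from the diff
model = genai.GenerativeModel("gemini-2.5-pro-exp-03-25")

def generate_text(user_input: str) -> str:
    try:
        # stream=True yields partial chunks as they arrive instead of one full response
        stream = model.generate_content(
            user_input,
            generation_config=genai.GenerationConfig(
                temperature=2,
                response_mime_type="text/plain",
            ),
            stream=True,
        )
        return "".join(chunk.text for chunk in stream)
    except Exception as e:
        return f"Error: {e}"

Wired into the same gr.Interface as above, this keeps the streaming behaviour the commit message describes while staying on the SDK's documented surface.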