import os

import gradio as gr
from google import genai
from google.genai import types


def generate(prompt):
    # Authenticate with the API key taken from the GEMINI_API_KEY environment variable.
    client = genai.Client(api_key=os.environ.get("GEMINI_API_KEY"))

    model = "gemini-2.5-pro-exp-03-25"
    contents = [
        types.Content(
            role="user",
            parts=[types.Part.from_text(text=prompt)],
        ),
    ]
    generate_content_config = types.GenerateContentConfig(
        temperature=2,
        response_mime_type="text/plain",
    )

    # Stream the response and accumulate the text chunks into a single string.
    response_text = ""
    for chunk in client.models.generate_content_stream(
        model=model, contents=contents, config=generate_content_config
    ):
        if chunk.text:
            response_text += chunk.text

    return response_text


# Simple Gradio UI: a prompt textbox in, generated text out.
demo = gr.Interface(
    fn=generate,
    inputs=gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
    outputs="text",
    title="Gemini 2.5 Pro Generator",
    description="Enter a prompt and get AI-generated text.",
)

demo.launch()
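# A minimal sketch of how this script might be run locally (the file name
# "app.py" is an assumption, not part of the original snippet):
#
#   export GEMINI_API_KEY="your-api-key"
#   python app.py
#
# Gradio then serves the interface on http://127.0.0.1:7860 by default.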