# tiny-model / app.py
import torch
import gradio as gr
def process_text(input_text):
    # Example CUDA processing: move the input's character codes to the GPU and reverse them
    if torch.cuda.is_available():
        device = torch.device("cuda")
        tensor = torch.tensor([ord(c) for c in input_text], device=device)
        reversed_tensor = tensor.flip(0)
        output_text = ''.join([chr(int(c)) for c in reversed_tensor.cpu()])
        return output_text
    else:
        return "CUDA is not available."
# Define the Gradio interface
interface = gr.Interface(
    fn=process_text,                        # Function to process input
    inputs=gr.Textbox(label="Enter text"),  # Input component
    outputs=gr.Textbox(label="Output"),     # Output component
)
# Launch the Gradio app
interface.launch()
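
# Quick sanity check (hypothetical usage; run in a separate Python session, since
# interface.launch() blocks): on a CUDA-enabled machine,
#   >>> process_text("abc")
#   'cba'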