# V04
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import requests
def get_hfhub_models():
    """Fetch a list of models available on the Hugging Face Hub."""
    # The /api/models endpoint returns a JSON array of model objects.
    # Limit the request and filter for text-generation models so the dropdown stays usable.
    response = requests.get(
        "https://huggingface.co./api/models",
        params={"limit": 50, "filter": "text-generation"},
    )
    if response.status_code == 200:
        return [model["modelId"] for model in response.json()]
    else:
        raise Exception(f"Error while fetching models: {response.status_code}")
def load_model(model_name):
    """Load the model and its tokenizer."""
    model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    return model, tokenizer
def generate_text(model, tokenizer, input_text, max_length, temperature):
    """Generate text with the model."""
    inputs = tokenizer(input_text, return_tensors="pt")
    # Sampling must be enabled for temperature to have any effect;
    # the slider value arrives as a float, while generate expects an integer length.
    output = model.generate(
        **inputs,
        max_length=int(max_length),
        temperature=temperature,
        do_sample=True,
    )
    return tokenizer.decode(output[0], skip_special_tokens=True)
def main(input_text, max_length, temperature, model_name):
    """Main entry point: load the selected model and generate text."""
    model, tokenizer = load_model(model_name)
    generated_text = generate_text(model, tokenizer, input_text, max_length, temperature)
    return generated_text
demo = gr.Blocks()

with demo:
    gr.Markdown("# Language Model")
    with gr.Row():
        input_text = gr.Textbox(label="Input text")
    with gr.Row():
        max_length_slider = gr.Slider(50, 500, label="Maximum length", value=200)
        temperature_slider = gr.Slider(0.1, 1.0, label="Temperature", value=0.7)
        model_name_dropdown = gr.Dropdown(choices=get_hfhub_models(), label="Select a model", interactive=True)
    with gr.Row():
        submit_button = gr.Button("Submit")
    output_text = gr.Textbox(label="Generated text")

    submit_button.click(
        main,
        inputs=[input_text, max_length_slider, temperature_slider, model_name_dropdown],
        outputs=output_text,
        queue=False,
    )
if __name__ == "__main__":
    demo.launch()
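
# Note (an assumption based on the imports above): running this Space locally requires the
# gradio, transformers, torch, and requests packages, e.g. listed in the Space's requirements.txt.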