import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Initialize the model and tokenizer.
# sqlcoder-70b-alpha is very large; half precision and automatic device placement
# keep memory usage manageable on GPU-backed hardware.
tokenizer = AutoTokenizer.from_pretrained("defog/sqlcoder-70b-alpha")
model = AutoModelForCausalLM.from_pretrained(
    "defog/sqlcoder-70b-alpha",
    torch_dtype=torch.float16,
    device_map="auto",
)


def generate_sql(prompt):
    """Generate SQL code based on the provided prompt."""
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

    # Generate SQL code (pass the attention mask along with the input IDs)
    outputs = model.generate(
        **inputs,
        max_length=1024,
        temperature=0.1,
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode the generated SQL
    sql_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return sql_code


# Create Gradio interface
demo = gr.Interface(
    fn=generate_sql,
    inputs=gr.Textbox(lines=5, placeholder="Describe the SQL query you need..."),
    outputs=gr.Textbox(lines=10, label="Generated SQL"),
    title="SQL Code Generator",
    description="Generate SQL code using defog/sqlcoder-70b-alpha. Enter your request in natural language.",
)

# Launch the app
if __name__ == "__main__":
    demo.launch()
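
# Usage sketch (illustrative only, kept commented out so it never runs at import time):
# sqlcoder models generally work best when the prompt includes both the question and
# the relevant CREATE TABLE schema. The template below is an assumption, not the
# model's official prompt format; adapt it to the format documented on the model card.
#
#     prompt = (
#         "### Task\n"
#         "Generate a SQL query that answers the question below.\n\n"
#         "### Database Schema\n"
#         "CREATE TABLE orders (id INT, customer_id INT, total DECIMAL);\n\n"
#         "### Question\n"
#         "What is the total order value per customer?\n"
#     )
#     print(generate_sql(prompt))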