import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Display names mapped to the Hugging Face model IDs of the SQL-interp checkpoints.
MODELS = {
    "BM1_CS1_Syn (33M)": "withmartian/sql_interp_bm1_cs1_experiment_1.10",
    "BM1_CS2_Syn (33M)": "withmartian/sql_interp_bm1_cs2_experiment_2.10",
    "BM1_CS3_Syn (33M)": "withmartian/sql_interp_bm1_cs3_experiment_3.10",
    "BM1_CS4_Syn (33M)": "withmartian/sql_interp_bm1_cs4_dataset_synonyms_experiment_1.1",
    "BM1_CS5_Syn (33M)": "withmartian/sql_interp_bm1_cs5_dataset_synonyms_experiment_1.2",
    "BM2_CS1_Syn (0.5B)": "withmartian/sql_interp_bm2_cs1_experiment_4.3",
    "BM2_CS2_Syn (0.5B)": "withmartian/sql_interp_bm2_cs2_experiment_5.3",
    "BM2_CS3_Syn (0.5B)": "withmartian/sql_interp_bm2_cs3_experiment_6.3",
    "BM3_CS1_Syn (1B)": "withmartian/sql_interp_bm3_cs1_experiment_7.3",
    "BM3_CS2_Syn (1B)": "withmartian/sql_interp_bm3_cs2_experiment_8.3",
    "BM3_CS3_Syn (1B)": "withmartian/sql_interp_bm3_cs3_experiment_9.3",
}

# Cache of (tokenizer, model) pairs so each checkpoint is only loaded once.
model_cache = {}


def load_model(model_name):
    """Load and cache the tokenizer and model for the selected checkpoint."""
    if model_name not in model_cache:
        model_id = MODELS[model_name]
        tokenizer = AutoTokenizer.from_pretrained(model_id)
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.float16,
            device_map="auto",
        )
        model_cache[model_name] = (tokenizer, model)
    return model_cache[model_name]


def generate_sql(model_name, instruction, schema, max_length=256, temperature=0.0):
    """Generate a SQL query from a natural-language instruction and a table schema."""
    if not model_name or not instruction or not schema:
        return "Please fill in all fields and select a model"
    try:
        tokenizer, model = load_model(model_name)
        prompt = f"### Instruction: {instruction} ### Context: {schema} ### Response:"
        inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
        outputs = model.generate(
            **inputs,
            max_length=max_length,
            # Greedy decoding when temperature is 0, sampling otherwise.
            temperature=temperature if temperature > 0 else 1.0,
            do_sample=temperature > 0,
            pad_token_id=tokenizer.eos_token_id,
        )
        generated = tokenizer.decode(outputs[0], skip_special_tokens=True)
        # Keep only the text after the final "### Response:" marker.
        if "### Response:" in generated:
            sql = generated.split("### Response:")[-1].strip()
        else:
            sql = generated.strip()
        return sql
    except Exception as e:
        return f"Error: {str(e)}"


def model_demo(shared_instruction, shared_schema):
    gr.HTML("""
Transform natural language into SQL