whackthejacker committed on
Commit 4ac04b5 · verified · 1 Parent(s): bf3219e

Update app.py

Files changed (1)
  1. app.py +45 -33
app.py CHANGED
@@ -1,48 +1,60 @@
  import gradio as gr

- # Load the models
  models = {
-     "bigcode/python-stack-v1-functions-filtered-sc2-subset": gr.Interface.load("bigcode/python-stack-v1-functions-filtered-sc2-subset"),
-     "bigcode/python-stack-v1-functions-filtered-sc2": gr.Interface.load("bigcode/python-stack-v1-functions-filtered-sc2"),
-     "muellerzr/python-stack-v1-functions-filtered-llama-3-8B": gr.Interface.load("muellerzr/python-stack-v1-functions-filtered-llama-3-8B"),
-     "YoLo2000/python-stack-functions-filtered": gr.Interface.load("YoLo2000/python-stack-functions-filtered"),
-     "YoLo2000/python-stack-functions-filteredbigcode/python-stack-v1-functions-filtered-sc2": gr.Interface.load("YoLo2000/python-stack-functions-filteredbigcode/python-stack-v1-functions-filtered-sc2"),
-     "TheBloke/Python-Code-13B-GGUF": gr.Interface.load("TheBloke/Python-Code-13B-GGUF"),
-     "replit/replit-code-v1_5-3b": gr.Interface.load("replit/replit-code-v1_5-3b"),
-     "neulab/codebert-python": gr.Interface.load("neulab/codebert-python")
  }

- # Load the datasets
  datasets = {
-     "kye/all-huggingface-python-code": gr.Dataset.load("kye/all-huggingface-python-code"),
-     "ajibawa-2023/WikiHow": gr.Dataset.load("ajibawa-2023/WikiHow"),
-     "ajibawa-2023/Code-74k-ShareGPT": gr.Dataset.load("ajibawa-2023/Code-74k-ShareGPT"),
-     "ajibawa-2023/Software-Architectural-Frameworks": gr.Dataset.load("ajibawa-2023/Software-Architectural-Frameworks"),
-     "ajibawa-2023/Python-Code-23k-ShareGPT": gr.Dataset.load("ajibawa-2023/Python-Code-23k-ShareGPT"),
-     "HuggingFaceFW/fineweb": gr.Dataset.load("HuggingFaceFW/fineweb"),
-     "kye/all-huggingface-python-code-2": gr.Dataset.load("kye/all-huggingface-python-code-2"),
-     "suvadityamuk/huggingface-transformers-code-dataset": gr.Dataset.load("suvadityamuk/huggingface-transformers-code-dataset")
  }

- # Define the interface
- def generate_code(prompt, model, dataset, temperature, max_length):
-     model_instance = models[model]
-     dataset_instance = datasets[dataset]
-     output = model_instance.generate(prompt, dataset_instance, temperature, max_length)
-     return output

  iface = gr.Interface(
      fn=generate_code,
      inputs=[
-         gr.Textbox(label="Prompt"),
-         gr.Dropdown(label="Model", choices=list(models.keys())),
-         gr.Dropdown(label="Dataset", choices=list(datasets.keys())),
-         gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, default=0.5),
-         gr.Slider(label="Max Length", minimum=10, maximum=1000, default=200)
-     ],
-     outputs="text"
  )

- # Launch the interface
  iface.launch()
  import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ from datasets import load_dataset

+ # Define model loading function
+ def load_model(model_name):
+     tokenizer = AutoTokenizer.from_pretrained(model_name)
+     model = AutoModelForCausalLM.from_pretrained(model_name)
+     return tokenizer, model
+
+ # Load selected models
  models = {
+     "bigcode/python-stack-v1-functions-filtered-sc2-subset": "bigcode/python-stack-v1-functions-filtered-sc2-subset",
+     "bigcode/python-stack-v1-functions-filtered-sc2": "bigcode/python-stack-v1-functions-filtered-sc2",
+     "muellerzr/python-stack-v1-functions-filtered-llama-3-8B": "muellerzr/python-stack-v1-functions-filtered-llama-3-8B",
+     "TheBloke/Python-Code-13B-GGUF": "TheBloke/Python-Code-13B-GGUF",
+     "replit/replit-code-v1_5-3b": "replit/replit-code-v1_5-3b",
+     "neulab/codebert-python": "neulab/codebert-python"
  }

+ # Load selected datasets
  datasets = {
+     "kye/all-huggingface-python-code": "kye/all-huggingface-python-code",
+     "ajibawa-2023/Python-Code-23k-ShareGPT": "ajibawa-2023/Python-Code-23k-ShareGPT",
+     "suvadityamuk/huggingface-transformers-code-dataset": "suvadityamuk/huggingface-transformers-code-dataset"
  }

+ # Define the function for code generation
+ def generate_code(prompt, model_name, dataset_name, temperature, max_length):
+     tokenizer, model = load_model(models[model_name])
+
+     # Load dataset (for reference, not directly used)
+     dataset = load_dataset(datasets[dataset_name], split="train")
+
+     # Tokenize input prompt
+     inputs = tokenizer(prompt, return_tensors="pt")
+
+     # Generate output
+     output_ids = model.generate(**inputs, temperature=temperature, max_length=max_length)
+     generated_code = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+
+     return generated_code

+ # Create Gradio Interface
  iface = gr.Interface(
      fn=generate_code,
      inputs=[
+         gr.Textbox(label="Prompt"),
+         gr.Dropdown(label="Model", choices=list(models.keys())),
+         gr.Dropdown(label="Dataset", choices=list(datasets.keys())),
+         gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, value=0.5),
+         gr.Slider(label="Max Length", minimum=10, maximum=1000, value=200)
+     ],
+     outputs="text",
+     title="AI Code Generator with Hugging Face Models",
+     description="Select a model and dataset, input a prompt, and generate Python code using AI models."
  )

+ # Launch the Gradio App
  iface.launch()
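
Note on the updated generate_code: each request reloads the selected checkpoint and dataset from the Hub, and in transformers the temperature argument only takes effect when sampling is enabled (do_sample=True in model.generate). Below is a minimal sketch of one way to avoid repeated loads, reusing the load_model helper from the diff above; the lru_cache wrapper is illustrative and not part of this commit.

from functools import lru_cache
from transformers import AutoModelForCausalLM, AutoTokenizer

# Illustrative sketch: memoize (tokenizer, model) pairs per checkpoint name so
# repeated generate_code calls reuse an already-loaded model instead of
# downloading and re-initializing it on every request.
@lru_cache(maxsize=2)
def load_model(model_name):
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model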