Bronco92 committed on
Commit
b3f9249
·
1 Parent(s): aa47c1e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -1
app.py CHANGED
@@ -1,3 +1,37 @@
1
  import gradio as gr
 
 
2
 
3
- gr.Interface.load("models/neulab/omnitab-large-finetuned-wtq").launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import gradio as gr
import pandas as pd
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# OmniTab is a BART-based sequence-to-sequence table-QA model, so it must be
# loaded with AutoModelForSeq2SeqLM. AutoModelForTableQuestionAnswering maps to
# TAPAS-style encoder architectures and cannot load this checkpoint.
# (No Hugging Face API key is needed — the checkpoint is public.)
model_name = "neulab/omnitab-large-finetuned-wtq"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
9
+
# Define the question-answering function used by the Gradio interface.
def answer_question(csv_file, question):
    """Answer a natural-language question about an uploaded CSV table.

    Args:
        csv_file: Gradio file wrapper; ``csv_file.name`` is the path to the
            uploaded CSV on disk.
        question: The question to ask about the table.

    Returns:
        The model's answer as a stripped string.
    """
    # Read the CSV into a DataFrame. The OmniTab/TAPEX tokenizer requires every
    # cell to be a string, so cast the whole table up front.
    table = pd.read_csv(csv_file.name).astype(str)

    # The tokenizer encodes table and query together in a single call — there
    # is no `table_encode` method, and encoding them separately is invalid.
    encoding = tokenizer(table=table, query=question, return_tensors="pt")

    # OmniTab is seq2seq: the answer is produced by generation, not by
    # decoding classifier logits from a forward pass.
    outputs = model.generate(**encoding)
    predicted_answer = tokenizer.batch_decode(outputs, skip_special_tokens=True)

    return predicted_answer[0].strip()
26
+
# Create and launch the Gradio interface.
# `gr.inputs.*` / `gr.outputs.*` were deprecated in Gradio 3 and removed in
# Gradio 4 — components are constructed directly from the top-level namespace.
gr.Interface(
    fn=answer_question,
    inputs=[
        gr.File(label="CSV File"),
        gr.Textbox(lines=2, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    title="Table Question Answering",
    description="Upload a CSV file and ask a question about the data.",
).launch()