Rausda6 committed
Commit 16dfb59 · verified · 1 Parent(s): 89ad5ae

Update app.py

Files changed (1):
  app.py +3 -2
app.py CHANGED
@@ -14,6 +14,7 @@ os.environ["HF_HUB_DISABLE_SYMLINKS_WARNING"] = "1"
 import gradio as gr
 import torch
 from transformers import AutoProcessor, LlavaForConditionalGeneration
+from PIL import Image
 
 
 # Hugging Face model identifier. See the model card for more details:
@@ -59,7 +60,7 @@ def load_model():
 MODEL, PROCESSOR = load_model()
 
 
-def answer_question(image: "PIL.Image.Image", question: str) -> str:
+def answer_question(image: Image.Image, question: str) -> str:
     """Generate an answer for the given question about the uploaded image.
 
     Parameters
@@ -149,7 +150,7 @@ def build_interface() -> gr.Interface:
         outputs=gr.Textbox(label="Answer"),
         title="Visual Question Answering with LLaVA Dinov2 InternLM2 7B",
         description=description,
-        allow_flagging="never",
+        flagging_mode="never",
     )
     return iface
 
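The first two hunks belong together: importing PIL.Image at the top of the module lets the string annotation "PIL.Image.Image" on answer_question become a real Image.Image type. The diff does not show the function body, so the following is only a minimal sketch of how such a signature is typically used with AutoProcessor and LlavaForConditionalGeneration; the prompt template, token budget, and the assumption that MODEL and PROCESSOR come from load_model() are illustrative, not taken from this repository:

```python
from PIL import Image


def answer_question(image: Image.Image, question: str) -> str:
    """Sketch of a LLaVA-style VQA call; not the repository's actual body."""
    # MODEL and PROCESSOR are assumed to come from load_model() at module scope.
    # Hypothetical chat prompt with one image slot; the real app may use a
    # different template for LLaVA Dinov2 InternLM2 7B.
    prompt = f"USER: <image>\n{question} ASSISTANT:"
    inputs = PROCESSOR(text=prompt, images=image, return_tensors="pt").to(MODEL.device)
    # Generate a short answer and keep only the text after the assistant tag.
    output_ids = MODEL.generate(**inputs, max_new_tokens=128)
    decoded = PROCESSOR.decode(output_ids[0], skip_special_tokens=True)
    return decoded.split("ASSISTANT:")[-1].strip()
```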
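The last hunk tracks a Gradio API rename: newer Gradio releases (5.x) accept flagging_mode on gr.Interface where older ones used allow_flagging. A minimal sketch of the constructor after the change; the stub handler, the inputs list, and the description text are assumptions for illustration, not code from app.py:

```python
import gradio as gr


def answer_question(image, question: str) -> str:  # stand-in for the real handler
    return "placeholder answer"


description = "Ask a question about the uploaded image."  # stand-in for the app's text

iface = gr.Interface(
    fn=answer_question,
    inputs=[gr.Image(type="pil", label="Image"), gr.Textbox(label="Question")],  # assumed inputs
    outputs=gr.Textbox(label="Answer"),
    title="Visual Question Answering with LLaVA Dinov2 InternLM2 7B",
    description=description,
    flagging_mode="never",  # replaces allow_flagging="never" used by older Gradio
)
```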