Spaces: Running on Zero
玙珲 committed
Commit · fe15e5b · 1 Parent(s): 0753fd3
2nd commit
app.py
CHANGED
@@ -100,7 +100,7 @@ def run_inference(
         "eos_token_id": model.text_tokenizer.eos_token_id, "pad_token_id": model.text_tokenizer.pad_token_id
     }
 
-    with torch.
+    with torch.inference_mode():
         try:
             outputs = model.generate(inputs=input_ids, pixel_values=pixel_values, grid_thws=grid_thws, **gen_kwargs)
         except Exception as e:
@@ -122,7 +122,7 @@ def toggle_media_input(choice: str) -> Tuple:
 
 
 # --- Build Gradio Application ---
-@spaces.GPU
+# @spaces.GPU
 def build_demo(model_path: str):
     """Builds the Gradio user interface for the model."""
     global model
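For context, a minimal sketch of how the two touched pieces commonly fit together in a ZeroGPU Space: generation runs under torch.inference_mode(), and the @spaces.GPU decorator sits on the function that actually needs the GPU rather than on the UI builder. Only the model.generate(...) call and the module-level model global are taken from the diff above; the decorator placement, the run_inference signature, and the error handling are illustrative assumptions, not this repository's actual layout.

import spaces
import torch

model = None  # set by build_demo() via `global model` in app.py


@spaces.GPU  # ZeroGPU attaches a GPU only while this function runs (assumed placement)
def run_inference(input_ids, pixel_values, grid_thws, gen_kwargs):
    # Run generation without autograd bookkeeping.
    with torch.inference_mode():
        try:
            outputs = model.generate(
                inputs=input_ids,
                pixel_values=pixel_values,
                grid_thws=grid_thws,
                **gen_kwargs,
            )
        except Exception as e:
            # Illustrative error handling; the app's own except branch is not shown in the diff.
            raise RuntimeError(f"Generation failed: {e}") from e
    return outputs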