Bazyl committed on
Commit
bc56537
·
1 Parent(s): 83b41b1

reduce inference size

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -8,7 +8,7 @@ from typing import Tuple, Dict
8
  def predict(img) -> Tuple[Dict, float]:
9
  start_time = timer()
10
  classifier = pipeline("image-classification", model="bazyl/gtsrb-model")
11
- result = classifier(img)
12
  response = {result[i]["label"]: result[i]["score"] for i in range(len(result))}
13
  pred_time = round(timer() - start_time, 5)
14
  return response, pred_time
@@ -24,7 +24,7 @@ demo = gr.Interface(
24
  fn=predict,
25
  inputs=gr.Image(type="pil"),
26
  outputs=[
27
- gr.Label(num_top_classes=5, label="Predictions"),
28
  gr.Number(label="Prediction time (s)"),
29
  ],
30
  examples=example_list,
 
8
def predict(img) -> Tuple[Dict, float]:
    """Classify a traffic-sign image and report how long inference took.

    Args:
        img: Input image (a PIL image, per the Gradio ``gr.Image(type="pil")``
            input component wired to this function).

    Returns:
        Tuple of:
        - dict mapping each predicted label to its score (top 3 predictions),
        - inference time in seconds, rounded to 5 decimal places.
    """
    start_time = timer()
    # Build the Hugging Face pipeline once and reuse it across calls.
    # The original re-created it on every request, which reloads the full
    # model each time and dominates the reported prediction time.
    if not hasattr(predict, "_classifier"):
        predict._classifier = pipeline(
            "image-classification", model="bazyl/gtsrb-model"
        )
    # top_k=3 keeps the response small, matching gr.Label(num_top_classes=3).
    result = predict._classifier(img, top_k=3)
    # Pipeline output is a list of {"label": ..., "score": ...} dicts.
    response = {pred["label"]: pred["score"] for pred in result}
    pred_time = round(timer() - start_time, 5)
    return response, pred_time
 
24
  fn=predict,
25
  inputs=gr.Image(type="pil"),
26
  outputs=[
27
+ gr.Label(num_top_classes=3, label="Predictions"),
28
  gr.Number(label="Prediction time (s)"),
29
  ],
30
  examples=example_list,