luisarizmendi committed on
Commit 52f9205 · 1 Parent(s): 392f34f
Files changed (1)
  1. app.py +6 -11
app.py CHANGED
@@ -7,16 +7,6 @@ import torch
 
 DEFAULT_MODEL_URL = "https://github.com/luisarizmendi/ai-apps/raw/refs/heads/main/models/luisarizmendi/object-detector-hardhat-or-hat/object-detector-hardhat-or-hat.pt"
 
-def load_model(model_input):
-    model = YOLO(model_input)
-    if torch.cuda.is_available():
-        model.to('cuda')
-        print("Using GPU for inference")
-    else:
-        print("Using CPU for inference")
-
-    return model
-
 def detect_objects_in_files(model_input, files):
     """
     Processes uploaded images for object detection.
@@ -24,7 +14,12 @@ def detect_objects_in_files(model_input, files):
     if not files:
         return "No files uploaded.", []
 
-    model = load_model(model_input)
+    model = YOLO(str(model_input))
+    if torch.cuda.is_available():
+        model.to('cuda')
+        print("Using GPU for inference")
+    else:
+        print("Using CPU for inference")
 
     results_images = []
     for file in files:
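Net effect of the commit: the load_model helper is removed and detect_objects_in_files now builds the YOLO model inline, wrapping model_input in str() before passing it to the constructor and moving the model to CUDA when a GPU is available. A minimal sketch of calling the updated function directly follows; it is not part of the commit and assumes ultralytics and torch are installed, that app.py exposes detect_objects_in_files and DEFAULT_MODEL_URL as shown in the diff, and that the function returns a (message, images) pair as its empty-input branch suggests.

# Hypothetical local invocation of the updated function (not from app.py).
from app import DEFAULT_MODEL_URL, detect_objects_in_files

# Because the new code calls str(model_input), a URL string or a Path-like
# value should both be accepted here.
message, annotated_images = detect_objects_in_files(
    DEFAULT_MODEL_URL,       # weights URL defined in app.py
    ["worksite-photo.jpg"],  # any local test image path (hypothetical)
)
print(message)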