Update app.py
app.py
CHANGED
@@ -1,239 +1,3 @@
-# from fastapi import FastAPI, UploadFile, File, HTTPException
-# import cv2
-# import torch
-# import pandas as pd
-# from PIL import Image
-# from transformers import AutoImageProcessor, AutoModelForImageClassification
-# from tqdm import tqdm
-# import shutil
-# from fastapi.middleware.cors import CORSMiddleware
-# from fastapi.responses import HTMLResponse
-# from huggingface_hub import HfApi
-# import os
-# from dotenv import load_dotenv
-# from typing import Optional
-
-# # Load environment variables, including the Hugging Face API key
-# load_dotenv()
-
-# api_key = os.getenv("HUGGINGFACE_API_KEY")
-# if not api_key:
-#     raise ValueError("La clé API Hugging Face n'est pas définie dans le fichier .env.")
-
-# # Initialize the Hugging Face API
-# hf_api = HfApi()
-
-# app = FastAPI()
-
-# # Add CORS middleware to allow requests from Vue.js frontend
-# app.add_middleware(
-#     CORSMiddleware,
-#     allow_origins=[
-#         "http://localhost:8080",
-#         "https://labeling2-163849140747.europe-west9.run.app/",
-#         "https://my-vue-app-latest-qqzd.onrender.com/",
-#     ],
-#     allow_credentials=True,
-#     allow_methods=["*"],  # Allow all HTTP methods (GET, POST, etc.)
-#     allow_headers=["*"],  # Allow all headers (Content-Type, Authorization, etc.)
-# )
-
-# # Load the image processor and the locally fine-tuned model
-# local_model_path = r'./vit-finetuned-ucf101'
-# processor = AutoImageProcessor.from_pretrained("google/vit-base-patch16-224")
-# model = AutoModelForImageClassification.from_pretrained(local_model_path)
-# model.eval()
-
-# # Function to classify an image
-# def classifier_image(image):
-#     image_pil = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
-#     inputs = processor(images=image_pil, return_tensors="pt")
-#     with torch.no_grad():
-#         outputs = model(**inputs)
-#         logits = outputs.logits
-#     predicted_class_idx = logits.argmax(-1).item()
-#     predicted_class = model.config.id2label[predicted_class_idx]
-#     return predicted_class
-
-# # Function to process the video and identify "Surfing" sequences
-# def identifier_sequences_surfing(video_path, intervalle=0.5):
-#     cap = cv2.VideoCapture(video_path)
-#     frame_rate = cap.get(cv2.CAP_PROP_FPS)
-#     total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
-#     frame_interval = int(frame_rate * intervalle)
-
-#     sequences_surfing = []
-#     frame_index = 0
-#     in_surf_sequence = False
-#     start_timestamp = None
-
-#     with tqdm(total=total_frames, desc="Traitement des frames de la vidéo", unit="frame") as pbar:
-#         success, frame = cap.read()
-#         while success:
-#             if frame_index % frame_interval == 0:
-#                 timestamp = round(frame_index / frame_rate, 2)
-#                 classe = classifier_image(frame)
-
-#                 if classe == "Surfing" and not in_surf_sequence:
-#                     in_surf_sequence = True
-#                     start_timestamp = timestamp
-#                 elif classe != "Surfing" and in_surf_sequence:
-#                     in_surf_sequence = False
-#                     end_timestamp = timestamp
-#                     sequences_surfing.append((start_timestamp, end_timestamp))
-
-#             success, frame = cap.read()
-#             frame_index += 1
-#             pbar.update(1)
-
-#     if in_surf_sequence:
-#         sequences_surfing.append((start_timestamp, round(frame_index / frame_rate, 2)))
-
-#     cap.release()
-#     dataframe_sequences = pd.DataFrame(sequences_surfing, columns=["Début", "Fin"])
-#     return dataframe_sequences
-
-# # Function to convert the sequences to JSON format
-# def convertir_sequences_en_json(dataframe):
-#     events = []
-#     blocks = []
-#     for idx, row in dataframe.iterrows():
-#         block = {
-#             "id": f"Surfing{idx + 1}",
-#             "start": round(row["Début"], 2),
-#             "end": round(row["Fin"], 2)
-#         }
-#         blocks.append(block)
-#     event = {
-#         "event": "Surfing",
-#         "blocks": blocks
-#     }
-#     events.append(event)
-#     return events
-
-# # # Endpoint to analyze the video and upload it to Hugging Face
-# # @app.post("/analyze_video/")
-# # async def analyze_video(user_name: str, file: UploadFile = File(...)):
-# #     try:
-# #         # Save the video temporarily
-# #         temp_file_path = f"/tmp/{file.filename}"
-# #         with open(temp_file_path, "wb") as buffer:
-# #             shutil.copyfileobj(file.file, buffer)
-
-# #         # Upload the video to the Hugging Face Hub
-# #         dataset_name = "2nzi/Video-Sequence-Labeling"
-# #         target_path_in_repo = f"{user_name}/raw/{file.filename}"
-
-# #         hf_api.upload_file(
-# #             path_or_fileobj=temp_file_path,
-# #             path_in_repo=target_path_in_repo,
-# #             repo_id=dataset_name,
-# #             repo_type="dataset",
-# #             token=api_key
-# #         )
-
-# #         # Analyze the video to find "Surfing" sequences
-# #         dataframe_sequences = identifier_sequences_surfing(temp_file_path, intervalle=1)
-# #         json_result = convertir_sequences_en_json(dataframe_sequences)
-
-# #         # Delete the temporary file after the upload
-# #         os.remove(temp_file_path)
-
-# #         return {"message": "Video uploaded and analyzed successfully!",
-# #                 "file_url": f"https://huggingface.co/datasets/{dataset_name}/resolve/main/{target_path_in_repo}",
-# #                 "analysis": json_result}
-
-# #     except Exception as e:
-# #         raise HTTPException(status_code=500, detail=f"Failed to upload or analyze video: {str(e)}")
-
-
-# @app.post("/analyze_video/")
-# async def analyze_video(user_name: str, file: Optional[UploadFile] = File(None), video_url: Optional[str] = None):
-#     try:
-#         # Check whether the video is provided as a file or as a URL
-#         if file:
-#             # Save the video temporarily
-#             temp_file_path = f"/tmp/{file.filename}"
-#             with open(temp_file_path, "wb") as buffer:
-#                 shutil.copyfileobj(file.file, buffer)
-
-#             # Upload the video to the Hugging Face Hub
-#             dataset_name = "2nzi/Video-Sequence-Labeling"
-#             target_path_in_repo = f"{user_name}/raw/{file.filename}"
-
-#             hf_api.upload_file(
-#                 path_or_fileobj=temp_file_path,
-#                 path_in_repo=target_path_in_repo,
-#                 repo_id=dataset_name,
-#                 repo_type="dataset",
-#                 token=os.getenv("HUGGINGFACE_WRITE_API_KEY")
-#             )
-
-#             # URL of the video on Hugging Face
-#             video_url = f"https://huggingface.co/datasets/{dataset_name}/resolve/main/{target_path_in_repo}"
-#             # Delete the temporary file after the upload
-#             os.remove(temp_file_path)
-
-#         # Make sure a valid URL is available at this point
-#         if not video_url:
-#             raise HTTPException(status_code=400, detail="No valid video URL or file provided.")
-
-#         # Analyze the video via the URL
-#         dataframe_sequences = identifier_sequences_surfing(video_url, intervalle=1)
-#         json_result = convertir_sequences_en_json(dataframe_sequences)
-
-#         return {
-#             "message": "Video uploaded and analyzed successfully!",
-#             "file_url": video_url,
-#             "analysis": json_result
-#         }
-
-#     except Exception as e:
-#         raise HTTPException(status_code=500, detail=f"Failed to upload or analyze video: {str(e)}")
-
-
-# # Function to upload a video to a Hugging Face dataset
-# def upload_to_hf_dataset(user_name: str, video_path: str):
-#     dataset_name = "2nzi/Video-Sequence-Labeling"
-#     repo_path = f"{user_name}/raw/{os.path.basename(video_path)}"
-
-#     try:
-#         hf_api.upload_file(
-#             path_or_fileobj=video_path,
-#             path_in_repo=repo_path,
-#             repo_id=dataset_name,
-#             repo_type="dataset",
-#             token=api_key
-#         )
-
-#         # Return the URL of the video after the upload
-#         url = f"https://huggingface.co/datasets/{dataset_name}/resolve/main/{repo_path}"
-#         return {"status": "success", "url": url}
-#     except Exception as e:
-#         return {"status": "error", "message": str(e)}
-
-
-# @app.get("/", response_class=HTMLResponse)
-# async def index():
-#     return (
-#         """
-#         <html>
-#             <body>
-#                 <h1>Hello world!</h1>
-#                 <p>This `/` is the most simple and default endpoint.</p>
-#                 <p>If you want to learn more, check out the documentation of the API at
-#                 <a href='/docs'>/docs</a> or
-#                 <a href='https://2nzi-video-sequence-labeling.hf.space/docs' target='_blank'>external docs</a>.
-#                 </p>
-#             </body>
-#         </html>
-#         """
-#     )
-
-# # Launch the application with uvicorn (command line)
-# # uvicorn main:app --reload
-# # http://localhost:8000/docs#/
-# # (.venv) PS C:\Users\antoi\Documents\Work_Learn\Labeling-Deploy\FastAPI> uvicorn main:app --host 0.0.0.0 --port 8000 --workers 1
 
 from fastapi import FastAPI, UploadFile, File, HTTPException
 import cv2