Spaces · Running on A10G
MekkCyber committed
Commit 114827e · Parent(s): d5a3279
cleaning mem
app.py CHANGED

@@ -224,7 +224,7 @@ def save_model(
         repo_id=repo_name,
         repo_type="model",
     )
-    progress(
+    progress(0.95, desc="Model pushed to Hub")
 
     # Get model architecture as string
     import io
@@ -330,7 +330,16 @@ def quantize_and_save(
             public,
             progress,
         )
-
+        # Clean up the model to free memory
+        del quantized_model
+        # Force garbage collection to release memory
+        import gc
+        gc.collect()
+
+        if torch.cuda.is_available():
+            torch.cuda.empty_cache()
+
+        progress(1.0, desc="Memory cleaned")
         return final_message
 
     except Exception as e:
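The second hunk is the point of the commit: once the model has been pushed, the only live reference to quantized_model is dropped, a garbage-collection pass is forced, and the CUDA allocator's cache is emptied so the Space's A10G starts the next request without leftover allocations. Below is a minimal, self-contained sketch of that pattern; the run_once function and the nn.Linear stand-in for the quantized model are illustrative assumptions, not code from app.py.

import gc

import torch
import torch.nn as nn


def run_once():
    device = "cuda" if torch.cuda.is_available() else "cpu"
    # Stand-in for the quantized model built earlier in the request.
    quantized_model = nn.Linear(4096, 4096).to(device)

    # ... in the real app the model is pushed to the Hub here ...
    final_message = "Model pushed to Hub"

    # Clean up the model to free memory: drop the only reference, force a
    # GC pass, then ask the CUDA caching allocator to return its unused
    # blocks to the driver.
    del quantized_model
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    return final_message


print(run_once())

The order matters: torch.cuda.empty_cache() only releases cached blocks whose tensors are no longer referenced, so the del and gc.collect() have to run first, just as the diff places them before the final progress(1.0, desc="Memory cleaned") call.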