Update newapp.py
newapp.py (CHANGED)
@@ -45,70 +45,47 @@ logger = logging.getLogger(__name__)
 # Initialize geocoder
 geocoder = Nominatim(user_agent="indian_property_verifier", timeout=10)

-#
-
-
-
-
-
-
+# Add memory monitoring function
+def monitor_memory():
+    while True:
+        process = psutil.Process()
+        memory_info = process.memory_info()
+        logger.info(f"Memory usage: {memory_info.rss / 1024 / 1024:.2f} MB")
+        if memory_info.rss > 1 * 1024 * 1024 * 1024:  # If using more than 1GB
+            logger.warning("High memory usage detected, clearing cache")
+            clear_model_cache()
+        time.sleep(300)  # Check every 5 minutes

-#
+# Start memory monitoring in a separate thread
+memory_monitor_thread = threading.Thread(target=monitor_memory, daemon=True)
+memory_monitor_thread.start()
+
+# Initialize CLIP model
 try:
     clip_processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
-    clip_model = CLIPModel.from_pretrained(
-        "openai/clip-vit-base-patch32",
-        quantization_config=quantization_config,
-        device_map="auto"
-    )
+    clip_model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
     has_clip_model = True
-    logger.info("CLIP model loaded successfully")
+    logger.info("CLIP model loaded successfully")
 except Exception as e:
     logger.error(f"Error loading CLIP model: {str(e)}")
     has_clip_model = False

-# Initialize sentence transformer
+# Initialize sentence transformer
 try:
-    sentence_model = SentenceTransformer(
-        'paraphrase-MiniLM-L6-v2',
-        device='cpu',
-        cache_folder='./model_cache'
-    )
+    sentence_model = SentenceTransformer('paraphrase-MiniLM-L6-v2')
     logger.info("Sentence transformer loaded successfully")
 except Exception as e:
     logger.error(f"Error loading sentence transformer: {str(e)}")
     sentence_model = None

-# Initialize spaCy
+# Initialize spaCy
 try:
-    nlp = spacy.load('
+    nlp = spacy.load('en_core_web_md')
     logger.info("spaCy model loaded successfully")
 except Exception as e:
     logger.error(f"Error loading spaCy model: {str(e)}")
     nlp = None

-# Add memory monitoring function
-def monitor_memory():
-    while True:
-        process = psutil.Process()
-        memory_info = process.memory_info()
-        logger.info(f"Memory usage: {memory_info.rss / 1024 / 1024:.2f} MB")
-        if memory_info.rss > 8 * 1024 * 1024 * 1024:  # If using more than 8GB
-            logger.warning("High memory usage detected, clearing cache")
-            clear_model_cache()
-        time.sleep(300)  # Check every 5 minutes
-
-# Start memory monitoring in a separate thread
-memory_monitor_thread = threading.Thread(target=monitor_memory, daemon=True)
-memory_monitor_thread.start()
-
-def clear_model_cache():
-    """Clear model cache and free up memory"""
-    gc.collect()
-    if torch.cuda.is_available():
-        torch.cuda.empty_cache()
-    logger.info("Model cache cleared and memory freed")
-
 def make_json_serializable(obj):
     try:
         if isinstance(obj, (bool, int, float, str, type(None))):
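Note on the new hunk: the rewritten monitor_memory() still calls clear_model_cache(), but this change deletes the helper's definition and does not re-add it. If it is not defined elsewhere in newapp.py (not visible in this hunk), the monitor thread would raise a NameError the first time the 1 GB threshold is crossed. A minimal sketch of such a helper, mirroring the removed implementation, could be:

    import gc
    import logging

    import torch

    logger = logging.getLogger(__name__)


    def clear_model_cache():
        """Clear model cache and free up memory."""
        # Release unreferenced Python objects.
        gc.collect()
        # If a GPU is present, also drop PyTorch's cached CUDA allocations.
        if torch.cuda.is_available():
            torch.cuda.empty_cache()
        logger.info("Model cache cleared and memory freed")

Separately, nlp = spacy.load('en_core_web_md') assumes the medium English model is already installed; if it is not, the except branch fires and nlp stays None. The model can be fetched with: python -m spacy download en_core_web_md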