bikas committed
Commit 7100792 · 1 Parent(s): cd4de3a
Files changed (1)
  1. new_image.py +0 -143
new_image.py DELETED
@@ -1,143 +0,0 @@
- import os
- import cv2
- import pickle
- import shutil
- import numpy as np
- from PIL import Image
- from mtcnn import MTCNN
- from numpy import asarray
- from keras.preprocessing import image
- from keras_vggface.vggface import VGGFace
- from keras_vggface.utils import preprocess_input
- import tensorflow as tf
-
- # Configure GPU memory growth for TensorFlow
- physical_devices = tf.config.list_physical_devices('GPU')
- if physical_devices:
-     for device in physical_devices:
-         tf.config.experimental.set_memory_growth(device, True)
-
- def feature_extractor(img_path):
-     # Extract a feature embedding for one face crop with VGGFace (ResNet50 backbone).
-     # Building the model on every call is slow; for large batches it could be
-     # created once outside this function and reused.
-     model = VGGFace(model='resnet50', include_top=False, input_shape=(224, 224, 3), pooling='avg')
-     img = image.load_img(img_path, target_size=(224, 224))
-     img_array = image.img_to_array(img)
-     expanded_img = np.expand_dims(img_array, axis=0)
-     # version=2 selects the preprocessing that matches the ResNet50/SENet50 VGGFace weights
-     preprocessed_img = preprocess_input(expanded_img, version=2)
-     result = model.predict(preprocessed_img).flatten()
-     return result
-
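Since feature_extractor rebuilds the network for every image, a run over many new files spends most of its time re-creating the model. A minimal sketch of a cached variant; the helper name and the module-level cache are illustrative additions, not part of the original file:

    from keras_vggface.vggface import VGGFace

    _embedding_model = None

    def get_embedding_model():
        # Build the VGGFace ResNet50 embedding model once and reuse it afterwards.
        global _embedding_model
        if _embedding_model is None:
            _embedding_model = VGGFace(model='resnet50', include_top=False,
                                       input_shape=(224, 224, 3), pooling='avg')
        return _embedding_model

    # feature_extractor could then call get_embedding_model() instead of
    # constructing a new VGGFace instance on every invocation.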
- def extract_faces_mtcnn(input_folder, output_folder, required_size=(224, 224)):
-     detector = MTCNN()
-     os.makedirs(output_folder, exist_ok=True)
-
-     for filename in os.listdir(input_folder):
-         img_path = os.path.join(input_folder, filename)
-         img = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
-
-         if img is None:
-             print(f"Error reading image: {img_path}")
-             continue
-
-         # Boost local contrast with CLAHE, then expand to 3 channels for MTCNN
-         clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
-         clahe_image = clahe.apply(img)
-         converted_image = cv2.cvtColor(clahe_image, cv2.COLOR_GRAY2BGR)
-
-         faces = detector.detect_faces(converted_image)
-         if not faces:
-             continue
-
-         # The crop is saved under the source filename, so keep only one face per
-         # image; picking the largest detection avoids silently overwriting it
-         # with whichever face happened to be listed last.
-         face_info = max(faces, key=lambda f: f['box'][2] * f['box'][3])
-         x, y, w, h = face_info['box']
-         x, y = max(x, 0), max(y, 0)
-         face = img[y:y + h, x:x + w]
-         face_array = asarray(Image.fromarray(face).resize(required_size))
-         output_path = os.path.join(output_folder, filename)
-         cv2.imwrite(output_path, face_array)
-
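detect_faces returns one dict per detection with 'box', 'confidence' and 'keypoints' entries. If spurious detections show up in the crops, they could be filtered by confidence before cropping; a small sketch, where the helper name and the 0.90 threshold are illustrative assumptions rather than part of the original file:

    from mtcnn import MTCNN

    def detect_confident_faces(bgr_image, min_confidence=0.90):
        # Drop detections the network is not reasonably sure about.
        detector = MTCNN()
        return [f for f in detector.detect_faces(bgr_image)
                if f['confidence'] >= min_confidence]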
- def delete_files_in_folder(folder_path):
-     try:
-         files = os.listdir(folder_path)
-         for file_name in files:
-             file_path = os.path.join(folder_path, file_name)
-             if os.path.isfile(file_path):
-                 os.remove(file_path)
-     except Exception as e:
-         print(f"An error occurred: {e}")
-
- def new_pickle_file(new_image_dir, update_face_dir):
-
-     pickle_file_dir = "/home/bikas/Desktop/IR/pickle"
-     filenames_pickle_path = os.path.join(pickle_file_dir, 'FinalFilenames.pkl')
-     features_pickle_path = os.path.join(pickle_file_dir, 'FeatureEmbeddings.pkl')
-
-     # Load existing data if both pickle files are present
-     if os.path.exists(filenames_pickle_path) and os.path.exists(features_pickle_path):
-         with open(filenames_pickle_path, 'rb') as f:
-             existing_filenames = pickle.load(f)
-         with open(features_pickle_path, 'rb') as f:
-             existing_features = pickle.load(f)
-     else:
-         existing_filenames = []
-         existing_features = []
-
-     image_files = os.listdir(new_image_dir)
-     if len(image_files) == 0:
-         # No image files found in the directory.
-         return
-
-     # Filter out already processed files
-     new_files = [file for file in image_files if file not in existing_filenames]
-     if not new_files:
-         # All files have already been processed
-         return
-
-     extract_faces_mtcnn(new_image_dir, update_face_dir)
-
-     image_files = os.listdir(update_face_dir)
-     if len(image_files) == 0:
-         # No face crops were produced.
-         return
-
-     # Face crops keep their source filename, so filter against the same list
-     new_files = [file for file in image_files if file not in existing_filenames]
-     if not new_files:
-         # All files have already been processed
-         return
-
-     filenames = list(new_files)
-     features = [feature_extractor(os.path.join(update_face_dir, file)) for file in filenames]
-
-     # Update the existing pickle data with the new entries
-     existing_filenames.extend(filenames)
-
-     # Use len() here: existing_features is a plain list on the first run but a
-     # NumPy array once it has been loaded from the pickle, and truth-testing a
-     # multi-element array raises a ValueError.
-     if len(existing_features) > 0:
-         updated_features = np.vstack((np.array(existing_features), np.array(features)))
-     else:
-         updated_features = np.array(features)
-
-     with open(filenames_pickle_path, 'wb') as f:
-         pickle.dump(existing_filenames, f)
-     with open(features_pickle_path, 'wb') as f:
-         pickle.dump(updated_features, f)
-
-     # Clean up the extracted-face directory
-     delete_files_in_folder(update_face_dir)
-     # Delete the directory and all its contents
-     # shutil.rmtree(update_face_dir)
-
- # Example usage
- new_pickle_file("/home/hajj_images", "new_extract_face")
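Once FinalFilenames.pkl and FeatureEmbeddings.pkl have been written, the stored embeddings can be used to look up similar faces. A minimal retrieval sketch, assuming the same pickle directory as above and a query image that is already a face crop; the function name and the cosine-similarity choice are illustrative, not part of the original file:

    import os
    import pickle
    import numpy as np

    def find_closest_face(query_embedding, pickle_dir="/home/bikas/Desktop/IR/pickle", top_k=5):
        # Load the filenames and embeddings written by new_pickle_file().
        with open(os.path.join(pickle_dir, 'FinalFilenames.pkl'), 'rb') as f:
            filenames = pickle.load(f)
        with open(os.path.join(pickle_dir, 'FeatureEmbeddings.pkl'), 'rb') as f:
            features = np.asarray(pickle.load(f))

        # Cosine similarity between the query and every stored embedding.
        q = query_embedding / np.linalg.norm(query_embedding)
        db = features / np.linalg.norm(features, axis=1, keepdims=True)
        scores = db @ q

        # Return the best-matching filenames with their similarity scores.
        best = np.argsort(scores)[::-1][:top_k]
        return [(filenames[i], float(scores[i])) for i in best]

    # e.g. matches = find_closest_face(feature_extractor("some_face_crop.jpg"))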