yashzambre committed on
Commit 9608bfe · 1 Parent(s): ad4b6ac

Upload app.py

Files changed (1)
  1. app.py +180 -0
app.py ADDED
@@ -0,0 +1,180 @@
import gradio as gr
from PIL import Image
import torch
import numpy as np
import cv2
from plantcv import plantcv as pcv
from skimage.feature import local_binary_pattern
from io import BytesIO
from skimage.feature import hog
import base64
import openpyxl
import pandas as pd
import os
import webbrowser

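# Note (assumption, not part of this commit): on Hugging Face Spaces these imports imply a
# requirements.txt covering at least gradio, Pillow, torch, numpy, opencv-python, plantcv,
# scikit-image, openpyxl and pandas; the exact package pins are not specified here.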
#from share_btn import community_icon_html, loading_icon_html, share_js, share_btn_css

# Custom Gradio theme for the app
theme = gr.themes.Base(
    primary_hue="violet",
    secondary_hue="green",
).set(
    body_background_fill_dark='*checkbox_label_background_fill'
)

# Open the exported tip-point spreadsheet in Excel (Windows-only shell command)
def show_excel():
    os.system("start excel tip_pts_mask.xlsx")

def image_processing(image, input_type, input_choice):
    # Convert the uploaded image to a grayscale mask for the morphology operations
    array = np.array(image)
    array = array.astype(np.float32)
    gray_img = cv2.cvtColor(array, cv2.COLOR_RGB2GRAY)

    if input_type == "Tips":
        # Skeletonize the mask and locate the leaf tip points
        img1 = pcv.morphology.skeletonize(mask=gray_img)
        output_image = pcv.morphology.find_tips(skel_img=img1, mask=None, label="default")
        non_zero_indices = np.nonzero(output_image)

        # Create a new Excel workbook and worksheet
        workbook = openpyxl.Workbook()
        worksheet = workbook.active

        # Write the non-zero (tip) indices to the worksheet
        for row, col in zip(*non_zero_indices):
            worksheet.cell(row=row + 1, column=1, value=row)
            worksheet.cell(row=row + 1, column=2, value=col)

        # Save the workbook
        workbook.save('tip_pts_mask_indices.xlsx')

        # Create a DataFrame from the tip points mask and save it to an Excel file
        df = pd.DataFrame(output_image)
        df.to_excel('tip_pts_mask.xlsx', index=False)

    elif input_type == "Branches":
        # Skeletonize the mask and locate the branch points
        img1 = pcv.morphology.skeletonize(mask=gray_img)
        output_image = pcv.morphology.find_branch_pts(skel_img=img1, mask=None, label="default")
        non_zero_indices = np.nonzero(output_image)

        # Create a new Excel workbook and worksheet
        workbook = openpyxl.Workbook()
        worksheet = workbook.active

        # Write the non-zero (branch) indices to the worksheet
        for row, col in zip(*non_zero_indices):
            worksheet.cell(row=row + 1, column=1, value=row)
            worksheet.cell(row=row + 1, column=2, value=col)

        # Save the workbook
        workbook.save('branch_pts_mask_indices.xlsx')

        # Create a DataFrame from the branch points mask and save it to an Excel file
        df = pd.DataFrame(output_image)
        df.to_excel('branch_pts_mask.xlsx', index=False)

    elif input_type == "Both":
        # Overlay tips (254) and branch points (128) on a blank image
        img1 = pcv.morphology.skeletonize(mask=gray_img)
        tips = pcv.morphology.find_tips(skel_img=img1, mask=None, label="default")
        branches = pcv.morphology.find_branch_pts(skel_img=img1, mask=None, label="default")
        output_image = np.zeros_like(img1)
        output_image[tips > 0] = 254
        output_image[branches > 0] = 128

    elif input_type == "sort":
        # Not exposed in the dropdown; prune short segments and return the pruned skeleton
        image = pcv.morphology.skeletonize(mask=gray_img)
        img1 = pcv.morphology.prune(skel_img=image, size=70, mask=None)[0]
        #output_image = leaf(skel_img=img1, objects=edge_objects, mask=None)
        output_image = img1

    elif input_type == "SIFT Transform":
        # Detect SIFT keypoints on the skeleton, draw them, and save the descriptors
        image = pcv.morphology.skeletonize(mask=gray_img)
        sift = cv2.SIFT_create()
        kp, des = sift.detectAndCompute(image, None)
        output_image = cv2.drawKeypoints(image, kp, None)
        np.savez('sift_descriptors.npz', descriptors=des)

    elif input_type == "LBP Transform":
        radius = 1  # LBP feature radius
        n_points = 8 * radius  # number of LBP feature points
        output_image = local_binary_pattern(gray_img, n_points, radius)
        # Save the LBP transformed image as a NumPy array in .npz format
        np.savez('lbp_transform.npz', lbp=output_image)

    elif input_type == "HOG Transform":
        # channel_axis=None because gray_img is single-channel (skimage >= 0.19 API)
        fd, output_image = hog(gray_img, orientations=10, pixels_per_cell=(16, 16),
                               cells_per_block=(1, 1), visualize=True, channel_axis=None)
        np.savez('hog_transform.npz', hog=output_image)

    elif input_type == "Compute ALL":
        img1 = pcv.morphology.skeletonize(mask=gray_img)
        if input_choice == "Compute_Branches":
            output_image = pcv.morphology.find_branch_pts(skel_img=img1, mask=None, label="default")
        elif input_choice == "Compute_Tips":
            output_image = pcv.morphology.find_tips(skel_img=img1, mask=None, label="default")
        elif input_choice == "Compute_Both":
            tips = pcv.morphology.find_tips(skel_img=img1, mask=None, label="default")
            branches = pcv.morphology.find_branch_pts(skel_img=img1, mask=None, label="default")
            output_image = np.zeros_like(img1)
            output_image[tips > 0] = 255
            output_image[branches > 0] = 128
        elif input_choice == "Compute_SIFT":
            sift = cv2.SIFT_create()
            kp, des = sift.detectAndCompute(img1, None)
            output_image = cv2.drawKeypoints(img1, kp, None)
        elif input_choice == "Compute_LBP":
            radius = 1  # LBP feature radius
            n_points = 8 * radius  # number of LBP feature points
            output_image = local_binary_pattern(gray_img, n_points, radius)
        elif input_choice == "Compute_HOG":
            fd, output_image = hog(gray_img, orientations=10, pixels_per_cell=(16, 16),
                                   cells_per_block=(1, 1), visualize=True, channel_axis=None)
        else:
            # No sub-operation selected; fall back to the skeleton
            output_image = img1

    else:
        # Unknown operation; return the grayscale input unchanged
        output_image = gray_img

    # PIL cannot build an image directly from float64 data (LBP/HOG outputs), so cast first
    if output_image.dtype == np.float64:
        output_image = output_image.astype(np.float32)

    # Convert the resulting NumPy array back to a PIL image object for Gradio output
    img2 = Image.fromarray(output_image)
    if img2.mode == 'F':
        img2 = img2.convert('RGB')

    # Return the processed image as the Gradio output
    return img2

# HTML shown as the interface description
body = (
    "<center>"
    "<a href='https://precisiongreenhouse.tamu.edu/'><img src='https://peepleslab.engr.tamu.edu/wp-content/uploads/sites/268/2023/04/AgriLife_Logo-e1681857158121.png' width=1650></a>"
    "<br>"
    "This demo extracts plant statistics and image features and stores them. "
    "<br>"
    "<a href='https://precisiongreenhouse.tamu.edu/'>The Texas A&M Plant Growth and Phenotyping Facility Data Analysis Pipeline</a>"
    "</center>"
)

#examples = [["img1.png"], ["img2.png"], ["img3.png"]]
iface = gr.Interface(
    fn=image_processing,
    # gr.inputs/gr.outputs are deprecated; the top-level components work across Gradio versions
    inputs=[gr.Image(label="Input Image"),
            gr.components.Dropdown(["Tips", "Branches", "Both", "SIFT Transform", "LBP Transform", "HOG Transform", "Compute ALL"],
                                   label="Choose the operation to be performed"),
            gr.components.Dropdown(["Compute_Branches", "Compute_Tips", "Compute_Both", "Compute_SIFT", "Compute_LBP", "Compute_HOG"],
                                   label="Choose from Compute ALL")],
    outputs=gr.Image(type="pil", label="Processed Image"),
    #gr.components.Button("Show Excel", type="button", onclick=show_excel),
    #title="TAMU AgriLife Plant Phenotyping Data Analysis Tool",
    description=body,
    allow_flagging="never",
    theme=theme,
    #examples=examples
)

iface.launch()
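For a quick sanity check outside the web UI, the processing function can also be called directly on a mask image. A minimal sketch, assuming a binary plant mask saved locally as sample_mask.png (a placeholder name, not part of this commit) and run in a session where image_processing is already defined (importing app.py itself would launch the interface, since iface.launch() runs at module level):

    from PIL import Image

    # Load the mask (placeholder path), extract the tip points, and save the visualization;
    # the "Tips" branch also writes tip_pts_mask.xlsx and tip_pts_mask_indices.xlsx
    # to the working directory.
    mask = Image.open("sample_mask.png").convert("RGB")
    result = image_processing(mask, "Tips", None)
    result.save("tips_output.png")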