Commit · 56a2c3e1
Parent(s): 8ba64a4
format app.py file
app.py CHANGED
@@ -3,6 +3,7 @@ import os
 os.environ["HUGGINGFACE_DEMO"] = "1"  # set before import from app
 
 from dotenv import load_dotenv
+
 load_dotenv()
 ################################################################################################
 
@@ -20,10 +21,12 @@ settings = get_settings()
 IMAGE_MAX_SIZE = 1536
 
 
-async def forward_request(
+async def forward_request(
+    attributes, product_taxonomy, product_data, ai_model, pil_images
+):
     # prepare temp folder
     request_id = str(uuid.uuid4())
-    request_temp_folder = os.path.join(
+    request_temp_folder = os.path.join("gradio_temp", request_id)
     os.makedirs(request_temp_folder, exist_ok=True)
 
     try:
@@ -32,9 +35,11 @@ async def forward_request(attributes, product_taxonomy, product_data, ai_model,
         try:
             attributes = exec(attributes, globals())
         except:
-            raise gr.Error(
-
-
+            raise gr.Error(
+                "Invalid `Attribute Schema`. Please insert valid schema following the example."
+            )
+        for key, value in attributes_object.items():  # type: ignore
+            attributes_object[key] = Attribute(**value)  # type: ignore
 
         if product_data == "":
             product_data = "{}"
@@ -43,39 +48,48 @@ async def forward_request(attributes, product_taxonomy, product_data, ai_model,
         try:
             exec(product_data_code, globals())
         except:
-            raise gr.Error(
+            raise gr.Error(
+                "Invalid `Product Data`. Please insert valid dictionary or leave it empty."
+            )
 
         if pil_images is None:
-            raise gr.Error(
+            raise gr.Error("Please upload image(s) of the product")
         pil_images = [pil_image[0] for pil_image in pil_images]
         img_paths = []
        for i, pil_image in enumerate(pil_images):
             if max(pil_image.size) > IMAGE_MAX_SIZE:
                 ratio = IMAGE_MAX_SIZE / max(pil_image.size)
-                pil_image = pil_image.resize(
-
-
+                pil_image = pil_image.resize(
+                    (int(pil_image.width * ratio), int(pil_image.height * ratio))
+                )
+            img_path = os.path.join(request_temp_folder, f"{i}.jpg")
+            if pil_image.mode in ("RGBA", "LA") or (
+                pil_image.mode == "P" and "transparency" in pil_image.info
+            ):
                 pil_image = pil_image.convert("RGBA")
-                if pil_image.getchannel("A").getextrema() == (
+                if pil_image.getchannel("A").getextrema() == (
+                    255,
+                    255,
+                ):  # if fully opaque, save as JPEG
                     pil_image = pil_image.convert("RGB")
-                    image_format =
+                    image_format = "JPEG"
                 else:
-                    image_format =
+                    image_format = "PNG"
             else:
-                image_format =
+                image_format = "JPEG"
             pil_image.save(img_path, image_format, quality=100, subsampling=0)
             img_paths.append(img_path)
 
         # mapping
         if ai_model in settings.OPENAI_MODELS:
-            ai_vendor =
+            ai_vendor = "openai"
         elif ai_model in settings.ANTHROPIC_MODELS:
-            ai_vendor =
+            ai_vendor = "anthropic"
         service = AIServiceFactory.get_service(ai_vendor)
 
         try:
             json_attributes = await service.extract_attributes_with_validation(
-                attributes_object,
+                attributes_object,  # type: ignore
                 ai_model,
                 None,
                 product_taxonomy,
@@ -83,12 +97,12 @@ async def forward_request(attributes, product_taxonomy, product_data, ai_model,
                 img_paths=img_paths,
             )
         except:
-            raise gr.Error(
+            raise gr.Error("Failed to extract attributes. Something went wrong.")
     finally:
         # remove temp folder anyway
         shutil.rmtree(request_temp_folder)
-
-    gr.Info(
+
+    gr.Info("Process completed!")
     return json_attributes
 
 
@@ -209,7 +223,7 @@ with gr.Blocks(title="Internal Demo for Attribution") as demo:
     # ka_value = gr.Textbox(placeholder="data", key=f"data-{i}", show_label=False)
     # ka_names.append(ka_name)
     # ka_values.append(ka_value)
-
+
    # add_track_btn = gr.Button("Add Product Data")
     # remove_track_btn = gr.Button("Remove Product Data")
     # add_track_btn.click(lambda count: count + 1, track_count, track_count)
@@ -272,4 +286,8 @@ with gr.Blocks(title="Internal Demo for Attribution") as demo:
        outputs=output_json,
     )
 
-
+
+attr_user = os.getenv("ATTR_USER", "1")
+attr_pass = os.getenv("ATTR_PASS", "a")
+auth = (attr_user, attr_pass)
+demo.launch(auth=auth, debug=True)