mohbay committed (verified)
Commit 9e4b885 · Parent(s): ee6ab0d

Update app.py

Files changed (1)
  1. app.py +86 -41
app.py CHANGED
@@ -46,6 +46,71 @@
 # iface.launch()
 
 
+# import torch
+# import pandas as pd
+# from sentence_transformers import SentenceTransformer, util
+# import gradio as gr
+
+# model = SentenceTransformer("paraphrase-multilingual-MiniLM-L12-v2")
+# df = pd.read_csv("cleaned1.csv")
+# df2 = pd.read_csv("cleaned2.csv")
+# embeddings = torch.load("embeddings1.pt")
+# embeddings2 = torch.load("embeddings2.pt")
+
+# # def search_fatwa(data):
+# #     query = data[0] if data else ""
+# #     query_embedding = model.encode(query, convert_to_tensor=True)
+# #     top_idx = int(util.pytorch_cos_sim(query_embedding, embeddings)[0].argmax())
+# #     top_idx2 = int(util.pytorch_cos_sim(query_embedding, embeddings2)[0].argmax())
+# #     return {
+# #         "question1": df.iloc[top_idx]["question"],
+# #         "link1": df.iloc[top_idx]["link"],
+# #         "question2": df2.iloc[top_idx2]["question"],
+# #         "link2": df2.iloc[top_idx2]["link"]
+# #     }
+
+# def search_fatwa(data):
+#     query = data[0] if isinstance(data, list) else data
+#     if not query:
+#         return {"question1": "", "link1": "", "question2": "", "link2": ""}
+#     query_embedding = model.encode(query, convert_to_tensor=True)
+#     top_idx = int(util.pytorch_cos_sim(query_embedding, embeddings)[0].argmax())
+#     top_idx2 = int(util.pytorch_cos_sim(query_embedding, embeddings2)[0].argmax())
+#     # return {
+#     #     "question1": df.iloc[top_idx]["question"],
+#     #     "link1": df.iloc[top_idx]["link"],
+#     #     "question2": df2.iloc[top_idx2]["question"],
+#     #     "link2": df2.iloc[top_idx2]["link"]
+#     # }
+#     result = f"""Question 1: {df.iloc[top_idx]["question"]}
+# Link 1: {df.iloc[top_idx]["link"]}
+
+# Question 2: {df2.iloc[top_idx2]["question"]}
+# Link 2: {df2.iloc[top_idx2]["link"]}"""
+#     return result
+
+# iface = gr.Interface(
+#     fn=search_fatwa,
+#     inputs=[gr.Textbox(label="text", lines=3)],
+#     outputs="text"  # Changed from "json" to "text"
+# )
+
+# # iface = gr.Interface(fn=search_fatwa, inputs=[gr.Textbox(label="text", lines=3)], outputs="json")
+
+
+
+
+# # iface = gr.Interface(
+# #     fn=predict,
+# #     inputs=[gr.Textbox(label="text", lines=3)],
+# #     outputs='text',
+# #     title=title,
+# # )
+
+# iface.launch()
+
+
+
 import torch
 import pandas as pd
 from sentence_transformers import SentenceTransformer, util
@@ -57,54 +122,34 @@ df2 = pd.read_csv("cleaned2.csv")
 embeddings = torch.load("embeddings1.pt")
 embeddings2 = torch.load("embeddings2.pt")
 
-# def search_fatwa(data):
-#     query = data[0] if data else ""
-#     query_embedding = model.encode(query, convert_to_tensor=True)
-#     top_idx = int(util.pytorch_cos_sim(query_embedding, embeddings)[0].argmax())
-#     top_idx2 = int(util.pytorch_cos_sim(query_embedding, embeddings2)[0].argmax())
-#     return {
-#         "question1": df.iloc[top_idx]["question"],
-#         "link1": df.iloc[top_idx]["link"],
-#         "question2": df2.iloc[top_idx2]["question"],
-#         "link2": df2.iloc[top_idx2]["link"]
-#     }
-
-def search_fatwa(data):
-    query = data[0] if isinstance(data, list) else data
-    if not query:
-        return {"question1": "", "link1": "", "question2": "", "link2": ""}
+def search_fatwa(query):
+    # Handle both string and list inputs
+    if isinstance(query, list):
+        query = query[0] if query else ""
+
+    if not query or query.strip() == "":
+        return "No query provided"
+
     query_embedding = model.encode(query, convert_to_tensor=True)
     top_idx = int(util.pytorch_cos_sim(query_embedding, embeddings)[0].argmax())
    top_idx2 = int(util.pytorch_cos_sim(query_embedding, embeddings2)[0].argmax())
-    # return {
-    #     "question1": df.iloc[top_idx]["question"],
-    #     "link1": df.iloc[top_idx]["link"],
-    #     "question2": df2.iloc[top_idx2]["question"],
-    #     "link2": df2.iloc[top_idx2]["link"]
-    # }
+
+    # Return formatted text (like your working first app)
     result = f"""Question 1: {df.iloc[top_idx]["question"]}
-Link 1: {df.iloc[top_idx]["link"]}
-
-Question 2: {df2.iloc[top_idx2]["question"]}
-Link 2: {df2.iloc[top_idx2]["link"]}"""
+Link 1: {df.iloc[top_idx]["link"]}
+
+Question 2: {df2.iloc[top_idx2]["question"]}
+Link 2: {df2.iloc[top_idx2]["link"]}"""
+
     return result
 
+# Use the same structure as your working first app
 iface = gr.Interface(
     fn=search_fatwa,
-    inputs=[gr.Textbox(label="text", lines=3)],
-    outputs="text"  # Changed from "json" to "text"
+    inputs=[gr.Textbox(label="text", lines=3)],
+    outputs='text',  # Changed to 'text' like your working app
+    title="Search CSV"
 )
 
-# iface = gr.Interface(fn=search_fatwa, inputs=[gr.Textbox(label="text", lines=3)], outputs="json")
-
-
-
-
-# iface = gr.Interface(
-#     fn=predict,
-#     inputs=[gr.Textbox(label="text", lines=3)],
-#     outputs='text',
-#     title=title,
-# )
-
-iface.launch()
+# Enable API access for curl requests
+iface.launch(share=False, show_api=True)
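
Two side notes on the new code path, with illustrative sketches that are not part of this commit.

The app loads precomputed tensors from embeddings1.pt and embeddings2.pt but never creates them. A minimal sketch of how they could be produced, assuming each .pt file holds the encoded "question" column of the matching CSV (the column app.py indexes into); the CSV-to-.pt pairing is an assumption:

import torch
import pandas as pd
from sentence_transformers import SentenceTransformer

# Hypothetical one-off script; file and column names are taken from app.py.
model = SentenceTransformer("paraphrase-multilingual-MiniLM-L12-v2")
for csv_path, out_path in [("cleaned1.csv", "embeddings1.pt"),
                           ("cleaned2.csv", "embeddings2.pt")]:
    df = pd.read_csv(csv_path)
    # Encode every question once and save the tensor for fast startup.
    emb = model.encode(df["question"].tolist(), convert_to_tensor=True)
    torch.save(emb, out_path)

The closing comment mentions curl access; launch(show_api=True) keeps the auto-generated API docs visible. A hedged client-side sketch using requests, assuming a Gradio 3.x server reachable at localhost:7860 (Gradio 4.x replaces /run/predict with a two-step /call/<api_name> flow, so the endpoint below depends on the deployed version):

import requests

# Hypothetical caller; host, port, and endpoint path are assumptions.
resp = requests.post(
    "http://localhost:7860/run/predict",
    json={"data": ["example query text"]},
    timeout=60,
)
print(resp.json()["data"][0])  # the formatted "Question 1 ... Link 2 ..." string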