Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -12,6 +12,8 @@ import datetime
 from pypdf import PdfReader
 import uuid
 #from query import tasks
+from gradio_client import Client
+
 from agent import (
     PREFIX,
     GET_CHART,
@@ -24,6 +26,16 @@ api=HfApi()
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
+
+def sort_fn(inp):
+
+    client_sort = Client("Omnibus/sort_document")
+    sen,nouns = client_sort.predict(
+        f"{inp}",    # str in 'Paste Text' Textbox component
+        api_name="/sort_doc"
+    )
+    return sen
+
 def find_all(url):
     return_list=[]
     print (url)
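The new sort_fn simply proxies the document text to the external Omnibus/sort_document Space over gradio_client and keeps only the first value of the returned pair. A minimal standalone sketch of that call pattern, assuming the Space is reachable and that its /sort_doc endpoint returns a (sentences, nouns) pair as the diff suggests:

# Sketch only (not part of the commit): the same gradio_client call pattern
# used by sort_fn above. Assumes the public Space "Omnibus/sort_document"
# is running and exposes a /sort_doc endpoint returning a (sentences, nouns) pair.
from gradio_client import Client

def sort_text(text):
    client_sort = Client("Omnibus/sort_document")   # connect to the remote Space
    client_sort.view_api()                          # optional: print the remote endpoint signatures
    sen, nouns = client_sort.predict(
        text,                   # value for the 'Paste Text' Textbox component
        api_name="/sort_doc",
    )
    return sen                  # only the sentence/keyword structure is used downstream

if __name__ == "__main__":
    print(sort_text("The quick brown fox jumps over the lazy dog."))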
@@ -58,6 +70,47 @@ def find_all(url):
 
     return "MAIN", None, history, task
 
+FIND_KEYWORDS="""Find keywords from the dictionary of provided keywords that are relevant to the users query.
+Return the keyword:value pairs from the list in the form of a JSON file output.
+dictionary:
+{keywords}
+user query:
+
+"""
+
+def find_keyword_fn(c,inp,data):
+
+
+    seed=random.randint(1,1000000000)
+    divr=int(c)/MAX_DATA
+    divi=int(divr)+1 if divr != int(divr) else int(divr)
+    chunk = int(int(c)/divr)
+    out = []
+    s=0
+    e=chunk
+    print(f'e:: {e}')
+    new_history=""
+    #task = f'Compile this data to fulfill the task: {task}, and complete the purpose: {purpose}\n'
+    for z in range(divi):
+        print(f's:e :: {s}:{e}')
+
+        hist = history[s:e]
+        resp = run_gpt(
+            FIND_KEYWORDS,
+            stop_tokens=[],
+            max_tokens=8192,
+            seed=seed,
+            keywords=data,
+        ).strip("\n")
+        out.append(resp)
+        #new_history = resp
+        print (resp)
+        #out+=resp
+        e=e+chunk
+        s=s+chunk
+    return out
+
+
 def read_txt(txt_path):
     text=""
     with open(txt_path,"r") as f:
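find_keyword_fn splits the character count c into roughly MAX_DATA-sized windows and issues one run_gpt call per window, with the keyword dictionary interpolated into the FIND_KEYWORDS template. As committed, the hist slice is computed but not passed to run_gpt, and the user query slot of the template is left empty. A self-contained sketch of just the window arithmetic, with an assumed MAX_DATA value for illustration:

# Sketch of the window arithmetic in find_keyword_fn, assuming MAX_DATA is the
# per-call character budget defined elsewhere in app.py (value here is illustrative).
MAX_DATA = 20000

def chunk_bounds(total_chars):
    """Yield (start, end) slices covering total_chars in MAX_DATA-sized windows."""
    divr = total_chars / MAX_DATA
    divi = int(divr) + 1 if divr != int(divr) else int(divr)  # number of windows, rounded up
    chunk = int(total_chars / divr)                           # works out to MAX_DATA for nonzero input
    s, e = 0, chunk
    for _ in range(divi):
        yield s, e
        s, e = s + chunk, e + chunk

print(list(chunk_bounds(45000)))   # [(0, 20000), (20000, 40000), (40000, 60000)]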
@@ -409,8 +462,10 @@ def summarize(inp,history,report_check,chart_check,data=None,files=None,director
         if i == " " or i=="," or i=="\n":
             c +=1
     print (f'c:: {c}')
-
-
+
+    json_start = sort_fn(out)
+    json_out = find_keywords_fn(c,inp,json_start)
+    #json_out = compress_data(c,inp,out)
     #json_box.append(json_out)
 
     #json_object = json.dumps(eval(json_out), indent=4)
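Inside summarize the compress_data step is swapped for the two new helpers: sort_fn builds the keyword structure from the scraped text and the result is handed to the keyword finder. Note that the added call is spelled find_keywords_fn while the function defined above is find_keyword_fn, which would raise a NameError when summarize runs (the Space header shows a runtime error). A runnable sketch of the intended hand-off, with stubbed helpers and the call matched to the defined name:

# Self-contained sketch of the hand-off added to summarize(). The helper
# bodies are stubs; in app.py they call the remote Space and run_gpt.
def sort_fn(text):
    return {"fox": "animal", "dog": "animal"}        # stub keyword dictionary

def find_keyword_fn(c, inp, data):
    return [f"relevant to '{inp}': {data}"]          # stub keyword match

out = "The quick brown fox jumps over the lazy dog."
c = sum(1 for ch in out if ch in (" ", ",", "\n"))   # same rough count as in summarize
json_start = sort_fn(out)
json_out = find_keyword_fn(c, "which animals appear?", json_start)
print(json_out)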
@@ -489,6 +544,9 @@ with gr.Blocks() as app:
     #text=gr.JSON()
     #inp_query.change(search_models,inp_query,models_dd)
     clear_btn.click(clear_fn,None,[prompt,chatbot])
+
+    #go=button.click(summarize,[prompt,chatbot,report_check,chart_check,data,file,directory,url,pdf_url,pdf_batch],[prompt,chatbot,e_box,json_out])
     go=button.click(summarize,[prompt,chatbot,report_check,chart_check,data,file,directory,url,pdf_url,pdf_batch],[prompt,chatbot,e_box,json_out])
+
     stop_button.click(None,None,None,cancels=[go])
 app.queue(default_concurrency_limit=20).launch(show_api=False)
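The UI wiring is unchanged apart from whitespace and a commented-out duplicate of the click binding: the click event handle go is kept so the stop button can cancel the in-flight call, and the queue caps concurrency at 20. A minimal Gradio sketch of the same pattern, with illustrative component names rather than the ones in app.py:

# Minimal Gradio sketch of the same event wiring: keep the click event handle
# so a second button can cancel it, and cap queue concurrency.
import time
import gradio as gr

def slow_echo(text, history):
    time.sleep(5)                                   # stand-in for the long summarize() call
    return "", history + [(text, f"done: {text}")]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    prompt = gr.Textbox()
    go_btn = gr.Button("Go")
    stop_btn = gr.Button("Stop")
    go = go_btn.click(slow_echo, [prompt, chatbot], [prompt, chatbot])
    stop_btn.click(None, None, None, cancels=[go])  # cancels the in-flight event

demo.queue(default_concurrency_limit=20).launch(show_api=False)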