shaocongma committed · Commit 2617428 · Parent(s): ca1dc6f

add files

Files changed:
- app.py +14 -14
- auto_backgrounds.py +16 -1
- latex_templates/Default/template.tex +45 -0
- latex_templates/ICLR2022/template.tex +35 -0
- requirements.txt +0 -0
app.py
CHANGED
@@ -134,7 +134,7 @@ def wrapped_generator(
         tldr=True, max_kw_refs=10, bib_refs=None, max_tokens_ref=2048, # references
         knowledge_database=None, max_tokens_kd=2048, query_counts=10, # domain knowledge
         paper_template="ICLR2022", selected_sections=None, model="gpt-4", prompts_mode=False, # outputs parameters
-        cache_mode=
+        cache_mode=IS_CACHE_AVAILABLE # handle cache mode
 ):
     # if `cache_mode` is True, then follow the following steps:
     # check if "title"+"description" have been generated before

@@ -271,16 +271,16 @@ with gr.Blocks(theme=theme) as demo:
             clear_button_pp = gr.Button("Clear")
             submit_button_pp = gr.Button("Submit", variant="primary")
 
-    with gr.Tab("文献搜索"):
-        gr.Markdown(REFERENCES)
-
-        title_refs = gr.Textbox(value="Playing Atari with Deep Reinforcement Learning", lines=1, max_lines=1,
-                                label="Title", info="论文标题")
-        slider_refs = gr.Slider(minimum=1, maximum=100, value=5, step=1,
-                                interactive=True, label="最相关的参考文献数目")
-        with gr.Row():
-            clear_button_refs = gr.Button("Clear")
-            submit_button_refs = gr.Button("Submit", variant="primary")
+    # with gr.Tab("文献搜索"):
+    #     gr.Markdown(REFERENCES)
+    #
+    #     title_refs = gr.Textbox(value="Playing Atari with Deep Reinforcement Learning", lines=1, max_lines=1,
+    #                             label="Title", info="论文标题")
+    #     slider_refs = gr.Slider(minimum=1, maximum=100, value=5, step=1,
+    #                             interactive=True, label="最相关的参考文献数目")
+    #     with gr.Row():
+    #         clear_button_refs = gr.Button("Clear")
+    #         submit_button_refs = gr.Button("Submit", variant="primary")
 
     with gr.Tab("文献综述 (Coming soon!)"):
         gr.Markdown('''

@@ -312,9 +312,9 @@ with gr.Blocks(theme=theme) as demo:
                                          domain_knowledge, max_tokens_kd_slider, query_counts_slider,
                                          template, sections, model_selection, prompts_mode], outputs=file_output)
 
-    clear_button_refs.click(fn=clear_inputs_refs, inputs=[title_refs, slider_refs], outputs=[title_refs, slider_refs])
-    submit_button_refs.click(fn=wrapped_references_generator,
-                             inputs=[title_refs, slider_refs, key], outputs=json_output)
+    # clear_button_refs.click(fn=clear_inputs_refs, inputs=[title_refs, slider_refs], outputs=[title_refs, slider_refs])
+    # submit_button_refs.click(fn=wrapped_references_generator,
+    #                          inputs=[title_refs, slider_refs, key], outputs=json_output)
 
 demo.queue(concurrency_count=1, max_size=5, api_open=False)
 demo.launch(show_error=True)
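Note on the first hunk: cache_mode now defaults to a module-level IS_CACHE_AVAILABLE flag rather than a fixed value. A minimal sketch of how such a flag might be derived, assuming it is driven by cache credentials in the environment (the variable names below are illustrative and not part of this commit):

import os

# Hypothetical sketch: IS_CACHE_AVAILABLE could be derived from whether cache
# credentials are present in the environment. The actual definition is not
# shown in this diff; the environment variable names are invented.
IS_CACHE_AVAILABLE = bool(os.getenv("CACHE_BUCKET") and os.getenv("CACHE_ACCESS_KEY"))

# wrapped_generator(..., cache_mode=IS_CACHE_AVAILABLE) then checks whether a
# draft for the same "title" + "description" was generated before reusing it.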
auto_backgrounds.py
CHANGED
@@ -5,6 +5,7 @@ from utils.knowledge import Knowledge
 from utils.file_operations import hash_name, make_archive, copy_templates
 from utils.tex_processing import create_copies
 from section_generator import section_generation  # figures_generation, section_generation_bg, keywords_generation,
+from utils.prompts import generate_paper_prompts
 import logging
 import time
 from langchain.vectorstores import FAISS

@@ -145,6 +146,7 @@ def _generation_setup(title, description="", template="ICLR2022",
         print(f"Failed to query from FAISS. Error {e}. Use empty domain knowledge instead.")
         domain_knowledge = ""
     else:
+        print("Selected database doesn't exist or no database is selected.")
         domain_knowledge = ""
 
     ###################################################################################################################

@@ -249,8 +251,14 @@ def generate_draft(title, description="", # main input
                                        knowledge_database=knowledge_database)
 
     # main components
+    prompts_dict = {}
     print(f"================PROCESSING================")
     for section in sections:
+        if prompts_mode:
+            prompts = generate_paper_prompts(paper, section)
+            prompts_dict[section] = prompts
+            continue
+
         print(f"Generate {section} part...")
         max_attempts = 4
         attempts_count = 0

@@ -273,7 +281,14 @@ def generate_draft(title, description="", # main input
     input_dict = {"title": title, "description": description, "generator": "generate_draft"}
     filename = hash_name(input_dict) + ".zip"
     print("\nMission completed.\n")
-    return make_archive(destination_folder, filename)
+
+    if prompts_mode:
+        filename = hash_name(input_dict) + ".json"
+        with open(filename, "w") as f:
+            json.dump(prompts_dict, f)
+        return filename
+    else:
+        return make_archive(destination_folder, filename)
 
 
 if __name__ == "__main__":
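The generate_draft changes add a prompt-only path: when prompts_mode is set, the prompt for each section from generate_paper_prompts is collected into prompts_dict and dumped to a JSON file instead of generating the draft. A hedged usage sketch, assuming prompts_mode is a keyword argument of generate_draft (its signature line is truncated above) and that json is imported in the module, which json.dump requires:

from auto_backgrounds import generate_draft

# Hypothetical usage of the new prompts_mode path: no sections are generated;
# the per-section prompts are collected and written to "<hash>.json",
# whose filename is returned instead of a .zip archive.
prompts_file = generate_draft(
    "Playing Atari with Deep Reinforcement Learning",
    description="",
    prompts_mode=True,
)
print(prompts_file)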
latex_templates/Default/template.tex
ADDED
@@ -0,0 +1,45 @@
+\documentclass{article} % For LaTeX2e
+\UseRawInputEncoding
+\usepackage{graphicx}
+\usepackage{booktabs}
+\usepackage{times}
+\usepackage{eso-pic} % used by \AddToShipoutPicture
+\RequirePackage{fancyhdr}
+\RequirePackage{natbib}
+\usepackage{fullpage}
+
+\input{math_commands.tex}
+\usepackage{hyperref}
+\usepackage{url}
+\usepackage{algorithm}
+\usepackage{algpseudocode}
+
+\newlength\tindent
+\setlength{\tindent}{\parindent}
+\setlength{\parindent}{0pt}
+\renewcommand{\indent}{\hspace*{\tindent}}
+
+\title{TITLE}
+\author{GPT-4}
+
+\newcommand{\fix}{\marginpar{FIX}}
+\newcommand{\new}{\marginpar{NEW}}
+
+\begin{document}
+\maketitle
+\input{abstract.tex}
+\input{introduction.tex}
+\input{related works.tex}
+\input{backgrounds.tex}
+\input{methodology.tex}
+\input{experiments.tex}
+\input{conclusion.tex}
+
+\bibliography{ref}
+\bibliographystyle{iclr2022_conference}
+
+%\appendix
+%\section{Appendix}
+%You may include other additional sections here.
+
+\end{document}
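Both new templates use a TITLE placeholder and \input a fixed set of per-section files (abstract.tex, introduction.tex, "related works.tex", and so on). An illustrative sketch of how a caller might fill them in; the real logic lives in utils/tex_processing.py and section_generator.py, which this commit does not touch, so the helper below is an assumption:

from pathlib import Path

# Illustrative only: replace the TITLE placeholder and write the per-section
# .tex files that the template \input's (e.g. introduction.tex, "related works.tex").
def fill_template(folder: str, title: str, sections: dict) -> None:
    tex = Path(folder) / "template.tex"
    tex.write_text(tex.read_text().replace("TITLE", title))
    for name, body in sections.items():
        (Path(folder) / f"{name}.tex").write_text(body)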
latex_templates/ICLR2022/template.tex
ADDED
@@ -0,0 +1,35 @@
+\documentclass{article} % For LaTeX2e
+\UseRawInputEncoding
+\usepackage{graphicx}
+\usepackage{booktabs}
+\usepackage{iclr2022_conference, times}
+\input{math_commands.tex}
+\usepackage{hyperref}
+\usepackage{url}
+\usepackage{algorithm}
+\usepackage{algpseudocode}
+
+\title{TITLE}
+\author{GPT-4}
+
+\newcommand{\fix}{\marginpar{FIX}}
+\newcommand{\new}{\marginpar{NEW}}
+
+\begin{document}
+\maketitle
+\input{abstract.tex}
+\input{introduction.tex}
+\input{related works.tex}
+\input{backgrounds.tex}
+\input{methodology.tex}
+\input{experiments.tex}
+\input{conclusion.tex}
+
+\bibliography{ref}
+\bibliographystyle{iclr2022_conference}
+
+%\appendix
+%\section{Appendix}
+%You may include other additional sections here.
+
+\end{document}
requirements.txt
CHANGED
Binary files a/requirements.txt and b/requirements.txt differ