vanessbut committed
Commit db6efa3 · 1 Parent(s): ff782de

Added a histogram.

Files changed (1):
app.py  +9 -6
app.py CHANGED
@@ -13,9 +13,9 @@ transformers.utils.logging.disable_progress_bar()
 os.system("python3 -m spacy download en")
 
 st.markdown("""### TL;DR: give me the keywords!
-Here you can get the keywords and topic of the article based on it's title or abstract.
+Здесь вы можете получить отранжированный список ключевых слов по названию и аннотации статьи.
 
-The only supported language is English.""")
+Единственным поддерживаемым языком является английский.""")
 
 st.markdown("<p style=\"text-align:center\"><img width=100% src='https://c.tenor.com/IKt-6tAk9CUAAAAd/thats-a-lot-of-words-lots-of-words.gif'></p>", unsafe_allow_html=True)
 
@@ -24,8 +24,8 @@ st.markdown("<p style=\"text-align:center\"><img width=100% src='https://c.tenor
 #pipe = pipeline("ner", "Davlan/distilbert-base-multilingual-cased-ner-hrl")
 
 #st.markdown("#### Title:")
-title = st.text_area("Title:", value="How to cook a neural network", height=16, help="Title of the article")
-abstract = st.text_area("Abstract:",
+title = st.text_area("Заголовок:", value="How to cook a neural network", height=16, help="Заголовок статьи")
+abstract = st.text_area("Аннотация:",
 value="""My dad fits hellish models in general.
 Well, this is about an average recipe, because there are a lot of variations.
 The model is taken, it is not finetuned, finetuning is not about my dad.
@@ -41,7 +41,7 @@ Kindly offers me sometimes, but I refuse.
 Do I need to talk about what the wildest overfitting then?
 The overfitting is such that the val loss peels off the walls.
 """,
-height=512, help="Abstract of the article")
+height=512, help="Аннотация статьи")
 
 # Spacy
 
@@ -81,7 +81,10 @@ if not text is None and len(text) > 0:
 
 # График важности слов.
 fig, ax = plt.subplots(figsize=(8, 16))
-ax.set_title("Ключевые слова в порядке важности")
+ax.set_title("95% самых важных ключевых слов")
+ax.grid(color='#000000', alpha=0.15, linestyle='-', linewidth=1, which='major')
+ax.grid(color='#000000', alpha=0.1, linestyle='-', linewidth=0.5, which='minor')
+ax.xaxis.set_minor_locator(locmin)
 
 bar_width = 0.75
 indexes = -np.arange(len(labels))
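
For orientation, here is a minimal, self-contained sketch of the bar-chart section this last hunk touches, runnable outside Streamlit. The diff does not show how `locmin`, `labels`, or the importance scores are built, so those are placeholders: `locmin` is assumed to be a `matplotlib.ticker.AutoMinorLocator`, and the `barh`/tick calls are a guess at how the negated `indexes` are used further down in app.py.

```python
# Minimal sketch of the plotting code touched by this commit.
# Assumptions (not shown in the diff): locmin, labels, and the scores.
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np

labels = ["transformer", "keyword extraction", "overfitting"]  # hypothetical keywords
scores = np.array([0.5, 0.3, 0.2])                             # hypothetical importance scores
locmin = ticker.AutoMinorLocator()   # assumption: the diff never defines locmin

fig, ax = plt.subplots(figsize=(8, 16))
ax.set_title("95% самых важных ключевых слов")
ax.grid(color='#000000', alpha=0.15, linestyle='-', linewidth=1, which='major')
ax.grid(color='#000000', alpha=0.1, linestyle='-', linewidth=0.5, which='minor')
ax.xaxis.set_minor_locator(locmin)   # minor ticks on x, so the minor grid is visible

bar_width = 0.75
indexes = -np.arange(len(labels))    # negative indexes put the most important keyword on top

# Assumed continuation: horizontal bars labelled with the keywords.
ax.barh(indexes, scores, height=bar_width)
ax.set_yticks(indexes)
ax.set_yticklabels(labels)

fig.savefig("keywords.png")          # in the app this is presumably rendered with st.pyplot(fig)
```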