Spaces:
Runtime error
Runtime error
Commit
·
0567ede
1
Parent(s):
3dc9128
Update app.py
Browse files
app.py
CHANGED
@@ -97,7 +97,7 @@ def run():
|
|
97 |
submit_button = col.form_submit_button(label='Analizar')
|
98 |
error=False
|
99 |
if submit_button:
|
100 |
-
|
101 |
if ( termino == False and usuario == False):
|
102 |
st.text('Error no se ha seleccionado ningun check')
|
103 |
error=True
|
@@ -108,22 +108,15 @@ def run():
|
|
108 |
if (error == False):
|
109 |
if (termino):
|
110 |
new_search = search_words + " -filter:retweets"
|
111 |
-
tweets =tw.Cursor(api.search_tweets,q=new_search,lang="es"
|
112 |
elif (usuario):
|
113 |
tweets = api.user_timeline(screen_name = search_words,count=number_of_tweets)
|
114 |
|
115 |
tweet_list = [i.text for i in tweets]
|
116 |
-
#tweet_list = [strip_undesired_chars(i.text) for i in tweets]
|
117 |
text= pd.DataFrame(tweet_list)
|
118 |
-
|
119 |
-
#text[0] = text[0].apply(preprocess_tweet)
|
120 |
text1=text[0].values
|
121 |
-
indices1=tokenizer.batch_encode_plus(text1.tolist(),
|
122 |
-
max_length=128,
|
123 |
-
add_special_tokens=True,
|
124 |
-
return_attention_mask=True,
|
125 |
-
pad_to_max_length=True,
|
126 |
-
truncation=True)
|
127 |
input_ids1=indices1["input_ids"]
|
128 |
attention_masks1=indices1["attention_mask"]
|
129 |
prediction_inputs1= torch.tensor(input_ids1)
|
@@ -135,16 +128,16 @@ def run():
|
|
135 |
prediction_sampler1 = SequentialSampler(prediction_data1)
|
136 |
prediction_dataloader1 = DataLoader(prediction_data1, sampler=prediction_sampler1, batch_size=batch_size)
|
137 |
print('Predicting labels for {:,} test sentences...'.format(len(prediction_inputs1)))
|
138 |
-
#
|
139 |
model.eval()
|
140 |
-
#
|
141 |
predictions = []
|
142 |
# Predict
|
143 |
for batch in prediction_dataloader1:
|
144 |
batch = tuple(t.to(device) for t in batch)
|
145 |
-
#
|
146 |
b_input_ids1, b_input_mask1 = batch
|
147 |
-
#
|
148 |
with torch.no_grad():
|
149 |
# Forward pass, calculate logit predictions
|
150 |
outputs1 = model(b_input_ids1, token_type_ids=None,attention_mask=b_input_mask1)
|
|
|
97 |
submit_button = col.form_submit_button(label='Analizar')
|
98 |
error=False
|
99 |
if submit_button:
|
100 |
+
# Condición para el caso de que no esté seleccionado ningún check
|
101 |
if ( termino == False and usuario == False):
|
102 |
st.text('Error no se ha seleccionado ningun check')
|
103 |
error=True
|
|
|
108 |
if (error == False):
|
109 |
if (termino):
|
110 |
new_search = search_words + " -filter:retweets"
|
111 |
+
tweets =tw.Cursor(api.search_tweets,q=new_search,lang="es").items(number_of_tweets)
|
112 |
elif (usuario):
|
113 |
tweets = api.user_timeline(screen_name = search_words,count=number_of_tweets)
|
114 |
|
115 |
tweet_list = [i.text for i in tweets]
|
|
|
116 |
text= pd.DataFrame(tweet_list)
|
117 |
+
text[0] = text[0].apply(preprocess_tweet)
|
|
|
118 |
text1=text[0].values
|
119 |
+
indices1=tokenizer.batch_encode_plus(text1.tolist(),max_length=128,add_special_tokens=True, return_attention_mask=True,pad_to_max_length=True,truncation=True)
|
|
|
|
|
|
|
|
|
|
|
120 |
input_ids1=indices1["input_ids"]
|
121 |
attention_masks1=indices1["attention_mask"]
|
122 |
prediction_inputs1= torch.tensor(input_ids1)
|
|
|
128 |
prediction_sampler1 = SequentialSampler(prediction_data1)
|
129 |
prediction_dataloader1 = DataLoader(prediction_data1, sampler=prediction_sampler1, batch_size=batch_size)
|
130 |
print('Predicting labels for {:,} test sentences...'.format(len(prediction_inputs1)))
|
131 |
+
# Pone el modelo en modo evaluación
|
132 |
model.eval()
|
133 |
+
# Variables de Seguimiento
|
134 |
predictions = []
|
135 |
# Predict
|
136 |
for batch in prediction_dataloader1:
|
137 |
batch = tuple(t.to(device) for t in batch)
|
138 |
+
# Descomprimir las entradas de nuestro cargador de datos
|
139 |
b_input_ids1, b_input_mask1 = batch
|
140 |
+
# Decirle al modelo que no calcule ni almacene gradientes, ahorrando memoria y acelerando la predicción.
|
141 |
with torch.no_grad():
|
142 |
# Forward pass, calculate logit predictions
|
143 |
outputs1 = model(b_input_ids1, token_type_ids=None,attention_mask=b_input_mask1)
|