feat: Added required files
- app.py +6 -1
- poembot_weight.h5 +1 -1
- tokenizer.pickle +3 -0
app.py
CHANGED
@@ -1,15 +1,20 @@
 
 import gradio as gr
 import json
+import pickle
 import numpy as np
 import tensorflow as tf
 from tensorflow.keras.preprocessing.text import Tokenizer
 from tensorflow.keras.preprocessing.sequence import pad_sequences
 
-
 # Static constants
 max_seq_len = 18
 model = tf.keras.models.load_model('poembot_weight.h5')
+
+with open('tokenizer.pickle', 'rb') as handle:
+    tokenizer = pickle.load(handle)
+
+
 with open("word_token.json") as wt:
     word_token = json.load(wt)
 # Main function
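The app.py hunk stops at the `# Main function` comment, so the generation code itself is not part of this diff. For context, here is a minimal sketch of how the objects loaded above (model, tokenizer, max_seq_len, pad_sequences) are typically combined for next-word poem generation. The function name generate_poem, its parameters, and the maxlen=max_seq_len - 1 window are illustrative assumptions, not code from app.py.

# Hypothetical sketch, not part of this commit: greedy next-word generation
# using the model and tokenizer loaded at module level in app.py.
def generate_poem(seed_text, next_words=10):
    text = seed_text
    for _ in range(next_words):
        # Encode the running text as integers and pad to the assumed input window.
        token_list = tokenizer.texts_to_sequences([text])[0]
        token_list = pad_sequences([token_list], maxlen=max_seq_len - 1, padding='pre')
        # Take the most probable next token and map it back to a word.
        predicted_id = int(np.argmax(model.predict(token_list, verbose=0), axis=-1)[0])
        text += ' ' + tokenizer.index_word.get(predicted_id, '')
    return text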
poembot_weight.h5
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1e547c33adc95b519e1e21e70549f5af7a225a0cc8b8d80d760f760f845688f2
 size 1097832
tokenizer.pickle
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f878ad09d410a430fd49129d7712020cb50f49e93db9cb0a8bb0ef5b42cf502b
+size 2918
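tokenizer.pickle is stored through Git LFS, so the diff above shows only the pointer file (spec version, object hash, size), not the pickle contents. A minimal sketch of how such a file is commonly produced at training time, assuming a standard Keras Tokenizer fit on the training corpus; the corpus list below is a placeholder, not the project's actual data.

# Hypothetical sketch, not part of this commit: fitting and pickling a tokenizer.
import pickle
from tensorflow.keras.preprocessing.text import Tokenizer

corpus = ["line one of a poem", "line two of a poem"]  # placeholder training text

tokenizer = Tokenizer()
tokenizer.fit_on_texts(corpus)

with open('tokenizer.pickle', 'wb') as handle:
    pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)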