def predict(text):
    """Classify a single text and return its class probabilities and label.

    Relies on module-level globals: ``tokenizer``, ``model`` (a TF
    transformer with a ``.logits`` output), ``label_encoder``, plus the
    ``tf`` and ``np`` imports.

    Parameters
    ----------
    text : str
        Raw input text to classify.

    Returns
    -------
    tuple
        ``(prob, predicted_label)`` where ``prob`` is a 1-D tensor of
        softmax class probabilities and ``predicted_label`` is the
        decoded class name for the highest-probability class.
    """
    # NOTE(review): 612 is unusual — transformer encoders (BERT-style)
    # typically cap at 512 positions; confirm this isn't a typo for 512.
    max_length = 612
    encoded = tokenizer(
        text,
        truncation=True,
        padding=True,
        max_length=max_length,
        return_tensors='tf',
    )
    # model.predict wants a plain dict of input tensors; verbose=0 silences
    # the per-batch progress bar for single-example inference.
    pred = model.predict(dict(encoded), verbose=0)
    # Logits -> probabilities; [0] selects the single example in the batch.
    prob = tf.nn.softmax(pred.logits)[0]
    # argmax gives the winning class index; inverse_transform maps it back
    # to the original string label (expects an array-like, hence the list).
    predicted_label = label_encoder.inverse_transform([np.argmax(prob)])[0]
    return prob, predicted_label