File size: 1,757 Bytes
0bd265b
 
 
1101b01
 
 
 
4155848
 
1101b01
4155848
1101b01
 
 
65f2355
1101b01
 
65f2355
1101b01
4155848
1101b01
 
65f2355
1101b01
 
 
65f2355
1101b01
 
4155848
 
 
 
 
 
 
 
 
 
65f2355
 
4155848
65f2355
1101b01
4155848
 
 
 
 
 
 
 
 
 
 
bef2658
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import os
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'

import tensorflow as tf
import gradio as gr
import pickle
from keras.utils import pad_sequences
import json
from gradio_client import Client

# Load tokenizer and model
max_len = 200  # pad/truncate every comment to this many tokens; must match training-time setting
# NOTE(review): pickle.load executes arbitrary code from the file — only ship a trusted tokenizer.pickle
with open('tokenizer.pickle', 'rb') as handle:
    tokenizer = pickle.load(handle)

# Pre-trained Keras model; loaded once at import time so both UI and API share it
model = tf.keras.models.load_model('toxic.h5')

# Output label names, one per model output column — order must match the training labels
arr = ["toxic", "severe_toxic", "obscene", "threat", "insult", "identity_hate"]

# Define the function to score comments
def score_comment(comment):
    """Return a newline-separated 'label: True/False' report for *comment*.

    Each line flags whether the model's score for that toxicity category
    exceeds the 0.5 threshold.
    """
    seqs = tokenizer.texts_to_sequences([comment])
    padded = pad_sequences(seqs, maxlen=max_len)
    preds = model.predict(padded)
    # Pair each label with its score instead of indexing by position
    report = [
        '{}: {}\n'.format(label, score > 0.5)
        for label, score in zip(arr, preds[0])
    ]
    return ''.join(report)

# Define API function
def predict_api(comment):
    """Score *comment* and return {label: bool} for each toxicity category.

    Values are converted to plain Python bools: ``results[0][i] > 0.5`` yields
    ``numpy.bool_``, which ``json.dumps`` cannot serialize — the previous
    version made ``api_route`` raise TypeError on every successful request.
    """
    sequences = tokenizer.texts_to_sequences([comment])
    inp = pad_sequences(sequences, maxlen=max_len)
    results = model.predict(inp)
    # bool(...) makes the mapping JSON-serializable
    return {label: bool(score > 0.5) for label, score in zip(arr, results[0])}

# Gradio Interface setup
# Single multi-line text input for the comment to classify
inputs = gr.Textbox(lines=2, placeholder='Enter comment here...')
# Plain text output showing one 'label: True/False' line per category
outputs = gr.Textbox()

# Create Gradio interface wired to the human-readable scorer
interface = gr.Interface(fn=score_comment, inputs=inputs, outputs=outputs)

# API endpoint function (exposing the model via Gradio API)
def api_route(request):
    """Handle a JSON POST ``{"comment": ...}`` and return the model prediction.

    Returns a JSON string on success, or a ``(json_error, status_code)`` tuple
    on bad input or wrong method.

    NOTE(review): *request* appears to be a Flask-style request object (has
    ``.method`` and ``.get_json()``) — confirm against the actual caller.
    """
    # Previously a non-POST request fell through and returned None; reject it
    # explicitly instead.
    if request.method != 'POST':
        return json.dumps({"error": "Method not allowed"}), 405
    # get_json() can return None for a non-JSON body; guard before .get()
    data = request.get_json() or {}
    comment = data.get('comment')
    if not comment:
        return json.dumps({"error": "Missing 'comment' in request"}), 400
    prediction = predict_api(comment)
    return json.dumps({"prediction": prediction})

# Launch the Gradio interface for the space (auto-shared link)
# share=True publishes a temporary public gradio.live URL in addition to localhost
interface.launch(share=True)