import gradio as gr
import spaces  # Hugging Face Spaces helper (provides @spaces.GPU); unused in this mocked layout but kept for deployment

# Token-count gate, currently disabled while the layout is mocked:
# TOKENIZER =
# MINIMUM_TOKENS = 64

# def count_tokens(text):
#     return len(TOKENIZER(text).input_ids)
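
# A minimal sketch of how the gate above could be filled in, assuming the
# `transformers` AutoTokenizer API; "gpt2-medium" is an illustrative choice,
# not necessarily what the full R-Detect app loads:
#
# from transformers import AutoTokenizer
#
# TOKENIZER = AutoTokenizer.from_pretrained("gpt2-medium")
# MINIMUM_TOKENS = 64
#
# def count_tokens(text):
#     # Number of tokens the tokenizer produces for `text`.
#     return len(TOKENIZER(text).input_ids)
#
# def is_long_enough(text):  # hypothetical helper, not in the original app
#     # Very short inputs would presumably be rejected before running detection.
#     return count_tokens(text) >= MINIMUM_TOKENS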


# Mock detector used while testing the layout: it ignores its inputs and always
# reports "Human"; in the full app it is meant to be replaced by the real detector.
def run_test_power(model_name, real_text, generated_text, N=10):
    return "Prediction: Human (Mocked)"

# Change mode name
# def change_mode(mode):
#     if mode == ""
css = """ | |
#header { text-align: center; font-size: 1.5em; margin-bottom: 20px; } | |
#output-text { font-weight: bold; font-size: 1.2em; } | |
.links { | |
display: flex; | |
justify-content: flex-end; | |
gap: 10px; | |
margin-right: 10px; | |
align-items: center; | |
} | |
.separator { | |
margin: 0 5px; | |
color: black; | |
} | |
""" | |

# Gradio App
with gr.Blocks(css=css) as app:
    with gr.Row():
        gr.HTML('<div id="header">R-Detect on HuggingFace</div>')
    with gr.Row():
        gr.HTML(
            """
            <div class="links">
                <a href="https://openreview.net/forum?id=z9j7wctoGV" target="_blank">Paper</a>
                <span class="separator">|</span>
                <a href="https://github.com/xLearn-AU/R-Detect" target="_blank">Code</a>
                <span class="separator">|</span>
                <a href="mailto:1730421718@qq.com" target="_blank">Contact</a>
            </div>
            """
        )
    with gr.Row():
        input_text = gr.Textbox(
            label="Input Text",
            placeholder="Enter Text Here",
            lines=8,
        )
    with gr.Row():
        model_name = gr.Dropdown(
            [
                "gpt2-medium",
                "gpt2-large",
                "t5-large",
                "t5-small",
                "roberta-base",
                "roberta-base-openai-detector",
                "chatgpt-detector-roberta",
                "gpt3-small-finetune-cnndaily-news",
                "gpt-neo-125m",
                "falcon-rw-1b",
            ],
            label="Select Model",
            value="gpt2-medium",
        )
        submit_button = gr.Button("Run Detection", variant="primary")
        clear_button = gr.Button("Clear", variant="secondary")
    with gr.Row():
        output = gr.Textbox(
            label="Inference Result",
            placeholder="Made by Human or AI",
            elem_id="output-text",
        )
    # The mock takes (model_name, real_text, generated_text); the single input box
    # is wired in as both text arguments.
    submit_button.click(
        run_test_power, inputs=[model_name, input_text, input_text], outputs=output
    )
    clear_button.click(lambda: ("", ""), inputs=[], outputs=[input_text, output])
    # model_name.change(change_mode, inputs=[model_name], outputs=[model_name])

    with gr.Accordion("Disclaimer", open=False):
        gr.Markdown(
            """
            - **Disclaimer**: This tool is for demonstration purposes only. It is not a foolproof AI detector.
            - **Accuracy**: Results may vary based on input length and quality.
            """
        )
    with gr.Accordion("Citations", open=False):
        gr.Markdown(
            """
            ```
            @inproceedings{zhangs2024MMDMP,
                title={Detecting Machine-Generated Texts by Multi-Population Aware Optimization for Maximum Mean Discrepancy},
                author={Zhang, Shuhai and Song, Yiliao and Yang, Jiahao and Li, Yuanqing and Han, Bo and Tan, Mingkui},
                booktitle={International Conference on Learning Representations (ICLR)},
                year={2024}
            }
            ```
            """
        )

app.launch()