"""
Try out gradio.ChatInterface.

colab gradio-chatinterface.

%%writefile requirements.txt
gradio
transformers
sentencepiece
torch
cpm_kernels

import gradio as gr

def greet(name):
    return "Hello " + name + "!"

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")
    output = gr.Textbox(label="Output Box")
    greet_btn = gr.Button("Greet")
    greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")


demo.launch()

"""
# pylint: disable=line-too-long, missing-module-docstring, missing-function-docstring
# import torch
import random
import time

import gradio as gr


def respond2(message, chat_history):
    if chat_history is None:
        chat_history = []
    bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])

    temp = ""
    chat_history.append((message, temp))
    for elm in range(len(bot_message)):
        temp = bot_message[:elm+1]
        time.sleep(0.2)
        chat_history[-1] = message, temp
        # yield message, chat_history
        # chatbot.value = chat_history

    chat_history[-1] = (message, "done " + bot_message)
    time.sleep(2)

    yield "", chat_history

def stream_chat(delay=0.1):
    """Simulate a token-streaming model: yield growing digit strings.

    Each yield is the full response so far ("0", "01", ..., "0123456789"),
    matching the convention of real streaming chat APIs.

    Sample history snapshots as a chat UI would render them:

        Sure [('test me', 'Sure')]
        Sure, [('test me', 'Sure,')]
        Sure, I [('test me', 'Sure, I')]
        Sure, I' [('test me', "Sure, I'")]
        Sure, I'd [('test me', "Sure, I'd")]

    Args:
        delay: seconds to sleep between yields; 0 disables the pause.
            Default keeps the original 0.1s pacing.
    """
    resp = ""
    for digit in range(10):
        resp += str(digit)
        # Use the module-level `time` import; the original re-executed
        # `from time import sleep` on every loop iteration.
        if delay:
            time.sleep(delay)
        yield resp


def chat(message="", history=None):
    """Gradio handler: stream a canned response while mirroring it to a logger.

    Args:
        message: user input from the textbox.
        history: prior (user, bot) pairs; defaults to a fresh list per call.
            (The original used a mutable default ``history=[]``, which is
            shared across every call to the function.)

    Yields:
        The partial response strings from stream_chat(), then a final
        "done ..." string.
    """
    if history is None:
        history = []
    # Real-model variant kept for reference:
    # for response, _ in chat_model.stream_chat(
    #     tokenizer, message, history, max_length=2048, top_p=0.7, temperature=0.95
    # ):
    #     yield response

    # Prime the logging coroutine so it is parked at its first `yield`.
    g = update_chatbot()
    g.send(None)

    response = ""  # guards the final yield if the stream yields nothing
    for response in stream_chat():
        g.send(response)
        yield response

    yield "done " + response


def update_chatbot():
    """Consumer coroutine: print the repr of every value sent into it, forever.

    Prime with ``g.send(None)`` before sending real values.
    """
    while True:
        message = yield
        print(f"message={message!r}")


def greet(name):
    """Return a hello greeting for *name*."""
    return f"Hello {name}!"

# Simple demo UI: a name textbox wired to the streaming `chat` handler.
with gr.Blocks() as block:
    name = gr.Textbox(label="Name")
    output = gr.Textbox(label="Output Box")
    greet_btn = gr.Button("Greet")
    # greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")

    # `chat` is a generator, so the Output Box updates as chunks stream in.
    greet_btn.click(fn=chat, inputs=name, outputs=output, api_name="greet")

# Disabled experiment kept as a dead string so it never executes; it sketches
# a Chatbot+Textbox UI driven by `chat`.
# NOTE(review): `block(...)` is not a valid call — looks like a half-converted
# gr.ChatInterface(...) snippet; confirm before re-enabling.
_ = """
with gr.Blocks(theme=gr.themes.Glass(text_size="sm", spacing_size="sm"),) as block:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()

# gr.ChatInterface(
block(
    chat,
    [msg, chatbot],
    [chatbot],
    # title="gradio-chatinterface-tryout",
    # examples=examples_list,
).queue(max_size=2).launch()
# """

# block.queue(max_size=2).launch()

# Main demo: Chatbot + Textbox; submitting the textbox streams a reply.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        """Non-streaming handler: append one random reply, then clear the box.

        NOTE(review): unused — msg.submit below is wired to the module-level
        respond2 instead.
        """
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        chat_history.append((message, bot_message))
        time.sleep(2)
        return "", chat_history

    def respond1(message, chat_history):
        """Streaming handler: reveal the random reply one character per yield.

        NOTE(review): also unused — see respond above.
        """
        if chat_history is None:
            chat_history = []
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])

        temp = ""
        chat_history.append((message, temp))
        for elm in range(len(bot_message)):
            temp = bot_message[:elm+1]
            time.sleep(0.2)
            chat_history[-1] = message, temp
            # Yields (message, history): keeps the typed text in the box while
            # streaming; the final yield below clears it with "".
            yield message, chat_history

        chat_history[-1] = (message, "done " + bot_message)
        time.sleep(2)

        yield "", chat_history

    # Actually wired handler is the module-level respond2, not the two above.
    msg.submit(respond2, [msg, chatbot], [msg, chatbot])

# demo.queue(max_size=2).launch()