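"""Gradio chat UI for a fake-client chatbot served by an Ollama model, intended for CBT/FIT therapist practice."""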
import gradio as gr
import ollama
import time
# import pandas as pd  # only needed if the Generate_report helper below is re-enabled


# def Generate_report(history, model_flags):
#     data = {'steps': model_flags,
#             'turns': history}
#     dataframe = pd.DataFrame.from_dict(data)
#     dataframe.to_csv('Reports.csv', index=False)


def user(user_message, history):
    # Clear the textbox and append the user's turn to the chat history.
    return "", history + [{'role': 'user', 'content': user_message}]


def respond(history):
    # Wrap the latest user message in the prompt template the model expects.
    text = f"<s> ###Human: {history[-1]['content']} ###Assistant: "
    response = ollama.generate(
        model='LLAMA3.2 Virtual_doctor2:latest',
        prompt=text,
        stream=False,
    )
    # Replay the finished response character by character to simulate streaming.
    history.append({'role': 'assistant', 'content': ""})
    for character in response['response']:
        history[-1]['content'] += character
        time.sleep(0.02)
        yield history
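

# Alternative sketch (not wired into the UI): stream tokens straight from Ollama
# instead of replaying a finished response. Assumes the same model name; with
# stream=True, ollama.generate yields chunks whose 'response' field holds the
# next text fragment.
def respond_streaming(history):
    text = f"<s> ###Human: {history[-1]['content']} ###Assistant: "
    history.append({'role': 'assistant', 'content': ""})
    for chunk in ollama.generate(model='LLAMA3.2 Virtual_doctor2:latest',
                                 prompt=text, stream=True):
        history[-1]['content'] += chunk['response']
        yield history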



with gr.Blocks() as demo:
    gr.Markdown('# AI Therapist for CBT and FIT')
    with gr.Tab('Chat Interface'):
        gr.HTML('<h1>Fake client chatbot</h1>')
        chatbot = gr.Chatbot(type="messages")
        msg = gr.Textbox()
        btn = gr.Button('Send')
        clear = gr.ClearButton([msg, chatbot])

        # Record the user's turn first, then stream the assistant's reply into the chat.
        btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(respond, chatbot, chatbot)
        clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == '__main__':
    # Bind to all interfaces so the app is reachable on port 7860 from other machines.
    demo.launch(server_name="0.0.0.0", server_port=7860)