# Gradio chat app that serves the LLAMA3.2 Virtual Doctor GGUF model with llama.cpp.
import os
import time

import gradio as gr
from llama_cpp import Llama
# import pandas as pd  # only needed if Generate_report below is re-enabled

# Point the Hugging Face caches at writable directories under /app.
os.environ["HF_HOME"] = "/app/.hf_cache"
os.environ["TRANSFORMERS_CACHE"] = "/app/.transformers_cache"

# Download the quantized GGUF checkpoint from the Hub and load it with llama.cpp.
llm = Llama.from_pretrained(
    repo_id="alibidaran/LLAMA3.2-Virtual_doctor_GGUF",
    filename="unsloth.Q8_0.gguf",
)
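# Note (assumption, not used above): Llama.from_pretrained forwards extra keyword
# arguments to the Llama constructor, so settings such as n_ctx or n_gpu_layers
# could be passed here if needed; this app relies on the defaults.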

# def Generate_report(history, model_flags):
#     data = {'steps': model_flags,
#             'turns': history}
#     dataframe = pd.DataFrame.from_dict(data)
#     dataframe.to_csv('Repports.csv', index=False)


def user(user_message, history):
    # Append the user's message to the chat history and clear the textbox.
    return "", history + [{'role': 'user', 'content': user_message}]


def respond(history):
    # Build the prompt in the "###Human / ###Asistant" format this model is served with.
    text = f"<s> ###Human: {history[-1]['content']} ###Asistant: "
    response = llm(text,
                   max_tokens=512,
                   echo=False)  # return only the generated completion, not the echoed prompt
    response = response['choices'][0]['text']
    print(response)
    # Stream the reply into the chat bubble one character at a time.
    history.append({'role': 'assistant', 'content': ""})
    for character in response:
        history[-1]['content'] += character
        time.sleep(0.02)
        yield history
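# Optional sketch (not wired into the UI below): llama.cpp can also stream tokens
# directly with stream=True, which avoids waiting for the full completion before
# the character-by-character loop above. Assumes the same prompt format.
def respond_token_stream(history):
    text = f"<s> ###Human: {history[-1]['content']} ###Asistant: "
    history.append({'role': 'assistant', 'content': ""})
    for chunk in llm(text, max_tokens=512, stream=True):
        history[-1]['content'] += chunk['choices'][0]['text']
        yield history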



with gr.Blocks() as demo:
    gr.Markdown('# Welcome to Zaki platform')
    with gr.Tab('Chat Interface'):
        gr.HTML('<h1> Virtual Doctor </h1>')
        chatbot = gr.Chatbot(type="messages")
        msg = gr.Textbox()
        btn = gr.Button('Send')

        # ClearButton already resets both the textbox and the chat history on click.
        clear = gr.ClearButton([msg, chatbot])
        # Store the user's message first, then stream the assistant's reply.
        btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(respond, chatbot, chatbot)

if __name__ == '__main__':
    demo.launch(server_name="0.0.0.0", server_port=7860)