Thiloid committed
Commit 7502cc2 · verified · 1 Parent(s): 2cf478d

Update run.py

Files changed (1)
  1. run.py +2 -2
run.py CHANGED
@@ -25,7 +25,7 @@ collection = client.get_collection(name="chromaTS", embedding_function=sentence_
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
 
-def format_prompt(message):
+def format_prompt(message, history):
     prompt = "" #"<s>"
     #for user_prompt, bot_response in history:
     #    prompt += f"[INST] {user_prompt} [/INST]"
@@ -76,7 +76,7 @@ def response(
     #output=output+"\n\n<br><details open><summary><strong>Sources</strong></summary><br><ul>"+ "".join(["<li>" + s + "</li>" for s in combination])+"</ul></details>"
     yield output
 
-gr.ChatInterface(response, chatbot=gr.Chatbot(value=[[None,"Herzlich willkommen! Ich bin Chätti ein KI-basiertes Studienassistenzsystem, das für jede Anfrage die am besten Studieninformationen empfiehlt.<br>Erzähle mir, was du gerne tust!"]],render_markdown=True),title="German BERUFENET-RAG-Interface to the Hugging Face Hub").queue().launch(share=True) #False, server_name="0.0.0.0", server_port=7864)
+gr.ChatInterface(response, chatbot=gr.Chatbot(value=[[None,"Herzlich willkommen! Ich bin Chätti ein KI-basiertes Studienassistenzsystem, das für jede Anfrage die am besten Studieninformationen empfiehlt.<br>Erzähle mir, was du gerne tust!"]],render_markdown=True),title="German Studyhelder Chätti").queue().launch(share=True) #False, server_name="0.0.0.0", server_port=7864)
 print("Interface up and running!")
 
 
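The commented-out loop visible in the first hunk explains the new history parameter: Gradio's ChatInterface hands its callback the current message together with the running conversation, and the Mixtral instruct format wraps each turn in [INST] ... [/INST] tags. A minimal sketch of how format_prompt(message, history) could use that argument once the loop is re-enabled (the trailing </s> after each past bot response is an assumption, not taken from the diff):

def format_prompt(message, history):
    # Sketch: build a Mixtral-8x7B-Instruct style prompt from the Gradio chat
    # history, following the commented-out loop shown in the hunk above.
    prompt = ""  # "<s>" could be prepended here if the backend does not add it
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "  # assumption: close each past turn with </s>
    prompt += f"[INST] {message} [/INST]"  # the current user message goes last
    return prompt

The second hunk only changes the interface title from "German BERUFENET-RAG-Interface to the Hugging Face Hub" to "German Studyhelder Chätti"; the German welcome message and the launch settings are unchanged.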