Spaces: Runtime error
Commit 7ad098c · Parent: 21aad16

fix: another bugfix - removing logger again

Files changed:
- backend/controller.py +4 -5
- main.py +2 -0
- requirements.txt +0 -1
backend/controller.py
CHANGED
@@ -3,7 +3,6 @@
 
 # external imports
 import gradio as gr
-from loguru import logger
 
 # internal imports
 from model import godel
@@ -34,10 +33,10 @@ def interference(
 
     if model_selection.lower() == "mistral":
         model = mistral
-
+        print("Identified model as Mistral")
     else:
         model = godel
-
+        print("Identified model as GODEL")
 
     # if a XAI approach is selected, grab the XAI module instance
     if xai_selection in ("SHAP", "Attention"):
@@ -96,7 +95,7 @@ def interference(
 def vanilla_chat(
     model, message: str, history: list, system_prompt: str, knowledge: str = ""
 ):
-
+    print(f"Running normal chat with {model}.")
 
     # formatting the prompt using the model's format_prompt function
     prompt = model.format_prompt(message, history, system_prompt, knowledge)
@@ -113,7 +112,7 @@ def vanilla_chat(
 def explained_chat(
     model, xai, message: str, history: list, system_prompt: str, knowledge: str = ""
 ):
-
+    print(f"Running explained chat with {xai} with {model}.")
 
     # formatting the prompt using the model's format_prompt function
     # message, history, system_prompt, knowledge = mdl.prompt_limiter(
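The commit replaces the removed loguru calls with bare print statements. The stdlib logging module would give the same trace without pulling loguru back into requirements.txt; below is a minimal sketch of that alternative for the model dispatch in interference(). select_model is a hypothetical helper, and the model handles are passed as parameters (the real code imports them as modules) so the snippet runs standalone; none of this is part of the commit.

import logging

# stdlib logging: no extra entry needed in requirements.txt
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def select_model(model_selection: str, mistral, godel):
    # mirrors the dispatch in interference(); mistral and godel stand in
    # for the model modules used by backend/controller.py
    if model_selection.lower() == "mistral":
        model = mistral
        logger.info("Identified model as Mistral")
    else:
        model = godel
        logger.info("Identified model as GODEL")
    return model


# usage with placeholder strings standing in for the model modules
select_model("Mistral", mistral="mistral-stub", godel="godel-stub")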
main.py
CHANGED
@@ -64,10 +64,12 @@ def xai_info(xai_radio):
     else:
         gr.Info("No XAI method was selected.")
 
+
 def model_info(model_radio):
     # displays the selected model using the Gradio Info component
     gr.Info(f"The following model was selected:\n {model_radio} ")
 
+
 # ui interface based on Gradio Blocks
 # see https://www.gradio.app/docs/interface)
 with gr.Blocks(
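The two added blank lines only restore PEP 8's two-blank-line spacing around top-level definitions. For context, model_info is presumably wired to a radio component's change event inside the Blocks layout; a minimal sketch of that wiring follows, with the component name and choices assumed for illustration rather than taken from the commit.

import gradio as gr


def model_info(model_radio):
    # displays the selected model using the Gradio Info component
    gr.Info(f"The following model was selected:\n {model_radio} ")


with gr.Blocks() as demo:
    # component name and choices are assumptions, not from the commit
    model_radio = gr.Radio(["Mistral", "GODEL"], label="Model")
    model_radio.change(model_info, inputs=model_radio)

demo.launch()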
requirements.txt
CHANGED
@@ -19,4 +19,3 @@ matplotlib
 pre-commit
 ipython
 gradio-iframe~=0.0.10
-loguru
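The Space's "Runtime error" status is most plausibly the loguru import in backend/controller.py failing after the dependency had already been dropped; this commit removes both the import and the requirements.txt entry. A quick scan for stale imports before deleting a dependency catches that class of breakage. A minimal sketch, assuming it is run from the repository root:

import pathlib
import re

# flag any remaining "import loguru" / "from loguru import ..." statements
pattern = re.compile(r"^\s*(?:from|import)\s+loguru\b", re.MULTILINE)
for path in pathlib.Path(".").rglob("*.py"):
    if pattern.search(path.read_text(encoding="utf-8")):
        print(f"{path}: still references loguru")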