Commit 21aad16 (parent: dacf466)
fix: fixing controller, adding logging

Files changed:
- backend/controller.py (+9 -2)
- requirements.txt (+1 -0)
backend/controller.py

@@ -3,6 +3,7 @@
 
 # external imports
 import gradio as gr
+from loguru import logger
 
 # internal imports
 from model import godel

@@ -31,17 +32,19 @@ def interference(
     Always answer as helpfully as possible, while being safe.
     """
 
-    if model_selection.lower == "mistral":
+    if model_selection.lower() == "mistral":
         model = mistral
+        logger.debug("Indetified model as Mistral")
     else:
         model = godel
+        logger.debug("Indetified model as GODEL")
 
     # if a XAI approach is selected, grab the XAI module instance
     if xai_selection in ("SHAP", "Attention"):
         # matching selection
         match xai_selection.lower():
             case "shap":
-                if model_selection.lower == "mistral":
+                if model_selection.lower() == "mistral":
                     xai = cpt_int
                 else:
                     xai = shap_int

@@ -93,6 +96,8 @@ def interference(
 def vanilla_chat(
     model, message: str, history: list, system_prompt: str, knowledge: str = ""
 ):
+    logger.info(f"Running normal chat with {model}.")
+
     # formatting the prompt using the model's format_prompt function
     prompt = model.format_prompt(message, history, system_prompt, knowledge)
 

@@ -108,6 +113,8 @@ def vanilla_chat(
 def explained_chat(
     model, xai, message: str, history: list, system_prompt: str, knowledge: str = ""
 ):
+    logger.info(f"Running explained chat with {xai} with {model}.")
+
     # formatting the prompt using the model's format_prompt function
     # message, history, system_prompt, knowledge = mdl.prompt_limiter(
     # message, history, system_prompt, knowledge
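Context on the fix above: without the parentheses, model_selection.lower is the bound method object itself rather than its result, so comparing it to a string is always False, and the GODEL branch ran no matter which model was selected. A minimal standalone Python sketch of the difference (the variable name is illustrative):

    model_selection = "Mistral"

    # Bug: compares a bound method object to a str, so this is always False
    print(model_selection.lower == "mistral")    # False

    # Fix: calls the method and compares the lowercased string
    print(model_selection.lower() == "mistral")  # True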
requirements.txt

@@ -19,3 +19,4 @@ matplotlib
 pre-commit
 ipython
 gradio-iframe~=0.0.10
+loguru
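loguru, added above without a version pin, needs no setup for the calls this commit introduces: importing logger yields a preconfigured logger with a default stderr sink at DEBUG level. A minimal sketch of the API the controller now relies on (the message text is illustrative):

    from loguru import logger

    logger.debug("Identified model as Mistral")     # debug-level record to the default stderr sink
    logger.info("Running normal chat with godel.")  # info-level record

Install with: pip install loguru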