Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -45,7 +45,7 @@ def model_launch(input):
|
|
45 |
instruction=input,
|
46 |
response="",
|
47 |
)
|
48 |
-
out =
|
49 |
ind = out.index('Response') + len('Response')+2
|
50 |
return out[ind:]
|
51 |
|
@@ -135,8 +135,16 @@ def set_question_to_edit(selection):
|
|
135 |
|
136 |
def submit_question(question_content):
|
137 |
# Simulate processing by an LLM model
|
138 |
-
model_output = f"Model response to the question: {question_content[:100]}..." # Only showing part for brevity
|
139 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
140 |
|
141 |
def clear_output():
|
142 |
return "", ""
|
|
|
45 |
instruction=input,
|
46 |
response="",
|
47 |
)
|
48 |
+
out = geomodel_llm.generate(prompt, max_length=1024)
|
49 |
ind = out.index('Response') + len('Response')+2
|
50 |
return out[ind:]
|
51 |
|
|
|
135 |
|
136 |
def submit_question(question_content):
    """Send the user's question to the LLM and return the response text.

    Builds an Alpaca-style instruction prompt from ``question_content``,
    generates with ``geomodel_llm`` (defined elsewhere in this file), and
    returns everything after the 'Response' marker in the raw output.

    Parameters:
        question_content: the question text typed by the user.

    Returns:
        The model's response portion of the generated text.

    Raises:
        ValueError: if the generated output contains no 'Response' marker.
    """
    # Alpaca-style template; response slot left blank for the model to fill.
    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
    prompt = template.format(
        instruction=question_content,
        response="",
    )
    out = geomodel_llm.generate(prompt, max_length=1024)
    # Skip past the 'Response' header; +2 presumably skips ':\n' — TODO confirm
    # against the actual template separator.
    ind = out.index('Response') + len('Response') + 2
    # BUG FIX: the original returned `model_output[ind:]`, but `model_output`
    # was commented out above, so the function raised NameError on every call.
    # The intended value is the sliced generation, matching model_launch().
    return out[ind:]
|
148 |
|
149 |
def clear_output():
    """Reset both UI fields by returning a pair of empty strings."""
    empty = ""
    return (empty, empty)
|