Update app.py
app.py CHANGED
@@ -9,7 +9,6 @@ a Llama 2 model with 70B parameters fine-tuned for chat instructions.
 """
 
 # Initialize client
-client = Client("https://ysharma-explore-llamav2-with-tgi.hf.space/")
 
 
 with st.sidebar:
@@ -23,6 +22,9 @@ with st.sidebar:
 
 # Prediction function
 def predict(message, system_prompt, temperature, max_new_tokens,Topp,Repetitionpenalty):
+    with st.status("Starting client"):
+        client = Client("https://ysharma-explore-llamav2-with-tgi.hf.space/")
+        st.write("Requesting client")
     with st.status("Requesting LLama-2"):
         st.write("Requesting API")
         response = client.predict(
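
For context, here is a minimal sketch of how the changed section of app.py reads after this commit. It assumes the usual streamlit and gradio_client imports; the sidebar widgets, the positional arguments passed to client.predict, the api_name, and the return value are assumptions, since they lie outside this diff.

# Minimal sketch of the changed part of app.py after this commit (not the full file).
# Assumption: the client.predict(...) argument order and api_name below are illustrative
# guesses; only the two st.status blocks and the Client URL come from the diff itself.
import streamlit as st
from gradio_client import Client

def predict(message, system_prompt, temperature, max_new_tokens, Topp, Repetitionpenalty):
    # The Gradio client is now created lazily inside predict, wrapped in st.status,
    # instead of once at module import time as before this commit.
    with st.status("Starting client"):
        client = Client("https://ysharma-explore-llamav2-with-tgi.hf.space/")
        st.write("Requesting client")
    with st.status("Requesting LLama-2"):
        st.write("Requesting API")
        response = client.predict(
            message,
            system_prompt,
            temperature,
            max_new_tokens,
            Topp,               # top-p sampling value (name kept from the original signature)
            Repetitionpenalty,  # repetition penalty (name kept from the original signature)
            api_name="/chat",   # assumed endpoint name; not visible in this diff
        )
    return response             # assumed; the diff ends before the function body does

Creating the client inside predict means a failed connection to the upstream Space surfaces inside the "Starting client" status box at request time, rather than raising during module import before the Streamlit UI has rendered.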