Fix indentation error for model_info in create_llm_pipeline
app.py CHANGED
@@ -253,7 +253,7 @@ def create_llm_pipeline(model_key):
         raise ValueError(f"Model is None and no fallback available")
 
     # Get the model info for reference
-
+    model_info = MODEL_CONFIG.get(model_key, MODEL_CONFIG.get(fallback_model, {}))
 
     # For GGUF models from llama-cpp-python
     if is_gguf:
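For reference, a minimal, self-contained sketch of the lookup pattern used by the fixed line. The MODEL_CONFIG entries, the fallback_model value, and the get_model_info helper below are assumptions for illustration only; just the lookup expression itself mirrors the diff.

# Sketch of the model_info lookup with fallback. The config entries,
# fallback_model value, and this helper are hypothetical; only the
# nested .get() expression comes from the diff above.
MODEL_CONFIG = {
    "mistral-7b-gguf": {"repo_id": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "is_gguf": True},
    "flan-t5-base": {"repo_id": "google/flan-t5-base", "is_gguf": False},
}
fallback_model = "flan-t5-base"

def get_model_info(model_key):
    # Unknown keys fall back to the default model's entry, then to {}.
    return MODEL_CONFIG.get(model_key, MODEL_CONFIG.get(fallback_model, {}))

print(get_model_info("mistral-7b-gguf")["is_gguf"])  # True
print(get_model_info("unknown-key"))                 # returns the fallback entry

Nesting the two .get() calls means the lookup never raises even when both keys are missing; it simply returns an empty dict, which downstream code can probe with further .get() calls.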