Merge pull request #14 from namin/tweak-for-openai-compatible-server
aide/backend/__init__.py (+1 -1)
@@ -33,7 +33,7 @@ def query(
         "max_tokens": max_tokens,
     }

-    query_func =
+    query_func = backend_anthropic.query if "claude-" in model else backend_openai.query
     output, req_time, in_tok_count, out_tok_count, info = query_func(
         system_message=compile_prompt_to_md(system_message) if system_message else None,
         user_message=compile_prompt_to_md(user_message) if user_message else None,