Spaces:
Paused
Paused
:recycle: [Refactor] Rename gpt-3.5 to gpt-3.5-turbo
Browse files
- apis/chat_api.py +1 -1
- constants/models.py +3 -3
- networks/openai_streamer.py +4 -2
apis/chat_api.py
CHANGED
|
@@ -89,7 +89,7 @@ class ChatAPIApp:
|
|
| 89 |
def chat_completions(
|
| 90 |
self, item: ChatCompletionsPostItem, api_key: str = Depends(extract_api_key)
|
| 91 |
):
|
| 92 |
-
if item.model == "gpt-3.5":
|
| 93 |
streamer = OpenaiStreamer()
|
| 94 |
stream_response = streamer.chat_response(messages=item.messages)
|
| 95 |
else:
|
|
|
|
| 89 |
def chat_completions(
|
| 90 |
self, item: ChatCompletionsPostItem, api_key: str = Depends(extract_api_key)
|
| 91 |
):
|
| 92 |
+
if item.model == "gpt-3.5-turbo":
|
| 93 |
streamer = OpenaiStreamer()
|
| 94 |
stream_response = streamer.chat_response(messages=item.messages)
|
| 95 |
else:
|
constants/models.py
CHANGED
|
@@ -22,7 +22,7 @@ TOKEN_LIMIT_MAP = {
|
|
| 22 |
"mistral-7b": 32768,
|
| 23 |
"openchat-3.5": 8192,
|
| 24 |
"gemma-7b": 8192,
|
| 25 |
-
"gpt-3.5": 8192,
|
| 26 |
}
|
| 27 |
|
| 28 |
TOKEN_RESERVED = 20
|
|
@@ -34,7 +34,7 @@ AVAILABLE_MODELS = [
|
|
| 34 |
"mistral-7b",
|
| 35 |
"openchat-3.5",
|
| 36 |
"gemma-7b",
|
| 37 |
-
"gpt-3.5",
|
| 38 |
]
|
| 39 |
|
| 40 |
# https://platform.openai.com/docs/api-reference/models/list
|
|
@@ -75,7 +75,7 @@ AVAILABLE_MODELS_DICTS = [
|
|
| 75 |
"owned_by": "Google",
|
| 76 |
},
|
| 77 |
{
|
| 78 |
-
"id": "gpt-3.5",
|
| 79 |
"description": "[openai/gpt-3.5-turbo]: https://platform.openai.com/docs/models/gpt-3-5-turbo",
|
| 80 |
"object": "model",
|
| 81 |
"created": 1700000000,
|
|
|
|
| 22 |
"mistral-7b": 32768,
|
| 23 |
"openchat-3.5": 8192,
|
| 24 |
"gemma-7b": 8192,
|
| 25 |
+
"gpt-3.5-turbo": 8192,
|
| 26 |
}
|
| 27 |
|
| 28 |
TOKEN_RESERVED = 20
|
|
|
|
| 34 |
"mistral-7b",
|
| 35 |
"openchat-3.5",
|
| 36 |
"gemma-7b",
|
| 37 |
+
"gpt-3.5-turbo",
|
| 38 |
]
|
| 39 |
|
| 40 |
# https://platform.openai.com/docs/api-reference/models/list
|
|
|
|
| 75 |
"owned_by": "Google",
|
| 76 |
},
|
| 77 |
{
|
| 78 |
+
"id": "gpt-3.5-turbo",
|
| 79 |
"description": "[openai/gpt-3.5-turbo]: https://platform.openai.com/docs/models/gpt-3-5-turbo",
|
| 80 |
"object": "model",
|
| 81 |
"created": 1700000000,
|
networks/openai_streamer.py
CHANGED
|
@@ -149,8 +149,10 @@ class OpenaiRequester:
|
|
| 149 |
|
| 150 |
class OpenaiStreamer:
|
| 151 |
def __init__(self):
|
| 152 |
-
self.model = "gpt-3.5"
|
| 153 |
-
self.message_outputer = OpenaiStreamOutputer(
|
|
|
|
|
|
|
| 154 |
self.tokenizer = tiktoken.get_encoding("cl100k_base")
|
| 155 |
|
| 156 |
def count_tokens(self, messages: list[dict]):
|
|
|
|
| 149 |
|
| 150 |
class OpenaiStreamer:
|
| 151 |
def __init__(self):
|
| 152 |
+
self.model = "gpt-3.5-turbo"
|
| 153 |
+
self.message_outputer = OpenaiStreamOutputer(
|
| 154 |
+
owned_by="openai", model="gpt-3.5-turbo"
|
| 155 |
+
)
|
| 156 |
self.tokenizer = tiktoken.get_encoding("cl100k_base")
|
| 157 |
|
| 158 |
def count_tokens(self, messages: list[dict]):
|