from typing import Any, List, Optional, Dict, Union

from typing_extensions import TypedDict, NotRequired, Literal

# Llama-2 chat prompt markers for instruction and system blocks.
B_INST, E_INST = "[INST]", "[/INST]"
B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n"

# Role = Literal["system", "user", "assistant"]
# class Message(TypedDict):
#     role: Role
#     content: str


class ChatCompletionMessage(TypedDict):
    role: Literal["assistant", "user", "system"]
    content: str
    user: NotRequired[str]


# transformers calls this Message; llama.cpp calls it ChatCompletionMessage.
Message = ChatCompletionMessage
Dialog = List[Message]
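
# --- Illustrative sketch (not part of the original module) ---
# Shows one common way the B_INST/B_SYS markers and the Dialog alias above are
# combined into a Llama-2-style chat prompt. The helper name
# `format_llama2_prompt` is hypothetical.
def format_llama2_prompt(dialog: Dialog) -> str:
    # Fold a leading system message into the first user turn, which is where
    # the <<SYS>> block is conventionally placed in the Llama-2 chat format.
    if dialog and dialog[0]["role"] == "system":
        system_content = dialog[0]["content"]
        rest = dialog[1:]
        if rest and rest[0]["role"] == "user":
            merged = Message(
                role="user",
                content=B_SYS + system_content + E_SYS + rest[0]["content"],
            )
            rest = [merged] + rest[1:]
        dialog = rest
    parts: List[str] = []
    for message in dialog:
        if message["role"] == "user":
            parts.append(f"{B_INST} {message['content']} {E_INST}")
        elif message["role"] == "assistant":
            parts.append(f" {message['content']} ")
    return "".join(parts)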

class EmbeddingUsage(TypedDict):
    prompt_tokens: int
    total_tokens: int


class EmbeddingData(TypedDict):
    index: int
    object: str
    embedding: List[float]


class Embedding(TypedDict):
    object: Literal["list"]
    model: str
    data: List[EmbeddingData]
    usage: EmbeddingUsage
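
# --- Illustrative sketch (not part of the original module) ---
# Example of the payload shape the embedding TypedDicts above describe; the
# model name and vector values are placeholders.
_example_embedding: Embedding = {
    "object": "list",
    "model": "example-model",
    "data": [
        {"index": 0, "object": "embedding", "embedding": [0.1, 0.2, 0.3]},
    ],
    "usage": {"prompt_tokens": 3, "total_tokens": 3},
}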

class CompletionLogprobs(TypedDict):
    text_offset: List[int]
    token_logprobs: List[Optional[float]]
    tokens: List[str]
    top_logprobs: List[Optional[Dict[str, float]]]


class CompletionChoice(TypedDict):
    text: str
    index: int
    logprobs: Optional[CompletionLogprobs]
    finish_reason: Optional[str]


class CompletionUsage(TypedDict):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int

class CompletionChunk(TypedDict):
    id: str
    object: Literal["text_completion"]
    created: int
    model: str
    choices: List[CompletionChoice]


class Completion(TypedDict):
    id: str
    object: Literal["text_completion"]
    created: int
    model: str
    choices: List[CompletionChoice]
    usage: CompletionUsage
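
# --- Illustrative sketch (not part of the original module) ---
# Example of the text-completion payload that Completion describes; the id,
# model name, and token counts are placeholders.
_example_completion: Completion = {
    "id": "cmpl-xyz",
    "object": "text_completion",
    "created": 0,
    "model": "example-model",
    "choices": [
        {"text": "Hello!", "index": 0, "logprobs": None, "finish_reason": "stop"},
    ],
    "usage": {"prompt_tokens": 1, "completion_tokens": 2, "total_tokens": 3},
}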

class ChatCompletionChoice(TypedDict):
    index: int
    message: ChatCompletionMessage
    finish_reason: Optional[str]


class ChatCompletion(TypedDict):
    id: str
    object: Literal["chat.completion"]
    created: int
    model: str
    choices: List[ChatCompletionChoice]
    usage: CompletionUsage
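
# --- Illustrative sketch (not part of the original module) ---
# Example of the chat-completion payload that ChatCompletion describes; the
# id, model name, and token counts are placeholders.
_example_chat_completion: ChatCompletion = {
    "id": "chatcmpl-xyz",
    "object": "chat.completion",
    "created": 0,
    "model": "example-model",
    "choices": [
        {
            "index": 0,
            "message": {"role": "assistant", "content": "Hi there."},
            "finish_reason": "stop",
        },
    ],
    "usage": {"prompt_tokens": 4, "completion_tokens": 3, "total_tokens": 7},
}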

class ChatCompletionChunkDeltaEmpty(TypedDict):
    pass


class ChatCompletionChunkDelta(TypedDict):
    role: NotRequired[Literal["assistant"]]
    content: NotRequired[str]


class ChatCompletionChunkChoice(TypedDict):
    index: int
    delta: Union[ChatCompletionChunkDelta, ChatCompletionChunkDeltaEmpty]
    finish_reason: Optional[str]


class ChatCompletionChunk(TypedDict):
    id: str
    model: str
    object: Literal["chat.completion.chunk"]
    created: int
    choices: List[ChatCompletionChunkChoice]
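
# --- Illustrative sketch (not part of the original module) ---
# One common way to fold a stream of ChatCompletionChunk objects back into a
# single assistant message: concatenate the incremental `content` deltas.
# The helper name `accumulate_chunks` is hypothetical.
def accumulate_chunks(chunks: List[ChatCompletionChunk]) -> ChatCompletionMessage:
    pieces: List[str] = []
    for chunk in chunks:
        for choice in chunk["choices"]:
            # Empty deltas carry no content; `.get` skips them safely.
            content = choice["delta"].get("content")
            if content is not None:
                pieces.append(content)
    return ChatCompletionMessage(role="assistant", content="".join(pieces))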