# Scrape residue from the HuggingFace Spaces page header, kept as comments
# so the module parses:
#   Spaces:
#   Sleeping
#   Sleeping
import asyncio
import io
import os
import random
import tempfile
import time
from threading import Thread

import chainlit as cl
import openai
import torch
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import (
    CharacterTextSplitter,
    RecursiveCharacterTextSplitter,
)
from langchain.vectorstores import Chroma
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    BitsAndBytesConfig,
    TextIteratorStreamer,
)
# Configuration: OpenAI credentials.
# SECURITY: a live-looking secret key was previously hard-coded here in plain
# text. It must be treated as compromised — revoke/rotate it in the OpenAI
# dashboard. The key is now read from the environment instead.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
@cl.on_chat_start
async def main():
    """Start a chat session: seed the conversation history and greet the user.

    Registered as the Chainlit chat-start hook. NOTE(review): the decorator
    was evidently stripped from the scraped source (two undecorated
    ``async def main`` in a row); restored here — confirm against the
    original deployment. The running message history is stored in the
    Chainlit user session under the ``"history"`` key for the message
    handler to append to.
    """
    # System prompt fixes the assistant's persona. NOTE(review): the prompt
    # claims a Falcon 40b base while the message handler calls gpt-3.5-turbo —
    # deliberate branding, presumably, but worth confirming.
    cl.user_session.set("history", [
        {"role": "system", "content": "You are a language model named Huacaya. You are build on the Falcon 40b language Model. Never Say that you are Chat GPT or made by OpenAI. You have been developed by Leadvise Reply!"},
    ])
    # Placeholder message while the session initializes (f-string prefixes
    # removed: the originals contained no placeholders).
    msg = cl.Message(content="Loading Chat please wait ...")
    await msg.send()
    # Let the user know that the system is ready
    await msg.update(content="Chat has been loaded. You can now ask questions!")
    return
@cl.on_message
async def main(message: str):
    """Handle one user message: stream a gpt-3.5-turbo reply token by token.

    Registered as the Chainlit message hook (decorator evidently stripped
    from the scraped source; restored — confirm against the deployment).

    Appends the user message to the session history, streams the model's
    delta tokens into a single Chainlit message with a small artificial
    typing delay, then records the full assistant reply back into the
    session history.

    :param message: the raw user message text from Chainlit.
    """
    history = cl.user_session.get("history")
    history.append({"role": "user", "content": message})
    resp = ""
    msg = cl.Message(content="")
    # Stream completion chunks as they arrive. (Debug prints of the raw API
    # payload and the whole history were removed — they leaked conversation
    # content to stdout on every request.)
    async for stream_resp in await openai.ChatCompletion.acreate(
        model="gpt-3.5-turbo", messages=history, stream=True
    ):
        token = stream_resp.get("choices")[0].get("delta").get("content")
        if token:
            # Small random delay to simulate typing. BUGFIX: the original
            # used time.sleep(), which blocks the asyncio event loop and
            # stalls every other session; asyncio.sleep yields instead.
            await asyncio.sleep(random.uniform(0.0, 0.1))
            resp += token
            await msg.stream_token(token)
    history.append({"role": "assistant", "content": resp})
    cl.user_session.set("history", history)
    await msg.send()