minor fix
routers/get_chatrespone.py  CHANGED
@@ -28,10 +28,9 @@ async def get_chat_respone(body: ChatInputForm, api_key: str = Depends(get_api_k
 
     prompt = get_prompt(body.prompt)
 
-    promptTemplate = PromptTemplate.from_template(prompt)
     try:
         llm = OllamaLLM(
-            model=
+            model=body.repo_id,
             temperature=0.2,
             # huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
         )
@@ -41,8 +40,6 @@ async def get_chat_respone(body: ChatInputForm, api_key: str = Depends(get_api_k
         ("human", body.textInput)
     ]
 
-
-    llm_chain = promptTemplate | llm
     response = llm.stream(messages)
 
     return StreamingResponse(get_response(response), media_type='text/event-stream')
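The commit passes the request's repo_id through to OllamaLLM and removes the unused promptTemplate / llm_chain lines (the response was already produced by llm.stream(messages) directly). Below is a minimal sketch of what the route could look like after the change; ChatInputForm, get_prompt, get_api_key and get_response are hypothetical stand-ins for the repo's own helpers, which are not shown in this diff.

# Sketch of the endpoint after this commit. Only the OllamaLLM call, the
# message list and the streaming return mirror the diff; the helper
# definitions below are assumptions added so the example is self-contained.
from fastapi import APIRouter, Depends, Header, HTTPException
from fastapi.responses import StreamingResponse
from langchain_ollama import OllamaLLM
from pydantic import BaseModel

router = APIRouter()


class ChatInputForm(BaseModel):
    repo_id: str    # Ollama model name, e.g. "llama3" (now taken from the request)
    prompt: str     # key used to look up a system prompt
    textInput: str  # the user's message


def get_prompt(key: str) -> str:
    # Assumed helper: map a prompt key to a system prompt string.
    return "You are a helpful assistant."


def get_api_key(x_api_key: str = Header(...)) -> str:
    # Assumed helper: naive API-key check, for illustration only.
    if x_api_key != "expected-key":
        raise HTTPException(status_code=401, detail="Invalid API key")
    return x_api_key


def get_response(chunks):
    # Assumed helper: re-emit streamed text chunks as server-sent events.
    for chunk in chunks:
        yield f"data: {chunk}\n\n"


@router.post("/get_chat_respone")
async def get_chat_respone(body: ChatInputForm, api_key: str = Depends(get_api_key)):
    prompt = get_prompt(body.prompt)

    try:
        llm = OllamaLLM(
            model=body.repo_id,  # model name comes from the request body, per the diff
            temperature=0.2,
        )
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    messages = [
        ("system", prompt),
        ("human", body.textInput),
    ]

    # OllamaLLM.stream accepts message-like tuples and yields text chunks,
    # so no separate PromptTemplate chain is needed.
    response = llm.stream(messages)
    return StreamingResponse(get_response(response), media_type="text/event-stream")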