anpigon committed
Commit 685b1d3 · 1 Parent(s): 2515509

refactor: Update app.py and libs/llm.py to improve model selection and configuration

Files changed (2)
  1. app.py +8 -1
  2. libs/prompt.py +2 -2
app.py CHANGED
@@ -4,6 +4,7 @@ from kiwipiepy import Kiwi
 from typing import List, Tuple, Generator, Union
 
 from langchain_core.output_parsers import StrOutputParser
+from langchain_core.messages import HumanMessage, AIMessage
 from langchain_core.runnables import RunnablePassthrough, RunnableLambda
 from langchain_community.document_transformers import LongContextReorder
 
@@ -36,8 +37,14 @@ AVAILABLE_MODELS = {
 
 
 def create_rag_chain(chat_history: List[Tuple[str, str]], model: str):
+    print(chat_history)
+    langchain_messages = []
+    for human, ai in chat_history:
+        langchain_messages.append(HumanMessage(content=human))
+        langchain_messages.append(AIMessage(content=ai))
+
     llm = get_llm(streaming=STREAMING).with_config(configurable={"llm": model})
-    prompt = get_prompt(chat_history)
+    prompt = get_prompt().partial(history=langchain_messages)
 
     return (
         {
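In short, create_rag_chain now converts the UI's (human, ai) history tuples into LangChain message objects itself and binds them to the prompt afterwards, instead of handing the raw tuples to get_prompt. A minimal sketch of that conversion, assuming plain tuples as input (the helper name history_to_messages and the sample turns are illustrative, not part of the repo):

from typing import List, Tuple

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def history_to_messages(chat_history: List[Tuple[str, str]]) -> List[BaseMessage]:
    # Each (human, ai) turn becomes a HumanMessage/AIMessage pair, in order.
    messages: List[BaseMessage] = []
    for human, ai in chat_history:
        messages.append(HumanMessage(content=human))
        messages.append(AIMessage(content=ai))
    return messages


# Two prior turns expand into four alternating messages.
print(history_to_messages([("Hi", "Hello!"), ("What is a tort?", "A civil wrong...")]))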
libs/prompt.py CHANGED
@@ -21,11 +21,11 @@ PROMPT_TEMPLATE = """당신은 판사이자 20년 차 법률 전문가입니다.
 """
 
 
-def get_prompt(chat_history):
+def get_prompt():
     return ChatPromptTemplate.from_messages(
         [
             ("system", PROMPT_TEMPLATE),
             MessagesPlaceholder(variable_name="history"),
             ("human", "{question}"),
         ]
-    ).partial(history=chat_history)
+    )
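With get_prompt no longer taking chat_history, callers bind the converted messages via .partial and supply only the remaining question at invoke time. A minimal usage sketch, assuming the same ChatPromptTemplate/MessagesPlaceholder layout as libs/prompt.py (the system text here is an English paraphrase of the repo's Korean PROMPT_TEMPLATE, and the sample history is made up):

from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

# Same shape as libs/prompt.py after the change: system prompt, history slot, question.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a judge and a legal expert with 20 years of experience."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{question}"),
    ]
)

# Bind the already-converted history, leaving only {question} unfilled.
history = [HumanMessage(content="Hi"), AIMessage(content="Hello, how can I help?")]
partial_prompt = prompt.partial(history=history)

# Only the remaining variable is supplied when the chain runs.
print(partial_prompt.invoke({"question": "Summarize my case."}).to_messages())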