ddiddu committed
Commit ea11672 · verified · 1 Parent(s): a998b3a

Update app.py

Files changed (1):
  1. app.py +2 -25
app.py CHANGED

@@ -12,41 +12,19 @@ cv_json = os.getenv('CV_JSON')
 
 def predict(message, history):
     if find(message):
-        # prompt = (
-        #     # f"As Jisu(she/her/hers)'s personal assistant,"
-        #     # f"Given that: {question_json}, How can I assist with information on: {message}"
-        # )
-
-        # prompt = "As Jisu(she/her/hers)'s personal assistant, Given that: {}, How can I assist with information on: {}".format(question_json, message)
         prompt = os.getenv('PREDICT_PROMPT').format(question_json, message)
-
         response = llm([HumanMessage(content=prompt)])
-
         return response.content
-
-    # prompt = (
-    #     # os.getenv('PREDICT_PROMPT1')
-    #     f"As Jisu(she/her/hers)'s personal assistant,"
-    #     f"Given that: {cv_json}, How can I assist with information on: {message}"
-    #     f"If source does not contains relevant information, I will state that the information is not available."
-    # )
 
-    # prompt = "As Jisu(she/her/hers)'s personal assistant, Given that: {}, How can I assist with information on: {}. If the source does not contain relevant information, I will state that the information is not available.".format(cv_json, message)
     prompt = os.getenv('PREDICT_PROMPT1').format(cv_json, message)
 
     response = llm([HumanMessage(content=prompt)])
     return response.content
 
 def find(message):
-    prompt = (
-        "Given the list of questions about Jisu's CV: \n"
-        "- What are Jisu's current projects?\n"
-        "- What are Jisu's publications?\n"
-        "- How can I reach out to Jisu?\n"
-        f"Determine if the following query matches any of the topics above: '{message}'. Answer 'Yes' if it matches, otherwise answer 'No'."
-    )
-
+    prompt = os.getenv('FIND_PROMPT').format(message)
     response = llm([HumanMessage(content=prompt)])
+
     if response.content.strip() == 'Yes':
         return True
     else:
@@ -57,7 +35,6 @@ examples = [
     "What are Jisu's current projects?",
     "What are Jisu's publications?",
     "How can I reach out to Jisu?",
-    # "How is the answer generated?"
 ]
 
 with gr.Blocks(theme='gradio/soft', fill_height=True) as demo:
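The commit replaces the hard-coded question list in find with a FIND_PROMPT environment variable, matching how PREDICT_PROMPT and PREDICT_PROMPT1 are already loaded. A minimal sketch of the resulting find path is below; the template text, the ChatOpenAI backend, and temperature=0 are illustrative assumptions, since neither the real template nor the app's llm setup appears in this diff.

import os
from langchain.chat_models import ChatOpenAI   # assumed backend; app.py's llm definition is not shown here
from langchain.schema import HumanMessage

# Stand-in template with one positional slot, mirroring
# os.getenv('FIND_PROMPT').format(message) in the patched code.
os.environ.setdefault(
    "FIND_PROMPT",
    "Answer 'Yes' if the query is about Jisu's CV topics, otherwise 'No'. Query: '{}'",
)

llm = ChatOpenAI(temperature=0)  # needs OPENAI_API_KEY; an assumption, not from the diff

def find(message):
    # Same shape as the patched function: format the env template, ask the
    # chat model, and treat a literal 'Yes' as a match.
    prompt = os.getenv("FIND_PROMPT").format(message)
    response = llm([HumanMessage(content=prompt)])
    return response.content.strip() == "Yes"

if __name__ == "__main__":
    print(find("How can I reach out to Jisu?"))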
 
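Since every prompt is now read with os.getenv(...).format(...), each environment value has to carry the right number of positional {} placeholders: two for PREDICT_PROMPT and PREDICT_PROMPT1 (context JSON plus the user message), one for FIND_PROMPT (the message alone). A small self-contained check, using stand-in template and data values rather than the real secrets:

import os

# Stand-in templates; the real values live in the app's environment (e.g. Space secrets).
os.environ["PREDICT_PROMPT"] = "Given that: {}, how can I assist with: {}"        # 2 slots
os.environ["PREDICT_PROMPT1"] = "Given that: {}, how can I assist with: {}"       # 2 slots
os.environ["FIND_PROMPT"] = "Does this query match the CV topics? Query: '{}'"    # 1 slot

question_json = '{"projects": [], "publications": [], "contact": {}}'  # stand-in data
cv_json = '{"cv": {}}'                                                 # stand-in data
message = "What are Jisu's publications?"

# Mirrors the three call sites in the patched app.py. A template with too many
# '{}' slots raises IndexError here; one with too few silently drops arguments.
print(os.getenv("PREDICT_PROMPT").format(question_json, message))
print(os.getenv("PREDICT_PROMPT1").format(cv_json, message))
print(os.getenv("FIND_PROMPT").format(message))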