youngtsai committed
Commit 69f0d33 · 1 Parent(s): a38ffdf

def generate_questions_answers(df_string):

Files changed (1)
  1. app.py +7 -10
app.py CHANGED
@@ -584,6 +584,8 @@ def process_youtube_link(password, link):
     source = "gcs"
     questions = get_questions(video_id, formatted_simple_transcript, source)
     questions_json = json.dumps(questions, ensure_ascii=False, indent=2)
+    questions_answers = get_questions_answers(video_id, formatted_simple_transcript, source)
+    questions_answers_json = json.dumps(questions_answers, ensure_ascii=False, indent=2)
     summary_json = get_video_id_summary(video_id, formatted_simple_transcript, source)
     summary_text = summary_json["summary"]
     summary = summary_json["summary"]
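The two added lines mirror the existing questions_json handling: the generated question/answer pairs are serialized with ensure_ascii=False so non-ASCII text stays readable instead of being escaped, and indent=2 for display. A minimal sketch of that serialization (the list-of-dicts shape is an assumption, not taken from app.py):

    import json

    # Hypothetical shape of the generated question/answer pairs.
    questions_answers = [
        {"question": "影片的主題是什麼？", "answer": "介紹光合作用的基本原理。"},
    ]

    # ensure_ascii=False keeps non-ASCII characters as-is instead of \uXXXX escapes;
    # indent=2 pretty-prints the JSON for the UI.
    questions_answers_json = json.dumps(questions_answers, ensure_ascii=False, indent=2)
    print(questions_answers_json)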
@@ -612,6 +614,7 @@ def process_youtube_link(password, link):
         questions[0] if len(questions) > 0 else "", \
         questions[1] if len(questions) > 1 else "", \
         questions[2] if len(questions) > 2 else "", \
+        questions_answers_json, \
         original_transcript, \
         summary_text, \
         summary, \
@@ -1093,7 +1096,7 @@ def generate_questions(df_string):
 
     return questions
 
-def get_questions_answers(video_id, df_string, source):
+def get_questions_answers(video_id, df_string, source="gcs"):
     if source == "gcs":
         print("===get_questions_answers on gcs===")
         gcs_client = GCS_CLIENT
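The only functional change here is the default source="gcs", so callers that omit the argument keep working. Judging from the surrounding context (GCS_CLIENT and the "on gcs" log line), the function caches its result in Cloud Storage before falling back to generation; a rough sketch of that check-then-generate pattern, where the bucket name, blob path, and the exact return value are assumptions rather than code from app.py:

    import json
    from google.cloud import storage

    GCS_CLIENT = storage.Client()

    def get_questions_answers(video_id, df_string, source="gcs"):
        if source == "gcs":
            print("===get_questions_answers on gcs===")
            # Hypothetical bucket/blob layout; the real paths live in app.py.
            bucket = GCS_CLIENT.bucket("video-material")
            blob = bucket.blob(f"{video_id}/questions_answers.json")
            if blob.exists():
                # Reuse the cached result instead of calling the model again.
                return json.loads(blob.download_as_text())
            questions_answers = generate_questions_answers(df_string)
            blob.upload_from_string(
                json.dumps(questions_answers, ensure_ascii=False, indent=2),
                content_type="application/json",
            )
            return questions_answers
        raise ValueError(f"unsupported source: {source}")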
@@ -1116,7 +1119,6 @@ def get_questions_answers(video_id, df_string, source):
 
     return questions_answers_json
 
-
 def generate_questions_answers(df_string):
     # Use OpenAI to generate questions based on the uploaded data
     if isinstance(df_string, str):
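generate_questions_answers builds a chat-completion request from the transcript; the next hunk shows the payload (gpt-4-turbo with JSON-mode output) once the debug prints are dropped. For reference, a payload of that shape can be sent with the openai Python client roughly as follows; the client setup, system prompt, and response parsing are assumptions, only the model, messages, and response_format fields come from the diff:

    import json
    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment

    messages = [
        {"role": "system", "content": "You are a helpful assistant that replies in JSON."},
        {"role": "user", "content": "..."},  # user_content built from df_string in app.py
    ]

    request_payload = {
        "model": "gpt-4-turbo",
        "messages": messages,
        "response_format": {"type": "json_object"},  # force a JSON-formatted reply
    }

    response = client.chat.completions.create(**request_payload)
    questions_answers = json.loads(response.choices[0].message.content)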
@@ -1140,12 +1142,6 @@ def generate_questions_answers(df_string):
         {"role": "user", "content": user_content}
     ]
     response_format = { "type": "json_object" }
-
-    print("=====messages=====")
-    print(messages)
-    print("=====messages=====")
-
-
     request_payload = {
         "model": "gpt-4-turbo",
         "messages": messages,
@@ -2892,7 +2888,8 @@ with gr.Blocks(theme=gr.themes.Base(primary_hue=gr.themes.colors.orange, seconda
         questions_json,
         btn_1,
         btn_2,
-        btn_3,
+        btn_3,
+        questions_answers_json,
         df_string_output,
         summary_text,
         df_summarise,
@@ -2916,7 +2913,7 @@ with gr.Blocks(theme=gr.themes.Base(primary_hue=gr.themes.colors.orange, seconda
         key_moments,
         btn_1,
         btn_2,
-        btn_3
+        btn_3,
     ]
     update_state_outputs = [
         content_subject_state,
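Gradio maps a handler's return values to its outputs list by position, so adding questions_answers_json to these outputs only works because process_youtube_link now returns the extra questions_answers_json value in the matching slot (second hunk above). A toy illustration of that positional pairing, not code from app.py:

    import gradio as gr

    def handler(link):
        # Each returned value fills the output component at the same position.
        return "questions json...", "questions & answers json...", "transcript..."

    with gr.Blocks() as demo:
        link = gr.Textbox(label="YouTube link")
        questions_json = gr.Textbox(label="questions")
        questions_answers_json = gr.Textbox(label="questions & answers")
        df_string_output = gr.Textbox(label="transcript")
        btn = gr.Button("Process")
        # The order here must match the order of handler's return values.
        btn.click(handler, inputs=[link],
                  outputs=[questions_json, questions_answers_json, df_string_output])

    # demo.launch()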
 