import json
import os
import subprocess
import sys

import openai
from pytrends.request import TrendReq
from tavily import TavilyClient
from youtube_comment_downloader import YoutubeCommentDownloader

# pytrends = TrendReq(hl='en-US', tz=360)

# API clients configured from environment variables.
api_key = os.getenv("OPENAI_API_KEY")
client = openai.OpenAI(api_key=api_key)
tavily_api_key = os.getenv("TAVILY_API_KEY")

# Comment fields kept from the downloader's raw JSON records.
_COMMENT_FIELDS = ['text', 'votes', 'replies', 'heart', 'reply', 'time_parsed']


def download_comments(video_id="9P6H2QywDjM", output_file="9P6H2QywDjM.json",
                      limit=10, sort=1):
    """Download YouTube comments via the CLI module and return them as dicts.

    Invokes `youtube_comment_downloader` as a subprocess, which writes one
    JSON object per line to *output_file*, then reads that file back.

    Args:
        video_id: YouTube video ID to fetch comments for.
        output_file: Path where the downloader writes newline-delimited JSON.
        limit: Maximum number of comments to download.
        sort: Sort order flag passed through to the downloader CLI.

    Returns:
        A list of dicts, each restricted to the selected comment fields.

    Raises:
        subprocess.CalledProcessError: if the downloader exits non-zero.
    """
    # Invoke the youtube_comment_downloader module to download comments.
    # argv elements must be strings: limit/sort are ints, so convert them.
    subprocess.run(
        [sys.executable, "-m", "youtube_comment_downloader",
         "--youtubeid", video_id,
         "--output", output_file,
         "--limit", str(limit),
         "--sort", str(sort)],
        check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    with open(output_file, 'r', encoding='utf-8') as f:
        # One JSON object per line; skip blank lines, keep selected fields.
        return [{k: json.loads(line)[k] for k in _COMMENT_FIELDS}
                for line in f if line.strip()]


def download_comments2(video_id="9P6H2QywDjM", limit=10, sort=1):
    """Download YouTube comments in-process via the library API.

    Same output shape as `download_comments`, but without a subprocess or
    an intermediate file.

    Args:
        video_id: YouTube video ID to fetch comments for.
        limit: Maximum number of comments to collect.
        sort: Sort order passed to `get_comments_from_url` (sort_by).

    Returns:
        A list of dicts, each restricted to the selected comment fields.
    """
    comments = []
    downloader = YoutubeCommentDownloader()
    url = f'https://www.youtube.com/watch?v={video_id}'
    for comment in downloader.get_comments_from_url(url, sort_by=sort):
        # .get() tolerates records missing a field (yields None) instead
        # of raising KeyError.
        comments.append({k: comment.get(k) for k in _COMMENT_FIELDS})
        if len(comments) >= limit:
            break
    return comments


def get_tavily_search(keyword):
    """Run an advanced Tavily web search for recent news about *keyword*.

    Returns:
        The raw Tavily search response (includes a generated answer).
    """
    tavily = TavilyClient(api_key=tavily_api_key)
    return tavily.search(
        query=f"{keyword} 최신 뉴스",
        search_depth="advanced",
        max_results=5,
        include_answer=True,
    )


def get_recent_news(keyword):
    """Summarize recent news about *keyword* using Tavily results + GPT.

    Returns:
        The model's summary text (Korean prompt, so likely Korean output).
    """
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "user",
             "content": f"'{keyword}' 관련 최신 뉴스들 요약해주세요\n 내용: {get_tavily_search(keyword)}"}],
        max_tokens=500,
        temperature=0.3)
    return response.choices[0].message.content


def summarize_video(video_id="9P6H2QywDjM"):
    """Summarize the given video. Not yet implemented."""
    # TODO
    return


def get_main_character(summarization):
    """Extract the main character from a summary. Not yet implemented."""
    # TODO
    return